From e9379891d4d54f8f56cd19af7ad1ebd0ddc0c8f1 Mon Sep 17 00:00:00 2001 From: Littlegnal <8847263+littleGnAl@users.noreply.github.com> Date: Thu, 6 Jun 2024 14:21:59 +0800 Subject: [PATCH] feat: upgrade native sdk 4.3.2 (#1795) Co-authored-by: littleGnAl Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- android/build.gradle | 6 +- .../main/cpp/iris_rtc_rendering_android.cc | 4 + .../third_party/include/agora_rtc/AgoraBase.h | 6 +- .../agora_rtc/IAgoraMusicContentCenter.h | 47 +- internal/deps_summary.txt | 22 +- ios/agora_rtc_engine.podspec | 4 +- lib/src/agora_base.dart | 72 +- lib/src/agora_base.g.dart | 3 - lib/src/agora_media_base.dart | 24 +- lib/src/agora_music_content_center.dart | 35 + lib/src/agora_music_content_center.g.dart | 6 + lib/src/agora_rtc_engine.dart | 232 +-- lib/src/agora_rtc_engine_ex.dart | 7 +- lib/src/agora_spatial_audio.dart | 4 +- .../agora_music_content_center_impl.dart | 36 +- ...ra_music_content_center_impl_override.dart | 39 +- .../io/native_iris_api_engine_bindings.dart | 5 +- macos/agora_rtc_engine.podspec | 4 +- pubspec.yaml | 2 +- scripts/artifacts_version.sh | 8 +- scripts/build-iris-macos.sh | 2 +- .../integration_test/apis_call_fake_test.dart | 5 +- ...udiodevicemanager_fake_test.generated.dart | 26 +- .../h265transcoder_fake_test.generated.dart | 22 +- ...ranscoderobserver_testcases.generated.dart | 10 +- ...patialaudioengine_fake_test.generated.dart | 102 +- ...udioframeobserver_testcases.generated.dart | 28 +- ..._faceinfoobserver_testcases.generated.dart | 2 +- .../mediaengine_fake_test.generated.dart | 180 +- ...odedframeobserver_testcases.generated.dart | 33 +- ...ideoframeobserver_testcases.generated.dart | 217 +- ...audiopcmframesink_testcases.generated.dart | 17 +- ...ospectrumobserver_testcases.generated.dart | 29 +- .../mediaplayer_fake_test.generated.dart | 136 +- ...yersourceobserver_testcases.generated.dart | 74 +- ...ideoframeobserver_testcases.generated.dart | 40 +- .../mediarecorder_fake_test.generated.dart | 14 +- ...arecorderobserver_testcases.generated.dart | 20 +- ...usiccontentcenter_fake_test.generated.dart | 50 +- ...entereventhandler_testcases.generated.dart | 50 +- ...odedframeobserver_testcases.generated.dart | 90 +- ...ospectrumobserver_testcases.generated.dart | 25 +- .../rtcengine_fake_test.generated.dart | 1853 ++++++++--------- ..._metadataobserver_testcases.generated.dart | 14 +- ...ngineeventhandler_testcases.generated.dart | 1254 +++++------ .../rtcengineex_fake_test.generated.dart | 784 ++++--- ...ideodevicemanager_fake_test.generated.dart | 10 +- .../musiccontentcenter_testcases.dart | 53 + ...ngine_rtcengineeventhandler_testcases.dart | 162 ++ test_shard/fake_test_app/lib/main.dart | 4 +- ...ayercachemanager_smoke_test.generated.dart | 12 +- .../fake_agora_video_view_testcases.dart | 4 - .../lib/fake_remote_user_main.dart | 4 +- test_shard/integration_test_app/lib/main.dart | 4 +- .../src/platform/io/iris_tester_bindings.dart | 14 + .../lib/src/platform/io/iris_tester_io.dart | 1 + test_shard/rendering_test/lib/main.dart | 4 +- tool/terra/package.json | 3 +- tool/terra/terra_config_main.yaml | 26 +- .../bin/event_handler_gen_config.dart | 2 + .../bin/method_call_gen_config.dart | 4 +- tool/testcase_gen/build.sh | 2 + tool/testcase_gen/lib/default_generator.dart | 124 +- .../testcase_gen/lib/templated_generator.dart | 13 +- windows/CMakeLists.txt | 4 +- 65 files changed, 3187 insertions(+), 2906 deletions(-) create mode 100644 
test_shard/fake_test_app/integration_test/testcases/musiccontentcenter_testcases.dart diff --git a/android/build.gradle b/android/build.gradle index f9e0ddd56..cf0b205da 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -57,9 +57,9 @@ dependencies { if (isDev(project)) { api fileTree(dir: "libs", include: ["*.jar"]) } else { - api 'io.agora.rtc:iris-rtc:4.3.1-build.1' - api 'io.agora.rtc:full-sdk:4.3.1' - api 'io.agora.rtc:full-screen-sharing:4.3.1' + api 'io.agora.rtc:iris-rtc:4.3.2-build.1' + api 'io.agora.rtc:full-sdk:4.3.2' + api 'io.agora.rtc:full-screen-sharing:4.3.2' } } diff --git a/android/src/main/cpp/iris_rtc_rendering_android.cc b/android/src/main/cpp/iris_rtc_rendering_android.cc index 9a5dd8e8d..ff4d6e171 100644 --- a/android/src/main/cpp/iris_rtc_rendering_android.cc +++ b/android/src/main/cpp/iris_rtc_rendering_android.cc @@ -535,6 +535,10 @@ class YUVRendering final : public RenderingOp { glViewport(0, 0, width, height); CHECK_GL_ERROR() + // Ensure that the unpack alignment is set to 1 byte to avoid any alignment issues with YUV data. + glPixelStorei(GL_UNPACK_ALIGNMENT, 1); + CHECK_GL_ERROR() + glEnableVertexAttribArray(aPositionLoc_); CHECK_GL_ERROR() diff --git a/android/src/main/cpp/third_party/include/agora_rtc/AgoraBase.h b/android/src/main/cpp/third_party/include/agora_rtc/AgoraBase.h index b2be815ca..fc74e5841 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/AgoraBase.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/AgoraBase.h @@ -3124,7 +3124,7 @@ enum REMOTE_USER_STATE { struct VideoTrackInfo { VideoTrackInfo() : isLocal(false), ownerUid(0), trackId(0), channelId(OPTIONAL_NULLPTR) - , streamType(VIDEO_STREAM_HIGH), codecType(VIDEO_CODEC_H265) + , codecType(VIDEO_CODEC_H265) , encodedFrameOnly(false), sourceType(VIDEO_SOURCE_CAMERA_PRIMARY) , observationPosition(agora::media::base::POSITION_POST_CAPTURER) {} /** @@ -3145,10 +3145,6 @@ struct VideoTrackInfo { * The channel ID of the video track. */ const char* channelId; - /** - * The video stream type: #VIDEO_STREAM_TYPE. - */ - VIDEO_STREAM_TYPE streamType; /** * The video codec type: #VIDEO_CODEC_TYPE. */ diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMusicContentCenter.h b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMusicContentCenter.h index c3e4a5bc3..ebb4d5237 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMusicContentCenter.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMusicContentCenter.h @@ -13,6 +13,28 @@ namespace agora { namespace rtc { +/** + * Modes for playing songs. + */ +typedef enum +{ + /** + * 0: The music player is in the origin mode, which means playing the original song. + */ + kMusicPlayModeOriginal = 0, + + /** + * 1: The music player is in the accompany mode, which means playing the accompaniment only. + */ + kMusicPlayModeAccompany = 1, + + /** + * 2: The music player is in the lead sing mode, which means playing the lead vocals. + */ + kMusicPlayModeLeadSing = 2, + +} MusicPlayMode; + typedef enum { /** @@ -329,6 +351,18 @@ class IMusicPlayer : public IMediaPlayer { * - < 0: Failure. */ virtual int open(int64_t songCode, int64_t startPos = 0) = 0; + + /** + * Set the mode for playing songs. + * You can call this method to switch from original to accompaniment or lead vocals. + * If you do not call this method to set the mode, the SDK plays the accompaniment by default. + * + * @param model The playing mode. + * @return + * - 0: Success. + * - < 0: Failure. 
+ */ + virtual int setPlayMode(MusicPlayMode mode) = 0; }; class IMusicContentCenter @@ -383,6 +417,15 @@ class IMusicContentCenter * - The empty pointer NULL, if the method call fails. */ virtual agora_refptr createMusicPlayer() = 0; + + /** + * Destroy a music player source object and return result. + * @param music_player The pointer to \ref rtc::IMusicPlayer "IMusicPlayer". + * @return + * - 0: Success. + * - < 0: Failure. + */ + virtual int destroyMusicPlayer(agora_refptr music_player) = 0; /** * Get music chart collection of music. @@ -501,12 +544,12 @@ class IMusicContentCenter * * @param requestId The request id you will get of this query, format is uuid. * @param songCode The identifier of the media file that you want to play. - * @param LyricType The type of the lyric file. 0:xml or 1:lrc. + * @param lyricType The type of the lyric file. 0:xml or 1:lrc. * @return * - 0: Success. * - < 0: Failure. */ - virtual int getLyric(agora::util::AString& requestId, int64_t songCode, int32_t LyricType = 0) = 0; + virtual int getLyric(agora::util::AString& requestId, int64_t songCode, int32_t lyricType = 0) = 0; /** * Gets the metadata of a specific music. Once this method is called, the SDK triggers the onSongSimpleInfoResult callback to report the metadata of the music. diff --git a/internal/deps_summary.txt b/internal/deps_summary.txt index 22c1ed3ac..db0574bb9 100644 --- a/internal/deps_summary.txt +++ b/internal/deps_summary.txt @@ -1,18 +1,18 @@ Iris: -https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_Android_Video_20240429_1017_481.zip -https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_iOS_Video_20240428_0641_388.zip -https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_Mac_Video_20240428_0641_389.zip -https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_Windows_Video_20240428_0641_423.zip -implementation 'io.agora.rtc:iris-rtc:4.3.1-build.1' -pod 'AgoraIrisRTC_iOS', '4.3.1-build.1' -pod 'AgoraIrisRTC_macOS', '4.3.1-build.1' +https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_Android_Video_20240604_0456_504.zip +https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_iOS_Video_20240604_0459_409.zip +https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_Mac_Video_20240604_0500_404.zip +https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_Windows_Video_20240604_0456_441.zip +implementation 'io.agora.rtc:iris-rtc:4.3.2-build.1' +pod 'AgoraIrisRTC_iOS', '4.3.2-build.1' +pod 'AgoraIrisRTC_macOS', '4.3.2-build.1' Native: -implementation 'io.agora.rtc:full-sdk:4.3.1' -implementation 'io.agora.rtc:full-screen-sharing:4.3.1' -pod 'AgoraRtcEngine_iOS', '4.3.1' -pod 'AgoraRtcEngine_macOS', '4.3.1' \ No newline at end of file +implementation 'io.agora.rtc:full-sdk:4.3.2' +implementation 'io.agora.rtc:full-screen-sharing:4.3.2' +pod 'AgoraRtcEngine_iOS', '4.3.2' +pod 'AgoraRtcEngine_macOS', '4.3.2' \ No newline at end of file diff --git a/ios/agora_rtc_engine.podspec b/ios/agora_rtc_engine.podspec index 701739824..926f6d303 100644 --- a/ios/agora_rtc_engine.podspec +++ b/ios/agora_rtc_engine.podspec @@ -23,8 +23,8 @@ Pod::Spec.new do |s| puts '[plugin_dev] Found .plugin_dev file, use vendored_frameworks instead.' 
s.vendored_frameworks = 'libs/*.xcframework' else - s.dependency 'AgoraIrisRTC_iOS', '4.3.1-build.1' - s.dependency 'AgoraRtcEngine_iOS', '4.3.1' + s.dependency 'AgoraIrisRTC_iOS', '4.3.2-build.1' + s.dependency 'AgoraRtcEngine_iOS', '4.3.2' end s.platform = :ios, '9.0' diff --git a/lib/src/agora_base.dart b/lib/src/agora_base.dart index f099d1c6c..d72667f29 100644 --- a/lib/src/agora_base.dart +++ b/lib/src/agora_base.dart @@ -587,11 +587,11 @@ extension UserOfflineReasonTypeExt on UserOfflineReasonType { /// The interface class. @JsonEnum(alwaysCreate: true) enum InterfaceIdType { - /// The AudioDeviceManager interface class. + /// 1: The AudioDeviceManager interface class. @JsonValue(1) agoraIidAudioDeviceManager, - /// The VideoDeviceManager interface class. + /// 2: The VideoDeviceManager interface class. @JsonValue(2) agoraIidVideoDeviceManager, @@ -599,7 +599,7 @@ enum InterfaceIdType { @JsonValue(3) agoraIidParameterEngine, - /// The MediaEngine interface class. + /// 4: The MediaEngine interface class. @JsonValue(4) agoraIidMediaEngine, @@ -1723,7 +1723,7 @@ extension VideoMirrorModeTypeExt on VideoMirrorModeType { } } -/// The bit mask that indicates the device codec capability. +/// The bit mask of the codec type. @JsonEnum(alwaysCreate: true) enum CodecCapMask { /// (0): The device does not support encoding or decoding. @@ -1782,7 +1782,7 @@ class CodecCapLevels { Map toJson() => _$CodecCapLevelsToJson(this); } -/// The codec capability of the device. +/// The codec capability of the SDK. @JsonSerializable(explicitToJson: true, includeIfNull: false) class CodecCapInfo { /// @nodoc @@ -1792,11 +1792,11 @@ class CodecCapInfo { @JsonKey(name: 'codecType') final VideoCodecType? codecType; - /// The bit mask of the codec type. See CodecCapMask. + /// Bit mask of the codec types in SDK. See CodecCapMask. @JsonKey(name: 'codecCapMask') final int? codecCapMask; - /// The level of the codec capability. See CodecCapLevels. + /// Codec capability of the SDK. See CodecCapLevels. @JsonKey(name: 'codecLevels') final CodecCapLevels? codecLevels; @@ -1859,7 +1859,7 @@ class VideoEncoderConfiguration { @JsonKey(name: 'frameRate') final int? frameRate; - /// The encoding bitrate (Kbps) of the video. This parameter does not need to be set; keeping the default value standardBitrate is sufficient. The SDK automatically matches the most suitable bitrate based on the video resolution and frame rate you have set. For the correspondence between video resolution, frame rate, and bitrate, please refer to. standardBitrate (0): (Recommended) Standard bitrate mode. compatibleBitrate (-1): Adaptive bitrate mode. In general, Agora suggests that you do not use this value. + /// The encoding bitrate (Kbps) of the video. This parameter does not need to be set; keeping the default value standardBitrate is sufficient. The SDK automatically matches the most suitable bitrate based on the video resolution and frame rate you have set. For the correspondence between video resolution and frame rate, see. standardBitrate (0): (Recommended) Standard bitrate mode. compatibleBitrate (-1): Adaptive bitrate mode. In general, Agora suggests that you do not use this value. @JsonKey(name: 'bitrate') final int? bitrate; @@ -3266,7 +3266,6 @@ class VideoTrackInfo { this.ownerUid, this.trackId, this.channelId, - this.streamType, this.codecType, this.encodedFrameOnly, this.sourceType, @@ -3288,10 +3287,6 @@ class VideoTrackInfo { @JsonKey(name: 'channelId') final String? 
channelId; - /// @nodoc - @JsonKey(name: 'streamType') - final VideoStreamType? streamType; - /// @nodoc @JsonKey(name: 'codecType') final VideoCodecType? codecType; @@ -3985,7 +3980,7 @@ class LiveTranscoding { @JsonKey(name: 'height') final int? height; - /// Bitrate of the output video stream for Media Push in Kbps. The default value is 400 Kbps. Set this member according to the table. If you set a bitrate beyond the proper range, the SDK automatically adapts it to a value within the range. + /// The encoding bitrate (Kbps) of the video. This parameter does not need to be set; keeping the default value standardBitrate is sufficient. The SDK automatically matches the most suitable bitrate based on the video resolution and frame rate you have set. For the correspondence between video resolution and frame rate, see. @JsonKey(name: 'videoBitrate') final int? videoBitrate; @@ -4364,11 +4359,11 @@ enum ConnectionChangedReasonType { @JsonValue(2) connectionChangedInterrupted, - /// 3: The connection between the SDK and the Agora edge server is banned by the Agora edge server. This error occurs when the user is kicked out of the channel by the server. + /// 3: The connection between the SDK and the Agora edge server is banned by the Agora edge server. For example, when a user is kicked out of the channel, this status will be returned. @JsonValue(3) connectionChangedBannedByServer, - /// 4: The SDK fails to join the channel. When the SDK fails to join the channel for more than 20 minutes, this error occurs and the SDK stops reconnecting to the channel. + /// 4: The SDK fails to join the channel. When the SDK fails to join the channel for more than 20 minutes, this code will be returned and the SDK stops reconnecting to the channel. You need to prompt the user to try to switch to another network and rejoin the channel. @JsonValue(4) connectionChangedJoinFailed, @@ -4376,21 +4371,30 @@ enum ConnectionChangedReasonType { @JsonValue(5) connectionChangedLeaveChannel, - /// 6: The connection failed because the App ID is not valid. Please rejoin the channel with a valid App ID. + /// 6: The App ID is invalid. You need to rejoin the channel with a valid APP ID and make sure the App ID you are using is consistent with the one generated in the Agora Console. @JsonValue(6) connectionChangedInvalidAppId, - /// 7: The connection failed since channel name is not valid. Rejoin the channel with a valid channel name. + /// 7: Invalid channel name. Rejoin the channel with a valid channel name. A valid channel name is a string of up to 64 bytes in length. Supported characters (89 characters in total): + /// All lowercase English letters: a to z. + /// All uppercase English letters: A to Z. + /// All numeric characters: 0 to 9. + /// Space + /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," @JsonValue(7) connectionChangedInvalidChannelName, - /// 8: The connection failed because the token is not valid. Possible reasons are as follows: - /// The App Certificate for the project is enabled in Agora Console, but you do not use a token when joining the channel. If you enable the App Certificate, you must use a token to join the channel. + /// 8: Invalid token. Possible reasons are as follows: + /// The App Certificate for the project is enabled in Agora Console, but you do not pass in a token when joining a channel. 
/// The uid specified when calling joinChannel to join the channel is inconsistent with the uid passed in when generating the token. + /// The generated token and the token used to join the channel are not consistent. Ensure the following: + /// When your project enables App Certificate, you need to pass in a token to join a channel. + /// The user ID specified when generating the token is consistent with the user ID used when joining the channel. + /// The generated token is the same as the token passed in to join the channel. @JsonValue(8) connectionChangedInvalidToken, - /// 9: The connection failed since token is expired. + /// (9): The token currently being used has expired. You need to generate a new token on your server and rejoin the channel with the new token. @JsonValue(9) connectionChangedTokenExpired, @@ -4408,7 +4412,7 @@ enum ConnectionChangedReasonType { @JsonValue(12) connectionChangedRenewToken, - /// 13: The IP address of the client has changed, possibly because the network type, IP address, or port has been changed. + /// (13): Client IP address changed. If you receive this code multiple times, You need to prompt the user to switch networks and try joining the channel again. @JsonValue(13) connectionChangedClientIpAddressChanged, @@ -5503,7 +5507,7 @@ class ScreenCaptureParameters { @JsonKey(name: 'captureMouseCursor') final bool? captureMouseCursor; - /// Whether to bring the window to the front when calling the startScreenCaptureByWindowId method to share it: true : Bring the window to the front. false : (Default) Do not bring the window to the front. + /// Whether to bring the window to the front when calling the startScreenCaptureByWindowId method to share it: true : Bring the window to the front. false : (Default) Do not bring the window to the front. Due to macOS system limitations, when setting this member to bring the window to the front, if the current app has multiple windows, only the main window will be brought to the front. @JsonKey(name: 'windowFocus') final bool? windowFocus; @@ -6630,41 +6634,41 @@ class VideoRenderingTracingInfo { this.remoteJoined2UnmuteVideo, this.remoteJoined2PacketReceived}); - /// The time interval from calling the startMediaRenderingTracing method to SDK triggering the onVideoRenderingTracingResult callback. The unit is milliseconds. Agora recommends you call startMediaRenderingTracing before joining a channel. + /// The time interval (ms) from startMediaRenderingTracing to SDK triggering the onVideoRenderingTracingResult callback. Agora recommends you call startMediaRenderingTracing before joining a channel. @JsonKey(name: 'elapsedTime') final int? elapsedTime; - /// The time interval from calling startMediaRenderingTracing to calling joinChannel. The unit is milliseconds. A negative number means to call joinChannel after calling startMediaRenderingTracing. + /// The time interval (ms) from startMediaRenderingTracing to joinChannel. A negative number indicates that startMediaRenderingTracing is called after calling joinChannel. @JsonKey(name: 'start2JoinChannel') final int? start2JoinChannel; - /// Time interval from calling joinChannel to successfully joining the channel. The unit is milliseconds. + /// The time interval (ms) from or joinChannel to successfully joining the channel. @JsonKey(name: 'join2JoinSuccess') final int? 
join2JoinSuccess; - /// If the local user calls startMediaRenderingTracing before successfully joining the channel, this value is the time interval from the local user successfully joining the channel to the remote user joining the channel. The unit is milliseconds. - /// If the local user calls startMediaRenderingTracing after successfully joining the channel, the value is the time interval from calling startMediaRenderingTracing to when the remote user joins the channel. The unit is milliseconds. + /// If the local user calls startMediaRenderingTracing before successfully joining the channel, this value is the time interval (ms) from the local user successfully joining the channel to the remote user joining the channel. + /// If the local user calls startMediaRenderingTracing after successfully joining the channel, the value is the time interval (ms) from startMediaRenderingTracing to when the remote user joins the channel. /// If the local user calls startMediaRenderingTracing after the remote user joins the channel, the value is 0 and meaningless. /// In order to reduce the time of rendering the first frame for remote users, Agora recommends that the local user joins the channel when the remote user is in the channel to reduce this value. @JsonKey(name: 'joinSuccess2RemoteJoined') final int? joinSuccess2RemoteJoined; - /// If the local user calls startMediaRenderingTracing before the remote user joins the channel, this value is the time interval from when the remote user joins the channel to when the local user sets the remote view. The unit is milliseconds. - /// If the local user calls startMediaRenderingTracing after the remote user joins the channel, this value is the time interval from calling startMediaRenderingTracing to setting the remote view. The unit is milliseconds. + /// If the local user calls startMediaRenderingTracing before the remote user joins the channel, this value is the time interval (ms) from when the remote user joins the channel to when the local user sets the remote view. + /// If the local user calls startMediaRenderingTracing after the remote user joins the channel, this value is the time interval (ms) from calling startMediaRenderingTracing to setting the remote view. /// If the local user calls startMediaRenderingTracing after setting the remote view, the value is 0 and has no effect. /// In order to reduce the time of rendering the first frame for remote users, Agora recommends that the local user sets the remote view before the remote user joins the channel, or sets the remote view immediately after the remote user joins the channel to reduce this value. @JsonKey(name: 'remoteJoined2SetView') final int? remoteJoined2SetView; - /// If the local user calls startMediaRenderingTracing before the remote user joins the channel, this value is the time interval from the remote user joining the channel to subscribing to the remote video stream. The unit is milliseconds. - /// If the local user calls startMediaRenderingTracing after the remote user joins the channel, this value is the time interval from calling startMediaRenderingTracing to subscribing to the remote video stream. The unit is milliseconds. + /// If the local user calls startMediaRenderingTracing before the remote user joins the channel, this value is the time interval (ms) from the remote user joining the channel to subscribing to the remote video stream. 
+ /// If the local user calls startMediaRenderingTracing after the remote user joins the channel, this value is the time interval (ms) from startMediaRenderingTracing to subscribing to the remote video stream. /// If the local user calls startMediaRenderingTracing after subscribing to the remote video stream, the value is 0 and has no effect. /// In order to reduce the time of rendering the first frame for remote users, Agora recommends that after the remote user joins the channel, the local user immediately subscribes to the remote video stream to reduce this value. @JsonKey(name: 'remoteJoined2UnmuteVideo') final int? remoteJoined2UnmuteVideo; - /// If the local user calls startMediaRenderingTracing before the remote user joins the channel, this value is the time interval from when the remote user joins the channel to when the local user receives the remote video stream. The unit is milliseconds. - /// If the local user calls startMediaRenderingTracing after the remote user joins the channel, this value is the time interval from calling startMediaRenderingTracing to receiving the remote video stream. The unit is milliseconds. + /// If the local user calls startMediaRenderingTracing before the remote user joins the channel, this value is the time interval (ms) from when the remote user joins the channel to when the local user receives the remote video stream. + /// If the local user calls startMediaRenderingTracing after the remote user joins the channel, this value is the time interval (ms) from startMediaRenderingTracing to receiving the remote video stream. /// If the local user calls startMediaRenderingTracing after receiving the remote video stream, the value is 0 and has no effect. /// In order to reduce the time of rendering the first frame for remote users, Agora recommends that the remote user publishes video streams immediately after joining the channel, and the local user immediately subscribes to remote video streams to reduce this value. @JsonKey(name: 'remoteJoined2PacketReceived') diff --git a/lib/src/agora_base.g.dart b/lib/src/agora_base.g.dart index b79f032c5..3b6728e30 100644 --- a/lib/src/agora_base.g.dart +++ b/lib/src/agora_base.g.dart @@ -726,8 +726,6 @@ VideoTrackInfo _$VideoTrackInfoFromJson(Map json) => ownerUid: (json['ownerUid'] as num?)?.toInt(), trackId: (json['trackId'] as num?)?.toInt(), channelId: json['channelId'] as String?, - streamType: - $enumDecodeNullable(_$VideoStreamTypeEnumMap, json['streamType']), codecType: $enumDecodeNullable(_$VideoCodecTypeEnumMap, json['codecType']), encodedFrameOnly: json['encodedFrameOnly'] as bool?, @@ -749,7 +747,6 @@ Map _$VideoTrackInfoToJson(VideoTrackInfo instance) { writeNotNull('ownerUid', instance.ownerUid); writeNotNull('trackId', instance.trackId); writeNotNull('channelId', instance.channelId); - writeNotNull('streamType', _$VideoStreamTypeEnumMap[instance.streamType]); writeNotNull('codecType', _$VideoCodecTypeEnumMap[instance.codecType]); writeNotNull('encodedFrameOnly', instance.encodedFrameOnly); writeNotNull('sourceType', _$VideoSourceTypeEnumMap[instance.sourceType]); diff --git a/lib/src/agora_media_base.dart b/lib/src/agora_media_base.dart index 629b3ea54..93017c997 100644 --- a/lib/src/agora_media_base.dart +++ b/lib/src/agora_media_base.dart @@ -1317,12 +1317,12 @@ class AudioFrameObserver extends AudioFrameObserverBase { onEarMonitoringAudioFrame: onEarMonitoringAudioFrame, ); - /// Retrieves the audio frame of a specified user before mixing. 
+ /// Retrieves the audio frame before mixing of subscribed remote users. /// /// Due to framework limitations, this callback does not support sending processed audio data back to the SDK. /// /// * [channelId] The channel ID. - /// * [uid] The user ID of the specified user. + /// * [uid] The ID of subscribed remote users. /// * [audioFrame] The raw audio data. See AudioFrame. final void Function(String channelId, int uid, AudioFrame audioFrame)? onPlaybackAudioFrameBeforeMixing; @@ -1434,9 +1434,9 @@ class VideoFrameObserver { /// /// * [sourceType] Video source types, including cameras, screens, or media player. See VideoSourceType. /// * [videoFrame] The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows: - /// Android: I420 or RGB (GLES20.GL_TEXTURE_2D) - /// iOS: I420 or CVPixelBufferRef - /// macOS: I420 or CVPixelBufferRef + /// Android: I420 + /// iOS: I420 + /// macOS: I420 /// Windows: YUV420 final void Function(VideoSourceType sourceType, VideoFrame videoFrame)? onCaptureVideoFrame; @@ -1448,9 +1448,9 @@ class VideoFrameObserver { /// The video data that this callback gets has been preprocessed, with its content cropped and rotated, and the image enhanced. /// /// * [videoFrame] The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows: - /// Android: I420 or RGB (GLES20.GL_TEXTURE_2D) - /// iOS: I420 or CVPixelBufferRef - /// macOS: I420 or CVPixelBufferRef + /// Android: I420 + /// iOS: I420 + /// macOS: I420 /// Windows: YUV420 /// * [sourceType] The type of the video source. See VideoSourceType. final void Function(VideoSourceType sourceType, VideoFrame videoFrame)? @@ -1467,9 +1467,9 @@ class VideoFrameObserver { /// Due to framework limitations, this callback does not support sending processed video data back to the SDK. /// /// * [videoFrame] The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows: - /// Android: I420 or RGB (GLES20.GL_TEXTURE_2D) - /// iOS: I420 or CVPixelBufferRef - /// macOS: I420 or CVPixelBufferRef + /// Android: I420 + /// iOS: I420 + /// macOS: I420 /// Windows: YUV420 /// * [remoteUid] The user ID of the remote user who sends the current video frame. /// * [channelId] The channel ID. @@ -1687,7 +1687,7 @@ class MediaRecorderConfiguration { /// Facial information observer. /// -/// You can call registerFaceInfoObserver to register or unregister the FaceInfoObserver object. +/// You can call registerFaceInfoObserver to register one FaceInfoObserver observer. 
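// A minimal sketch, assuming the RtcEngine.getMediaEngine() accessor and
// MediaEngine.registerAudioFrameObserver() from the public agora_rtc_engine
// Dart API, of how an app might receive the per-user pre-mixing frames
// described above.
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

void observePreMixingAudio(RtcEngine engine) {
  engine.getMediaEngine().registerAudioFrameObserver(AudioFrameObserver(
    onPlaybackAudioFrameBeforeMixing:
        (String channelId, int uid, AudioFrame audioFrame) {
      // Inspection only: processed data cannot be sent back to the SDK.
      print('pre-mixing audio frame from uid $uid in channel $channelId');
    },
  ));
}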
class FaceInfoObserver { /// @nodoc const FaceInfoObserver({ diff --git a/lib/src/agora_music_content_center.dart b/lib/src/agora_music_content_center.dart index 522ccb303..a65f1935d 100644 --- a/lib/src/agora_music_content_center.dart +++ b/lib/src/agora_music_content_center.dart @@ -1,6 +1,35 @@ import 'package:agora_rtc_engine/src/binding_forward_export.dart'; part 'agora_music_content_center.g.dart'; +/// @nodoc +@JsonEnum(alwaysCreate: true) +enum MusicPlayMode { + /// @nodoc + @JsonValue(0) + kMusicPlayModeOriginal, + + /// @nodoc + @JsonValue(1) + kMusicPlayModeAccompany, + + /// @nodoc + @JsonValue(2) + kMusicPlayModeLeadSing, +} + +/// @nodoc +extension MusicPlayModeExt on MusicPlayMode { + /// @nodoc + static MusicPlayMode fromValue(int value) { + return $enumDecode(_$MusicPlayModeEnumMap, value); + } + + /// @nodoc + int value() { + return _$MusicPlayModeEnumMap[this]!; + } +} + /// @nodoc @JsonEnum(alwaysCreate: true) enum PreloadState { @@ -393,6 +422,9 @@ class MusicContentCenterConfiguration { /// @nodoc abstract class MusicPlayer implements MediaPlayer { + /// @nodoc + Future setPlayMode(MusicPlayMode mode); + /// @nodoc Future openWithSongCode({required int songCode, int startPos = 0}); } @@ -417,6 +449,9 @@ abstract class MusicContentCenter { /// @nodoc Future createMusicPlayer(); + /// @nodoc + Future destroyMusicPlayer(MusicPlayer musicPlayer); + /// @nodoc Future getMusicCharts(); diff --git a/lib/src/agora_music_content_center.g.dart b/lib/src/agora_music_content_center.g.dart index 39962ca59..f2a5bcc83 100644 --- a/lib/src/agora_music_content_center.g.dart +++ b/lib/src/agora_music_content_center.g.dart @@ -172,6 +172,12 @@ Map _$MusicContentCenterConfigurationToJson( return val; } +const _$MusicPlayModeEnumMap = { + MusicPlayMode.kMusicPlayModeOriginal: 0, + MusicPlayMode.kMusicPlayModeAccompany: 1, + MusicPlayMode.kMusicPlayModeLeadSing: 2, +}; + const _$PreloadStateEnumMap = { PreloadState.kPreloadStateCompleted: 0, PreloadState.kPreloadStateFailed: 1, diff --git a/lib/src/agora_rtc_engine.dart b/lib/src/agora_rtc_engine.dart index 08011d3fc..41ea7779b 100644 --- a/lib/src/agora_rtc_engine.dart +++ b/lib/src/agora_rtc_engine.dart @@ -1300,7 +1300,7 @@ class ImageTrackOptions { /// The channel media options. /// -/// Agora supports publishing multiple audio streams and one video stream at the same time and in the same RtcConnection. For example, publishMicrophoneTrack, publishCustomAudioTrack, and publishMediaPlayerAudioTrack can be set as true at the same time, but only one of publishCameraTrack, publishScreenCaptureVideo publishScreenTrack, publishCustomVideoTrack, or publishEncodedVideoTrack can be set as true. Agora recommends that you set member parameter values yourself according to your business scenario, otherwise the SDK will automatically assign values to member parameters. +/// Agora supports publishing multiple audio streams and one video stream at the same time and in the same RtcConnection. For example, publishMicrophoneTrack, publishCustomAudioTrack, and publishMediaPlayerAudioTrack can be set as true at the same time, but only one of publishCameraTrack, publishScreenCaptureVideo, publishScreenTrack, publishCustomVideoTrack, or publishEncodedVideoTrack can be set as true. Agora recommends that you set member parameter values yourself according to your business scenario, otherwise the SDK will automatically assign values to member parameters. 
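// A minimal sketch of the new MusicPlayer.setPlayMode and
// MusicContentCenter.destroyMusicPlayer calls added above. It assumes the
// existing RtcEngine.getMusicContentCenter() accessor and the appId/mccUid/token
// fields of MusicContentCenterConfiguration; the placeholder values are not real.
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> playWithMusicContentCenter(RtcEngine engine, int songCode) async {
  final mcc = engine.getMusicContentCenter();
  await mcc.initialize(MusicContentCenterConfiguration(
    appId: '<your app id>', // placeholder
    mccUid: 12345, // placeholder
    token: '<your mcc token>', // placeholder
  ));

  final player = await mcc.createMusicPlayer();
  if (player == null) {
    return;
  }

  // The SDK plays the accompaniment by default; switch modes explicitly if needed.
  await player.setPlayMode(MusicPlayMode.kMusicPlayModeOriginal);
  await player.openWithSongCode(songCode: songCode);

  // ... playback ...

  // Release the player through the new destroy API when it is no longer needed.
  await mcc.destroyMusicPlayer(player);
}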
@JsonSerializable(explicitToJson: true, includeIfNull: false) class ChannelMediaOptions { /// @nodoc @@ -1777,9 +1777,7 @@ class RtcEngineEventHandler { /// * [stats] The statistics of the call. See RtcStats. final void Function(RtcConnection connection, RtcStats stats)? onLeaveChannel; - /// Reports the statistics of the current call. - /// - /// The SDK triggers this callback once every two seconds after the user joins the channel. + /// Reports the statistics about the current call. /// /// * [connection] The connection information. See RtcConnection. /// * [stats] Statistics of the RTC engine. See RtcStats. @@ -1865,7 +1863,7 @@ class RtcEngineEventHandler { /// * [source] The type of the video source. See VideoSourceType. /// * [width] The width (px) of the first local video frame. /// * [height] The height (px) of the first local video frame. - /// * [elapsed] Time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. If you call startPreview before calling joinChannel, then this parameter is the time elapsed from calling the startPreview method until the SDK triggers this callback. + /// * [elapsed] The time elapsed (ms) from the local user calling joinChannel to join the channel to when the SDK triggers this callback. If startPreviewWithoutSourceType / startPreview is called before joining the channel, this parameter indicates the time elapsed from calling startPreviewWithoutSourceType or startPreview to when this event occurred. final void Function( VideoSourceType source, int width, int height, int elapsed)? onFirstLocalVideoFrame; @@ -1873,12 +1871,12 @@ class RtcEngineEventHandler { /// Occurs when the first video frame is published. /// /// The SDK triggers this callback under one of the following circumstances: - /// The local client enables the video module and calls joinChannel successfully. + /// The local client enables the video module and calls joinChannel to join the channel successfully. /// The local client calls muteLocalVideoStream (true) and muteLocalVideoStream (false) in sequence. /// The local client calls disableVideo and enableVideo in sequence. /// /// * [connection] The connection information. See RtcConnection. - /// * [elapsed] Time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. + /// * [elapsed] Time elapsed (ms) from the local user calling joinChannel until this callback is triggered. final void Function(VideoSourceType source, int elapsed)? onFirstLocalVideoFramePublished; @@ -2171,19 +2169,25 @@ class RtcEngineEventHandler { /// Occurs when the token expires. /// - /// When the token expires during a call, the SDK triggers this callback to remind the app to renew the token. When receiving this callback, you need to generate a new token on your token server and you can renew your token through one of the following ways: + /// The SDK triggers this callback if the token expires. When receiving this callback, you need to generate a new token on your token server and you can renew your token through one of the following ways: + /// In scenarios involving one channel: /// Call renewToken to pass in the new token. /// Call leaveChannel to leave the current channel and then pass in the new token when you call joinChannel to join a channel. + /// In scenarios involving mutiple channels: Call updateChannelMediaOptionsEx to pass in the new token. /// /// * [connection] The connection information. See RtcConnection. final void Function(RtcConnection connection)? 
onRequestToken; /// Occurs when the token expires in 30 seconds. /// - /// When the token is about to expire in 30 seconds, the SDK triggers this callback to remind the app to renew the token. Upon receiving this callback, you need to generate a new token on your server, and call renewToken to pass the new token to the SDK. In scenarios involving multiple channels, you need to call updateChannelMediaOptionsEx to pass the new token to the SDK. + /// When receiving this callback, you need to generate a new token on your token server and you can renew your token through one of the following ways: + /// In scenarios involving one channel: + /// Call renewToken to pass in the new token. + /// Call leaveChannel to leave the current channel and then pass in the new token when you call joinChannel to join a channel. + /// In scenarios involving mutiple channels: Call updateChannelMediaOptionsEx to pass in the new token. /// /// * [connection] The connection information. See RtcConnection. - /// * [token] The token that expires in 30 seconds. + /// * [token] The token that is about to expire. final void Function(RtcConnection connection, String token)? onTokenPrivilegeWillExpire; @@ -3108,7 +3112,7 @@ abstract class RtcEngine { /// The specific error or warning description. Future getErrorDescription(int code); - /// Queries the current device's supported video codec capabilities. + /// Queries the video codec capabilities of the SDK. /// /// * [size] The size of CodecCapInfo. /// @@ -3126,11 +3130,7 @@ abstract class RtcEngine { /// Preloads a channel with token, channelId, and uid. /// - /// When audience members need to switch between different channels frequently, calling the method can help shortening the time of joining a channel, thus reducing the time it takes for audience members to hear and see the host. As it may take a while for the SDK to preload a channel, Agora recommends that you call this method as soon as possible after obtaining the channel name and user ID to join a channel. - /// When calling this method, ensure you set the user role as audience and do not set the audio scenario as audioScenarioChorus, otherwise, this method does not take effect. - /// You also need to make sure that the channel name, user ID and token passed in for preloading are the same as the values passed in when joinning the channel, otherwise, this method does not take effect. - /// One RtcEngine instance supports preloading 20 channels at most. When exceeding this limit, the latest 20 preloaded channels take effect. - /// Failing to preload a channel does not mean that you can't join a channel, nor will it increase the time of joining a channel. If you join a preloaded channel, leave it and want to rejoin the same channel, you do not need to call this method unless the token for preloading the channel expires. + /// When audience members need to switch between different channels frequently, calling the method can help shortening the time of joining a channel, thus reducing the time it takes for audience members to hear and see the host. If you join a preloaded channel, leave it and want to rejoin the same channel, you do not need to call this method unless the token for preloading the channel expires. Failing to preload a channel does not mean that you can't join a channel, nor will it increase the time of joining a channel. /// /// * [token] The token generated on your server for authentication. 
When the token for preloading channels expires, you can update the token based on the number of channels you preload. /// When preloading one channel, calling this method to pass in the new token. @@ -3143,7 +3143,7 @@ abstract class RtcEngine { /// All numeric characters: 0 to 9. /// Space /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - /// * [uid] The user ID. This parameter is used to identify the user in the channel for real-time audio and video interaction. You need to set and manage user IDs yourself, and ensure that each user ID in the same channel is unique. This parameter is a 32-bit unsigned integer. The value range is 1 to 2 32 -1. If the user ID is not assigned (or set to 0), the SDK assigns a random user ID and returns it in the onJoinChannelSuccess callback. Your application must record and maintain the returned user ID, because the SDK does not do so. + /// * [uid] The user ID. This parameter is used to identify the user in the channel for real-time audio and video interaction. You need to set and manage user IDs yourself, and ensure that each user ID in the same channel is unique. This parameter is a 32-bit unsigned integer. The value range is 1 to 2 32 -1. If the user ID is not assigned (or set to 0), the SDK assigns a random user ID and onJoinChannelSuccess returns it in the callback. Your application must record and maintain the returned user ID, because the SDK does not do so. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3185,7 +3185,7 @@ abstract class RtcEngine { /// All numeric characters: 0 to 9. /// Space /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - /// * [uid] The user ID. This parameter is used to identify the user in the channel for real-time audio and video interaction. You need to set and manage user IDs yourself, and ensure that each user ID in the same channel is unique. This parameter is a 32-bit unsigned integer. The value range is 1 to 2 32 -1. If the user ID is not assigned (or set to 0), the SDK assigns a random user ID and returns it in the onJoinChannelSuccess callback. Your app must record and maintain the returned user ID, because the SDK does not do so. + /// * [uid] The user ID. This parameter is used to identify the user in the channel for real-time audio and video interaction. You need to set and manage user IDs yourself, and ensure that each user ID in the same channel is unique. This parameter is a 32-bit unsigned integer. The value range is 1 to 2 32 -1. If the user ID is not assigned (or set to 0), the SDK assigns a random user ID and onJoinChannelSuccess returns it in the callback. Your application must record and maintain the returned user ID, because the SDK does not do so. /// * [options] The channel media options. See ChannelMediaOptions. /// /// Returns @@ -3225,7 +3225,7 @@ abstract class RtcEngine { /// Renews the token. /// - /// The SDK triggers the onTokenPrivilegeWillExpire callback. onConnectionStateChanged The connectionChangedTokenExpired callback reports (9). + /// You can call this method to pass a new token to the SDK. A token will expire after a certain period of time, at which point the SDK will be unable to establish a connection with the server. /// /// * [token] The new token. 
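// A minimal sketch of the token-renewal flow described above, assuming a
// hypothetical app-side fetchTokenFromServer() helper; registerEventHandler,
// renewToken, and the onTokenPrivilegeWillExpire callback are the public Dart
// APIs documented in this file.
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// App-specific helper (hypothetical): request a fresh token from your server.
Future<String> fetchTokenFromServer() async {
  throw UnimplementedError('implement against your own token server');
}

void handleTokenRenewal(RtcEngine engine) {
  engine.registerEventHandler(RtcEngineEventHandler(
    onTokenPrivilegeWillExpire: (RtcConnection connection, String token) async {
      // The current token is about to expire: generate a new one server-side
      // and pass it to the SDK (single-channel case).
      final newToken = await fetchTokenFromServer();
      await engine.renewToken(newToken);
    },
  ));
}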
/// @@ -3317,9 +3317,7 @@ abstract class RtcEngine { /// Disables the video module. /// - /// This method can be called before joining a channel or during a call to disable the video module. If it is called before joining a channel, an audio call starts when you join the channel; if called during a call, a video call switches to an audio call. Call enableVideo to enable the video module. A successful call of this method triggers the onUserEnableVideo (false) callback on the remote client. - /// This method affects the internal engine and can be called after leaving the channel. - /// This method resets the internal engine and thus might takes some time to take effect. Agora recommends using the following APIs to control the video modules separately: enableLocalVideo : Whether to enable the camera to create the local video stream. muteLocalVideoStream : Whether to publish the local video stream. muteRemoteVideoStream : Whether to subscribe to and play the remote video stream. muteAllRemoteVideoStreams : Whether to subscribe to and play all remote video streams. + /// This method is used to disable the video module. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3327,12 +3325,7 @@ abstract class RtcEngine { /// Enables the local video preview and specifies the video source for the preview. /// - /// You can call this method to enable local video preview. Call this method after the following: - /// Call setupLocalVideo to initialize the local preview. - /// Call enableVideo to enable the video module. - /// The local preview enables the mirror mode by default. - /// After the local video preview is enabled, if you call leaveChannel to exit the channel, the local preview remains until you call stopPreview to disable it. - /// The video source type set in this method needs to be consistent with the video source type of VideoCanvas you set in setupLocalVideo. + /// This method is used to start local video preview and specify the video source that appears in the preview screen. /// /// * [sourceType] The type of the video source. See VideoSourceType. /// @@ -3343,8 +3336,6 @@ abstract class RtcEngine { /// Stops the local video preview. /// - /// After calling startPreview to start the preview, if you want to close the local video preview, call this method. Call this method before joining a channel or after leaving a channel. - /// /// * [sourceType] The type of the video source. See VideoSourceType. /// /// Returns @@ -3373,7 +3364,7 @@ abstract class RtcEngine { /// Sets the video encoder configuration. /// - /// Sets the encoder configuration for the local video. Each configuration profile corresponds to a set of video parameters, including the resolution, frame rate, and bitrate. The config specified in this method is the maximum value under ideal network conditions. If the video engine cannot render the video using the specified config due to unreliable network conditions, the parameters further down the list are considered until a successful configuration is found. You can call this method either before or after joining a channel. If the user does not need to reset the video encoding properties after joining the channel, Agora recommends calling this method before enableVideo to reduce the time to render the first video frame. + /// Sets the encoder configuration for the local video. 
Each configuration profile corresponds to a set of video parameters, including the resolution, frame rate, and bitrate. /// /// * [config] Video profile. See VideoEncoderConfiguration. /// @@ -3551,10 +3542,7 @@ abstract class RtcEngine { /// Enables the audio module. /// - /// The audio mode is enabled by default. - /// This method enables the internal engine and can be called anytime after initialization. It is still valid after one leaves channel. - /// Calling this method will reset the entire engine, resulting in a slow response time. Instead of callling this method, you can independently control a specific audio module based on your actual needs using the following methods: enableLocalAudio : Whether to enable the microphone to create the local audio stream. muteLocalAudioStream : Whether to publish the local audio stream. muteRemoteAudioStream : Whether to subscribe and play the remote audio stream. muteAllRemoteAudioStreams : Whether to subscribe to and play all remote audio streams. - /// A successful call of this method resets enableLocalAudio, muteRemoteAudioStream, and muteAllRemoteAudioStreams. Proceed it with caution. + /// The audio module is enabled by default After calling disableAudio to disable the audio module, you can call this method to re-enable it. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3562,8 +3550,7 @@ abstract class RtcEngine { /// Disables the audio module. /// - /// This method disables the internal engine and can be called anytime after initialization. It is still valid after one leaves channel. - /// This method resets the internal engine and takes some time to take effect. Agora recommends using the following API methods to control the audio modules separately: enableLocalAudio : Whether to enable the microphone to create the local audio stream. enableLoopbackRecording : Whether to enable loopback audio capturing. muteLocalAudioStream : Whether to publish the local audio stream. muteRemoteAudioStream : Whether to subscribe and play the remote audio stream. muteAllRemoteAudioStreams : Whether to subscribe to and play all remote audio streams. + /// The audio module is enabled by default, and you can call this method to disable the audio module. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3571,10 +3558,6 @@ abstract class RtcEngine { /// Sets the audio profile and audio scenario. /// - /// You can call this method either before or after joining a channel. - /// Due to iOS system restrictions, some audio routes cannot be recognized in call volume mode. Therefore, if you need to use an external sound card, it is recommended to set the audio scenario to audioScenarioGameStreaming (3). In this scenario, the SDK will switch to media volume to avoid this issue. - /// In scenarios requiring high-quality audio, such as online music tutoring, Agora recommends you set profile as audioProfileMusicHighQuality (4) and scenario as audioScenarioGameStreaming (3). - /// /// * [profile] The audio profile, including the sampling rate, bitrate, encoding mode, and the number of channels. See AudioProfileType. /// * [scenario] The audio scenarios. Under different audio scenarios, the device uses different volume types. See AudioScenarioType. 
/// @@ -3586,9 +3569,6 @@ abstract class RtcEngine { /// Sets audio scenarios. /// - /// Due to iOS system restrictions, some audio routes cannot be recognized in call volume mode. Therefore, if you need to use an external sound card, it is recommended to set the audio scenario to audioScenarioGameStreaming (3). In this scenario, the SDK will switch to media volume to avoid this issue. - /// You can call this method either before or after joining a channel. - /// /// * [scenario] The audio scenarios. Under different audio scenarios, the device uses different volume types. See AudioScenarioType. /// /// Returns @@ -3597,9 +3577,7 @@ abstract class RtcEngine { /// Enables or disables the local audio capture. /// - /// The audio function is enabled by default when users joining a channel. This method disables or re-enables the local audio function to stop or restart local audio capturing. This method does not affect receiving the remote audio streams, and enableLocalAudio (false) is applicable to scenarios where the user wants to receive remote audio streams without sending any audio stream to other users in the channel. Once the local audio function is disabled or re-enabled, the SDK triggers the onLocalAudioStateChanged callback, which reports localAudioStreamStateStopped (0) or localAudioStreamStateRecording (1). - /// The difference between this method and muteLocalAudioStream are as follow: enableLocalAudio : Disables or re-enables the local audio capturing and processing. If you disable or re-enable local audio capturing using the enableLocalAudio method, the local user might hear a pause in the remote audio playback. muteLocalAudioStream : Sends or stops sending the local audio streams. - /// You can call this method either before or after joining a channel. Calling it before joining a channel only sets the device state, and it takes effect immediately after you join the channel. + /// The audio function is enabled by default when users joining a channel. This method disables or re-enables the local audio function to stop or restart local audio capturing. The difference between this method and muteLocalAudioStream are as follows: enableLocalAudio : Disables or re-enables the local audio capturing and processing. If you disable or re-enable local audio capturing using the enableLocalAudio method, the local user might hear a pause in the remote audio playback. muteLocalAudioStream : Sends or stops sending the local audio streams without affecting the audio capture status. /// /// * [enabled] true : (Default) Re-enable the local audio function, that is, to start the local audio capturing device (for example, the microphone). false : Disable the local audio function, that is, to stop local audio capturing. /// @@ -3609,7 +3587,7 @@ abstract class RtcEngine { /// Stops or resumes publishing the local audio stream. /// - /// This method does not affect any ongoing audio recording, because it does not disable the audio capture device. A successful call of this method triggers the onUserMuteAudio and onRemoteAudioStateChanged callbacks on the remote client. + /// This method is used to control whether to publish the locally captured audio stream. If you call this method to stop publishing locally captured audio streams, the audio capturing device will still work and won't be affected. /// /// * [mute] Whether to stop publishing the local audio stream: true : Stops publishing the local audio stream. false : (Default) Resumes publishing the local audio stream. 
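// A brief sketch of the difference explained above: muteLocalAudioStream only
// stops publishing while the capture device keeps working, whereas
// enableLocalAudio(false) stops local capturing altogether.
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> stopPublishingLocalAudio(RtcEngine engine) async {
  // Capture continues; remote users simply stop receiving this stream.
  await engine.muteLocalAudioStream(true);
}

Future<void> stopCapturingLocalAudio(RtcEngine engine) async {
  // Stops the microphone capture itself; remote playback may pause briefly.
  await engine.enableLocalAudio(false);
}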
/// @@ -3619,9 +3597,7 @@ abstract class RtcEngine { /// Stops or resumes subscribing to the audio streams of all remote users. /// - /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users. - /// Call this method after joining a channel. - /// If you do not want to subscribe the audio streams of remote users before joining a channel, you can set autoSubscribeAudio as false when calling joinChannel. + /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users. By default, the SDK subscribes to the audio streams of all remote users when joining a channel. To modify this behavior, you can set autoSubscribeAudio to false when calling joinChannel to join the channel, which will cancel the subscription to the audio streams of all users upon joining the channel. /// /// * [mute] Whether to stop subscribing to the audio streams of all remote users: true : Stops subscribing to the audio streams of all remote users. false : (Default) Subscribes to the audio streams of all remote users by default. /// @@ -3634,8 +3610,6 @@ abstract class RtcEngine { /// Stops or resumes subscribing to the audio stream of a specified user. /// - /// Call this method after joining a channel. - /// /// * [uid] The user ID of the specified user. /// * [mute] Whether to subscribe to the specified remote user's audio stream. true : Stop subscribing to the audio stream of the specified user. false : (Default) Subscribe to the audio stream of the specified user. /// @@ -3645,9 +3619,7 @@ abstract class RtcEngine { /// Stops or resumes publishing the local video stream. /// - /// A successful call of this method triggers the onUserMuteVideo callback on the remote client. - /// This method executes faster than the enableLocalVideo (false) method, which controls the sending of the local video stream. - /// This method does not affect any ongoing video recording, because it does not disable the camera. + /// This method is used to control whether to publish the locally captured video stream. If you call this method to stop publishing locally captured video streams, the video capturing device will still work and won't be affected. Compared to enableLocalVideo (false), which can also cancel the publishing of local video stream by turning off the local video stream capture, this method responds faster. /// /// * [mute] Whether to stop publishing the local video stream. true : Stop publishing the local video stream. false : (Default) Publish the local video stream. /// @@ -3669,9 +3641,7 @@ abstract class RtcEngine { /// Stops or resumes subscribing to the video streams of all remote users. /// - /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users. - /// Call this method after joining a channel. - /// If you do not want to subscribe the video streams of remote users before joining a channel, you can call joinChannel and set autoSubscribeVideo as false. + /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users. By default, the SDK subscribes to the video streams of all remote users when joining a channel. 
To modify this behavior, you can set autoSubscribeVideo to false when calling joinChannel to join the channel, which will cancel the subscription to the video streams of all users upon joining the channel. /// /// * [mute] Whether to stop subscribing to the video streams of all remote users. true : Stop subscribing to the video streams of all remote users. false : (Default) Subscribe to the audio streams of all remote users by default. /// @@ -3696,8 +3666,6 @@ abstract class RtcEngine { /// Stops or resumes subscribing to the video stream of a specified user. /// - /// Call this method after joining a channel. - /// /// * [uid] The user ID of the specified user. /// * [mute] Whether to subscribe to the specified remote user's video stream. true : Stop subscribing to the video streams of the specified user. false : (Default) Subscribe to the video stream of the specified user. /// @@ -3807,7 +3775,7 @@ abstract class RtcEngine { /// Enables the reporting of users' volume indication. /// - /// This method enables the SDK to regularly report the volume information to the app of the local user who sends a stream and remote users (three users at most) whose instantaneous volumes are the highest. Once you call this method and users send streams in the channel, the SDK triggers the onAudioVolumeIndication callback at the time interval set in this method. You can call this method either before or after joining a channel. + /// This method enables the SDK to regularly report the volume information to the app of the local user who sends a stream and remote users (three users at most) whose instantaneous volumes are the highest. /// /// * [interval] Sets the time interval between two consecutive volume indications: /// ≤ 0: Disables the volume indication. @@ -3817,7 +3785,6 @@ abstract class RtcEngine { /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. - /// < 0: Failure. Future enableAudioVolumeIndication( {required int interval, required int smooth, required bool reportVad}); @@ -4312,7 +4279,9 @@ abstract class RtcEngine { /// Sets a preset voice beautifier effect. /// - /// Call this method to set a preset voice beautifier effect for the local user who sends an audio stream. After setting a voice beautifier effect, all users in the channel can hear the effect. You can set different voice beautifier effects for different scenarios. For better voice effects, Agora recommends that you call setAudioProfile and set scenario to audioScenarioGameStreaming (3) and profile to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5) before calling this method. + /// Call this method to set a preset voice beautifier effect for the local user who sends an audio stream. After setting a voice beautifier effect, all users in the channel can hear the effect. You can set different voice beautifier effects for different scenarios. To achieve better vocal effects, it is recommended that you call the following APIs before calling this method: + /// Call setAudioScenario to set the audio scenario to high-quality audio scenario, namely audioScenarioGameStreaming (3). + /// Call setAudioProfile to set the profile parameter to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5). /// You can call this method either before or after joining a channel. 
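A minimal illustrative sketch of the recommended call order before applying a preset voice beautifier, as described above (the function name, the chosen preset, and the engine parameter are assumptions; engine is an initialized RtcEngine):

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Applies the recommended high-quality audio scenario and music profile
// before selecting a preset voice beautifier effect.
Future<void> applyVoiceBeautifier(RtcEngine engine) async {
  await engine.setAudioScenario(AudioScenarioType.audioScenarioGameStreaming);
  await engine.setAudioProfile(
      profile: AudioProfileType.audioProfileMusicHighQualityStereo);
  // The preset chosen here is illustrative only.
  await engine.setVoiceBeautifierPreset(
      VoiceBeautifierPreset.chatBeautifierMagnetic);
}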
/// Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard (1) or audioProfileIot (6), or the method does not take effect. /// This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. @@ -4327,10 +4296,11 @@ abstract class RtcEngine { /// Sets an SDK preset audio effect. /// - /// Call this method to set an SDK preset audio effect for the local user who sends an audio stream. This audio effect does not change the gender characteristics of the original voice. After setting an audio effect, all users in the channel can hear the effect. To get better audio effect quality, Agora recommends setting the scenario parameter of setAudioProfile as audioScenarioGameStreaming (3) before calling this method. + /// To achieve better vocal effects, it is recommended that you call the following APIs before calling this method: + /// Call setAudioScenario to set the audio scenario to high-quality audio scenario, namely audioScenarioGameStreaming (3). + /// Call setAudioProfile to set the profile parameter to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5). Call this method to set an SDK preset audio effect for the local user who sends an audio stream. This audio effect does not change the gender characteristics of the original voice. After setting an audio effect, all users in the channel can hear the effect. + /// Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard (1) or audioProfileIot (6), or the method does not take effect. /// You can call this method either before or after joining a channel. - /// Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard (1) audioProfileIot or (6), or the method does not take effect. - /// This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. /// If you call setAudioEffectPreset and set enumerators except for roomAcoustics3dVoice or pitchCorrection, do not call setAudioEffectParameters; otherwise, setAudioEffectPreset is overridden. /// After calling setAudioEffectPreset, Agora does not recommend you to call the following methods, otherwise the effect set by setAudioEffectPreset will be overwritten: setVoiceBeautifierPreset setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceBeautifierParameters setVoiceConversionPreset /// This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. @@ -4343,9 +4313,11 @@ abstract class RtcEngine { /// Sets a preset voice beautifier effect. /// - /// Call this method to set a preset voice beautifier effect for the local user who sends an audio stream. After setting an audio effect, all users in the channel can hear the effect. You can set different voice beautifier effects for different scenarios. To achieve better audio effect quality, Agora recommends that you call setAudioProfile and set the profile to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5) and scenario to audioScenarioGameStreaming (3) before calling this method. - /// You can call this method either before or after joining a channel. 
+ /// To achieve better vocal effects, it is recommended that you call the following APIs before calling this method: + /// Call setAudioScenario to set the audio scenario to high-quality audio scenario, namely audioScenarioGameStreaming (3). + /// Call setAudioProfile to set the profile parameter to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5). Call this method to set a preset voice beautifier effect for the local user who sends an audio stream. After setting an audio effect, all users in the channel can hear the effect. You can set different voice beautifier effects for different scenarios. /// Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard (1) or audioProfileIot (6), or the method does not take effect. + /// You can call this method either before or after joining a channel. /// This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. /// After calling setVoiceConversionPreset, Agora does not recommend you to call the following methods, otherwise the effect set by setVoiceConversionPreset will be overwritten: setAudioEffectPreset setAudioEffectParameters setVoiceBeautifierPreset setVoiceBeautifierParameters setLocalVoicePitch setLocalVoiceFormant setLocalVoiceEqualization setLocalVoiceReverb /// This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. @@ -4359,18 +4331,19 @@ abstract class RtcEngine { /// Sets parameters for SDK preset audio effects. /// - /// Call this method to set the following parameters for the local user who sends an audio stream: + /// To achieve better vocal effects, it is recommended that you call the following APIs before calling this method: + /// Call setAudioScenario to set the audio scenario to high-quality audio scenario, namely audioScenarioGameStreaming (3). + /// Call setAudioProfile to set the profile parameter to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5). Call this method to set the following parameters for the local user who sends an audio stream: /// 3D voice effect: Sets the cycle period of the 3D voice effect. /// Pitch correction effect: Sets the basic mode and tonic pitch of the pitch correction effect. Different songs have different modes and tonic pitches. Agora recommends bounding this method with interface elements to enable users to adjust the pitch correction interactively. After setting the audio parameters, all users in the channel can hear the effect. + /// Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard (1) or audioProfileIot (6), or the method does not take effect. /// You can call this method either before or after joining a channel. - /// To get better audio effect quality, Agora recommends setting the scenario parameter of setAudioProfile as audioScenarioGameStreaming (3) before calling this method. - /// Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard (1) audioProfileIot or (6), or the method does not take effect. /// This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. 
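A minimal illustrative sketch of calling setAudioEffectParameters for the 3D voice effect described above (the function name and parameter values are assumptions; engine is an initialized RtcEngine):

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Enables the 3D voice effect with a 10-second rotation cycle. For the
// roomAcoustics3dVoice preset, param2 is not used and is set to 0.
Future<void> enable3dVoice(RtcEngine engine) async {
  await engine.setAudioEffectParameters(
    preset: AudioEffectPreset.roomAcoustics3dVoice,
    param1: 10,
    param2: 0,
  );
}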
/// After calling setAudioEffectParameters, Agora does not recommend you to call the following methods, otherwise the effect set by setAudioEffectParameters will be overwritten: setAudioEffectPreset setVoiceBeautifierPreset setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceBeautifierParameters setVoiceConversionPreset /// /// * [preset] The options for SDK preset audio effects: roomAcoustics3dVoice, 3D voice effect: - /// Call setAudioProfile and set the profile parameter in to audioProfileMusicStandardStereo (3) or audioProfileMusicHighQualityStereo (5) before setting this enumerator; otherwise, the enumerator setting does not take effect. - /// If the 3D voice effect is enabled, users need to use stereo audio playback devices to hear the anticipated voice effect. pitchCorrection, Pitch correction effect: To achieve better audio effect quality, Agora recommends setting the profile parameter in setAudioProfile to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5) before setting this enumerator. + /// You need to set the profile parameter in setAudioProfile to audioProfileMusicStandardStereo (3) or audioProfileMusicHighQualityStereo (5) before setting this enumerator; otherwise, the enumerator setting does not take effect. + /// If the 3D voice effect is enabled, users need to use stereo audio playback devices to hear the anticipated voice effect. pitchCorrection, Pitch correction effect: /// * [param1] If you set preset to roomAcoustics3dVoice, param1 sets the cycle period of the 3D voice effect. The value range is [1,60] and the unit is seconds. The default value is 10, indicating that the voice moves around you every 10 seconds. /// If you set preset to pitchCorrection, param1 indicates the basic mode of the pitch correction effect: 1 : (Default) Natural major scale. 2 : Natural minor scale. 3 : Japanese pentatonic scale. /// * [param2] If you set preset to roomAcoustics3dVoice , you need to set param2 to 0. @@ -4385,9 +4358,11 @@ abstract class RtcEngine { /// Sets parameters for the preset voice beautifier effects. /// - /// Call this method to set a gender characteristic and a reverberation effect for the singing beautifier effect. This method sets parameters for the local user who sends an audio stream. After setting the audio parameters, all users in the channel can hear the effect. For better voice effects, Agora recommends that you call setAudioProfile and set scenario to audioScenarioGameStreaming (3) and profile to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5) before calling this method. - /// You can call this method either before or after joining a channel. + /// To achieve better vocal effects, it is recommended that you call the following APIs before calling this method: + /// Call setAudioScenario to set the audio scenario to high-quality audio scenario, namely audioScenarioGameStreaming (3). + /// Call setAudioProfile to set the profile parameter to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5). Call this method to set a gender characteristic and a reverberation effect for the singing beautifier effect. This method sets parameters for the local user who sends an audio stream. After setting the audio parameters, all users in the channel can hear the effect. /// Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard (1) or audioProfileIot (6), or the method does not take effect. 
+ /// You can call this method either before or after joining a channel. /// This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. /// After calling setVoiceBeautifierParameters, Agora does not recommend calling the following methods, otherwise the effect set by setVoiceBeautifierParameters will be overwritten: setAudioEffectPreset setAudioEffectParameters setVoiceBeautifierPreset setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceConversionPreset /// @@ -4750,10 +4725,10 @@ abstract class RtcEngine { /// Adjusts the capturing signal volume. /// - /// You can call this method either before or after joining a channel. + /// If you only need to mute the audio signal, Agora recommends that you use muteRecordingSignal instead. /// /// * [volume] The volume of the user. The value range is [0,400]. - /// 0: Mute. If you only need to mute the audio signal, Agora recommends that you use muteRecordingSignal instead. + /// 0: Mute. /// 100: (Default) The original volume. /// 400: Four times the original volume (amplifying the audio signals by four times). /// @@ -4771,11 +4746,10 @@ abstract class RtcEngine { /// Adjusts the playback signal volume of all remote users. /// - /// This method adjusts the playback volume that is the mixed volume of all remote users. - /// You can call this method either before or after joining a channel. + /// This method is used to adjust the signal volume of all remote users mixed and played locally. If you need to adjust the signal volume of a specified remote user played locally, it is recommended that you call adjustUserPlaybackSignalVolume instead. /// /// * [volume] The volume of the user. The value range is [0,400]. - /// 0: Mute. If you only need to mute the audio signal, Agora recommends that you use muteRecordingSignal instead. + /// 0: Mute. /// 100: (Default) The original volume. /// 400: Four times the original volume (amplifying the audio signals by four times). /// @@ -4786,11 +4760,12 @@ abstract class RtcEngine { /// Adjusts the playback signal volume of a specified remote user. /// /// You can call this method to adjust the playback volume of a specified remote user. To adjust the playback volume of different remote users, call the method as many times, once for each remote user. - /// Call this method after joining a channel. - /// The playback volume here refers to the mixed volume of a specified remote user. /// /// * [uid] The user ID of the remote user. - /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. + /// * [volume] The volume of the user. The value range is [0,400]. + /// 0: Mute. + /// 100: (Default) The original volume. + /// 400: Four times the original volume (amplifying the audio signals by four times). /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -4818,10 +4793,6 @@ abstract class RtcEngine { /// Enables or disables extensions. /// - /// To call this method, call it immediately after initializing the RtcEngine object. - /// If you want to enable multiple extensions, you need to call this method multiple times. - /// The data processing order of different extensions in the SDK is determined by the order in which the extensions are enabled. 
That is, the extension that is enabled first will process the data first. - /// /// * [provider] The name of the extension provider. /// * [extension] The name of the extension. /// * [enable] Whether to enable the extension: true : Enable the extension. false : Disable the extension. @@ -4922,20 +4893,18 @@ abstract class RtcEngine { /// Sets the volume of the in-ear monitor. /// - /// You can call this method either before or after joining a channel. - /// - /// * [volume] The volume of the in-ear monitor. The value range is [0,400]. + /// * [volume] The volume of the user. The value range is [0,400]. /// 0: Mute. /// 100: (Default) The original volume. - /// 400: Four times the original volume. + /// 400: Four times the original volume (amplifying the audio signals by four times). /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setInEarMonitoringVolume(int volume); - /// Adds an extension to the SDK. + /// Loads an extension. /// - /// (For Windows and Android only) + /// This method is used to add extensions external to the SDK (such as those from Extensions Marketplace and SDK extensions) to the SDK. /// /// * [path] The extension library path and name. For example: /library/libagora_segmentation_extension.dll. /// * [unloadAfterUse] Whether to uninstall the current extension when you no longer using it: true : Uninstall the extension when the RtcEngine is destroyed. false : (Rcommended) Do not uninstall the extension until the process terminates. @@ -4947,7 +4916,7 @@ abstract class RtcEngine { /// Sets the properties of the extension provider. /// - /// You can call this method to set the attributes of the extension provider and initialize the relevant parameters according to the type of the provider. Call this method after enableExtension, and before enabling the audio (enableAudio / enableLocalAudio) or the video (enableVideo / enableLocalVideo). + /// You can call this method to set the attributes of the extension provider and initialize the relevant parameters according to the type of the provider. /// /// * [provider] The name of the extension provider. /// * [key] The key of the extension. @@ -4960,9 +4929,7 @@ abstract class RtcEngine { /// Registers an extension. /// - /// After the extension is loaded, you can call this method to register the extension. - /// Before calling this method, you need to call loadExtensionProvider to load the extension first. - /// For extensions external to the SDK (such as those from Extensions Marketplace and SDK Extensions), you need to call this method before calling setExtensionProperty. + /// For extensions external to the SDK (such as those from Extensions Marketplace and SDK Extensions), you need to load them before calling this method. Extensions internal to the SDK (those included in the full SDK package) are automatically loaded and registered after the initialization of RtcEngine. /// /// * [provider] The name of the extension provider. /// * [extension] The name of the extension. @@ -5226,7 +5193,7 @@ abstract class RtcEngine { /// Sets the default audio playback route. /// - /// This method applies to Android and iOS only. + /// This method is for Android and iOS only. /// Ensure that you call this method before joining a channel. If you need to change the audio route after joining a channel, call setEnableSpeakerphone. 
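A minimal illustrative sketch of the audio-route calls discussed above (function names are assumptions; engine is an initialized RtcEngine; Android/iOS only):

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Sets the default playback route to the speakerphone; call before joining
// a channel.
Future<void> preferSpeakerphoneByDefault(RtcEngine engine) async {
  await engine.setDefaultAudioRouteToSpeakerphone(true);
}

// Switches the current route back to the earpiece for the current channel;
// call after joining a channel.
Future<void> switchToEarpiece(RtcEngine engine) async {
  await engine.setEnableSpeakerphone(false);
}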
Most mobile phones have two audio routes: an earpiece at the top, and a speakerphone at the bottom. The earpiece plays at a lower volume, and the speakerphone at a higher volume. When setting the default audio route, you determine whether audio playback comes through the earpiece or speakerphone when no external audio device is connected. In different scenarios, the default audio routing of the system is also different. See the following: /// Voice call: Earpiece. /// Audio broadcast: Speakerphone. @@ -5241,8 +5208,8 @@ abstract class RtcEngine { /// Enables/Disables the audio route to the speakerphone. /// - /// If the default audio route of the SDK (see Set the Audio Route) or the setting in setDefaultAudioRouteToSpeakerphone cannot meet your requirements, you can call setEnableSpeakerphone to switch the current audio route. After a successful method call, the SDK triggers the onAudioRoutingChanged callback. This method only sets the audio route in the current channel and does not influence the default audio route. If the user leaves the current channel and joins another channel, the default audio route is used. - /// This method applies to Android and iOS only. + /// If the default audio route of the SDK or the setting in setDefaultAudioRouteToSpeakerphone cannot meet your requirements, you can call setEnableSpeakerphone to switch the current audio route. After a successful method call, the SDK triggers the onAudioRoutingChanged callback. For the default audio route in different scenarios, see Audio Route. This method only sets the audio route in the current channel and does not influence the default audio route. If the user leaves the current channel and joins another channel, the default audio route is used. + /// This method is for Android and iOS only. /// Call this method after joining a channel. /// If the user uses an external audio playback device such as a Bluetooth or wired headset, this method does not take effect, and the SDK plays audio through the external device. When the user uses multiple external devices, the SDK plays audio through the last connected device. /// @@ -5329,9 +5296,7 @@ abstract class RtcEngine { /// Captures the screen by specifying the display ID. /// - /// This method shares a screen or part of the screen. There are two ways to start screen sharing, you can choose one according to your needs: - /// Call this method before joining a channel, and then call joinChannel to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. - /// Call this method after joining a channel, and then call updateChannelMediaOptions and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. This method is for Windows and macOS only. + /// Captures the video stream of a screen or a part of the screen area. This method is for Windows and macOS only. /// /// * [displayId] The display ID of the screen to be shared. For the Windows platform, if you need to simultaneously share two screens (main screen and secondary screen), you can set displayId to -1 when calling this method. /// * [regionRect] (Optional) Sets the relative location of the region to the screen. Pass in nil to share the entire screen. See Rectangle. @@ -5346,9 +5311,9 @@ abstract class RtcEngine { /// Captures the whole or part of a screen by specifying the screen rect. 
/// - /// There are two ways to start screen sharing, you can choose one according to your needs: + /// You can call this method either before or after joining the channel, with the following differences: /// Call this method before joining a channel, and then call joinChannel to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. - /// Call this method after joining a channel, and then call updateChannelMediaOptions and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. Deprecated: This method is deprecated. Use startScreenCaptureByDisplayId instead. Agora strongly recommends using startScreenCaptureByDisplayId if you need to start screen sharing on a device connected to another display. This method shares a screen or part of the screen. You need to specify the area of the screen to be shared. This method applies to Windows only. + /// Call this method after joining a channel, and then call updateChannelMediaOptions to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. Deprecated: This method is deprecated. Use startScreenCaptureByDisplayId instead. Agora strongly recommends using startScreenCaptureByDisplayId if you need to start screen sharing on a device connected to another display. This method shares a screen or part of the screen. You need to specify the area of the screen to be shared. This method applies to Windows only. /// /// * [screenRect] Sets the relative location of the screen to the virtual screen. /// * [regionRect] Sets the relative location of the region to the screen. If you do not set this parameter, the SDK shares the whole screen. See Rectangle. If the specified region overruns the screen, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole screen. @@ -5375,11 +5340,7 @@ abstract class RtcEngine { /// Captures the whole or part of a window by specifying the window ID. /// - /// There are two ways to start screen sharing, you can choose one according to your needs: - /// Call this method before joining a channel, and then call joinChannel to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. - /// Call this method after joining a channel, and then call updateChannelMediaOptions and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. This method captures a window or part of the window. You need to specify the ID of the window to be captured. - /// This method applies to the macOS and Windows only. - /// The window sharing feature of the Agora SDK relies on WGC (Windows Graphics Capture) or GDI (Graphics Device Interface) capture, and WGC cannot be set to disable mouse capture on systems earlier than Windows 10 2004. Therefore, captureMouseCursor(false) might not work when you start window sharing on a device with a system earlier than Windows 10 2004. See ScreenCaptureParameters. This method supports window sharing of UWP (Universal Windows Platform) applications. Agora tests the mainstream UWP applications by using the lastest SDK, see details as follows: + /// This method captures a window or part of the window. You need to specify the ID of the window to be captured. This method applies to the macOS and Windows only. This method supports window sharing of UWP (Universal Windows Platform) applications. 
Agora tests the mainstream UWP applications by using the latest SDK, see details as follows: /// /// * [windowId] The ID of the window to be shared. /// * [regionRect] (Optional) Sets the relative location of the region to the screen. If you do not set this parameter, the SDK shares the whole screen. See Rectangle. If the specified region overruns the window, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole window. @@ -5426,21 +5387,10 @@ abstract class RtcEngine { /// Starts screen capture. /// - /// There are two options for enabling screen sharing. You can choose the one that best suits your specific scenario: - /// Call this method before joining a channel, then call joinChannel to join channel and set publishScreenCaptureVideo to true to start screen sharing. - /// Call this method after joining a channel, then call updateChannelMediaOptions and set publishScreenCaptureVideo to true to start screen sharing. - /// This method applies to Android and iOS only. - /// On the iOS platform, screen sharing is only available on iOS 12.0 and later. - /// The billing for the screen sharing stream is based on the dimensions in ScreenVideoParameters. When you do not pass in a value, Agora bills you at 1280 × 720; when you pass a value in, Agora bills you at that value. For billing details, see. - /// If you are using the custom audio source instead of the SDK to capture audio, Agora recommends you add the keep-alive processing logic to your application to avoid screen sharing stopping when the application goes to the background. - /// This feature requires high-performance device, and Agora recommends that you use it on iPhone X and later models. - /// This method relies on the iOS screen sharing dynamic library AgoraReplayKitExtension.xcframework. If the dynamic library is deleted, screen sharing cannot be enabled normally. - /// On the Android platform, if the user has not granted the app screen capture permission, the SDK reports the onPermissionError (2) callback. - /// On Android 9 and later, to avoid the application being killed by the system after going to the background, Agora recommends you add the foreground service android.permission.FOREGROUND_SERVICE to the /app/Manifests/AndroidManifest.xml file. - /// Due to performance limitations, screen sharing is not supported on Android TV. - /// Due to system limitations, if you are using Huawei phones, do not adjust the video encoding resolution of the screen sharing stream during the screen sharing, or you could experience crashes. - /// Due to system limitations, some Xiaomi devices do not support capturing system audio during screen sharing. - /// To avoid system audio capture failure when screen sharing, Agora recommends that you set the audio application scenario to audioScenarioGameStreaming by using the setAudioScenario method before joining the channel. + /// This method is for Android and iOS only. + /// The billing for the screen sharing stream is based on the dimensions in ScreenVideoParameters : + /// When you do not pass in a value, Agora bills you at 1280 × 720. + /// When you pass in a value, Agora bills you at that value. For billing examples, see. /// /// * [captureParams] The screen sharing encoding parameters. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2.
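A minimal illustrative sketch of the mobile startScreenCapture call with ScreenCaptureParameters2, as documented above (the function name and the 1280 × 720 dimensions are assumptions; engine is an initialized RtcEngine):

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Starts Android/iOS screen capture. Agora bills based on the dimensions in
// ScreenVideoParameters, so they are set explicitly here.
Future<void> startMobileScreenShare(RtcEngine engine) async {
  await engine.startScreenCapture(const ScreenCaptureParameters2(
    captureAudio: true,
    captureVideo: true,
    videoParams: ScreenVideoParameters(
      dimensions: VideoDimensions(width: 1280, height: 720),
    ),
  ));
}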
/// @@ -5453,7 +5403,7 @@ abstract class RtcEngine { /// If the system audio is not captured when screen sharing is enabled, and then you want to update the parameter configuration and publish the system audio, you can refer to the following steps: /// Call this method, and set captureAudio to true. /// Call updateChannelMediaOptions, and set publishScreenCaptureAudio to true to publish the audio captured by the screen. - /// This method applies to Android and iOS only. + /// This method is for Android and iOS only. /// On the iOS platform, screen sharing is only available on iOS 12.0 and later. /// /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2. @@ -5489,8 +5439,6 @@ abstract class RtcEngine { /// Stops screen capture. /// - /// After calling startScreenCaptureByWindowId or startScreenCaptureByDisplayId to start screen capture, call this method to stop screen capture. - /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopScreenCapture(); @@ -5508,7 +5456,7 @@ abstract class RtcEngine { /// Ensure that you call this method after leaving a channel. /// /// * [callId] The current call ID. You can get the call ID by calling getCallId. - /// * [rating] The rating of the call. The value is between 1 (the lowest score) and 5 (the highest score). If you set a value out of this range, the SDK returns the -2 (ERR_INVALID_ARGUMENT) error. + /// * [rating] The value is between 1 (the lowest score) and 5 (the highest score). /// * [description] A description of the call. The string length should be less than 800 bytes. /// /// Returns @@ -5760,7 +5708,7 @@ abstract class RtcEngine { /// If the orientation mode of the encoding video (OrientationMode) is fixed portrait mode or the adaptive portrait mode, the watermark uses the portrait orientation. /// When setting the watermark position, the region must be less than the dimensions set in the setVideoEncoderConfiguration method; otherwise, the watermark image will be cropped. /// Ensure that calling this method after enableVideo. - /// If you only want to add a watermark to the media push, you can call this method or the method. + /// If you only want to add a watermark to the media push, you can call this method or the startRtmpStreamWithTranscoding method. /// This method supports adding a watermark image in the PNG file format only. Supported pixel formats of the PNG image are RGBA, RGB, Palette, Gray, and Alpha_gray. /// If the dimensions of the PNG image differ from your settings in this method, the image will be cropped or zoomed to conform to your settings. /// If you have enabled the mirror mode for the local video, the watermark on the local video is also mirrored. To avoid mirroring the watermark, Agora recommends that you do not use the mirror and watermark functions for the local video at the same time. You can implement the watermark function in your application layer. @@ -6286,15 +6234,11 @@ abstract class RtcEngine { Future sendAudioMetadata( {required Uint8List metadata, required int length}); - /// Starts screen capture. + /// Starts screen capture from the specified video source. 
/// - /// This method, as well as startScreenCapture, startScreenCaptureByDisplayId, and startScreenCaptureByWindowId, can all be used to start screen capture, with the following differences: startScreenCapture only applies to Android and iOS, whereas this method only applies to Windows and iOS. startScreenCaptureByDisplayId and startScreenCaptureByWindowId only support capturing video from a single screen or window. By calling this method and specifying the sourceType parameter, you can capture multiple video streams used for local video mixing or multi-channel publishing. - /// This method applies to the macOS and Windows only. - /// If you call this method to start screen capture, Agora recommends that you call stopScreenCaptureBySourceType to stop the capture and avoid using stopScreenCapture. + /// This method applies to the macOS and Windows only. /// - /// * [sourceType] The type of the video source. See VideoSourceType. - /// Windows supports up to four screen capture video streams. - /// macOS supports only one screen capture video stream. You can only set this parameter to videoSourceScreen (2). + /// * [sourceType] The type of the video source. See VideoSourceType. On the macOS platform, this parameter can only be set to videoSourceScreen (2). /// * [config] The configuration of the captured screen. See ScreenCaptureConfiguration. /// /// Returns @@ -6303,11 +6247,9 @@ abstract class RtcEngine { {required VideoSourceType sourceType, required ScreenCaptureConfiguration config}); - /// Stops screen capture. + /// Stops screen capture from the specified video source. /// - /// After calling startScreenCaptureBySourceType to start capturing video from one or more screens, you can call this method and set the sourceType parameter to stop capturing from the specified screens. - /// This method applies to the macOS and Windows only. - /// If you call startScreenCapture, startScreenCaptureByWindowId, or startScreenCaptureByDisplayId to start screen capure, Agora recommends that you call stopScreenCapture instead to stop the capture. + /// This method applies to the macOS and Windows only. /// /// * [sourceType] The type of the video source. See VideoSourceType. /// @@ -6326,11 +6268,7 @@ abstract class RtcEngine { /// Enables the local video preview. /// - /// You can call this method to enable local video preview. Call this method after the following: - /// Call setupLocalVideo to initialize the local preview. - /// Call enableVideo to enable the video module. - /// The local preview enables the mirror mode by default. - /// After the local video preview is enabled, if you call leaveChannel to exit the channel, the local preview remains until you call stopPreview to disable it. + /// You can call this method to enable local video preview. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. diff --git a/lib/src/agora_rtc_engine_ex.dart b/lib/src/agora_rtc_engine_ex.dart index 1841553bf..4ccf439be 100644 --- a/lib/src/agora_rtc_engine_ex.dart +++ b/lib/src/agora_rtc_engine_ex.dart @@ -81,7 +81,7 @@ abstract class RtcEngineEx implements RtcEngine { /// Sets the video encoder configuration. /// - /// Sets the encoder configuration for the local video. Each configuration profile corresponds to a set of video parameters, including the resolution, frame rate, and bitrate. 
The config specified in this method is the maximum value under ideal network conditions. If the video engine cannot render the video using the specified config due to unreliable network conditions, the parameters further down the list are considered until a successful configuration is found. + /// Sets the encoder configuration for the local video. Each configuration profile corresponds to a set of video parameters, including the resolution, frame rate, and bitrate. /// /// * [config] Video profile. See VideoEncoderConfiguration. /// * [connection] The connection information. See RtcConnection. /// @@ -375,7 +375,10 @@ abstract class RtcEngineEx implements RtcEngine { /// The playback volume here refers to the mixed volume of a specified remote user. /// /// * [uid] The user ID of the remote user. - /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. + /// * [volume] The volume of the user. The value range is [0,400]. + /// 0: Mute. + /// 100: (Default) The original volume. + /// 400: Four times the original volume (amplifying the audio signals by four times). /// * [connection] The connection information. See RtcConnection. /// /// Returns diff --git a/lib/src/agora_spatial_audio.dart b/lib/src/agora_spatial_audio.dart index 595d3e3fd..2df5083a7 100644 --- a/lib/src/agora_spatial_audio.dart +++ b/lib/src/agora_spatial_audio.dart @@ -104,7 +104,7 @@ abstract class LocalSpatialAudioEngine { /// Updates the spatial position of the specified remote user. /// - /// After successfully calling this method, the SDK calculates the spatial audio parameters based on the relative position of the local and remote user. Call this method after joinChannel. + /// After successfully calling this method, the SDK calculates the spatial audio parameters based on the relative position of the local and remote user. Call this method after the joinChannel method. /// /// * [uid] The user ID. This parameter must be the same as the user ID passed in when the user joined the channel. /// * [posInfo] The spatial position of the remote user. See RemoteVoicePositionInfo. /// @@ -122,7 +122,7 @@ abstract class LocalSpatialAudioEngine { /// Removes the spatial position of the specified remote user. /// - /// After successfully calling this method, the local user no longer hears the specified remote user. After leaving the channel, to avoid wasting resources, you can also call this method to delete the spatial position of the specified remote user. + /// After successfully calling this method, the local user no longer hears the specified remote user. After leaving the channel, to avoid wasting computing resources, call this method to delete the spatial position information of the specified remote user. Otherwise, the user's spatial position information will be saved continuously. When the number of remote users exceeds the number of audio streams that can be received as set in setMaxAudioRecvCount, the system automatically unsubscribes from the audio stream of the user who is furthest away based on relative distance. /// /// * [uid] The user ID. This parameter must be the same as the user ID passed in when the user joined the channel.
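A minimal illustrative sketch of the updateRemotePosition / removeRemotePosition flow documented above (the function name, coordinate values, and the localSpatial parameter are assumptions; localSpatial is assumed to come from RtcEngine.getLocalSpatialAudioEngine() and to be initialized already):

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Places a remote user relative to the local user, then removes the spatial
// position when it is no longer needed.
Future<void> trackRemoteUser(
    LocalSpatialAudioEngine localSpatial, int remoteUid) async {
  // Illustrative coordinates only.
  const posInfo = RemoteVoicePositionInfo(
    position: [0.0, 1.0, 0.0],
    forward: [0.0, -1.0, 0.0],
  );
  await localSpatial.updateRemotePosition(uid: remoteUid, posInfo: posInfo);

  // After the remote user leaves, drop its spatial position to avoid wasting
  // computing resources.
  await localSpatial.removeRemotePosition(remoteUid);
}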
/// diff --git a/lib/src/binding/agora_music_content_center_impl.dart b/lib/src/binding/agora_music_content_center_impl.dart index e5a82d07d..d9de2274f 100644 --- a/lib/src/binding/agora_music_content_center_impl.dart +++ b/lib/src/binding/agora_music_content_center_impl.dart @@ -171,6 +171,23 @@ class MusicPlayerImpl extends MediaPlayerImpl implements MusicPlayer { @protected String get className => 'MusicPlayer'; + @override + Future setPlayMode(MusicPlayMode mode) async { + final apiType = + '${isOverrideClassName ? className : 'MusicPlayer'}_setPlayMode_748bee0'; + final param = createParams({'mode': mode.value()}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future openWithSongCode( {required int songCode, int startPos = 0}) async { @@ -308,6 +325,23 @@ class MusicContentCenterImpl implements MusicContentCenter { return result as MusicPlayer; } + @override + Future destroyMusicPlayer(MusicPlayer musicPlayer) async { + final apiType = + '${isOverrideClassName ? className : 'MusicContentCenter'}_destroyMusicPlayer_876d086'; + final param = createParams({'music_player': musicPlayer}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future getMusicCharts() async { final apiType = @@ -459,7 +493,7 @@ class MusicContentCenterImpl implements MusicContentCenter { Future getLyric({required int songCode, int lyricType = 0}) async { final apiType = '${isOverrideClassName ? className : 'MusicContentCenter'}_getLyric_5ab5efd'; - final param = createParams({'songCode': songCode, 'LyricType': lyricType}); + final param = createParams({'songCode': songCode, 'lyricType': lyricType}); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: null)); if (callApiResult.irisReturnCode < 0) { diff --git a/lib/src/impl/agora_music_content_center_impl_override.dart b/lib/src/impl/agora_music_content_center_impl_override.dart index 40afa61e3..60af3f6d9 100644 --- a/lib/src/impl/agora_music_content_center_impl_override.dart +++ b/lib/src/impl/agora_music_content_center_impl_override.dart @@ -110,21 +110,35 @@ class MusicPlayerImpl extends media_player_impl.MediaPlayerImpl throw AgoraRtcException(code: result); } } + + @override + Future setPlayMode(MusicPlayMode mode) async { + final apiType = + '${isOverrideClassName ? 
className : 'MusicPlayer'}_setPlayMode'; + final param = createParams({'mode': mode.value()}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } } class MusicContentCenterImpl extends binding.MusicContentCenterImpl with ScopedDisposableObjectMixin { MusicContentCenterImpl._(RtcEngine rtcEngine) - : _rtcEngine = rtcEngine, - super(rtcEngine.irisMethodChannel); + : super(rtcEngine.irisMethodChannel); factory MusicContentCenterImpl.create(RtcEngine rtcEngine) { return rtcEngine.objectPool.putIfAbsent( _musicContentCenterScopeKey, () => MusicContentCenterImpl._(rtcEngine)); } - final RtcEngine _rtcEngine; - static const _musicContentCenterScopeKey = TypedScopedKey(MusicContentCenterImpl); @@ -153,6 +167,17 @@ class MusicContentCenterImpl extends binding.MusicContentCenterImpl return mp; } + @override + Future destroyMusicPlayer(MusicPlayer musicPlayer) async { + final apiType = + '${isOverrideClassName ? className : 'MusicContentCenter'}_destroyMusicPlayer'; + final param = createParams({'playerId': musicPlayer.getMediaPlayerId()}); + await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + + _removeMusicPlayerById(musicPlayer.getMediaPlayerId()); + } + @override void registerEventHandler(MusicContentCenterEventHandler eventHandler) async { if (_musicContentCenterEventHandler != null) return; @@ -186,7 +211,7 @@ class MusicContentCenterImpl extends binding.MusicContentCenterImpl jsonEncode({})); } - void removeMusicPlayerById(int musicPlayerId) { + void _removeMusicPlayerById(int musicPlayerId) { _musicPlayers.remove(musicPlayerId); } @@ -200,10 +225,6 @@ class MusicContentCenterImpl extends binding.MusicContentCenterImpl // do nothing } - for (final player in _musicPlayers.values) { - await _rtcEngine.destroyMediaPlayer(player); - } - _musicPlayers.clear(); _musicContentCenterEventHandler = null; } diff --git a/lib/src/impl/platform/io/native_iris_api_engine_bindings.dart b/lib/src/impl/platform/io/native_iris_api_engine_bindings.dart index a95768bf2..79222097f 100644 --- a/lib/src/impl/platform/io/native_iris_api_engine_bindings.dart +++ b/lib/src/impl/platform/io/native_iris_api_engine_bindings.dart @@ -386,8 +386,9 @@ class NativeIrisApiEngineBinding { } late final _StopDumpVideoPtr = _lookup< - ffi.NativeFunction< - ffi.Int32 Function(IrisVideoFrameBufferManagerPtr)>>('StopDumpVideo'); + ffi + .NativeFunction>( + 'StopDumpVideo'); late final _StopDumpVideo = _StopDumpVideoPtr.asFunction< int Function(IrisVideoFrameBufferManagerPtr)>(); diff --git a/macos/agora_rtc_engine.podspec b/macos/agora_rtc_engine.podspec index 02bc192ce..54f681f9c 100644 --- a/macos/agora_rtc_engine.podspec +++ b/macos/agora_rtc_engine.podspec @@ -21,8 +21,8 @@ A new flutter plugin project. puts '[plugin_dev] Found .plugin_dev file, use vendored_frameworks instead.' 
s.vendored_frameworks = 'libs/*.xcframework', 'libs/*.framework' else - s.dependency 'AgoraRtcEngine_macOS', '4.3.1' - s.dependency 'AgoraIrisRTC_macOS', '4.3.1-build.1' + s.dependency 'AgoraRtcEngine_macOS', '4.3.2' + s.dependency 'AgoraIrisRTC_macOS', '4.3.2-build.1' end s.platform = :osx, '10.11' diff --git a/pubspec.yaml b/pubspec.yaml index 9b3c13757..4b576372e 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -2,7 +2,7 @@ name: agora_rtc_engine description: >- Flutter plugin of Agora RTC SDK, allow you to simply integrate Agora Video Calling or Live Video Streaming to your app with just a few lines of code. -version: 6.3.1 +version: 6.3.2 homepage: https://www.agora.io repository: https://github.com/AgoraIO-Extensions/Agora-Flutter-SDK/tree/main environment: diff --git a/scripts/artifacts_version.sh b/scripts/artifacts_version.sh index b3d39f257..2c766684a 100644 --- a/scripts/artifacts_version.sh +++ b/scripts/artifacts_version.sh @@ -1,6 +1,6 @@ set -e -export IRIS_CDN_URL_ANDROID="https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_Android_Video_20240429_1017_481.zip" -export IRIS_CDN_URL_IOS="https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_iOS_Video_20240428_0641_388.zip" -export IRIS_CDN_URL_MACOS="https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_Mac_Video_20240428_0641_389.zip" -export IRIS_CDN_URL_WINDOWS="https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_Windows_Video_20240428_0641_423.zip" +export IRIS_CDN_URL_ANDROID="https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_Android_Video_20240604_0456_504.zip" +export IRIS_CDN_URL_IOS="https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_iOS_Video_20240604_0459_409.zip" +export IRIS_CDN_URL_MACOS="https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_Mac_Video_20240604_0500_404.zip" +export IRIS_CDN_URL_WINDOWS="https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_Windows_Video_20240604_0456_441.zip" diff --git a/scripts/build-iris-macos.sh b/scripts/build-iris-macos.sh index 1862ba570..c6f443d44 100644 --- a/scripts/build-iris-macos.sh +++ b/scripts/build-iris-macos.sh @@ -6,7 +6,7 @@ set -x AGORA_FLUTTER_PROJECT_PATH=$(pwd) IRIS_PROJECT_PATH=$1 BUILD_TYPE=$2 -NATIVE_SDK_PATH_NAME=$3 # Agora_Native_SDK_for_Mac_rel.v3.8.201.2_39877_full_20220608_2158 +NATIVE_SDK_PATH_NAME=$3 # Agora_Native_SDK_for_Mac_rel.v3.8.201.2_39877_full_20220608_2158 Agora_Native_SDK_for_Mac_FULL IRIS_TYPE="dcg" SCRIPTS_PATH=$(dirname "$0") diff --git a/test_shard/fake_test_app/integration_test/apis_call_fake_test.dart b/test_shard/fake_test_app/integration_test/apis_call_fake_test.dart index 5af4b2c79..2c54c1181 100644 --- a/test_shard/fake_test_app/integration_test/apis_call_fake_test.dart +++ b/test_shard/fake_test_app/integration_test/apis_call_fake_test.dart @@ -10,8 +10,7 @@ import 'testcases/localspatialaudioengine_testcases.dart' import 'generated/mediaengine_fake_test.generated.dart' as mediaengine; import 'generated/mediaplayer_fake_test.generated.dart' as mediaplayer; import 'generated/mediarecorder_fake_test.generated.dart' as mediarecorder; -import 'generated/musiccontentcenter_fake_test.generated.dart' - as musiccontentcenter; +import 'testcases/musiccontentcenter_testcases.dart' as musiccontentcenter; import 'testcases/rtcengine_debug_testcases.dart' as rtcengine_debug; import 'testcases/rtcengine_testcases.dart' as rtcengine; import 'testcases/rtcengineex_testcases.dart' as rtcengineex; @@ -67,7 +66,7 @@ void main() { localspatialaudioengine.testCases(); 
mediaplayer.mediaPlayerControllerSmokeTestCases(); mediarecorder.mediaRecorderSmokeTestCases(); - musiccontentcenter.musicContentCenterSmokeTestCases(); + musiccontentcenter.testCases(); rtcengine_debug.testCases(); } diff --git a/test_shard/fake_test_app/integration_test/generated/audiodevicemanager_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/audiodevicemanager_fake_test.generated.dart index 5caf1ff29..d3ffddb0f 100644 --- a/test_shard/fake_test_app/integration_test/generated/audiodevicemanager_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/audiodevicemanager_fake_test.generated.dart @@ -96,7 +96,7 @@ void audioDeviceManagerSmokeTestCases() { final audioDeviceManager = rtcEngine.getAudioDeviceManager(); try { - const String deviceId = "hello"; + String deviceId = "hello"; await audioDeviceManager.setPlaybackDevice( deviceId, ); @@ -204,7 +204,7 @@ void audioDeviceManagerSmokeTestCases() { final audioDeviceManager = rtcEngine.getAudioDeviceManager(); try { - const int volume = 10; + int volume = 5; await audioDeviceManager.setPlaybackDeviceVolume( volume, ); @@ -277,7 +277,7 @@ void audioDeviceManagerSmokeTestCases() { final audioDeviceManager = rtcEngine.getAudioDeviceManager(); try { - const String deviceId = "hello"; + String deviceId = "hello"; await audioDeviceManager.setRecordingDevice( deviceId, ); @@ -385,7 +385,7 @@ void audioDeviceManagerSmokeTestCases() { final audioDeviceManager = rtcEngine.getAudioDeviceManager(); try { - const int volume = 10; + int volume = 5; await audioDeviceManager.setRecordingDeviceVolume( volume, ); @@ -458,7 +458,7 @@ void audioDeviceManagerSmokeTestCases() { final audioDeviceManager = rtcEngine.getAudioDeviceManager(); try { - const String deviceId = "hello"; + String deviceId = "hello"; await audioDeviceManager.setLoopbackDevice( deviceId, ); @@ -531,7 +531,7 @@ void audioDeviceManagerSmokeTestCases() { final audioDeviceManager = rtcEngine.getAudioDeviceManager(); try { - const bool mute = true; + bool mute = true; await audioDeviceManager.setPlaybackDeviceMute( mute, ); @@ -604,7 +604,7 @@ void audioDeviceManagerSmokeTestCases() { final audioDeviceManager = rtcEngine.getAudioDeviceManager(); try { - const bool mute = true; + bool mute = true; await audioDeviceManager.setRecordingDeviceMute( mute, ); @@ -677,7 +677,7 @@ void audioDeviceManagerSmokeTestCases() { final audioDeviceManager = rtcEngine.getAudioDeviceManager(); try { - const String testAudioFilePath = "hello"; + String testAudioFilePath = "hello"; await audioDeviceManager.startPlaybackDeviceTest( testAudioFilePath, ); @@ -750,7 +750,7 @@ void audioDeviceManagerSmokeTestCases() { final audioDeviceManager = rtcEngine.getAudioDeviceManager(); try { - const int indicationInterval = 10; + int indicationInterval = 5; await audioDeviceManager.startRecordingDeviceTest( indicationInterval, ); @@ -823,7 +823,7 @@ void audioDeviceManagerSmokeTestCases() { final audioDeviceManager = rtcEngine.getAudioDeviceManager(); try { - const int indicationInterval = 10; + int indicationInterval = 5; await audioDeviceManager.startAudioDeviceLoopbackTest( indicationInterval, ); @@ -896,7 +896,7 @@ void audioDeviceManagerSmokeTestCases() { final audioDeviceManager = rtcEngine.getAudioDeviceManager(); try { - const bool enable = true; + bool enable = true; await audioDeviceManager.followSystemPlaybackDevice( enable, ); @@ -934,7 +934,7 @@ void audioDeviceManagerSmokeTestCases() { final audioDeviceManager = rtcEngine.getAudioDeviceManager(); try { 
- const bool enable = true; + bool enable = true; await audioDeviceManager.followSystemRecordingDevice( enable, ); @@ -972,7 +972,7 @@ void audioDeviceManagerSmokeTestCases() { final audioDeviceManager = rtcEngine.getAudioDeviceManager(); try { - const bool enable = true; + bool enable = true; await audioDeviceManager.followSystemLoopbackDevice( enable, ); diff --git a/test_shard/fake_test_app/integration_test/generated/h265transcoder_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/h265transcoder_fake_test.generated.dart index 69a37f4d1..8504d6585 100644 --- a/test_shard/fake_test_app/integration_test/generated/h265transcoder_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/h265transcoder_fake_test.generated.dart @@ -25,9 +25,9 @@ void generatedTestCases() { final h265Transcoder = rtcEngine.getH265Transcoder(); try { - const String token = "hello"; - const String channel = "hello"; - const int uid = 10; + String token = "hello"; + String channel = "hello"; + int uid = 5; await h265Transcoder.enableTranscode( token: token, channel: channel, @@ -66,9 +66,9 @@ void generatedTestCases() { final h265Transcoder = rtcEngine.getH265Transcoder(); try { - const String token = "hello"; - const String channel = "hello"; - const int uid = 10; + String token = "hello"; + String channel = "hello"; + int uid = 5; await h265Transcoder.queryChannel( token: token, channel: channel, @@ -107,9 +107,9 @@ void generatedTestCases() { final h265Transcoder = rtcEngine.getH265Transcoder(); try { - const String token = "hello"; - const String channel = "hello"; - const int uid = 10; + String token = "hello"; + String channel = "hello"; + int uid = 5; await h265Transcoder.triggerTranscode( token: token, channel: channel, @@ -149,7 +149,7 @@ void generatedTestCases() { final h265Transcoder = rtcEngine.getH265Transcoder(); try { - final H265TranscoderObserver observer = H265TranscoderObserver( + H265TranscoderObserver observer = H265TranscoderObserver( onEnableTranscode: (H265TranscodeResult result) {}, onQueryChannel: (H265TranscodeResult result, String originChannel, String transcodeChannel) {}, @@ -192,7 +192,7 @@ void generatedTestCases() { final h265Transcoder = rtcEngine.getH265Transcoder(); try { - final H265TranscoderObserver observer = H265TranscoderObserver( + H265TranscoderObserver observer = H265TranscoderObserver( onEnableTranscode: (H265TranscodeResult result) {}, onQueryChannel: (H265TranscodeResult result, String originChannel, String transcodeChannel) {}, diff --git a/test_shard/fake_test_app/integration_test/generated/h265transcoder_h265transcoderobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/h265transcoder_h265transcoderobserver_testcases.generated.dart index 57969daf7..a94723441 100644 --- a/test_shard/fake_test_app/integration_test/generated/h265transcoder_h265transcoderobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/h265transcoder_h265transcoderobserver_testcases.generated.dart @@ -41,7 +41,7 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const H265TranscodeResult result = + H265TranscodeResult result = H265TranscodeResult.h265TranscodeResultUnknown; final eventJson = { @@ -107,10 +107,10 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const H265TranscodeResult result = + H265TranscodeResult result = 
H265TranscodeResult.h265TranscodeResultUnknown; - const String originChannel = "hello"; - const String transcodeChannel = "hello"; + String originChannel = "hello"; + String transcodeChannel = "hello"; final eventJson = { 'result': result.value(), @@ -176,7 +176,7 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const H265TranscodeResult result = + H265TranscodeResult result = H265TranscodeResult.h265TranscodeResultUnknown; final eventJson = { diff --git a/test_shard/fake_test_app/integration_test/generated/localspatialaudioengine_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/localspatialaudioengine_fake_test.generated.dart index 15f716c6f..d54271a27 100644 --- a/test_shard/fake_test_app/integration_test/generated/localspatialaudioengine_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/localspatialaudioengine_fake_test.generated.dart @@ -101,10 +101,10 @@ void localSpatialAudioEngineSmokeTestCases() { await localSpatialAudioEngine.initialize(); try { - const int uid = 10; - const List posInfoPosition = []; - const List posInfoForward = []; - const RemoteVoicePositionInfo posInfo = RemoteVoicePositionInfo( + int uid = 5; + List posInfoPosition = List.filled(5, 5.0); + List posInfoForward = List.filled(5, 5.0); + RemoteVoicePositionInfo posInfo = RemoteVoicePositionInfo( position: posInfoPosition, forward: posInfoForward, ); @@ -148,16 +148,16 @@ void localSpatialAudioEngineSmokeTestCases() { await localSpatialAudioEngine.initialize(); try { - const int uid = 10; - const List posInfoPosition = []; - const List posInfoForward = []; - const RemoteVoicePositionInfo posInfo = RemoteVoicePositionInfo( + int uid = 5; + List posInfoPosition = List.filled(5, 5.0); + List posInfoForward = List.filled(5, 5.0); + RemoteVoicePositionInfo posInfo = RemoteVoicePositionInfo( position: posInfoPosition, forward: posInfoForward, ); - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -202,7 +202,7 @@ void localSpatialAudioEngineSmokeTestCases() { await localSpatialAudioEngine.initialize(); try { - const int uid = 10; + int uid = 5; await localSpatialAudioEngine.removeRemotePosition( uid, ); @@ -242,10 +242,10 @@ void localSpatialAudioEngineSmokeTestCases() { await localSpatialAudioEngine.initialize(); try { - const int uid = 10; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + int uid = 5; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -289,9 +289,9 @@ void localSpatialAudioEngineSmokeTestCases() { await localSpatialAudioEngine.initialize(); try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -334,7 +334,7 @@ void localSpatialAudioEngineSmokeTestCases() { await localSpatialAudioEngine.initialize(); try { - const int maxCount = 10; + int 
maxCount = 5; await localSpatialAudioEngine.setMaxAudioRecvCount( maxCount, ); @@ -374,7 +374,7 @@ void localSpatialAudioEngineSmokeTestCases() { await localSpatialAudioEngine.initialize(); try { - const double range = 10.0; + double range = 5.0; await localSpatialAudioEngine.setAudioRecvRange( range, ); @@ -414,7 +414,7 @@ void localSpatialAudioEngineSmokeTestCases() { await localSpatialAudioEngine.initialize(); try { - const double unit = 10.0; + double unit = 5.0; await localSpatialAudioEngine.setDistanceUnit( unit, ); @@ -454,10 +454,10 @@ void localSpatialAudioEngineSmokeTestCases() { await localSpatialAudioEngine.initialize(); try { - const int playerId = 10; - const List positionInfoPosition = []; - const List positionInfoForward = []; - const RemoteVoicePositionInfo positionInfo = RemoteVoicePositionInfo( + int playerId = 5; + List positionInfoPosition = List.filled(5, 5.0); + List positionInfoForward = List.filled(5, 5.0); + RemoteVoicePositionInfo positionInfo = RemoteVoicePositionInfo( position: positionInfoPosition, forward: positionInfoForward, ); @@ -501,7 +501,7 @@ void localSpatialAudioEngineSmokeTestCases() { await localSpatialAudioEngine.initialize(); try { - const String params = "hello"; + String params = "hello"; await localSpatialAudioEngine.setParameters( params, ); @@ -541,7 +541,7 @@ void localSpatialAudioEngineSmokeTestCases() { await localSpatialAudioEngine.initialize(); try { - const bool mute = true; + bool mute = true; await localSpatialAudioEngine.muteLocalAudioStream( mute, ); @@ -581,7 +581,7 @@ void localSpatialAudioEngineSmokeTestCases() { await localSpatialAudioEngine.initialize(); try { - const bool mute = true; + bool mute = true; await localSpatialAudioEngine.muteAllRemoteAudioStreams( mute, ); @@ -621,8 +621,8 @@ void localSpatialAudioEngineSmokeTestCases() { await localSpatialAudioEngine.initialize(); try { - const int uid = 10; - const bool mute = true; + int uid = 5; + bool mute = true; await localSpatialAudioEngine.muteRemoteAudioStream( uid: uid, mute: mute, @@ -663,9 +663,9 @@ void localSpatialAudioEngineSmokeTestCases() { await localSpatialAudioEngine.initialize(); try { - const int uid = 10; - const double attenuation = 10.0; - const bool forceSet = true; + int uid = 5; + double attenuation = 5.0; + bool forceSet = true; await localSpatialAudioEngine.setRemoteAudioAttenuation( uid: uid, attenuation: attenuation, @@ -707,8 +707,32 @@ void localSpatialAudioEngineSmokeTestCases() { await localSpatialAudioEngine.initialize(); try { - const List zones = []; - const int zoneCount = 10; + final List zones = () { + int zonesItemZoneSetId = 5; + List zonesItemPosition = List.filled(5, 5.0); + List zonesItemForward = List.filled(5, 5.0); + List zonesItemRight = List.filled(5, 5.0); + List zonesItemUp = List.filled(5, 5.0); + double zonesItemForwardLength = 5.0; + double zonesItemRightLength = 5.0; + double zonesItemUpLength = 5.0; + double zonesItemAudioAttenuation = 5.0; + SpatialAudioZone zonesItem = SpatialAudioZone( + zoneSetId: zonesItemZoneSetId, + position: zonesItemPosition, + forward: zonesItemForward, + right: zonesItemRight, + up: zonesItemUp, + forwardLength: zonesItemForwardLength, + rightLength: zonesItemRightLength, + upLength: zonesItemUpLength, + audioAttenuation: zonesItemAudioAttenuation, + ); + + return List.filled(5, zonesItem); + }(); + + int zoneCount = 5; await localSpatialAudioEngine.setZones( zones: zones, zoneCount: zoneCount, @@ -749,9 +773,9 @@ void localSpatialAudioEngineSmokeTestCases() { await 
localSpatialAudioEngine.initialize(); try { - const int playerId = 10; - const double attenuation = 10.0; - const bool forceSet = true; + int playerId = 5; + double attenuation = 5.0; + bool forceSet = true; await localSpatialAudioEngine.setPlayerAttenuation( playerId: playerId, attenuation: attenuation, diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart index dea098e5e..ed3a67ac3 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart @@ -41,21 +41,21 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String channelId = "hello"; - const int uid = 10; - const AudioFrameType audioFrameType = AudioFrameType.frameTypePcm16; - const BytesPerSample audioFrameBytesPerSample = + String channelId = "hello"; + int uid = 5; + AudioFrameType audioFrameType = AudioFrameType.frameTypePcm16; + BytesPerSample audioFrameBytesPerSample = BytesPerSample.twoBytesPerSample; - const int audioFrameSamplesPerChannel = 10; - const int audioFrameChannels = 10; - const int audioFrameSamplesPerSec = 10; - Uint8List audioFrameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int audioFrameRenderTimeMs = 10; - const int audioFrameAvsyncType = 10; - const int audioFramePresentationMs = 10; - const int audioFrameAudioTrackNumber = 10; - const int audioFrameRtpTimestamp = 10; - final AudioFrame audioFrame = AudioFrame( + int audioFrameSamplesPerChannel = 5; + int audioFrameChannels = 5; + int audioFrameSamplesPerSec = 5; + Uint8List audioFrameBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int audioFrameRenderTimeMs = 5; + int audioFrameAvsyncType = 5; + int audioFramePresentationMs = 5; + int audioFrameAudioTrackNumber = 5; + int audioFrameRtpTimestamp = 5; + AudioFrame audioFrame = AudioFrame( type: audioFrameType, samplesPerChannel: audioFrameSamplesPerChannel, bytesPerSample: audioFrameBytesPerSample, diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_faceinfoobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_faceinfoobserver_testcases.generated.dart index d9dd3dae3..86aae4ad6 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_faceinfoobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_faceinfoobserver_testcases.generated.dart @@ -40,7 +40,7 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String outFaceInfo = "hello"; + String outFaceInfo = "hello"; final eventJson = { 'outFaceInfo': outFaceInfo, diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart index 3f5affbe1..a72fec8b7 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart @@ -27,7 +27,7 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - final AudioFrameObserver observer = AudioFrameObserver( + 
AudioFrameObserver observer = AudioFrameObserver( onRecordAudioFrame: (String channelId, AudioFrame audioFrame) {}, onPlaybackAudioFrame: (String channelId, AudioFrame audioFrame) {}, onMixedAudioFrame: (String channelId, AudioFrame audioFrame) {}, @@ -73,7 +73,7 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - final VideoFrameObserver observer = VideoFrameObserver( + VideoFrameObserver observer = VideoFrameObserver( onCaptureVideoFrame: (VideoSourceType sourceType, VideoFrame videoFrame) {}, onPreEncodeVideoFrame: @@ -122,7 +122,7 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - final VideoEncodedFrameObserver observer = VideoEncodedFrameObserver( + VideoEncodedFrameObserver observer = VideoEncodedFrameObserver( onEncodedVideoFrameReceived: (int uid, Uint8List imageBuffer, int length, EncodedVideoFrameInfo videoEncodedFrameInfo) {}, ); @@ -164,7 +164,7 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - final FaceInfoObserver observer = FaceInfoObserver( + FaceInfoObserver observer = FaceInfoObserver( onFaceInfo: (String outFaceInfo) {}, ); mediaEngine.registerFaceInfoObserver( @@ -205,19 +205,18 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - const AudioFrameType frameType = AudioFrameType.frameTypePcm16; - const BytesPerSample frameBytesPerSample = - BytesPerSample.twoBytesPerSample; - const int frameSamplesPerChannel = 10; - const int frameChannels = 10; - const int frameSamplesPerSec = 10; - Uint8List frameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int frameRenderTimeMs = 10; - const int frameAvsyncType = 10; - const int framePresentationMs = 10; - const int frameAudioTrackNumber = 10; - const int frameRtpTimestamp = 10; - final AudioFrame frame = AudioFrame( + AudioFrameType frameType = AudioFrameType.frameTypePcm16; + BytesPerSample frameBytesPerSample = BytesPerSample.twoBytesPerSample; + int frameSamplesPerChannel = 5; + int frameChannels = 5; + int frameSamplesPerSec = 5; + Uint8List frameBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int frameRenderTimeMs = 5; + int frameAvsyncType = 5; + int framePresentationMs = 5; + int frameAudioTrackNumber = 5; + int frameRtpTimestamp = 5; + AudioFrame frame = AudioFrame( type: frameType, samplesPerChannel: frameSamplesPerChannel, bytesPerSample: frameBytesPerSample, @@ -230,7 +229,7 @@ void mediaEngineSmokeTestCases() { audioTrackNumber: frameAudioTrackNumber, rtpTimestamp: frameRtpTimestamp, ); - const int trackId = 10; + int trackId = 5; await mediaEngine.pushAudioFrame( frame: frame, trackId: trackId, @@ -269,19 +268,18 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - const AudioFrameType frameType = AudioFrameType.frameTypePcm16; - const BytesPerSample frameBytesPerSample = - BytesPerSample.twoBytesPerSample; - const int frameSamplesPerChannel = 10; - const int frameChannels = 10; - const int frameSamplesPerSec = 10; - Uint8List frameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int frameRenderTimeMs = 10; - const int frameAvsyncType = 10; - const int framePresentationMs = 10; - const int frameAudioTrackNumber = 10; - const int frameRtpTimestamp = 10; - final AudioFrame frame = AudioFrame( + AudioFrameType frameType = AudioFrameType.frameTypePcm16; + BytesPerSample frameBytesPerSample = BytesPerSample.twoBytesPerSample; + int frameSamplesPerChannel = 5; + int frameChannels = 5; + int 
frameSamplesPerSec = 5; + Uint8List frameBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int frameRenderTimeMs = 5; + int frameAvsyncType = 5; + int framePresentationMs = 5; + int frameAudioTrackNumber = 5; + int frameRtpTimestamp = 5; + AudioFrame frame = AudioFrame( type: frameType, samplesPerChannel: frameSamplesPerChannel, bytesPerSample: frameBytesPerSample, @@ -331,15 +329,14 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - const bool enabled = true; - const bool useTexture = true; - const ExternalVideoSourceType sourceType = - ExternalVideoSourceType.videoFrame; - const TCcMode encodedVideoOptionCcMode = TCcMode.ccEnabled; - const VideoCodecType encodedVideoOptionCodecType = + bool enabled = true; + bool useTexture = true; + ExternalVideoSourceType sourceType = ExternalVideoSourceType.videoFrame; + TCcMode encodedVideoOptionCcMode = TCcMode.ccEnabled; + VideoCodecType encodedVideoOptionCodecType = VideoCodecType.videoCodecNone; - const int encodedVideoOptionTargetBitrate = 10; - const SenderOptions encodedVideoOption = SenderOptions( + int encodedVideoOptionTargetBitrate = 5; + SenderOptions encodedVideoOption = SenderOptions( ccMode: encodedVideoOptionCcMode, codecType: encodedVideoOptionCodecType, targetBitrate: encodedVideoOptionTargetBitrate, @@ -385,11 +382,11 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - const bool enabled = true; - const int sampleRate = 10; - const int channels = 10; - const bool localPlayback = true; - const bool publish = true; + bool enabled = true; + int sampleRate = 5; + int channels = 5; + bool localPlayback = true; + bool publish = true; await mediaEngine.setExternalAudioSource( enabled: enabled, sampleRate: sampleRate, @@ -432,7 +429,7 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - const int trackId = 10; + int trackId = 5; await mediaEngine.destroyCustomAudioTrack( trackId, ); @@ -471,9 +468,9 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - const bool enabled = true; - const int sampleRate = 10; - const int channels = 10; + bool enabled = true; + int sampleRate = 5; + int channels = 5; await mediaEngine.setExternalAudioSink( enabled: enabled, sampleRate: sampleRate, @@ -514,8 +511,8 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - const int trackId = 10; - const bool enabled = true; + int trackId = 5; + bool enabled = true; await mediaEngine.enableCustomAudioLocalPlayback( trackId: trackId, enabled: enabled, @@ -555,26 +552,26 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - const VideoBufferType frameType = VideoBufferType.videoBufferRawData; - const VideoPixelFormat frameFormat = VideoPixelFormat.videoPixelDefault; - const EglContextType frameEglType = EglContextType.eglContext10; - Uint8List frameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int frameStride = 10; - const int frameHeight = 10; - const int frameCropLeft = 10; - const int frameCropTop = 10; - const int frameCropRight = 10; - const int frameCropBottom = 10; - const int frameRotation = 10; - const int frameTimestamp = 10; - const int frameTextureId = 10; - const List frameMatrix = []; - Uint8List frameMetadataBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int frameMetadataSize = 10; - Uint8List frameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const bool frameFillAlphaBuffer = true; - const 
int frameTextureSliceIndex = 10; - final ExternalVideoFrame frame = ExternalVideoFrame( + VideoBufferType frameType = VideoBufferType.videoBufferRawData; + VideoPixelFormat frameFormat = VideoPixelFormat.videoPixelDefault; + EglContextType frameEglType = EglContextType.eglContext10; + Uint8List frameBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int frameStride = 5; + int frameHeight = 5; + int frameCropLeft = 5; + int frameCropTop = 5; + int frameCropRight = 5; + int frameCropBottom = 5; + int frameRotation = 5; + int frameTimestamp = 5; + int frameTextureId = 5; + List frameMatrix = List.filled(5, 5.0); + Uint8List frameMetadataBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int frameMetadataSize = 5; + Uint8List frameAlphaBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + bool frameFillAlphaBuffer = true; + int frameTextureSliceIndex = 5; + ExternalVideoFrame frame = ExternalVideoFrame( type: frameType, format: frameFormat, buffer: frameBuffer, @@ -595,7 +592,7 @@ void mediaEngineSmokeTestCases() { fillAlphaBuffer: frameFillAlphaBuffer, textureSliceIndex: frameTextureSliceIndex, ); - const int videoTrackId = 10; + int videoTrackId = 5; await mediaEngine.pushVideoFrame( frame: frame, videoTrackId: videoTrackId, @@ -634,26 +631,25 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - Uint8List imageBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int length = 10; - const VideoCodecType videoEncodedFrameInfoCodecType = + Uint8List imageBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int length = 5; + VideoCodecType videoEncodedFrameInfoCodecType = VideoCodecType.videoCodecNone; - const VideoFrameType videoEncodedFrameInfoFrameType = + VideoFrameType videoEncodedFrameInfoFrameType = VideoFrameType.videoFrameTypeBlankFrame; - const VideoOrientation videoEncodedFrameInfoRotation = + VideoOrientation videoEncodedFrameInfoRotation = VideoOrientation.videoOrientation0; - const VideoStreamType videoEncodedFrameInfoStreamType = + VideoStreamType videoEncodedFrameInfoStreamType = VideoStreamType.videoStreamHigh; - const int videoEncodedFrameInfoUid = 10; - const int videoEncodedFrameInfoWidth = 10; - const int videoEncodedFrameInfoHeight = 10; - const int videoEncodedFrameInfoFramesPerSecond = 10; - const int videoEncodedFrameInfoTrackId = 10; - const int videoEncodedFrameInfoCaptureTimeMs = 10; - const int videoEncodedFrameInfoDecodeTimeMs = 10; - const int videoEncodedFrameInfoPresentationMs = 10; - const EncodedVideoFrameInfo videoEncodedFrameInfo = - EncodedVideoFrameInfo( + int videoEncodedFrameInfoUid = 5; + int videoEncodedFrameInfoWidth = 5; + int videoEncodedFrameInfoHeight = 5; + int videoEncodedFrameInfoFramesPerSecond = 5; + int videoEncodedFrameInfoTrackId = 5; + int videoEncodedFrameInfoCaptureTimeMs = 5; + int videoEncodedFrameInfoDecodeTimeMs = 5; + int videoEncodedFrameInfoPresentationMs = 5; + EncodedVideoFrameInfo videoEncodedFrameInfo = EncodedVideoFrameInfo( uid: videoEncodedFrameInfoUid, codecType: videoEncodedFrameInfoCodecType, width: videoEncodedFrameInfoWidth, @@ -667,7 +663,7 @@ void mediaEngineSmokeTestCases() { streamType: videoEncodedFrameInfoStreamType, presentationMs: videoEncodedFrameInfoPresentationMs, ); - const int videoTrackId = 10; + int videoTrackId = 5; await mediaEngine.pushEncodedVideoImage( imageBuffer: imageBuffer, length: length, @@ -744,7 +740,7 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - final AudioFrameObserver observer = AudioFrameObserver( + 
AudioFrameObserver observer = AudioFrameObserver( onRecordAudioFrame: (String channelId, AudioFrame audioFrame) {}, onPlaybackAudioFrame: (String channelId, AudioFrame audioFrame) {}, onMixedAudioFrame: (String channelId, AudioFrame audioFrame) {}, @@ -790,7 +786,7 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - final VideoFrameObserver observer = VideoFrameObserver( + VideoFrameObserver observer = VideoFrameObserver( onCaptureVideoFrame: (VideoSourceType sourceType, VideoFrame videoFrame) {}, onPreEncodeVideoFrame: @@ -839,7 +835,7 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - final VideoEncodedFrameObserver observer = VideoEncodedFrameObserver( + VideoEncodedFrameObserver observer = VideoEncodedFrameObserver( onEncodedVideoFrameReceived: (int uid, Uint8List imageBuffer, int length, EncodedVideoFrameInfo videoEncodedFrameInfo) {}, ); @@ -881,7 +877,7 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - final FaceInfoObserver observer = FaceInfoObserver( + FaceInfoObserver observer = FaceInfoObserver( onFaceInfo: (String outFaceInfo) {}, ); mediaEngine.unregisterFaceInfoObserver( diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_videoencodedframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_videoencodedframeobserver_testcases.generated.dart index c112a837e..047ed782a 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_videoencodedframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_videoencodedframeobserver_testcases.generated.dart @@ -41,27 +41,26 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int uid = 10; - Uint8List imageBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int length = 10; - const VideoCodecType videoEncodedFrameInfoCodecType = + int uid = 5; + Uint8List imageBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int length = 5; + VideoCodecType videoEncodedFrameInfoCodecType = VideoCodecType.videoCodecNone; - const VideoFrameType videoEncodedFrameInfoFrameType = + VideoFrameType videoEncodedFrameInfoFrameType = VideoFrameType.videoFrameTypeBlankFrame; - const VideoOrientation videoEncodedFrameInfoRotation = + VideoOrientation videoEncodedFrameInfoRotation = VideoOrientation.videoOrientation0; - const VideoStreamType videoEncodedFrameInfoStreamType = + VideoStreamType videoEncodedFrameInfoStreamType = VideoStreamType.videoStreamHigh; - const int videoEncodedFrameInfoUid = 10; - const int videoEncodedFrameInfoWidth = 10; - const int videoEncodedFrameInfoHeight = 10; - const int videoEncodedFrameInfoFramesPerSecond = 10; - const int videoEncodedFrameInfoTrackId = 10; - const int videoEncodedFrameInfoCaptureTimeMs = 10; - const int videoEncodedFrameInfoDecodeTimeMs = 10; - const int videoEncodedFrameInfoPresentationMs = 10; - const EncodedVideoFrameInfo videoEncodedFrameInfo = - EncodedVideoFrameInfo( + int videoEncodedFrameInfoUid = 5; + int videoEncodedFrameInfoWidth = 5; + int videoEncodedFrameInfoHeight = 5; + int videoEncodedFrameInfoFramesPerSecond = 5; + int videoEncodedFrameInfoTrackId = 5; + int videoEncodedFrameInfoCaptureTimeMs = 5; + int videoEncodedFrameInfoDecodeTimeMs = 5; + int videoEncodedFrameInfoPresentationMs = 5; + EncodedVideoFrameInfo videoEncodedFrameInfo = EncodedVideoFrameInfo( uid: 
videoEncodedFrameInfoUid, codecType: videoEncodedFrameInfoCodecType, width: videoEncodedFrameInfoWidth, diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_videoframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_videoframeobserver_testcases.generated.dart index 26726a41f..2de559fa9 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_videoframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_videoframeobserver_testcases.generated.dart @@ -41,30 +41,28 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const VideoSourceType sourceType = - VideoSourceType.videoSourceCameraPrimary; - const VideoPixelFormat videoFrameType = - VideoPixelFormat.videoPixelDefault; - const VideoFrameMetaInfo? videoFrameMetaInfo = null; - const int videoFrameWidth = 10; - const int videoFrameHeight = 10; - const int videoFrameYStride = 10; - const int videoFrameUStride = 10; - const int videoFrameVStride = 10; - Uint8List videoFrameYBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameUBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameVBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameRotation = 10; - const int videoFrameRenderTimeMs = 10; - const int videoFrameAvsyncType = 10; + VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary; + VideoPixelFormat videoFrameType = VideoPixelFormat.videoPixelDefault; + VideoFrameMetaInfo? videoFrameMetaInfo = null; + int videoFrameWidth = 5; + int videoFrameHeight = 5; + int videoFrameYStride = 5; + int videoFrameUStride = 5; + int videoFrameVStride = 5; + Uint8List videoFrameYBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List videoFrameUBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List videoFrameVBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int videoFrameRotation = 5; + int videoFrameRenderTimeMs = 5; + int videoFrameAvsyncType = 5; Uint8List videoFrameMetadataBuffer = - Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameMetadataSize = 10; - const int videoFrameTextureId = 10; - const List videoFrameMatrix = []; - Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFramePixelBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - final VideoFrame videoFrame = VideoFrame( + Uint8List.fromList([1, 1, 1, 1, 1]); + int videoFrameMetadataSize = 5; + int videoFrameTextureId = 5; + List videoFrameMatrix = List.filled(5, 5.0); + Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List videoFramePixelBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + VideoFrame videoFrame = VideoFrame( type: videoFrameType, width: videoFrameWidth, height: videoFrameHeight, @@ -149,30 +147,28 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const VideoSourceType sourceType = - VideoSourceType.videoSourceCameraPrimary; - const VideoPixelFormat videoFrameType = - VideoPixelFormat.videoPixelDefault; - const VideoFrameMetaInfo? 
videoFrameMetaInfo = null; - const int videoFrameWidth = 10; - const int videoFrameHeight = 10; - const int videoFrameYStride = 10; - const int videoFrameUStride = 10; - const int videoFrameVStride = 10; - Uint8List videoFrameYBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameUBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameVBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameRotation = 10; - const int videoFrameRenderTimeMs = 10; - const int videoFrameAvsyncType = 10; + VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary; + VideoPixelFormat videoFrameType = VideoPixelFormat.videoPixelDefault; + VideoFrameMetaInfo? videoFrameMetaInfo = null; + int videoFrameWidth = 5; + int videoFrameHeight = 5; + int videoFrameYStride = 5; + int videoFrameUStride = 5; + int videoFrameVStride = 5; + Uint8List videoFrameYBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List videoFrameUBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List videoFrameVBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int videoFrameRotation = 5; + int videoFrameRenderTimeMs = 5; + int videoFrameAvsyncType = 5; Uint8List videoFrameMetadataBuffer = - Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameMetadataSize = 10; - const int videoFrameTextureId = 10; - const List videoFrameMatrix = []; - Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFramePixelBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - final VideoFrame videoFrame = VideoFrame( + Uint8List.fromList([1, 1, 1, 1, 1]); + int videoFrameMetadataSize = 5; + int videoFrameTextureId = 5; + List videoFrameMatrix = List.filled(5, 5.0); + Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List videoFramePixelBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + VideoFrame videoFrame = VideoFrame( type: videoFrameType, width: videoFrameWidth, height: videoFrameHeight, @@ -256,28 +252,27 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const VideoPixelFormat videoFrameType = - VideoPixelFormat.videoPixelDefault; - const VideoFrameMetaInfo? videoFrameMetaInfo = null; - const int videoFrameWidth = 10; - const int videoFrameHeight = 10; - const int videoFrameYStride = 10; - const int videoFrameUStride = 10; - const int videoFrameVStride = 10; - Uint8List videoFrameYBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameUBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameVBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameRotation = 10; - const int videoFrameRenderTimeMs = 10; - const int videoFrameAvsyncType = 10; + VideoPixelFormat videoFrameType = VideoPixelFormat.videoPixelDefault; + VideoFrameMetaInfo? 
videoFrameMetaInfo = null; + int videoFrameWidth = 5; + int videoFrameHeight = 5; + int videoFrameYStride = 5; + int videoFrameUStride = 5; + int videoFrameVStride = 5; + Uint8List videoFrameYBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List videoFrameUBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List videoFrameVBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int videoFrameRotation = 5; + int videoFrameRenderTimeMs = 5; + int videoFrameAvsyncType = 5; Uint8List videoFrameMetadataBuffer = - Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameMetadataSize = 10; - const int videoFrameTextureId = 10; - const List videoFrameMatrix = []; - Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFramePixelBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - final VideoFrame videoFrame = VideoFrame( + Uint8List.fromList([1, 1, 1, 1, 1]); + int videoFrameMetadataSize = 5; + int videoFrameTextureId = 5; + List videoFrameMatrix = List.filled(5, 5.0); + Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List videoFramePixelBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + VideoFrame videoFrame = VideoFrame( type: videoFrameType, width: videoFrameWidth, height: videoFrameHeight, @@ -298,7 +293,7 @@ void generatedTestCases(ValueGetter irisTester) { pixelBuffer: videoFramePixelBuffer, metaInfo: videoFrameMetaInfo, ); - const int mediaPlayerId = 10; + int mediaPlayerId = 5; final eventJson = { 'videoFrame': videoFrame.toJson(), @@ -363,30 +358,29 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String channelId = "hello"; - const int remoteUid = 10; - const VideoPixelFormat videoFrameType = - VideoPixelFormat.videoPixelDefault; - const VideoFrameMetaInfo? videoFrameMetaInfo = null; - const int videoFrameWidth = 10; - const int videoFrameHeight = 10; - const int videoFrameYStride = 10; - const int videoFrameUStride = 10; - const int videoFrameVStride = 10; - Uint8List videoFrameYBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameUBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameVBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameRotation = 10; - const int videoFrameRenderTimeMs = 10; - const int videoFrameAvsyncType = 10; + String channelId = "hello"; + int remoteUid = 5; + VideoPixelFormat videoFrameType = VideoPixelFormat.videoPixelDefault; + VideoFrameMetaInfo? 
videoFrameMetaInfo = null; + int videoFrameWidth = 5; + int videoFrameHeight = 5; + int videoFrameYStride = 5; + int videoFrameUStride = 5; + int videoFrameVStride = 5; + Uint8List videoFrameYBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List videoFrameUBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List videoFrameVBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int videoFrameRotation = 5; + int videoFrameRenderTimeMs = 5; + int videoFrameAvsyncType = 5; Uint8List videoFrameMetadataBuffer = - Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameMetadataSize = 10; - const int videoFrameTextureId = 10; - const List videoFrameMatrix = []; - Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFramePixelBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - final VideoFrame videoFrame = VideoFrame( + Uint8List.fromList([1, 1, 1, 1, 1]); + int videoFrameMetadataSize = 5; + int videoFrameTextureId = 5; + List videoFrameMatrix = List.filled(5, 5.0); + Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List videoFramePixelBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + VideoFrame videoFrame = VideoFrame( type: videoFrameType, width: videoFrameWidth, height: videoFrameHeight, @@ -471,28 +465,27 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const VideoPixelFormat videoFrameType = - VideoPixelFormat.videoPixelDefault; - const VideoFrameMetaInfo? videoFrameMetaInfo = null; - const int videoFrameWidth = 10; - const int videoFrameHeight = 10; - const int videoFrameYStride = 10; - const int videoFrameUStride = 10; - const int videoFrameVStride = 10; - Uint8List videoFrameYBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameUBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameVBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameRotation = 10; - const int videoFrameRenderTimeMs = 10; - const int videoFrameAvsyncType = 10; + VideoPixelFormat videoFrameType = VideoPixelFormat.videoPixelDefault; + VideoFrameMetaInfo? 
videoFrameMetaInfo = null; + int videoFrameWidth = 5; + int videoFrameHeight = 5; + int videoFrameYStride = 5; + int videoFrameUStride = 5; + int videoFrameVStride = 5; + Uint8List videoFrameYBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List videoFrameUBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List videoFrameVBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int videoFrameRotation = 5; + int videoFrameRenderTimeMs = 5; + int videoFrameAvsyncType = 5; Uint8List videoFrameMetadataBuffer = - Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameMetadataSize = 10; - const int videoFrameTextureId = 10; - const List videoFrameMatrix = []; - Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFramePixelBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - final VideoFrame videoFrame = VideoFrame( + Uint8List.fromList([1, 1, 1, 1, 1]); + int videoFrameMetadataSize = 5; + int videoFrameTextureId = 5; + List videoFrameMatrix = List.filled(5, 5.0); + Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List videoFramePixelBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + VideoFrame videoFrame = VideoFrame( type: videoFrameType, width: videoFrameWidth, height: videoFrameHeight, diff --git a/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiopcmframesink_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiopcmframesink_testcases.generated.dart index 15216a2f6..06ea82b10 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiopcmframesink_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiopcmframesink_testcases.generated.dart @@ -34,7 +34,7 @@ void generatedTestCases(ValueGetter irisTester) { }, ); - const RawAudioFrameOpModeType mode = + RawAudioFrameOpModeType mode = RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly; mediaPlayerController.registerAudioFrameObserver( @@ -46,14 +46,13 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const BytesPerSample frameBytesPerSample = - BytesPerSample.twoBytesPerSample; - const int frameCaptureTimestamp = 10; - const int frameSamplesPerChannel = 10; - const int frameSampleRateHz = 10; - const int frameNumChannels = 10; - const List frameData = []; - const AudioPcmFrame frame = AudioPcmFrame( + BytesPerSample frameBytesPerSample = BytesPerSample.twoBytesPerSample; + int frameCaptureTimestamp = 5; + int frameSamplesPerChannel = 5; + int frameSampleRateHz = 5; + int frameNumChannels = 5; + List frameData = List.filled(5, 5); + AudioPcmFrame frame = AudioPcmFrame( captureTimestamp: frameCaptureTimestamp, samplesPerChannel: frameSamplesPerChannel, sampleRateHz: frameSampleRateHz, diff --git a/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiospectrumobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiospectrumobserver_testcases.generated.dart index 371bf28b1..aed2000ca 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiospectrumobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiospectrumobserver_testcases.generated.dart @@ -34,7 +34,7 @@ void generatedTestCases(ValueGetter irisTester) { }, ); - const int intervalInMS = 10; + int intervalInMS = 5; mediaPlayerController.registerMediaPlayerAudioSpectrumObserver( observer: 
theAudioSpectrumObserver, @@ -45,9 +45,9 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const List dataAudioSpectrumData = []; - const int dataDataLength = 10; - const AudioSpectrumData data = AudioSpectrumData( + List dataAudioSpectrumData = List.filled(5, 5.0); + int dataDataLength = 5; + AudioSpectrumData data = AudioSpectrumData( audioSpectrumData: dataAudioSpectrumData, dataLength: dataDataLength, ); @@ -107,7 +107,7 @@ void generatedTestCases(ValueGetter irisTester) { }, ); - const int intervalInMS = 10; + int intervalInMS = 5; mediaPlayerController.registerMediaPlayerAudioSpectrumObserver( observer: theAudioSpectrumObserver, @@ -118,8 +118,23 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const List spectrums = []; - const int spectrumNumber = 10; + final List spectrums = () { + List spectrumDataAudioSpectrumData = List.filled(5, 5.0); + int spectrumDataDataLength = 5; + AudioSpectrumData spectrumsItemSpectrumData = AudioSpectrumData( + audioSpectrumData: spectrumDataAudioSpectrumData, + dataLength: spectrumDataDataLength, + ); + int spectrumsItemUid = 5; + UserAudioSpectrumInfo spectrumsItem = UserAudioSpectrumInfo( + uid: spectrumsItemUid, + spectrumData: spectrumsItemSpectrumData, + ); + + return List.filled(5, spectrumsItem); + }(); + + int spectrumNumber = 5; final eventJson = { 'spectrums': spectrums, diff --git a/test_shard/fake_test_app/integration_test/generated/mediaplayer_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaplayer_fake_test.generated.dart index 19133f9b9..c1dc78243 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaplayer_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaplayer_fake_test.generated.dart @@ -66,8 +66,8 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String url = "hello"; - const int startPos = 10; + String url = "hello"; + int startPos = 5; await mediaPlayerController.open( url: url, startPos: startPos, @@ -108,15 +108,15 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String sourceUrl = "hello"; - const String sourceUri = "hello"; - const int sourceStartPos = 10; - const bool sourceAutoPlay = true; - const bool sourceEnableCache = true; - const bool sourceEnableMultiAudioTrack = true; - const bool sourceIsAgoraSource = true; - const bool sourceIsLiveSource = true; - const MediaSource source = MediaSource( + String sourceUrl = "hello"; + String sourceUri = "hello"; + int sourceStartPos = 5; + bool sourceAutoPlay = true; + bool sourceEnableCache = true; + bool sourceEnableMultiAudioTrack = true; + bool sourceIsAgoraSource = true; + bool sourceIsLiveSource = true; + MediaSource source = MediaSource( url: sourceUrl, uri: sourceUri, startPos: sourceStartPos, @@ -314,7 +314,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const int newPos = 10; + int newPos = 5; await mediaPlayerController.seek( newPos, ); @@ -354,7 +354,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const int pitch = 10; + int pitch = 5; await mediaPlayerController.setAudioPitch( pitch, ); @@ -505,7 +505,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const int index = 10; + int index = 5; await 
mediaPlayerController.getStreamInfo( index, ); @@ -545,7 +545,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const int loopCount = 10; + int loopCount = 5; await mediaPlayerController.setLoopCount( loopCount, ); @@ -585,7 +585,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const int speed = 10; + int speed = 5; await mediaPlayerController.setPlaybackSpeed( speed, ); @@ -625,7 +625,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const int index = 10; + int index = 5; await mediaPlayerController.selectAudioTrack( index, ); @@ -665,8 +665,8 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const int playoutTrackIndex = 10; - const int publishTrackIndex = 10; + int playoutTrackIndex = 5; + int publishTrackIndex = 5; await mediaPlayerController.selectMultiAudioTrack( playoutTrackIndex: playoutTrackIndex, publishTrackIndex: publishTrackIndex, @@ -708,7 +708,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String filename = "hello"; + String filename = "hello"; await mediaPlayerController.takeScreenshot( filename, ); @@ -748,7 +748,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const int index = 10; + int index = 5; await mediaPlayerController.selectInternalSubtitle( index, ); @@ -789,7 +789,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String url = "hello"; + String url = "hello"; await mediaPlayerController.setExternalSubtitle( url, ); @@ -867,7 +867,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const bool muted = true; + bool muted = true; await mediaPlayerController.mute( muted, ); @@ -944,7 +944,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const int volume = 10; + int volume = 5; await mediaPlayerController.adjustPlayoutVolume( volume, ); @@ -1022,7 +1022,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const int volume = 10; + int volume = 5; await mediaPlayerController.adjustPublishSignalVolume( volume, ); @@ -1101,7 +1101,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const int view = 10; + int view = 5; await mediaPlayerController.setView( view, ); @@ -1141,7 +1141,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const RenderModeType renderMode = RenderModeType.renderModeHidden; + RenderModeType renderMode = RenderModeType.renderModeHidden; await mediaPlayerController.setRenderMode( renderMode, ); @@ -1181,7 +1181,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - final MediaPlayerSourceObserver observer = MediaPlayerSourceObserver( + MediaPlayerSourceObserver observer = MediaPlayerSourceObserver( onPlayerSourceStateChanged: (MediaPlayerState state, MediaPlayerReason reason) {}, onPositionChanged: (int positionMs, int timestampMs) {}, @@ -1238,7 +1238,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - final MediaPlayerSourceObserver observer = MediaPlayerSourceObserver( + MediaPlayerSourceObserver observer = MediaPlayerSourceObserver( onPlayerSourceStateChanged: 
(MediaPlayerState state, MediaPlayerReason reason) {}, onPositionChanged: (int positionMs, int timestampMs) {}, @@ -1295,10 +1295,10 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - final AudioPcmFrameSink observer = AudioPcmFrameSink( + AudioPcmFrameSink observer = AudioPcmFrameSink( onFrame: (AudioPcmFrame frame) {}, ); - const RawAudioFrameOpModeType mode = + RawAudioFrameOpModeType mode = RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly; mediaPlayerController.registerAudioFrameObserver( observer: observer, @@ -1341,7 +1341,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - final AudioPcmFrameSink observer = AudioPcmFrameSink( + AudioPcmFrameSink observer = AudioPcmFrameSink( onFrame: (AudioPcmFrame frame) {}, ); mediaPlayerController.unregisterAudioFrameObserver( @@ -1384,8 +1384,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - final MediaPlayerVideoFrameObserver observer = - MediaPlayerVideoFrameObserver( + MediaPlayerVideoFrameObserver observer = MediaPlayerVideoFrameObserver( onFrame: (VideoFrame frame) {}, ); mediaPlayerController.registerVideoFrameObserver( @@ -1428,8 +1427,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - final MediaPlayerVideoFrameObserver observer = - MediaPlayerVideoFrameObserver( + MediaPlayerVideoFrameObserver observer = MediaPlayerVideoFrameObserver( onFrame: (VideoFrame frame) {}, ); mediaPlayerController.unregisterVideoFrameObserver( @@ -1472,11 +1470,11 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - final AudioSpectrumObserver observer = AudioSpectrumObserver( + AudioSpectrumObserver observer = AudioSpectrumObserver( onLocalAudioSpectrum: (AudioSpectrumData data) {}, onRemoteAudioSpectrum: (List spectrums, int spectrumNumber) {}, ); - const int intervalInMS = 10; + int intervalInMS = 5; mediaPlayerController.registerMediaPlayerAudioSpectrumObserver( observer: observer, intervalInMS: intervalInMS, @@ -1518,7 +1516,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - final AudioSpectrumObserver observer = AudioSpectrumObserver( + AudioSpectrumObserver observer = AudioSpectrumObserver( onLocalAudioSpectrum: (AudioSpectrumData data) {}, onRemoteAudioSpectrum: (List spectrums, int spectrumNumber) {}, ); @@ -1562,7 +1560,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const AudioDualMonoMode mode = AudioDualMonoMode.audioDualMonoStereo; + AudioDualMonoMode mode = AudioDualMonoMode.audioDualMonoStereo; await mediaPlayerController.setAudioDualMonoMode( mode, ); @@ -1678,8 +1676,8 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String src = "hello"; - const int startPos = 10; + String src = "hello"; + int startPos = 5; await mediaPlayerController.openWithAgoraCDNSrc( src: src, startPos: startPos, @@ -1759,7 +1757,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const int index = 10; + int index = 5; await mediaPlayerController.switchAgoraCDNLineByIndex( index, ); @@ -1838,7 +1836,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const bool enable = true; + bool enable = true; await mediaPlayerController.enableAutoSwitchAgoraCDN( enable, ); @@ -1879,8 +1877,8 @@ 
void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String token = "hello"; - const int ts = 10; + String token = "hello"; + int ts = 5; await mediaPlayerController.renewAgoraCDNSrcToken( token: token, ts: ts, @@ -1922,8 +1920,8 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String src = "hello"; - const bool syncPts = true; + String src = "hello"; + bool syncPts = true; await mediaPlayerController.switchAgoraCDNSrc( src: src, syncPts: syncPts, @@ -1964,8 +1962,8 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String src = "hello"; - const bool syncPts = true; + String src = "hello"; + bool syncPts = true; await mediaPlayerController.switchSrc( src: src, syncPts: syncPts, @@ -2006,8 +2004,8 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String src = "hello"; - const int startPos = 10; + String src = "hello"; + int startPos = 5; await mediaPlayerController.preloadSrc( src: src, startPos: startPos, @@ -2048,7 +2046,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String src = "hello"; + String src = "hello"; await mediaPlayerController.playPreloadedSrc( src, ); @@ -2088,7 +2086,7 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String src = "hello"; + String src = "hello"; await mediaPlayerController.unloadSrc( src, ); @@ -2128,15 +2126,15 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const double paramsSpeakerAzimuth = 10.0; - const double paramsSpeakerElevation = 10.0; - const double paramsSpeakerDistance = 10.0; - const int paramsSpeakerOrientation = 10; - const bool paramsEnableBlur = true; - const bool paramsEnableAirAbsorb = true; - const double paramsSpeakerAttenuation = 10.0; - const bool paramsEnableDoppler = true; - const SpatialAudioParams params = SpatialAudioParams( + double paramsSpeakerAzimuth = 5.0; + double paramsSpeakerElevation = 5.0; + double paramsSpeakerDistance = 5.0; + int paramsSpeakerOrientation = 5; + bool paramsEnableBlur = true; + bool paramsEnableAirAbsorb = true; + double paramsSpeakerAttenuation = 5.0; + bool paramsEnableDoppler = true; + SpatialAudioParams params = SpatialAudioParams( speakerAzimuth: paramsSpeakerAzimuth, speakerElevation: paramsSpeakerElevation, speakerDistance: paramsSpeakerDistance, @@ -2186,8 +2184,8 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const double pan = 10.0; - const double gain = 10.0; + double pan = 5.0; + double gain = 5.0; await mediaPlayerController.setSoundPositionParams( pan: pan, gain: gain, @@ -2229,8 +2227,8 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String key = "hello"; - const int value = 10; + String key = "hello"; + int value = 5; await mediaPlayerController.setPlayerOptionInInt( key: key, value: value, @@ -2272,8 +2270,8 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String key = "hello"; - const String value = "hello"; + String key = "hello"; + String value = "hello"; await mediaPlayerController.setPlayerOptionInString( key: key, value: value, diff --git a/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayersourceobserver_testcases.generated.dart 
b/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayersourceobserver_testcases.generated.dart index 05cf3b944..dc1fd2029 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayersourceobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayersourceobserver_testcases.generated.dart @@ -43,8 +43,8 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const MediaPlayerState state = MediaPlayerState.playerStateIdle; - const MediaPlayerReason reason = MediaPlayerReason.playerReasonNone; + MediaPlayerState state = MediaPlayerState.playerStateIdle; + MediaPlayerReason reason = MediaPlayerReason.playerReasonNone; final eventJson = { 'state': state.value(), @@ -111,8 +111,8 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int positionMs = 10; - const int timestampMs = 10; + int positionMs = 5; + int timestampMs = 5; final eventJson = { 'positionMs': positionMs, @@ -180,10 +180,9 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const MediaPlayerEvent eventCode = - MediaPlayerEvent.playerEventSeekBegin; - const int elapsedTime = 10; - const String message = "hello"; + MediaPlayerEvent eventCode = MediaPlayerEvent.playerEventSeekBegin; + int elapsedTime = 5; + String message = "hello"; final eventJson = { 'eventCode': eventCode.value(), @@ -250,8 +249,8 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - Uint8List data = Uint8List.fromList([1, 2, 3, 4, 5]); - const int length = 10; + Uint8List data = Uint8List.fromList([1, 1, 1, 1, 1]); + int length = 5; final eventJson = { 'data': data.toList(), @@ -317,7 +316,7 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int playCachedBuffer = 10; + int playCachedBuffer = 5; final eventJson = { 'playCachedBuffer': playCachedBuffer, @@ -383,9 +382,8 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String src = "hello"; - const PlayerPreloadEvent event = - PlayerPreloadEvent.playerPreloadEventBegin; + String src = "hello"; + PlayerPreloadEvent event = PlayerPreloadEvent.playerPreloadEventBegin; final eventJson = { 'src': src, @@ -574,15 +572,15 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int fromBitrateInKbps = 10; - const String fromName = "hello"; - const SrcInfo from = SrcInfo( + int fromBitrateInKbps = 5; + String fromName = "hello"; + SrcInfo from = SrcInfo( bitrateInKbps: fromBitrateInKbps, name: fromName, ); - const int toBitrateInKbps = 10; - const String toName = "hello"; - const SrcInfo to = SrcInfo( + int toBitrateInKbps = 5; + String toName = "hello"; + SrcInfo to = SrcInfo( bitrateInKbps: toBitrateInKbps, name: toName, ); @@ -652,14 +650,14 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String infoInternalPlayerUuid = "hello"; - const String infoDeviceId = "hello"; - const int infoVideoHeight = 10; - const int infoVideoWidth = 10; - const int infoAudioSampleRate = 10; - const int infoAudioChannels = 10; - const int infoAudioBitsPerSample = 10; - const PlayerUpdatedInfo info = PlayerUpdatedInfo( + String 
infoInternalPlayerUuid = "hello"; + String infoDeviceId = "hello"; + int infoVideoHeight = 5; + int infoVideoWidth = 5; + int infoAudioSampleRate = 5; + int infoAudioChannels = 5; + int infoAudioBitsPerSample = 5; + PlayerUpdatedInfo info = PlayerUpdatedInfo( internalPlayerUuid: infoInternalPlayerUuid, deviceId: infoDeviceId, videoHeight: infoVideoHeight, @@ -733,10 +731,10 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int statsFileSize = 10; - const int statsCacheSize = 10; - const int statsDownloadSize = 10; - const CacheStatistics stats = CacheStatistics( + int statsFileSize = 5; + int statsCacheSize = 5; + int statsDownloadSize = 5; + CacheStatistics stats = CacheStatistics( fileSize: statsFileSize, cacheSize: statsCacheSize, downloadSize: statsDownloadSize, @@ -806,11 +804,11 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int statsVideoFps = 10; - const int statsVideoBitrateInKbps = 10; - const int statsAudioBitrateInKbps = 10; - const int statsTotalBitrateInKbps = 10; - const PlayerPlaybackStats stats = PlayerPlaybackStats( + int statsVideoFps = 5; + int statsVideoBitrateInKbps = 5; + int statsAudioBitrateInKbps = 5; + int statsTotalBitrateInKbps = 5; + PlayerPlaybackStats stats = PlayerPlaybackStats( videoFps: statsVideoFps, videoBitrateInKbps: statsVideoBitrateInKbps, audioBitrateInKbps: statsAudioBitrateInKbps, @@ -881,7 +879,7 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int volume = 10; + int volume = 5; final eventJson = { 'volume': volume, diff --git a/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayervideoframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayervideoframeobserver_testcases.generated.dart index 448db927f..ad45b529b 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayervideoframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayervideoframeobserver_testcases.generated.dart @@ -42,26 +42,26 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const VideoPixelFormat frameType = VideoPixelFormat.videoPixelDefault; - const VideoFrameMetaInfo? frameMetaInfo = null; - const int frameWidth = 10; - const int frameHeight = 10; - const int frameYStride = 10; - const int frameUStride = 10; - const int frameVStride = 10; - Uint8List frameYBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List frameUBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List frameVBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int frameRotation = 10; - const int frameRenderTimeMs = 10; - const int frameAvsyncType = 10; - Uint8List frameMetadataBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int frameMetadataSize = 10; - const int frameTextureId = 10; - const List frameMatrix = []; - Uint8List frameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List framePixelBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - final VideoFrame frame = VideoFrame( + VideoPixelFormat frameType = VideoPixelFormat.videoPixelDefault; + VideoFrameMetaInfo? 
frameMetaInfo = null; + int frameWidth = 5; + int frameHeight = 5; + int frameYStride = 5; + int frameUStride = 5; + int frameVStride = 5; + Uint8List frameYBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List frameUBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List frameVBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int frameRotation = 5; + int frameRenderTimeMs = 5; + int frameAvsyncType = 5; + Uint8List frameMetadataBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int frameMetadataSize = 5; + int frameTextureId = 5; + List frameMatrix = List.filled(5, 5.0); + Uint8List frameAlphaBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + Uint8List framePixelBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + VideoFrame frame = VideoFrame( type: frameType, width: frameWidth, height: frameHeight, diff --git a/test_shard/fake_test_app/integration_test/generated/mediarecorder_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediarecorder_fake_test.generated.dart index cb46afd71..2838e5898 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediarecorder_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediarecorder_fake_test.generated.dart @@ -27,7 +27,7 @@ void mediaRecorderSmokeTestCases() { RecorderStreamInfo(channelId: 'hello', uid: 0)))!; try { - final MediaRecorderObserver callback = MediaRecorderObserver( + MediaRecorderObserver callback = MediaRecorderObserver( onRecorderStateChanged: (String channelId, int uid, RecorderState state, RecorderReasonCode reason) {}, onRecorderInfoUpdated: @@ -72,14 +72,14 @@ void mediaRecorderSmokeTestCases() { RecorderStreamInfo(channelId: 'hello', uid: 0)))!; try { - const MediaRecorderContainerFormat configContainerFormat = + MediaRecorderContainerFormat configContainerFormat = MediaRecorderContainerFormat.formatMp4; - const MediaRecorderStreamType configStreamType = + MediaRecorderStreamType configStreamType = MediaRecorderStreamType.streamTypeAudio; - const String configStoragePath = "hello"; - const int configMaxDurationMs = 10; - const int configRecorderInfoUpdateInterval = 10; - const MediaRecorderConfiguration config = MediaRecorderConfiguration( + String configStoragePath = "hello"; + int configMaxDurationMs = 5; + int configRecorderInfoUpdateInterval = 5; + MediaRecorderConfiguration config = MediaRecorderConfiguration( storagePath: configStoragePath, containerFormat: configContainerFormat, streamType: configStreamType, diff --git a/test_shard/fake_test_app/integration_test/generated/mediarecorder_mediarecorderobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediarecorder_mediarecorderobserver_testcases.generated.dart index 5848892b2..28965313a 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediarecorder_mediarecorderobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediarecorder_mediarecorderobserver_testcases.generated.dart @@ -43,10 +43,10 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String channelId = "hello"; - const int uid = 10; - const RecorderState state = RecorderState.recorderStateError; - const RecorderReasonCode reason = RecorderReasonCode.recorderReasonNone; + String channelId = "hello"; + int uid = 5; + RecorderState state = RecorderState.recorderStateError; + RecorderReasonCode reason = RecorderReasonCode.recorderReasonNone; final eventJson = { 'channelId': channelId, @@ -112,12 
+112,12 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String channelId = "hello"; - const int uid = 10; - const String infoFileName = "hello"; - const int infoDurationMs = 10; - const int infoFileSize = 10; - const RecorderInfo info = RecorderInfo( + String channelId = "hello"; + int uid = 5; + String infoFileName = "hello"; + int infoDurationMs = 5; + int infoFileSize = 5; + RecorderInfo info = RecorderInfo( fileName: infoFileName, durationMs: infoDurationMs, fileSize: infoFileSize, diff --git a/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_fake_test.generated.dart index 5d170e6e8..25cddc315 100644 --- a/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_fake_test.generated.dart @@ -26,12 +26,12 @@ void musicContentCenterSmokeTestCases() { final musicContentCenter = rtcEngine.getMusicContentCenter(); try { - const String configurationAppId = "hello"; - const String configurationToken = "hello"; - const int configurationMccUid = 10; - const int configurationMaxCacheSize = 10; - const String configurationMccDomain = "hello"; - const MusicContentCenterConfiguration configuration = + String configurationAppId = "hello"; + String configurationToken = "hello"; + int configurationMccUid = 5; + int configurationMaxCacheSize = 5; + String configurationMccDomain = "hello"; + MusicContentCenterConfiguration configuration = MusicContentCenterConfiguration( appId: configurationAppId, token: configurationToken, @@ -76,7 +76,7 @@ void musicContentCenterSmokeTestCases() { final musicContentCenter = rtcEngine.getMusicContentCenter(); try { - const String token = "hello"; + String token = "hello"; await musicContentCenter.renewToken( token, ); @@ -149,7 +149,7 @@ void musicContentCenterSmokeTestCases() { final musicContentCenter = rtcEngine.getMusicContentCenter(); try { - final MusicContentCenterEventHandler eventHandler = + MusicContentCenterEventHandler eventHandler = MusicContentCenterEventHandler( onMusicChartsResult: (String requestId, List result, MusicContentCenterStateReason reason) {}, @@ -276,10 +276,10 @@ void musicContentCenterSmokeTestCases() { final musicContentCenter = rtcEngine.getMusicContentCenter(); try { - const int musicChartId = 10; - const int page = 10; - const int pageSize = 10; - const String jsonOption = "hello"; + int musicChartId = 5; + int page = 5; + int pageSize = 5; + String jsonOption = "hello"; await musicContentCenter.getMusicCollectionByMusicChartId( musicChartId: musicChartId, page: page, @@ -321,10 +321,10 @@ void musicContentCenterSmokeTestCases() { final musicContentCenter = rtcEngine.getMusicContentCenter(); try { - const String keyWord = "hello"; - const int page = 10; - const int pageSize = 10; - const String jsonOption = "hello"; + String keyWord = "hello"; + int page = 5; + int pageSize = 5; + String jsonOption = "hello"; await musicContentCenter.searchMusic( keyWord: keyWord, page: page, @@ -365,7 +365,7 @@ void musicContentCenterSmokeTestCases() { final musicContentCenter = rtcEngine.getMusicContentCenter(); try { - const int songCode = 10; + int songCode = 5; await musicContentCenter.preload( songCode, ); @@ -403,7 +403,7 @@ void musicContentCenterSmokeTestCases() { final musicContentCenter = rtcEngine.getMusicContentCenter(); try { - const int 
songCode = 10; + int songCode = 5; await musicContentCenter.removeCache( songCode, ); @@ -441,7 +441,7 @@ void musicContentCenterSmokeTestCases() { final musicContentCenter = rtcEngine.getMusicContentCenter(); try { - const int cacheInfoSize = 10; + int cacheInfoSize = 5; await musicContentCenter.getCaches( cacheInfoSize, ); @@ -479,7 +479,7 @@ void musicContentCenterSmokeTestCases() { final musicContentCenter = rtcEngine.getMusicContentCenter(); try { - const int songCode = 10; + int songCode = 5; await musicContentCenter.isPreloaded( songCode, ); @@ -517,8 +517,8 @@ void musicContentCenterSmokeTestCases() { final musicContentCenter = rtcEngine.getMusicContentCenter(); try { - const int songCode = 10; - const int lyricType = 10; + int songCode = 5; + int lyricType = 5; await musicContentCenter.getLyric( songCode: songCode, lyricType: lyricType, @@ -557,7 +557,7 @@ void musicContentCenterSmokeTestCases() { final musicContentCenter = rtcEngine.getMusicContentCenter(); try { - const int songCode = 10; + int songCode = 5; await musicContentCenter.getSongSimpleInfo( songCode, ); @@ -596,8 +596,8 @@ void musicContentCenterSmokeTestCases() { final musicContentCenter = rtcEngine.getMusicContentCenter(); try { - const int songCode = 10; - const String jsonOption = "hello"; + int songCode = 5; + String jsonOption = "hello"; await musicContentCenter.getInternalSongCode( songCode: songCode, jsonOption: jsonOption, diff --git a/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_musiccontentcentereventhandler_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_musiccontentcentereventhandler_testcases.generated.dart index 91a10ae02..b008c1614 100644 --- a/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_musiccontentcentereventhandler_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_musiccontentcentereventhandler_testcases.generated.dart @@ -45,9 +45,19 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String requestId = "hello"; - const List result = []; - const MusicContentCenterStateReason reason = + String requestId = "hello"; + final List result = () { + String resultItemChartName = "hello"; + int resultItemId = 5; + MusicChartInfo resultItem = MusicChartInfo( + chartName: resultItemChartName, + id: resultItemId, + ); + + return List.filled(5, resultItem); + }(); + + MusicContentCenterStateReason reason = MusicContentCenterStateReason.kMusicContentCenterReasonOk; final eventJson = { @@ -118,9 +128,9 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String requestId = "hello"; - const MusicCollection? result = null; - const MusicContentCenterStateReason reason = + String requestId = "hello"; + MusicCollection? 
result = null; + MusicContentCenterStateReason reason = MusicContentCenterStateReason.kMusicContentCenterReasonOk; final eventJson = { @@ -190,10 +200,10 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String requestId = "hello"; - const int songCode = 10; - const String lyricUrl = "hello"; - const MusicContentCenterStateReason reason = + String requestId = "hello"; + int songCode = 5; + String lyricUrl = "hello"; + MusicContentCenterStateReason reason = MusicContentCenterStateReason.kMusicContentCenterReasonOk; final eventJson = { @@ -265,10 +275,10 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String requestId = "hello"; - const int songCode = 10; - const String simpleInfo = "hello"; - const MusicContentCenterStateReason reason = + String requestId = "hello"; + int songCode = 5; + String simpleInfo = "hello"; + MusicContentCenterStateReason reason = MusicContentCenterStateReason.kMusicContentCenterReasonOk; final eventJson = { @@ -344,12 +354,12 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String requestId = "hello"; - const int songCode = 10; - const int percent = 10; - const String lyricUrl = "hello"; - const PreloadState state = PreloadState.kPreloadStateCompleted; - const MusicContentCenterStateReason reason = + String requestId = "hello"; + int songCode = 5; + int percent = 5; + String lyricUrl = "hello"; + PreloadState state = PreloadState.kPreloadStateCompleted; + MusicContentCenterStateReason reason = MusicContentCenterStateReason.kMusicContentCenterReasonOk; final eventJson = { diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_audioencodedframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_audioencodedframeobserver_testcases.generated.dart index 5ac1eef56..69b11fc41 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_audioencodedframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_audioencodedframeobserver_testcases.generated.dart @@ -32,13 +32,12 @@ void generatedTestCases(ValueGetter irisTester) { }, ); - const AudioEncodedFrameObserverPosition configPostionType = + AudioEncodedFrameObserverPosition configPostionType = AudioEncodedFrameObserverPosition .audioEncodedFrameObserverPositionRecord; - const AudioEncodingType configEncodingType = + AudioEncodingType configEncodingType = AudioEncodingType.audioEncodingTypeAac16000Low; - const AudioEncodedFrameObserverConfig config = - AudioEncodedFrameObserverConfig( + AudioEncodedFrameObserverConfig config = AudioEncodedFrameObserverConfig( postionType: configPostionType, encodingType: configEncodingType, ); @@ -52,24 +51,23 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - Uint8List frameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int length = 10; - const AudioCodecType audioEncodedFrameInfoCodec = + Uint8List frameBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int length = 5; + AudioCodecType audioEncodedFrameInfoCodec = AudioCodecType.audioCodecOpus; - const bool advancedSettingsSpeech = true; - const bool advancedSettingsSendEvenIfEmpty = true; - const EncodedAudioFrameAdvancedSettings + bool advancedSettingsSpeech = true; + bool advancedSettingsSendEvenIfEmpty = true; + 
EncodedAudioFrameAdvancedSettings audioEncodedFrameInfoAdvancedSettings = EncodedAudioFrameAdvancedSettings( speech: advancedSettingsSpeech, sendEvenIfEmpty: advancedSettingsSendEvenIfEmpty, ); - const int audioEncodedFrameInfoSampleRateHz = 10; - const int audioEncodedFrameInfoSamplesPerChannel = 10; - const int audioEncodedFrameInfoNumberOfChannels = 10; - const int audioEncodedFrameInfoCaptureTimeMs = 10; - const EncodedAudioFrameInfo audioEncodedFrameInfo = - EncodedAudioFrameInfo( + int audioEncodedFrameInfoSampleRateHz = 5; + int audioEncodedFrameInfoSamplesPerChannel = 5; + int audioEncodedFrameInfoNumberOfChannels = 5; + int audioEncodedFrameInfoCaptureTimeMs = 5; + EncodedAudioFrameInfo audioEncodedFrameInfo = EncodedAudioFrameInfo( codec: audioEncodedFrameInfoCodec, sampleRateHz: audioEncodedFrameInfoSampleRateHz, samplesPerChannel: audioEncodedFrameInfoSamplesPerChannel, @@ -134,13 +132,12 @@ void generatedTestCases(ValueGetter irisTester) { }, ); - const AudioEncodedFrameObserverPosition configPostionType = + AudioEncodedFrameObserverPosition configPostionType = AudioEncodedFrameObserverPosition .audioEncodedFrameObserverPositionRecord; - const AudioEncodingType configEncodingType = + AudioEncodingType configEncodingType = AudioEncodingType.audioEncodingTypeAac16000Low; - const AudioEncodedFrameObserverConfig config = - AudioEncodedFrameObserverConfig( + AudioEncodedFrameObserverConfig config = AudioEncodedFrameObserverConfig( postionType: configPostionType, encodingType: configEncodingType, ); @@ -154,24 +151,23 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - Uint8List frameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int length = 10; - const AudioCodecType audioEncodedFrameInfoCodec = + Uint8List frameBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int length = 5; + AudioCodecType audioEncodedFrameInfoCodec = AudioCodecType.audioCodecOpus; - const bool advancedSettingsSpeech = true; - const bool advancedSettingsSendEvenIfEmpty = true; - const EncodedAudioFrameAdvancedSettings + bool advancedSettingsSpeech = true; + bool advancedSettingsSendEvenIfEmpty = true; + EncodedAudioFrameAdvancedSettings audioEncodedFrameInfoAdvancedSettings = EncodedAudioFrameAdvancedSettings( speech: advancedSettingsSpeech, sendEvenIfEmpty: advancedSettingsSendEvenIfEmpty, ); - const int audioEncodedFrameInfoSampleRateHz = 10; - const int audioEncodedFrameInfoSamplesPerChannel = 10; - const int audioEncodedFrameInfoNumberOfChannels = 10; - const int audioEncodedFrameInfoCaptureTimeMs = 10; - const EncodedAudioFrameInfo audioEncodedFrameInfo = - EncodedAudioFrameInfo( + int audioEncodedFrameInfoSampleRateHz = 5; + int audioEncodedFrameInfoSamplesPerChannel = 5; + int audioEncodedFrameInfoNumberOfChannels = 5; + int audioEncodedFrameInfoCaptureTimeMs = 5; + EncodedAudioFrameInfo audioEncodedFrameInfo = EncodedAudioFrameInfo( codec: audioEncodedFrameInfoCodec, sampleRateHz: audioEncodedFrameInfoSampleRateHz, samplesPerChannel: audioEncodedFrameInfoSamplesPerChannel, @@ -236,13 +232,12 @@ void generatedTestCases(ValueGetter irisTester) { }, ); - const AudioEncodedFrameObserverPosition configPostionType = + AudioEncodedFrameObserverPosition configPostionType = AudioEncodedFrameObserverPosition .audioEncodedFrameObserverPositionRecord; - const AudioEncodingType configEncodingType = + AudioEncodingType configEncodingType = AudioEncodingType.audioEncodingTypeAac16000Low; - const AudioEncodedFrameObserverConfig config = - 
AudioEncodedFrameObserverConfig( + AudioEncodedFrameObserverConfig config = AudioEncodedFrameObserverConfig( postionType: configPostionType, encodingType: configEncodingType, ); @@ -256,24 +251,23 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - Uint8List frameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int length = 10; - const AudioCodecType audioEncodedFrameInfoCodec = + Uint8List frameBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int length = 5; + AudioCodecType audioEncodedFrameInfoCodec = AudioCodecType.audioCodecOpus; - const bool advancedSettingsSpeech = true; - const bool advancedSettingsSendEvenIfEmpty = true; - const EncodedAudioFrameAdvancedSettings + bool advancedSettingsSpeech = true; + bool advancedSettingsSendEvenIfEmpty = true; + EncodedAudioFrameAdvancedSettings audioEncodedFrameInfoAdvancedSettings = EncodedAudioFrameAdvancedSettings( speech: advancedSettingsSpeech, sendEvenIfEmpty: advancedSettingsSendEvenIfEmpty, ); - const int audioEncodedFrameInfoSampleRateHz = 10; - const int audioEncodedFrameInfoSamplesPerChannel = 10; - const int audioEncodedFrameInfoNumberOfChannels = 10; - const int audioEncodedFrameInfoCaptureTimeMs = 10; - const EncodedAudioFrameInfo audioEncodedFrameInfo = - EncodedAudioFrameInfo( + int audioEncodedFrameInfoSampleRateHz = 5; + int audioEncodedFrameInfoSamplesPerChannel = 5; + int audioEncodedFrameInfoNumberOfChannels = 5; + int audioEncodedFrameInfoCaptureTimeMs = 5; + EncodedAudioFrameInfo audioEncodedFrameInfo = EncodedAudioFrameInfo( codec: audioEncodedFrameInfoCodec, sampleRateHz: audioEncodedFrameInfoSampleRateHz, samplesPerChannel: audioEncodedFrameInfoSamplesPerChannel, diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_audiospectrumobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_audiospectrumobserver_testcases.generated.dart index 967f5789e..5cf389721 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_audiospectrumobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_audiospectrumobserver_testcases.generated.dart @@ -39,9 +39,9 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const List dataAudioSpectrumData = []; - const int dataDataLength = 10; - const AudioSpectrumData data = AudioSpectrumData( + List dataAudioSpectrumData = List.filled(5, 5.0); + int dataDataLength = 5; + AudioSpectrumData data = AudioSpectrumData( audioSpectrumData: dataAudioSpectrumData, dataLength: dataDataLength, ); @@ -106,8 +106,23 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const List spectrums = []; - const int spectrumNumber = 10; + final List spectrums = () { + List spectrumDataAudioSpectrumData = List.filled(5, 5.0); + int spectrumDataDataLength = 5; + AudioSpectrumData spectrumsItemSpectrumData = AudioSpectrumData( + audioSpectrumData: spectrumDataAudioSpectrumData, + dataLength: spectrumDataDataLength, + ); + int spectrumsItemUid = 5; + UserAudioSpectrumInfo spectrumsItem = UserAudioSpectrumInfo( + uid: spectrumsItemUid, + spectrumData: spectrumsItemSpectrumData, + ); + + return List.filled(5, spectrumsItem); + }(); + + int spectrumNumber = 5; final eventJson = { 'spectrums': spectrums, diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart 
b/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart index 5bf7f0b95..a1e50c101 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart @@ -25,27 +25,26 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const ChannelProfileType contextChannelProfile = + ChannelProfileType contextChannelProfile = ChannelProfileType.channelProfileCommunication; - const AudioScenarioType contextAudioScenario = + AudioScenarioType contextAudioScenario = AudioScenarioType.audioScenarioDefault; - const LogLevel logConfigLevel = LogLevel.logLevelNone; - const String logConfigFilePath = "hello"; - const int logConfigFileSizeInKB = 10; - const LogConfig contextLogConfig = LogConfig( + LogLevel logConfigLevel = LogLevel.logLevelNone; + String logConfigFilePath = "hello"; + int logConfigFileSizeInKB = 5; + LogConfig contextLogConfig = LogConfig( filePath: logConfigFilePath, fileSizeInKB: logConfigFileSizeInKB, level: logConfigLevel, ); - const ThreadPriorityType contextThreadPriority = - ThreadPriorityType.lowest; - const String contextAppId = "hello"; - const String contextLicense = "hello"; - const int contextAreaCode = 10; - const bool contextUseExternalEglContext = true; - const bool contextDomainLimit = true; - const bool contextAutoRegisterAgoraExtensions = true; - const RtcEngineContext context = RtcEngineContext( + ThreadPriorityType contextThreadPriority = ThreadPriorityType.lowest; + String contextAppId = "hello"; + String contextLicense = "hello"; + int contextAreaCode = 5; + bool contextUseExternalEglContext = true; + bool contextDomainLimit = true; + bool contextAutoRegisterAgoraExtensions = true; + RtcEngineContext context = RtcEngineContext( appId: contextAppId, channelProfile: contextChannelProfile, license: contextLicense, @@ -121,7 +120,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int code = 10; + int code = 5; await rtcEngine.getErrorDescription( code, ); @@ -186,9 +185,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String token = "hello"; - const String channelId = "hello"; - const int uid = 10; + String token = "hello"; + String channelId = "hello"; + int uid = 5; await rtcEngine.preloadChannel( token: token, channelId: channelId, @@ -224,9 +223,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String token = "hello"; - const String channelId = "hello"; - const String userAccount = "hello"; + String token = "hello"; + String channelId = "hello"; + String userAccount = "hello"; await rtcEngine.preloadChannelWithUserAccount( token: token, channelId: channelId, @@ -263,7 +262,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String token = "hello"; + String token = "hello"; await rtcEngine.updatePreloadChannelToken( token, ); @@ -298,50 +297,50 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String token = "hello"; - const String channelId = "hello"; - const int uid = 10; - const ClientRoleType optionsClientRoleType = + String token = "hello"; + String channelId = "hello"; + int uid = 5; + ClientRoleType optionsClientRoleType = 
ClientRoleType.clientRoleBroadcaster; - const AudienceLatencyLevelType optionsAudienceLatencyLevel = + AudienceLatencyLevelType optionsAudienceLatencyLevel = AudienceLatencyLevelType.audienceLatencyLevelLowLatency; - const VideoStreamType optionsDefaultVideoStreamType = + VideoStreamType optionsDefaultVideoStreamType = VideoStreamType.videoStreamHigh; - const ChannelProfileType optionsChannelProfile = + ChannelProfileType optionsChannelProfile = ChannelProfileType.channelProfileCommunication; - const bool optionsPublishCameraTrack = true; - const bool optionsPublishSecondaryCameraTrack = true; - const bool optionsPublishThirdCameraTrack = true; - const bool optionsPublishFourthCameraTrack = true; - const bool optionsPublishMicrophoneTrack = true; - const bool optionsPublishScreenCaptureVideo = true; - const bool optionsPublishScreenCaptureAudio = true; - const bool optionsPublishScreenTrack = true; - const bool optionsPublishSecondaryScreenTrack = true; - const bool optionsPublishThirdScreenTrack = true; - const bool optionsPublishFourthScreenTrack = true; - const bool optionsPublishCustomAudioTrack = true; - const int optionsPublishCustomAudioTrackId = 10; - const bool optionsPublishCustomVideoTrack = true; - const bool optionsPublishEncodedVideoTrack = true; - const bool optionsPublishMediaPlayerAudioTrack = true; - const bool optionsPublishMediaPlayerVideoTrack = true; - const bool optionsPublishTranscodedVideoTrack = true; - const bool optionsPublishMixedAudioTrack = true; - const bool optionsPublishLipSyncTrack = true; - const bool optionsAutoSubscribeAudio = true; - const bool optionsAutoSubscribeVideo = true; - const bool optionsEnableAudioRecordingOrPlayout = true; - const int optionsPublishMediaPlayerId = 10; - const int optionsAudioDelayMs = 10; - const int optionsMediaPlayerAudioDelayMs = 10; - const String optionsToken = "hello"; - const bool optionsEnableBuiltInMediaEncryption = true; - const bool optionsPublishRhythmPlayerTrack = true; - const bool optionsIsInteractiveAudience = true; - const int optionsCustomVideoTrackId = 10; - const bool optionsIsAudioFilterable = true; - const ChannelMediaOptions options = ChannelMediaOptions( + bool optionsPublishCameraTrack = true; + bool optionsPublishSecondaryCameraTrack = true; + bool optionsPublishThirdCameraTrack = true; + bool optionsPublishFourthCameraTrack = true; + bool optionsPublishMicrophoneTrack = true; + bool optionsPublishScreenCaptureVideo = true; + bool optionsPublishScreenCaptureAudio = true; + bool optionsPublishScreenTrack = true; + bool optionsPublishSecondaryScreenTrack = true; + bool optionsPublishThirdScreenTrack = true; + bool optionsPublishFourthScreenTrack = true; + bool optionsPublishCustomAudioTrack = true; + int optionsPublishCustomAudioTrackId = 5; + bool optionsPublishCustomVideoTrack = true; + bool optionsPublishEncodedVideoTrack = true; + bool optionsPublishMediaPlayerAudioTrack = true; + bool optionsPublishMediaPlayerVideoTrack = true; + bool optionsPublishTranscodedVideoTrack = true; + bool optionsPublishMixedAudioTrack = true; + bool optionsPublishLipSyncTrack = true; + bool optionsAutoSubscribeAudio = true; + bool optionsAutoSubscribeVideo = true; + bool optionsEnableAudioRecordingOrPlayout = true; + int optionsPublishMediaPlayerId = 5; + int optionsAudioDelayMs = 5; + int optionsMediaPlayerAudioDelayMs = 5; + String optionsToken = "hello"; + bool optionsEnableBuiltInMediaEncryption = true; + bool optionsPublishRhythmPlayerTrack = true; + bool optionsIsInteractiveAudience = true; + int 
optionsCustomVideoTrackId = 5; + bool optionsIsAudioFilterable = true; + ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, publishThirdCameraTrack: optionsPublishThirdCameraTrack, @@ -415,47 +414,47 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const ClientRoleType optionsClientRoleType = + ClientRoleType optionsClientRoleType = ClientRoleType.clientRoleBroadcaster; - const AudienceLatencyLevelType optionsAudienceLatencyLevel = + AudienceLatencyLevelType optionsAudienceLatencyLevel = AudienceLatencyLevelType.audienceLatencyLevelLowLatency; - const VideoStreamType optionsDefaultVideoStreamType = + VideoStreamType optionsDefaultVideoStreamType = VideoStreamType.videoStreamHigh; - const ChannelProfileType optionsChannelProfile = + ChannelProfileType optionsChannelProfile = ChannelProfileType.channelProfileCommunication; - const bool optionsPublishCameraTrack = true; - const bool optionsPublishSecondaryCameraTrack = true; - const bool optionsPublishThirdCameraTrack = true; - const bool optionsPublishFourthCameraTrack = true; - const bool optionsPublishMicrophoneTrack = true; - const bool optionsPublishScreenCaptureVideo = true; - const bool optionsPublishScreenCaptureAudio = true; - const bool optionsPublishScreenTrack = true; - const bool optionsPublishSecondaryScreenTrack = true; - const bool optionsPublishThirdScreenTrack = true; - const bool optionsPublishFourthScreenTrack = true; - const bool optionsPublishCustomAudioTrack = true; - const int optionsPublishCustomAudioTrackId = 10; - const bool optionsPublishCustomVideoTrack = true; - const bool optionsPublishEncodedVideoTrack = true; - const bool optionsPublishMediaPlayerAudioTrack = true; - const bool optionsPublishMediaPlayerVideoTrack = true; - const bool optionsPublishTranscodedVideoTrack = true; - const bool optionsPublishMixedAudioTrack = true; - const bool optionsPublishLipSyncTrack = true; - const bool optionsAutoSubscribeAudio = true; - const bool optionsAutoSubscribeVideo = true; - const bool optionsEnableAudioRecordingOrPlayout = true; - const int optionsPublishMediaPlayerId = 10; - const int optionsAudioDelayMs = 10; - const int optionsMediaPlayerAudioDelayMs = 10; - const String optionsToken = "hello"; - const bool optionsEnableBuiltInMediaEncryption = true; - const bool optionsPublishRhythmPlayerTrack = true; - const bool optionsIsInteractiveAudience = true; - const int optionsCustomVideoTrackId = 10; - const bool optionsIsAudioFilterable = true; - const ChannelMediaOptions options = ChannelMediaOptions( + bool optionsPublishCameraTrack = true; + bool optionsPublishSecondaryCameraTrack = true; + bool optionsPublishThirdCameraTrack = true; + bool optionsPublishFourthCameraTrack = true; + bool optionsPublishMicrophoneTrack = true; + bool optionsPublishScreenCaptureVideo = true; + bool optionsPublishScreenCaptureAudio = true; + bool optionsPublishScreenTrack = true; + bool optionsPublishSecondaryScreenTrack = true; + bool optionsPublishThirdScreenTrack = true; + bool optionsPublishFourthScreenTrack = true; + bool optionsPublishCustomAudioTrack = true; + int optionsPublishCustomAudioTrackId = 5; + bool optionsPublishCustomVideoTrack = true; + bool optionsPublishEncodedVideoTrack = true; + bool optionsPublishMediaPlayerAudioTrack = true; + bool optionsPublishMediaPlayerVideoTrack = true; + bool optionsPublishTranscodedVideoTrack = true; + bool 
optionsPublishMixedAudioTrack = true; + bool optionsPublishLipSyncTrack = true; + bool optionsAutoSubscribeAudio = true; + bool optionsAutoSubscribeVideo = true; + bool optionsEnableAudioRecordingOrPlayout = true; + int optionsPublishMediaPlayerId = 5; + int optionsAudioDelayMs = 5; + int optionsMediaPlayerAudioDelayMs = 5; + String optionsToken = "hello"; + bool optionsEnableBuiltInMediaEncryption = true; + bool optionsPublishRhythmPlayerTrack = true; + bool optionsIsInteractiveAudience = true; + int optionsCustomVideoTrackId = 5; + bool optionsIsAudioFilterable = true; + ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, publishThirdCameraTrack: optionsPublishThirdCameraTrack, @@ -527,10 +526,10 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool optionsStopAudioMixing = true; - const bool optionsStopAllEffect = true; - const bool optionsStopMicrophoneRecording = true; - const LeaveChannelOptions options = LeaveChannelOptions( + bool optionsStopAudioMixing = true; + bool optionsStopAllEffect = true; + bool optionsStopMicrophoneRecording = true; + LeaveChannelOptions options = LeaveChannelOptions( stopAudioMixing: optionsStopAudioMixing, stopAllEffect: optionsStopAllEffect, stopMicrophoneRecording: optionsStopMicrophoneRecording, @@ -568,7 +567,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String token = "hello"; + String token = "hello"; await rtcEngine.renewToken( token, ); @@ -602,7 +601,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const ChannelProfileType profile = + ChannelProfileType profile = ChannelProfileType.channelProfileCommunication; await rtcEngine.setChannelProfile( profile, @@ -637,10 +636,10 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const ClientRoleType role = ClientRoleType.clientRoleBroadcaster; - const AudienceLatencyLevelType optionsAudienceLatencyLevel = + ClientRoleType role = ClientRoleType.clientRoleBroadcaster; + AudienceLatencyLevelType optionsAudienceLatencyLevel = AudienceLatencyLevelType.audienceLatencyLevelLowLatency; - const ClientRoleOptions options = ClientRoleOptions( + ClientRoleOptions options = ClientRoleOptions( audienceLatencyLevel: optionsAudienceLatencyLevel, ); await rtcEngine.setClientRole( @@ -677,13 +676,13 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int configView = 10; - const bool configEnableAudio = true; - const bool configEnableVideo = true; - const String configToken = "hello"; - const String configChannelId = "hello"; - const int configIntervalInSeconds = 10; - const EchoTestConfiguration config = EchoTestConfiguration( + int configView = 5; + bool configEnableAudio = true; + bool configEnableVideo = true; + String configToken = "hello"; + String configChannelId = "hello"; + int configIntervalInSeconds = 5; + EchoTestConfiguration config = EchoTestConfiguration( view: configView, enableAudio: configEnableAudio, enableVideo: configEnableVideo, @@ -755,23 +754,22 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; - const CameraDirection configCameraDirection = - CameraDirection.cameraRear; - const 
CameraFocalLengthType configCameraFocalLengthType = + bool enabled = true; + CameraDirection configCameraDirection = CameraDirection.cameraRear; + CameraFocalLengthType configCameraFocalLengthType = CameraFocalLengthType.cameraFocalLengthDefault; - const int formatWidth = 10; - const int formatHeight = 10; - const int formatFps = 10; - const VideoFormat configFormat = VideoFormat( + int formatWidth = 5; + int formatHeight = 5; + int formatFps = 5; + VideoFormat configFormat = VideoFormat( width: formatWidth, height: formatHeight, fps: formatFps, ); - const String configDeviceId = "hello"; - const String configCameraId = "hello"; - const bool configFollowEncodeDimensionRatio = true; - const CameraCapturerConfiguration config = CameraCapturerConfiguration( + String configDeviceId = "hello"; + String configCameraId = "hello"; + bool configFollowEncodeDimensionRatio = true; + CameraCapturerConfiguration config = CameraCapturerConfiguration( cameraDirection: configCameraDirection, cameraFocalLengthType: configCameraFocalLengthType, deviceId: configDeviceId, @@ -875,8 +873,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoSourceType sourceType = - VideoSourceType.videoSourceCameraPrimary; + VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary; await rtcEngine.startPreview( sourceType: sourceType, ); @@ -910,8 +907,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoSourceType sourceType = - VideoSourceType.videoSourceCameraPrimary; + VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary; await rtcEngine.stopPreview( sourceType: sourceType, ); @@ -945,11 +941,11 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool configProbeUplink = true; - const bool configProbeDownlink = true; - const int configExpectedUplinkBitrate = 10; - const int configExpectedDownlinkBitrate = 10; - const LastmileProbeConfig config = LastmileProbeConfig( + bool configProbeUplink = true; + bool configProbeDownlink = true; + int configExpectedUplinkBitrate = 5; + int configExpectedDownlinkBitrate = 5; + LastmileProbeConfig config = LastmileProbeConfig( probeUplink: configProbeUplink, probeDownlink: configProbeDownlink, expectedUplinkBitrate: configExpectedUplinkBitrate, @@ -1021,31 +1017,31 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoCodecType configCodecType = VideoCodecType.videoCodecNone; - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions configDimensions = VideoDimensions( + VideoCodecType configCodecType = VideoCodecType.videoCodecNone; + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions configDimensions = VideoDimensions( width: dimensionsWidth, height: dimensionsHeight, ); - const OrientationMode configOrientationMode = + OrientationMode configOrientationMode = OrientationMode.orientationModeAdaptive; - const DegradationPreference configDegradationPreference = + DegradationPreference configDegradationPreference = DegradationPreference.maintainQuality; - const VideoMirrorModeType configMirrorMode = + VideoMirrorModeType configMirrorMode = VideoMirrorModeType.videoMirrorModeAuto; - const EncodingPreference advanceOptionsEncodingPreference = + EncodingPreference advanceOptionsEncodingPreference = EncodingPreference.preferAuto; - const 
CompressionPreference advanceOptionsCompressionPreference = + CompressionPreference advanceOptionsCompressionPreference = CompressionPreference.preferLowLatency; - const AdvanceOptions configAdvanceOptions = AdvanceOptions( + AdvanceOptions configAdvanceOptions = AdvanceOptions( encodingPreference: advanceOptionsEncodingPreference, compressionPreference: advanceOptionsCompressionPreference, ); - const int configFrameRate = 10; - const int configBitrate = 10; - const int configMinBitrate = 10; - const VideoEncoderConfiguration config = VideoEncoderConfiguration( + int configFrameRate = 5; + int configBitrate = 5; + int configMinBitrate = 5; + VideoEncoderConfiguration config = VideoEncoderConfiguration( codecType: configCodecType, dimensions: configDimensions, frameRate: configFrameRate, @@ -1090,21 +1086,21 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; - const LighteningContrastLevel optionsLighteningContrastLevel = + bool enabled = true; + LighteningContrastLevel optionsLighteningContrastLevel = LighteningContrastLevel.lighteningContrastLow; - const double optionsLighteningLevel = 10.0; - const double optionsSmoothnessLevel = 10.0; - const double optionsRednessLevel = 10.0; - const double optionsSharpnessLevel = 10.0; - const BeautyOptions options = BeautyOptions( + double optionsLighteningLevel = 5.0; + double optionsSmoothnessLevel = 5.0; + double optionsRednessLevel = 5.0; + double optionsSharpnessLevel = 5.0; + BeautyOptions options = BeautyOptions( lighteningContrastLevel: optionsLighteningContrastLevel, lighteningLevel: optionsLighteningLevel, smoothnessLevel: optionsSmoothnessLevel, rednessLevel: optionsRednessLevel, sharpnessLevel: optionsSharpnessLevel, ); - const MediaSourceType type = MediaSourceType.audioPlayoutSource; + MediaSourceType type = MediaSourceType.audioPlayoutSource; await rtcEngine.setBeautyEffectOptions( enabled: enabled, options: options, @@ -1141,16 +1137,16 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; - const LowLightEnhanceMode optionsMode = + bool enabled = true; + LowLightEnhanceMode optionsMode = LowLightEnhanceMode.lowLightEnhanceAuto; - const LowLightEnhanceLevel optionsLevel = + LowLightEnhanceLevel optionsLevel = LowLightEnhanceLevel.lowLightEnhanceLevelHighQuality; - const LowlightEnhanceOptions options = LowlightEnhanceOptions( + LowlightEnhanceOptions options = LowlightEnhanceOptions( mode: optionsMode, level: optionsLevel, ); - const MediaSourceType type = MediaSourceType.audioPlayoutSource; + MediaSourceType type = MediaSourceType.audioPlayoutSource; await rtcEngine.setLowlightEnhanceOptions( enabled: enabled, options: options, @@ -1187,16 +1183,15 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; - const VideoDenoiserMode optionsMode = - VideoDenoiserMode.videoDenoiserAuto; - const VideoDenoiserLevel optionsLevel = + bool enabled = true; + VideoDenoiserMode optionsMode = VideoDenoiserMode.videoDenoiserAuto; + VideoDenoiserLevel optionsLevel = VideoDenoiserLevel.videoDenoiserLevelHighQuality; - const VideoDenoiserOptions options = VideoDenoiserOptions( + VideoDenoiserOptions options = VideoDenoiserOptions( mode: optionsMode, level: optionsLevel, ); - const MediaSourceType type = MediaSourceType.audioPlayoutSource; + MediaSourceType type = MediaSourceType.audioPlayoutSource; await 
rtcEngine.setVideoDenoiserOptions( enabled: enabled, options: options, @@ -1233,14 +1228,14 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; - const double optionsStrengthLevel = 10.0; - const double optionsSkinProtectLevel = 10.0; - const ColorEnhanceOptions options = ColorEnhanceOptions( + bool enabled = true; + double optionsStrengthLevel = 5.0; + double optionsSkinProtectLevel = 5.0; + ColorEnhanceOptions options = ColorEnhanceOptions( strengthLevel: optionsStrengthLevel, skinProtectLevel: optionsSkinProtectLevel, ); - const MediaSourceType type = MediaSourceType.audioPlayoutSource; + MediaSourceType type = MediaSourceType.audioPlayoutSource; await rtcEngine.setColorEnhanceOptions( enabled: enabled, options: options, @@ -1277,27 +1272,26 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; - const BackgroundSourceType backgroundSourceBackgroundSourceType = + bool enabled = true; + BackgroundSourceType backgroundSourceBackgroundSourceType = BackgroundSourceType.backgroundNone; - const BackgroundBlurDegree backgroundSourceBlurDegree = + BackgroundBlurDegree backgroundSourceBlurDegree = BackgroundBlurDegree.blurDegreeLow; - const int backgroundSourceColor = 10; - const String backgroundSourceSource = "hello"; - const VirtualBackgroundSource backgroundSource = - VirtualBackgroundSource( + int backgroundSourceColor = 5; + String backgroundSourceSource = "hello"; + VirtualBackgroundSource backgroundSource = VirtualBackgroundSource( backgroundSourceType: backgroundSourceBackgroundSourceType, color: backgroundSourceColor, source: backgroundSourceSource, blurDegree: backgroundSourceBlurDegree, ); - const SegModelType segpropertyModelType = SegModelType.segModelAi; - const double segpropertyGreenCapacity = 10.0; - const SegmentationProperty segproperty = SegmentationProperty( + SegModelType segpropertyModelType = SegModelType.segModelAi; + double segpropertyGreenCapacity = 5.0; + SegmentationProperty segproperty = SegmentationProperty( modelType: segpropertyModelType, greenCapacity: segpropertyGreenCapacity, ); - const MediaSourceType type = MediaSourceType.audioPlayoutSource; + MediaSourceType type = MediaSourceType.audioPlayoutSource; await rtcEngine.enableVirtualBackground( enabled: enabled, backgroundSource: backgroundSource, @@ -1335,32 +1329,32 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const RenderModeType canvasRenderMode = RenderModeType.renderModeHidden; - const VideoMirrorModeType canvasMirrorMode = + RenderModeType canvasRenderMode = RenderModeType.renderModeHidden; + VideoMirrorModeType canvasMirrorMode = VideoMirrorModeType.videoMirrorModeAuto; - const VideoViewSetupMode canvasSetupMode = + VideoViewSetupMode canvasSetupMode = VideoViewSetupMode.videoViewSetupReplace; - const VideoSourceType canvasSourceType = + VideoSourceType canvasSourceType = VideoSourceType.videoSourceCameraPrimary; - const int cropAreaX = 10; - const int cropAreaY = 10; - const int cropAreaWidth = 10; - const int cropAreaHeight = 10; - const Rectangle canvasCropArea = Rectangle( + int cropAreaX = 5; + int cropAreaY = 5; + int cropAreaWidth = 5; + int cropAreaHeight = 5; + Rectangle canvasCropArea = Rectangle( x: cropAreaX, y: cropAreaY, width: cropAreaWidth, height: cropAreaHeight, ); - const VideoModulePosition canvasPosition = + VideoModulePosition canvasPosition = 
VideoModulePosition.positionPostCapturer; - const int canvasUid = 10; - const int canvasSubviewUid = 10; - const int canvasView = 10; - const int canvasBackgroundColor = 10; - const int canvasMediaPlayerId = 10; - const bool canvasEnableAlphaMask = true; - const VideoCanvas canvas = VideoCanvas( + int canvasUid = 5; + int canvasSubviewUid = 5; + int canvasView = 5; + int canvasBackgroundColor = 5; + int canvasMediaPlayerId = 5; + bool canvasEnableAlphaMask = true; + VideoCanvas canvas = VideoCanvas( uid: canvasUid, subviewUid: canvasSubviewUid, view: canvasView, @@ -1407,32 +1401,32 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const RenderModeType canvasRenderMode = RenderModeType.renderModeHidden; - const VideoMirrorModeType canvasMirrorMode = + RenderModeType canvasRenderMode = RenderModeType.renderModeHidden; + VideoMirrorModeType canvasMirrorMode = VideoMirrorModeType.videoMirrorModeAuto; - const VideoViewSetupMode canvasSetupMode = + VideoViewSetupMode canvasSetupMode = VideoViewSetupMode.videoViewSetupReplace; - const VideoSourceType canvasSourceType = + VideoSourceType canvasSourceType = VideoSourceType.videoSourceCameraPrimary; - const int cropAreaX = 10; - const int cropAreaY = 10; - const int cropAreaWidth = 10; - const int cropAreaHeight = 10; - const Rectangle canvasCropArea = Rectangle( + int cropAreaX = 5; + int cropAreaY = 5; + int cropAreaWidth = 5; + int cropAreaHeight = 5; + Rectangle canvasCropArea = Rectangle( x: cropAreaX, y: cropAreaY, width: cropAreaWidth, height: cropAreaHeight, ); - const VideoModulePosition canvasPosition = + VideoModulePosition canvasPosition = VideoModulePosition.positionPostCapturer; - const int canvasUid = 10; - const int canvasSubviewUid = 10; - const int canvasView = 10; - const int canvasBackgroundColor = 10; - const int canvasMediaPlayerId = 10; - const bool canvasEnableAlphaMask = true; - const VideoCanvas canvas = VideoCanvas( + int canvasUid = 5; + int canvasSubviewUid = 5; + int canvasView = 5; + int canvasBackgroundColor = 5; + int canvasMediaPlayerId = 5; + bool canvasEnableAlphaMask = true; + VideoCanvas canvas = VideoCanvas( uid: canvasUid, subviewUid: canvasSubviewUid, view: canvasView, @@ -1479,7 +1473,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoApplicationScenarioType scenarioType = + VideoApplicationScenarioType scenarioType = VideoApplicationScenarioType.applicationScenarioGeneral; await rtcEngine.setVideoScenario( scenarioType, @@ -1514,7 +1508,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoQoePreferenceType qoePreference = + VideoQoePreferenceType qoePreference = VideoQoePreferenceType.videoQoePreferenceBalance; await rtcEngine.setVideoQoEPreference( qoePreference, @@ -1612,9 +1606,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const AudioProfileType profile = AudioProfileType.audioProfileDefault; - const AudioScenarioType scenario = - AudioScenarioType.audioScenarioDefault; + AudioProfileType profile = AudioProfileType.audioProfileDefault; + AudioScenarioType scenario = AudioScenarioType.audioScenarioDefault; await rtcEngine.setAudioProfile( profile: profile, scenario: scenario, @@ -1649,8 +1642,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const AudioScenarioType scenario = - 
AudioScenarioType.audioScenarioDefault; + AudioScenarioType scenario = AudioScenarioType.audioScenarioDefault; await rtcEngine.setAudioScenario( scenario, ); @@ -1684,7 +1676,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; + bool enabled = true; await rtcEngine.enableLocalAudio( enabled, ); @@ -1718,7 +1710,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool mute = true; + bool mute = true; await rtcEngine.muteLocalAudioStream( mute, ); @@ -1752,7 +1744,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool mute = true; + bool mute = true; await rtcEngine.muteAllRemoteAudioStreams( mute, ); @@ -1787,7 +1779,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool mute = true; + bool mute = true; await rtcEngine.setDefaultMuteAllRemoteAudioStreams( mute, ); @@ -1822,8 +1814,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const bool mute = true; + int uid = 5; + bool mute = true; await rtcEngine.muteRemoteAudioStream( uid: uid, mute: mute, @@ -1859,7 +1851,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool mute = true; + bool mute = true; await rtcEngine.muteLocalVideoStream( mute, ); @@ -1893,7 +1885,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; + bool enabled = true; await rtcEngine.enableLocalVideo( enabled, ); @@ -1927,7 +1919,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool mute = true; + bool mute = true; await rtcEngine.muteAllRemoteVideoStreams( mute, ); @@ -1962,7 +1954,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool mute = true; + bool mute = true; await rtcEngine.setDefaultMuteAllRemoteVideoStreams( mute, ); @@ -1997,7 +1989,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoStreamType streamType = VideoStreamType.videoStreamHigh; + VideoStreamType streamType = VideoStreamType.videoStreamHigh; await rtcEngine.setRemoteDefaultVideoStreamType( streamType, ); @@ -2032,8 +2024,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const bool mute = true; + int uid = 5; + bool mute = true; await rtcEngine.muteRemoteVideoStream( uid: uid, mute: mute, @@ -2069,8 +2061,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const VideoStreamType streamType = VideoStreamType.videoStreamHigh; + int uid = 5; + VideoStreamType streamType = VideoStreamType.videoStreamHigh; await rtcEngine.setRemoteVideoStreamType( uid: uid, streamType: streamType, @@ -2106,10 +2098,10 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const VideoStreamType optionsType = VideoStreamType.videoStreamHigh; - const bool optionsEncodedFrameOnly = true; - const VideoSubscriptionOptions options = VideoSubscriptionOptions( + int uid = 5; + 
VideoStreamType optionsType = VideoStreamType.videoStreamHigh; + bool optionsEncodedFrameOnly = true; + VideoSubscriptionOptions options = VideoSubscriptionOptions( type: optionsType, encodedFrameOnly: optionsEncodedFrameOnly, ); @@ -2148,9 +2140,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int interval = 10; - const int smooth = 10; - const bool reportVad = true; + int interval = 5; + int smooth = 5; + bool reportVad = true; await rtcEngine.enableAudioVolumeIndication( interval: interval, smooth: smooth, @@ -2187,15 +2179,15 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const AudioFileRecordingType configFileRecordingType = + AudioFileRecordingType configFileRecordingType = AudioFileRecordingType.audioFileRecordingMic; - const AudioRecordingQualityType configQuality = + AudioRecordingQualityType configQuality = AudioRecordingQualityType.audioRecordingQualityLow; - const String configFilePath = "hello"; - const bool configEncode = true; - const int configSampleRate = 10; - const int configRecordingChannel = 10; - const AudioRecordingConfiguration config = AudioRecordingConfiguration( + String configFilePath = "hello"; + bool configEncode = true; + int configSampleRate = 5; + int configRecordingChannel = 5; + AudioRecordingConfiguration config = AudioRecordingConfiguration( filePath: configFilePath, encode: configEncode, sampleRate: configSampleRate, @@ -2236,17 +2228,17 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const AudioEncodedFrameObserverPosition configPostionType = + AudioEncodedFrameObserverPosition configPostionType = AudioEncodedFrameObserverPosition .audioEncodedFrameObserverPositionRecord; - const AudioEncodingType configEncodingType = + AudioEncodingType configEncodingType = AudioEncodingType.audioEncodingTypeAac16000Low; - const AudioEncodedFrameObserverConfig config = + AudioEncodedFrameObserverConfig config = AudioEncodedFrameObserverConfig( postionType: configPostionType, encodingType: configEncodingType, ); - final AudioEncodedFrameObserver observer = AudioEncodedFrameObserver( + AudioEncodedFrameObserver observer = AudioEncodedFrameObserver( onRecordAudioEncodedFrame: (Uint8List frameBuffer, int length, EncodedAudioFrameInfo audioEncodedFrameInfo) {}, onPlaybackAudioEncodedFrame: (Uint8List frameBuffer, int length, @@ -2320,10 +2312,10 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String filePath = "hello"; - const bool loopback = true; - const int cycle = 10; - const int startPos = 10; + String filePath = "hello"; + bool loopback = true; + int cycle = 5; + int startPos = 5; await rtcEngine.startAudioMixing( filePath: filePath, loopback: loopback, @@ -2453,7 +2445,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int index = 10; + int index = 5; await rtcEngine.selectAudioTrack( index, ); @@ -2518,7 +2510,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int volume = 10; + int volume = 5; await rtcEngine.adjustAudioMixingVolume( volume, ); @@ -2553,7 +2545,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int volume = 10; + int volume = 5; await rtcEngine.adjustAudioMixingPublishVolume( volume, ); @@ -2620,7 
+2612,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int volume = 10; + int volume = 5; await rtcEngine.adjustAudioMixingPlayoutVolume( volume, ); @@ -2751,7 +2743,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int pos = 10; + int pos = 5; await rtcEngine.setAudioMixingPosition( pos, ); @@ -2786,7 +2778,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const AudioMixingDualMonoMode mode = + AudioMixingDualMonoMode mode = AudioMixingDualMonoMode.audioMixingDualMonoAuto; await rtcEngine.setAudioMixingDualMonoMode( mode, @@ -2822,7 +2814,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int pitch = 10; + int pitch = 5; await rtcEngine.setAudioMixingPitch( pitch, ); @@ -2856,7 +2848,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int speed = 10; + int speed = 5; await rtcEngine.setAudioMixingPlaybackSpeed( speed, ); @@ -2922,7 +2914,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int volume = 10; + int volume = 5; await rtcEngine.setEffectsVolume( volume, ); @@ -2956,9 +2948,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int soundId = 10; - const String filePath = "hello"; - const int startPos = 10; + int soundId = 5; + String filePath = "hello"; + int startPos = 5; await rtcEngine.preloadEffect( soundId: soundId, filePath: filePath, @@ -2994,14 +2986,14 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int soundId = 10; - const String filePath = "hello"; - const int loopCount = 10; - const double pitch = 10.0; - const double pan = 10.0; - const int gain = 10; - const bool publish = true; - const int startPos = 10; + int soundId = 5; + String filePath = "hello"; + int loopCount = 5; + double pitch = 5.0; + double pan = 5.0; + int gain = 5; + bool publish = true; + int startPos = 5; await rtcEngine.playEffect( soundId: soundId, filePath: filePath, @@ -3042,11 +3034,11 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int loopCount = 10; - const double pitch = 10.0; - const double pan = 10.0; - const int gain = 10; - const bool publish = true; + int loopCount = 5; + double pitch = 5.0; + double pan = 5.0; + int gain = 5; + bool publish = true; await rtcEngine.playAllEffects( loopCount: loopCount, pitch: pitch, @@ -3084,7 +3076,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int soundId = 10; + int soundId = 5; await rtcEngine.getVolumeOfEffect( soundId, ); @@ -3118,8 +3110,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int soundId = 10; - const int volume = 10; + int soundId = 5; + int volume = 5; await rtcEngine.setVolumeOfEffect( soundId: soundId, volume: volume, @@ -3154,7 +3146,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int soundId = 10; + int soundId = 5; await rtcEngine.pauseEffect( soundId, ); @@ -3219,7 +3211,7 @@ void rtcEngineSmokeTestCases() { await 
rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int soundId = 10; + int soundId = 5; await rtcEngine.resumeEffect( soundId, ); @@ -3284,7 +3276,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int soundId = 10; + int soundId = 5; await rtcEngine.stopEffect( soundId, ); @@ -3349,7 +3341,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int soundId = 10; + int soundId = 5; await rtcEngine.unloadEffect( soundId, ); @@ -3414,7 +3406,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String filePath = "hello"; + String filePath = "hello"; await rtcEngine.getEffectDuration( filePath, ); @@ -3448,8 +3440,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int soundId = 10; - const int pos = 10; + int soundId = 5; + int pos = 5; await rtcEngine.setEffectPosition( soundId: soundId, pos: pos, @@ -3484,7 +3476,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int soundId = 10; + int soundId = 5; await rtcEngine.getEffectCurrentPosition( soundId, ); @@ -3519,7 +3511,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; + bool enabled = true; await rtcEngine.enableSoundPositionIndication( enabled, ); @@ -3554,9 +3546,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const double pan = 10.0; - const double gain = 10.0; + int uid = 5; + double pan = 5.0; + double gain = 5.0; await rtcEngine.setRemoteVoicePosition( uid: uid, pan: pan, @@ -3593,7 +3585,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; + bool enabled = true; await rtcEngine.enableSpatialAudio( enabled, ); @@ -3627,16 +3619,16 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const double paramsSpeakerAzimuth = 10.0; - const double paramsSpeakerElevation = 10.0; - const double paramsSpeakerDistance = 10.0; - const int paramsSpeakerOrientation = 10; - const bool paramsEnableBlur = true; - const bool paramsEnableAirAbsorb = true; - const double paramsSpeakerAttenuation = 10.0; - const bool paramsEnableDoppler = true; - const SpatialAudioParams params = SpatialAudioParams( + int uid = 5; + double paramsSpeakerAzimuth = 5.0; + double paramsSpeakerElevation = 5.0; + double paramsSpeakerDistance = 5.0; + int paramsSpeakerOrientation = 5; + bool paramsEnableBlur = true; + bool paramsEnableAirAbsorb = true; + double paramsSpeakerAttenuation = 5.0; + bool paramsEnableDoppler = true; + SpatialAudioParams params = SpatialAudioParams( speakerAzimuth: paramsSpeakerAzimuth, speakerElevation: paramsSpeakerElevation, speakerDistance: paramsSpeakerDistance, @@ -3681,8 +3673,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VoiceBeautifierPreset preset = - VoiceBeautifierPreset.voiceBeautifierOff; + VoiceBeautifierPreset preset = VoiceBeautifierPreset.voiceBeautifierOff; await rtcEngine.setVoiceBeautifierPreset( preset, ); @@ -3717,7 +3708,7 @@ void rtcEngineSmokeTestCases() { await 
rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const AudioEffectPreset preset = AudioEffectPreset.audioEffectOff; + AudioEffectPreset preset = AudioEffectPreset.audioEffectOff; await rtcEngine.setAudioEffectPreset( preset, ); @@ -3751,8 +3742,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VoiceConversionPreset preset = - VoiceConversionPreset.voiceConversionOff; + VoiceConversionPreset preset = VoiceConversionPreset.voiceConversionOff; await rtcEngine.setVoiceConversionPreset( preset, ); @@ -3787,9 +3777,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const AudioEffectPreset preset = AudioEffectPreset.audioEffectOff; - const int param1 = 10; - const int param2 = 10; + AudioEffectPreset preset = AudioEffectPreset.audioEffectOff; + int param1 = 5; + int param2 = 5; await rtcEngine.setAudioEffectParameters( preset: preset, param1: param1, @@ -3826,10 +3816,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VoiceBeautifierPreset preset = - VoiceBeautifierPreset.voiceBeautifierOff; - const int param1 = 10; - const int param2 = 10; + VoiceBeautifierPreset preset = VoiceBeautifierPreset.voiceBeautifierOff; + int param1 = 5; + int param2 = 5; await rtcEngine.setVoiceBeautifierParameters( preset: preset, param1: param1, @@ -3866,10 +3855,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VoiceConversionPreset preset = - VoiceConversionPreset.voiceConversionOff; - const int param1 = 10; - const int param2 = 10; + VoiceConversionPreset preset = VoiceConversionPreset.voiceConversionOff; + int param1 = 5; + int param2 = 5; await rtcEngine.setVoiceConversionParameters( preset: preset, param1: param1, @@ -3906,7 +3894,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const double pitch = 10.0; + double pitch = 5.0; await rtcEngine.setLocalVoicePitch( pitch, ); @@ -3940,7 +3928,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const double formantRatio = 10.0; + double formantRatio = 5.0; await rtcEngine.setLocalVoiceFormant( formantRatio, ); @@ -3974,9 +3962,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const AudioEqualizationBandFrequency bandFrequency = + AudioEqualizationBandFrequency bandFrequency = AudioEqualizationBandFrequency.audioEqualizationBand31; - const int bandGain = 10; + int bandGain = 5; await rtcEngine.setLocalVoiceEqualization( bandFrequency: bandFrequency, bandGain: bandGain, @@ -4012,8 +4000,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const AudioReverbType reverbKey = AudioReverbType.audioReverbDryLevel; - const int value = 10; + AudioReverbType reverbKey = AudioReverbType.audioReverbDryLevel; + int value = 5; await rtcEngine.setLocalVoiceReverb( reverbKey: reverbKey, value: value, @@ -4048,7 +4036,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const HeadphoneEqualizerPreset preset = + HeadphoneEqualizerPreset preset = HeadphoneEqualizerPreset.headphoneEqualizerOff; await rtcEngine.setHeadphoneEQPreset( preset, @@ -4083,8 +4071,8 @@ void rtcEngineSmokeTestCases() { await 
rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int lowGain = 10; - const int highGain = 10; + int lowGain = 5; + int highGain = 5; await rtcEngine.setHeadphoneEQParameters( lowGain: lowGain, highGain: highGain, @@ -4120,7 +4108,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String filePath = "hello"; + String filePath = "hello"; await rtcEngine.setLogFile( filePath, ); @@ -4154,7 +4142,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const LogFilterType filter = LogFilterType.logFilterOff; + LogFilterType filter = LogFilterType.logFilterOff; await rtcEngine.setLogFilter( filter, ); @@ -4188,7 +4176,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const LogLevel level = LogLevel.logLevelNone; + LogLevel level = LogLevel.logLevelNone; await rtcEngine.setLogLevel( level, ); @@ -4222,7 +4210,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int fileSizeInKBytes = 10; + int fileSizeInKBytes = 5; await rtcEngine.setLogFileSize( fileSizeInKBytes, ); @@ -4287,8 +4275,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const LogLevel level = LogLevel.logLevelNone; - const String fmt = "hello"; + LogLevel level = LogLevel.logLevelNone; + String fmt = "hello"; await rtcEngine.writeLog( level: level, fmt: fmt, @@ -4323,8 +4311,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const RenderModeType renderMode = RenderModeType.renderModeHidden; - const VideoMirrorModeType mirrorMode = + RenderModeType renderMode = RenderModeType.renderModeHidden; + VideoMirrorModeType mirrorMode = VideoMirrorModeType.videoMirrorModeAuto; await rtcEngine.setLocalRenderMode( renderMode: renderMode, @@ -4360,9 +4348,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const RenderModeType renderMode = RenderModeType.renderModeHidden; - const VideoMirrorModeType mirrorMode = + int uid = 5; + RenderModeType renderMode = RenderModeType.renderModeHidden; + VideoMirrorModeType mirrorMode = VideoMirrorModeType.videoMirrorModeAuto; await rtcEngine.setRemoteRenderMode( uid: uid, @@ -4399,7 +4387,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoMirrorModeType mirrorMode = + VideoMirrorModeType mirrorMode = VideoMirrorModeType.videoMirrorModeAuto; await rtcEngine.setLocalVideoMirrorMode( mirrorMode, @@ -4435,16 +4423,16 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions streamConfigDimensions = VideoDimensions( + bool enabled = true; + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions streamConfigDimensions = VideoDimensions( width: dimensionsWidth, height: dimensionsHeight, ); - const int streamConfigKBitrate = 10; - const int streamConfigFramerate = 10; - const SimulcastStreamConfig streamConfig = SimulcastStreamConfig( + int streamConfigKBitrate = 5; + int streamConfigFramerate = 5; + SimulcastStreamConfig streamConfig = SimulcastStreamConfig( dimensions: streamConfigDimensions, 
kBitrate: streamConfigKBitrate, framerate: streamConfigFramerate, @@ -4483,17 +4471,16 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const SimulcastStreamMode mode = - SimulcastStreamMode.autoSimulcastStream; - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions streamConfigDimensions = VideoDimensions( + SimulcastStreamMode mode = SimulcastStreamMode.autoSimulcastStream; + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions streamConfigDimensions = VideoDimensions( width: dimensionsWidth, height: dimensionsHeight, ); - const int streamConfigKBitrate = 10; - const int streamConfigFramerate = 10; - const SimulcastStreamConfig streamConfig = SimulcastStreamConfig( + int streamConfigKBitrate = 5; + int streamConfigFramerate = 5; + SimulcastStreamConfig streamConfig = SimulcastStreamConfig( dimensions: streamConfigDimensions, kBitrate: streamConfigKBitrate, framerate: streamConfigFramerate, @@ -4532,8 +4519,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int trackId = 10; - const bool enabled = true; + int trackId = 5; + bool enabled = true; await rtcEngine.enableCustomAudioLocalPlayback( trackId: trackId, enabled: enabled, @@ -4569,11 +4556,11 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int sampleRate = 10; - const int channel = 10; - const RawAudioFrameOpModeType mode = + int sampleRate = 5; + int channel = 5; + RawAudioFrameOpModeType mode = RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly; - const int samplesPerCall = 10; + int samplesPerCall = 5; await rtcEngine.setRecordingAudioFrameParameters( sampleRate: sampleRate, channel: channel, @@ -4611,11 +4598,11 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int sampleRate = 10; - const int channel = 10; - const RawAudioFrameOpModeType mode = + int sampleRate = 5; + int channel = 5; + RawAudioFrameOpModeType mode = RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly; - const int samplesPerCall = 10; + int samplesPerCall = 5; await rtcEngine.setPlaybackAudioFrameParameters( sampleRate: sampleRate, channel: channel, @@ -4653,9 +4640,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int sampleRate = 10; - const int channel = 10; - const int samplesPerCall = 10; + int sampleRate = 5; + int channel = 5; + int samplesPerCall = 5; await rtcEngine.setMixedAudioFrameParameters( sampleRate: sampleRate, channel: channel, @@ -4692,11 +4679,11 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int sampleRate = 10; - const int channel = 10; - const RawAudioFrameOpModeType mode = + int sampleRate = 5; + int channel = 5; + RawAudioFrameOpModeType mode = RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly; - const int samplesPerCall = 10; + int samplesPerCall = 5; await rtcEngine.setEarMonitoringAudioFrameParameters( sampleRate: sampleRate, channel: channel, @@ -4734,8 +4721,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int sampleRate = 10; - const int channel = 10; + int sampleRate = 5; + int channel = 5; await rtcEngine.setPlaybackAudioFrameBeforeMixingParameters( sampleRate: sampleRate, channel: channel, @@ -4771,7 +4758,7 @@ void 
rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int intervalInMS = 10; + int intervalInMS = 5; await rtcEngine.enableAudioSpectrumMonitor( intervalInMS: intervalInMS, ); @@ -4838,7 +4825,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - final AudioSpectrumObserver observer = AudioSpectrumObserver( + AudioSpectrumObserver observer = AudioSpectrumObserver( onLocalAudioSpectrum: (AudioSpectrumData data) {}, onRemoteAudioSpectrum: (List spectrums, int spectrumNumber) {}, ); @@ -4876,7 +4863,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - final AudioSpectrumObserver observer = AudioSpectrumObserver( + AudioSpectrumObserver observer = AudioSpectrumObserver( onLocalAudioSpectrum: (AudioSpectrumData data) {}, onRemoteAudioSpectrum: (List spectrums, int spectrumNumber) {}, ); @@ -4914,7 +4901,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int volume = 10; + int volume = 5; await rtcEngine.adjustRecordingSignalVolume( volume, ); @@ -4949,7 +4936,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool mute = true; + bool mute = true; await rtcEngine.muteRecordingSignal( mute, ); @@ -4983,7 +4970,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int volume = 10; + int volume = 5; await rtcEngine.adjustPlaybackSignalVolume( volume, ); @@ -5018,8 +5005,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const int volume = 10; + int uid = 5; + int volume = 5; await rtcEngine.adjustUserPlaybackSignalVolume( uid: uid, volume: volume, @@ -5055,7 +5042,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const StreamFallbackOptions option = + StreamFallbackOptions option = StreamFallbackOptions.streamFallbackOptionDisabled; await rtcEngine.setLocalPublishFallbackOption( option, @@ -5091,7 +5078,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const StreamFallbackOptions option = + StreamFallbackOptions option = StreamFallbackOptions.streamFallbackOptionDisabled; await rtcEngine.setRemoteSubscribeFallbackOption( option, @@ -5127,10 +5114,10 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String provider = "hello"; - const String extension = "hello"; - const bool enable = true; - const MediaSourceType type = MediaSourceType.audioPlayoutSource; + String provider = "hello"; + String extension = "hello"; + bool enable = true; + MediaSourceType type = MediaSourceType.audioPlayoutSource; await rtcEngine.enableExtension( provider: provider, extension: extension, @@ -5167,11 +5154,11 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String provider = "hello"; - const String extension = "hello"; - const String key = "hello"; - const String value = "hello"; - const MediaSourceType type = MediaSourceType.audioPlayoutSource; + String provider = "hello"; + String extension = "hello"; + String key = "hello"; + String value = "hello"; + MediaSourceType type = MediaSourceType.audioPlayoutSource; await 
rtcEngine.setExtensionProperty( provider: provider, extension: extension, @@ -5209,11 +5196,11 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String provider = "hello"; - const String extension = "hello"; - const String key = "hello"; - const int bufLen = 10; - const MediaSourceType type = MediaSourceType.audioPlayoutSource; + String provider = "hello"; + String extension = "hello"; + String key = "hello"; + int bufLen = 5; + MediaSourceType type = MediaSourceType.audioPlayoutSource; await rtcEngine.getExtensionProperty( provider: provider, extension: extension, @@ -5251,8 +5238,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; - const String deviceName = "hello"; + bool enabled = true; + String deviceName = "hello"; await rtcEngine.enableLoopbackRecording( enabled: enabled, deviceName: deviceName, @@ -5288,7 +5275,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int volume = 10; + int volume = 5; await rtcEngine.adjustLoopbackSignalVolume( volume, ); @@ -5355,8 +5342,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; - const EarMonitoringFilterType includeAudioFilters = + bool enabled = true; + EarMonitoringFilterType includeAudioFilters = EarMonitoringFilterType.earMonitoringFilterNone; await rtcEngine.enableInEarMonitoring( enabled: enabled, @@ -5393,7 +5380,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int volume = 10; + int volume = 5; await rtcEngine.setInEarMonitoringVolume( volume, ); @@ -5428,8 +5415,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String path = "hello"; - const bool unloadAfterUse = true; + String path = "hello"; + bool unloadAfterUse = true; await rtcEngine.loadExtensionProvider( path: path, unloadAfterUse: unloadAfterUse, @@ -5465,9 +5452,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String provider = "hello"; - const String key = "hello"; - const String value = "hello"; + String provider = "hello"; + String key = "hello"; + String value = "hello"; await rtcEngine.setExtensionProviderProperty( provider: provider, key: key, @@ -5504,9 +5491,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String provider = "hello"; - const String extension = "hello"; - const MediaSourceType type = MediaSourceType.audioPlayoutSource; + String provider = "hello"; + String extension = "hello"; + MediaSourceType type = MediaSourceType.audioPlayoutSource; await rtcEngine.registerExtension( provider: provider, extension: extension, @@ -5542,22 +5529,21 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const CameraDirection configCameraDirection = - CameraDirection.cameraRear; - const CameraFocalLengthType configCameraFocalLengthType = + CameraDirection configCameraDirection = CameraDirection.cameraRear; + CameraFocalLengthType configCameraFocalLengthType = CameraFocalLengthType.cameraFocalLengthDefault; - const int formatWidth = 10; - const int formatHeight = 10; - const int formatFps = 10; - const VideoFormat configFormat = VideoFormat( + int 
formatWidth = 5; + int formatHeight = 5; + int formatFps = 5; + VideoFormat configFormat = VideoFormat( width: formatWidth, height: formatHeight, fps: formatFps, ); - const String configDeviceId = "hello"; - const String configCameraId = "hello"; - const bool configFollowEncodeDimensionRatio = true; - const CameraCapturerConfiguration config = CameraCapturerConfiguration( + String configDeviceId = "hello"; + String configCameraId = "hello"; + bool configFollowEncodeDimensionRatio = true; + CameraCapturerConfiguration config = CameraCapturerConfiguration( cameraDirection: configCameraDirection, cameraFocalLengthType: configCameraFocalLengthType, deviceId: configDeviceId, @@ -5599,7 +5585,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int videoTrackId = 10; + int videoTrackId = 5; await rtcEngine.destroyCustomVideoTrack( videoTrackId, ); @@ -5634,7 +5620,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int videoTrackId = 10; + int videoTrackId = 5; await rtcEngine.destroyCustomEncodedVideoTrack( videoTrackId, ); @@ -5860,7 +5846,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const double factor = 10.0; + double factor = 5.0; await rtcEngine.setCameraZoomFactor( factor, ); @@ -5894,7 +5880,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; + bool enabled = true; await rtcEngine.enableFaceDetection( enabled, ); @@ -5960,8 +5946,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const double positionX = 10.0; - const double positionY = 10.0; + double positionX = 5.0; + double positionY = 5.0; await rtcEngine.setCameraFocusPositionInPreview( positionX: positionX, positionY: positionY, @@ -5997,7 +5983,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool isOn = true; + bool isOn = true; await rtcEngine.setCameraTorchOn( isOn, ); @@ -6031,7 +6017,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; + bool enabled = true; await rtcEngine.setCameraAutoFocusFaceModeEnabled( enabled, ); @@ -6098,8 +6084,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const double positionXinView = 10.0; - const double positionYinView = 10.0; + double positionXinView = 5.0; + double positionYinView = 5.0; await rtcEngine.setCameraExposurePosition( positionXinView: positionXinView, positionYinView: positionYinView, @@ -6167,7 +6153,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const double factor = 10.0; + double factor = 5.0; await rtcEngine.setCameraExposureFactor( factor, ); @@ -6234,7 +6220,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; + bool enabled = true; await rtcEngine.setCameraAutoExposureFaceModeEnabled( enabled, ); @@ -6269,7 +6255,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const CameraStabilizationMode mode = + CameraStabilizationMode mode = CameraStabilizationMode.cameraStabilizationModeOff; await 
rtcEngine.setCameraStabilizationMode( mode, @@ -6305,7 +6291,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool defaultToSpeaker = true; + bool defaultToSpeaker = true; await rtcEngine.setDefaultAudioRouteToSpeakerphone( defaultToSpeaker, ); @@ -6340,7 +6326,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool speakerOn = true; + bool speakerOn = true; await rtcEngine.setEnableSpeakerphone( speakerOn, ); @@ -6407,7 +6393,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int route = 10; + int route = 5; await rtcEngine.setRouteInCommunicationMode( route, ); @@ -6474,7 +6460,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; + bool enabled = true; await rtcEngine.enableCameraCenterStage( enabled, ); @@ -6509,19 +6495,19 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int thumbSizeWidth = 10; - const int thumbSizeHeight = 10; - const SIZE thumbSize = SIZE( + int thumbSizeWidth = 5; + int thumbSizeHeight = 5; + SIZE thumbSize = SIZE( width: thumbSizeWidth, height: thumbSizeHeight, ); - const int iconSizeWidth = 10; - const int iconSizeHeight = 10; - const SIZE iconSize = SIZE( + int iconSizeWidth = 5; + int iconSizeHeight = 5; + SIZE iconSize = SIZE( width: iconSizeWidth, height: iconSizeHeight, ); - const bool includeScreen = true; + bool includeScreen = true; await rtcEngine.getScreenCaptureSources( thumbSize: thumbSize, iconSize: iconSize, @@ -6558,7 +6544,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const AudioSessionOperationRestriction restriction = + AudioSessionOperationRestriction restriction = AudioSessionOperationRestriction .audioSessionOperationRestrictionNone; await rtcEngine.setAudioSessionOperationRestriction( @@ -6595,33 +6581,33 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int displayId = 10; - const int regionRectX = 10; - const int regionRectY = 10; - const int regionRectWidth = 10; - const int regionRectHeight = 10; - const Rectangle regionRect = Rectangle( + int displayId = 5; + int regionRectX = 5; + int regionRectY = 5; + int regionRectWidth = 5; + int regionRectHeight = 5; + Rectangle regionRect = Rectangle( x: regionRectX, y: regionRectY, width: regionRectWidth, height: regionRectHeight, ); - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions captureParamsDimensions = VideoDimensions( + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions captureParamsDimensions = VideoDimensions( width: dimensionsWidth, height: dimensionsHeight, ); - const int captureParamsFrameRate = 10; - const int captureParamsBitrate = 10; - const bool captureParamsCaptureMouseCursor = true; - const bool captureParamsWindowFocus = true; - const List captureParamsExcludeWindowList = []; - const int captureParamsExcludeWindowCount = 10; - const int captureParamsHighLightWidth = 10; - const int captureParamsHighLightColor = 10; - const bool captureParamsEnableHighLight = true; - const ScreenCaptureParameters captureParams = ScreenCaptureParameters( + int captureParamsFrameRate = 5; + int captureParamsBitrate = 5; + bool captureParamsCaptureMouseCursor = 
true; + bool captureParamsWindowFocus = true; + List captureParamsExcludeWindowList = List.filled(5, 5); + int captureParamsExcludeWindowCount = 5; + int captureParamsHighLightWidth = 5; + int captureParamsHighLightColor = 5; + bool captureParamsEnableHighLight = true; + ScreenCaptureParameters captureParams = ScreenCaptureParameters( dimensions: captureParamsDimensions, frameRate: captureParamsFrameRate, bitrate: captureParamsBitrate, @@ -6669,42 +6655,42 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int screenRectX = 10; - const int screenRectY = 10; - const int screenRectWidth = 10; - const int screenRectHeight = 10; - const Rectangle screenRect = Rectangle( + int screenRectX = 5; + int screenRectY = 5; + int screenRectWidth = 5; + int screenRectHeight = 5; + Rectangle screenRect = Rectangle( x: screenRectX, y: screenRectY, width: screenRectWidth, height: screenRectHeight, ); - const int regionRectX = 10; - const int regionRectY = 10; - const int regionRectWidth = 10; - const int regionRectHeight = 10; - const Rectangle regionRect = Rectangle( + int regionRectX = 5; + int regionRectY = 5; + int regionRectWidth = 5; + int regionRectHeight = 5; + Rectangle regionRect = Rectangle( x: regionRectX, y: regionRectY, width: regionRectWidth, height: regionRectHeight, ); - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions captureParamsDimensions = VideoDimensions( + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions captureParamsDimensions = VideoDimensions( width: dimensionsWidth, height: dimensionsHeight, ); - const int captureParamsFrameRate = 10; - const int captureParamsBitrate = 10; - const bool captureParamsCaptureMouseCursor = true; - const bool captureParamsWindowFocus = true; - const List captureParamsExcludeWindowList = []; - const int captureParamsExcludeWindowCount = 10; - const int captureParamsHighLightWidth = 10; - const int captureParamsHighLightColor = 10; - const bool captureParamsEnableHighLight = true; - const ScreenCaptureParameters captureParams = ScreenCaptureParameters( + int captureParamsFrameRate = 5; + int captureParamsBitrate = 5; + bool captureParamsCaptureMouseCursor = true; + bool captureParamsWindowFocus = true; + List captureParamsExcludeWindowList = List.filled(5, 5); + int captureParamsExcludeWindowCount = 5; + int captureParamsHighLightWidth = 5; + int captureParamsHighLightColor = 5; + bool captureParamsEnableHighLight = true; + ScreenCaptureParameters captureParams = ScreenCaptureParameters( dimensions: captureParamsDimensions, frameRate: captureParamsFrameRate, bitrate: captureParamsBitrate, @@ -6783,33 +6769,33 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int windowId = 10; - const int regionRectX = 10; - const int regionRectY = 10; - const int regionRectWidth = 10; - const int regionRectHeight = 10; - const Rectangle regionRect = Rectangle( + int windowId = 5; + int regionRectX = 5; + int regionRectY = 5; + int regionRectWidth = 5; + int regionRectHeight = 5; + Rectangle regionRect = Rectangle( x: regionRectX, y: regionRectY, width: regionRectWidth, height: regionRectHeight, ); - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions captureParamsDimensions = VideoDimensions( + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions captureParamsDimensions = VideoDimensions( width: dimensionsWidth, height: 
dimensionsHeight, ); - const int captureParamsFrameRate = 10; - const int captureParamsBitrate = 10; - const bool captureParamsCaptureMouseCursor = true; - const bool captureParamsWindowFocus = true; - const List captureParamsExcludeWindowList = []; - const int captureParamsExcludeWindowCount = 10; - const int captureParamsHighLightWidth = 10; - const int captureParamsHighLightColor = 10; - const bool captureParamsEnableHighLight = true; - const ScreenCaptureParameters captureParams = ScreenCaptureParameters( + int captureParamsFrameRate = 5; + int captureParamsBitrate = 5; + bool captureParamsCaptureMouseCursor = true; + bool captureParamsWindowFocus = true; + List captureParamsExcludeWindowList = List.filled(5, 5); + int captureParamsExcludeWindowCount = 5; + int captureParamsHighLightWidth = 5; + int captureParamsHighLightColor = 5; + bool captureParamsEnableHighLight = true; + ScreenCaptureParameters captureParams = ScreenCaptureParameters( dimensions: captureParamsDimensions, frameRate: captureParamsFrameRate, bitrate: captureParamsBitrate, @@ -6857,7 +6843,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoContentHint contentHint = VideoContentHint.contentHintNone; + VideoContentHint contentHint = VideoContentHint.contentHintNone; await rtcEngine.setScreenCaptureContentHint( contentHint, ); @@ -6892,11 +6878,11 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int regionRectX = 10; - const int regionRectY = 10; - const int regionRectWidth = 10; - const int regionRectHeight = 10; - const Rectangle regionRect = Rectangle( + int regionRectX = 5; + int regionRectY = 5; + int regionRectWidth = 5; + int regionRectHeight = 5; + Rectangle regionRect = Rectangle( x: regionRectX, y: regionRectY, width: regionRectWidth, @@ -6936,22 +6922,22 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions captureParamsDimensions = VideoDimensions( + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions captureParamsDimensions = VideoDimensions( width: dimensionsWidth, height: dimensionsHeight, ); - const int captureParamsFrameRate = 10; - const int captureParamsBitrate = 10; - const bool captureParamsCaptureMouseCursor = true; - const bool captureParamsWindowFocus = true; - const List captureParamsExcludeWindowList = []; - const int captureParamsExcludeWindowCount = 10; - const int captureParamsHighLightWidth = 10; - const int captureParamsHighLightColor = 10; - const bool captureParamsEnableHighLight = true; - const ScreenCaptureParameters captureParams = ScreenCaptureParameters( + int captureParamsFrameRate = 5; + int captureParamsBitrate = 5; + bool captureParamsCaptureMouseCursor = true; + bool captureParamsWindowFocus = true; + List captureParamsExcludeWindowList = List.filled(5, 5); + int captureParamsExcludeWindowCount = 5; + int captureParamsHighLightWidth = 5; + int captureParamsHighLightColor = 5; + bool captureParamsEnableHighLight = true; + ScreenCaptureParameters captureParams = ScreenCaptureParameters( dimensions: captureParamsDimensions, frameRate: captureParamsFrameRate, bitrate: captureParamsBitrate, @@ -6997,35 +6983,33 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int audioParamsSampleRate = 10; - const int audioParamsChannels 
= 10; - const int audioParamsCaptureSignalVolume = 10; - const ScreenAudioParameters captureParamsAudioParams = - ScreenAudioParameters( + int audioParamsSampleRate = 5; + int audioParamsChannels = 5; + int audioParamsCaptureSignalVolume = 5; + ScreenAudioParameters captureParamsAudioParams = ScreenAudioParameters( sampleRate: audioParamsSampleRate, channels: audioParamsChannels, captureSignalVolume: audioParamsCaptureSignalVolume, ); - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions videoParamsDimensions = VideoDimensions( + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions videoParamsDimensions = VideoDimensions( width: dimensionsWidth, height: dimensionsHeight, ); - const VideoContentHint videoParamsContentHint = + VideoContentHint videoParamsContentHint = VideoContentHint.contentHintNone; - const int videoParamsFrameRate = 10; - const int videoParamsBitrate = 10; - const ScreenVideoParameters captureParamsVideoParams = - ScreenVideoParameters( + int videoParamsFrameRate = 5; + int videoParamsBitrate = 5; + ScreenVideoParameters captureParamsVideoParams = ScreenVideoParameters( dimensions: videoParamsDimensions, frameRate: videoParamsFrameRate, bitrate: videoParamsBitrate, contentHint: videoParamsContentHint, ); - const bool captureParamsCaptureAudio = true; - const bool captureParamsCaptureVideo = true; - const ScreenCaptureParameters2 captureParams = ScreenCaptureParameters2( + bool captureParamsCaptureAudio = true; + bool captureParamsCaptureVideo = true; + ScreenCaptureParameters2 captureParams = ScreenCaptureParameters2( captureAudio: captureParamsCaptureAudio, audioParams: captureParamsAudioParams, captureVideo: captureParamsCaptureVideo, @@ -7064,35 +7048,33 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int audioParamsSampleRate = 10; - const int audioParamsChannels = 10; - const int audioParamsCaptureSignalVolume = 10; - const ScreenAudioParameters captureParamsAudioParams = - ScreenAudioParameters( + int audioParamsSampleRate = 5; + int audioParamsChannels = 5; + int audioParamsCaptureSignalVolume = 5; + ScreenAudioParameters captureParamsAudioParams = ScreenAudioParameters( sampleRate: audioParamsSampleRate, channels: audioParamsChannels, captureSignalVolume: audioParamsCaptureSignalVolume, ); - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions videoParamsDimensions = VideoDimensions( + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions videoParamsDimensions = VideoDimensions( width: dimensionsWidth, height: dimensionsHeight, ); - const VideoContentHint videoParamsContentHint = + VideoContentHint videoParamsContentHint = VideoContentHint.contentHintNone; - const int videoParamsFrameRate = 10; - const int videoParamsBitrate = 10; - const ScreenVideoParameters captureParamsVideoParams = - ScreenVideoParameters( + int videoParamsFrameRate = 5; + int videoParamsBitrate = 5; + ScreenVideoParameters captureParamsVideoParams = ScreenVideoParameters( dimensions: videoParamsDimensions, frameRate: videoParamsFrameRate, bitrate: videoParamsBitrate, contentHint: videoParamsContentHint, ); - const bool captureParamsCaptureAudio = true; - const bool captureParamsCaptureVideo = true; - const ScreenCaptureParameters2 captureParams = ScreenCaptureParameters2( + bool captureParamsCaptureAudio = true; + bool captureParamsCaptureVideo = true; + ScreenCaptureParameters2 captureParams = 
ScreenCaptureParameters2( captureAudio: captureParamsCaptureAudio, audioParams: captureParamsAudioParams, captureVideo: captureParamsCaptureVideo, @@ -7195,7 +7177,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const ScreenScenarioType screenScenario = + ScreenScenarioType screenScenario = ScreenScenarioType.screenScenarioDocument; await rtcEngine.setScreenCaptureScenario( screenScenario, @@ -7293,9 +7275,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String callId = "hello"; - const int rating = 10; - const String description = "hello"; + String callId = "hello"; + int rating = 5; + String description = "hello"; await rtcEngine.rate( callId: callId, rating: rating, @@ -7331,8 +7313,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String callId = "hello"; - const String description = "hello"; + String callId = "hello"; + String description = "hello"; await rtcEngine.complain( callId: callId, description: description, @@ -7367,7 +7349,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String url = "hello"; + String url = "hello"; await rtcEngine.startRtmpStreamWithoutTranscoding( url, ); @@ -7402,35 +7384,35 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String url = "hello"; - const VideoCodecProfileType transcodingVideoCodecProfile = + String url = "hello"; + VideoCodecProfileType transcodingVideoCodecProfile = VideoCodecProfileType.videoCodecProfileBaseline; - const VideoCodecTypeForStream transcodingVideoCodecType = + VideoCodecTypeForStream transcodingVideoCodecType = VideoCodecTypeForStream.videoCodecH264ForStream; - const AudioSampleRateType transcodingAudioSampleRate = + AudioSampleRateType transcodingAudioSampleRate = AudioSampleRateType.audioSampleRate32000; - const AudioCodecProfileType transcodingAudioCodecProfile = + AudioCodecProfileType transcodingAudioCodecProfile = AudioCodecProfileType.audioCodecProfileLcAac; - const int transcodingWidth = 10; - const int transcodingHeight = 10; - const int transcodingVideoBitrate = 10; - const int transcodingVideoFramerate = 10; - const bool transcodingLowLatency = true; - const int transcodingVideoGop = 10; - const int transcodingBackgroundColor = 10; - const int transcodingUserCount = 10; - const List transcodingTranscodingUsers = []; - const String transcodingTranscodingExtraInfo = "hello"; - const String transcodingMetadata = "hello"; - const List transcodingWatermark = []; - const int transcodingWatermarkCount = 10; - const List transcodingBackgroundImage = []; - const int transcodingBackgroundImageCount = 10; - const int transcodingAudioBitrate = 10; - const int transcodingAudioChannels = 10; - const List transcodingAdvancedFeatures = []; - const int transcodingAdvancedFeatureCount = 10; - const LiveTranscoding transcoding = LiveTranscoding( + int transcodingWidth = 5; + int transcodingHeight = 5; + int transcodingVideoBitrate = 5; + int transcodingVideoFramerate = 5; + bool transcodingLowLatency = true; + int transcodingVideoGop = 5; + int transcodingBackgroundColor = 5; + int transcodingUserCount = 5; + List transcodingTranscodingUsers = []; + String transcodingTranscodingExtraInfo = "hello"; + String transcodingMetadata = "hello"; + List transcodingWatermark = []; + int transcodingWatermarkCount = 5; 
+ List transcodingBackgroundImage = []; + int transcodingBackgroundImageCount = 5; + int transcodingAudioBitrate = 5; + int transcodingAudioChannels = 5; + List transcodingAdvancedFeatures = []; + int transcodingAdvancedFeatureCount = 5; + LiveTranscoding transcoding = LiveTranscoding( width: transcodingWidth, height: transcodingHeight, videoBitrate: transcodingVideoBitrate, @@ -7490,34 +7472,34 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoCodecProfileType transcodingVideoCodecProfile = + VideoCodecProfileType transcodingVideoCodecProfile = VideoCodecProfileType.videoCodecProfileBaseline; - const VideoCodecTypeForStream transcodingVideoCodecType = + VideoCodecTypeForStream transcodingVideoCodecType = VideoCodecTypeForStream.videoCodecH264ForStream; - const AudioSampleRateType transcodingAudioSampleRate = + AudioSampleRateType transcodingAudioSampleRate = AudioSampleRateType.audioSampleRate32000; - const AudioCodecProfileType transcodingAudioCodecProfile = + AudioCodecProfileType transcodingAudioCodecProfile = AudioCodecProfileType.audioCodecProfileLcAac; - const int transcodingWidth = 10; - const int transcodingHeight = 10; - const int transcodingVideoBitrate = 10; - const int transcodingVideoFramerate = 10; - const bool transcodingLowLatency = true; - const int transcodingVideoGop = 10; - const int transcodingBackgroundColor = 10; - const int transcodingUserCount = 10; - const List transcodingTranscodingUsers = []; - const String transcodingTranscodingExtraInfo = "hello"; - const String transcodingMetadata = "hello"; - const List transcodingWatermark = []; - const int transcodingWatermarkCount = 10; - const List transcodingBackgroundImage = []; - const int transcodingBackgroundImageCount = 10; - const int transcodingAudioBitrate = 10; - const int transcodingAudioChannels = 10; - const List transcodingAdvancedFeatures = []; - const int transcodingAdvancedFeatureCount = 10; - const LiveTranscoding transcoding = LiveTranscoding( + int transcodingWidth = 5; + int transcodingHeight = 5; + int transcodingVideoBitrate = 5; + int transcodingVideoFramerate = 5; + bool transcodingLowLatency = true; + int transcodingVideoGop = 5; + int transcodingBackgroundColor = 5; + int transcodingUserCount = 5; + List transcodingTranscodingUsers = []; + String transcodingTranscodingExtraInfo = "hello"; + String transcodingMetadata = "hello"; + List transcodingWatermark = []; + int transcodingWatermarkCount = 5; + List transcodingBackgroundImage = []; + int transcodingBackgroundImageCount = 5; + int transcodingAudioBitrate = 5; + int transcodingAudioChannels = 5; + List transcodingAdvancedFeatures = []; + int transcodingAdvancedFeatureCount = 5; + LiveTranscoding transcoding = LiveTranscoding( width: transcodingWidth, height: transcodingHeight, videoBitrate: transcodingVideoBitrate, @@ -7576,35 +7558,32 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoCodecType videoOutputConfigurationCodecType = + VideoCodecType videoOutputConfigurationCodecType = VideoCodecType.videoCodecNone; - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions videoOutputConfigurationDimensions = - VideoDimensions( + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions videoOutputConfigurationDimensions = VideoDimensions( width: dimensionsWidth, height: dimensionsHeight, ); - const OrientationMode videoOutputConfigurationOrientationMode = + 
OrientationMode videoOutputConfigurationOrientationMode = OrientationMode.orientationModeAdaptive; - const DegradationPreference - videoOutputConfigurationDegradationPreference = + DegradationPreference videoOutputConfigurationDegradationPreference = DegradationPreference.maintainQuality; - const VideoMirrorModeType videoOutputConfigurationMirrorMode = + VideoMirrorModeType videoOutputConfigurationMirrorMode = VideoMirrorModeType.videoMirrorModeAuto; - const EncodingPreference advanceOptionsEncodingPreference = + EncodingPreference advanceOptionsEncodingPreference = EncodingPreference.preferAuto; - const CompressionPreference advanceOptionsCompressionPreference = + CompressionPreference advanceOptionsCompressionPreference = CompressionPreference.preferLowLatency; - const AdvanceOptions videoOutputConfigurationAdvanceOptions = - AdvanceOptions( + AdvanceOptions videoOutputConfigurationAdvanceOptions = AdvanceOptions( encodingPreference: advanceOptionsEncodingPreference, compressionPreference: advanceOptionsCompressionPreference, ); - const int videoOutputConfigurationFrameRate = 10; - const int videoOutputConfigurationBitrate = 10; - const int videoOutputConfigurationMinBitrate = 10; - const VideoEncoderConfiguration configVideoOutputConfiguration = + int videoOutputConfigurationFrameRate = 5; + int videoOutputConfigurationBitrate = 5; + int videoOutputConfigurationMinBitrate = 5; + VideoEncoderConfiguration configVideoOutputConfiguration = VideoEncoderConfiguration( codecType: videoOutputConfigurationCodecType, dimensions: videoOutputConfigurationDimensions, @@ -7616,11 +7595,10 @@ void rtcEngineSmokeTestCases() { mirrorMode: videoOutputConfigurationMirrorMode, advanceOptions: videoOutputConfigurationAdvanceOptions, ); - const int configStreamCount = 10; - const List configVideoInputStreams = []; - const bool configSyncWithPrimaryCamera = true; - const LocalTranscoderConfiguration config = - LocalTranscoderConfiguration( + int configStreamCount = 5; + List configVideoInputStreams = []; + bool configSyncWithPrimaryCamera = true; + LocalTranscoderConfiguration config = LocalTranscoderConfiguration( streamCount: configStreamCount, videoInputStreams: configVideoInputStreams, videoOutputConfiguration: configVideoOutputConfiguration, @@ -7660,35 +7638,32 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoCodecType videoOutputConfigurationCodecType = + VideoCodecType videoOutputConfigurationCodecType = VideoCodecType.videoCodecNone; - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions videoOutputConfigurationDimensions = - VideoDimensions( + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions videoOutputConfigurationDimensions = VideoDimensions( width: dimensionsWidth, height: dimensionsHeight, ); - const OrientationMode videoOutputConfigurationOrientationMode = + OrientationMode videoOutputConfigurationOrientationMode = OrientationMode.orientationModeAdaptive; - const DegradationPreference - videoOutputConfigurationDegradationPreference = + DegradationPreference videoOutputConfigurationDegradationPreference = DegradationPreference.maintainQuality; - const VideoMirrorModeType videoOutputConfigurationMirrorMode = + VideoMirrorModeType videoOutputConfigurationMirrorMode = VideoMirrorModeType.videoMirrorModeAuto; - const EncodingPreference advanceOptionsEncodingPreference = + EncodingPreference advanceOptionsEncodingPreference = EncodingPreference.preferAuto; - const 
CompressionPreference advanceOptionsCompressionPreference = + CompressionPreference advanceOptionsCompressionPreference = CompressionPreference.preferLowLatency; - const AdvanceOptions videoOutputConfigurationAdvanceOptions = - AdvanceOptions( + AdvanceOptions videoOutputConfigurationAdvanceOptions = AdvanceOptions( encodingPreference: advanceOptionsEncodingPreference, compressionPreference: advanceOptionsCompressionPreference, ); - const int videoOutputConfigurationFrameRate = 10; - const int videoOutputConfigurationBitrate = 10; - const int videoOutputConfigurationMinBitrate = 10; - const VideoEncoderConfiguration configVideoOutputConfiguration = + int videoOutputConfigurationFrameRate = 5; + int videoOutputConfigurationBitrate = 5; + int videoOutputConfigurationMinBitrate = 5; + VideoEncoderConfiguration configVideoOutputConfiguration = VideoEncoderConfiguration( codecType: videoOutputConfigurationCodecType, dimensions: videoOutputConfigurationDimensions, @@ -7700,11 +7675,10 @@ void rtcEngineSmokeTestCases() { mirrorMode: videoOutputConfigurationMirrorMode, advanceOptions: videoOutputConfigurationAdvanceOptions, ); - const int configStreamCount = 10; - const List configVideoInputStreams = []; - const bool configSyncWithPrimaryCamera = true; - const LocalTranscoderConfiguration config = - LocalTranscoderConfiguration( + int configStreamCount = 5; + List configVideoInputStreams = []; + bool configSyncWithPrimaryCamera = true; + LocalTranscoderConfiguration config = LocalTranscoderConfiguration( streamCount: configStreamCount, videoInputStreams: configVideoInputStreams, videoOutputConfiguration: configVideoOutputConfiguration, @@ -7744,7 +7718,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String url = "hello"; + String url = "hello"; await rtcEngine.stopRtmpStream( url, ); @@ -7810,24 +7784,22 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoSourceType sourceType = - VideoSourceType.videoSourceCameraPrimary; - const CameraDirection configCameraDirection = - CameraDirection.cameraRear; - const CameraFocalLengthType configCameraFocalLengthType = + VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary; + CameraDirection configCameraDirection = CameraDirection.cameraRear; + CameraFocalLengthType configCameraFocalLengthType = CameraFocalLengthType.cameraFocalLengthDefault; - const int formatWidth = 10; - const int formatHeight = 10; - const int formatFps = 10; - const VideoFormat configFormat = VideoFormat( + int formatWidth = 5; + int formatHeight = 5; + int formatFps = 5; + VideoFormat configFormat = VideoFormat( width: formatWidth, height: formatHeight, fps: formatFps, ); - const String configDeviceId = "hello"; - const String configCameraId = "hello"; - const bool configFollowEncodeDimensionRatio = true; - const CameraCapturerConfiguration config = CameraCapturerConfiguration( + String configDeviceId = "hello"; + String configCameraId = "hello"; + bool configFollowEncodeDimensionRatio = true; + CameraCapturerConfiguration config = CameraCapturerConfiguration( cameraDirection: configCameraDirection, cameraFocalLengthType: configCameraFocalLengthType, deviceId: configDeviceId, @@ -7869,8 +7841,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoSourceType sourceType = - VideoSourceType.videoSourceCameraPrimary; + VideoSourceType sourceType = 
VideoSourceType.videoSourceCameraPrimary; await rtcEngine.stopCameraCapture( sourceType, ); @@ -7904,8 +7875,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoSourceType type = VideoSourceType.videoSourceCameraPrimary; - const VideoOrientation orientation = VideoOrientation.videoOrientation0; + VideoSourceType type = VideoSourceType.videoSourceCameraPrimary; + VideoOrientation orientation = VideoOrientation.videoOrientation0; await rtcEngine.setCameraDeviceOrientation( type: type, orientation: orientation, @@ -7941,8 +7912,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoSourceType type = VideoSourceType.videoSourceCameraPrimary; - const VideoOrientation orientation = VideoOrientation.videoOrientation0; + VideoSourceType type = VideoSourceType.videoSourceCameraPrimary; + VideoOrientation orientation = VideoOrientation.videoOrientation0; await rtcEngine.setScreenCaptureOrientation( type: type, orientation: orientation, @@ -8009,7 +7980,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - final RtcEngineEventHandler eventHandler = RtcEngineEventHandler( + RtcEngineEventHandler eventHandler = RtcEngineEventHandler( onJoinChannelSuccess: (RtcConnection connection, int elapsed) {}, onRejoinChannelSuccess: (RtcConnection connection, int elapsed) {}, onProxyConnected: (String channel, int uid, ProxyType proxyType, @@ -8225,7 +8196,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - final RtcEngineEventHandler eventHandler = RtcEngineEventHandler( + RtcEngineEventHandler eventHandler = RtcEngineEventHandler( onJoinChannelSuccess: (RtcConnection connection, int elapsed) {}, onRejoinChannelSuccess: (RtcConnection connection, int elapsed) {}, onProxyConnected: (String channel, int uid, ProxyType proxyType, @@ -8442,8 +8413,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const PriorityType userPriority = PriorityType.priorityHigh; + int uid = 5; + PriorityType userPriority = PriorityType.priorityHigh; await rtcEngine.setRemoteUserPriority( uid: uid, userPriority: userPriority, @@ -8479,7 +8450,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String encryptionMode = "hello"; + String encryptionMode = "hello"; await rtcEngine.setEncryptionMode( encryptionMode, ); @@ -8513,7 +8484,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String secret = "hello"; + String secret = "hello"; await rtcEngine.setEncryptionSecret( secret, ); @@ -8547,12 +8518,12 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; - const EncryptionMode configEncryptionMode = EncryptionMode.aes128Xts; - const String configEncryptionKey = "hello"; - Uint8List configEncryptionKdfSalt = Uint8List.fromList([1, 2, 3, 4, 5]); - const bool configDatastreamEncryptionEnabled = true; - final EncryptionConfig config = EncryptionConfig( + bool enabled = true; + EncryptionMode configEncryptionMode = EncryptionMode.aes128Xts; + String configEncryptionKey = "hello"; + Uint8List configEncryptionKdfSalt = Uint8List.fromList([1, 1, 1, 1, 1]); + bool configDatastreamEncryptionEnabled = true; + 
EncryptionConfig config = EncryptionConfig( encryptionMode: configEncryptionMode, encryptionKey: configEncryptionKey, encryptionKdfSalt: configEncryptionKdfSalt, @@ -8592,9 +8563,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int streamId = 10; - Uint8List data = Uint8List.fromList([1, 2, 3, 4, 5]); - const int length = 10; + int streamId = 5; + Uint8List data = Uint8List.fromList([1, 1, 1, 1, 1]); + int length = 5; await rtcEngine.sendStreamMessage( streamId: streamId, data: data, @@ -8630,39 +8601,38 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String watermarkUrl = "hello"; - const int positionInLandscapeModeX = 10; - const int positionInLandscapeModeY = 10; - const int positionInLandscapeModeWidth = 10; - const int positionInLandscapeModeHeight = 10; - const Rectangle optionsPositionInLandscapeMode = Rectangle( + String watermarkUrl = "hello"; + int positionInLandscapeModeX = 5; + int positionInLandscapeModeY = 5; + int positionInLandscapeModeWidth = 5; + int positionInLandscapeModeHeight = 5; + Rectangle optionsPositionInLandscapeMode = Rectangle( x: positionInLandscapeModeX, y: positionInLandscapeModeY, width: positionInLandscapeModeWidth, height: positionInLandscapeModeHeight, ); - const int positionInPortraitModeX = 10; - const int positionInPortraitModeY = 10; - const int positionInPortraitModeWidth = 10; - const int positionInPortraitModeHeight = 10; - const Rectangle optionsPositionInPortraitMode = Rectangle( + int positionInPortraitModeX = 5; + int positionInPortraitModeY = 5; + int positionInPortraitModeWidth = 5; + int positionInPortraitModeHeight = 5; + Rectangle optionsPositionInPortraitMode = Rectangle( x: positionInPortraitModeX, y: positionInPortraitModeY, width: positionInPortraitModeWidth, height: positionInPortraitModeHeight, ); - const double watermarkRatioXRatio = 10.0; - const double watermarkRatioYRatio = 10.0; - const double watermarkRatioWidthRatio = 10.0; - const WatermarkRatio optionsWatermarkRatio = WatermarkRatio( + double watermarkRatioXRatio = 5.0; + double watermarkRatioYRatio = 5.0; + double watermarkRatioWidthRatio = 5.0; + WatermarkRatio optionsWatermarkRatio = WatermarkRatio( xRatio: watermarkRatioXRatio, yRatio: watermarkRatioYRatio, widthRatio: watermarkRatioWidthRatio, ); - const WatermarkFitMode optionsMode = - WatermarkFitMode.fitModeCoverPosition; - const bool optionsVisibleInPreview = true; - const WatermarkOptions options = WatermarkOptions( + WatermarkFitMode optionsMode = WatermarkFitMode.fitModeCoverPosition; + bool optionsVisibleInPreview = true; + WatermarkOptions options = WatermarkOptions( visibleInPreview: optionsVisibleInPreview, positionInLandscapeMode: optionsPositionInLandscapeMode, positionInPortraitMode: optionsPositionInPortraitMode, @@ -8796,7 +8766,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; + bool enabled = true; await rtcEngine.enableWebSdkInteroperability( enabled, ); @@ -8831,11 +8801,11 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String id = "hello"; - const String category = "hello"; - const String event = "hello"; - const String label = "hello"; - const int value = 10; + String id = "hello"; + String category = "hello"; + String event = "hello"; + String label = "hello"; + int value = 5; await 
rtcEngine.sendCustomReportMessage( id: id, category: category, @@ -8874,10 +8844,10 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - final MetadataObserver observer = MetadataObserver( + MetadataObserver observer = MetadataObserver( onMetadataReceived: (Metadata metadata) {}, ); - const MetadataType type = MetadataType.unknownMetadata; + MetadataType type = MetadataType.unknownMetadata; rtcEngine.registerMediaMetadataObserver( observer: observer, type: type, @@ -8913,10 +8883,10 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - final MetadataObserver observer = MetadataObserver( + MetadataObserver observer = MetadataObserver( onMetadataReceived: (Metadata metadata) {}, ); - const MetadataType type = MetadataType.unknownMetadata; + MetadataType type = MetadataType.unknownMetadata; rtcEngine.unregisterMediaMetadataObserver( observer: observer, type: type, @@ -8952,13 +8922,13 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String channelId = "hello"; - const int uid = 10; - const String location = "hello"; - const String uuid = "hello"; - const String passwd = "hello"; - const int durationMs = 10; - const bool autoUpload = true; + String channelId = "hello"; + int uid = 5; + String location = "hello"; + String uuid = "hello"; + String passwd = "hello"; + int durationMs = 5; + bool autoUpload = true; await rtcEngine.startAudioFrameDump( channelId: channelId, uid: uid, @@ -8998,9 +8968,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String channelId = "hello"; - const int uid = 10; - const String location = "hello"; + String channelId = "hello"; + int uid = 5; + String location = "hello"; await rtcEngine.stopAudioFrameDump( channelId: channelId, uid: uid, @@ -9036,8 +9006,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; - const AudioAinsMode mode = AudioAinsMode.ainsModeBalanced; + bool enabled = true; + AudioAinsMode mode = AudioAinsMode.ainsModeBalanced; await rtcEngine.setAINSMode( enabled: enabled, mode: mode, @@ -9072,8 +9042,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String appId = "hello"; - const String userAccount = "hello"; + String appId = "hello"; + String userAccount = "hello"; await rtcEngine.registerLocalUserAccount( appId: appId, userAccount: userAccount, @@ -9109,50 +9079,50 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String token = "hello"; - const String channelId = "hello"; - const String userAccount = "hello"; - const ClientRoleType optionsClientRoleType = + String token = "hello"; + String channelId = "hello"; + String userAccount = "hello"; + ClientRoleType optionsClientRoleType = ClientRoleType.clientRoleBroadcaster; - const AudienceLatencyLevelType optionsAudienceLatencyLevel = + AudienceLatencyLevelType optionsAudienceLatencyLevel = AudienceLatencyLevelType.audienceLatencyLevelLowLatency; - const VideoStreamType optionsDefaultVideoStreamType = + VideoStreamType optionsDefaultVideoStreamType = VideoStreamType.videoStreamHigh; - const ChannelProfileType optionsChannelProfile = + ChannelProfileType optionsChannelProfile = ChannelProfileType.channelProfileCommunication; - const bool 
optionsPublishCameraTrack = true; - const bool optionsPublishSecondaryCameraTrack = true; - const bool optionsPublishThirdCameraTrack = true; - const bool optionsPublishFourthCameraTrack = true; - const bool optionsPublishMicrophoneTrack = true; - const bool optionsPublishScreenCaptureVideo = true; - const bool optionsPublishScreenCaptureAudio = true; - const bool optionsPublishScreenTrack = true; - const bool optionsPublishSecondaryScreenTrack = true; - const bool optionsPublishThirdScreenTrack = true; - const bool optionsPublishFourthScreenTrack = true; - const bool optionsPublishCustomAudioTrack = true; - const int optionsPublishCustomAudioTrackId = 10; - const bool optionsPublishCustomVideoTrack = true; - const bool optionsPublishEncodedVideoTrack = true; - const bool optionsPublishMediaPlayerAudioTrack = true; - const bool optionsPublishMediaPlayerVideoTrack = true; - const bool optionsPublishTranscodedVideoTrack = true; - const bool optionsPublishMixedAudioTrack = true; - const bool optionsPublishLipSyncTrack = true; - const bool optionsAutoSubscribeAudio = true; - const bool optionsAutoSubscribeVideo = true; - const bool optionsEnableAudioRecordingOrPlayout = true; - const int optionsPublishMediaPlayerId = 10; - const int optionsAudioDelayMs = 10; - const int optionsMediaPlayerAudioDelayMs = 10; - const String optionsToken = "hello"; - const bool optionsEnableBuiltInMediaEncryption = true; - const bool optionsPublishRhythmPlayerTrack = true; - const bool optionsIsInteractiveAudience = true; - const int optionsCustomVideoTrackId = 10; - const bool optionsIsAudioFilterable = true; - const ChannelMediaOptions options = ChannelMediaOptions( + bool optionsPublishCameraTrack = true; + bool optionsPublishSecondaryCameraTrack = true; + bool optionsPublishThirdCameraTrack = true; + bool optionsPublishFourthCameraTrack = true; + bool optionsPublishMicrophoneTrack = true; + bool optionsPublishScreenCaptureVideo = true; + bool optionsPublishScreenCaptureAudio = true; + bool optionsPublishScreenTrack = true; + bool optionsPublishSecondaryScreenTrack = true; + bool optionsPublishThirdScreenTrack = true; + bool optionsPublishFourthScreenTrack = true; + bool optionsPublishCustomAudioTrack = true; + int optionsPublishCustomAudioTrackId = 5; + bool optionsPublishCustomVideoTrack = true; + bool optionsPublishEncodedVideoTrack = true; + bool optionsPublishMediaPlayerAudioTrack = true; + bool optionsPublishMediaPlayerVideoTrack = true; + bool optionsPublishTranscodedVideoTrack = true; + bool optionsPublishMixedAudioTrack = true; + bool optionsPublishLipSyncTrack = true; + bool optionsAutoSubscribeAudio = true; + bool optionsAutoSubscribeVideo = true; + bool optionsEnableAudioRecordingOrPlayout = true; + int optionsPublishMediaPlayerId = 5; + int optionsAudioDelayMs = 5; + int optionsMediaPlayerAudioDelayMs = 5; + String optionsToken = "hello"; + bool optionsEnableBuiltInMediaEncryption = true; + bool optionsPublishRhythmPlayerTrack = true; + bool optionsIsInteractiveAudience = true; + int optionsCustomVideoTrackId = 5; + bool optionsIsAudioFilterable = true; + ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, publishThirdCameraTrack: optionsPublishThirdCameraTrack, @@ -9227,50 +9197,50 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String token = "hello"; - const String channelId = "hello"; - const String userAccount = 
"hello"; - const ClientRoleType optionsClientRoleType = + String token = "hello"; + String channelId = "hello"; + String userAccount = "hello"; + ClientRoleType optionsClientRoleType = ClientRoleType.clientRoleBroadcaster; - const AudienceLatencyLevelType optionsAudienceLatencyLevel = + AudienceLatencyLevelType optionsAudienceLatencyLevel = AudienceLatencyLevelType.audienceLatencyLevelLowLatency; - const VideoStreamType optionsDefaultVideoStreamType = + VideoStreamType optionsDefaultVideoStreamType = VideoStreamType.videoStreamHigh; - const ChannelProfileType optionsChannelProfile = + ChannelProfileType optionsChannelProfile = ChannelProfileType.channelProfileCommunication; - const bool optionsPublishCameraTrack = true; - const bool optionsPublishSecondaryCameraTrack = true; - const bool optionsPublishThirdCameraTrack = true; - const bool optionsPublishFourthCameraTrack = true; - const bool optionsPublishMicrophoneTrack = true; - const bool optionsPublishScreenCaptureVideo = true; - const bool optionsPublishScreenCaptureAudio = true; - const bool optionsPublishScreenTrack = true; - const bool optionsPublishSecondaryScreenTrack = true; - const bool optionsPublishThirdScreenTrack = true; - const bool optionsPublishFourthScreenTrack = true; - const bool optionsPublishCustomAudioTrack = true; - const int optionsPublishCustomAudioTrackId = 10; - const bool optionsPublishCustomVideoTrack = true; - const bool optionsPublishEncodedVideoTrack = true; - const bool optionsPublishMediaPlayerAudioTrack = true; - const bool optionsPublishMediaPlayerVideoTrack = true; - const bool optionsPublishTranscodedVideoTrack = true; - const bool optionsPublishMixedAudioTrack = true; - const bool optionsPublishLipSyncTrack = true; - const bool optionsAutoSubscribeAudio = true; - const bool optionsAutoSubscribeVideo = true; - const bool optionsEnableAudioRecordingOrPlayout = true; - const int optionsPublishMediaPlayerId = 10; - const int optionsAudioDelayMs = 10; - const int optionsMediaPlayerAudioDelayMs = 10; - const String optionsToken = "hello"; - const bool optionsEnableBuiltInMediaEncryption = true; - const bool optionsPublishRhythmPlayerTrack = true; - const bool optionsIsInteractiveAudience = true; - const int optionsCustomVideoTrackId = 10; - const bool optionsIsAudioFilterable = true; - const ChannelMediaOptions options = ChannelMediaOptions( + bool optionsPublishCameraTrack = true; + bool optionsPublishSecondaryCameraTrack = true; + bool optionsPublishThirdCameraTrack = true; + bool optionsPublishFourthCameraTrack = true; + bool optionsPublishMicrophoneTrack = true; + bool optionsPublishScreenCaptureVideo = true; + bool optionsPublishScreenCaptureAudio = true; + bool optionsPublishScreenTrack = true; + bool optionsPublishSecondaryScreenTrack = true; + bool optionsPublishThirdScreenTrack = true; + bool optionsPublishFourthScreenTrack = true; + bool optionsPublishCustomAudioTrack = true; + int optionsPublishCustomAudioTrackId = 5; + bool optionsPublishCustomVideoTrack = true; + bool optionsPublishEncodedVideoTrack = true; + bool optionsPublishMediaPlayerAudioTrack = true; + bool optionsPublishMediaPlayerVideoTrack = true; + bool optionsPublishTranscodedVideoTrack = true; + bool optionsPublishMixedAudioTrack = true; + bool optionsPublishLipSyncTrack = true; + bool optionsAutoSubscribeAudio = true; + bool optionsAutoSubscribeVideo = true; + bool optionsEnableAudioRecordingOrPlayout = true; + int optionsPublishMediaPlayerId = 5; + int optionsAudioDelayMs = 5; + int optionsMediaPlayerAudioDelayMs = 5; + String 
optionsToken = "hello"; + bool optionsEnableBuiltInMediaEncryption = true; + bool optionsPublishRhythmPlayerTrack = true; + bool optionsIsInteractiveAudience = true; + int optionsCustomVideoTrackId = 5; + bool optionsIsAudioFilterable = true; + ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, publishThirdCameraTrack: optionsPublishThirdCameraTrack, @@ -9345,7 +9315,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String userAccount = "hello"; + String userAccount = "hello"; await rtcEngine.getUserInfoByUserAccount( userAccount, ); @@ -9380,7 +9350,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; + int uid = 5; await rtcEngine.getUserInfoByUid( uid, ); @@ -9510,7 +9480,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const AudioProfileType profile = AudioProfileType.audioProfileDefault; + AudioProfileType profile = AudioProfileType.audioProfileDefault; await rtcEngine.setDirectCdnStreamingAudioConfiguration( profile, ); @@ -9545,31 +9515,31 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoCodecType configCodecType = VideoCodecType.videoCodecNone; - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions configDimensions = VideoDimensions( + VideoCodecType configCodecType = VideoCodecType.videoCodecNone; + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions configDimensions = VideoDimensions( width: dimensionsWidth, height: dimensionsHeight, ); - const OrientationMode configOrientationMode = + OrientationMode configOrientationMode = OrientationMode.orientationModeAdaptive; - const DegradationPreference configDegradationPreference = + DegradationPreference configDegradationPreference = DegradationPreference.maintainQuality; - const VideoMirrorModeType configMirrorMode = + VideoMirrorModeType configMirrorMode = VideoMirrorModeType.videoMirrorModeAuto; - const EncodingPreference advanceOptionsEncodingPreference = + EncodingPreference advanceOptionsEncodingPreference = EncodingPreference.preferAuto; - const CompressionPreference advanceOptionsCompressionPreference = + CompressionPreference advanceOptionsCompressionPreference = CompressionPreference.preferLowLatency; - const AdvanceOptions configAdvanceOptions = AdvanceOptions( + AdvanceOptions configAdvanceOptions = AdvanceOptions( encodingPreference: advanceOptionsEncodingPreference, compressionPreference: advanceOptionsCompressionPreference, ); - const int configFrameRate = 10; - const int configBitrate = 10; - const int configMinBitrate = 10; - const VideoEncoderConfiguration config = VideoEncoderConfiguration( + int configFrameRate = 5; + int configBitrate = 5; + int configMinBitrate = 5; + VideoEncoderConfiguration config = VideoEncoderConfiguration( codecType: configCodecType, dimensions: configDimensions, frameRate: configFrameRate, @@ -9614,22 +9584,21 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - final DirectCdnStreamingEventHandler eventHandler = + DirectCdnStreamingEventHandler eventHandler = DirectCdnStreamingEventHandler( onDirectCdnStreamingStateChanged: (DirectCdnStreamingState state, DirectCdnStreamingReason reason, String 
message) {}, onDirectCdnStreamingStats: (DirectCdnStreamingStats stats) {}, ); - const String publishUrl = "hello"; - const bool optionsPublishCameraTrack = true; - const bool optionsPublishMicrophoneTrack = true; - const bool optionsPublishCustomAudioTrack = true; - const bool optionsPublishCustomVideoTrack = true; - const bool optionsPublishMediaPlayerAudioTrack = true; - const int optionsPublishMediaPlayerId = 10; - const int optionsCustomVideoTrackId = 10; - const DirectCdnStreamingMediaOptions options = - DirectCdnStreamingMediaOptions( + String publishUrl = "hello"; + bool optionsPublishCameraTrack = true; + bool optionsPublishMicrophoneTrack = true; + bool optionsPublishCustomAudioTrack = true; + bool optionsPublishCustomVideoTrack = true; + bool optionsPublishMediaPlayerAudioTrack = true; + int optionsPublishMediaPlayerId = 5; + int optionsCustomVideoTrackId = 5; + DirectCdnStreamingMediaOptions options = DirectCdnStreamingMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishMicrophoneTrack: optionsPublishMicrophoneTrack, publishCustomAudioTrack: optionsPublishCustomAudioTrack, @@ -9706,15 +9675,14 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool optionsPublishCameraTrack = true; - const bool optionsPublishMicrophoneTrack = true; - const bool optionsPublishCustomAudioTrack = true; - const bool optionsPublishCustomVideoTrack = true; - const bool optionsPublishMediaPlayerAudioTrack = true; - const int optionsPublishMediaPlayerId = 10; - const int optionsCustomVideoTrackId = 10; - const DirectCdnStreamingMediaOptions options = - DirectCdnStreamingMediaOptions( + bool optionsPublishCameraTrack = true; + bool optionsPublishMicrophoneTrack = true; + bool optionsPublishCustomAudioTrack = true; + bool optionsPublishCustomVideoTrack = true; + bool optionsPublishMediaPlayerAudioTrack = true; + int optionsPublishMediaPlayerId = 5; + int optionsCustomVideoTrackId = 5; + DirectCdnStreamingMediaOptions options = DirectCdnStreamingMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishMicrophoneTrack: optionsPublishMicrophoneTrack, publishCustomAudioTrack: optionsPublishCustomAudioTrack, @@ -9757,11 +9725,11 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String sound1 = "hello"; - const String sound2 = "hello"; - const int configBeatsPerMeasure = 10; - const int configBeatsPerMinute = 10; - const AgoraRhythmPlayerConfig config = AgoraRhythmPlayerConfig( + String sound1 = "hello"; + String sound2 = "hello"; + int configBeatsPerMeasure = 5; + int configBeatsPerMinute = 5; + AgoraRhythmPlayerConfig config = AgoraRhythmPlayerConfig( beatsPerMeasure: configBeatsPerMeasure, beatsPerMinute: configBeatsPerMinute, ); @@ -9831,9 +9799,9 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int configBeatsPerMeasure = 10; - const int configBeatsPerMinute = 10; - const AgoraRhythmPlayerConfig config = AgoraRhythmPlayerConfig( + int configBeatsPerMeasure = 5; + int configBeatsPerMinute = 5; + AgoraRhythmPlayerConfig config = AgoraRhythmPlayerConfig( beatsPerMeasure: configBeatsPerMeasure, beatsPerMinute: configBeatsPerMinute, ); @@ -9870,8 +9838,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const String filePath = "hello"; + int uid = 5; + String filePath = "hello"; await rtcEngine.takeSnapshot( uid: 
uid, filePath: filePath, @@ -9906,12 +9874,12 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; - const String configExtraInfo = "hello"; - const String configServerConfig = "hello"; - const List configModules = []; - const int configModuleCount = 10; - const ContentInspectConfig config = ContentInspectConfig( + bool enabled = true; + String configExtraInfo = "hello"; + String configServerConfig = "hello"; + List configModules = []; + int configModuleCount = 5; + ContentInspectConfig config = ContentInspectConfig( extraInfo: configExtraInfo, serverConfig: configServerConfig, modules: configModules, @@ -9951,8 +9919,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int trackId = 10; - const int volume = 10; + int trackId = 5; + int volume = 5; await rtcEngine.adjustCustomAudioPublishVolume( trackId: trackId, volume: volume, @@ -9988,8 +9956,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int trackId = 10; - const int volume = 10; + int trackId = 5; + int volume = 5; await rtcEngine.adjustCustomAudioPlayoutVolume( trackId: trackId, volume: volume, @@ -10025,7 +9993,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const CloudProxyType proxyType = CloudProxyType.noneProxy; + CloudProxyType proxyType = CloudProxyType.noneProxy; await rtcEngine.setCloudProxy( proxyType, ); @@ -10059,11 +10027,11 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int optionsAudioProcessingChannels = 10; - const AdvancedAudioOptions options = AdvancedAudioOptions( + int optionsAudioProcessingChannels = 5; + AdvancedAudioOptions options = AdvancedAudioOptions( audioProcessingChannels: optionsAudioProcessingChannels, ); - const int sourceType = 10; + int sourceType = 5; await rtcEngine.setAdvancedAudioOptions( options: options, sourceType: sourceType, @@ -10099,8 +10067,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String channelId = "hello"; - const int uid = 10; + String channelId = "hello"; + int uid = 5; await rtcEngine.setAVSyncSource( channelId: channelId, uid: uid, @@ -10135,12 +10103,12 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enable = true; - const VideoMirrorModeType optionsMirrorMode = + bool enable = true; + VideoMirrorModeType optionsMirrorMode = VideoMirrorModeType.videoMirrorModeAuto; - const String optionsImageUrl = "hello"; - const int optionsFps = 10; - const ImageTrackOptions options = ImageTrackOptions( + String optionsImageUrl = "hello"; + int optionsFps = 5; + ImageTrackOptions options = ImageTrackOptions( imageUrl: optionsImageUrl, fps: optionsFps, mirrorMode: optionsMirrorMode, @@ -10212,7 +10180,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; + bool enabled = true; await rtcEngine.enableWirelessAccelerate( enabled, ); @@ -10278,7 +10246,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const String parameters = "hello"; + String parameters = "hello"; await rtcEngine.setParameters( parameters, ); @@ -10407,7 +10375,7 @@ void rtcEngineSmokeTestCases() { 
await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const FeatureType type = FeatureType.videoVirtualBackground; + FeatureType type = FeatureType.videoVirtualBackground; await rtcEngine.isFeatureAvailableOnDevice( type, ); @@ -10442,8 +10410,8 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - Uint8List metadata = Uint8List.fromList([1, 2, 3, 4, 5]); - const int length = 10; + Uint8List metadata = Uint8List.fromList([1, 1, 1, 1, 1]); + int length = 5; await rtcEngine.sendAudioMetadata( metadata: metadata, length: length, @@ -10478,34 +10446,33 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoSourceType sourceType = - VideoSourceType.videoSourceCameraPrimary; - const int screenRectX = 10; - const int screenRectY = 10; - const int screenRectWidth = 10; - const int screenRectHeight = 10; - const Rectangle configScreenRect = Rectangle( + VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary; + int screenRectX = 5; + int screenRectY = 5; + int screenRectWidth = 5; + int screenRectHeight = 5; + Rectangle configScreenRect = Rectangle( x: screenRectX, y: screenRectY, width: screenRectWidth, height: screenRectHeight, ); - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions paramsDimensions = VideoDimensions( + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions paramsDimensions = VideoDimensions( width: dimensionsWidth, height: dimensionsHeight, ); - const int paramsFrameRate = 10; - const int paramsBitrate = 10; - const bool paramsCaptureMouseCursor = true; - const bool paramsWindowFocus = true; - const List paramsExcludeWindowList = []; - const int paramsExcludeWindowCount = 10; - const int paramsHighLightWidth = 10; - const int paramsHighLightColor = 10; - const bool paramsEnableHighLight = true; - const ScreenCaptureParameters configParams = ScreenCaptureParameters( + int paramsFrameRate = 5; + int paramsBitrate = 5; + bool paramsCaptureMouseCursor = true; + bool paramsWindowFocus = true; + List paramsExcludeWindowList = List.filled(5, 5); + int paramsExcludeWindowCount = 5; + int paramsHighLightWidth = 5; + int paramsHighLightColor = 5; + bool paramsEnableHighLight = true; + ScreenCaptureParameters configParams = ScreenCaptureParameters( dimensions: paramsDimensions, frameRate: paramsFrameRate, bitrate: paramsBitrate, @@ -10517,20 +10484,20 @@ void rtcEngineSmokeTestCases() { highLightColor: paramsHighLightColor, enableHighLight: paramsEnableHighLight, ); - const int regionRectX = 10; - const int regionRectY = 10; - const int regionRectWidth = 10; - const int regionRectHeight = 10; - const Rectangle configRegionRect = Rectangle( + int regionRectX = 5; + int regionRectY = 5; + int regionRectWidth = 5; + int regionRectHeight = 5; + Rectangle configRegionRect = Rectangle( x: regionRectX, y: regionRectY, width: regionRectWidth, height: regionRectHeight, ); - const bool configIsCaptureWindow = true; - const int configDisplayId = 10; - const int configWindowId = 10; - const ScreenCaptureConfiguration config = ScreenCaptureConfiguration( + bool configIsCaptureWindow = true; + int configDisplayId = 5; + int configWindowId = 5; + ScreenCaptureConfiguration config = ScreenCaptureConfiguration( isCaptureWindow: configIsCaptureWindow, displayId: configDisplayId, screenRect: configScreenRect, @@ -10573,8 +10540,7 @@ void rtcEngineSmokeTestCases() { await 
rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoSourceType sourceType = - VideoSourceType.videoSourceCameraPrimary; + VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary; await rtcEngine.stopScreenCaptureBySourceType( sourceType, ); @@ -10609,7 +10575,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool sync = true; + bool sync = true; await rtcEngine.release( sync: sync, ); @@ -10865,18 +10831,17 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int metadataUid = 10; - const int metadataSize = 10; - Uint8List metadataBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int metadataTimeStampMs = 10; - final Metadata metadata = Metadata( + int metadataUid = 5; + int metadataSize = 5; + Uint8List metadataBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int metadataTimeStampMs = 5; + Metadata metadata = Metadata( uid: metadataUid, size: metadataSize, buffer: metadataBuffer, timeStampMs: metadataTimeStampMs, ); - const VideoSourceType sourceType = - VideoSourceType.videoSourceCameraPrimary; + VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary; await rtcEngine.sendMetaData( metadata: metadata, sourceType: sourceType, @@ -10911,7 +10876,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - const int size = 10; + int size = 5; await rtcEngine.setMaxMetadataSize( size, ); @@ -10945,7 +10910,7 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { - final AudioEncodedFrameObserver observer = AudioEncodedFrameObserver( + AudioEncodedFrameObserver observer = AudioEncodedFrameObserver( onRecordAudioEncodedFrame: (Uint8List frameBuffer, int length, EncodedAudioFrameInfo audioEncodedFrameInfo) {}, onPlaybackAudioEncodedFrame: (Uint8List frameBuffer, int length, diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_metadataobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_metadataobserver_testcases.generated.dart index 57d65ec41..c8fef3989 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_metadataobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_metadataobserver_testcases.generated.dart @@ -31,7 +31,7 @@ void generatedTestCases(ValueGetter irisTester) { }, ); - const MetadataType type = MetadataType.unknownMetadata; + MetadataType type = MetadataType.unknownMetadata; rtcEngine.registerMediaMetadataObserver( observer: theMetadataObserver, @@ -42,11 +42,11 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int metadataUid = 10; - const int metadataSize = 10; - Uint8List metadataBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int metadataTimeStampMs = 10; - final Metadata metadata = Metadata( + int metadataUid = 5; + int metadataSize = 5; + Uint8List metadataBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); + int metadataTimeStampMs = 5; + Metadata metadata = Metadata( uid: metadataUid, size: metadataSize, buffer: metadataBuffer, @@ -76,7 +76,7 @@ void generatedTestCases(ValueGetter irisTester) { expect(eventCalled, isTrue); { - const MetadataType type = MetadataType.unknownMetadata; + MetadataType type = MetadataType.unknownMetadata; rtcEngine.unregisterMediaMetadataObserver( observer: 
theMetadataObserver, diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart index 96f659986..46d313173 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart @@ -39,13 +39,13 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int elapsed = 10; + int elapsed = 5; final eventJson = { 'connection': connection.toJson(), @@ -108,13 +108,13 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int elapsed = 10; + int elapsed = 5; final eventJson = { 'connection': connection.toJson(), @@ -179,11 +179,11 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String channel = "hello"; - const int uid = 10; - const ProxyType proxyType = ProxyType.noneProxyType; - const String localProxyIp = "hello"; - const int elapsed = 10; + String channel = "hello"; + int uid = 5; + ProxyType proxyType = ProxyType.noneProxyType; + String localProxyIp = "hello"; + int elapsed = 5; final eventJson = { 'channel': channel, @@ -249,8 +249,8 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const ErrorCodeType err = ErrorCodeType.errOk; - const String msg = "hello"; + ErrorCodeType err = ErrorCodeType.errOk; + String msg = "hello"; final eventJson = { 'err': err.value(), @@ -313,16 +313,16 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const QualityType quality = QualityType.qualityUnknown; - const int delay = 10; - const int lost = 10; + int remoteUid = 5; + QualityType quality = QualityType.qualityUnknown; + int delay = 5; + int lost = 5; final eventJson = { 'connection': connection.toJson(), @@ -388,28 +388,28 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const LastmileProbeResultState resultState = + LastmileProbeResultState resultState = LastmileProbeResultState.lastmileProbeResultComplete; - const int uplinkReportPacketLossRate = 10; - const int uplinkReportJitter = 10; - const int uplinkReportAvailableBandwidth = 10; - 
const LastmileProbeOneWayResult resultUplinkReport = + int uplinkReportPacketLossRate = 5; + int uplinkReportJitter = 5; + int uplinkReportAvailableBandwidth = 5; + LastmileProbeOneWayResult resultUplinkReport = LastmileProbeOneWayResult( packetLossRate: uplinkReportPacketLossRate, jitter: uplinkReportJitter, availableBandwidth: uplinkReportAvailableBandwidth, ); - const int downlinkReportPacketLossRate = 10; - const int downlinkReportJitter = 10; - const int downlinkReportAvailableBandwidth = 10; - const LastmileProbeOneWayResult resultDownlinkReport = + int downlinkReportPacketLossRate = 5; + int downlinkReportJitter = 5; + int downlinkReportAvailableBandwidth = 5; + LastmileProbeOneWayResult resultDownlinkReport = LastmileProbeOneWayResult( packetLossRate: downlinkReportPacketLossRate, jitter: downlinkReportJitter, availableBandwidth: downlinkReportAvailableBandwidth, ); - const int resultRtt = 10; - const LastmileProbeResult result = LastmileProbeResult( + int resultRtt = 5; + LastmileProbeResult result = LastmileProbeResult( state: resultState, uplinkReport: resultUplinkReport, downlinkReport: resultDownlinkReport, @@ -478,15 +478,29 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const List speakers = []; - const int speakerNumber = 10; - const int totalVolume = 10; + final List speakers = () { + int speakersItemUid = 5; + int speakersItemVolume = 5; + int speakersItemVad = 5; + double speakersItemVoicePitch = 5.0; + AudioVolumeInfo speakersItem = AudioVolumeInfo( + uid: speakersItemUid, + volume: speakersItemVolume, + vad: speakersItemVad, + voicePitch: speakersItemVoicePitch, + ); + + return List.filled(5, speakersItem); + }(); + + int speakerNumber = 5; + int totalVolume = 5; final eventJson = { 'connection': connection.toJson(), @@ -552,46 +566,46 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int statsDuration = 10; - const int statsTxBytes = 10; - const int statsRxBytes = 10; - const int statsTxAudioBytes = 10; - const int statsTxVideoBytes = 10; - const int statsRxAudioBytes = 10; - const int statsRxVideoBytes = 10; - const int statsTxKBitRate = 10; - const int statsRxKBitRate = 10; - const int statsRxAudioKBitRate = 10; - const int statsTxAudioKBitRate = 10; - const int statsRxVideoKBitRate = 10; - const int statsTxVideoKBitRate = 10; - const int statsLastmileDelay = 10; - const int statsUserCount = 10; - const double statsCpuAppUsage = 10.0; - const double statsCpuTotalUsage = 10.0; - const int statsGatewayRtt = 10; - const double statsMemoryAppUsageRatio = 10.0; - const double statsMemoryTotalUsageRatio = 10.0; - const int statsMemoryAppUsageInKbytes = 10; - const int statsConnectTimeMs = 10; - const int statsFirstAudioPacketDuration = 10; - const int statsFirstVideoPacketDuration = 10; - const int 
statsFirstVideoKeyFramePacketDuration = 10; - const int statsPacketsBeforeFirstKeyFramePacket = 10; - const int statsFirstAudioPacketDurationAfterUnmute = 10; - const int statsFirstVideoPacketDurationAfterUnmute = 10; - const int statsFirstVideoKeyFramePacketDurationAfterUnmute = 10; - const int statsFirstVideoKeyFrameDecodedDurationAfterUnmute = 10; - const int statsFirstVideoKeyFrameRenderedDurationAfterUnmute = 10; - const int statsTxPacketLossRate = 10; - const int statsRxPacketLossRate = 10; - const RtcStats stats = RtcStats( + int statsDuration = 5; + int statsTxBytes = 5; + int statsRxBytes = 5; + int statsTxAudioBytes = 5; + int statsTxVideoBytes = 5; + int statsRxAudioBytes = 5; + int statsRxVideoBytes = 5; + int statsTxKBitRate = 5; + int statsRxKBitRate = 5; + int statsRxAudioKBitRate = 5; + int statsTxAudioKBitRate = 5; + int statsRxVideoKBitRate = 5; + int statsTxVideoKBitRate = 5; + int statsLastmileDelay = 5; + int statsUserCount = 5; + double statsCpuAppUsage = 5.0; + double statsCpuTotalUsage = 5.0; + int statsGatewayRtt = 5; + double statsMemoryAppUsageRatio = 5.0; + double statsMemoryTotalUsageRatio = 5.0; + int statsMemoryAppUsageInKbytes = 5; + int statsConnectTimeMs = 5; + int statsFirstAudioPacketDuration = 5; + int statsFirstVideoPacketDuration = 5; + int statsFirstVideoKeyFramePacketDuration = 5; + int statsPacketsBeforeFirstKeyFramePacket = 5; + int statsFirstAudioPacketDurationAfterUnmute = 5; + int statsFirstVideoPacketDurationAfterUnmute = 5; + int statsFirstVideoKeyFramePacketDurationAfterUnmute = 5; + int statsFirstVideoKeyFrameDecodedDurationAfterUnmute = 5; + int statsFirstVideoKeyFrameRenderedDurationAfterUnmute = 5; + int statsTxPacketLossRate = 5; + int statsRxPacketLossRate = 5; + RtcStats stats = RtcStats( duration: statsDuration, txBytes: statsTxBytes, rxBytes: statsRxBytes, @@ -695,46 +709,46 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int statsDuration = 10; - const int statsTxBytes = 10; - const int statsRxBytes = 10; - const int statsTxAudioBytes = 10; - const int statsTxVideoBytes = 10; - const int statsRxAudioBytes = 10; - const int statsRxVideoBytes = 10; - const int statsTxKBitRate = 10; - const int statsRxKBitRate = 10; - const int statsRxAudioKBitRate = 10; - const int statsTxAudioKBitRate = 10; - const int statsRxVideoKBitRate = 10; - const int statsTxVideoKBitRate = 10; - const int statsLastmileDelay = 10; - const int statsUserCount = 10; - const double statsCpuAppUsage = 10.0; - const double statsCpuTotalUsage = 10.0; - const int statsGatewayRtt = 10; - const double statsMemoryAppUsageRatio = 10.0; - const double statsMemoryTotalUsageRatio = 10.0; - const int statsMemoryAppUsageInKbytes = 10; - const int statsConnectTimeMs = 10; - const int statsFirstAudioPacketDuration = 10; - const int statsFirstVideoPacketDuration = 10; - const int statsFirstVideoKeyFramePacketDuration = 10; - const int statsPacketsBeforeFirstKeyFramePacket = 10; - const int statsFirstAudioPacketDurationAfterUnmute = 10; - const int statsFirstVideoPacketDurationAfterUnmute = 10; - const int statsFirstVideoKeyFramePacketDurationAfterUnmute = 10; - const int 
statsFirstVideoKeyFrameDecodedDurationAfterUnmute = 10; - const int statsFirstVideoKeyFrameRenderedDurationAfterUnmute = 10; - const int statsTxPacketLossRate = 10; - const int statsRxPacketLossRate = 10; - const RtcStats stats = RtcStats( + int statsDuration = 5; + int statsTxBytes = 5; + int statsRxBytes = 5; + int statsTxAudioBytes = 5; + int statsTxVideoBytes = 5; + int statsRxAudioBytes = 5; + int statsRxVideoBytes = 5; + int statsTxKBitRate = 5; + int statsRxKBitRate = 5; + int statsRxAudioKBitRate = 5; + int statsTxAudioKBitRate = 5; + int statsRxVideoKBitRate = 5; + int statsTxVideoKBitRate = 5; + int statsLastmileDelay = 5; + int statsUserCount = 5; + double statsCpuAppUsage = 5.0; + double statsCpuTotalUsage = 5.0; + int statsGatewayRtt = 5; + double statsMemoryAppUsageRatio = 5.0; + double statsMemoryTotalUsageRatio = 5.0; + int statsMemoryAppUsageInKbytes = 5; + int statsConnectTimeMs = 5; + int statsFirstAudioPacketDuration = 5; + int statsFirstVideoPacketDuration = 5; + int statsFirstVideoKeyFramePacketDuration = 5; + int statsPacketsBeforeFirstKeyFramePacket = 5; + int statsFirstAudioPacketDurationAfterUnmute = 5; + int statsFirstVideoPacketDurationAfterUnmute = 5; + int statsFirstVideoKeyFramePacketDurationAfterUnmute = 5; + int statsFirstVideoKeyFrameDecodedDurationAfterUnmute = 5; + int statsFirstVideoKeyFrameRenderedDurationAfterUnmute = 5; + int statsTxPacketLossRate = 5; + int statsRxPacketLossRate = 5; + RtcStats stats = RtcStats( duration: statsDuration, txBytes: statsTxBytes, rxBytes: statsRxBytes, @@ -839,9 +853,9 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String deviceId = "hello"; - const MediaDeviceType deviceType = MediaDeviceType.unknownAudioDevice; - const MediaDeviceStateType deviceState = + String deviceId = "hello"; + MediaDeviceType deviceType = MediaDeviceType.unknownAudioDevice; + MediaDeviceStateType deviceState = MediaDeviceStateType.mediaDeviceStateIdle; final eventJson = { @@ -907,7 +921,7 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int position = 10; + int position = 5; final eventJson = { 'position': position, @@ -1029,7 +1043,7 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int soundId = 10; + int soundId = 5; final eventJson = { 'soundId': soundId, @@ -1093,9 +1107,9 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String deviceId = "hello"; - const MediaDeviceType deviceType = MediaDeviceType.unknownAudioDevice; - const MediaDeviceStateType deviceState = + String deviceId = "hello"; + MediaDeviceType deviceType = MediaDeviceType.unknownAudioDevice; + MediaDeviceStateType deviceState = MediaDeviceStateType.mediaDeviceStateIdle; final eventJson = { @@ -1162,15 +1176,15 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const QualityType txQuality = QualityType.qualityUnknown; - const QualityType rxQuality = QualityType.qualityUnknown; + int remoteUid = 5; + 
QualityType txQuality = QualityType.qualityUnknown; + QualityType rxQuality = QualityType.qualityUnknown; final eventJson = { 'connection': connection.toJson(), @@ -1235,9 +1249,9 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1303,8 +1317,8 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int infoVideoEncoderTargetBitrateBps = 10; - const UplinkNetworkInfo info = UplinkNetworkInfo( + int infoVideoEncoderTargetBitrateBps = 5; + UplinkNetworkInfo info = UplinkNetworkInfo( videoEncoderTargetBitrateBps: infoVideoEncoderTargetBitrateBps, ); @@ -1344,80 +1358,6 @@ void generatedTestCases(ValueGetter irisTester) { timeout: const Timeout(Duration(minutes: 2)), ); - testWidgets( - 'RtcEngineEventHandler.onDownlinkNetworkInfoUpdated', - (WidgetTester tester) async { - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: 'app_id', - areaCode: AreaCode.areaCodeGlob.value(), - )); - await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); - - final onDownlinkNetworkInfoUpdatedCompleter = Completer(); - final theRtcEngineEventHandler = RtcEngineEventHandler( - onDownlinkNetworkInfoUpdated: (DownlinkNetworkInfo info) { - onDownlinkNetworkInfoUpdatedCompleter.complete(true); - }, - ); - - rtcEngine.registerEventHandler( - theRtcEngineEventHandler, - ); - -// Delay 500 milliseconds to ensure the registerEventHandler call completed. - await Future.delayed(const Duration(milliseconds: 500)); - - { - const int infoLastmileBufferDelayTimeMs = 10; - const int infoBandwidthEstimationBps = 10; - const int infoTotalDownscaleLevelCount = 10; - const List infoPeerDownlinkInfo = []; - const int infoTotalReceivedVideoCount = 10; - const DownlinkNetworkInfo info = DownlinkNetworkInfo( - lastmileBufferDelayTimeMs: infoLastmileBufferDelayTimeMs, - bandwidthEstimationBps: infoBandwidthEstimationBps, - totalDownscaleLevelCount: infoTotalDownscaleLevelCount, - peerDownlinkInfo: infoPeerDownlinkInfo, - totalReceivedVideoCount: infoTotalReceivedVideoCount, - ); - - final eventJson = { - 'info': info.toJson(), - }; - - final eventIds = eventIdsMapping[ - 'RtcEngineEventHandler_onDownlinkNetworkInfoUpdated'] ?? - []; - for (final event in eventIds) { - final ret = irisTester().fireEvent(event, params: eventJson); - // Delay 200 milliseconds to ensure the callback is called. - await Future.delayed(const Duration(milliseconds: 200)); - // TODO(littlegnal): Most of callbacks on web are not implemented, we're temporarily skip these callbacks at this time. - if (kIsWeb && ret) { - if (!onDownlinkNetworkInfoUpdatedCompleter.isCompleted) { - onDownlinkNetworkInfoUpdatedCompleter.complete(true); - } - } - } - } - - final eventCalled = await onDownlinkNetworkInfoUpdatedCompleter.future; - expect(eventCalled, isTrue); - - { - rtcEngine.unregisterEventHandler( - theRtcEngineEventHandler, - ); - } -// Delay 500 milliseconds to ensure the unregisterEventHandler call completed. 
- await Future.delayed(const Duration(milliseconds: 500)); - - await rtcEngine.release(); - }, - timeout: const Timeout(Duration(minutes: 2)), - ); - testWidgets( 'RtcEngineEventHandler.onLastmileQuality', (WidgetTester tester) async { @@ -1443,7 +1383,7 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const QualityType quality = QualityType.qualityUnknown; + QualityType quality = QualityType.qualityUnknown; final eventJson = { 'quality': quality.value(), @@ -1506,10 +1446,10 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const VideoSourceType source = VideoSourceType.videoSourceCameraPrimary; - const int width = 10; - const int height = 10; - const int elapsed = 10; + VideoSourceType source = VideoSourceType.videoSourceCameraPrimary; + int width = 5; + int height = 5; + int elapsed = 5; final eventJson = { 'source': source.value(), @@ -1575,8 +1515,8 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const VideoSourceType source = VideoSourceType.videoSourceCameraPrimary; - const int elapsed = 10; + VideoSourceType source = VideoSourceType.videoSourceCameraPrimary; + int elapsed = 5; final eventJson = { 'source': source.value(), @@ -1641,16 +1581,16 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const int width = 10; - const int height = 10; - const int elapsed = 10; + int remoteUid = 5; + int width = 5; + int height = 5; + int elapsed = 5; final eventJson = { 'connection': connection.toJson(), @@ -1722,18 +1662,17 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const VideoSourceType sourceType = - VideoSourceType.videoSourceCameraPrimary; - const int uid = 10; - const int width = 10; - const int height = 10; - const int rotation = 10; + VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary; + int uid = 5; + int width = 5; + int height = 5; + int rotation = 5; final eventJson = { 'connection': connection.toJson(), @@ -1801,10 +1740,10 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const VideoSourceType source = VideoSourceType.videoSourceCameraPrimary; - const LocalVideoStreamState state = + VideoSourceType source = VideoSourceType.videoSourceCameraPrimary; + LocalVideoStreamState state = LocalVideoStreamState.localVideoStreamStateStopped; - const LocalVideoStreamReason reason = + LocalVideoStreamReason reason = LocalVideoStreamReason.localVideoStreamReasonOk; final eventJson = { @@ -1874,17 +1813,17 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = 
"hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const RemoteVideoState state = RemoteVideoState.remoteVideoStateStopped; - const RemoteVideoStateReason reason = + int remoteUid = 5; + RemoteVideoState state = RemoteVideoState.remoteVideoStateStopped; + RemoteVideoStateReason reason = RemoteVideoStateReason.remoteVideoStateReasonInternal; - const int elapsed = 10; + int elapsed = 5; final eventJson = { 'connection': connection.toJson(), @@ -1952,16 +1891,16 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const int width = 10; - const int height = 10; - const int elapsed = 10; + int remoteUid = 5; + int width = 5; + int height = 5; + int elapsed = 5; final eventJson = { 'connection': connection.toJson(), @@ -2028,14 +1967,14 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const int elapsed = 10; + int remoteUid = 5; + int elapsed = 5; final eventJson = { 'connection': connection.toJson(), @@ -2100,15 +2039,14 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const UserOfflineReasonType reason = - UserOfflineReasonType.userOfflineQuit; + int remoteUid = 5; + UserOfflineReasonType reason = UserOfflineReasonType.userOfflineQuit; final eventJson = { 'connection': connection.toJson(), @@ -2172,14 +2110,14 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const bool muted = true; + int remoteUid = 5; + bool muted = true; final eventJson = { 'connection': connection.toJson(), @@ -2243,14 +2181,14 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String 
connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const bool muted = true; + int remoteUid = 5; + bool muted = true; final eventJson = { 'connection': connection.toJson(), @@ -2315,14 +2253,14 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const bool enabled = true; + int remoteUid = 5; + bool enabled = true; final eventJson = { 'connection': connection.toJson(), @@ -2387,14 +2325,14 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const int state = 10; + int remoteUid = 5; + int state = 5; final eventJson = { 'connection': connection.toJson(), @@ -2459,14 +2397,14 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const bool enabled = true; + int remoteUid = 5; + bool enabled = true; final eventJson = { 'connection': connection.toJson(), @@ -2531,32 +2469,32 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int statsUid = 10; - const int statsQuality = 10; - const int statsNetworkTransportDelay = 10; - const int statsJitterBufferDelay = 10; - const int statsAudioLossRate = 10; - const int statsNumChannels = 10; - const int statsReceivedSampleRate = 10; - const int statsReceivedBitrate = 10; - const int statsTotalFrozenTime = 10; - const int statsFrozenRate = 10; - const int statsMosValue = 10; - const int statsFrozenRateByCustomPlcCount = 10; - const int statsPlcCount = 10; - const int statsTotalActiveTime = 10; - const int statsPublishDuration = 10; - const int statsQoeQuality = 10; - const int statsQualityChangedReason = 10; - const int statsRxAudioBytes = 10; - const int statsE2eDelay = 10; - const RemoteAudioStats stats = RemoteAudioStats( + int statsUid = 5; + int statsQuality = 5; + int statsNetworkTransportDelay = 5; + int statsJitterBufferDelay = 5; + int statsAudioLossRate = 5; + int statsNumChannels = 5; + int statsReceivedSampleRate = 5; + int statsReceivedBitrate 
= 5; + int statsTotalFrozenTime = 5; + int statsFrozenRate = 5; + int statsMosValue = 5; + int statsFrozenRateByCustomPlcCount = 5; + int statsPlcCount = 5; + int statsTotalActiveTime = 5; + int statsPublishDuration = 5; + int statsQoeQuality = 5; + int statsQualityChangedReason = 5; + int statsRxAudioBytes = 5; + int statsE2eDelay = 5; + RemoteAudioStats stats = RemoteAudioStats( uid: statsUid, quality: statsQuality, networkTransportDelay: statsNetworkTransportDelay, @@ -2639,22 +2577,22 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int statsNumChannels = 10; - const int statsSentSampleRate = 10; - const int statsSentBitrate = 10; - const int statsInternalCodec = 10; - const int statsTxPacketLossRate = 10; - const int statsAudioDeviceDelay = 10; - const int statsAudioPlayoutDelay = 10; - const int statsEarMonitorDelay = 10; - const int statsAecEstimatedDelay = 10; - const LocalAudioStats stats = LocalAudioStats( + int statsNumChannels = 5; + int statsSentSampleRate = 5; + int statsSentBitrate = 5; + int statsInternalCodec = 5; + int statsTxPacketLossRate = 5; + int statsAudioDeviceDelay = 5; + int statsAudioPlayoutDelay = 5; + int statsEarMonitorDelay = 5; + int statsAecEstimatedDelay = 5; + LocalAudioStats stats = LocalAudioStats( numChannels: statsNumChannels, sentSampleRate: statsSentSampleRate, sentBitrate: statsSentBitrate, @@ -2727,33 +2665,33 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const VideoSourceType source = VideoSourceType.videoSourceCameraPrimary; - const QualityAdaptIndication statsQualityAdaptIndication = + VideoSourceType source = VideoSourceType.videoSourceCameraPrimary; + QualityAdaptIndication statsQualityAdaptIndication = QualityAdaptIndication.adaptNone; - const VideoCodecType statsCodecType = VideoCodecType.videoCodecNone; - const CaptureBrightnessLevelType statsCaptureBrightnessLevel = + VideoCodecType statsCodecType = VideoCodecType.videoCodecNone; + CaptureBrightnessLevelType statsCaptureBrightnessLevel = CaptureBrightnessLevelType.captureBrightnessLevelInvalid; - const int statsUid = 10; - const int statsSentBitrate = 10; - const int statsSentFrameRate = 10; - const int statsCaptureFrameRate = 10; - const int statsCaptureFrameWidth = 10; - const int statsCaptureFrameHeight = 10; - const int statsRegulatedCaptureFrameRate = 10; - const int statsRegulatedCaptureFrameWidth = 10; - const int statsRegulatedCaptureFrameHeight = 10; - const int statsEncoderOutputFrameRate = 10; - const int statsEncodedFrameWidth = 10; - const int statsEncodedFrameHeight = 10; - const int statsRendererOutputFrameRate = 10; - const int statsTargetBitrate = 10; - const int statsTargetFrameRate = 10; - const int statsEncodedBitrate = 10; - const int statsEncodedFrameCount = 10; - const int statsTxPacketLossRate = 10; - const bool statsDualStreamEnabled = true; - const int statsHwEncoderAccelerating = 10; - const LocalVideoStats stats = LocalVideoStats( + int statsUid = 5; + int statsSentBitrate = 5; + int statsSentFrameRate = 5; + int statsCaptureFrameRate = 5; + int statsCaptureFrameWidth = 5; + int statsCaptureFrameHeight = 5; + int 
statsRegulatedCaptureFrameRate = 5; + int statsRegulatedCaptureFrameWidth = 5; + int statsRegulatedCaptureFrameHeight = 5; + int statsEncoderOutputFrameRate = 5; + int statsEncodedFrameWidth = 5; + int statsEncodedFrameHeight = 5; + int statsRendererOutputFrameRate = 5; + int statsTargetBitrate = 5; + int statsTargetFrameRate = 5; + int statsEncodedBitrate = 5; + int statsEncodedFrameCount = 5; + int statsTxPacketLossRate = 5; + bool statsDualStreamEnabled = true; + int statsHwEncoderAccelerating = 5; + LocalVideoStats stats = LocalVideoStats( uid: statsUid, sentBitrate: statsSentBitrate, sentFrameRate: statsSentFrameRate, @@ -2840,32 +2778,31 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const VideoStreamType statsRxStreamType = - VideoStreamType.videoStreamHigh; - const int statsUid = 10; - const int statsDelay = 10; - const int statsE2eDelay = 10; - const int statsWidth = 10; - const int statsHeight = 10; - const int statsReceivedBitrate = 10; - const int statsDecoderOutputFrameRate = 10; - const int statsRendererOutputFrameRate = 10; - const int statsFrameLossRate = 10; - const int statsPacketLossRate = 10; - const int statsTotalFrozenTime = 10; - const int statsFrozenRate = 10; - const int statsAvSyncTimeMs = 10; - const int statsTotalActiveTime = 10; - const int statsPublishDuration = 10; - const int statsMosValue = 10; - const int statsRxVideoBytes = 10; - const RemoteVideoStats stats = RemoteVideoStats( + VideoStreamType statsRxStreamType = VideoStreamType.videoStreamHigh; + int statsUid = 5; + int statsDelay = 5; + int statsE2eDelay = 5; + int statsWidth = 5; + int statsHeight = 5; + int statsReceivedBitrate = 5; + int statsDecoderOutputFrameRate = 5; + int statsRendererOutputFrameRate = 5; + int statsFrameLossRate = 5; + int statsPacketLossRate = 5; + int statsTotalFrozenTime = 5; + int statsFrozenRate = 5; + int statsAvSyncTimeMs = 5; + int statsTotalActiveTime = 5; + int statsPublishDuration = 5; + int statsMosValue = 5; + int statsRxVideoBytes = 5; + RemoteVideoStats stats = RemoteVideoStats( uid: statsUid, delay: statsDelay, e2eDelay: statsE2eDelay, @@ -3005,10 +2942,10 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int x = 10; - const int y = 10; - const int width = 10; - const int height = 10; + int x = 5; + int y = 5; + int width = 5; + int height = 5; final eventJson = { 'x': x, @@ -3074,10 +3011,10 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int x = 10; - const int y = 10; - const int width = 10; - const int height = 10; + int x = 5; + int y = 5; + int width = 5; + int height = 5; final eventJson = { 'x': x, @@ -3202,9 +3139,9 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const AudioMixingStateType state = + AudioMixingStateType state = AudioMixingStateType.audioMixingStatePlaying; - const AudioMixingReasonType reason = + AudioMixingReasonType reason = AudioMixingReasonType.audioMixingReasonCanNotOpen; final eventJson = { @@ -3270,10 +3207,9 @@ void generatedTestCases(ValueGetter 
irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const RhythmPlayerStateType state = + RhythmPlayerStateType state = RhythmPlayerStateType.rhythmPlayerStateIdle; - const RhythmPlayerReason reason = - RhythmPlayerReason.rhythmPlayerReasonOk; + RhythmPlayerReason reason = RhythmPlayerReason.rhythmPlayerReasonOk; final eventJson = { 'state': state.value(), @@ -3337,9 +3273,9 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -3404,9 +3340,9 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -3472,9 +3408,9 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -3514,84 +3450,6 @@ void generatedTestCases(ValueGetter irisTester) { timeout: const Timeout(Duration(minutes: 2)), ); - testWidgets( - 'RtcEngineEventHandler.onStreamMessage', - (WidgetTester tester) async { - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: 'app_id', - areaCode: AreaCode.areaCodeGlob.value(), - )); - await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); - - final onStreamMessageCompleter = Completer(); - final theRtcEngineEventHandler = RtcEngineEventHandler( - onStreamMessage: (RtcConnection connection, int remoteUid, int streamId, - Uint8List data, int length, int sentTs) { - onStreamMessageCompleter.complete(true); - }, - ); - - rtcEngine.registerEventHandler( - theRtcEngineEventHandler, - ); - -// Delay 500 milliseconds to ensure the registerEventHandler call completed. - await Future.delayed(const Duration(milliseconds: 500)); - - { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( - channelId: connectionChannelId, - localUid: connectionLocalUid, - ); - const int remoteUid = 10; - const int streamId = 10; - Uint8List data = Uint8List.fromList([1, 2, 3, 4, 5]); - const int length = 10; - const int sentTs = 10; - - final eventJson = { - 'connection': connection.toJson(), - 'remoteUid': remoteUid, - 'streamId': streamId, - 'data': data.toList(), - 'length': length, - 'sentTs': sentTs, - }; - - final eventIds = - eventIdsMapping['RtcEngineEventHandler_onStreamMessage'] ?? []; - for (final event in eventIds) { - final ret = irisTester().fireEvent(event, params: eventJson); - // Delay 200 milliseconds to ensure the callback is called. 
- await Future.delayed(const Duration(milliseconds: 200)); - // TODO(littlegnal): Most of callbacks on web are not implemented, we're temporarily skip these callbacks at this time. - if (kIsWeb && ret) { - if (!onStreamMessageCompleter.isCompleted) { - onStreamMessageCompleter.complete(true); - } - } - } - } - - final eventCalled = await onStreamMessageCompleter.future; - expect(eventCalled, isTrue); - - { - rtcEngine.unregisterEventHandler( - theRtcEngineEventHandler, - ); - } -// Delay 500 milliseconds to ensure the unregisterEventHandler call completed. - await Future.delayed(const Duration(milliseconds: 500)); - - await rtcEngine.release(); - }, - timeout: const Timeout(Duration(minutes: 2)), - ); - testWidgets( 'RtcEngineEventHandler.onStreamMessageError', (WidgetTester tester) async { @@ -3618,17 +3476,17 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const int streamId = 10; - const ErrorCodeType code = ErrorCodeType.errOk; - const int missed = 10; - const int cached = 10; + int remoteUid = 5; + int streamId = 5; + ErrorCodeType code = ErrorCodeType.errOk; + int missed = 5; + int cached = 5; final eventJson = { 'connection': connection.toJson(), @@ -3695,9 +3553,9 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -3762,13 +3620,13 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const String token = "hello"; + String token = "hello"; final eventJson = { 'connection': connection.toJson(), @@ -3833,13 +3691,13 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const LicenseErrorType reason = LicenseErrorType.licenseErrInvalid; + LicenseErrorType reason = LicenseErrorType.licenseErrInvalid; final eventJson = { 'connection': connection.toJson(), @@ -3904,13 +3762,13 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + 
RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int elapsed = 10; + int elapsed = 5; final eventJson = { 'connection': connection.toJson(), @@ -3975,14 +3833,14 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int uid = 10; - const int elapsed = 10; + int uid = 5; + int elapsed = 5; final eventJson = { 'connection': connection.toJson(), @@ -4048,14 +3906,14 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int userId = 10; - const int elapsed = 10; + int userId = 5; + int elapsed = 5; final eventJson = { 'connection': connection.toJson(), @@ -4121,15 +3979,15 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const LocalAudioStreamState state = + LocalAudioStreamState state = LocalAudioStreamState.localAudioStreamStateStopped; - const LocalAudioStreamReason reason = + LocalAudioStreamReason reason = LocalAudioStreamReason.localAudioStreamReasonOk; final eventJson = { @@ -4199,17 +4057,17 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const RemoteAudioState state = RemoteAudioState.remoteAudioStateStopped; - const RemoteAudioStateReason reason = + int remoteUid = 5; + RemoteAudioState state = RemoteAudioState.remoteAudioStateStopped; + RemoteAudioStateReason reason = RemoteAudioStateReason.remoteAudioReasonInternal; - const int elapsed = 10; + int elapsed = 5; final eventJson = { 'connection': connection.toJson(), @@ -4276,13 +4134,13 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int uid = 10; + int uid = 5; final eventJson = { 'connection': connection.toJson(), @@ -4345,7 +4203,7 @@ void generatedTestCases(ValueGetter 
irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const ContentInspectResult result = + ContentInspectResult result = ContentInspectResult.contentInspectNeutral; final eventJson = { @@ -4410,17 +4268,17 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int uid = 10; - const String filePath = "hello"; - const int width = 10; - const int height = 10; - const int errCode = 10; + int uid = 5; + String filePath = "hello"; + int width = 5; + int height = 5; + int errCode = 5; final eventJson = { 'connection': connection.toJson(), @@ -4488,17 +4346,17 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const ClientRoleType oldRole = ClientRoleType.clientRoleBroadcaster; - const ClientRoleType newRole = ClientRoleType.clientRoleBroadcaster; - const AudienceLatencyLevelType newRoleOptionsAudienceLatencyLevel = + ClientRoleType oldRole = ClientRoleType.clientRoleBroadcaster; + ClientRoleType newRole = ClientRoleType.clientRoleBroadcaster; + AudienceLatencyLevelType newRoleOptionsAudienceLatencyLevel = AudienceLatencyLevelType.audienceLatencyLevelLowLatency; - const ClientRoleOptions newRoleOptions = ClientRoleOptions( + ClientRoleOptions newRoleOptions = ClientRoleOptions( audienceLatencyLevel: newRoleOptionsAudienceLatencyLevel, ); @@ -4566,15 +4424,15 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const ClientRoleChangeFailedReason reason = ClientRoleChangeFailedReason + ClientRoleChangeFailedReason reason = ClientRoleChangeFailedReason .clientRoleChangeFailedTooManyBroadcasters; - const ClientRoleType currentRole = ClientRoleType.clientRoleBroadcaster; + ClientRoleType currentRole = ClientRoleType.clientRoleBroadcaster; final eventJson = { 'connection': connection.toJson(), @@ -4640,9 +4498,9 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const MediaDeviceType deviceType = MediaDeviceType.unknownAudioDevice; - const int volume = 10; - const bool muted = true; + MediaDeviceType deviceType = MediaDeviceType.unknownAudioDevice; + int volume = 5; + bool muted = true; final eventJson = { 'deviceType': deviceType.value(), @@ -4708,10 +4566,10 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String url = "hello"; - const RtmpStreamPublishState state = + String url = "hello"; + RtmpStreamPublishState state = 
RtmpStreamPublishState.rtmpStreamPublishStateIdle; - const RtmpStreamPublishReason reason = + RtmpStreamPublishReason reason = RtmpStreamPublishReason.rtmpStreamPublishReasonOk; final eventJson = { @@ -4777,8 +4635,8 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String url = "hello"; - const RtmpStreamingEvent eventCode = + String url = "hello"; + RtmpStreamingEvent eventCode = RtmpStreamingEvent.rtmpStreamingEventFailedLoadImage; final eventJson = { @@ -4900,7 +4758,7 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int routing = 10; + int routing = 5; final eventJson = { 'routing': routing, @@ -4964,9 +4822,8 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const ChannelMediaRelayState state = - ChannelMediaRelayState.relayStateIdle; - const ChannelMediaRelayError code = ChannelMediaRelayError.relayOk; + ChannelMediaRelayState state = ChannelMediaRelayState.relayStateIdle; + ChannelMediaRelayError code = ChannelMediaRelayError.relayOk; final eventJson = { 'state': state.value(), @@ -5030,7 +4887,7 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const bool isFallbackOrRecover = true; + bool isFallbackOrRecover = true; final eventJson = { 'isFallbackOrRecover': isFallbackOrRecover, @@ -5095,8 +4952,8 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int uid = 10; - const bool isFallbackOrRecover = true; + int uid = 5; + bool isFallbackOrRecover = true; final eventJson = { 'uid': uid, @@ -5162,16 +5019,16 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const int delay = 10; - const int lost = 10; - const int rxKBitRate = 10; + int remoteUid = 5; + int delay = 5; + int lost = 5; + int rxKBitRate = 5; final eventJson = { 'connection': connection.toJson(), @@ -5239,16 +5096,16 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const int delay = 10; - const int lost = 10; - const int rxKBitRate = 10; + int remoteUid = 5; + int delay = 5; + int lost = 5; + int rxKBitRate = 5; final eventJson = { 'connection': connection.toJson(), @@ -5316,15 +5173,15 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, 
); - const ConnectionStateType state = + ConnectionStateType state = ConnectionStateType.connectionStateDisconnected; - const ConnectionChangedReasonType reason = + ConnectionChangedReasonType reason = ConnectionChangedReasonType.connectionChangedConnecting; final eventJson = { @@ -5391,17 +5248,17 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const WlaccMessageReason reason = + WlaccMessageReason reason = WlaccMessageReason.wlaccMessageReasonWeakSignal; - const WlaccSuggestAction action = + WlaccSuggestAction action = WlaccSuggestAction.wlaccSuggestActionCloseToWifi; - const String wlAccMsg = "hello"; + String wlAccMsg = "hello"; final eventJson = { 'connection': connection.toJson(), @@ -5467,24 +5324,24 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int currentStatsE2eDelayPercent = 10; - const int currentStatsFrozenRatioPercent = 10; - const int currentStatsLossRatePercent = 10; - const WlAccStats currentStats = WlAccStats( + int currentStatsE2eDelayPercent = 5; + int currentStatsFrozenRatioPercent = 5; + int currentStatsLossRatePercent = 5; + WlAccStats currentStats = WlAccStats( e2eDelayPercent: currentStatsE2eDelayPercent, frozenRatioPercent: currentStatsFrozenRatioPercent, lossRatePercent: currentStatsLossRatePercent, ); - const int averageStatsE2eDelayPercent = 10; - const int averageStatsFrozenRatioPercent = 10; - const int averageStatsLossRatePercent = 10; - const WlAccStats averageStats = WlAccStats( + int averageStatsE2eDelayPercent = 5; + int averageStatsFrozenRatioPercent = 5; + int averageStatsLossRatePercent = 5; + WlAccStats averageStats = WlAccStats( e2eDelayPercent: averageStatsE2eDelayPercent, frozenRatioPercent: averageStatsFrozenRatioPercent, lossRatePercent: averageStatsLossRatePercent, @@ -5552,13 +5409,13 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const NetworkType type = NetworkType.networkTypeUnknown; + NetworkType type = NetworkType.networkTypeUnknown; final eventJson = { 'connection': connection.toJson(), @@ -5622,13 +5479,13 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: 
connectionLocalUid, ); - const EncryptionErrorType errorType = + EncryptionErrorType errorType = EncryptionErrorType.encryptionErrorInternalFailure; final eventJson = { @@ -5692,7 +5549,7 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const PermissionType permissionType = PermissionType.recordAudio; + PermissionType permissionType = PermissionType.recordAudio; final eventJson = { 'permissionType': permissionType.value(), @@ -5754,8 +5611,8 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int uid = 10; - const String userAccount = "hello"; + int uid = 5; + String userAccount = "hello"; final eventJson = { 'uid': uid, @@ -5819,10 +5676,10 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const int uid = 10; - const int infoUid = 10; - const String infoUserAccount = "hello"; - const UserInfo info = UserInfo( + int uid = 5; + int infoUid = 5; + String infoUserAccount = "hello"; + UserInfo info = UserInfo( uid: infoUid, userAccount: infoUserAccount, ); @@ -5889,14 +5746,14 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int remoteUid = 10; - const String remoteUserAccount = "hello"; + int remoteUid = 5; + String remoteUserAccount = "hello"; final eventJson = { 'connection': connection.toJson(), @@ -5963,23 +5820,23 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int uid = 10; - const MediaTraceEvent currentEvent = + int uid = 5; + MediaTraceEvent currentEvent = MediaTraceEvent.mediaTraceEventVideoRendered; - const int tracingInfoElapsedTime = 10; - const int tracingInfoStart2JoinChannel = 10; - const int tracingInfoJoin2JoinSuccess = 10; - const int tracingInfoJoinSuccess2RemoteJoined = 10; - const int tracingInfoRemoteJoined2SetView = 10; - const int tracingInfoRemoteJoined2UnmuteVideo = 10; - const int tracingInfoRemoteJoined2PacketReceived = 10; - const VideoRenderingTracingInfo tracingInfo = VideoRenderingTracingInfo( + int tracingInfoElapsedTime = 5; + int tracingInfoStart2JoinChannel = 5; + int tracingInfoJoin2JoinSuccess = 5; + int tracingInfoJoinSuccess2RemoteJoined = 5; + int tracingInfoRemoteJoined2SetView = 5; + int tracingInfoRemoteJoined2UnmuteVideo = 5; + int tracingInfoRemoteJoined2PacketReceived = 5; + VideoRenderingTracingInfo tracingInfo = VideoRenderingTracingInfo( elapsedTime: tracingInfoElapsedTime, start2JoinChannel: tracingInfoStart2JoinChannel, join2JoinSuccess: tracingInfoJoin2JoinSuccess, @@ -6054,19 +5911,19 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const VideoSourceType streamSourceType = + VideoSourceType streamSourceType = 
VideoSourceType.videoSourceCameraPrimary; - const int streamRemoteUserUid = 10; - const String streamImageUrl = "hello"; - const int streamMediaPlayerId = 10; - const int streamX = 10; - const int streamY = 10; - const int streamWidth = 10; - const int streamHeight = 10; - const int streamZOrder = 10; - const double streamAlpha = 10.0; - const bool streamMirror = true; - const TranscodingVideoStream stream = TranscodingVideoStream( + int streamRemoteUserUid = 5; + String streamImageUrl = "hello"; + int streamMediaPlayerId = 5; + int streamX = 5; + int streamY = 5; + int streamWidth = 5; + int streamHeight = 5; + int streamZOrder = 5; + double streamAlpha = 5.0; + bool streamMirror = true; + TranscodingVideoStream stream = TranscodingVideoStream( sourceType: streamSourceType, remoteUserUid: streamRemoteUserUid, imageUrl: streamImageUrl, @@ -6079,7 +5936,7 @@ void generatedTestCases(ValueGetter irisTester) { alpha: streamAlpha, mirror: streamMirror, ); - const VideoTranscoderError error = + VideoTranscoderError error = VideoTranscoderError.vtErrVideoSourceNotReady; final eventJson = { @@ -6145,15 +6002,15 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const String requestId = "hello"; - const bool success = true; - const UploadErrorReason reason = UploadErrorReason.uploadSuccess; + String requestId = "hello"; + bool success = true; + UploadErrorReason reason = UploadErrorReason.uploadSuccess; final eventJson = { 'connection': connection.toJson(), @@ -6222,11 +6079,11 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String channel = "hello"; - const int uid = 10; - const StreamSubscribeState oldState = StreamSubscribeState.subStateIdle; - const StreamSubscribeState newState = StreamSubscribeState.subStateIdle; - const int elapseSinceLastState = 10; + String channel = "hello"; + int uid = 5; + StreamSubscribeState oldState = StreamSubscribeState.subStateIdle; + StreamSubscribeState newState = StreamSubscribeState.subStateIdle; + int elapseSinceLastState = 5; final eventJson = { 'channel': channel, @@ -6297,11 +6154,11 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String channel = "hello"; - const int uid = 10; - const StreamSubscribeState oldState = StreamSubscribeState.subStateIdle; - const StreamSubscribeState newState = StreamSubscribeState.subStateIdle; - const int elapseSinceLastState = 10; + String channel = "hello"; + int uid = 5; + StreamSubscribeState oldState = StreamSubscribeState.subStateIdle; + StreamSubscribeState newState = StreamSubscribeState.subStateIdle; + int elapseSinceLastState = 5; final eventJson = { 'channel': channel, @@ -6371,10 +6228,10 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String channel = "hello"; - const StreamPublishState oldState = StreamPublishState.pubStateIdle; - const StreamPublishState newState = StreamPublishState.pubStateIdle; - const int elapseSinceLastState = 10; + String channel = "hello"; + StreamPublishState oldState = StreamPublishState.pubStateIdle; + 
StreamPublishState newState = StreamPublishState.pubStateIdle; + int elapseSinceLastState = 5; final eventJson = { 'channel': channel, @@ -6444,11 +6301,11 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const VideoSourceType source = VideoSourceType.videoSourceCameraPrimary; - const String channel = "hello"; - const StreamPublishState oldState = StreamPublishState.pubStateIdle; - const StreamPublishState newState = StreamPublishState.pubStateIdle; - const int elapseSinceLastState = 10; + VideoSourceType source = VideoSourceType.videoSourceCameraPrimary; + String channel = "hello"; + StreamPublishState oldState = StreamPublishState.pubStateIdle; + StreamPublishState newState = StreamPublishState.pubStateIdle; + int elapseSinceLastState = 5; final eventJson = { 'source': source.value(), @@ -6516,17 +6373,38 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int uid = 10; - const int width = 10; - const int height = 10; - const int layoutCount = 10; - const List layoutlist = []; + int uid = 5; + int width = 5; + int height = 5; + int layoutCount = 5; + final List layoutlist = () { + String layoutlistItemChannelId = "hello"; + int layoutlistItemUid = 5; + String layoutlistItemStrUid = "hello"; + int layoutlistItemX = 5; + int layoutlistItemY = 5; + int layoutlistItemWidth = 5; + int layoutlistItemHeight = 5; + int layoutlistItemVideoState = 5; + VideoLayout layoutlistItem = VideoLayout( + channelId: layoutlistItemChannelId, + uid: layoutlistItemUid, + strUid: layoutlistItemStrUid, + x: layoutlistItemX, + y: layoutlistItemY, + width: layoutlistItemWidth, + height: layoutlistItemHeight, + videoState: layoutlistItemVideoState, + ); + + return List.filled(5, layoutlistItem); + }(); final eventJson = { 'connection': connection.toJson(), @@ -6595,15 +6473,15 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int uid = 10; - Uint8List metadata = Uint8List.fromList([1, 2, 3, 4, 5]); - const int length = 10; + int uid = 5; + Uint8List metadata = Uint8List.fromList([1, 1, 1, 1, 1]); + int length = 5; final eventJson = { 'connection': connection.toJson(), @@ -6670,10 +6548,10 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String provider = "hello"; - const String extension = "hello"; - const String key = "hello"; - const String value = "hello"; + String provider = "hello"; + String extension = "hello"; + String key = "hello"; + String value = "hello"; final eventJson = { 'provider': provider, @@ -6738,8 +6616,8 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String provider = "hello"; - const String extension = "hello"; + String provider = "hello"; 
+ String extension = "hello"; final eventJson = { 'provider': provider, @@ -6802,8 +6680,8 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String provider = "hello"; - const String extension = "hello"; + String provider = "hello"; + String extension = "hello"; final eventJson = { 'provider': provider, @@ -6867,10 +6745,10 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String provider = "hello"; - const String extension = "hello"; - const int error = 10; - const String message = "hello"; + String provider = "hello"; + String extension = "hello"; + int error = 5; + String message = "hello"; final eventJson = { 'provider': provider, @@ -6935,13 +6813,13 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int code = 10; + int code = 5; final eventJson = { 'connection': connection.toJson(), diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart index 2aa805922..3edf082f9 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart @@ -25,54 +25,54 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String token = "hello"; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String token = "hello"; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const ClientRoleType optionsClientRoleType = + ClientRoleType optionsClientRoleType = ClientRoleType.clientRoleBroadcaster; - const AudienceLatencyLevelType optionsAudienceLatencyLevel = + AudienceLatencyLevelType optionsAudienceLatencyLevel = AudienceLatencyLevelType.audienceLatencyLevelLowLatency; - const VideoStreamType optionsDefaultVideoStreamType = + VideoStreamType optionsDefaultVideoStreamType = VideoStreamType.videoStreamHigh; - const ChannelProfileType optionsChannelProfile = + ChannelProfileType optionsChannelProfile = ChannelProfileType.channelProfileCommunication; - const bool optionsPublishCameraTrack = true; - const bool optionsPublishSecondaryCameraTrack = true; - const bool optionsPublishThirdCameraTrack = true; - const bool optionsPublishFourthCameraTrack = true; - const bool optionsPublishMicrophoneTrack = true; - const bool optionsPublishScreenCaptureVideo = true; - const bool optionsPublishScreenCaptureAudio = true; - const bool optionsPublishScreenTrack = true; - const bool optionsPublishSecondaryScreenTrack = true; - const bool optionsPublishThirdScreenTrack = true; - const bool optionsPublishFourthScreenTrack = true; - const bool optionsPublishCustomAudioTrack = true; - const int optionsPublishCustomAudioTrackId = 10; - const bool optionsPublishCustomVideoTrack = true; - 
const bool optionsPublishEncodedVideoTrack = true; - const bool optionsPublishMediaPlayerAudioTrack = true; - const bool optionsPublishMediaPlayerVideoTrack = true; - const bool optionsPublishTranscodedVideoTrack = true; - const bool optionsPublishMixedAudioTrack = true; - const bool optionsPublishLipSyncTrack = true; - const bool optionsAutoSubscribeAudio = true; - const bool optionsAutoSubscribeVideo = true; - const bool optionsEnableAudioRecordingOrPlayout = true; - const int optionsPublishMediaPlayerId = 10; - const int optionsAudioDelayMs = 10; - const int optionsMediaPlayerAudioDelayMs = 10; - const String optionsToken = "hello"; - const bool optionsEnableBuiltInMediaEncryption = true; - const bool optionsPublishRhythmPlayerTrack = true; - const bool optionsIsInteractiveAudience = true; - const int optionsCustomVideoTrackId = 10; - const bool optionsIsAudioFilterable = true; - const ChannelMediaOptions options = ChannelMediaOptions( + bool optionsPublishCameraTrack = true; + bool optionsPublishSecondaryCameraTrack = true; + bool optionsPublishThirdCameraTrack = true; + bool optionsPublishFourthCameraTrack = true; + bool optionsPublishMicrophoneTrack = true; + bool optionsPublishScreenCaptureVideo = true; + bool optionsPublishScreenCaptureAudio = true; + bool optionsPublishScreenTrack = true; + bool optionsPublishSecondaryScreenTrack = true; + bool optionsPublishThirdScreenTrack = true; + bool optionsPublishFourthScreenTrack = true; + bool optionsPublishCustomAudioTrack = true; + int optionsPublishCustomAudioTrackId = 5; + bool optionsPublishCustomVideoTrack = true; + bool optionsPublishEncodedVideoTrack = true; + bool optionsPublishMediaPlayerAudioTrack = true; + bool optionsPublishMediaPlayerVideoTrack = true; + bool optionsPublishTranscodedVideoTrack = true; + bool optionsPublishMixedAudioTrack = true; + bool optionsPublishLipSyncTrack = true; + bool optionsAutoSubscribeAudio = true; + bool optionsAutoSubscribeVideo = true; + bool optionsEnableAudioRecordingOrPlayout = true; + int optionsPublishMediaPlayerId = 5; + int optionsAudioDelayMs = 5; + int optionsMediaPlayerAudioDelayMs = 5; + String optionsToken = "hello"; + bool optionsEnableBuiltInMediaEncryption = true; + bool optionsPublishRhythmPlayerTrack = true; + bool optionsIsInteractiveAudience = true; + int optionsCustomVideoTrackId = 5; + bool optionsIsAudioFilterable = true; + ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, publishThirdCameraTrack: optionsPublishThirdCameraTrack, @@ -146,16 +146,16 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const bool optionsStopAudioMixing = true; - const bool optionsStopAllEffect = true; - const bool optionsStopMicrophoneRecording = true; - const LeaveChannelOptions options = LeaveChannelOptions( + bool optionsStopAudioMixing = true; + bool optionsStopAllEffect = true; + bool optionsStopMicrophoneRecording = true; + LeaveChannelOptions options = LeaveChannelOptions( stopAudioMixing: optionsStopAudioMixing, stopAllEffect: optionsStopAllEffect, stopMicrophoneRecording: optionsStopMicrophoneRecording, @@ 
-195,47 +195,47 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const ClientRoleType optionsClientRoleType = + ClientRoleType optionsClientRoleType = ClientRoleType.clientRoleBroadcaster; - const AudienceLatencyLevelType optionsAudienceLatencyLevel = + AudienceLatencyLevelType optionsAudienceLatencyLevel = AudienceLatencyLevelType.audienceLatencyLevelLowLatency; - const VideoStreamType optionsDefaultVideoStreamType = + VideoStreamType optionsDefaultVideoStreamType = VideoStreamType.videoStreamHigh; - const ChannelProfileType optionsChannelProfile = + ChannelProfileType optionsChannelProfile = ChannelProfileType.channelProfileCommunication; - const bool optionsPublishCameraTrack = true; - const bool optionsPublishSecondaryCameraTrack = true; - const bool optionsPublishThirdCameraTrack = true; - const bool optionsPublishFourthCameraTrack = true; - const bool optionsPublishMicrophoneTrack = true; - const bool optionsPublishScreenCaptureVideo = true; - const bool optionsPublishScreenCaptureAudio = true; - const bool optionsPublishScreenTrack = true; - const bool optionsPublishSecondaryScreenTrack = true; - const bool optionsPublishThirdScreenTrack = true; - const bool optionsPublishFourthScreenTrack = true; - const bool optionsPublishCustomAudioTrack = true; - const int optionsPublishCustomAudioTrackId = 10; - const bool optionsPublishCustomVideoTrack = true; - const bool optionsPublishEncodedVideoTrack = true; - const bool optionsPublishMediaPlayerAudioTrack = true; - const bool optionsPublishMediaPlayerVideoTrack = true; - const bool optionsPublishTranscodedVideoTrack = true; - const bool optionsPublishMixedAudioTrack = true; - const bool optionsPublishLipSyncTrack = true; - const bool optionsAutoSubscribeAudio = true; - const bool optionsAutoSubscribeVideo = true; - const bool optionsEnableAudioRecordingOrPlayout = true; - const int optionsPublishMediaPlayerId = 10; - const int optionsAudioDelayMs = 10; - const int optionsMediaPlayerAudioDelayMs = 10; - const String optionsToken = "hello"; - const bool optionsEnableBuiltInMediaEncryption = true; - const bool optionsPublishRhythmPlayerTrack = true; - const bool optionsIsInteractiveAudience = true; - const int optionsCustomVideoTrackId = 10; - const bool optionsIsAudioFilterable = true; - const ChannelMediaOptions options = ChannelMediaOptions( + bool optionsPublishCameraTrack = true; + bool optionsPublishSecondaryCameraTrack = true; + bool optionsPublishThirdCameraTrack = true; + bool optionsPublishFourthCameraTrack = true; + bool optionsPublishMicrophoneTrack = true; + bool optionsPublishScreenCaptureVideo = true; + bool optionsPublishScreenCaptureAudio = true; + bool optionsPublishScreenTrack = true; + bool optionsPublishSecondaryScreenTrack = true; + bool optionsPublishThirdScreenTrack = true; + bool optionsPublishFourthScreenTrack = true; + bool optionsPublishCustomAudioTrack = true; + int optionsPublishCustomAudioTrackId = 5; + bool optionsPublishCustomVideoTrack = true; + bool optionsPublishEncodedVideoTrack = true; + bool optionsPublishMediaPlayerAudioTrack = true; + bool optionsPublishMediaPlayerVideoTrack = true; + bool optionsPublishTranscodedVideoTrack = true; + bool optionsPublishMixedAudioTrack = true; + bool optionsPublishLipSyncTrack = true; + bool optionsAutoSubscribeAudio = true; + bool optionsAutoSubscribeVideo = true; + bool optionsEnableAudioRecordingOrPlayout = true; + int optionsPublishMediaPlayerId = 5; + int optionsAudioDelayMs = 5; + int 
optionsMediaPlayerAudioDelayMs = 5; + String optionsToken = "hello"; + bool optionsEnableBuiltInMediaEncryption = true; + bool optionsPublishRhythmPlayerTrack = true; + bool optionsIsInteractiveAudience = true; + int optionsCustomVideoTrackId = 5; + bool optionsIsAudioFilterable = true; + ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, publishThirdCameraTrack: optionsPublishThirdCameraTrack, @@ -273,9 +273,9 @@ void rtcEngineExSmokeTestCases() { customVideoTrackId: optionsCustomVideoTrackId, isAudioFilterable: optionsIsAudioFilterable, ); - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -315,31 +315,31 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoCodecType configCodecType = VideoCodecType.videoCodecNone; - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions configDimensions = VideoDimensions( + VideoCodecType configCodecType = VideoCodecType.videoCodecNone; + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions configDimensions = VideoDimensions( width: dimensionsWidth, height: dimensionsHeight, ); - const OrientationMode configOrientationMode = + OrientationMode configOrientationMode = OrientationMode.orientationModeAdaptive; - const DegradationPreference configDegradationPreference = + DegradationPreference configDegradationPreference = DegradationPreference.maintainQuality; - const VideoMirrorModeType configMirrorMode = + VideoMirrorModeType configMirrorMode = VideoMirrorModeType.videoMirrorModeAuto; - const EncodingPreference advanceOptionsEncodingPreference = + EncodingPreference advanceOptionsEncodingPreference = EncodingPreference.preferAuto; - const CompressionPreference advanceOptionsCompressionPreference = + CompressionPreference advanceOptionsCompressionPreference = CompressionPreference.preferLowLatency; - const AdvanceOptions configAdvanceOptions = AdvanceOptions( + AdvanceOptions configAdvanceOptions = AdvanceOptions( encodingPreference: advanceOptionsEncodingPreference, compressionPreference: advanceOptionsCompressionPreference, ); - const int configFrameRate = 10; - const int configBitrate = 10; - const int configMinBitrate = 10; - const VideoEncoderConfiguration config = VideoEncoderConfiguration( + int configFrameRate = 5; + int configBitrate = 5; + int configMinBitrate = 5; + VideoEncoderConfiguration config = VideoEncoderConfiguration( codecType: configCodecType, dimensions: configDimensions, frameRate: configFrameRate, @@ -350,9 +350,9 @@ void rtcEngineExSmokeTestCases() { mirrorMode: configMirrorMode, advanceOptions: configAdvanceOptions, ); - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -392,32 +392,32 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const RenderModeType canvasRenderMode = RenderModeType.renderModeHidden; - const 
VideoMirrorModeType canvasMirrorMode = + RenderModeType canvasRenderMode = RenderModeType.renderModeHidden; + VideoMirrorModeType canvasMirrorMode = VideoMirrorModeType.videoMirrorModeAuto; - const VideoViewSetupMode canvasSetupMode = + VideoViewSetupMode canvasSetupMode = VideoViewSetupMode.videoViewSetupReplace; - const VideoSourceType canvasSourceType = + VideoSourceType canvasSourceType = VideoSourceType.videoSourceCameraPrimary; - const int cropAreaX = 10; - const int cropAreaY = 10; - const int cropAreaWidth = 10; - const int cropAreaHeight = 10; - const Rectangle canvasCropArea = Rectangle( + int cropAreaX = 5; + int cropAreaY = 5; + int cropAreaWidth = 5; + int cropAreaHeight = 5; + Rectangle canvasCropArea = Rectangle( x: cropAreaX, y: cropAreaY, width: cropAreaWidth, height: cropAreaHeight, ); - const VideoModulePosition canvasPosition = + VideoModulePosition canvasPosition = VideoModulePosition.positionPostCapturer; - const int canvasUid = 10; - const int canvasSubviewUid = 10; - const int canvasView = 10; - const int canvasBackgroundColor = 10; - const int canvasMediaPlayerId = 10; - const bool canvasEnableAlphaMask = true; - const VideoCanvas canvas = VideoCanvas( + int canvasUid = 5; + int canvasSubviewUid = 5; + int canvasView = 5; + int canvasBackgroundColor = 5; + int canvasMediaPlayerId = 5; + bool canvasEnableAlphaMask = true; + VideoCanvas canvas = VideoCanvas( uid: canvasUid, subviewUid: canvasSubviewUid, view: canvasView, @@ -431,9 +431,9 @@ void rtcEngineExSmokeTestCases() { enableAlphaMask: canvasEnableAlphaMask, position: canvasPosition, ); - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -472,11 +472,11 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const bool mute = true; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + int uid = 5; + bool mute = true; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -517,11 +517,11 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const bool mute = true; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + int uid = 5; + bool mute = true; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -562,11 +562,11 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const VideoStreamType streamType = VideoStreamType.videoStreamHigh; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + int uid = 5; + VideoStreamType streamType = VideoStreamType.videoStreamHigh; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: 
connectionLocalUid, ); @@ -607,10 +607,10 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool mute = true; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + bool mute = true; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -650,10 +650,10 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool mute = true; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + bool mute = true; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -693,10 +693,10 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool mute = true; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + bool mute = true; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -736,10 +736,10 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool mute = true; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + bool mute = true; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -779,16 +779,16 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const VideoStreamType optionsType = VideoStreamType.videoStreamHigh; - const bool optionsEncodedFrameOnly = true; - const VideoSubscriptionOptions options = VideoSubscriptionOptions( + int uid = 5; + VideoStreamType optionsType = VideoStreamType.videoStreamHigh; + bool optionsEncodedFrameOnly = true; + VideoSubscriptionOptions options = VideoSubscriptionOptions( type: optionsType, encodedFrameOnly: optionsEncodedFrameOnly, ); - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -829,12 +829,12 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const double pan = 10.0; - const double gain = 10.0; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + int uid = 5; + double pan = 5.0; + double gain = 5.0; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -876,16 +876,16 @@ void rtcEngineExSmokeTestCases() { await 
rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const double paramsSpeakerAzimuth = 10.0; - const double paramsSpeakerElevation = 10.0; - const double paramsSpeakerDistance = 10.0; - const int paramsSpeakerOrientation = 10; - const bool paramsEnableBlur = true; - const bool paramsEnableAirAbsorb = true; - const double paramsSpeakerAttenuation = 10.0; - const bool paramsEnableDoppler = true; - const SpatialAudioParams params = SpatialAudioParams( + int uid = 5; + double paramsSpeakerAzimuth = 5.0; + double paramsSpeakerElevation = 5.0; + double paramsSpeakerDistance = 5.0; + int paramsSpeakerOrientation = 5; + bool paramsEnableBlur = true; + bool paramsEnableAirAbsorb = true; + double paramsSpeakerAttenuation = 5.0; + bool paramsEnableDoppler = true; + SpatialAudioParams params = SpatialAudioParams( speakerAzimuth: paramsSpeakerAzimuth, speakerElevation: paramsSpeakerElevation, speakerDistance: paramsSpeakerDistance, @@ -895,9 +895,9 @@ void rtcEngineExSmokeTestCases() { speakerAttenuation: paramsSpeakerAttenuation, enableDoppler: paramsEnableDoppler, ); - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -938,13 +938,13 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const RenderModeType renderMode = RenderModeType.renderModeHidden; - const VideoMirrorModeType mirrorMode = + int uid = 5; + RenderModeType renderMode = RenderModeType.renderModeHidden; + VideoMirrorModeType mirrorMode = VideoMirrorModeType.videoMirrorModeAuto; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -986,14 +986,14 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const bool enabled = true; - const String deviceName = "hello"; + bool enabled = true; + String deviceName = "hello"; await rtcEngineEx.enableLoopbackRecordingEx( connection: connection, enabled: enabled, @@ -1031,10 +1031,10 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const int volume = 10; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + int volume = 5; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1074,10 +1074,10 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool mute = true; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection 
connection = RtcConnection( + bool mute = true; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1117,11 +1117,11 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const int volume = 10; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + int uid = 5; + int volume = 5; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1162,9 +1162,9 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1203,18 +1203,18 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const bool enabled = true; - const EncryptionMode configEncryptionMode = EncryptionMode.aes128Xts; - const String configEncryptionKey = "hello"; - Uint8List configEncryptionKdfSalt = Uint8List.fromList([1, 2, 3, 4, 5]); - const bool configDatastreamEncryptionEnabled = true; - final EncryptionConfig config = EncryptionConfig( + bool enabled = true; + EncryptionMode configEncryptionMode = EncryptionMode.aes128Xts; + String configEncryptionKey = "hello"; + Uint8List configEncryptionKdfSalt = Uint8List.fromList([1, 1, 1, 1, 1]); + bool configDatastreamEncryptionEnabled = true; + EncryptionConfig config = EncryptionConfig( encryptionMode: configEncryptionMode, encryptionKey: configEncryptionKey, encryptionKdfSalt: configEncryptionKdfSalt, @@ -1256,12 +1256,12 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const int streamId = 10; - Uint8List data = Uint8List.fromList([1, 2, 3, 4, 5]); - const int length = 10; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + int streamId = 5; + Uint8List data = Uint8List.fromList([1, 1, 1, 1, 1]); + int length = 5; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1303,48 +1303,47 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String watermarkUrl = "hello"; - const int positionInLandscapeModeX = 10; - const int positionInLandscapeModeY = 10; - const int positionInLandscapeModeWidth = 10; - const int positionInLandscapeModeHeight = 10; - const Rectangle optionsPositionInLandscapeMode = Rectangle( + String watermarkUrl = "hello"; + int positionInLandscapeModeX = 5; + int positionInLandscapeModeY = 5; + int 
positionInLandscapeModeWidth = 5; + int positionInLandscapeModeHeight = 5; + Rectangle optionsPositionInLandscapeMode = Rectangle( x: positionInLandscapeModeX, y: positionInLandscapeModeY, width: positionInLandscapeModeWidth, height: positionInLandscapeModeHeight, ); - const int positionInPortraitModeX = 10; - const int positionInPortraitModeY = 10; - const int positionInPortraitModeWidth = 10; - const int positionInPortraitModeHeight = 10; - const Rectangle optionsPositionInPortraitMode = Rectangle( + int positionInPortraitModeX = 5; + int positionInPortraitModeY = 5; + int positionInPortraitModeWidth = 5; + int positionInPortraitModeHeight = 5; + Rectangle optionsPositionInPortraitMode = Rectangle( x: positionInPortraitModeX, y: positionInPortraitModeY, width: positionInPortraitModeWidth, height: positionInPortraitModeHeight, ); - const double watermarkRatioXRatio = 10.0; - const double watermarkRatioYRatio = 10.0; - const double watermarkRatioWidthRatio = 10.0; - const WatermarkRatio optionsWatermarkRatio = WatermarkRatio( + double watermarkRatioXRatio = 5.0; + double watermarkRatioYRatio = 5.0; + double watermarkRatioWidthRatio = 5.0; + WatermarkRatio optionsWatermarkRatio = WatermarkRatio( xRatio: watermarkRatioXRatio, yRatio: watermarkRatioYRatio, widthRatio: watermarkRatioWidthRatio, ); - const WatermarkFitMode optionsMode = - WatermarkFitMode.fitModeCoverPosition; - const bool optionsVisibleInPreview = true; - const WatermarkOptions options = WatermarkOptions( + WatermarkFitMode optionsMode = WatermarkFitMode.fitModeCoverPosition; + bool optionsVisibleInPreview = true; + WatermarkOptions options = WatermarkOptions( visibleInPreview: optionsVisibleInPreview, positionInLandscapeMode: optionsPositionInLandscapeMode, positionInPortraitMode: optionsPositionInPortraitMode, watermarkRatio: optionsWatermarkRatio, mode: optionsMode, ); - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1385,9 +1384,9 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1426,14 +1425,14 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String id = "hello"; - const String category = "hello"; - const String event = "hello"; - const String label = "hello"; - const int value = 10; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String id = "hello"; + String category = "hello"; + String event = "hello"; + String label = "hello"; + int value = 5; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1477,12 +1476,12 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const int interval = 10; - const int smooth = 10; - const bool reportVad = true; - 
const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + int interval = 5; + int smooth = 5; + bool reportVad = true; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1524,10 +1523,10 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String url = "hello"; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String url = "hello"; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1567,35 +1566,35 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String url = "hello"; - const VideoCodecProfileType transcodingVideoCodecProfile = + String url = "hello"; + VideoCodecProfileType transcodingVideoCodecProfile = VideoCodecProfileType.videoCodecProfileBaseline; - const VideoCodecTypeForStream transcodingVideoCodecType = + VideoCodecTypeForStream transcodingVideoCodecType = VideoCodecTypeForStream.videoCodecH264ForStream; - const AudioSampleRateType transcodingAudioSampleRate = + AudioSampleRateType transcodingAudioSampleRate = AudioSampleRateType.audioSampleRate32000; - const AudioCodecProfileType transcodingAudioCodecProfile = + AudioCodecProfileType transcodingAudioCodecProfile = AudioCodecProfileType.audioCodecProfileLcAac; - const int transcodingWidth = 10; - const int transcodingHeight = 10; - const int transcodingVideoBitrate = 10; - const int transcodingVideoFramerate = 10; - const bool transcodingLowLatency = true; - const int transcodingVideoGop = 10; - const int transcodingBackgroundColor = 10; - const int transcodingUserCount = 10; - const List transcodingTranscodingUsers = []; - const String transcodingTranscodingExtraInfo = "hello"; - const String transcodingMetadata = "hello"; - const List transcodingWatermark = []; - const int transcodingWatermarkCount = 10; - const List transcodingBackgroundImage = []; - const int transcodingBackgroundImageCount = 10; - const int transcodingAudioBitrate = 10; - const int transcodingAudioChannels = 10; - const List transcodingAdvancedFeatures = []; - const int transcodingAdvancedFeatureCount = 10; - const LiveTranscoding transcoding = LiveTranscoding( + int transcodingWidth = 5; + int transcodingHeight = 5; + int transcodingVideoBitrate = 5; + int transcodingVideoFramerate = 5; + bool transcodingLowLatency = true; + int transcodingVideoGop = 5; + int transcodingBackgroundColor = 5; + int transcodingUserCount = 5; + List transcodingTranscodingUsers = []; + String transcodingTranscodingExtraInfo = "hello"; + String transcodingMetadata = "hello"; + List transcodingWatermark = []; + int transcodingWatermarkCount = 5; + List transcodingBackgroundImage = []; + int transcodingBackgroundImageCount = 5; + int transcodingAudioBitrate = 5; + int transcodingAudioChannels = 5; + List transcodingAdvancedFeatures = []; + int transcodingAdvancedFeatureCount = 5; + LiveTranscoding transcoding = LiveTranscoding( width: transcodingWidth, height: transcodingHeight, videoBitrate: transcodingVideoBitrate, @@ -1620,9 +1619,9 @@ void rtcEngineExSmokeTestCases() { advancedFeatures: transcodingAdvancedFeatures, 
advancedFeatureCount: transcodingAdvancedFeatureCount, ); - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1663,34 +1662,34 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const VideoCodecProfileType transcodingVideoCodecProfile = + VideoCodecProfileType transcodingVideoCodecProfile = VideoCodecProfileType.videoCodecProfileBaseline; - const VideoCodecTypeForStream transcodingVideoCodecType = + VideoCodecTypeForStream transcodingVideoCodecType = VideoCodecTypeForStream.videoCodecH264ForStream; - const AudioSampleRateType transcodingAudioSampleRate = + AudioSampleRateType transcodingAudioSampleRate = AudioSampleRateType.audioSampleRate32000; - const AudioCodecProfileType transcodingAudioCodecProfile = + AudioCodecProfileType transcodingAudioCodecProfile = AudioCodecProfileType.audioCodecProfileLcAac; - const int transcodingWidth = 10; - const int transcodingHeight = 10; - const int transcodingVideoBitrate = 10; - const int transcodingVideoFramerate = 10; - const bool transcodingLowLatency = true; - const int transcodingVideoGop = 10; - const int transcodingBackgroundColor = 10; - const int transcodingUserCount = 10; - const List transcodingTranscodingUsers = []; - const String transcodingTranscodingExtraInfo = "hello"; - const String transcodingMetadata = "hello"; - const List transcodingWatermark = []; - const int transcodingWatermarkCount = 10; - const List transcodingBackgroundImage = []; - const int transcodingBackgroundImageCount = 10; - const int transcodingAudioBitrate = 10; - const int transcodingAudioChannels = 10; - const List transcodingAdvancedFeatures = []; - const int transcodingAdvancedFeatureCount = 10; - const LiveTranscoding transcoding = LiveTranscoding( + int transcodingWidth = 5; + int transcodingHeight = 5; + int transcodingVideoBitrate = 5; + int transcodingVideoFramerate = 5; + bool transcodingLowLatency = true; + int transcodingVideoGop = 5; + int transcodingBackgroundColor = 5; + int transcodingUserCount = 5; + List transcodingTranscodingUsers = []; + String transcodingTranscodingExtraInfo = "hello"; + String transcodingMetadata = "hello"; + List transcodingWatermark = []; + int transcodingWatermarkCount = 5; + List transcodingBackgroundImage = []; + int transcodingBackgroundImageCount = 5; + int transcodingAudioBitrate = 5; + int transcodingAudioChannels = 5; + List transcodingAdvancedFeatures = []; + int transcodingAdvancedFeatureCount = 5; + LiveTranscoding transcoding = LiveTranscoding( width: transcodingWidth, height: transcodingHeight, videoBitrate: transcodingVideoBitrate, @@ -1715,9 +1714,9 @@ void rtcEngineExSmokeTestCases() { advancedFeatures: transcodingAdvancedFeatures, advancedFeatureCount: transcodingAdvancedFeatureCount, ); - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1757,10 +1756,10 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String url = "hello"; - const String connectionChannelId = 
"hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String url = "hello"; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1799,9 +1798,9 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1840,9 +1839,9 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1881,9 +1880,9 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1922,10 +1921,10 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String userAccount = "hello"; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String userAccount = "hello"; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -1965,10 +1964,10 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const int uid = 10; - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + int uid = 5; + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -2007,23 +2006,23 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions streamConfigDimensions = VideoDimensions( + bool enabled = true; + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions streamConfigDimensions = VideoDimensions( width: dimensionsWidth, height: dimensionsHeight, ); - const int streamConfigKBitrate = 10; - const int streamConfigFramerate = 10; - const SimulcastStreamConfig streamConfig = SimulcastStreamConfig( + int streamConfigKBitrate = 5; + int streamConfigFramerate = 5; + SimulcastStreamConfig streamConfig = SimulcastStreamConfig( dimensions: streamConfigDimensions, kBitrate: streamConfigKBitrate, framerate: streamConfigFramerate, ); - const String connectionChannelId = "hello"; - const int 
connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -2064,24 +2063,23 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const SimulcastStreamMode mode = - SimulcastStreamMode.autoSimulcastStream; - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions streamConfigDimensions = VideoDimensions( + SimulcastStreamMode mode = SimulcastStreamMode.autoSimulcastStream; + int dimensionsWidth = 5; + int dimensionsHeight = 5; + VideoDimensions streamConfigDimensions = VideoDimensions( width: dimensionsWidth, height: dimensionsHeight, ); - const int streamConfigKBitrate = 10; - const int streamConfigFramerate = 10; - const SimulcastStreamConfig streamConfig = SimulcastStreamConfig( + int streamConfigKBitrate = 5; + int streamConfigFramerate = 5; + SimulcastStreamConfig streamConfig = SimulcastStreamConfig( dimensions: streamConfigDimensions, kBitrate: streamConfigKBitrate, framerate: streamConfigFramerate, ); - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -2122,14 +2120,14 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int uid = 10; - const String filePath = "hello"; + int uid = 5; + String filePath = "hello"; await rtcEngineEx.takeSnapshotEx( connection: connection, uid: uid, @@ -2166,20 +2164,20 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const bool enabled = true; - const String configExtraInfo = "hello"; - const String configServerConfig = "hello"; - const List configModules = []; - const int configModuleCount = 10; - const ContentInspectConfig config = ContentInspectConfig( + bool enabled = true; + String configExtraInfo = "hello"; + String configServerConfig = "hello"; + List configModules = []; + int configModuleCount = 5; + ContentInspectConfig config = ContentInspectConfig( extraInfo: configExtraInfo, serverConfig: configServerConfig, modules: configModules, moduleCount: configModuleCount, ); - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -2220,9 +2218,9 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: 
connectionChannelId, localUid: connectionLocalUid, ); @@ -2261,13 +2259,13 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - const String parameters = "hello"; + String parameters = "hello"; await rtcEngineEx.setParametersEx( connection: connection, parameters: parameters, @@ -2303,9 +2301,9 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); @@ -2343,14 +2341,14 @@ void rtcEngineExSmokeTestCases() { await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( channelId: connectionChannelId, localUid: connectionLocalUid, ); - Uint8List metadata = Uint8List.fromList([1, 2, 3, 4, 5]); - const int length = 10; + Uint8List metadata = Uint8List.fromList([1, 1, 1, 1, 1]); + int length = 5; await rtcEngineEx.sendAudioMetadataEx( connection: connection, metadata: metadata, diff --git a/test_shard/fake_test_app/integration_test/generated/videodevicemanager_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/videodevicemanager_fake_test.generated.dart index 476402211..5846e09b5 100644 --- a/test_shard/fake_test_app/integration_test/generated/videodevicemanager_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/videodevicemanager_fake_test.generated.dart @@ -62,7 +62,7 @@ void videoDeviceManagerSmokeTestCases() { final videoDeviceManager = rtcEngine.getVideoDeviceManager(); try { - const String deviceIdUTF8 = "hello"; + String deviceIdUTF8 = "hello"; await videoDeviceManager.setDevice( deviceIdUTF8, ); @@ -135,7 +135,7 @@ void videoDeviceManagerSmokeTestCases() { final videoDeviceManager = rtcEngine.getVideoDeviceManager(); try { - const String deviceIdUTF8 = "hello"; + String deviceIdUTF8 = "hello"; await videoDeviceManager.numberOfCapabilities( deviceIdUTF8, ); @@ -174,8 +174,8 @@ void videoDeviceManagerSmokeTestCases() { final videoDeviceManager = rtcEngine.getVideoDeviceManager(); try { - const String deviceIdUTF8 = "hello"; - const int deviceCapabilityNumber = 10; + String deviceIdUTF8 = "hello"; + int deviceCapabilityNumber = 5; await videoDeviceManager.getCapability( deviceIdUTF8: deviceIdUTF8, deviceCapabilityNumber: deviceCapabilityNumber, @@ -215,7 +215,7 @@ void videoDeviceManagerSmokeTestCases() { final videoDeviceManager = rtcEngine.getVideoDeviceManager(); try { - const int hwnd = 10; + int hwnd = 5; await videoDeviceManager.startDeviceTest( hwnd, ); diff --git a/test_shard/fake_test_app/integration_test/testcases/musiccontentcenter_testcases.dart b/test_shard/fake_test_app/integration_test/testcases/musiccontentcenter_testcases.dart new file 
mode 100644 index 000000000..234d7602d --- /dev/null +++ b/test_shard/fake_test_app/integration_test/testcases/musiccontentcenter_testcases.dart @@ -0,0 +1,53 @@ +import 'dart:async'; +import 'dart:io'; + +import 'package:agora_rtc_engine/agora_rtc_engine.dart'; +import 'package:flutter/foundation.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:iris_tester/iris_tester.dart'; +import '../generated/musiccontentcenter_fake_test.generated.dart' as generated; + +import '../testcases/event_ids_mapping.dart'; + +void testCases() { + generated.musicContentCenterSmokeTestCases(); + + testWidgets( + 'MusicContentCenter.destroyMusicPlayer', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + final musicContentCenter = rtcEngine.getMusicContentCenter(); + + try { + final musicPlayer = (await musicContentCenter.createMusicPlayer())!; + await musicContentCenter.destroyMusicPlayer( + musicPlayer, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[MusicContentCenter.destroyMusicPlayer] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await musicContentCenter.release(); + await rtcEngine.release(); + }, +// skip: !(), + ); +} diff --git a/test_shard/fake_test_app/integration_test/testcases/rtcengine_rtcengineeventhandler_testcases.dart b/test_shard/fake_test_app/integration_test/testcases/rtcengine_rtcengineeventhandler_testcases.dart index 5bf6c93c5..cfd1a9551 100644 --- a/test_shard/fake_test_app/integration_test/testcases/rtcengine_rtcengineeventhandler_testcases.dart +++ b/test_shard/fake_test_app/integration_test/testcases/rtcengine_rtcengineeventhandler_testcases.dart @@ -1,5 +1,6 @@ import 'dart:async'; import 'dart:io'; +import 'dart:typed_data'; import 'package:agora_rtc_engine/agora_rtc_engine.dart'; import 'package:flutter/foundation.dart'; @@ -159,4 +160,165 @@ void testCases(ValueGetter irisTester) { // timeout: const Timeout(Duration(minutes: 2)), // skip: kIsWeb || !Platform.isAndroid, // ); + + testWidgets( + 'RtcEngineEventHandler.onDownlinkNetworkInfoUpdated', + (WidgetTester tester) async { + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: 'app_id', + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + final onDownlinkNetworkInfoUpdatedCompleter = Completer(); + final theRtcEngineEventHandler = RtcEngineEventHandler( + onDownlinkNetworkInfoUpdated: (DownlinkNetworkInfo info) { + onDownlinkNetworkInfoUpdatedCompleter.complete(true); + }, + ); + + rtcEngine.registerEventHandler( + theRtcEngineEventHandler, + ); + +// Delay 500 milliseconds to ensure the registerEventHandler call completed. 
+ await Future.delayed(const Duration(milliseconds: 500)); + + { + const int infoLastmileBufferDelayTimeMs = 1; + const int infoBandwidthEstimationBps = 1; + const int infoTotalDownscaleLevelCount = 1; + const List infoPeerDownlinkInfo = [ + PeerDownlinkInfo( + userId: '123', + streamType: VideoStreamType.videoStreamLow, + currentDownscaleLevel: + RemoteVideoDownscaleLevel.remoteVideoDownscaleLevel1, + expectedBitrateBps: 1, + ), + ]; + const int infoTotalReceivedVideoCount = 1; + const DownlinkNetworkInfo info = DownlinkNetworkInfo( + lastmileBufferDelayTimeMs: infoLastmileBufferDelayTimeMs, + bandwidthEstimationBps: infoBandwidthEstimationBps, + totalDownscaleLevelCount: infoTotalDownscaleLevelCount, + peerDownlinkInfo: infoPeerDownlinkInfo, + totalReceivedVideoCount: infoTotalReceivedVideoCount, + ); + + final eventJson = { + 'info': info.toJson(), + }; + + final eventIds = eventIdsMapping[ + 'RtcEngineEventHandler_onDownlinkNetworkInfoUpdated'] ?? + []; + for (final event in eventIds) { + final ret = irisTester().fireEvent(event, params: eventJson); + // Delay 200 milliseconds to ensure the callback is called. + await Future.delayed(const Duration(milliseconds: 200)); + // TODO(littlegnal): Most of callbacks on web are not implemented, we're temporarily skip these callbacks at this time. + if (kIsWeb && ret) { + if (!onDownlinkNetworkInfoUpdatedCompleter.isCompleted) { + onDownlinkNetworkInfoUpdatedCompleter.complete(true); + } + } + } + } + + final eventCalled = await onDownlinkNetworkInfoUpdatedCompleter.future; + expect(eventCalled, isTrue); + + { + rtcEngine.unregisterEventHandler( + theRtcEngineEventHandler, + ); + } +// Delay 500 milliseconds to ensure the unregisterEventHandler call completed. + await Future.delayed(const Duration(milliseconds: 500)); + + await rtcEngine.release(); + }, + timeout: const Timeout(Duration(minutes: 2)), + ); + + testWidgets( + 'RtcEngineEventHandler.onStreamMessage', + (WidgetTester tester) async { + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: 'app_id', + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + final onStreamMessageCompleter = Completer(); + final theRtcEngineEventHandler = RtcEngineEventHandler( + onStreamMessage: (RtcConnection connection, int remoteUid, int streamId, + Uint8List data, int length, int sentTs) { + onStreamMessageCompleter.complete(true); + }, + ); + + rtcEngine.registerEventHandler( + theRtcEngineEventHandler, + ); + +// Delay 500 milliseconds to ensure the registerEventHandler call completed. + await Future.delayed(const Duration(milliseconds: 500)); + + { + const String connectionChannelId = "hello"; + const int connectionLocalUid = 10; + const RtcConnection connection = RtcConnection( + channelId: connectionChannelId, + localUid: connectionLocalUid, + ); + const int remoteUid = 10; + const int streamId = 10; + Uint8List data = Uint8List.fromList([1, 2, 3, 4, 5]); + const int length = 0; + const int sentTs = 10; + + final eventJson = { + 'connection': connection.toJson(), + 'remoteUid': remoteUid, + 'streamId': streamId, + // DO not pass data to the event, since it is treated as intptr in iris. But we does not want to adpot this logic. + // 'data': data.toList(), + 'length': length, + 'sentTs': sentTs, + }; + + final eventIds = + eventIdsMapping['RtcEngineEventHandler_onStreamMessage'] ?? 
[]; + for (final event in eventIds) { + final ret = irisTester().fireEvent(event, params: eventJson); + // Delay 200 milliseconds to ensure the callback is called. + await Future.delayed(const Duration(milliseconds: 200)); + // TODO(littlegnal): Most of callbacks on web are not implemented, we're temporarily skip these callbacks at this time. + if (kIsWeb && ret) { + if (!onStreamMessageCompleter.isCompleted) { + onStreamMessageCompleter.complete(true); + } + } + } + } + + final eventCalled = await onStreamMessageCompleter.future; + expect(eventCalled, isTrue); + + { + rtcEngine.unregisterEventHandler( + theRtcEngineEventHandler, + ); + } +// Delay 500 milliseconds to ensure the unregisterEventHandler call completed. + await Future.delayed(const Duration(milliseconds: 500)); + + await rtcEngine.release(); + }, + timeout: const Timeout(Duration(minutes: 2)), + ); } diff --git a/test_shard/fake_test_app/lib/main.dart b/test_shard/fake_test_app/lib/main.dart index 48d84c326..a5f5ba165 100644 --- a/test_shard/fake_test_app/lib/main.dart +++ b/test_shard/fake_test_app/lib/main.dart @@ -138,9 +138,7 @@ class _MyHomePageState extends State { const Text( 'You have pushed the button this many times:', ), - Text( - '$_counter' - ), + Text('$_counter'), ], ), ), diff --git a/test_shard/integration_test_app/integration_test/generated/mediaplayercachemanager_smoke_test.generated.dart b/test_shard/integration_test_app/integration_test/generated/mediaplayercachemanager_smoke_test.generated.dart index cf30613c7..321881b12 100644 --- a/test_shard/integration_test_app/integration_test/generated/mediaplayercachemanager_smoke_test.generated.dart +++ b/test_shard/integration_test_app/integration_test/generated/mediaplayercachemanager_smoke_test.generated.dart @@ -97,7 +97,7 @@ void mediaPlayerCacheManagerSmokeTestCases() { final mediaPlayerCacheManager = getMediaPlayerCacheManager(rtcEngine); try { - const String uri = "hello"; + String uri = "hello"; await mediaPlayerCacheManager.removeCacheByUri( uri, ); @@ -135,7 +135,7 @@ void mediaPlayerCacheManagerSmokeTestCases() { final mediaPlayerCacheManager = getMediaPlayerCacheManager(rtcEngine); try { - const String path = "hello"; + String path = "hello"; await mediaPlayerCacheManager.setCacheDir( path, ); @@ -173,7 +173,7 @@ void mediaPlayerCacheManagerSmokeTestCases() { final mediaPlayerCacheManager = getMediaPlayerCacheManager(rtcEngine); try { - const int count = 10; + int count = 5; await mediaPlayerCacheManager.setMaxCacheFileCount( count, ); @@ -211,7 +211,7 @@ void mediaPlayerCacheManagerSmokeTestCases() { final mediaPlayerCacheManager = getMediaPlayerCacheManager(rtcEngine); try { - const int cacheSize = 10; + int cacheSize = 5; await mediaPlayerCacheManager.setMaxCacheFileSize( cacheSize, ); @@ -249,7 +249,7 @@ void mediaPlayerCacheManagerSmokeTestCases() { final mediaPlayerCacheManager = getMediaPlayerCacheManager(rtcEngine); try { - const bool enable = true; + bool enable = true; await mediaPlayerCacheManager.enableAutoRemoveCache( enable, ); @@ -287,7 +287,7 @@ void mediaPlayerCacheManagerSmokeTestCases() { final mediaPlayerCacheManager = getMediaPlayerCacheManager(rtcEngine); try { - const int length = 10; + int length = 5; await mediaPlayerCacheManager.getCacheDir( length, ); diff --git a/test_shard/integration_test_app/integration_test/testcases/fake_agora_video_view_testcases.dart b/test_shard/integration_test_app/integration_test/testcases/fake_agora_video_view_testcases.dart index 81ab3425d..f1a0611cd 100644 --- 
a/test_shard/integration_test_app/integration_test/testcases/fake_agora_video_view_testcases.dart +++ b/test_shard/integration_test_app/integration_test/testcases/fake_agora_video_view_testcases.dart @@ -167,7 +167,6 @@ class FakeMethodChannelController { textrueId = 0; } - void triggerPlatformMessage(String channelId, MethodCall methodCall) async { const StandardMethodCodec codec = StandardMethodCodec(); final ByteData data = codec.encodeMethodCall(methodCall); @@ -596,7 +595,6 @@ void testCases() { expect(disposeTextureRenderTextureId != -1, isTrue); } - }, ); @@ -696,7 +694,6 @@ void testCases() { // The first textureId is 1 expect(textureId == 1, isTrue); } - }, ); @@ -894,7 +891,6 @@ void testCases() { } } - await tester.pumpWidget(Container()); await tester.pumpAndSettle(const Duration(milliseconds: 5000)); await Future.delayed(const Duration(seconds: 5)); diff --git a/test_shard/integration_test_app/lib/fake_remote_user_main.dart b/test_shard/integration_test_app/lib/fake_remote_user_main.dart index 714d5fa21..41e60576c 100644 --- a/test_shard/integration_test_app/lib/fake_remote_user_main.dart +++ b/test_shard/integration_test_app/lib/fake_remote_user_main.dart @@ -131,9 +131,7 @@ class _MyHomePageState extends State { const Text( 'You have pushed the button this many times:', ), - Text( - '$_counter' - ), + Text('$_counter'), ], ), ), diff --git a/test_shard/integration_test_app/lib/main.dart b/test_shard/integration_test_app/lib/main.dart index c48fc2da2..f6002fd38 100644 --- a/test_shard/integration_test_app/lib/main.dart +++ b/test_shard/integration_test_app/lib/main.dart @@ -144,9 +144,7 @@ class _MyHomePageState extends State { const Text( 'You have pushed the button this many times:', ), - Text( - '$_counter' - ), + Text('$_counter'), ], ), ), diff --git a/test_shard/iris_tester/lib/src/platform/io/iris_tester_bindings.dart b/test_shard/iris_tester/lib/src/platform/io/iris_tester_bindings.dart index 385f58631..9af83b52f 100644 --- a/test_shard/iris_tester/lib/src/platform/io/iris_tester_bindings.dart +++ b/test_shard/iris_tester/lib/src/platform/io/iris_tester_bindings.dart @@ -150,6 +150,20 @@ class NativeIrisTesterBinding { ffi.Pointer)>>('TriggerEventWithFakeRtcEngine'); late final _TriggerEventWithFakeRtcEngine = _TriggerEventWithFakeRtcEnginePtr .asFunction, ffi.Pointer)>(); + + void SetShouldReadBufferFromJson( + int readBufferFromJson, + ) { + return _SetShouldReadBufferFromJson( + readBufferFromJson, + ); + } + + late final _SetShouldReadBufferFromJsonPtr = + _lookup>( + 'SetShouldReadBufferFromJson'); + late final _SetShouldReadBufferFromJson = + _SetShouldReadBufferFromJsonPtr.asFunction(); } typedef IrisApiEnginePtr = ffi.Pointer; diff --git a/test_shard/iris_tester/lib/src/platform/io/iris_tester_io.dart b/test_shard/iris_tester/lib/src/platform/io/iris_tester_io.dart index d89f41bc3..f8e7f9d57 100644 --- a/test_shard/iris_tester/lib/src/platform/io/iris_tester_io.dart +++ b/test_shard/iris_tester/lib/src/platform/io/iris_tester_io.dart @@ -44,6 +44,7 @@ class IrisTesterIO implements IrisTester { @override void initialize() { _fakeRtcEngineHandle = _nativeIrisTesterBinding.CreateFakeRtcEngine(); + _nativeIrisTesterBinding.SetShouldReadBufferFromJson(0); } @override diff --git a/test_shard/rendering_test/lib/main.dart b/test_shard/rendering_test/lib/main.dart index 839f9a6b9..75a6ec184 100644 --- a/test_shard/rendering_test/lib/main.dart +++ b/test_shard/rendering_test/lib/main.dart @@ -98,9 +98,7 @@ class _MyHomePageState extends State { const Text( 'You have 
pushed the button this many times:', ), - Text( - '$_counter' - ), + Text('$_counter'), ], ), ), diff --git a/tool/terra/package.json b/tool/terra/package.json index 9bea7dec7..3a841c840 100644 --- a/tool/terra/package.json +++ b/tool/terra/package.json @@ -16,11 +16,10 @@ "@agoraio-extensions/cxx-parser": "git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=cxx-parser", "@agoraio-extensions/terra": "git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=terra", "@agoraio-extensions/terra-core": "git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=terra-core", - "@agoraio-extensions/terra-legacy-cxx-parser": "git@github.com:AgoraIO-Extensions/terra-legacy-cxx-parser.git#head=main", "@agoraio-extensions/terra_shared_configs": "git@github.com:AgoraIO-Extensions/terra_shared_configs.git#head=main", "lodash": "^4.17.21", "ts-node": "^10.9.1", "typescript": "^5.1.6" }, - "packageManager": "yarn@4.1.1" + "packageManager": "yarn@4.2.2" } diff --git a/tool/terra/terra_config_main.yaml b/tool/terra/terra_config_main.yaml index fb2dedf58..6d42f0421 100644 --- a/tool/terra/terra_config_main.yaml +++ b/tool/terra/terra_config_main.yaml @@ -3,17 +3,17 @@ parsers: package: '@agoraio-extensions/cxx-parser' args: includeHeaderDirs: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include' parseFiles: include: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/*.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/*.h' exclude: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/AgoraRefPtr.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/time_utils.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/AgoraOptional.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/AgoraRefPtr.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/IAgoraMediaComponentFactory.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/IAgoraParameter.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/AgoraRefPtr.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/time_utils.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/AgoraOptional.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/AgoraRefPtr.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/IAgoraMediaComponentFactory.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/IAgoraParameter.h' - name: IrisApiIdParser package: '@agoraio-extensions/terra_shared_configs' @@ -23,14 +23,14 @@ parsers: args: customHeaderFileNamePrefix: 'Custom' includeHeaderDirs: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include' parseFiles: include: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/*.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/custom_headers/*.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/*.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/custom_headers/*.h' exclude: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/time_utils.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/IAgoraMediaComponentFactory.h' + - 
'@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/time_utils.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/IAgoraMediaComponentFactory.h' - path: parsers/cud_node_parser.ts args: diff --git a/tool/testcase_gen/bin/event_handler_gen_config.dart b/tool/testcase_gen/bin/event_handler_gen_config.dart index d976b6ee6..23ece622f 100644 --- a/tool/testcase_gen/bin/event_handler_gen_config.dart +++ b/tool/testcase_gen/bin/event_handler_gen_config.dart @@ -48,6 +48,8 @@ testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { skipMemberFunctions: [ 'onFacePositionChanged', 'onCameraCapturerConfigurationChanged', + 'onDownlinkNetworkInfoUpdated', + 'onStreamMessage', ], ), EventHandlerTemplatedTestCase( diff --git a/tool/testcase_gen/bin/method_call_gen_config.dart b/tool/testcase_gen/bin/method_call_gen_config.dart index 19aa6f8a9..f274f12cd 100644 --- a/tool/testcase_gen/bin/method_call_gen_config.dart +++ b/tool/testcase_gen/bin/method_call_gen_config.dart @@ -633,7 +633,9 @@ testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { ''', methodInvokeObjectName: 'musicContentCenter', outputDir: outputDir, - skipMemberFunctions: [], + skipMemberFunctions: [ + 'destroyMusicPlayer', + ], outputFileSuffixName: 'fake_test', ), ]; diff --git a/tool/testcase_gen/build.sh b/tool/testcase_gen/build.sh index 3eeda324b..4f1fc0254 100644 --- a/tool/testcase_gen/build.sh +++ b/tool/testcase_gen/build.sh @@ -14,3 +14,5 @@ dart run ${MY_PATH}/bin/testcase_gen.dart \ dart run ${MY_PATH}/bin/testcase_gen.dart \ --gen-integration-test --output-dir=${PROJECT_ROOT}/test_shard/integration_test_app/integration_test/generated + +dart format . diff --git a/tool/testcase_gen/lib/default_generator.dart b/tool/testcase_gen/lib/default_generator.dart index 329980908..cb765845c 100644 --- a/tool/testcase_gen/lib/default_generator.dart +++ b/tool/testcase_gen/lib/default_generator.dart @@ -42,6 +42,79 @@ abstract class DefaultGenerator implements Generator { return '${parameter.type.type}<${parameter.type.typeArguments.join(', ')}>'; } + bool _isPrimitiveType(Type type) => + type.type == 'int' || + type.type == 'double' || + type.type == 'bool' || + type.type == 'String' || + type.type == 'List' || + type.type == 'Map' || + type.type == 'Set' || + type.type == 'Uint8List'; + + String defualtValueOfType( + Type type, + ) { + switch (type.type) { + case 'int': + return '5'; + case 'double': + return '5.0'; + case 'String': + return '"hello"'; + case 'bool': + return 'true'; + case 'List': + if (type.typeArguments.isNotEmpty) { + final typeArgumentType = Type()..type = type.typeArguments[0]; + if (_isPrimitiveType(typeArgumentType)) { + return 'List.filled(5, ${defualtValueOfType(typeArgumentType)})'; + } else { + return '[]'; + } + } + return '[]'; + case 'Map': + return '{}'; + case 'Uint8List': + return 'Uint8List.fromList([1, 1, 1, 1, 1])'; + case 'Set': + return '{}'; + + default: + throw Exception('not support type $type'); + } + } + + @protected + String createListBuilderBlockForList( + ParseResult parseResult, Parameter parameter) { + if (parameter.type.typeArguments.isNotEmpty) { + final listTypeArgumentType = parameter.type.typeArguments[0]; + final listTypeArgumentTypeClazzes = + parseResult.getClazz(listTypeArgumentType); + if (listTypeArgumentTypeClazzes.isNotEmpty) { + final clazz = listTypeArgumentTypeClazzes[0]; + final toParamType = Type()..type = clazz.name; + Parameter pp = Parameter() + ..type = toParamType + ..name = '${parameter.name}Item'; + final 
listInitializerBuilder = StringBuffer(); + createConstructorInitializerForMethodParameter( + parseResult, null, pp, listInitializerBuilder); + final listBuilder = ''' +final ${getParamType(parameter)} ${parameter.name} = () { +${listInitializerBuilder.toString()} + + return List.filled(5, ${parameter.name}Item); +}(); +'''; + return listBuilder; + } + } + return ''; + } + @protected String createConstructorInitializerForMethodParameter( ParseResult parseResult, @@ -55,7 +128,7 @@ abstract class DefaultGenerator implements Generator { final enumz = parseResult.getEnum(parameter.type.type)[0]; initializerBuilder.writeln( - 'const ${getParamType(parameter)} ${_concatParamName(rootParameter?.name, parameter.name)} = ${enumz.enumConstants[0].name};'); + '${getParamType(parameter)} ${_concatParamName(rootParameter?.name, parameter.name)} = ${enumz.enumConstants[0].name};'); return _concatParamName(rootParameter?.name, parameter.name); } @@ -65,11 +138,9 @@ abstract class DefaultGenerator implements Generator { final initBlockParameterListBuilder = StringBuffer(); final initBlockBuilder = StringBuffer(); - bool shouldBeConst = false; bool isNullable = false; if (parameterClass.constructors.isEmpty) { // If there're not constructors found, default to null. - shouldBeConst = true; isNullable = true; initBlockBuilder.write('null'); } else { @@ -78,13 +149,10 @@ abstract class DefaultGenerator implements Generator { initBlockBuilder.write(parameterClass.name); initBlockBuilder.write('('); - shouldBeConst = constructor.isConst; - for (final cp in parameterClass.constructors[0].parameters) { final adjustedParamName = _concatParamName(parameter.name, cp.name); if (cp.isNamed) { if (cp.type.type == 'Function') { - shouldBeConst = false; stdout.writeln( 'cp.type.parameters: ${cp.type.parameters.map((e) => e.name.toString()).toString()}'); final functionParamsList = cp.type.parameters @@ -94,12 +162,18 @@ abstract class DefaultGenerator implements Generator { initBlockBuilder.write('${cp.name}:($functionParamsList) { },'); } else if (cp.isPrimitiveType) { if (getParamType(cp) == 'Uint8List') { - shouldBeConst = false; initBlockParameterListBuilder.writeln( - '${getParamType(cp)} $adjustedParamName = ${cp.primitiveDefualtValue()};'); + '${getParamType(cp)} $adjustedParamName = ${defualtValueOfType(cp.type)};'); } else { - initBlockParameterListBuilder.writeln( - 'const ${getParamType(cp)} $adjustedParamName = ${cp.primitiveDefualtValue()};'); + if (getParamType(cp).startsWith('List') && + parameter.type.typeArguments.isNotEmpty) { + final listBuilderBlock = + createListBuilderBlockForList(parseResult, parameter); + initBlockParameterListBuilder.writeln(listBuilderBlock); + } else { + initBlockParameterListBuilder.writeln( + '${getParamType(cp)} $adjustedParamName = ${defualtValueOfType(cp.type)};'); + } } initBlockBuilder.write('${cp.name}: $adjustedParamName,'); @@ -118,10 +192,17 @@ abstract class DefaultGenerator implements Generator { } else if (cp.isPrimitiveType) { if (getParamType(cp) == 'Uint8List') { initBlockParameterListBuilder.writeln( - '${getParamType(cp)} $adjustedParamName = ${cp.primitiveDefualtValue()};'); + '${getParamType(cp)} $adjustedParamName = ${defualtValueOfType(cp.type)};'); } else { - initBlockParameterListBuilder.writeln( - 'const ${getParamType(cp)} $adjustedParamName = ${cp.primitiveDefualtValue()};'); + if (getParamType(cp).startsWith('List') && + parameter.type.typeArguments.isNotEmpty) { + final listBuilderBlock = + createListBuilderBlockForList(parseResult, parameter); + 
initBlockParameterListBuilder.writeln(listBuilderBlock); + } else { + initBlockParameterListBuilder.writeln( + '${getParamType(cp)} $adjustedParamName = ${defualtValueOfType(cp.type)};'); + } } initBlockBuilder.write('$adjustedParamName,'); @@ -137,10 +218,10 @@ abstract class DefaultGenerator implements Generator { } initializerBuilder.write(initBlockParameterListBuilder.toString()); - final keywordPrefix = shouldBeConst ? 'const' : 'final'; + // final keywordPrefix = shouldBeConst ? 'const' : 'final'; initializerBuilder.writeln( - '$keywordPrefix ${getParamType(parameter)}${isNullable ? '?' : ''} ${_concatParamName(rootParameter?.name, parameter.name)} = ${initBlockBuilder.toString()};'); + '${getParamType(parameter)}${isNullable ? '?' : ''} ${_concatParamName(rootParameter?.name, parameter.name)} = ${initBlockBuilder.toString()};'); return _concatParamName(rootParameter?.name, parameter.name); } @@ -177,10 +258,17 @@ abstract class DefaultGenerator implements Generator { final parameterType = getParamType(parameter); if (parameterType == 'Uint8List') { pb.writeln( - '${getParamType(parameter)} ${parameter.name} = ${parameter.primitiveDefualtValue()};'); + '${getParamType(parameter)} ${parameter.name} = ${defualtValueOfType(parameter.type)};'); } else { - pb.writeln( - 'const ${getParamType(parameter)} ${parameter.name} = ${parameter.primitiveDefualtValue()};'); + if (parameterType.startsWith('List') && + parameter.type.typeArguments.isNotEmpty) { + final listBuilderBlock = + createListBuilderBlockForList(parseResult, parameter); + pb.writeln(listBuilderBlock); + } else { + pb.writeln( + '${getParamType(parameter)} ${parameter.name} = ${defualtValueOfType(parameter.type)};'); + } } } else { createConstructorInitializerForMethodParameter( diff --git a/tool/testcase_gen/lib/templated_generator.dart b/tool/testcase_gen/lib/templated_generator.dart index 2fdccf44e..8084cc3cc 100644 --- a/tool/testcase_gen/lib/templated_generator.dart +++ b/tool/testcase_gen/lib/templated_generator.dart @@ -368,10 +368,17 @@ await Future.delayed(const Duration(milliseconds: 500)); final parameterType = getParamType(parameter); if (parameterType == 'Uint8List') { pb.writeln( - '${getParamType(parameter)} ${parameter.name} = ${parameter.primitiveDefualtValue()};'); + '${getParamType(parameter)} ${parameter.name} = ${defualtValueOfType(parameter.type)};'); } else { - pb.writeln( - 'const ${getParamType(parameter)} ${parameter.name} = ${parameter.primitiveDefualtValue()};'); + if (parameterType.startsWith('List') && + parameter.type.typeArguments.isNotEmpty) { + final listBuilderBlock = + createListBuilderBlockForList(parseResult, parameter); + pb.writeln(listBuilderBlock); + } else { + pb.writeln( + '${getParamType(parameter)} ${parameter.name} = ${defualtValueOfType(parameter.type)};'); + } } } else { createConstructorInitializerForMethodParameter( diff --git a/windows/CMakeLists.txt b/windows/CMakeLists.txt index b678f5dde..3c0732e30 100644 --- a/windows/CMakeLists.txt +++ b/windows/CMakeLists.txt @@ -12,8 +12,8 @@ project(${PROJECT_NAME} LANGUAGES CXX) # not be changed set(PLUGIN_NAME "agora_rtc_engine_plugin") -set(IRIS_SDK_DOWNLOAD_URL "https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_Windows_Video_20240428_0641_423.zip") -set(IRIS_SDK_DOWNLOAD_NAME "iris_4.3.1-build.1_DCG_Windows") +set(IRIS_SDK_DOWNLOAD_URL "https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_Windows_Video_20240604_0456_441.zip") +set(IRIS_SDK_DOWNLOAD_NAME "iris_4.3.2-build.1_DCG_Windows") set(RTC_SDK_DOWNLOAD_NAME 
"Agora_Native_SDK_for_Windows_FULL") set(IRIS_SDK_VERSION "v3_6_2_fix.1")