From cc4ef40573bf4599c6986afeeb89c4db32cb1790 Mon Sep 17 00:00:00 2001 From: guoxianzhe <53285945+guoxianzhe@users.noreply.github.com> Date: Tue, 10 Dec 2024 11:33:12 +0800 Subject: [PATCH] feat: Upgrade native sdk 4.5.0 (#2087) Co-authored-by: Littlegnal <8847263+littleGnAl@users.noreply.github.com> Co-authored-by: littleGnAl Co-authored-by: littleGnAl Co-authored-by: guoxianzhe --- .github/workflows/build.yml | 46 +- .../integration-test-iris-artifacts.yml | 10 +- README.md | 4 +- android/build.gradle | 6 +- .../third_party/include/agora_rtc/AgoraBase.h | 2430 +++++++++++------ .../include/agora_rtc/AgoraMediaBase.h | 892 ++++-- .../third_party/include/agora_rtc/IAgoraLog.h | 1 + .../include/agora_rtc/IAgoraMediaEngine.h | 18 + .../include/agora_rtc/IAgoraMediaPlayer.h | 11 - .../agora_rtc/IAgoraMediaPlayerSource.h | 11 - .../include/agora_rtc/IAgoraMediaRecorder.h | 1 - .../include/agora_rtc/IAgoraParameter.h | 5 +- .../include/agora_rtc/IAgoraRtcEngine.h | 509 ++-- .../include/agora_rtc/IAgoraRtcEngineEx.h | 128 +- .../include/agora_rtc/rte_base/c/bridge.h | 25 + .../include/agora_rtc/rte_base/c/c_error.h | 48 + .../include/agora_rtc/rte_base/c/c_player.h | 615 +++++ .../include/agora_rtc/rte_base/c/c_rte.h | 142 + .../include/agora_rtc/rte_base/c/channel.h | 383 +++ .../include/agora_rtc/rte_base/c/common.h | 59 + .../agora_rtc/rte_base/c/device/audio.h | 40 + .../rte_base/c/device/audio_device_manager.h | 97 + .../agora_rtc/rte_base/c/device/device.h | 27 + .../agora_rtc/rte_base/c/device/video.h | 37 + .../rte_base/c/device/video_device_manager.h | 67 + .../include/agora_rtc/rte_base/c/handle.h | 155 ++ .../include/agora_rtc/rte_base/c/info.h | 27 + .../include/agora_rtc/rte_base/c/log.h | 48 + .../include/agora_rtc/rte_base/c/metadata.h | 92 + .../include/agora_rtc/rte_base/c/observer.h | 20 + .../include/agora_rtc/rte_base/c/old.h | 112 + .../include/agora_rtc/rte_base/c/options.h | 22 + .../agora_rtc/rte_base/c/stream/cdn_stream.h | 36 + 
.../rte_base/c/stream/local_cdn_stream.h | 59 + .../rte_base/c/stream/local_realtime_stream.h | 33 + .../rte_base/c/stream/local_stream.h | 38 + .../rte_base/c/stream/realtime_stream.h | 37 + .../rte_base/c/stream/remote_cdn_stream.h | 54 + .../c/stream/remote_realtime_stream.h | 32 + .../rte_base/c/stream/remote_stream.h | 49 + .../agora_rtc/rte_base/c/stream/stream.h | 289 ++ .../rte_base/c/track/camera_video_track.h | 34 + .../agora_rtc/rte_base/c/track/canvas.h | 87 + .../agora_rtc/rte_base/c/track/layout.h | 19 + .../rte_base/c/track/local_audio_track.h | 101 + .../agora_rtc/rte_base/c/track/local_track.h | 56 + .../rte_base/c/track/local_video_track.h | 25 + .../rte_base/c/track/mic_audio_track.h | 121 + .../rte_base/c/track/mixed_video_track.h | 35 + .../rte_base/c/track/remote_audio_track.h | 87 + .../agora_rtc/rte_base/c/track/remote_track.h | 43 + .../rte_base/c/track/remote_video_track.h | 21 + .../rte_base/c/track/screen_video_track.h | 61 + .../agora_rtc/rte_base/c/track/track.h | 54 + .../agora_rtc/rte_base/c/track/video_track.h | 37 + .../include/agora_rtc/rte_base/c/track/view.h | 55 + .../agora_rtc/rte_base/c/user/local_user.h | 120 + .../agora_rtc/rte_base/c/user/remote_user.h | 77 + .../include/agora_rtc/rte_base/c/user/user.h | 84 + .../include/agora_rtc/rte_base/c/utils/buf.h | 65 + .../agora_rtc/rte_base/c/utils/frame.h | 35 + .../include/agora_rtc/rte_base/c/utils/rect.h | 24 + .../agora_rtc/rte_base/c/utils/string.h | 52 + .../include/agora_rtc/rte_base/c/utils/uuid.h | 23 + .../rte_base/rte_cpp_callback_utils.h | 200 ++ .../agora_rtc/rte_base/rte_cpp_canvas.h | 227 ++ .../agora_rtc/rte_base/rte_cpp_error.h | 83 + .../agora_rtc/rte_base/rte_cpp_player.h | 1007 +++++++ .../include/agora_rtc/rte_base/rte_cpp_rte.h | 403 +++ .../agora_rtc/rte_base/rte_cpp_stream.h | 30 + .../agora_rtc/rte_base/rte_cpp_string.h | 66 + .../third_party/include/agora_rtc/rte_cpp.h | 14 + .../cpp/third_party/include/iris/iris_base.h | 11 +- 
.../iris/iris_rtc_high_performance_c_api.h | 131 +- .../include/iris/iris_rtc_rendering_cxx.h | 4 - example/android/build.gradle | 2 +- .../components/stats_monitoring_widget.dart | 2 +- .../set_beauty_effect/set_beauty_effect.dart | 26 +- example/lib/main.dart | 5 +- example/web/index.html | 2 +- internal/deps_summary.txt | 22 +- ios/agora_rtc_engine.podspec | 4 +- lib/src/agora_base.dart | 597 +++- lib/src/agora_base.g.dart | 276 +- lib/src/agora_log.dart | 4 + lib/src/agora_log.g.dart | 1 + lib/src/agora_media_base.dart | 608 ++++- lib/src/agora_media_base.g.dart | 296 +- lib/src/agora_media_engine.dart | 20 +- lib/src/agora_media_player.dart | 15 +- lib/src/agora_media_player_source.dart | 6 +- lib/src/agora_media_player_types.dart | 2 +- lib/src/agora_rtc_engine.dart | 354 ++- lib/src/agora_rtc_engine.g.dart | 25 + lib/src/agora_rtc_engine_ex.dart | 35 +- lib/src/audio_device_manager.dart | 12 +- lib/src/binding/agora_media_engine_impl.dart | 17 + .../binding/agora_rtc_engine_event_impl.dart | 81 +- lib/src/binding/agora_rtc_engine_ex_impl.dart | 80 + lib/src/binding/agora_rtc_engine_impl.dart | 375 ++- .../call_api_event_handler_buffer_ext.dart | 151 +- .../binding/call_api_impl_params_json.dart | 44 + .../binding/call_api_impl_params_json.g.dart | 45 + lib/src/binding/event_handler_param_json.dart | 98 +- .../binding/event_handler_param_json.g.dart | 84 +- macos/agora_rtc_engine.podspec | 4 +- pubspec.yaml | 2 +- scripts/artifacts_version.sh | 8 +- scripts/iris_web_version.js | 4 +- test_shard/fake_test_app/android/build.gradle | 2 +- .../generated/event_ids_mapping_gen.dart | 16 +- .../mediaengine_fake_test.generated.dart | 83 + ...ideoframeobserver_testcases.generated.dart | 205 ++ ...audiopcmframesink_testcases.generated.dart | 2 + ...ideoframeobserver_testcases.generated.dart | 40 + .../mediarecorder_fake_test.generated.dart | 13 + .../rtcengine_fake_test.generated.dart | 778 ++++-- ..._metadataobserver_testcases.generated.dart | 2 + 
...ngineeventhandler_testcases.generated.dart | 144 +- .../rtcengineex_fake_test.generated.dart | 154 +- .../integration_test_app/android/build.gradle | 2 +- .../ios/Flutter/AppFrameworkInfo.plist | 2 +- test_shard/integration_test_app/ios/Podfile | 2 +- .../ios/Runner.xcodeproj/project.pbxproj | 14 +- .../xcshareddata/xcschemes/Runner.xcscheme | 2 +- .../ios/Runner/Info.plist | 10 +- test_shard/iris_tester/android/build.gradle | 2 +- .../iris_tester/example/android/build.gradle | 2 +- .../rendering_test/android/build.gradle | 2 +- tool/terra/package.json | 2 +- tool/terra/terra_config_main.yaml | 26 +- windows/CMakeLists.txt | 4 +- 132 files changed, 12894 insertions(+), 2129 deletions(-) create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/bridge.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/c_error.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/c_player.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/c_rte.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/channel.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/common.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/audio.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/audio_device_manager.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/device.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/video.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/video_device_manager.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/handle.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/info.h create mode 100644 
android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/log.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/metadata.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/observer.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/old.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/options.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/cdn_stream.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/local_cdn_stream.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/local_realtime_stream.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/local_stream.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/realtime_stream.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/remote_cdn_stream.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/remote_realtime_stream.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/remote_stream.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/stream.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/camera_video_track.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/canvas.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/layout.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/local_audio_track.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/local_track.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/local_video_track.h 
create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/mic_audio_track.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/mixed_video_track.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/remote_audio_track.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/remote_track.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/remote_video_track.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/screen_video_track.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/track.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/video_track.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/view.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/user/local_user.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/user/remote_user.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/user/user.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/buf.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/frame.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/rect.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/string.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/uuid.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_callback_utils.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_canvas.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_error.h create mode 
100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_player.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_rte.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_stream.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_string.h create mode 100644 android/src/main/cpp/third_party/include/agora_rtc/rte_cpp.h diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 9f2ea952b..39c0d7052 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -86,7 +86,7 @@ jobs: strategy: fail-fast: false matrix: - version: ["3.7.0", "3.x"] + version: ["3.7.0"] runs-on: ubuntu-latest timeout-minutes: 120 env: @@ -124,7 +124,7 @@ jobs: fail-fast: false matrix: version: ["3.7.0", "3.16"] - runs-on: macos-12 + runs-on: macos-latest timeout-minutes: 120 env: TEST_APP_ID: ${{ secrets.MY_APP_ID }} @@ -136,8 +136,28 @@ jobs: cache: true - uses: futureware-tech/simulator-action@v3 with: - model: 'iPhone 14 Pro Max' + model: 'iPhone 15' - run: bash ci/run_flutter_integration_test_ios.sh + - name: Get ios crash logs + if: always() + run: | + sleep 30 + + mkdir logs-ios + + CRASH_DIR="${HOME}/Library/Logs/DiagnosticReports/" + OUTPUT_CRASH_DIR=./logs-ios/crash + mkdir -p ${OUTPUT_CRASH_DIR} + + # Copy all files + cp -RP $CRASH_DIR* $OUTPUT_CRASH_DIR + + - name: Upload ios logs + uses: actions/upload-artifact@v4 + if: always() + with: + name: logs-ios-${{ matrix.version }} + path: logs-ios/* integration_test_macos: name: Run Flutter macOS Integration Tests @@ -145,8 +165,8 @@ jobs: strategy: fail-fast: false matrix: - version: ["3.7.0", "3.x"] - runs-on: macos-12 + version: ["3.7.0"] + runs-on: macos-latest timeout-minutes: 120 env: TEST_APP_ID: ${{ secrets.MY_APP_ID }} @@ -191,7 +211,7 @@ jobs: strategy: fail-fast: false matrix: - version: ["3.7.0", "3.x"] + version: ["3.7.0"] runs-on: windows-2019 timeout-minutes: 
120 env: @@ -246,7 +266,7 @@ jobs: strategy: fail-fast: false matrix: - version: ["3.7.12", "3.x"] # Need 3.7.12 to build with Gradle 8.x https://github.com/flutter/flutter/issues/124838 + version: ["3.7.12"] # Need 3.7.12 to build with Gradle 8.x https://github.com/flutter/flutter/issues/124838 runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 @@ -269,7 +289,7 @@ jobs: strategy: fail-fast: false matrix: - version: ["3.7.12", "3.x"] # Need 3.7.12 to build with Gradle 8.x https://github.com/flutter/flutter/issues/124838 + version: ["3.7.12"] # Need 3.7.12 to build with Gradle 8.x https://github.com/flutter/flutter/issues/124838 runs-on: windows-2019 steps: - uses: actions/checkout@v3 @@ -293,8 +313,8 @@ jobs: strategy: fail-fast: false matrix: - version: ["3.7.0", "3.x"] - runs-on: macos-12 + version: ["3.7.0"] + runs-on: macos-latest timeout-minutes: 120 steps: - uses: actions/checkout@v3 @@ -339,7 +359,7 @@ jobs: strategy: fail-fast: false matrix: - version: ["3.7.0", "3.x"] + version: ["3.7.0"] runs-on: ubuntu-latest timeout-minutes: 120 steps: @@ -457,7 +477,7 @@ jobs: --iris-ios-cdn-url=${IRIS_CDN_URL_IOS} - uses: futureware-tech/simulator-action@v3 with: - model: 'iPhone 14 Pro Max' + model: 'iPhone 15' - run: bash ci/rendering_test_ios.sh - uses: actions/upload-artifact@v3 @@ -472,7 +492,7 @@ jobs: strategy: matrix: version: ['3.x'] - runs-on: macos-12 + runs-on: macos-latest timeout-minutes: 120 env: TEST_APP_ID: ${{ secrets.MY_APP_ID }} diff --git a/.github/workflows/integration-test-iris-artifacts.yml b/.github/workflows/integration-test-iris-artifacts.yml index 421df45cc..16671f1e0 100644 --- a/.github/workflows/integration-test-iris-artifacts.yml +++ b/.github/workflows/integration-test-iris-artifacts.yml @@ -20,7 +20,7 @@ jobs: strategy: matrix: version: ['2.10.5', '3.0.0'] - runs-on: macos-12 + runs-on: macos-latest timeout-minutes: 120 env: TEST_APP_ID: ${{ secrets.MY_APP_ID }} @@ -75,7 +75,7 @@ jobs: strategy: matrix: version: ['2.10.5', 
'3.0.0'] - runs-on: macos-12 + runs-on: macos-latest timeout-minutes: 120 env: TEST_APP_ID: ${{ secrets.MY_APP_ID }} @@ -85,9 +85,9 @@ jobs: with: flutter-version: ${{ matrix.version }} cache: true - - uses: futureware-tech/simulator-action@v1 + - uses: futureware-tech/simulator-action@v3 with: - model: 'iPhone 13 Pro Max' + model: 'iPhone 15' - name: Checkout hoe uses: actions/checkout@v3 with: @@ -119,7 +119,7 @@ jobs: strategy: matrix: version: ['2.10.5', '3.0.0'] - runs-on: macos-12 + runs-on: macos-latest timeout-minutes: 120 env: TEST_APP_ID: ${{ secrets.MY_APP_ID }} diff --git a/README.md b/README.md index 6b5e7b36d..ef4a21127 100644 --- a/README.md +++ b/README.md @@ -89,7 +89,7 @@ Download the `iris_web`(see the link below) artifact and include it as a ` ``` -Download: https://download.agora.io/sdk/release/iris-web-rtc_n440_w4220_0.8.0.js +Download: https://download.agora.io/sdk/release/iris-web-rtc_n450_w4220_0.8.6.js **For Testing Purposes** @@ -101,7 +101,7 @@ You can directly depend on the Agora CDN for testing purposes: ... ... 
- + ``` diff --git a/android/build.gradle b/android/build.gradle index 4791d1cac..88ac432fe 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -57,9 +57,9 @@ dependencies { if (isDev(project)) { api fileTree(dir: "libs", include: ["*.jar"]) } else { - api 'io.agora.rtc:iris-rtc:4.3.2-build.1' - api 'io.agora.rtc:full-sdk:4.3.2' - api 'io.agora.rtc:full-screen-sharing:4.3.2' + api 'io.agora.rtc:iris-rtc:4.5.0-build.1' + api 'io.agora.rtc:agora-special-full:4.5.0.1' + api 'io.agora.rtc:full-screen-sharing:4.5.0.1' } } diff --git a/android/src/main/cpp/third_party/include/agora_rtc/AgoraBase.h b/android/src/main/cpp/third_party/include/agora_rtc/AgoraBase.h index fc74e5841..c3bfa34cb 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/AgoraBase.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/AgoraBase.h @@ -43,6 +43,8 @@ #define __deprecated +#define AGORA_CPP_INTERNAL_API extern + #elif defined(__APPLE__) #include @@ -51,6 +53,8 @@ #define AGORA_CPP_API __attribute__((visibility("default"))) #define AGORA_CALL +#define AGORA_CPP_INTERNAL_API __attribute__((visibility("hidden"))) + #elif defined(__ANDROID__) || defined(__linux__) #define AGORA_API extern "C" __attribute__((visibility("default"))) @@ -59,6 +63,8 @@ #define __deprecated +#define AGORA_CPP_INTERNAL_API __attribute__((visibility("hidden"))) + #else // !_WIN32 && !__APPLE__ && !(__ANDROID__ || __linux__) #define AGORA_API extern "C" @@ -553,7 +559,8 @@ enum ERROR_CODE_TYPE { /** * 101: The App ID is invalid, usually because the data format of the App ID is incorrect. * - * Solution: Check the data format of your App ID. Ensure that you use the correct App ID to initialize the Agora service. + * Solution: Check the data format of your App ID. Ensure that you use the correct App ID to + * initialize the Agora service. 
*/ ERR_INVALID_APP_ID = 101, /** @@ -572,9 +579,9 @@ enum ERROR_CODE_TYPE { * - Timeout for token authorization: Once a token is generated, you must use it to access the * Agora service within 24 hours. Otherwise, the token times out and you can no longer use it. * - The token privilege expires: To generate a token, you need to set a timestamp for the token - * privilege to expire. For example, If you set it as seven days, the token expires seven days after - * its usage. In that case, you can no longer access the Agora service. The users cannot make calls, - * or are kicked out of the channel. + * privilege to expire. For example, If you set it as seven days, the token expires seven days + * after its usage. In that case, you can no longer access the Agora service. The users cannot + * make calls, or are kicked out of the channel. * * Solution: Regardless of whether token authorization times out or the token privilege expires, * you need to generate a new token on your server, and try to join the channel. @@ -582,19 +589,19 @@ enum ERROR_CODE_TYPE { ERR_TOKEN_EXPIRED = 109, /** * 110: The token is invalid, usually for one of the following reasons: - * - Did not provide a token when joining a channel in a situation where the project has enabled the - * App Certificate. + * - Did not provide a token when joining a channel in a situation where the project has enabled + * the App Certificate. * - Tried to join a channel with a token in a situation where the project has not enabled the App * Certificate. - * - The App ID, user ID and channel name that you use to generate the token on the server do not match - * those that you use when joining a channel. + * - The App ID, user ID and channel name that you use to generate the token on the server do not + * match those that you use when joining a channel. * * Solution: - * - Before joining a channel, check whether your project has enabled the App certificate. 
If yes, you - * must provide a token when joining a channel; if no, join a channel without a token. - * - When using a token to join a channel, ensure that the App ID, user ID, and channel name that you - * use to generate the token is the same as the App ID that you use to initialize the Agora service, and - * the user ID and channel name that you use to join the channel. + * - Before joining a channel, check whether your project has enabled the App certificate. If yes, + * you must provide a token when joining a channel; if no, join a channel without a token. + * - When using a token to join a channel, ensure that the App ID, user ID, and channel name that + * you use to generate the token is the same as the App ID that you use to initialize the Agora + * service, and the user ID and channel name that you use to join the channel. */ ERR_INVALID_TOKEN = 110, /** @@ -666,13 +673,15 @@ enum ERROR_CODE_TYPE { ERR_LICENSE_CREDENTIAL_INVALID = 131, /** - * 134: The user account is invalid, usually because the data format of the user account is incorrect. + * 134: The user account is invalid, usually because the data format of the user account is + * incorrect. */ ERR_INVALID_USER_ACCOUNT = 134, /** 157: The necessary dynamical library is not integrated. For example, if you call - * the \ref agora::rtc::IRtcEngine::enableDeepLearningDenoise "enableDeepLearningDenoise" but do not integrate the dynamical - * library for the deep-learning noise reduction into your project, the SDK reports this error code. + * the \ref agora::rtc::IRtcEngine::enableDeepLearningDenoise "enableDeepLearningDenoise" but do + * not integrate the dynamical library for the deep-learning noise reduction into your project, + * the SDK reports this error code. 
* */ ERR_MODULE_NOT_FOUND = 157, @@ -692,7 +701,7 @@ enum ERROR_CODE_TYPE { ERR_CERT_REQUEST = 168, // PcmSend Error num - ERR_PCMSEND_FORMAT = 200, // unsupport pcm format + ERR_PCMSEND_FORMAT = 200, // unsupport pcm format ERR_PCMSEND_BUFFEROVERFLOW = 201, // buffer overflow, the pcm send rate too quickly /// @cond @@ -746,27 +755,27 @@ enum ERROR_CODE_TYPE { enum LICENSE_ERROR_TYPE { /** * 1: Invalid license - */ + */ LICENSE_ERR_INVALID = 1, /** * 2: License expired - */ + */ LICENSE_ERR_EXPIRE = 2, /** * 3: Exceed license minutes limit - */ + */ LICENSE_ERR_MINUTES_EXCEED = 3, /** * 4: License use in limited period - */ + */ LICENSE_ERR_LIMITED_PERIOD = 4, /** * 5: Same license used in different devices at the same time - */ + */ LICENSE_ERR_DIFF_DEVICES = 5, /** * 99: SDK internal error - */ + */ LICENSE_ERR_INTERNAL = 99, }; @@ -839,9 +848,9 @@ enum USER_OFFLINE_REASON_TYPE { */ USER_OFFLINE_QUIT = 0, /** - * 1: The SDK times out and the user drops offline because no data packet was received within a certain - * period of time. If a user quits the call and the message is not passed to the SDK (due to an - * unreliable channel), the SDK assumes that the user drops offline. + * 1: The SDK times out and the user drops offline because no data packet was received within a + * certain period of time. If a user quits the call and the message is not passed to the SDK (due + * to an unreliable channel), the SDK assumes that the user drops offline. */ USER_OFFLINE_DROPPED = 1, /** @@ -864,7 +873,7 @@ enum INTERFACE_ID_TYPE { AGORA_IID_STATE_SYNC = 13, AGORA_IID_META_SERVICE = 14, AGORA_IID_MUSIC_CONTENT_CENTER = 15, - AGORA_IID_H265_TRANSCODER = 16, + AGORA_IID_H265_TRANSCODER = 16, }; /** @@ -993,7 +1002,6 @@ enum FRAME_HEIGHT { FRAME_HEIGHT_540 = 540, }; - /** * Types of the video frame. */ @@ -1026,9 +1034,9 @@ enum ORIENTATION_MODE { ORIENTATION_MODE_ADAPTIVE = 0, /** * 1: Landscape mode. In this mode, the SDK always outputs videos in landscape (horizontal) mode. 
- * If the captured video is in portrait mode, the video encoder crops it to fit the output. Applies - * to situations where the receiving end cannot process the rotational information. For example, - * CDN live streaming. + * If the captured video is in portrait mode, the video encoder crops it to fit the output. + * Applies to situations where the receiving end cannot process the rotational information. For + * example, CDN live streaming. */ ORIENTATION_MODE_FIXED_LANDSCAPE = 1, /** @@ -1045,9 +1053,16 @@ enum ORIENTATION_MODE { */ enum DEGRADATION_PREFERENCE { /** - * 0: (Default) Prefers to reduce the video frame rate while maintaining video quality during video - * encoding under limited bandwidth. This degradation preference is suitable for scenarios where - * video quality is prioritized. + * -1: (Default) SDK uses degradation preference according to setVideoScenario API settings, real-time network state and other relevant data information. + * If API setVideoScenario set video scenario to APPLICATION_SCENARIO_LIVESHOW, then MAINTAIN_BALANCED is used. If not, then MAINTAIN_RESOLUTION is used. + * Also if network state has changed, SDK may change this parameter between MAINTAIN_FRAMERATE态MAINTAIN_BALANCED and MAINTAIN_RESOLUTION automatically to get the best QOE. + * We recommend using this option. + */ + MAINTAIN_AUTO = -1, + /** + * 0: (Deprecated) Prefers to reduce the video frame rate while maintaining video quality during + * video encoding under limited bandwidth. This degradation preference is suitable for scenarios + * where video quality is prioritized. * @note In the COMMUNICATION channel profile, the resolution of the video sent may change, so * remote users need to handle this issue. */ @@ -1060,9 +1075,9 @@ enum DEGRADATION_PREFERENCE { MAINTAIN_FRAMERATE = 1, /** * 2: Reduces the video frame rate and video quality simultaneously during video encoding under - * limited bandwidth. 
MAINTAIN_BALANCED has a lower reduction than MAINTAIN_QUALITY and MAINTAIN_FRAMERATE, - * and this preference is suitable for scenarios where both smoothness and video quality are a - * priority. + * limited bandwidth. MAINTAIN_BALANCED has a lower reduction than MAINTAIN_RESOLUTION and + * MAINTAIN_FRAMERATE, and this preference is suitable for scenarios where both smoothness and + * video quality are a priority. */ MAINTAIN_BALANCED = 2, /** @@ -1149,6 +1164,11 @@ enum VIDEO_CODEC_CAPABILITY_LEVEL { * The video codec types. */ enum VIDEO_CODEC_TYPE { + /** + * 0: (Default) SDK will automatically adjust the codec type according to country and region or real-time network state and other relevant data information. + * Also if network state is changed, SDK may change codec automatically to get the best QOE. + * We recommend use this option. + */ VIDEO_CODEC_NONE = 0, /** * 1: Standard VP8. @@ -1164,11 +1184,13 @@ enum VIDEO_CODEC_TYPE { VIDEO_CODEC_H265 = 3, /** * 6: Generic. This type is used for transmitting raw video data, such as encrypted video frames. - * The SDK returns this type of video frames in callbacks, and you need to decode and render the frames yourself. + * The SDK returns this type of video frames in callbacks, and you need to decode and render the + * frames yourself. */ VIDEO_CODEC_GENERIC = 6, /** * 7: Generic H264. + * @deprecated This codec type is deprecated. */ VIDEO_CODEC_GENERIC_H264 = 7, /** @@ -1231,7 +1253,8 @@ struct SenderOptions { */ TCcMode ccMode; /** - * The codec type used for the encoded images: \ref agora::rtc::VIDEO_CODEC_TYPE "VIDEO_CODEC_TYPE". + * The codec type used for the encoded images: \ref agora::rtc::VIDEO_CODEC_TYPE + * "VIDEO_CODEC_TYPE". */ VIDEO_CODEC_TYPE codecType; @@ -1243,12 +1266,14 @@ struct SenderOptions { * - \ref agora::rtc::STANDARD_BITRATE "STANDARD_BITRATE": (Recommended) Standard bitrate. * - Communication profile: The encoding bitrate equals the base bitrate. 
* - Live-broadcast profile: The encoding bitrate is twice the base bitrate. - * - \ref agora::rtc::COMPATIBLE_BITRATE "COMPATIBLE_BITRATE": Compatible bitrate. The bitrate stays the same + * - \ref agora::rtc::COMPATIBLE_BITRATE "COMPATIBLE_BITRATE": Compatible bitrate. The bitrate + stays the same * regardless of the profile. * * The Communication profile prioritizes smoothness, while the Live Broadcast * profile prioritizes video quality (requiring a higher bitrate). Agora - * recommends setting the bitrate mode as \ref agora::rtc::STANDARD_BITRATE "STANDARD_BITRATE" or simply to + * recommends setting the bitrate mode as \ref agora::rtc::STANDARD_BITRATE "STANDARD_BITRATE" or + simply to * address this difference. * * The following table lists the recommended video encoder configurations, @@ -1256,7 +1281,8 @@ struct SenderOptions { * bitrate based on this table. If the bitrate you set is beyond the proper * range, the SDK automatically sets it to within the range. - | Resolution | Frame Rate (fps) | Base Bitrate (Kbps, for Communication) | Live Bitrate (Kbps, for Live Broadcast)| + | Resolution | Frame Rate (fps) | Base Bitrate (Kbps, for Communication) | Live + Bitrate (Kbps, for Live Broadcast)| |------------------------|------------------|----------------------------------------|----------------------------------------| | 160 × 120 | 15 | 65 | 130 | | 120 × 120 | 15 | 50 | 100 | @@ -1293,10 +1319,7 @@ struct SenderOptions { */ int targetBitrate; - SenderOptions() - : ccMode(CC_ENABLED), - codecType(VIDEO_CODEC_H265), - targetBitrate(6500) {} + SenderOptions() : ccMode(CC_ENABLED), codecType(VIDEO_CODEC_H265), targetBitrate(6500) {} }; /** @@ -1343,6 +1366,10 @@ enum AUDIO_CODEC_TYPE { * 12: LPCNET. */ AUDIO_CODEC_LPCNET = 12, + /** + * 13: Opus codec, supporting 3 to 8 channels audio. 
+ */ + AUDIO_CODEC_OPUSMC = 13, }; /** @@ -1355,8 +1382,8 @@ enum AUDIO_ENCODING_TYPE { */ AUDIO_ENCODING_TYPE_AAC_16000_LOW = 0x010101, /** - * AAC encoding format, 16000 Hz sampling rate, medium sound quality. A file with an audio duration - * of 10 minutes is approximately 2 MB after encoding. + * AAC encoding format, 16000 Hz sampling rate, medium sound quality. A file with an audio + * duration of 10 minutes is approximately 2 MB after encoding. */ AUDIO_ENCODING_TYPE_AAC_16000_MEDIUM = 0x010102, /** @@ -1365,18 +1392,18 @@ enum AUDIO_ENCODING_TYPE { */ AUDIO_ENCODING_TYPE_AAC_32000_LOW = 0x010201, /** - * AAC encoding format, 32000 Hz sampling rate, medium sound quality. A file with an audio duration - * of 10 minutes is approximately 2 MB after encoding. + * AAC encoding format, 32000 Hz sampling rate, medium sound quality. A file with an audio + * duration of 10 minutes is approximately 2 MB after encoding. */ AUDIO_ENCODING_TYPE_AAC_32000_MEDIUM = 0x010202, /** - * AAC encoding format, 32000 Hz sampling rate, high sound quality. A file with an audio duration of - * 10 minutes is approximately 3.5 MB after encoding. + * AAC encoding format, 32000 Hz sampling rate, high sound quality. A file with an audio duration + * of 10 minutes is approximately 3.5 MB after encoding. */ AUDIO_ENCODING_TYPE_AAC_32000_HIGH = 0x010203, /** - * AAC encoding format, 48000 Hz sampling rate, medium sound quality. A file with an audio duration - * of 10 minutes is approximately 2 MB after encoding. + * AAC encoding format, 48000 Hz sampling rate, medium sound quality. A file with an audio + * duration of 10 minutes is approximately 2 MB after encoding. */ AUDIO_ENCODING_TYPE_AAC_48000_MEDIUM = 0x010302, /** @@ -1390,18 +1417,18 @@ enum AUDIO_ENCODING_TYPE { */ AUDIO_ENCODING_TYPE_OPUS_16000_LOW = 0x020101, /** - * OPUS encoding format, 16000 Hz sampling rate, medium sound quality. A file with an audio duration - * of 10 minutes is approximately 2 MB after encoding. 
+ * OPUS encoding format, 16000 Hz sampling rate, medium sound quality. A file with an audio + * duration of 10 minutes is approximately 2 MB after encoding. */ AUDIO_ENCODING_TYPE_OPUS_16000_MEDIUM = 0x020102, /** - * OPUS encoding format, 48000 Hz sampling rate, medium sound quality. A file with an audio duration - * of 10 minutes is approximately 2 MB after encoding. + * OPUS encoding format, 48000 Hz sampling rate, medium sound quality. A file with an audio + * duration of 10 minutes is approximately 2 MB after encoding. */ AUDIO_ENCODING_TYPE_OPUS_48000_MEDIUM = 0x020302, /** - * OPUS encoding format, 48000 Hz sampling rate, high sound quality. A file with an audio duration of - * 10 minutes is approximately 3.5 MB after encoding. + * OPUS encoding format, 48000 Hz sampling rate, high sound quality. A file with an audio duration + * of 10 minutes is approximately 3.5 MB after encoding. */ AUDIO_ENCODING_TYPE_OPUS_48000_HIGH = 0x020303, }; @@ -1411,13 +1438,13 @@ enum AUDIO_ENCODING_TYPE { */ enum WATERMARK_FIT_MODE { /** - * Use the `positionInLandscapeMode` and `positionInPortraitMode` values you set in #WatermarkOptions. - * The settings in `WatermarkRatio` are invalid. + * Use the `positionInLandscapeMode` and `positionInPortraitMode` values you set in + * #WatermarkOptions. The settings in `WatermarkRatio` are invalid. */ FIT_MODE_COVER_POSITION, /** - * Use the value you set in `WatermarkRatio`. The settings in `positionInLandscapeMode` and `positionInPortraitMode` - * in `WatermarkOptions` are invalid. + * Use the value you set in `WatermarkRatio`. The settings in `positionInLandscapeMode` and + * `positionInPortraitMode` in `WatermarkOptions` are invalid. */ FIT_MODE_USE_IMAGE_RATIO }; @@ -1426,9 +1453,7 @@ enum WATERMARK_FIT_MODE { * The advanced settings of encoded audio frame. 
*/ struct EncodedAudioFrameAdvancedSettings { - EncodedAudioFrameAdvancedSettings() - : speech(true), - sendEvenIfEmpty(true) {} + EncodedAudioFrameAdvancedSettings() : speech(true), sendEvenIfEmpty(true) {} /** * Determines whether the audio source is speech. @@ -1449,19 +1474,19 @@ struct EncodedAudioFrameAdvancedSettings { */ struct EncodedAudioFrameInfo { EncodedAudioFrameInfo() - : codec(AUDIO_CODEC_AACLC), - sampleRateHz(0), - samplesPerChannel(0), - numberOfChannels(0), - captureTimeMs(0) {} + : codec(AUDIO_CODEC_AACLC), + sampleRateHz(0), + samplesPerChannel(0), + numberOfChannels(0), + captureTimeMs(0) {} EncodedAudioFrameInfo(const EncodedAudioFrameInfo& rhs) - : codec(rhs.codec), - sampleRateHz(rhs.sampleRateHz), - samplesPerChannel(rhs.samplesPerChannel), - numberOfChannels(rhs.numberOfChannels), - advancedSettings(rhs.advancedSettings), - captureTimeMs(rhs.captureTimeMs) {} + : codec(rhs.codec), + sampleRateHz(rhs.sampleRateHz), + samplesPerChannel(rhs.samplesPerChannel), + numberOfChannels(rhs.numberOfChannels), + advancedSettings(rhs.advancedSettings), + captureTimeMs(rhs.captureTimeMs) {} /** * The audio codec: #AUDIO_CODEC_TYPE. */ @@ -1494,14 +1519,15 @@ struct EncodedAudioFrameInfo { * The definition of the AudioPcmDataInfo struct. */ struct AudioPcmDataInfo { - AudioPcmDataInfo() : samplesPerChannel(0), channelNum(0), samplesOut(0), elapsedTimeMs(0), ntpTimeMs(0) {} + AudioPcmDataInfo() + : samplesPerChannel(0), channelNum(0), samplesOut(0), elapsedTimeMs(0), ntpTimeMs(0) {} AudioPcmDataInfo(const AudioPcmDataInfo& rhs) - : samplesPerChannel(rhs.samplesPerChannel), - channelNum(rhs.channelNum), - samplesOut(rhs.samplesOut), - elapsedTimeMs(rhs.elapsedTimeMs), - ntpTimeMs(rhs.ntpTimeMs) {} + : samplesPerChannel(rhs.samplesPerChannel), + channelNum(rhs.channelNum), + samplesOut(rhs.samplesOut), + elapsedTimeMs(rhs.elapsedTimeMs), + ntpTimeMs(rhs.ntpTimeMs) {} /** * The sample count of the PCM data that you expect. 
@@ -1535,7 +1561,7 @@ enum H264PacketizeMode { /** * Single NAL unit mode. See RFC 6184. */ - SingleNalUnit, // Mode 0 - only single NALU allowed + SingleNalUnit, // Mode 0 - only single NALU allowed }; /** @@ -1543,74 +1569,98 @@ enum H264PacketizeMode { */ enum VIDEO_STREAM_TYPE { /** - * 0: The high-quality video stream, which has a higher resolution and bitrate. + * 0: The high-quality video stream, which has the highest resolution and bitrate. */ VIDEO_STREAM_HIGH = 0, /** - * 1: The low-quality video stream, which has a lower resolution and bitrate. + * 1: The low-quality video stream, which has the lowest resolution and bitrate. */ VIDEO_STREAM_LOW = 1, + /** + * 4: The video stream of layer_1, which has a lower resolution and bitrate than VIDEO_STREAM_HIGH. + */ + VIDEO_STREAM_LAYER_1 = 4, + /** + * 5: The video stream of layer_2, which has a lower resolution and bitrate than VIDEO_STREAM_LAYER_1. + */ + VIDEO_STREAM_LAYER_2 = 5, + /** + * 6: The video stream of layer_3, which has a lower resolution and bitrate than VIDEO_STREAM_LAYER_2. + */ + VIDEO_STREAM_LAYER_3 = 6, + /** + * 7: The video stream of layer_4, which has a lower resolution and bitrate than VIDEO_STREAM_LAYER_3. + */ + VIDEO_STREAM_LAYER_4 = 7, + /** + * 8: The video stream of layer_5, which has a lower resolution and bitrate than VIDEO_STREAM_LAYER_4. + */ + VIDEO_STREAM_LAYER_5 = 8, + /** + * 9: The video stream of layer_6, which has a lower resolution and bitrate than VIDEO_STREAM_LAYER_5. + */ + VIDEO_STREAM_LAYER_6 = 9, + }; struct VideoSubscriptionOptions { - /** - * The type of the video stream to subscribe to. - * - * The default value is `VIDEO_STREAM_HIGH`, which means the high-quality - * video stream. - */ - Optional type; - /** - * Whether to subscribe to encoded video data only: - * - `true`: Subscribe to encoded video data only. - * - `false`: (Default) Subscribe to decoded video data. - */ - Optional encodedFrameOnly; + /** + * The type of the video stream to subscribe to. 
+ * + * The default value is `VIDEO_STREAM_HIGH`, which means the high-quality + * video stream. + */ + Optional type; + /** + * Whether to subscribe to encoded video data only: + * - `true`: Subscribe to encoded video data only. + * - `false`: (Default) Subscribe to decoded video data. + */ + Optional encodedFrameOnly; - VideoSubscriptionOptions() {} + VideoSubscriptionOptions() {} }; - /** The maximum length of the user account. */ -enum MAX_USER_ACCOUNT_LENGTH_TYPE -{ +enum MAX_USER_ACCOUNT_LENGTH_TYPE { /** The maximum length of the user account is 256 bytes. */ MAX_USER_ACCOUNT_LENGTH = 256 }; /** - * The definition of the EncodedVideoFrameInfo struct, which contains the information of the external encoded video frame. + * The definition of the EncodedVideoFrameInfo struct, which contains the information of the + * external encoded video frame. */ struct EncodedVideoFrameInfo { EncodedVideoFrameInfo() - : uid(0), - codecType(VIDEO_CODEC_H264), - width(0), - height(0), - framesPerSecond(0), - frameType(VIDEO_FRAME_TYPE_BLANK_FRAME), - rotation(VIDEO_ORIENTATION_0), - trackId(0), - captureTimeMs(0), - decodeTimeMs(0), - streamType(VIDEO_STREAM_HIGH), - presentationMs(-1) {} + : uid(0), + codecType(VIDEO_CODEC_H264), + width(0), + height(0), + framesPerSecond(0), + frameType(VIDEO_FRAME_TYPE_BLANK_FRAME), + rotation(VIDEO_ORIENTATION_0), + trackId(0), + captureTimeMs(0), + decodeTimeMs(0), + streamType(VIDEO_STREAM_HIGH), + presentationMs(-1) {} EncodedVideoFrameInfo(const EncodedVideoFrameInfo& rhs) - : uid(rhs.uid), - codecType(rhs.codecType), - width(rhs.width), - height(rhs.height), - framesPerSecond(rhs.framesPerSecond), - frameType(rhs.frameType), - rotation(rhs.rotation), - trackId(rhs.trackId), - captureTimeMs(rhs.captureTimeMs), - decodeTimeMs(rhs.decodeTimeMs), - streamType(rhs.streamType), - presentationMs(rhs.presentationMs) {} + : uid(rhs.uid), + codecType(rhs.codecType), + width(rhs.width), + height(rhs.height), + 
framesPerSecond(rhs.framesPerSecond), + frameType(rhs.frameType), + rotation(rhs.rotation), + trackId(rhs.trackId), + captureTimeMs(rhs.captureTimeMs), + decodeTimeMs(rhs.decodeTimeMs), + streamType(rhs.streamType), + presentationMs(rhs.presentationMs) {} EncodedVideoFrameInfo& operator=(const EncodedVideoFrameInfo& rhs) { if (this == &rhs) return *this; @@ -1634,7 +1684,8 @@ struct EncodedVideoFrameInfo { */ uid_t uid; /** - * The codec type of the local video stream. See #VIDEO_CODEC_TYPE. The default value is `VIDEO_CODEC_H265 (3)`. + * The codec type of the local video stream. See #VIDEO_CODEC_TYPE. The default value is + * `VIDEO_CODEC_H265 (3)`. */ VIDEO_CODEC_TYPE codecType; /** @@ -1682,33 +1733,40 @@ struct EncodedVideoFrameInfo { }; /** -* Video compression preference. -*/ + * Video compression preference. + */ enum COMPRESSION_PREFERENCE { /** - * (Default) Low latency is preferred, usually used in real-time communication where low latency is the number one priority. + * (Default) SDK uses compression preference according to setVideoScenario API settings, real-time network state and other relevant data information. + * If API setVideoScenario set video scenario to APPLICATION_SCENARIO_LIVESHOW, then PREFER_QUALITY is used. If not, then PREFER_LOW_LATENCY is used. + * Also if network state has changed, SDK may change this parameter between PREFER_QUALITY and PREFER_LOW_LATENCY automatically to get the best QOE. + * We recommend using this option. + */ + PREFER_COMPRESSION_AUTO = -1, + /** + * Prefer low latency, usually used in real-time communication where low latency is the number one priority. */ - PREFER_LOW_LATENCY, + PREFER_LOW_LATENCY = 0, /** - * Prefer quality in sacrifice of a degree of latency, usually around 30ms ~ 150ms, depends target fps + * Prefer quality in sacrifice of a degree of latency, usually around 30ms ~ 150ms, depends target fps */ - PREFER_QUALITY, + PREFER_QUALITY = 1, }; /** -* The video encoder type preference. 
-*/ + * The video encoder type preference. + */ enum ENCODING_PREFERENCE { /** - *Default . + *Default . */ PREFER_AUTO = -1, /** - * Software encoding. - */ + * Software encoding. + */ PREFER_SOFTWARE = 0, /** - * Hardware encoding + * Hardware encoding */ PREFER_HARDWARE = 1, }; @@ -1717,30 +1775,38 @@ enum ENCODING_PREFERENCE { * The definition of the AdvanceOptions struct. */ struct AdvanceOptions { - /** * The video encoder type preference.. */ ENCODING_PREFERENCE encodingPreference; /** - * Video compression preference. - */ + * Video compression preference. + */ COMPRESSION_PREFERENCE compressionPreference; + /** + * Whether to encode and send the alpha data to the remote when alpha data is present. + * The default value is false. + */ + bool encodeAlpha; + AdvanceOptions() : encodingPreference(PREFER_AUTO), - compressionPreference(PREFER_LOW_LATENCY) {} + compressionPreference(PREFER_COMPRESSION_AUTO), + encodeAlpha(false) {} AdvanceOptions(ENCODING_PREFERENCE encoding_preference, - COMPRESSION_PREFERENCE compression_preference) : + COMPRESSION_PREFERENCE compression_preference, + bool encode_alpha) : encodingPreference(encoding_preference), - compressionPreference(compression_preference) {} + compressionPreference(compression_preference), + encodeAlpha(encode_alpha) {} bool operator==(const AdvanceOptions& rhs) const { return encodingPreference == rhs.encodingPreference && - compressionPreference == rhs.compressionPreference; + compressionPreference == rhs.compressionPreference && + encodeAlpha == rhs.encodeAlpha; } - }; /** @@ -1761,6 +1827,41 @@ enum VIDEO_MIRROR_MODE_TYPE { VIDEO_MIRROR_MODE_DISABLED = 2, }; +#if defined(__APPLE__) && TARGET_OS_IOS +/** + * Camera capturer configuration for format type. + */ +enum CAMERA_FORMAT_TYPE { + /** 0: (Default) NV12. */ + CAMERA_FORMAT_NV12, + /** 1: BGRA. 
*/ + CAMERA_FORMAT_BGRA, +}; +#endif + +enum VIDEO_MODULE_TYPE { + /** Video capture module */ + VIDEO_MODULE_CAPTURER = 0, + /** Video software encoder module */ + VIDEO_MODULE_SOFTWARE_ENCODER = 1, + /** Video hardware encoder module */ + VIDEO_MODULE_HARDWARE_ENCODER = 2, + /** Video software decoder module */ + VIDEO_MODULE_SOFTWARE_DECODER = 3, + /** Video hardware decoder module */ + VIDEO_MODULE_HARDWARE_DECODER = 4, + /** Video render module */ + VIDEO_MODULE_RENDERER = 5, +}; + +enum HDR_CAPABILITY { + /** The result of static check is not reliable, by default */ + HDR_CAPABILITY_UNKNOWN = -1, + /** The module you query doesn't support HDR */ + HDR_CAPABILITY_UNSUPPORTED = 0, + /** The module you query supports HDR */ + HDR_CAPABILITY_SUPPORTED = 1, +}; /** Supported codec type bit mask. */ enum CODEC_CAP_MASK { @@ -1784,7 +1885,9 @@ struct CodecCapLevels { VIDEO_CODEC_CAPABILITY_LEVEL hwDecodingLevel; VIDEO_CODEC_CAPABILITY_LEVEL swDecodingLevel; - CodecCapLevels(): hwDecodingLevel(CODEC_CAPABILITY_LEVEL_UNSPECIFIED), swDecodingLevel(CODEC_CAPABILITY_LEVEL_UNSPECIFIED) {} + CodecCapLevels() + : hwDecodingLevel(CODEC_CAPABILITY_LEVEL_UNSPECIFIED), + swDecodingLevel(CODEC_CAPABILITY_LEVEL_UNSPECIFIED) {} }; /** The codec support information. */ @@ -1796,10 +1899,11 @@ struct CodecCapInfo { /** The codec capability level, estimated based on the device hardware.*/ CodecCapLevels codecLevels; - CodecCapInfo(): codecType(VIDEO_CODEC_NONE), codecCapMask(0) {} + CodecCapInfo() : codecType(VIDEO_CODEC_NONE), codecCapMask(0) {} }; -/** FocalLengthInfo contains the IDs of the front and rear cameras, along with the wide-angle types. */ +/** FocalLengthInfo contains the IDs of the front and rear cameras, along with the wide-angle types. + */ struct FocalLengthInfo { /** The camera direction. */ int cameraDirection; @@ -1826,21 +1930,22 @@ struct VideoEncoderConfiguration { /** * The bitrate (Kbps) of the video.
* - * Refer to the **Video Bitrate Table** below and set your bitrate. If you set a bitrate beyond the - * proper range, the SDK automatically adjusts it to a value within the range. You can also choose - * from the following options: + * Refer to the **Video Bitrate Table** below and set your bitrate. If you set a bitrate beyond + * the proper range, the SDK automatically adjusts it to a value within the range. You can also + * choose from the following options: * - * - #STANDARD_BITRATE: (Recommended) Standard bitrate mode. In this mode, the bitrates differ between - * the Live Broadcast and Communication profiles: + * - #STANDARD_BITRATE: (Recommended) Standard bitrate mode. In this mode, the bitrates differ + * between the Live Broadcast and Communication profiles: * - In the Communication profile, the video bitrate is the same as the base bitrate. * - In the Live Broadcast profile, the video bitrate is twice the base bitrate. - * - #COMPATIBLE_BITRATE: Compatible bitrate mode. The compatible bitrate mode. In this mode, the bitrate - * stays the same regardless of the profile. If you choose this mode for the Live Broadcast profile, - * the video frame rate may be lower than the set value. + * - #COMPATIBLE_BITRATE: Compatible bitrate mode. The compatible bitrate mode. In this mode, the + * bitrate stays the same regardless of the profile. If you choose this mode for the Live + * Broadcast profile, the video frame rate may be lower than the set value. * - * Agora uses different video codecs for different profiles to optimize the user experience. For example, - * the communication profile prioritizes the smoothness while the live-broadcast profile prioritizes the - * video quality (a higher bitrate). Therefore, We recommend setting this parameter as #STANDARD_BITRATE. + * Agora uses different video codecs for different profiles to optimize the user experience. 
For + * example, the communication profile prioritizes the smoothness while the live-broadcast profile + * prioritizes the video quality (a higher bitrate). Therefore, We recommend setting this + * parameter as #STANDARD_BITRATE. * * | Resolution | Frame Rate (fps) | Base Bitrate (Kbps) | Live Bitrate (Kbps)| * |------------------------|------------------|---------------------|--------------------| @@ -1908,7 +2013,8 @@ struct VideoEncoderConfiguration { /** * The mirror mode is disabled by default - * If mirror_type is set to VIDEO_MIRROR_MODE_ENABLED, then the video frame would be mirrored before encoding. + * If mirror_type is set to VIDEO_MIRROR_MODE_ENABLED, then the video frame would be mirrored + * before encoding. */ VIDEO_MIRROR_MODE_TYPE mirrorMode; @@ -1924,9 +2030,9 @@ struct VideoEncoderConfiguration { bitrate(b), minBitrate(DEFAULT_MIN_BITRATE), orientationMode(m), - degradationPreference(MAINTAIN_QUALITY), + degradationPreference(MAINTAIN_AUTO), mirrorMode(mirror), - advanceOptions(PREFER_AUTO, PREFER_LOW_LATENCY) {} + advanceOptions(PREFER_AUTO, PREFER_COMPRESSION_AUTO, false) {} VideoEncoderConfiguration(int width, int height, int f, int b, ORIENTATION_MODE m, VIDEO_MIRROR_MODE_TYPE mirror = VIDEO_MIRROR_MODE_DISABLED) : codecType(VIDEO_CODEC_NONE), dimensions(width, height), @@ -1934,19 +2040,19 @@ struct VideoEncoderConfiguration { bitrate(b), minBitrate(DEFAULT_MIN_BITRATE), orientationMode(m), - degradationPreference(MAINTAIN_QUALITY), + degradationPreference(MAINTAIN_AUTO), mirrorMode(mirror), - advanceOptions(PREFER_AUTO, PREFER_LOW_LATENCY) {} + advanceOptions(PREFER_AUTO, PREFER_COMPRESSION_AUTO, false) {} VideoEncoderConfiguration(const VideoEncoderConfiguration& config) - : codecType(config.codecType), - dimensions(config.dimensions), - frameRate(config.frameRate), - bitrate(config.bitrate), - minBitrate(config.minBitrate), - orientationMode(config.orientationMode), - degradationPreference(config.degradationPreference), - 
mirrorMode(config.mirrorMode), - advanceOptions(config.advanceOptions) {} + : codecType(config.codecType), + dimensions(config.dimensions), + frameRate(config.frameRate), + bitrate(config.bitrate), + minBitrate(config.minBitrate), + orientationMode(config.orientationMode), + degradationPreference(config.degradationPreference), + mirrorMode(config.mirrorMode), + advanceOptions(config.advanceOptions) {} VideoEncoderConfiguration() : codecType(VIDEO_CODEC_NONE), dimensions(FRAME_WIDTH_960, FRAME_HEIGHT_540), @@ -1954,9 +2060,9 @@ struct VideoEncoderConfiguration { bitrate(STANDARD_BITRATE), minBitrate(DEFAULT_MIN_BITRATE), orientationMode(ORIENTATION_MODE_ADAPTIVE), - degradationPreference(MAINTAIN_QUALITY), + degradationPreference(MAINTAIN_AUTO), mirrorMode(VIDEO_MIRROR_MODE_DISABLED), - advanceOptions(PREFER_AUTO, PREFER_LOW_LATENCY) {} + advanceOptions(PREFER_AUTO, PREFER_COMPRESSION_AUTO, false) {} VideoEncoderConfiguration& operator=(const VideoEncoderConfiguration& rhs) { if (this == &rhs) return *this; @@ -1984,9 +2090,9 @@ struct DataStreamConfig { * * When you set the data packet to synchronize with the audio, then if the data packet delay is * within the audio delay, the SDK triggers the `onStreamMessage` callback when the synchronized - * audio packet is played out. Do not set this parameter as true if you need the receiver to receive - * the data packet immediately. Agora recommends that you set this parameter to `true` only when you - * need to implement specific functions, for example lyric synchronization. + * audio packet is played out. Do not set this parameter as true if you need the receiver to + * receive the data packet immediately. Agora recommends that you set this parameter to `true` + * only when you need to implement specific functions, for example lyric synchronization. */ bool syncWithAudio; /** @@ -1994,7 +2100,8 @@ struct DataStreamConfig { * - `true`: Guarantee that the receiver receives the data in the sent order. 
* - `false`: Do not guarantee that the receiver receives the data in the sent order. * - * Do not set this parameter as `true` if you need the receiver to receive the data packet immediately. + * Do not set this parameter as `true` if you need the receiver to receive the data packet + * immediately. */ bool ordered; }; @@ -2004,16 +2111,16 @@ struct DataStreamConfig { */ enum SIMULCAST_STREAM_MODE { /* - * disable simulcast stream until receive request for enable simulcast stream by other broadcaster - */ + * disable simulcast stream until receive request for enable simulcast stream by other broadcaster + */ AUTO_SIMULCAST_STREAM = -1, /* - * disable simulcast stream - */ + * disable simulcast stream + */ DISABLE_SIMULCAST_STREAM = 0, /* - * always enable simulcast stream - */ + * always enable simulcast stream + */ ENABLE_SIMULCAST_STREAM = 1, }; @@ -2026,19 +2133,83 @@ struct SimulcastStreamConfig { */ VideoDimensions dimensions; /** - * The video bitrate (Kbps), represented by an instantaneous value. The default value of the log level is 5. + * The video bitrate (Kbps), represented by an instantaneous value. The default value of the log + * level is 5. */ int kBitrate; /** - * he capture frame rate (fps) of the local video. The default value is 5. + * The capture frame rate (fps) of the local video. The default value is 5. */ int framerate; SimulcastStreamConfig() : dimensions(160, 120), kBitrate(65), framerate(5) {} + SimulcastStreamConfig(const SimulcastStreamConfig& other) : dimensions(other.dimensions), kBitrate(other.kBitrate), framerate(other.framerate) {} bool operator==(const SimulcastStreamConfig& rhs) const { return dimensions == rhs.dimensions && kBitrate == rhs.kBitrate && framerate == rhs.framerate; } }; +/** + * The configuration of the multi-layer video stream. 
+ */ +struct SimulcastConfig { + /** + * The index of multi-layer video stream + */ + enum StreamLayerIndex { + /** + * 0: video stream index of layer_1 + */ + STREAM_LAYER_1 = 0, + /** + * 1: video stream index of layer_2 + */ + STREAM_LAYER_2 = 1, + /** + * 2: video stream index of layer_3 + */ + STREAM_LAYER_3 = 2, + /** + * 3: video stream index of layer_4 + */ + STREAM_LAYER_4 = 3, + /** + * 4: video stream index of layer_5 + */ + STREAM_LAYER_5 = 4, + /** + * 5: video stream index of layer_6 + */ + STREAM_LAYER_6 = 5, + /** + * 6: video stream index of low + */ + STREAM_LOW = 6, + /** + * 7: max count of video stream layers + */ + STREAM_LAYER_COUNT_MAX = 7 + }; + struct StreamLayerConfig { + /** + * The video frame dimension. The default value is 0. + */ + VideoDimensions dimensions; + /** + * The capture frame rate (fps) of the local video. The default value is 0. + */ + int framerate; + /** + * Whether to enable the corresponding layer of video stream. The default value is false. + */ + bool enable; + StreamLayerConfig() : dimensions(0, 0), framerate(0), enable(false) {} + }; + + /** + * The array of StreamLayerConfig, which contains STREAM_LAYER_COUNT_MAX layers of video stream at most. + */ + StreamLayerConfig configs[STREAM_LAYER_COUNT_MAX]; +}; /** * The location of the target area relative to the screen or window. If you do not set this parameter, * the SDK selects the whole screen or window. @@ -2068,28 +2239,31 @@ struct Rectangle { /** * The position and size of the watermark on the screen. * - * The position and size of the watermark on the screen are determined by `xRatio`, `yRatio`, and `widthRatio`: - * - (`xRatio`, `yRatio`) refers to the coordinates of the upper left corner of the watermark, which determines - * the distance from the upper left corner of the watermark to the upper left corner of the screen. - * The `widthRatio` determines the width of the watermark. 
+ * The position and size of the watermark on the screen are determined by `xRatio`, `yRatio`, and + * `widthRatio`: + * - (`xRatio`, `yRatio`) refers to the coordinates of the upper left corner of the watermark, which + * determines the distance from the upper left corner of the watermark to the upper left corner of + * the screen. The `widthRatio` determines the width of the watermark. */ struct WatermarkRatio { /** * The x-coordinate of the upper left corner of the watermark. The horizontal position relative to - * the origin, where the upper left corner of the screen is the origin, and the x-coordinate is the - * upper left corner of the watermark. The value range is [0.0,1.0], and the default value is 0. + * the origin, where the upper left corner of the screen is the origin, and the x-coordinate is + * the upper left corner of the watermark. The value range is [0.0,1.0], and the default value is + * 0. */ float xRatio; /** - * The y-coordinate of the upper left corner of the watermark. The vertical position relative to the - * origin, where the upper left corner of the screen is the origin, and the y-coordinate is the upper - * left corner of the screen. The value range is [0.0,1.0], and the default value is 0. + * The y-coordinate of the upper left corner of the watermark. The vertical position relative to + * the origin, where the upper left corner of the screen is the origin, and the y-coordinate is + * the upper left corner of the screen. The value range is [0.0,1.0], and the default value is 0. */ float yRatio; /** - * The width of the watermark. The SDK calculates the height of the watermark proportionally according - * to this parameter value to ensure that the enlarged or reduced watermark image is not distorted. - * The value range is [0,1], and the default value is 0, which means no watermark is displayed. + * The width of the watermark. 
The SDK calculates the height of the watermark proportionally + * according to this parameter value to ensure that the enlarged or reduced watermark image is not + * distorted. The value range is [0,1], and the default value is 0, which means no watermark is + * displayed. */ float widthRatio; @@ -2128,10 +2302,10 @@ struct WatermarkOptions { WATERMARK_FIT_MODE mode; WatermarkOptions() - : visibleInPreview(true), - positionInLandscapeMode(0, 0, 0, 0), - positionInPortraitMode(0, 0, 0, 0), - mode(FIT_MODE_COVER_POSITION) {} + : visibleInPreview(true), + positionInLandscapeMode(0, 0, 0, 0), + positionInPortraitMode(0, 0, 0, 0), + mode(FIT_MODE_COVER_POSITION) {} }; /** @@ -2202,7 +2376,8 @@ struct RtcStats { * The app CPU usage (%). * @note * - The value of `cpuAppUsage` is always reported as 0 in the `onLeaveChannel` callback. - * - As of Android 8.1, you cannot get the CPU usage from this attribute due to system limitations. + * - As of Android 8.1, you cannot get the CPU usage from this attribute due to system + * limitations. */ double cpuAppUsage; /** @@ -2212,13 +2387,15 @@ struct RtcStats { * value = (100 - System Idle Progress in Task Manager)/100. * @note * - The value of `cpuTotalUsage` is always reported as 0 in the `onLeaveChannel` callback. - * - As of Android 8.1, you cannot get the CPU usage from this attribute due to system limitations. + * - As of Android 8.1, you cannot get the CPU usage from this attribute due to system + * limitations. */ double cpuTotalUsage; /** * The round-trip time delay from the client to the local router. - * @note On Android, to get `gatewayRtt`, ensure that you add the `android.permission.ACCESS_WIFI_STATE` - * permission after `` in the `AndroidManifest.xml` file in your project. + * @note On Android, to get `gatewayRtt`, ensure that you add the + * `android.permission.ACCESS_WIFI_STATE` permission after `` in the + * `AndroidManifest.xml` file in your project. 
*/ int gatewayRtt; /** @@ -2295,39 +2472,39 @@ struct RtcStats { */ int rxPacketLossRate; RtcStats() - : duration(0), - txBytes(0), - rxBytes(0), - txAudioBytes(0), - txVideoBytes(0), - rxAudioBytes(0), - rxVideoBytes(0), - txKBitRate(0), - rxKBitRate(0), - rxAudioKBitRate(0), - txAudioKBitRate(0), - rxVideoKBitRate(0), - txVideoKBitRate(0), - lastmileDelay(0), - userCount(0), - cpuAppUsage(0.0), - cpuTotalUsage(0.0), - gatewayRtt(0), - memoryAppUsageRatio(0.0), - memoryTotalUsageRatio(0.0), - memoryAppUsageInKbytes(0), - connectTimeMs(0), - firstAudioPacketDuration(0), - firstVideoPacketDuration(0), - firstVideoKeyFramePacketDuration(0), - packetsBeforeFirstKeyFramePacket(0), - firstAudioPacketDurationAfterUnmute(0), - firstVideoPacketDurationAfterUnmute(0), - firstVideoKeyFramePacketDurationAfterUnmute(0), - firstVideoKeyFrameDecodedDurationAfterUnmute(0), - firstVideoKeyFrameRenderedDurationAfterUnmute(0), - txPacketLossRate(0), - rxPacketLossRate(0) {} + : duration(0), + txBytes(0), + rxBytes(0), + txAudioBytes(0), + txVideoBytes(0), + rxAudioBytes(0), + rxVideoBytes(0), + txKBitRate(0), + rxKBitRate(0), + rxAudioKBitRate(0), + txAudioKBitRate(0), + rxVideoKBitRate(0), + txVideoKBitRate(0), + lastmileDelay(0), + userCount(0), + cpuAppUsage(0.0), + cpuTotalUsage(0.0), + gatewayRtt(0), + memoryAppUsageRatio(0.0), + memoryTotalUsageRatio(0.0), + memoryAppUsageInKbytes(0), + connectTimeMs(0), + firstAudioPacketDuration(0), + firstVideoPacketDuration(0), + firstVideoKeyFramePacketDuration(0), + packetsBeforeFirstKeyFramePacket(0), + firstAudioPacketDurationAfterUnmute(0), + firstVideoPacketDurationAfterUnmute(0), + firstVideoKeyFramePacketDurationAfterUnmute(0), + firstVideoKeyFrameDecodedDurationAfterUnmute(0), + firstVideoKeyFrameRenderedDurationAfterUnmute(0), + txPacketLossRate(0), + rxPacketLossRate(0) {} }; /** @@ -2345,7 +2522,8 @@ enum CLIENT_ROLE_TYPE { }; /** - * Quality change of the local video in terms of target frame rate and target bit rate since last 
count. + * Quality change of the local video in terms of target frame rate and target bit rate since last + * count. */ enum QUALITY_ADAPT_INDICATION { /** @@ -2363,11 +2541,10 @@ enum QUALITY_ADAPT_INDICATION { }; /** - * The latency level of an audience member in interactive live streaming. This enum takes effect only - * when the user role is set to `CLIENT_ROLE_AUDIENCE`. + * The latency level of an audience member in interactive live streaming. This enum takes effect + * only when the user role is set to `CLIENT_ROLE_AUDIENCE`. */ -enum AUDIENCE_LATENCY_LEVEL_TYPE -{ +enum AUDIENCE_LATENCY_LEVEL_TYPE { /** * 1: Low latency. */ @@ -2381,15 +2558,14 @@ enum AUDIENCE_LATENCY_LEVEL_TYPE /** * The detailed options of a user. */ -struct ClientRoleOptions -{ +struct ClientRoleOptions { /** - * The latency level of an audience member in interactive live streaming. See `AUDIENCE_LATENCY_LEVEL_TYPE`. + * The latency level of an audience member in interactive live streaming. See + * `AUDIENCE_LATENCY_LEVEL_TYPE`. */ AUDIENCE_LATENCY_LEVEL_TYPE audienceLatencyLevel; - ClientRoleOptions() - : audienceLatencyLevel(AUDIENCE_LATENCY_LEVEL_ULTRA_LOW_LATENCY) {} + ClientRoleOptions() : audienceLatencyLevel(AUDIENCE_LATENCY_LEVEL_ULTRA_LOW_LATENCY) {} }; /** @@ -2423,8 +2599,8 @@ enum EXPERIENCE_POOR_REASON { */ WIRELESS_SIGNAL_POOR = 4, /** - * 8: The local user enables both Wi-Fi and bluetooth, and their signals interfere with each other. - * As a result, audio transmission quality is undermined. + * 8: The local user enables both Wi-Fi and bluetooth, and their signals interfere with each + * other. As a result, audio transmission quality is undermined. */ WIFI_BLUETOOTH_COEXIST = 8, }; @@ -2433,18 +2609,18 @@ enum EXPERIENCE_POOR_REASON { * Audio AINS mode */ enum AUDIO_AINS_MODE { - /** - * AINS mode with soft suppression level. - */ - AINS_MODE_BALANCED = 0, - /** - * AINS mode with high suppression level. 
- */ - AINS_MODE_AGGRESSIVE = 1, - /** - * AINS mode with high suppression level and ultra-low-latency - */ - AINS_MODE_ULTRALOWLATENCY = 2 + /** + * AINS mode with soft suppression level. + */ + AINS_MODE_BALANCED = 0, + /** + * AINS mode with high suppression level. + */ + AINS_MODE_AGGRESSIVE = 1, + /** + * AINS mode with high suppression level and ultra-low-latency + */ + AINS_MODE_ULTRALOWLATENCY = 2 }; /** @@ -2455,9 +2631,10 @@ enum AUDIO_PROFILE_TYPE { * 0: The default audio profile. * - For the Communication profile: * - Windows: A sample rate of 16 kHz, audio encoding, mono, and a bitrate of up to 16 Kbps. - * - Android/macOS/iOS: A sample rate of 32 kHz, audio encoding, mono, and a bitrate of up to 18 Kbps. - * of up to 16 Kbps. - * - For the Live-broadcast profile: A sample rate of 48 kHz, music encoding, mono, and a bitrate of up to 64 Kbps. + * - Android/macOS/iOS: A sample rate of 32 kHz, audio encoding, mono, and a bitrate of up to 18 + * Kbps. of up to 16 Kbps. + * - For the Live-broadcast profile: A sample rate of 48 kHz, music encoding, mono, and a bitrate + * of up to 64 Kbps. */ AUDIO_PROFILE_DEFAULT = 0, /** @@ -2471,8 +2648,8 @@ enum AUDIO_PROFILE_TYPE { /** * 3: A sample rate of 48 kHz, music encoding, stereo, and a bitrate of up to 80 Kbps. * - * To implement stereo audio, you also need to call `setAdvancedAudioOptions` and set `audioProcessingChannels` - * to `AUDIO_PROCESSING_STEREO` in `AdvancedAudioOptions`. + * To implement stereo audio, you also need to call `setAdvancedAudioOptions` and set + * `audioProcessingChannels` to `AUDIO_PROCESSING_STEREO` in `AdvancedAudioOptions`. */ AUDIO_PROFILE_MUSIC_STANDARD_STEREO = 3, /** @@ -2482,8 +2659,8 @@ enum AUDIO_PROFILE_TYPE { /** * 5: A sample rate of 48 kHz, music encoding, stereo, and a bitrate of up to 128 Kbps. * - * To implement stereo audio, you also need to call `setAdvancedAudioOptions` and set `audioProcessingChannels` - * to `AUDIO_PROCESSING_STEREO` in `AdvancedAudioOptions`. 
+ * To implement stereo audio, you also need to call `setAdvancedAudioOptions` and set + * `audioProcessingChannels` to `AUDIO_PROCESSING_STEREO` in `AdvancedAudioOptions`. */ AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO = 5, /** @@ -2515,7 +2692,8 @@ enum AUDIO_SCENARIO_TYPE { */ AUDIO_SCENARIO_CHATROOM = 5, /** - * 7: Real-time chorus scenario, where users have good network conditions and require ultra-low latency. + * 7: Real-time chorus scenario, where users have good network conditions and require ultra-low + * latency. */ AUDIO_SCENARIO_CHORUS = 7, /** @@ -2532,19 +2710,19 @@ enum AUDIO_SCENARIO_TYPE { * The format of the video frame. */ struct VideoFormat { - OPTIONAL_ENUM_SIZE_T { - /** The maximum value (px) of the width. */ - kMaxWidthInPixels = 3840, - /** The maximum value (px) of the height. */ - kMaxHeightInPixels = 2160, - /** The maximum value (fps) of the frame rate. */ - kMaxFps = 60, + OPTIONAL_ENUM_SIZE_T{ + /** The maximum value (px) of the width. */ + kMaxWidthInPixels = 3840, + /** The maximum value (px) of the height. */ + kMaxHeightInPixels = 2160, + /** The maximum value (fps) of the frame rate. */ + kMaxFps = 60, }; /** * The width (px) of the video. */ - int width; // Number of pixels. + int width; // Number of pixels. /** * The height (px) of the video. */ @@ -2568,9 +2746,7 @@ struct VideoFormat { bool operator==(const VideoFormat& fmt) const { return width == fmt.width && height == fmt.height && fps == fmt.fps; } - bool operator!=(const VideoFormat& fmt) const { - return !operator==(fmt); - } + bool operator!=(const VideoFormat& fmt) const { return !operator==(fmt); } }; /** @@ -2623,7 +2799,6 @@ enum SCREEN_SCENARIO_TYPE { SCREEN_SCENARIO_RDC = 4, }; - /** * The video application scenario type. */ @@ -2636,6 +2811,14 @@ enum VIDEO_APPLICATION_SCENARIO_TYPE { * 1: Meeting Scenario. This scenario is the best QoE practice of meeting application. */ APPLICATION_SCENARIO_MEETING = 1, + /** + * 2: Video Call Scenario. 
This scenario is used to optimize the video experience in video application, like 1v1 video call. + */ + APPLICATION_SCENARIO_1V1 = 2, + /** + * 3: Live Show Scenario. This scenario is used to optimize the video experience in video live show. + */ + APPLICATION_SCENARIO_LIVESHOW = 3, }; /** @@ -2666,7 +2849,8 @@ enum VIDEO_QOE_PREFERENCE_TYPE { */ enum CAPTURE_BRIGHTNESS_LEVEL_TYPE { /** -1: The SDK does not detect the brightness level of the video image. - * Wait a few seconds to get the brightness level from `CAPTURE_BRIGHTNESS_LEVEL_TYPE` in the next callback. + * Wait a few seconds to get the brightness level from `CAPTURE_BRIGHTNESS_LEVEL_TYPE` in the next + * callback. */ CAPTURE_BRIGHTNESS_LEVEL_INVALID = -1, /** 0: The brightness level of the video image is normal. @@ -2681,20 +2865,20 @@ enum CAPTURE_BRIGHTNESS_LEVEL_TYPE { }; enum CAMERA_STABILIZATION_MODE { - /** The camera stabilization mode is disabled. - */ + /** The camera stabilization mode is disabled. + */ CAMERA_STABILIZATION_MODE_OFF = -1, - /** device choose stabilization mode automatically. - */ + /** device choose stabilization mode automatically. + */ CAMERA_STABILIZATION_MODE_AUTO = 0, - /** stabilization mode level 1. - */ + /** stabilization mode level 1. + */ CAMERA_STABILIZATION_MODE_LEVEL_1 = 1, - /** stabilization mode level 2. - */ + /** stabilization mode level 2. + */ CAMERA_STABILIZATION_MODE_LEVEL_2 = 2, - /** stabilization mode level 3. - */ + /** stabilization mode level 3. + */ CAMERA_STABILIZATION_MODE_LEVEL_3 = 3, /** The maximum level of the camera stabilization mode. */ @@ -2732,7 +2916,8 @@ enum LOCAL_AUDIO_STREAM_REASON { */ LOCAL_AUDIO_STREAM_REASON_OK = 0, /** - * 1: No specified reason for the local audio failure. Remind your users to try to rejoin the channel. + * 1: No specified reason for the local audio failure. Remind your users to try to rejoin the + * channel. 
*/ LOCAL_AUDIO_STREAM_REASON_FAILURE = 1, /** @@ -2845,7 +3030,7 @@ enum LOCAL_VIDEO_STREAM_REASON { */ LOCAL_VIDEO_STREAM_REASON_DEVICE_NOT_FOUND = 8, /** - * 9: (macOS only) The video capture device currently in use is disconnected (such as being + * 9: (macOS and Windows only) The video capture device currently in use is disconnected (such as being * unplugged). */ LOCAL_VIDEO_STREAM_REASON_DEVICE_DISCONNECTED = 9, @@ -2860,8 +3045,8 @@ enum LOCAL_VIDEO_STREAM_REASON { */ LOCAL_VIDEO_STREAM_REASON_DEVICE_INTERRUPT = 14, /** - * 15: (Android only) The device may need to be shut down and restarted to restore camera function, - * or there may be a persistent hardware problem. + * 15: (Android only) The device may need to be shut down and restarted to restore camera + * function, or there may be a persistent hardware problem. */ LOCAL_VIDEO_STREAM_REASON_DEVICE_FATAL_ERROR = 15, /** @@ -2898,20 +3083,21 @@ enum LOCAL_VIDEO_STREAM_REASON { /** 22: No permision to capture screen. */ LOCAL_VIDEO_STREAM_REASON_SCREEN_CAPTURE_NO_PERMISSION = 22, /** - * 24: (Windows Only) An unexpected error (possibly due to window block failure) occurs during the screen - * sharing process, resulting in performance degradation. However, the screen sharing process itself is - * functioning normally. + * 24: (Windows Only) An unexpected error (possibly due to window block failure) occurs during the + * screen sharing process, resulting in performance degradation. However, the screen sharing + * process itself is functioning normally. */ LOCAL_VIDEO_STREAM_REASON_SCREEN_CAPTURE_AUTO_FALLBACK = 24, - /** 25: (Windows only) The local screen capture window is currently hidden and not visible on the desktop. */ + /** 25: (Windows only) The local screen capture window is currently hidden and not visible on the + desktop. */ LOCAL_VIDEO_STREAM_REASON_SCREEN_CAPTURE_WINDOW_HIDDEN = 25, /** 26: (Windows only) The local screen capture window is recovered from its hidden state. 
*/ LOCAL_VIDEO_STREAM_REASON_SCREEN_CAPTURE_WINDOW_RECOVER_FROM_HIDDEN = 26, /** 27: (Windows and macOS only) The window is recovered from miniminzed */ LOCAL_VIDEO_STREAM_REASON_SCREEN_CAPTURE_WINDOW_RECOVER_FROM_MINIMIZED = 27, - /** + /** * 28: The screen capture paused. - * + * * Common scenarios for reporting this error code: * - When the desktop switch to the secure desktop such as UAC dialog or the Winlogon desktop on * Windows platform, the SDK reports this error code. @@ -2919,47 +3105,49 @@ enum LOCAL_VIDEO_STREAM_REASON { LOCAL_VIDEO_STREAM_REASON_SCREEN_CAPTURE_PAUSED = 28, /** 29: The screen capture is resumed. */ LOCAL_VIDEO_STREAM_REASON_SCREEN_CAPTURE_RESUMED = 29, + /** 30: The shared display has been disconnected */ + LOCAL_VIDEO_STREAM_REASON_SCREEN_CAPTURE_DISPLAY_DISCONNECTED = 30, }; /** * Remote audio states. */ -enum REMOTE_AUDIO_STATE -{ +enum REMOTE_AUDIO_STATE { /** * 0: The remote audio is in the default state. The SDK reports this state in the case of * `REMOTE_AUDIO_REASON_LOCAL_MUTED(3)`, `REMOTE_AUDIO_REASON_REMOTE_MUTED(5)`, or * `REMOTE_AUDIO_REASON_REMOTE_OFFLINE(7)`. */ - REMOTE_AUDIO_STATE_STOPPED = 0, // Default state, audio is started or remote user disabled/muted audio stream + REMOTE_AUDIO_STATE_STOPPED = + 0, // Default state, audio is started or remote user disabled/muted audio stream /** * 1: The first remote audio packet is received. */ REMOTE_AUDIO_STATE_STARTING = 1, // The first audio frame packet has been received /** - * 2: The remote audio stream is decoded and plays normally. The SDK reports this state in the case of - * `REMOTE_AUDIO_REASON_NETWORK_RECOVERY(2)`, `REMOTE_AUDIO_REASON_LOCAL_UNMUTED(4)`, or + * 2: The remote audio stream is decoded and plays normally. The SDK reports this state in the + * case of `REMOTE_AUDIO_REASON_NETWORK_RECOVERY(2)`, `REMOTE_AUDIO_REASON_LOCAL_UNMUTED(4)`, or * `REMOTE_AUDIO_REASON_REMOTE_UNMUTED(6)`. 
*/ - REMOTE_AUDIO_STATE_DECODING = 2, // The first remote audio frame has been decoded or fronzen state ends + REMOTE_AUDIO_STATE_DECODING = + 2, // The first remote audio frame has been decoded or fronzen state ends /** * 3: The remote audio is frozen. The SDK reports this state in the case of * `REMOTE_AUDIO_REASON_NETWORK_CONGESTION(1)`. */ - REMOTE_AUDIO_STATE_FROZEN = 3, // Remote audio is frozen, probably due to network issue + REMOTE_AUDIO_STATE_FROZEN = 3, // Remote audio is frozen, probably due to network issue /** * 4: The remote audio fails to start. The SDK reports this state in the case of * `REMOTE_AUDIO_REASON_INTERNAL(0)`. */ - REMOTE_AUDIO_STATE_FAILED = 4, // Remote audio play failed + REMOTE_AUDIO_STATE_FAILED = 4, // Remote audio play failed }; /** * Reasons for the remote audio state change. */ -enum REMOTE_AUDIO_STATE_REASON -{ +enum REMOTE_AUDIO_STATE_REASON { /** * 0: The SDK reports this reason when the video state changes. */ @@ -3013,7 +3201,8 @@ enum REMOTE_VIDEO_STATE { /** * 0: The remote video is in the default state. The SDK reports this state in the case of * `REMOTE_VIDEO_STATE_REASON_LOCAL_MUTED (3)`, `REMOTE_VIDEO_STATE_REASON_REMOTE_MUTED (5)`, - * `REMOTE_VIDEO_STATE_REASON_REMOTE_OFFLINE (7)`, or `REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK (8)`. + * `REMOTE_VIDEO_STATE_REASON_REMOTE_OFFLINE (7)`, or `REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK + * (8)`. */ REMOTE_VIDEO_STATE_STOPPED = 0, /** @@ -3021,9 +3210,10 @@ enum REMOTE_VIDEO_STATE { */ REMOTE_VIDEO_STATE_STARTING = 1, /** - * 2: The remote video stream is decoded and plays normally. The SDK reports this state in the case of - * `REMOTE_VIDEO_STATE_REASON_NETWORK_RECOVERY (2)`, `REMOTE_VIDEO_STATE_REASON_LOCAL_UNMUTED (4)`, - * `REMOTE_VIDEO_STATE_REASON_REMOTE_UNMUTED (6)`, or `REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK_RECOVERY (9)`. + * 2: The remote video stream is decoded and plays normally. 
The SDK reports this state in the + * case of `REMOTE_VIDEO_STATE_REASON_NETWORK_RECOVERY (2)`, + * `REMOTE_VIDEO_STATE_REASON_LOCAL_UNMUTED (4)`, `REMOTE_VIDEO_STATE_REASON_REMOTE_UNMUTED (6)`, + * or `REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK_RECOVERY (9)`. */ REMOTE_VIDEO_STATE_DECODING = 2, /** 3: The remote video is frozen, probably due to @@ -3040,36 +3230,36 @@ enum REMOTE_VIDEO_STATE { */ enum REMOTE_VIDEO_STATE_REASON { /** - * 0: The SDK reports this reason when the video state changes. - */ + * 0: The SDK reports this reason when the video state changes. + */ REMOTE_VIDEO_STATE_REASON_INTERNAL = 0, /** - * 1: Network congestion. - */ + * 1: Network congestion. + */ REMOTE_VIDEO_STATE_REASON_NETWORK_CONGESTION = 1, /** - * 2: Network recovery. - */ + * 2: Network recovery. + */ REMOTE_VIDEO_STATE_REASON_NETWORK_RECOVERY = 2, /** - * 3: The local user stops receiving the remote video stream or disables the video module. - */ + * 3: The local user stops receiving the remote video stream or disables the video module. + */ REMOTE_VIDEO_STATE_REASON_LOCAL_MUTED = 3, /** - * 4: The local user resumes receiving the remote video stream or enables the video module. - */ + * 4: The local user resumes receiving the remote video stream or enables the video module. + */ REMOTE_VIDEO_STATE_REASON_LOCAL_UNMUTED = 4, /** - * 5: The remote user stops sending the video stream or disables the video module. - */ + * 5: The remote user stops sending the video stream or disables the video module. + */ REMOTE_VIDEO_STATE_REASON_REMOTE_MUTED = 5, /** - * 6: The remote user resumes sending the video stream or enables the video module. - */ + * 6: The remote user resumes sending the video stream or enables the video module. + */ REMOTE_VIDEO_STATE_REASON_REMOTE_UNMUTED = 6, /** - * 7: The remote user leaves the channel. - */ + * 7: The remote user leaves the channel. 
+ */ REMOTE_VIDEO_STATE_REASON_REMOTE_OFFLINE = 7, /** 8: The remote audio-and-video stream falls back to the audio-only stream * due to poor network conditions. @@ -3085,7 +3275,7 @@ enum REMOTE_VIDEO_STATE_REASON { /** (Internal use only) 11: The remote video stream type change to high stream type */ REMOTE_VIDEO_STATE_REASON_VIDEO_STREAM_TYPE_CHANGE_TO_HIGH = 11, - /** (iOS only) 12: The app of the remote user is in background. + /** (iOS only) 12: The app of the remote user is in background. */ REMOTE_VIDEO_STATE_REASON_SDK_IN_BACKGROUND = 12, @@ -3123,10 +3313,14 @@ enum REMOTE_USER_STATE { */ struct VideoTrackInfo { VideoTrackInfo() - : isLocal(false), ownerUid(0), trackId(0), channelId(OPTIONAL_NULLPTR) - , codecType(VIDEO_CODEC_H265) - , encodedFrameOnly(false), sourceType(VIDEO_SOURCE_CAMERA_PRIMARY) - , observationPosition(agora::media::base::POSITION_POST_CAPTURER) {} + : isLocal(false), + ownerUid(0), + trackId(0), + channelId(OPTIONAL_NULLPTR), + codecType(VIDEO_CODEC_H265), + encodedFrameOnly(false), + sourceType(VIDEO_SOURCE_CAMERA_PRIMARY), + observationPosition(agora::media::base::POSITION_POST_CAPTURER) {} /** * Whether the video track is local or remote. * - true: The video track is local. @@ -3166,7 +3360,8 @@ struct VideoTrackInfo { }; /** - * The downscale level of the remote video stream . The higher the downscale level, the more the video downscales. + * The downscale level of the remote video stream . The higher the downscale level, the more the + * video downscales. */ enum REMOTE_VIDEO_DOWNSCALE_LEVEL { /** @@ -3215,7 +3410,8 @@ struct AudioVolumeInfo { * @note * - The `vad` parameter does not report the voice activity status of remote users. In a remote * user's callback, the value of `vad` is always 1. - * - To use this parameter, you must set `reportVad` to true when calling `enableAudioVolumeIndication`. + * - To use this parameter, you must set `reportVad` to true when calling + * `enableAudioVolumeIndication`. 
*/ unsigned int vad; /** @@ -3339,7 +3535,8 @@ enum VIDEO_CODEC_PROFILE_TYPE { */ VIDEO_CODEC_PROFILE_BASELINE = 66, /** - * 77: Main video codec profile. Generally used in mainstream electronics, such as MP4 players, portable video players, PSP, and iPads. + * 77: Main video codec profile. Generally used in mainstream electronics, such as MP4 players, + * portable video players, PSP, and iPads. */ VIDEO_CODEC_PROFILE_MAIN = 77, /** @@ -3348,7 +3545,6 @@ enum VIDEO_CODEC_PROFILE_TYPE { VIDEO_CODEC_PROFILE_HIGH = 100, }; - /** * Self-defined audio codec profile. */ @@ -3370,8 +3566,7 @@ enum AUDIO_CODEC_PROFILE_TYPE { /** * Local audio statistics. */ -struct LocalAudioStats -{ +struct LocalAudioStats { /** * The number of audio channels. */ @@ -3389,7 +3584,8 @@ struct LocalAudioStats */ int internalCodec; /** - * The packet loss rate (%) from the local client to the Agora server before applying the anti-packet loss strategies. + * The packet loss rate (%) from the local client to the Agora server before applying the + * anti-packet loss strategies. */ unsigned short txPacketLossRate; /** @@ -3410,35 +3606,45 @@ struct LocalAudioStats int aecEstimatedDelay; }; - /** * States of the Media Push. */ enum RTMP_STREAM_PUBLISH_STATE { /** - * 0: The Media Push has not started or has ended. This state is also triggered after you remove a RTMP or RTMPS stream from the CDN by calling `removePublishStreamUrl`. + * 0: The Media Push has not started or has ended. This state is also triggered after you remove a + * RTMP or RTMPS stream from the CDN by calling `removePublishStreamUrl`. */ RTMP_STREAM_PUBLISH_STATE_IDLE = 0, /** - * 1: The SDK is connecting to Agora's streaming server and the CDN server. This state is triggered after you call the `addPublishStreamUrl` method. + * 1: The SDK is connecting to Agora's streaming server and the CDN server. This state is + * triggered after you call the `addPublishStreamUrl` method. 
*/ RTMP_STREAM_PUBLISH_STATE_CONNECTING = 1, /** - * 2: The RTMP or RTMPS streaming publishes. The SDK successfully publishes the RTMP or RTMPS streaming and returns this state. + * 2: The RTMP or RTMPS streaming publishes. The SDK successfully publishes the RTMP or RTMPS + * streaming and returns this state. */ RTMP_STREAM_PUBLISH_STATE_RUNNING = 2, /** - * 3: The RTMP or RTMPS streaming is recovering. When exceptions occur to the CDN, or the streaming is interrupted, the SDK tries to resume RTMP or RTMPS streaming and returns this state. - * - If the SDK successfully resumes the streaming, #RTMP_STREAM_PUBLISH_STATE_RUNNING (2) returns. - * - If the streaming does not resume within 60 seconds or server errors occur, #RTMP_STREAM_PUBLISH_STATE_FAILURE (4) returns. You can also reconnect to the server by calling the `removePublishStreamUrl` and `addPublishStreamUrl` methods. + * 3: The RTMP or RTMPS streaming is recovering. When exceptions occur to the CDN, or the + * streaming is interrupted, the SDK tries to resume RTMP or RTMPS streaming and returns this + * state. + * - If the SDK successfully resumes the streaming, #RTMP_STREAM_PUBLISH_STATE_RUNNING (2) + * returns. + * - If the streaming does not resume within 60 seconds or server errors occur, + * #RTMP_STREAM_PUBLISH_STATE_FAILURE (4) returns. You can also reconnect to the server by calling + * the `removePublishStreamUrl` and `addPublishStreamUrl` methods. */ RTMP_STREAM_PUBLISH_STATE_RECOVERING = 3, /** - * 4: The RTMP or RTMPS streaming fails. See the `errCode` parameter for the detailed error information. You can also call the `addPublishStreamUrl` method to publish the RTMP or RTMPS streaming again. + * 4: The RTMP or RTMPS streaming fails. See the `errCode` parameter for the detailed error + * information. You can also call the `addPublishStreamUrl` method to publish the RTMP or RTMPS + * streaming again. 
*/ RTMP_STREAM_PUBLISH_STATE_FAILURE = 4, /** - * 5: The SDK is disconnecting to Agora's streaming server and the CDN server. This state is triggered after you call the `removePublishStreamUrl` method. + * 5: The SDK is disconnecting to Agora's streaming server and the CDN server. This state is + * triggered after you call the `removePublishStreamUrl` method. */ RTMP_STREAM_PUBLISH_STATE_DISCONNECTING = 5, }; @@ -3452,8 +3658,10 @@ enum RTMP_STREAM_PUBLISH_REASON { */ RTMP_STREAM_PUBLISH_REASON_OK = 0, /** - * 1: Invalid argument used. If, for example, you do not call the `setLiveTranscoding` method to configure the LiveTranscoding parameters before calling the addPublishStreamUrl method, - * the SDK returns this error. Check whether you set the parameters in the `setLiveTranscoding` method properly. + * 1: Invalid argument used. If, for example, you do not call the `setLiveTranscoding` method to + * configure the LiveTranscoding parameters before calling the addPublishStreamUrl method, the SDK + * returns this error. Check whether you set the parameters in the `setLiveTranscoding` method + * properly. */ RTMP_STREAM_PUBLISH_REASON_INVALID_ARGUMENT = 1, /** @@ -3461,11 +3669,13 @@ enum RTMP_STREAM_PUBLISH_REASON { */ RTMP_STREAM_PUBLISH_REASON_ENCRYPTED_STREAM_NOT_ALLOWED = 2, /** - * 3: Timeout for the RTMP or RTMPS streaming. Call the `addPublishStreamUrl` method to publish the streaming again. + * 3: Timeout for the RTMP or RTMPS streaming. Call the `addPublishStreamUrl` method to publish + * the streaming again. */ RTMP_STREAM_PUBLISH_REASON_CONNECTION_TIMEOUT = 3, /** - * 4: An error occurs in Agora's streaming server. Call the `addPublishStreamUrl` method to publish the streaming again. + * 4: An error occurs in Agora's streaming server. Call the `addPublishStreamUrl` method to + * publish the streaming again. 
*/ RTMP_STREAM_PUBLISH_REASON_INTERNAL_SERVER_ERROR = 4, /** @@ -3489,17 +3699,23 @@ enum RTMP_STREAM_PUBLISH_REASON { */ RTMP_STREAM_PUBLISH_REASON_STREAM_NOT_FOUND = 9, /** - * 10: The format of the RTMP or RTMPS streaming URL is not supported. Check whether the URL format is correct. + * 10: The format of the RTMP or RTMPS streaming URL is not supported. Check whether the URL + * format is correct. */ RTMP_STREAM_PUBLISH_REASON_FORMAT_NOT_SUPPORTED = 10, /** - * 11: The user role is not host, so the user cannot use the CDN live streaming function. Check your application code logic. + * 11: The user role is not host, so the user cannot use the CDN live streaming function. Check + * your application code logic. */ - RTMP_STREAM_PUBLISH_REASON_NOT_BROADCASTER = 11, // Note: match to ERR_PUBLISH_STREAM_NOT_BROADCASTER in AgoraBase.h + RTMP_STREAM_PUBLISH_REASON_NOT_BROADCASTER = + 11, // Note: match to ERR_PUBLISH_STREAM_NOT_BROADCASTER in AgoraBase.h /** - * 13: The `updateRtmpTranscoding` or `setLiveTranscoding` method is called to update the transcoding configuration in a scenario where there is streaming without transcoding. Check your application code logic. + * 13: The `updateRtmpTranscoding` or `setLiveTranscoding` method is called to update the + * transcoding configuration in a scenario where there is streaming without transcoding. Check + * your application code logic. */ - RTMP_STREAM_PUBLISH_REASON_TRANSCODING_NO_MIX_STREAM = 13, // Note: match to ERR_PUBLISH_STREAM_TRANSCODING_NO_MIX_STREAM in AgoraBase.h + RTMP_STREAM_PUBLISH_REASON_TRANSCODING_NO_MIX_STREAM = + 13, // Note: match to ERR_PUBLISH_STREAM_TRANSCODING_NO_MIX_STREAM in AgoraBase.h /** * 14: Errors occurred in the host's network. */ @@ -3507,11 +3723,13 @@ enum RTMP_STREAM_PUBLISH_REASON { /** * 15: Your App ID does not have permission to use the CDN live streaming function. 
*/ - RTMP_STREAM_PUBLISH_REASON_INVALID_APPID = 15, // Note: match to ERR_PUBLISH_STREAM_APPID_INVALID in AgoraBase.h + RTMP_STREAM_PUBLISH_REASON_INVALID_APPID = + 15, // Note: match to ERR_PUBLISH_STREAM_APPID_INVALID in AgoraBase.h /** invalid privilege. */ RTMP_STREAM_PUBLISH_REASON_INVALID_PRIVILEGE = 16, /** - * 100: The streaming has been stopped normally. After you call `removePublishStreamUrl` to stop streaming, the SDK returns this value. + * 100: The streaming has been stopped normally. After you call `removePublishStreamUrl` to stop + * streaming, the SDK returns this value. */ RTMP_STREAM_UNPUBLISH_REASON_OK = 100, }; @@ -3519,11 +3737,13 @@ enum RTMP_STREAM_PUBLISH_REASON { /** Events during the RTMP or RTMPS streaming. */ enum RTMP_STREAMING_EVENT { /** - * 1: An error occurs when you add a background image or a watermark image to the RTMP or RTMPS stream. + * 1: An error occurs when you add a background image or a watermark image to the RTMP or RTMPS + * stream. */ RTMP_STREAMING_EVENT_FAILED_LOAD_IMAGE = 1, /** - * 2: The streaming URL is already being used for CDN live streaming. If you want to start new streaming, use a new streaming URL. + * 2: The streaming URL is already being used for CDN live streaming. If you want to start new + * streaming, use a new streaming URL. */ RTMP_STREAMING_EVENT_URL_ALREADY_IN_USE = 2, /** @@ -3541,15 +3761,18 @@ enum RTMP_STREAMING_EVENT { */ typedef struct RtcImage { /** - *The HTTP/HTTPS URL address of the image in the live video. The maximum length of this parameter is 1024 bytes. + *The HTTP/HTTPS URL address of the image in the live video. The maximum length of this parameter + *is 1024 bytes. */ const char* url; /** - * The x coordinate (pixel) of the image on the video frame (taking the upper left corner of the video frame as the origin). + * The x coordinate (pixel) of the image on the video frame (taking the upper left corner of the + * video frame as the origin). 
*/ int x; /** - * The y coordinate (pixel) of the image on the video frame (taking the upper left corner of the video frame as the origin). + * The y coordinate (pixel) of the image on the video frame (taking the upper left corner of the + * video frame as the origin). */ int y; /** @@ -3580,18 +3803,21 @@ typedef struct RtcImage { /** * The configuration for advanced features of the RTMP or RTMPS streaming with transcoding. * - * If you want to enable the advanced features of streaming with transcoding, contact support@agora.io. + * If you want to enable the advanced features of streaming with transcoding, contact + * support@agora.io. */ struct LiveStreamAdvancedFeature { LiveStreamAdvancedFeature() : featureName(OPTIONAL_NULLPTR), opened(false) {} - LiveStreamAdvancedFeature(const char* feat_name, bool open) : featureName(feat_name), opened(open) {} + LiveStreamAdvancedFeature(const char* feat_name, bool open) + : featureName(feat_name), opened(open) {} /** The advanced feature for high-quality video with a lower bitrate. */ // static const char* LBHQ = "lbhq"; /** The advanced feature for the optimized video encoder. */ // static const char* VEO = "veo"; /** - * The feature names, including LBHQ (high-quality video with a lower bitrate) and VEO (optimized video encoder). + * The feature names, including LBHQ (high-quality video with a lower bitrate) and VEO (optimized + * video encoder). */ const char* featureName; @@ -3601,15 +3827,15 @@ struct LiveStreamAdvancedFeature { * - `false`: (Default) Disable the advanced feature. */ bool opened; -} ; +}; /** * Connection state types. */ -enum CONNECTION_STATE_TYPE -{ +enum CONNECTION_STATE_TYPE { /** - * 1: The SDK is disconnected from the Agora edge server. The state indicates the SDK is in one of the following phases: + * 1: The SDK is disconnected from the Agora edge server. The state indicates the SDK is in one of + * the following phases: * - The initial state before calling the `joinChannel` method. 
* - The app calls the `leaveChannel` method. */ @@ -3661,11 +3887,15 @@ struct TranscodingUser { */ uid_t uid; /** - * The x coordinate (pixel) of the host's video on the output video frame (taking the upper left corner of the video frame as the origin). The value range is [0, width], where width is the `width` set in `LiveTranscoding`. + * The x coordinate (pixel) of the host's video on the output video frame (taking the upper left + * corner of the video frame as the origin). The value range is [0, width], where width is the + * `width` set in `LiveTranscoding`. */ int x; /** - * The y coordinate (pixel) of the host's video on the output video frame (taking the upper left corner of the video frame as the origin). The value range is [0, height], where height is the `height` set in `LiveTranscoding`. + * The y coordinate (pixel) of the host's video on the output video frame (taking the upper left + * corner of the video frame as the origin). The value range is [0, height], where height is the + * `height` set in `LiveTranscoding`. */ int y; /** @@ -3682,7 +3912,7 @@ struct TranscodingUser { * - 100: The host's video is the top layer. * * If the value is beyond this range, the SDK reports the error code `ERR_INVALID_ARGUMENT`. - */ + */ int zOrder; /** * The transparency of the host's video. The value range is [0.0, 1.0]. @@ -3691,28 +3921,29 @@ struct TranscodingUser { */ double alpha; /** - * The audio channel used by the host's audio in the output audio. The default value is 0, and the value range is [0, 5]. - * - `0`: (Recommended) The defaut setting, which supports dual channels at most and depends on the upstream of the host. - * - `1`: The host's audio uses the FL audio channel. If the host's upstream uses multiple audio channels, the Agora server mixes them into mono first. - * - `2`: The host's audio uses the FC audio channel. If the host's upstream uses multiple audio channels, the Agora server mixes them into mono first. 
- * - `3`: The host's audio uses the FR audio channel. If the host's upstream uses multiple audio channels, the Agora server mixes them into mono first. - * - `4`: The host's audio uses the BL audio channel. If the host's upstream uses multiple audio channels, the Agora server mixes them into mono first. - * - `5`: The host's audio uses the BR audio channel. If the host's upstream uses multiple audio channels, the Agora server mixes them into mono first. - * - `0xFF` or a value greater than 5: The host's audio is muted, and the Agora server removes the host's audio. + * The audio channel used by the host's audio in the output audio. The default value is 0, and the + * value range is [0, 5]. + * - `0`: (Recommended) The defaut setting, which supports dual channels at most and depends on + * the upstream of the host. + * - `1`: The host's audio uses the FL audio channel. If the host's upstream uses multiple audio + * channels, the Agora server mixes them into mono first. + * - `2`: The host's audio uses the FC audio channel. If the host's upstream uses multiple audio + * channels, the Agora server mixes them into mono first. + * - `3`: The host's audio uses the FR audio channel. If the host's upstream uses multiple audio + * channels, the Agora server mixes them into mono first. + * - `4`: The host's audio uses the BL audio channel. If the host's upstream uses multiple audio + * channels, the Agora server mixes them into mono first. + * - `5`: The host's audio uses the BR audio channel. If the host's upstream uses multiple audio + * channels, the Agora server mixes them into mono first. + * - `0xFF` or a value greater than 5: The host's audio is muted, and the Agora server removes the + * host's audio. * * @note If the value is not `0`, a special player is required. 
*/ int audioChannel; TranscodingUser() - : uid(0), - x(0), - y(0), - width(0), - height(0), - zOrder(0), - alpha(1.0), - audioChannel(0) {} + : uid(0), x(0), y(0), width(0), height(0), zOrder(0), alpha(1.0), audioChannel(0) {} }; /** @@ -3735,10 +3966,12 @@ struct LiveTranscoding { int height; /** Bitrate of the CDN live output video stream. The default value is 400 Kbps. - Set this parameter according to the Video Bitrate Table. If you set a bitrate beyond the proper range, the SDK automatically adapts it to a value within the range. + Set this parameter according to the Video Bitrate Table. If you set a bitrate beyond the proper + range, the SDK automatically adapts it to a value within the range. */ int videoBitrate; - /** Frame rate of the output video stream set for the CDN live streaming. The default value is 15 fps, and the value range is (0,30]. + /** Frame rate of the output video stream set for the CDN live streaming. The default value is 15 + fps, and the value range is (0,30]. @note The Agora server adjusts any value over 30 to 30. */ @@ -3759,7 +3992,8 @@ struct LiveTranscoding { @note If you set this parameter to other values, Agora adjusts it to the default value of 100. */ VIDEO_CODEC_PROFILE_TYPE videoCodecProfile; - /** The background color in RGB hex value. Value only. Do not include a preceeding #. For example, 0xFFB6C1 (light pink). The default value is 0x000000 (black). + /** The background color in RGB hex value. Value only. Do not include a preceeding #. For example, + * 0xFFB6C1 (light pink). The default value is 0x000000 (black). */ unsigned int backgroundColor; /** Video codec profile types for Media Push. See VIDEO_CODEC_TYPE_FOR_STREAM. */ @@ -3768,10 +4002,12 @@ struct LiveTranscoding { * The value range is [0, 17]. */ unsigned int userCount; - /** Manages the user layout configuration in the Media Push. Agora supports a maximum of 17 transcoding users in a Media Push channel. See `TranscodingUser`. 
+ /** Manages the user layout configuration in the Media Push. Agora supports a maximum of 17 + * transcoding users in a Media Push channel. See `TranscodingUser`. */ TranscodingUser* transcodingUsers; - /** Reserved property. Extra user-defined information to send SEI for the H.264/H.265 video stream to the CDN live client. Maximum length: 4096 Bytes. + /** Reserved property. Extra user-defined information to send SEI for the H.264/H.265 video stream + to the CDN live client. Maximum length: 4096 Bytes. For more information on SEI frame, see [SEI-related questions](https://docs.agora.io/en/faq/sei). */ @@ -3782,31 +4018,38 @@ struct LiveTranscoding { const char* metadata; /** The watermark on the live video. The image format needs to be PNG. See `RtcImage`. - You can add one watermark, or add multiple watermarks using an array. This parameter is used with `watermarkCount`. + You can add one watermark, or add multiple watermarks using an array. This parameter is used with + `watermarkCount`. */ RtcImage* watermark; /** - * The number of watermarks on the live video. The total number of watermarks and background images can range from 0 to 10. This parameter is used with `watermark`. + * The number of watermarks on the live video. The total number of watermarks and background + * images can range from 0 to 10. This parameter is used with `watermark`. */ unsigned int watermarkCount; - /** The number of background images on the live video. The image format needs to be PNG. See `RtcImage`. + /** The number of background images on the live video. The image format needs to be PNG. See + * `RtcImage`. * - * You can add a background image or use an array to add multiple background images. This parameter is used with `backgroundImageCount`. + * You can add a background image or use an array to add multiple background images. This + * parameter is used with `backgroundImageCount`. */ RtcImage* backgroundImage; /** - * The number of background images on the live video. 
The total number of watermarks and background images can range from 0 to 10. This parameter is used with `backgroundImage`. + * The number of background images on the live video. The total number of watermarks and + * background images can range from 0 to 10. This parameter is used with `backgroundImage`. */ unsigned int backgroundImageCount; /** The audio sampling rate (Hz) of the output media stream. See #AUDIO_SAMPLE_RATE_TYPE. */ AUDIO_SAMPLE_RATE_TYPE audioSampleRate; - /** Bitrate (Kbps) of the audio output stream for Media Push. The default value is 48, and the highest value is 128. + /** Bitrate (Kbps) of the audio output stream for Media Push. The default value is 48, and the + * highest value is 128. */ int audioBitrate; - /** The number of audio channels for Media Push. Agora recommends choosing 1 (mono), or 2 (stereo) audio channels. Special players are required if you choose 3, 4, or 5. + /** The number of audio channels for Media Push. Agora recommends choosing 1 (mono), or 2 (stereo) + * audio channels. Special players are required if you choose 3, 4, or 5. * - 1: (Default) Mono. * - 2: Stereo. * - 3: Three audio channels. @@ -3817,7 +4060,8 @@ struct LiveTranscoding { /** Audio codec profile type for Media Push. See #AUDIO_CODEC_PROFILE_TYPE. */ AUDIO_CODEC_PROFILE_TYPE audioCodecProfile; - /** Advanced features of the RTMP or RTMPS streaming with transcoding. See LiveStreamAdvancedFeature. + /** Advanced features of the RTMP or RTMPS streaming with transcoding. See + * LiveStreamAdvancedFeature. 
*/ LiveStreamAdvancedFeature* advancedFeatures; @@ -3834,7 +4078,7 @@ struct LiveTranscoding { videoCodecProfile(VIDEO_CODEC_PROFILE_HIGH), backgroundColor(0x000000), videoCodecType(VIDEO_CODEC_H264_FOR_STREAM), - userCount(0), + userCount(0), transcodingUsers(OPTIONAL_NULLPTR), transcodingExtraInfo(OPTIONAL_NULLPTR), metadata(OPTIONAL_NULLPTR), @@ -3860,12 +4104,14 @@ struct TranscodingVideoStream { VIDEO_SOURCE_TYPE sourceType; /** * The ID of the remote user. - * @note Use this parameter only when the source type of the video for the video mixing on the local client is `VIDEO_SOURCE_REMOTE`. + * @note Use this parameter only when the source type of the video for the video mixing on the + * local client is `VIDEO_SOURCE_REMOTE`. */ uid_t remoteUserUid; /** * The URL of the image. - * @note Use this parameter only when the source type of the video for the video mixing on the local client is `RTC_IMAGE`. + * @note Use this parameter only when the source type of the video for the video mixing on the + * local client is `RTC_IMAGE`. */ const char* imageUrl; /** @@ -3873,11 +4119,13 @@ struct TranscodingVideoStream { */ int mediaPlayerId; /** - * The horizontal displacement of the top-left corner of the video for the video mixing on the client relative to the top-left corner (origin) of the canvas for this video mixing. + * The horizontal displacement of the top-left corner of the video for the video mixing on the + * client relative to the top-left corner (origin) of the canvas for this video mixing. */ int x; /** - * The vertical displacement of the top-left corner of the video for the video mixing on the client relative to the top-left corner (origin) of the canvas for this video mixing. + * The vertical displacement of the top-left corner of the video for the video mixing on the + * client relative to the top-left corner (origin) of the canvas for this video mixing. 
*/ int y; /** @@ -3889,13 +4137,16 @@ struct TranscodingVideoStream { */ int height; /** - * The number of the layer to which the video for the video mixing on the local client belongs. The value range is [0,100]. + * The number of the layer to which the video for the video mixing on the local client belongs. + * The value range is [0,100]. * - 0: (Default) The layer is at the bottom. * - 100: The layer is at the top. */ int zOrder; /** - * The transparency of the video for the video mixing on the local client. The value range is [0.0,1.0]. 0.0 means the transparency is completely transparent. 1.0 means the transparency is opaque. + * The transparency of the video for the video mixing on the local client. The value range is + * [0.0,1.0]. 0.0 means the transparency is completely transparent. 1.0 means the transparency is + * opaque. */ double alpha; /** @@ -3907,16 +4158,16 @@ struct TranscodingVideoStream { bool mirror; TranscodingVideoStream() - : sourceType(VIDEO_SOURCE_CAMERA_PRIMARY), - remoteUserUid(0), - imageUrl(OPTIONAL_NULLPTR), - x(0), - y(0), - width(0), - height(0), - zOrder(0), - alpha(1.0), - mirror(false) {} + : sourceType(VIDEO_SOURCE_CAMERA_PRIMARY), + remoteUserUid(0), + imageUrl(OPTIONAL_NULLPTR), + x(0), + y(0), + width(0), + height(0), + zOrder(0), + alpha(1.0), + mirror(false) {} }; /** @@ -3932,17 +4183,25 @@ struct LocalTranscoderConfiguration { */ TranscodingVideoStream* videoInputStreams; /** - * The encoding configuration of the mixed video stream after the video mixing on the local client. See VideoEncoderConfiguration. + * The encoding configuration of the mixed video stream after the video mixing on the local + * client. See VideoEncoderConfiguration. */ VideoEncoderConfiguration videoOutputConfiguration; /** - * Whether to use the timestamp when the primary camera captures the video frame as the timestamp of the mixed video frame. 
- * - true: (Default) Use the timestamp of the captured video frame as the timestamp of the mixed video frame. - * - false: Do not use the timestamp of the captured video frame as the timestamp of the mixed video frame. Instead, use the timestamp when the mixed video frame is constructed. + * Whether to use the timestamp when the primary camera captures the video frame as the timestamp + * of the mixed video frame. + * - true: (Default) Use the timestamp of the captured video frame as the timestamp of the mixed + * video frame. + * - false: Do not use the timestamp of the captured video frame as the timestamp of the mixed + * video frame. Instead, use the timestamp when the mixed video frame is constructed. */ bool syncWithPrimaryCamera; - LocalTranscoderConfiguration() : streamCount(0), videoInputStreams(OPTIONAL_NULLPTR), videoOutputConfiguration(), syncWithPrimaryCamera(true) {} + LocalTranscoderConfiguration() + : streamCount(0), + videoInputStreams(OPTIONAL_NULLPTR), + videoOutputConfiguration(), + syncWithPrimaryCamera(true) {} }; enum VIDEO_TRANSCODER_ERROR { @@ -3972,6 +4231,77 @@ enum VIDEO_TRANSCODER_ERROR { VT_ERR_INTERNAL = 20 }; + +/** + * The audio streams for the video mixing on the local client. + */ +struct MixedAudioStream { + /** + * The source type of audio for the audio mixing on the local client. See #AUDIO_SOURCE_TYPE. + */ + AUDIO_SOURCE_TYPE sourceType; + /** + * The ID of the remote user. + * @note Use this parameter only when the source type is `AUDIO_SOURCE_REMOTE`. + */ + uid_t remoteUserUid; + /** + * The channel ID of the remote user. + * @note Use this parameter only when the source type is `AUDIO_SOURCE_REMOTE`. + */ + const char* channelId; + /** + * The track ID of the local track. + * @note Use this parameter only when the source type is `AUDIO_SOURCE_REMOTE`. 
+ */ + track_id_t trackId; + + MixedAudioStream(AUDIO_SOURCE_TYPE source) + : sourceType(source), + remoteUserUid(0), + channelId(NULL), + trackId(-1) {} + + MixedAudioStream(AUDIO_SOURCE_TYPE source, track_id_t track) + : sourceType(source), + trackId(track) {} + + MixedAudioStream(AUDIO_SOURCE_TYPE source, uid_t uid, const char* channel) + : sourceType(source), + remoteUserUid(uid), + channelId(channel) {} + + MixedAudioStream(AUDIO_SOURCE_TYPE source, uid_t uid, const char* channel, track_id_t track) + : sourceType(source), + remoteUserUid(uid), + channelId(channel), + trackId(track) {} + +}; + +/** + * The configuration of the audio mixing on the local client. + */ +struct LocalAudioMixerConfiguration { + /** + * The number of the audio streams for the audio mixing on the local client. + */ + unsigned int streamCount; + /** + * The source of the streams to mixed; + */ + MixedAudioStream* audioInputStreams; + + /** + * Whether to use the timestamp follow the local mic's audio frame. + * - true: (Default) Use the timestamp of the captured audio frame as the timestamp of the mixed audio frame. + * - false: Do not use the timestamp of the captured audio frame as the timestamp of the mixed audio frame. Instead, use the timestamp when the mixed audio frame is constructed. + */ + bool syncWithLocalMic; + + LocalAudioMixerConfiguration() : streamCount(0), syncWithLocalMic(true) {} +}; + /** * Configurations of the last-mile network test. */ @@ -3990,12 +4320,14 @@ struct LastmileProbeConfig { */ bool probeDownlink; /** - * The expected maximum sending bitrate (bps) of the local user. The value range is [100000, 5000000]. We recommend setting this parameter - * according to the bitrate value set by `setVideoEncoderConfiguration`. + * The expected maximum sending bitrate (bps) of the local user. The value range is [100000, + * 5000000]. We recommend setting this parameter according to the bitrate value set by + * `setVideoEncoderConfiguration`. 
*/ unsigned int expectedUplinkBitrate; /** - * The expected maximum receiving bitrate (bps) of the local user. The value range is [100000,5000000]. + * The expected maximum receiving bitrate (bps) of the local user. The value range is + * [100000,5000000]. */ unsigned int expectedDownlinkBitrate; }; @@ -4009,11 +4341,13 @@ enum LASTMILE_PROBE_RESULT_STATE { */ LASTMILE_PROBE_RESULT_COMPLETE = 1, /** - * 2: The last-mile network probe test is incomplete because the bandwidth estimation is not available due to limited test resources. + * 2: The last-mile network probe test is incomplete because the bandwidth estimation is not + * available due to limited test resources. */ LASTMILE_PROBE_RESULT_INCOMPLETE_NO_BWE = 2, /** - * 3: The last-mile network probe test is not carried out, probably due to poor network conditions. + * 3: The last-mile network probe test is not carried out, probably due to poor network + * conditions. */ LASTMILE_PROBE_RESULT_UNAVAILABLE = 3 }; @@ -4035,9 +4369,7 @@ struct LastmileProbeOneWayResult { */ unsigned int availableBandwidth; - LastmileProbeOneWayResult() : packetLossRate(0), - jitter(0), - availableBandwidth(0) {} + LastmileProbeOneWayResult() : packetLossRate(0), jitter(0), availableBandwidth(0) {} }; /** @@ -4061,16 +4393,13 @@ struct LastmileProbeResult { */ unsigned int rtt; - LastmileProbeResult() - : state(LASTMILE_PROBE_RESULT_UNAVAILABLE), - rtt(0) {} + LastmileProbeResult() : state(LASTMILE_PROBE_RESULT_UNAVAILABLE), rtt(0) {} }; /** * Reasons causing the change of the connection state. */ -enum CONNECTION_CHANGED_REASON_TYPE -{ +enum CONNECTION_CHANGED_REASON_TYPE { /** * 0: The SDK is connecting to the server. */ @@ -4084,11 +4413,13 @@ enum CONNECTION_CHANGED_REASON_TYPE */ CONNECTION_CHANGED_INTERRUPTED = 2, /** - * 3: The connection between the SDK and the server is banned by the server. This error occurs when the user is kicked out of the channel by the server. 
+ * 3: The connection between the SDK and the server is banned by the server. This error occurs + * when the user is kicked out of the channel by the server. */ CONNECTION_CHANGED_BANNED_BY_SERVER = 3, /** - * 4: The SDK fails to join the channel. When the SDK fails to join the channel for more than 20 minutes, this error occurs and the SDK stops reconnecting to the channel. + * 4: The SDK fails to join the channel. When the SDK fails to join the channel for more than 20 + * minutes, this error occurs and the SDK stops reconnecting to the channel. */ CONNECTION_CHANGED_JOIN_FAILED = 4, /** @@ -4100,13 +4431,17 @@ enum CONNECTION_CHANGED_REASON_TYPE */ CONNECTION_CHANGED_INVALID_APP_ID = 6, /** - * 7: The connection fails because the channel name is not valid. Please rejoin the channel with a valid channel name. + * 7: The connection fails because the channel name is not valid. Please rejoin the channel with a + * valid channel name. */ CONNECTION_CHANGED_INVALID_CHANNEL_NAME = 7, /** * 8: The connection fails because the token is not valid. Typical reasons include: - * - The App Certificate for the project is enabled in Agora Console, but you do not use a token when joining the channel. If you enable the App Certificate, you must use a token to join the channel. - * - The `uid` specified when calling `joinChannel` to join the channel is inconsistent with the `uid` passed in when generating the token. + * - The App Certificate for the project is enabled in Agora Console, but you do not use a token + * when joining the channel. If you enable the App Certificate, you must use a token to join the + * channel. + * - The `uid` specified when calling `joinChannel` to join the channel is inconsistent with the + * `uid` passed in when generating the token. */ CONNECTION_CHANGED_INVALID_TOKEN = 8, /** @@ -4115,8 +4450,10 @@ enum CONNECTION_CHANGED_REASON_TYPE CONNECTION_CHANGED_TOKEN_EXPIRED = 9, /** * 10: The connection is rejected by the server. 
Typical reasons include: - * - The user is already in the channel and still calls a method, for example, `joinChannel`, to join the channel. Stop calling this method to clear this error. - * - The user tries to join the channel when conducting a pre-call test. The user needs to call the channel after the call test ends. + * - The user is already in the channel and still calls a method, for example, `joinChannel`, to + * join the channel. Stop calling this method to clear this error. + * - The user tries to join the channel when conducting a pre-call test. The user needs to call + * the channel after the call test ends. */ CONNECTION_CHANGED_REJECTED_BY_SERVER = 10, /** @@ -4128,11 +4465,13 @@ enum CONNECTION_CHANGED_REASON_TYPE */ CONNECTION_CHANGED_RENEW_TOKEN = 12, /** - * 13: The IP address of the client has changed, possibly because the network type, IP address, or port has been changed. + * 13: The IP address of the client has changed, possibly because the network type, IP address, or + * port has been changed. */ CONNECTION_CHANGED_CLIENT_IP_ADDRESS_CHANGED = 13, /** - * 14: Timeout for the keep-alive of the connection between the SDK and the Agora edge server. The connection state changes to CONNECTION_STATE_RECONNECTING. + * 14: Timeout for the keep-alive of the connection between the SDK and the Agora edge server. The + * connection state changes to CONNECTION_STATE_RECONNECTING. */ CONNECTION_CHANGED_KEEP_ALIVE_TIMEOUT = 14, /** @@ -4192,12 +4531,14 @@ enum CLIENT_ROLE_CHANGE_FAILED_REASON { CLIENT_ROLE_CHANGE_FAILED_NOT_AUTHORIZED = 2, /** * 3: The operation of changing role is timeout. + * @deprecated This reason is deprecated. */ - CLIENT_ROLE_CHANGE_FAILED_REQUEST_TIME_OUT = 3, + CLIENT_ROLE_CHANGE_FAILED_REQUEST_TIME_OUT __deprecated = 3, /** * 4: The operation of changing role is interrupted since we lost connection with agora service. + * @deprecated This reason is deprecated. 
*/ - CLIENT_ROLE_CHANGE_FAILED_CONNECTION_FAILED = 4, + CLIENT_ROLE_CHANGE_FAILED_CONNECTION_FAILED __deprecated = 4, }; /** @@ -4227,11 +4568,13 @@ enum WLACC_SUGGEST_ACTION { */ WLACC_SUGGEST_ACTION_CONNECT_SSID = 1, /** - * The user is advised to check whether the AP supports 5G band and enable 5G band (the aciton link is attached), or purchases an AP that supports 5G. AP does not support 5G band. + * The user is advised to check whether the AP supports 5G band and enable 5G band (the aciton + * link is attached), or purchases an AP that supports 5G. AP does not support 5G band. */ WLACC_SUGGEST_ACTION_CHECK_5G = 2, /** - * The user is advised to change the SSID of the 2.4G or 5G band (the aciton link is attached). The SSID of the 2.4G band AP is the same as that of the 5G band. + * The user is advised to change the SSID of the 2.4G or 5G band (the aciton link is attached). + * The SSID of the 2.4G band AP is the same as that of the 5G band. */ WLACC_SUGGEST_ACTION_MODIFY_SSID = 3, }; @@ -4320,8 +4663,9 @@ struct VideoCanvas { uid_t uid; /** - * The uid of video stream composing the video stream from transcoder which will be drawn on this video canvas. - */ + * The uid of video stream composing the video stream from transcoder which will be drawn on this + * video canvas. + */ uid_t subviewUid; /** * Video display window. @@ -4340,7 +4684,7 @@ struct VideoCanvas { * The video mirror mode. See \ref VIDEO_MIRROR_MODE_TYPE "VIDEO_MIRROR_MODE_TYPE". * The default value is VIDEO_MIRROR_MODE_AUTO. * @note - * - For the mirror mode of the local video view: + * - For the mirror mode of the local video view: * If you use a front camera, the SDK enables the mirror mode by default; * if you use a rear camera, the SDK disables the mirror mode by default. * - For the remote user: The mirror mode is disabled by default. @@ -4357,14 +4701,14 @@ struct VideoCanvas { */ VIDEO_SOURCE_TYPE sourceType; /** - * The media player id of AgoraMediaPlayer. 
It should set this parameter when the + * The media player id of AgoraMediaPlayer. It should set this parameter when the * sourceType is VIDEO_SOURCE_MEDIA_PLAYER to show the video that AgoraMediaPlayer is playing. * You can get this value by calling the method \ref getMediaPlayerId(). */ int mediaPlayerId; /** - * If you want to display a certain part of a video frame, you can set - * this value to crop the video frame to show. + * If you want to display a certain part of a video frame, you can set + * this value to crop the video frame to show. * The default value is empty(that is, if it has zero width or height), which means no cropping. */ Rectangle cropArea; @@ -4381,62 +4725,225 @@ struct VideoCanvas { media::base::VIDEO_MODULE_POSITION position; VideoCanvas() - : uid(0), subviewUid(0), view(NULL), backgroundColor(0x00000000), renderMode(media::base::RENDER_MODE_HIDDEN), mirrorMode(VIDEO_MIRROR_MODE_AUTO), - setupMode(VIDEO_VIEW_SETUP_REPLACE), sourceType(VIDEO_SOURCE_CAMERA_PRIMARY), mediaPlayerId(-ERR_NOT_READY), - cropArea(0, 0, 0, 0), enableAlphaMask(false), position(media::base::POSITION_POST_CAPTURER) {} + : uid(0), + subviewUid(0), + view(NULL), + backgroundColor(0x00000000), + renderMode(media::base::RENDER_MODE_HIDDEN), + mirrorMode(VIDEO_MIRROR_MODE_AUTO), + setupMode(VIDEO_VIEW_SETUP_REPLACE), + sourceType(VIDEO_SOURCE_CAMERA_PRIMARY), + mediaPlayerId(-ERR_NOT_READY), + cropArea(0, 0, 0, 0), + enableAlphaMask(false), + position(media::base::POSITION_POST_CAPTURER) {} VideoCanvas(view_t v, media::base::RENDER_MODE_TYPE m, VIDEO_MIRROR_MODE_TYPE mt) - : uid(0), subviewUid(0), view(v), backgroundColor(0x00000000), renderMode(m), mirrorMode(mt), setupMode(VIDEO_VIEW_SETUP_REPLACE), - sourceType(VIDEO_SOURCE_CAMERA_PRIMARY), mediaPlayerId(-ERR_NOT_READY), - cropArea(0, 0, 0, 0), enableAlphaMask(false), position(media::base::POSITION_POST_CAPTURER) {} + : uid(0), + subviewUid(0), + view(v), + backgroundColor(0x00000000), + renderMode(m), + 
mirrorMode(mt), + setupMode(VIDEO_VIEW_SETUP_REPLACE), + sourceType(VIDEO_SOURCE_CAMERA_PRIMARY), + mediaPlayerId(-ERR_NOT_READY), + cropArea(0, 0, 0, 0), + enableAlphaMask(false), + position(media::base::POSITION_POST_CAPTURER) {} VideoCanvas(view_t v, media::base::RENDER_MODE_TYPE m, VIDEO_MIRROR_MODE_TYPE mt, uid_t u) - : uid(u), subviewUid(0), view(v), backgroundColor(0x00000000), renderMode(m), mirrorMode(mt), setupMode(VIDEO_VIEW_SETUP_REPLACE), - sourceType(VIDEO_SOURCE_CAMERA_PRIMARY), mediaPlayerId(-ERR_NOT_READY), - cropArea(0, 0, 0, 0), enableAlphaMask(false), position(media::base::POSITION_POST_CAPTURER) {} - - VideoCanvas(view_t v, media::base::RENDER_MODE_TYPE m, VIDEO_MIRROR_MODE_TYPE mt, uid_t u, uid_t subu) - : uid(u), subviewUid(subu), view(v), backgroundColor(0x00000000), renderMode(m), mirrorMode(mt), setupMode(VIDEO_VIEW_SETUP_REPLACE), - sourceType(VIDEO_SOURCE_CAMERA_PRIMARY), mediaPlayerId(-ERR_NOT_READY), - cropArea(0, 0, 0, 0), enableAlphaMask(false), position(media::base::POSITION_POST_CAPTURER) {} + : uid(u), + subviewUid(0), + view(v), + backgroundColor(0x00000000), + renderMode(m), + mirrorMode(mt), + setupMode(VIDEO_VIEW_SETUP_REPLACE), + sourceType(VIDEO_SOURCE_CAMERA_PRIMARY), + mediaPlayerId(-ERR_NOT_READY), + cropArea(0, 0, 0, 0), + enableAlphaMask(false), + position(media::base::POSITION_POST_CAPTURER) {} + + VideoCanvas(view_t v, media::base::RENDER_MODE_TYPE m, VIDEO_MIRROR_MODE_TYPE mt, uid_t u, + uid_t subu) + : uid(u), + subviewUid(subu), + view(v), + backgroundColor(0x00000000), + renderMode(m), + mirrorMode(mt), + setupMode(VIDEO_VIEW_SETUP_REPLACE), + sourceType(VIDEO_SOURCE_CAMERA_PRIMARY), + mediaPlayerId(-ERR_NOT_READY), + cropArea(0, 0, 0, 0), + enableAlphaMask(false), + position(media::base::POSITION_POST_CAPTURER) {} }; /** Image enhancement options. */ struct BeautyOptions { /** The contrast level. - */ + */ enum LIGHTENING_CONTRAST_LEVEL { - /** Low contrast level. 
*/ - LIGHTENING_CONTRAST_LOW = 0, - /** (Default) Normal contrast level. */ - LIGHTENING_CONTRAST_NORMAL = 1, - /** High contrast level. */ - LIGHTENING_CONTRAST_HIGH = 2, + /** Low contrast level. */ + LIGHTENING_CONTRAST_LOW = 0, + /** (Default) Normal contrast level. */ + LIGHTENING_CONTRAST_NORMAL = 1, + /** High contrast level. */ + LIGHTENING_CONTRAST_HIGH = 2, }; - /** The contrast level, used with the `lighteningLevel` parameter. The larger the value, the greater the contrast between light and dark. See #LIGHTENING_CONTRAST_LEVEL. - */ + /** The contrast level, used with the `lighteningLevel` parameter. The larger the value, the + * greater the contrast between light and dark. See #LIGHTENING_CONTRAST_LEVEL. + */ LIGHTENING_CONTRAST_LEVEL lighteningContrastLevel; - /** The brightness level. The value ranges from 0.0 (original) to 1.0. The default value is 0.0. The greater the value, the greater the degree of whitening. */ + /** The brightness level. The value ranges from 0.0 (original) to 1.0. The default value is 0.0. + * The greater the value, the greater the degree of whitening. */ float lighteningLevel; - /** The value ranges from 0.0 (original) to 1.0. The default value is 0.0. The greater the value, the greater the degree of skin grinding. - */ + /** The value ranges from 0.0 (original) to 1.0. The default value is 0.0. The greater the value, + * the greater the degree of skin grinding. + */ float smoothnessLevel; - /** The redness level. The value ranges from 0.0 (original) to 1.0. The default value is 0.0. The larger the value, the greater the rosy degree. - */ + /** The redness level. The value ranges from 0.0 (original) to 1.0. The default value is 0.0. The + * larger the value, the greater the rosy degree. + */ float rednessLevel; - /** The sharpness level. The value ranges from 0.0 (original) to 1.0. The default value is 0.0. The larger the value, the greater the sharpening degree. - */ + /** The sharpness level. 
The value ranges from 0.0 (original) to 1.0. The default value is 0.0. + * The larger the value, the greater the sharpening degree. + */ float sharpnessLevel; - BeautyOptions(LIGHTENING_CONTRAST_LEVEL contrastLevel, float lightening, float smoothness, float redness, float sharpness) : lighteningContrastLevel(contrastLevel), lighteningLevel(lightening), smoothnessLevel(smoothness), rednessLevel(redness), sharpnessLevel(sharpness) {} + BeautyOptions(LIGHTENING_CONTRAST_LEVEL contrastLevel, float lightening, float smoothness, + float redness, float sharpness) + : lighteningContrastLevel(contrastLevel), + lighteningLevel(lightening), + smoothnessLevel(smoothness), + rednessLevel(redness), + sharpnessLevel(sharpness) {} - BeautyOptions() : lighteningContrastLevel(LIGHTENING_CONTRAST_NORMAL), lighteningLevel(0), smoothnessLevel(0), rednessLevel(0), sharpnessLevel(0) {} + BeautyOptions() + : lighteningContrastLevel(LIGHTENING_CONTRAST_NORMAL), + lighteningLevel(0), + smoothnessLevel(0), + rednessLevel(0), + sharpnessLevel(0) {} +}; + +/** Face shape area options. This structure defines options for facial adjustments on different facial areas. + * + * @technical preview + */ +struct FaceShapeAreaOptions { + /** The specific facial area to be adjusted. + */ + enum FACE_SHAPE_AREA { + /** (Default) Invalid area. */ + FACE_SHAPE_AREA_NONE = -1, + /** Head Scale, reduces the size of head. */ + FACE_SHAPE_AREA_HEADSCALE = 0, + /** Forehead, adjusts the size of forehead. */ + FACE_SHAPE_AREA_FOREHEAD = 1, + /** Face Contour, slims the facial contour. */ + FACE_SHAPE_AREA_FACECONTOUR = 2, + /** Face Length, adjusts the length of face. */ + FACE_SHAPE_AREA_FACELENGTH = 3, + /** Face Width, narrows the width of face. */ + FACE_SHAPE_AREA_FACEWIDTH = 4, + /** Cheekbone, adjusts the size of cheekbone. */ + FACE_SHAPE_AREA_CHEEKBONE = 5, + /** Cheek, adjusts the size of cheek. */ + FACE_SHAPE_AREA_CHEEK = 6, + /** Chin, adjusts the length of chin. 
*/ + FACE_SHAPE_AREA_CHIN = 7, + /** Eye Scale, adjusts the size of eyes. */ + FACE_SHAPE_AREA_EYESCALE = 8, + /** Nose Length, adjusts the length of nose. */ + FACE_SHAPE_AREA_NOSELENGTH = 9, + /** Nose Width, adjusts the width of nose. */ + FACE_SHAPE_AREA_NOSEWIDTH = 10, + /** Mouth Scale, adjusts the size of mouth. */ + FACE_SHAPE_AREA_MOUTHSCALE = 11, + }; + + /** The specific facial area to be adjusted, See #FACE_SHAPE_AREA. + */ + FACE_SHAPE_AREA shapeArea; + + /** The intensity of the pinching effect applied to the specified facial area. + * For the following area values: #FACE_SHAPE_AREA_FOREHEAD, #FACE_SHAPE_AREA_FACELENGTH, #FACE_SHAPE_AREA_CHIN, #FACE_SHAPE_AREA_NOSELENGTH, #FACE_SHAPE_AREA_NOSEWIDTH, #FACE_SHAPE_AREA_MOUTHSCALE, the value ranges from -100 to 100. + * The default value is 0. The greater the absolute value, the stronger the intensity applied to the specified facial area, and negative values indicate the opposite direction. + * For enumeration values other than the above, the value ranges from 0 to 100. The default value is 0. The greater the value, the stronger the intensity applied to the specified facial area. + */ + int shapeIntensity; + + FaceShapeAreaOptions(FACE_SHAPE_AREA shapeArea, int areaIntensity) : shapeArea(shapeArea), shapeIntensity(areaIntensity) {} + + FaceShapeAreaOptions() : shapeArea(FACE_SHAPE_AREA_NONE), shapeIntensity(0) {} +}; + +/** Face shape beauty options. This structure defines options for facial adjustments of different facial styles. + * + * @technical preview + */ +struct FaceShapeBeautyOptions { + /** The face shape style. + */ + enum FACE_SHAPE_BEAUTY_STYLE { + /** (Default) Female face shape style. */ + FACE_SHAPE_BEAUTY_STYLE_FEMALE = 0, + /** Male face shape style. */ + FACE_SHAPE_BEAUTY_STYLE_MALE = 1, + }; + + /** The face shape style, See #FACE_SHAPE_BEAUTY_STYLE. + */ + FACE_SHAPE_BEAUTY_STYLE shapeStyle; + + /** The intensity of the pinching effect applied to the specified facial style. 
The value ranges from 0 (original) to 100. The default value is 0. The greater the value, the stronger the intensity applied to face pinching. + */ + int styleIntensity; + + FaceShapeBeautyOptions(FACE_SHAPE_BEAUTY_STYLE shapeStyle, int styleIntensity) : shapeStyle(shapeStyle), styleIntensity(styleIntensity) {} + + FaceShapeBeautyOptions() : shapeStyle(FACE_SHAPE_BEAUTY_STYLE_FEMALE), styleIntensity(50) {} +}; + +/** Filter effect options. This structure defines options for filter effect. + * + * @since v4.4.1 + */ +struct FilterEffectOptions { + /** + * The local absolute path of the custom 3D Cube path. Only cube format is supported. + * The cube file must strictly comply with the Cube LUT Specification; otherwise, the filter effects will not take effect. + * + * The following is an example of the Cube file format. The cube file starts with `LUT_3D_SIZE`, which indicates the cube size. In filter effects, the cube size is limited to 32. + + * LUT_3D_SIZE 32 + * 0.0039215689 0 0.0039215682 + * 0.0086021447 0.0037950677 0 + * 0.0728652592 0.0039215689 0 + * ... + * + * The SDK provides a built-in cube named `built_in_whiten.cube` for whitening. To use this cube, specify the path to `built_in_whiten_filter` + */ + const char * path; + + /** + * The intensity of specified filter effect. The value ranges from 0.0 to 1.0. The default value is 0.5. The greater the value, the stronger the intensity of the filter. + */ + float strength; + + FilterEffectOptions(const char * lut3dPath, float filterStrength) : path(lut3dPath), strength(filterStrength) {} + + FilterEffectOptions() : path(OPTIONAL_NULLPTR), strength(0.5) {} }; struct LowlightEnhanceOptions { @@ -4444,7 +4951,9 @@ struct LowlightEnhanceOptions { * The low-light enhancement mode. */ enum LOW_LIGHT_ENHANCE_MODE { - /** 0: (Default) Automatic mode. 
The SDK automatically enables or disables the low-light enhancement feature according to the ambient light to compensate for the lighting level or prevent overexposure, as necessary. */ + /** 0: (Default) Automatic mode. The SDK automatically enables or disables the low-light + enhancement feature according to the ambient light to compensate for the lighting level or + prevent overexposure, as necessary. */ LOW_LIGHT_ENHANCE_AUTO = 0, /** Manual mode. Users need to enable or disable the low-light enhancement feature manually. */ LOW_LIGHT_ENHANCE_MANUAL = 1, @@ -4454,11 +4963,14 @@ struct LowlightEnhanceOptions { */ enum LOW_LIGHT_ENHANCE_LEVEL { /** - * 0: (Default) Promotes video quality during low-light enhancement. It processes the brightness, details, and noise of the video image. The performance consumption is moderate, the processing speed is moderate, and the overall video quality is optimal. + * 0: (Default) Promotes video quality during low-light enhancement. It processes the + * brightness, details, and noise of the video image. The performance consumption is moderate, + * the processing speed is moderate, and the overall video quality is optimal. */ LOW_LIGHT_ENHANCE_LEVEL_HIGH_QUALITY = 0, /** - * Promotes performance during low-light enhancement. It processes the brightness and details of the video image. The processing speed is faster. + * Promotes performance during low-light enhancement. It processes the brightness and details of + * the video image. The processing speed is faster. 
*/ LOW_LIGHT_ENHANCE_LEVEL_FAST = 1, }; @@ -4471,9 +4983,11 @@ struct LowlightEnhanceOptions { */ LOW_LIGHT_ENHANCE_LEVEL level; - LowlightEnhanceOptions(LOW_LIGHT_ENHANCE_MODE lowlightMode, LOW_LIGHT_ENHANCE_LEVEL lowlightLevel) : mode(lowlightMode), level(lowlightLevel) {} + LowlightEnhanceOptions(LOW_LIGHT_ENHANCE_MODE lowlightMode, LOW_LIGHT_ENHANCE_LEVEL lowlightLevel) + : mode(lowlightMode), level(lowlightLevel) {} - LowlightEnhanceOptions() : mode(LOW_LIGHT_ENHANCE_AUTO), level(LOW_LIGHT_ENHANCE_LEVEL_HIGH_QUALITY) {} + LowlightEnhanceOptions() + : mode(LOW_LIGHT_ENHANCE_AUTO), level(LOW_LIGHT_ENHANCE_LEVEL_HIGH_QUALITY) {} }; /** * The video noise reduction options. @@ -4484,7 +4998,8 @@ struct VideoDenoiserOptions { /** The video noise reduction mode. */ enum VIDEO_DENOISER_MODE { - /** 0: (Default) Automatic mode. The SDK automatically enables or disables the video noise reduction feature according to the ambient light. */ + /** 0: (Default) Automatic mode. The SDK automatically enables or disables the video noise + reduction feature according to the ambient light. */ VIDEO_DENOISER_AUTO = 0, /** Manual mode. Users need to enable or disable the video noise reduction feature manually. */ VIDEO_DENOISER_MANUAL = 1, @@ -4494,21 +5009,20 @@ struct VideoDenoiserOptions { */ enum VIDEO_DENOISER_LEVEL { /** - * 0: (Default) Promotes video quality during video noise reduction. `HIGH_QUALITY` balances performance consumption and video noise reduction quality. - * The performance consumption is moderate, the video noise reduction speed is moderate, and the overall video quality is optimal. + * 0: (Default) Promotes video quality during video noise reduction. `HIGH_QUALITY` balances + * performance consumption and video noise reduction quality. The performance consumption is + * moderate, the video noise reduction speed is moderate, and the overall video quality is + * optimal. 
*/ VIDEO_DENOISER_LEVEL_HIGH_QUALITY = 0, /** - * Promotes reducing performance consumption during video noise reduction. `FAST` prioritizes reducing performance consumption over video noise reduction quality. - * The performance consumption is lower, and the video noise reduction speed is faster. To avoid a noticeable shadowing effect (shadows trailing behind moving objects) in the processed video, Agora recommends that you use `FAST` when the camera is fixed. + * Promotes reducing performance consumption during video noise reduction. `FAST` prioritizes + * reducing performance consumption over video noise reduction quality. The performance + * consumption is lower, and the video noise reduction speed is faster. To avoid a noticeable + * shadowing effect (shadows trailing behind moving objects) in the processed video, Agora + * recommends that you use `FAST` when the camera is fixed. */ VIDEO_DENOISER_LEVEL_FAST = 1, - /** - * Enhanced video noise reduction. `STRENGTH` prioritizes video noise reduction quality over reducing performance consumption. - * The performance consumption is higher, the video noise reduction speed is slower, and the video noise reduction quality is better. - * If `HIGH_QUALITY` is not enough for your video noise reduction needs, you can use `STRENGTH`. - */ - VIDEO_DENOISER_LEVEL_STRENGTH = 2, }; /** The video noise reduction mode. See #VIDEO_DENOISER_MODE. 
*/ @@ -4518,7 +5032,8 @@ struct VideoDenoiserOptions { */ VIDEO_DENOISER_LEVEL level; - VideoDenoiserOptions(VIDEO_DENOISER_MODE denoiserMode, VIDEO_DENOISER_LEVEL denoiserLevel) : mode(denoiserMode), level(denoiserLevel) {} + VideoDenoiserOptions(VIDEO_DENOISER_MODE denoiserMode, VIDEO_DENOISER_LEVEL denoiserLevel) + : mode(denoiserMode), level(denoiserLevel) {} VideoDenoiserOptions() : mode(VIDEO_DENOISER_AUTO), level(VIDEO_DENOISER_LEVEL_HIGH_QUALITY) {} }; @@ -4528,17 +5043,24 @@ struct VideoDenoiserOptions { * @since v4.0.0 */ struct ColorEnhanceOptions { - /** The level of color enhancement. The value range is [0.0,1.0]. `0.0` is the default value, which means no color enhancement is applied to the video. The higher the value, the higher the level of color enhancement. + /** The level of color enhancement. The value range is [0.0,1.0]. `0.0` is the default value, + * which means no color enhancement is applied to the video. The higher the value, the higher the + * level of color enhancement. */ float strengthLevel; - /** The level of skin tone protection. The value range is [0.0,1.0]. `0.0` means no skin tone protection. The higher the value, the higher the level of skin tone protection. - * The default value is `1.0`. When the level of color enhancement is higher, the portrait skin tone can be significantly distorted, so you need to set the level of skin tone protection; when the level of skin tone protection is higher, the color enhancement effect can be slightly reduced. - * Therefore, to get the best color enhancement effect, Agora recommends that you adjust `strengthLevel` and `skinProtectLevel` to get the most appropriate values. + /** The level of skin tone protection. The value range is [0.0,1.0]. `0.0` means no skin tone + * protection. The higher the value, the higher the level of skin tone protection. The default + * value is `1.0`. 
When the level of color enhancement is higher, the portrait skin tone can be + * significantly distorted, so you need to set the level of skin tone protection; when the level + * of skin tone protection is higher, the color enhancement effect can be slightly reduced. + * Therefore, to get the best color enhancement effect, Agora recommends that you adjust + * `strengthLevel` and `skinProtectLevel` to get the most appropriate values. */ float skinProtectLevel; - ColorEnhanceOptions(float stength, float skinProtect) : strengthLevel(stength), skinProtectLevel(skinProtect) {} + ColorEnhanceOptions(float stength, float skinProtect) + : strengthLevel(stength), skinProtectLevel(skinProtect) {} ColorEnhanceOptions() : strengthLevel(0), skinProtectLevel(1) {} }; @@ -4562,12 +5084,12 @@ struct VirtualBackgroundSource { * The background source is a file in PNG or JPG format. */ BACKGROUND_IMG = 2, - /** + /** * The background source is the blurred original video frame. * */ BACKGROUND_BLUR = 3, - /** - * The background source is a file in MP4, AVI, MKV, FLV format. + /** + * The background source is a file in MP4, AVI, MKV, FLV format. * */ BACKGROUND_VIDEO = 4, }; @@ -4575,11 +5097,14 @@ struct VirtualBackgroundSource { /** The degree of blurring applied to the background source. */ enum BACKGROUND_BLUR_DEGREE { - /** 1: The degree of blurring applied to the custom background image is low. The user can almost see the background clearly. */ + /** 1: The degree of blurring applied to the custom background image is low. The user can almost + see the background clearly. */ BLUR_DEGREE_LOW = 1, - /** 2: The degree of blurring applied to the custom background image is medium. It is difficult for the user to recognize details in the background. */ + /** 2: The degree of blurring applied to the custom background image is medium. It is difficult + for the user to recognize details in the background. 
*/ BLUR_DEGREE_MEDIUM = 2, - /** 3: (Default) The degree of blurring applied to the custom background image is high. The user can barely see any distinguishing features in the background. */ + /** 3: (Default) The degree of blurring applied to the custom background image is high. The user + can barely see any distinguishing features in the background. */ BLUR_DEGREE_HIGH = 3, }; @@ -4588,34 +5113,41 @@ struct VirtualBackgroundSource { BACKGROUND_SOURCE_TYPE background_source_type; /** - * The color of the custom background image. The format is a hexadecimal integer defined by RGB, without the # sign, - * such as 0xFFB6C1 for light pink. The default value is 0xFFFFFF, which signifies white. The value range - * is [0x000000,0xFFFFFF]. If the value is invalid, the SDK replaces the original background image with a white - * background image. + * The color of the custom background image. The format is a hexadecimal integer defined by RGB, + * without the # sign, such as 0xFFB6C1 for light pink. The default value is 0xFFFFFF, which + * signifies white. The value range is [0x000000,0xFFFFFF]. If the value is invalid, the SDK + * replaces the original background image with a white background image. * - * @note This parameter takes effect only when the type of the custom background image is `BACKGROUND_COLOR`. + * @note This parameter takes effect only when the type of the custom background image is + * `BACKGROUND_COLOR`. */ unsigned int color; /** - * The local absolute path of the custom background image. PNG and JPG formats are supported. If the path is invalid, - * the SDK replaces the original background image with a white background image. + * The local absolute path of the custom background image. PNG and JPG formats are supported. If + * the path is invalid, the SDK replaces the original background image with a white background + * image. * - * @note This parameter takes effect only when the type of the custom background image is `BACKGROUND_IMG`. 
+ * @note This parameter takes effect only when the type of the custom background image is + * `BACKGROUND_IMG`. */ const char* source; /** The degree of blurring applied to the custom background image. See BACKGROUND_BLUR_DEGREE. - * @note This parameter takes effect only when the type of the custom background image is `BACKGROUND_BLUR`. + * @note This parameter takes effect only when the type of the custom background image is + * `BACKGROUND_BLUR`. */ BACKGROUND_BLUR_DEGREE blur_degree; - VirtualBackgroundSource() : background_source_type(BACKGROUND_COLOR), color(0xffffff), source(OPTIONAL_NULLPTR), blur_degree(BLUR_DEGREE_HIGH) {} + VirtualBackgroundSource() + : background_source_type(BACKGROUND_COLOR), + color(0xffffff), + source(OPTIONAL_NULLPTR), + blur_degree(BLUR_DEGREE_HIGH) {} }; struct SegmentationProperty { - - enum SEG_MODEL_TYPE { + enum SEG_MODEL_TYPE { SEG_MODEL_AI = 1, SEG_MODEL_GREEN = 2 @@ -4625,34 +5157,33 @@ struct SegmentationProperty { float greenCapacity; - - SegmentationProperty() : modelType(SEG_MODEL_AI), greenCapacity(0.5){} + SegmentationProperty() : modelType(SEG_MODEL_AI), greenCapacity(0.5) {} }; /** The type of custom audio track -*/ + */ enum AUDIO_TRACK_TYPE { - /** + /** * -1: Invalid audio track */ AUDIO_TRACK_INVALID = -1, - /** + /** * 0: Mixable audio track - * You can push more than one mixable Audio tracks into one RTC connection(channel id + uid), + * You can push more than one mixable Audio tracks into one RTC connection(channel id + uid), * and SDK will mix these tracks into one audio track automatically. * However, compare to direct audio track, mixable track might cause extra 30ms+ delay. */ AUDIO_TRACK_MIXABLE = 0, /** * 1: Direct audio track - * You can only push one direct (non-mixable) audio track into one RTC connection(channel id + uid). - * Compare to mixable stream, you can have lower lantency using direct audio track. 
+ * You can only push one direct (non-mixable) audio track into one RTC connection(channel id + + * uid). Compare to mixable stream, you can have lower lantency using direct audio track. */ AUDIO_TRACK_DIRECT = 1, }; /** The configuration of custom audio track -*/ + */ struct AudioTrackConfig { /** * Enable local playback, enabled by default @@ -4660,9 +5191,14 @@ struct AudioTrackConfig { * false: Do not enable local playback */ bool enableLocalPlayback; + /** + * Whether to enable APM (AEC/ANS/AGC) processing when the trackType is AUDIO_TRACK_DIRECT. + * false: (Default) Do not enable APM processing. + * true: Enable APM processing. + */ + bool enableAudioProcessing; - AudioTrackConfig() - : enableLocalPlayback(true) {} + AudioTrackConfig() : enableLocalPlayback(true),enableAudioProcessing(false) {} }; /** @@ -4709,11 +5245,12 @@ enum VOICE_BEAUTIFIER_PRESET { CHAT_BEAUTIFIER_VITALITY = 0x01010300, /** * Singing beautifier effect. - * - If you call `setVoiceBeautifierPreset`(SINGING_BEAUTIFIER), you can beautify a male-sounding voice and add a reverberation effect - * that sounds like singing in a small room. Agora recommends not using `setVoiceBeautifierPreset`(SINGING_BEAUTIFIER) to process - * a female-sounding voice; otherwise, you may experience vocal distortion. - * - If you call `setVoiceBeautifierParameters`(SINGING_BEAUTIFIER, param1, param2), you can beautify a male- or - * female-sounding voice and add a reverberation effect. + * - If you call `setVoiceBeautifierPreset`(SINGING_BEAUTIFIER), you can beautify a male-sounding + * voice and add a reverberation effect that sounds like singing in a small room. Agora recommends + * not using `setVoiceBeautifierPreset`(SINGING_BEAUTIFIER) to process a female-sounding voice; + * otherwise, you may experience vocal distortion. + * - If you call `setVoiceBeautifierParameters`(SINGING_BEAUTIFIER, param1, param2), you can + * beautify a male- or female-sounding voice and add a reverberation effect. 
*/ SINGING_BEAUTIFIER = 0x01020100, /** A more vigorous voice. @@ -4743,8 +5280,9 @@ enum VOICE_BEAUTIFIER_PRESET { /** * A ultra-high quality voice, which makes the audio clearer and restores more details. * - To achieve better audio effect quality, Agora recommends that you call `setAudioProfile` - * and set the `profile` to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` - * and `scenario` to `AUDIO_SCENARIO_HIGH_DEFINITION(6)` before calling `setVoiceBeautifierPreset`. + * and set the `profile` to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or + * `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` and `scenario` to + * `AUDIO_SCENARIO_HIGH_DEFINITION(6)` before calling `setVoiceBeautifierPreset`. * - If you have an audio capturing device that can already restore audio details to a high * degree, Agora recommends that you do not enable ultra-high quality; otherwise, the SDK may * over-restore audio details, and you may not hear the anticipated voice effect. @@ -4754,7 +5292,9 @@ enum VOICE_BEAUTIFIER_PRESET { /** Preset voice effects. * - * For better voice effects, Agora recommends setting the `profile` parameter of `setAudioProfile` to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO` before using the following presets: + * For better voice effects, Agora recommends setting the `profile` parameter of `setAudioProfile` + * to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO` before using + * the following presets: * * - `ROOM_ACOUSTICS_KTV` * - `ROOM_ACOUSTICS_VOCAL_CONCERT` @@ -4802,8 +5342,8 @@ enum AUDIO_EFFECT_PRESET { */ ROOM_ACOUSTICS_ETHEREAL = 0x02010700, /** A 3D voice effect that makes the voice appear to be moving around the user. The default cycle - * period of the 3D voice effect is 10 seconds. To change the cycle period, call `setAudioEffectParameters` - * after this method. + * period of the 3D voice effect is 10 seconds. 
To change the cycle period, call + * `setAudioEffectParameters` after this method. * * @note * - Before using this preset, set the `profile` parameter of `setAudioProfile` to @@ -4825,12 +5365,12 @@ enum AUDIO_EFFECT_PRESET { */ ROOM_ACOUSTICS_VIRTUAL_SURROUND_SOUND = 0x02010900, /** The voice effect for chorus. - * + * * @note: To achieve better audio effect quality, Agora recommends calling \ref * IRtcEngine::setAudioProfile "setAudioProfile" and setting the `profile` parameter to * `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before * setting this enumerator. - */ + */ ROOM_ACOUSTICS_CHORUS = 0x02010D00, /** A middle-aged man's voice. * @@ -4841,14 +5381,14 @@ enum AUDIO_EFFECT_PRESET { VOICE_CHANGER_EFFECT_UNCLE = 0x02020100, /** A senior man's voice. * - * @note Agora recommends using this enumerator to process a male-sounding voice; otherwise, you may - * not hear the anticipated voice effect. + * @note Agora recommends using this enumerator to process a male-sounding voice; otherwise, you + * may not hear the anticipated voice effect. */ VOICE_CHANGER_EFFECT_OLDMAN = 0x02020200, /** A boy's voice. * - * @note Agora recommends using this enumerator to process a male-sounding voice; otherwise, you may - * not hear the anticipated voice effect. + * @note Agora recommends using this enumerator to process a male-sounding voice; otherwise, you + * may not hear the anticipated voice effect. */ VOICE_CHANGER_EFFECT_BOY = 0x02020300, /** A young woman's voice. @@ -4860,8 +5400,8 @@ enum AUDIO_EFFECT_PRESET { VOICE_CHANGER_EFFECT_SISTER = 0x02020400, /** A girl's voice. * - * @note Agora recommends using this enumerator to process a female-sounding voice; otherwise, you may - * not hear the anticipated voice effect. + * @note Agora recommends using this enumerator to process a female-sounding voice; otherwise, you + * may not hear the anticipated voice effect. 
*/ VOICE_CHANGER_EFFECT_GIRL = 0x02020500, /** The voice of Pig King, a character in Journey to the West who has a voice like a growling @@ -4886,8 +5426,8 @@ enum AUDIO_EFFECT_PRESET { */ STYLE_TRANSFORMATION_POPULAR = 0x02030200, /** A pitch correction effect that corrects the user's pitch based on the pitch of the natural C - * major scale. After setting this voice effect, you can call `setAudioEffectParameters` to adjust - * the basic mode of tuning and the pitch of the main tone. + * major scale. After setting this voice effect, you can call `setAudioEffectParameters` to adjust + * the basic mode of tuning and the pitch of the main tone. */ PITCH_CORRECTION = 0x02040100, @@ -4902,16 +5442,20 @@ enum VOICE_CONVERSION_PRESET { /** Turn off voice conversion and use the original voice. */ VOICE_CONVERSION_OFF = 0x00000000, - /** A gender-neutral voice. To avoid audio distortion, ensure that you use this enumerator to process a female-sounding voice. + /** A gender-neutral voice. To avoid audio distortion, ensure that you use this enumerator to + * process a female-sounding voice. */ VOICE_CHANGER_NEUTRAL = 0x03010100, - /** A sweet voice. To avoid audio distortion, ensure that you use this enumerator to process a female-sounding voice. + /** A sweet voice. To avoid audio distortion, ensure that you use this enumerator to process a + * female-sounding voice. */ VOICE_CHANGER_SWEET = 0x03010200, - /** A steady voice. To avoid audio distortion, ensure that you use this enumerator to process a male-sounding voice. + /** A steady voice. To avoid audio distortion, ensure that you use this enumerator to process a + * male-sounding voice. */ VOICE_CHANGER_SOLID = 0x03010300, - /** A deep voice. To avoid audio distortion, ensure that you use this enumerator to process a male-sounding voice. + /** A deep voice. To avoid audio distortion, ensure that you use this enumerator to process a + * male-sounding voice. 
*/ VOICE_CHANGER_BASS = 0x03010400, /** A voice like a cartoon character. @@ -4964,6 +5508,41 @@ enum HEADPHONE_EQUALIZER_PRESET { HEADPHONE_EQUALIZER_INEAR = 0x04000002 }; +/** The options for SDK voice AI tuner. + */ +enum VOICE_AI_TUNER_TYPE { + /** Uncle, deep and magnetic male voice. + */ + VOICE_AI_TUNER_MATURE_MALE, + /** Fresh male, refreshing and sweet male voice. + */ + VOICE_AI_TUNER_FRESH_MALE, + /** Big sister, deep and charming female voice. + */ + VOICE_AI_TUNER_ELEGANT_FEMALE, + /** Lolita, high-pitched and cute female voice. + */ + VOICE_AI_TUNER_SWEET_FEMALE, + /** Warm man singing, warm and melodic male voice that is suitable for male lyrical songs. + */ + VOICE_AI_TUNER_WARM_MALE_SINGING, + /** Gentle female singing, soft and delicate female voice that is suitable for female lyrical songs. + */ + VOICE_AI_TUNER_GENTLE_FEMALE_SINGING, + /** Smoky uncle singing, unique husky male voice that is suitable for rock or blues songs. + */ + VOICE_AI_TUNER_HUSKY_MALE_SINGING, + /** Warm big sister singing, warm and mature female voice that is suitable for emotionally powerful songs. + */ + VOICE_AI_TUNER_WARM_ELEGANT_FEMALE_SINGING, + /** Forceful male singing, strong and powerful male voice that is suitable for passionate songs. + */ + VOICE_AI_TUNER_POWERFUL_MALE_SINGING, + /** Dreamy female singing, dreamlike and soft female voice that is suitable for airy and dream-like songs. + */ + VOICE_AI_TUNER_DREAMY_FEMALE_SINGING, +}; + /** * Screen sharing configurations. */ @@ -4983,9 +5562,9 @@ struct ScreenCaptureParameters { */ VideoDimensions dimensions; /** - * On Windows and macOS, it represents the video encoding frame rate (fps) of the shared screen stream. - * The frame rate (fps) of the shared region. The default value is 5. We do not recommend setting - * this to a value greater than 15. + * On Windows and macOS, it represents the video encoding frame rate (fps) of the shared screen + * stream. The frame rate (fps) of the shared region. 
The default value is 5. We do not recommend + * setting this to a value greater than 15. */ int frameRate; /** @@ -5000,52 +5579,109 @@ struct ScreenCaptureParameters { */ bool captureMouseCursor; /** - * Whether to bring the window to the front when calling the `startScreenCaptureByWindowId` method to share it: + * Whether to bring the window to the front when calling the `startScreenCaptureByWindowId` method + * to share it: * - `true`: Bring the window to the front. * - `false`: (Default) Do not bring the window to the front. - */ + */ bool windowFocus; /** - * A list of IDs of windows to be blocked. When calling `startScreenCaptureByDisplayId` to start screen sharing, - * you can use this parameter to block a specified window. When calling `updateScreenCaptureParameters` to update - * screen sharing configurations, you can use this parameter to dynamically block the specified windows during - * screen sharing. + * A list of IDs of windows to be blocked. When calling `startScreenCaptureByDisplayId` to start + * screen sharing, you can use this parameter to block a specified window. When calling + * `updateScreenCaptureParameters` to update screen sharing configurations, you can use this + * parameter to dynamically block the specified windows during screen sharing. */ - view_t *excludeWindowList; + view_t* excludeWindowList; /** * The number of windows to be blocked. */ int excludeWindowCount; /** The width (px) of the border. Defaults to 0, and the value range is [0,50]. - * - */ + * + */ int highLightWidth; /** The color of the border in RGBA format. The default value is 0xFF8CBF26. - * - */ + * + */ unsigned int highLightColor; /** Whether to place a border around the shared window or screen: - * - true: Place a border. - * - false: (Default) Do not place a border. - * - * @note When you share a part of a window or screen, the SDK places a border around the entire window or screen if you set `enableHighLight` as true. - * - */ + * - true: Place a border. 
+ * - false: (Default) Do not place a border. + * + * @note When you share a part of a window or screen, the SDK places a border around the entire + * window or screen if you set `enableHighLight` as true. + * + */ bool enableHighLight; ScreenCaptureParameters() - : dimensions(1920, 1080), frameRate(5), bitrate(STANDARD_BITRATE), captureMouseCursor(true), windowFocus(false), excludeWindowList(OPTIONAL_NULLPTR), excludeWindowCount(0), highLightWidth(0), highLightColor(0), enableHighLight(false) {} + : dimensions(1920, 1080), + frameRate(5), + bitrate(STANDARD_BITRATE), + captureMouseCursor(true), + windowFocus(false), + excludeWindowList(OPTIONAL_NULLPTR), + excludeWindowCount(0), + highLightWidth(0), + highLightColor(0), + enableHighLight(false) {} ScreenCaptureParameters(const VideoDimensions& d, int f, int b) - : dimensions(d), frameRate(f), bitrate(b), captureMouseCursor(true), windowFocus(false), excludeWindowList(OPTIONAL_NULLPTR), excludeWindowCount(0), highLightWidth(0), highLightColor(0), enableHighLight(false) {} + : dimensions(d), + frameRate(f), + bitrate(b), + captureMouseCursor(true), + windowFocus(false), + excludeWindowList(OPTIONAL_NULLPTR), + excludeWindowCount(0), + highLightWidth(0), + highLightColor(0), + enableHighLight(false) {} ScreenCaptureParameters(int width, int height, int f, int b) - : dimensions(width, height), frameRate(f), bitrate(b), captureMouseCursor(true), windowFocus(false), excludeWindowList(OPTIONAL_NULLPTR), excludeWindowCount(0), highLightWidth(0), highLightColor(0), enableHighLight(false){} + : dimensions(width, height), + frameRate(f), + bitrate(b), + captureMouseCursor(true), + windowFocus(false), + excludeWindowList(OPTIONAL_NULLPTR), + excludeWindowCount(0), + highLightWidth(0), + highLightColor(0), + enableHighLight(false) {} ScreenCaptureParameters(int width, int height, int f, int b, bool cur, bool fcs) - : dimensions(width, height), frameRate(f), bitrate(b), captureMouseCursor(cur), windowFocus(fcs), 
excludeWindowList(OPTIONAL_NULLPTR), excludeWindowCount(0), highLightWidth(0), highLightColor(0), enableHighLight(false) {} - ScreenCaptureParameters(int width, int height, int f, int b, view_t *ex, int cnt) - : dimensions(width, height), frameRate(f), bitrate(b), captureMouseCursor(true), windowFocus(false), excludeWindowList(ex), excludeWindowCount(cnt), highLightWidth(0), highLightColor(0), enableHighLight(false) {} - ScreenCaptureParameters(int width, int height, int f, int b, bool cur, bool fcs, view_t *ex, int cnt) - : dimensions(width, height), frameRate(f), bitrate(b), captureMouseCursor(cur), windowFocus(fcs), excludeWindowList(ex), excludeWindowCount(cnt), highLightWidth(0), highLightColor(0), enableHighLight(false) {} + : dimensions(width, height), + frameRate(f), + bitrate(b), + captureMouseCursor(cur), + windowFocus(fcs), + excludeWindowList(OPTIONAL_NULLPTR), + excludeWindowCount(0), + highLightWidth(0), + highLightColor(0), + enableHighLight(false) {} + ScreenCaptureParameters(int width, int height, int f, int b, view_t* ex, int cnt) + : dimensions(width, height), + frameRate(f), + bitrate(b), + captureMouseCursor(true), + windowFocus(false), + excludeWindowList(ex), + excludeWindowCount(cnt), + highLightWidth(0), + highLightColor(0), + enableHighLight(false) {} + ScreenCaptureParameters(int width, int height, int f, int b, bool cur, bool fcs, view_t* ex, + int cnt) + : dimensions(width, height), + frameRate(f), + bitrate(b), + captureMouseCursor(cur), + windowFocus(fcs), + excludeWindowList(ex), + excludeWindowCount(cnt), + highLightWidth(0), + highLightColor(0), + enableHighLight(false) {} }; /** @@ -5053,15 +5689,18 @@ struct ScreenCaptureParameters { */ enum AUDIO_RECORDING_QUALITY_TYPE { /** - * 0: Low quality. The sample rate is 32 kHz, and the file size is around 1.2 MB after 10 minutes of recording. + * 0: Low quality. The sample rate is 32 kHz, and the file size is around 1.2 MB after 10 minutes + * of recording. 
*/ AUDIO_RECORDING_QUALITY_LOW = 0, /** - * 1: Medium quality. The sample rate is 32 kHz, and the file size is around 2 MB after 10 minutes of recording. + * 1: Medium quality. The sample rate is 32 kHz, and the file size is around 2 MB after 10 minutes + * of recording. */ AUDIO_RECORDING_QUALITY_MEDIUM = 1, /** - * 2: High quality. The sample rate is 32 kHz, and the file size is around 3.75 MB after 10 minutes of recording. + * 2: High quality. The sample rate is 32 kHz, and the file size is around 3.75 MB after 10 + * minutes of recording. */ AUDIO_RECORDING_QUALITY_HIGH = 2, /** @@ -5093,16 +5732,16 @@ enum AUDIO_FILE_RECORDING_TYPE { */ enum AUDIO_ENCODED_FRAME_OBSERVER_POSITION { /** - * 1: Only records the audio of the local user. - */ + * 1: Only records the audio of the local user. + */ AUDIO_ENCODED_FRAME_OBSERVER_POSITION_RECORD = 1, /** - * 2: Only records the audio of all remote users. - */ + * 2: Only records the audio of all remote users. + */ AUDIO_ENCODED_FRAME_OBSERVER_POSITION_PLAYBACK = 2, /** - * 3: Records the mixed audio of the local and all remote users. - */ + * 3: Records the mixed audio of the local and all remote users. + */ AUDIO_ENCODED_FRAME_OBSERVER_POSITION_MIXED = 3, }; @@ -5111,7 +5750,8 @@ enum AUDIO_ENCODED_FRAME_OBSERVER_POSITION { */ struct AudioRecordingConfiguration { /** - * The absolute path (including the filename extensions) of the recording file. For example: `C:\music\audio.mp4`. + * The absolute path (including the filename extensions) of the recording file. For example: + * `C:\music\audio.mp4`. * @note Ensure that the directory for the log files exists and is writable. */ const char* filePath; @@ -5127,8 +5767,9 @@ struct AudioRecordingConfiguration { * - (Default) 32000 * - 44100 * - 48000 - * @note If you set this parameter to 44100 or 48000, Agora recommends recording WAV files, or AAC files with quality - * to be `AUDIO_RECORDING_QUALITY_MEDIUM` or `AUDIO_RECORDING_QUALITY_HIGH` for better recording quality. 
+ * @note If you set this parameter to 44100 or 48000, Agora recommends recording WAV files, or AAC + * files with quality to be `AUDIO_RECORDING_QUALITY_MEDIUM` or `AUDIO_RECORDING_QUALITY_HIGH` for + * better recording quality. */ int sampleRate; /** @@ -5149,133 +5790,148 @@ struct AudioRecordingConfiguration { int recordingChannel; AudioRecordingConfiguration() - : filePath(OPTIONAL_NULLPTR), - encode(false), - sampleRate(32000), - fileRecordingType(AUDIO_FILE_RECORDING_MIXED), - quality(AUDIO_RECORDING_QUALITY_LOW), - recordingChannel(1) {} - - AudioRecordingConfiguration(const char* file_path, int sample_rate, AUDIO_RECORDING_QUALITY_TYPE quality_type, int channel) - : filePath(file_path), - encode(false), - sampleRate(sample_rate), - fileRecordingType(AUDIO_FILE_RECORDING_MIXED), - quality(quality_type), - recordingChannel(channel) {} - - AudioRecordingConfiguration(const char* file_path, bool enc, int sample_rate, AUDIO_FILE_RECORDING_TYPE type, AUDIO_RECORDING_QUALITY_TYPE quality_type, int channel) - : filePath(file_path), - encode(enc), - sampleRate(sample_rate), - fileRecordingType(type), - quality(quality_type), - recordingChannel(channel) {} - - AudioRecordingConfiguration(const AudioRecordingConfiguration &rhs) - : filePath(rhs.filePath), - encode(rhs.encode), - sampleRate(rhs.sampleRate), - fileRecordingType(rhs.fileRecordingType), - quality(rhs.quality), - recordingChannel(rhs.recordingChannel) {} + : filePath(OPTIONAL_NULLPTR), + encode(false), + sampleRate(32000), + fileRecordingType(AUDIO_FILE_RECORDING_MIXED), + quality(AUDIO_RECORDING_QUALITY_LOW), + recordingChannel(1) {} + + AudioRecordingConfiguration(const char* file_path, int sample_rate, + AUDIO_RECORDING_QUALITY_TYPE quality_type, int channel) + : filePath(file_path), + encode(false), + sampleRate(sample_rate), + fileRecordingType(AUDIO_FILE_RECORDING_MIXED), + quality(quality_type), + recordingChannel(channel) {} + + AudioRecordingConfiguration(const char* file_path, bool enc, int 
sample_rate, + AUDIO_FILE_RECORDING_TYPE type, + AUDIO_RECORDING_QUALITY_TYPE quality_type, int channel) + : filePath(file_path), + encode(enc), + sampleRate(sample_rate), + fileRecordingType(type), + quality(quality_type), + recordingChannel(channel) {} + + AudioRecordingConfiguration(const AudioRecordingConfiguration& rhs) + : filePath(rhs.filePath), + encode(rhs.encode), + sampleRate(rhs.sampleRate), + fileRecordingType(rhs.fileRecordingType), + quality(rhs.quality), + recordingChannel(rhs.recordingChannel) {} }; /** * Observer settings for the encoded audio. */ struct AudioEncodedFrameObserverConfig { - /** - * Audio profile. For details, see `AUDIO_ENCODED_FRAME_OBSERVER_POSITION`. - */ - AUDIO_ENCODED_FRAME_OBSERVER_POSITION postionType; - /** - * Audio encoding type. For details, see `AUDIO_ENCODING_TYPE`. - */ - AUDIO_ENCODING_TYPE encodingType; - - AudioEncodedFrameObserverConfig() - : postionType(AUDIO_ENCODED_FRAME_OBSERVER_POSITION_PLAYBACK), - encodingType(AUDIO_ENCODING_TYPE_OPUS_48000_MEDIUM){} + /** + * Audio profile. For details, see `AUDIO_ENCODED_FRAME_OBSERVER_POSITION`. + */ + AUDIO_ENCODED_FRAME_OBSERVER_POSITION postionType; + /** + * Audio encoding type. For details, see `AUDIO_ENCODING_TYPE`. + */ + AUDIO_ENCODING_TYPE encodingType; + AudioEncodedFrameObserverConfig() + : postionType(AUDIO_ENCODED_FRAME_OBSERVER_POSITION_PLAYBACK), + encodingType(AUDIO_ENCODING_TYPE_OPUS_48000_MEDIUM) {} }; /** * The encoded audio observer. */ class IAudioEncodedFrameObserver { -public: -/** -* Gets the encoded audio data of the local user. -* -* After calling `registerAudioEncodedFrameObserver` and setting the encoded audio as `AUDIO_ENCODED_FRAME_OBSERVER_POSITION_RECORD`, -* you can get the encoded audio data of the local user from this callback. -* -* @param frameBuffer The pointer to the audio frame buffer. -* @param length The data length (byte) of the audio frame. -* @param audioEncodedFrameInfo Audio information after encoding. 
For details, see `EncodedAudioFrameInfo`. -*/ -virtual void onRecordAudioEncodedFrame(const uint8_t* frameBuffer, int length, const EncodedAudioFrameInfo& audioEncodedFrameInfo) = 0; + public: + /** + * Gets the encoded audio data of the local user. + * + * After calling `registerAudioEncodedFrameObserver` and setting the encoded audio as + * `AUDIO_ENCODED_FRAME_OBSERVER_POSITION_RECORD`, you can get the encoded audio data of the local + * user from this callback. + * + * @param frameBuffer The pointer to the audio frame buffer. + * @param length The data length (byte) of the audio frame. + * @param audioEncodedFrameInfo Audio information after encoding. For details, see + * `EncodedAudioFrameInfo`. + */ + virtual void onRecordAudioEncodedFrame(const uint8_t* frameBuffer, int length, + const EncodedAudioFrameInfo& audioEncodedFrameInfo) = 0; -/** -* Gets the encoded audio data of all remote users. -* -* After calling `registerAudioEncodedFrameObserver` and setting the encoded audio as `AUDIO_ENCODED_FRAME_OBSERVER_POSITION_PLAYBACK`, -* you can get encoded audio data of all remote users through this callback. -* -* @param frameBuffer The pointer to the audio frame buffer. -* @param length The data length (byte) of the audio frame. -* @param audioEncodedFrameInfo Audio information after encoding. For details, see `EncodedAudioFrameInfo`. -*/ -virtual void onPlaybackAudioEncodedFrame(const uint8_t* frameBuffer, int length, const EncodedAudioFrameInfo& audioEncodedFrameInfo) = 0; + /** + * Gets the encoded audio data of all remote users. + * + * After calling `registerAudioEncodedFrameObserver` and setting the encoded audio as + * `AUDIO_ENCODED_FRAME_OBSERVER_POSITION_PLAYBACK`, you can get encoded audio data of all remote + * users through this callback. + * + * @param frameBuffer The pointer to the audio frame buffer. + * @param length The data length (byte) of the audio frame. + * @param audioEncodedFrameInfo Audio information after encoding. 
For details, see + * `EncodedAudioFrameInfo`. + */ + virtual void onPlaybackAudioEncodedFrame(const uint8_t* frameBuffer, int length, + const EncodedAudioFrameInfo& audioEncodedFrameInfo) = 0; -/** -* Gets the mixed and encoded audio data of the local and all remote users. -* -* After calling `registerAudioEncodedFrameObserver` and setting the audio profile as `AUDIO_ENCODED_FRAME_OBSERVER_POSITION_MIXED`, -* you can get the mixed and encoded audio data of the local and all remote users through this callback. -* -* @param frameBuffer The pointer to the audio frame buffer. -* @param length The data length (byte) of the audio frame. -* @param audioEncodedFrameInfo Audio information after encoding. For details, see `EncodedAudioFrameInfo`. -*/ -virtual void onMixedAudioEncodedFrame(const uint8_t* frameBuffer, int length, const EncodedAudioFrameInfo& audioEncodedFrameInfo) = 0; + /** + * Gets the mixed and encoded audio data of the local and all remote users. + * + * After calling `registerAudioEncodedFrameObserver` and setting the audio profile as + * `AUDIO_ENCODED_FRAME_OBSERVER_POSITION_MIXED`, you can get the mixed and encoded audio data of + * the local and all remote users through this callback. + * + * @param frameBuffer The pointer to the audio frame buffer. + * @param length The data length (byte) of the audio frame. + * @param audioEncodedFrameInfo Audio information after encoding. For details, see + * `EncodedAudioFrameInfo`. + */ + virtual void onMixedAudioEncodedFrame(const uint8_t* frameBuffer, int length, + const EncodedAudioFrameInfo& audioEncodedFrameInfo) = 0; -virtual ~IAudioEncodedFrameObserver () {} + virtual ~IAudioEncodedFrameObserver() {} }; /** The region for connection, which is the region where the server the SDK connects to is located. */ enum AREA_CODE { - /** - * Mainland China. - */ - AREA_CODE_CN = 0x00000001, - /** - * North America. - */ - AREA_CODE_NA = 0x00000002, - /** - * Europe. 
- */ - AREA_CODE_EU = 0x00000004, - /** - * Asia, excluding Mainland China. - */ - AREA_CODE_AS = 0x00000008, - /** - * Japan. - */ - AREA_CODE_JP = 0x00000010, - /** - * India. - */ - AREA_CODE_IN = 0x00000020, - /** - * (Default) Global. - */ - AREA_CODE_GLOB = (0xFFFFFFFF) + /** + * Mainland China. + */ + AREA_CODE_CN = 0x00000001, + /** + * North America. + */ + AREA_CODE_NA = 0x00000002, + /** + * Europe. + */ + AREA_CODE_EU = 0x00000004, + /** + * Asia, excluding Mainland China. + */ + AREA_CODE_AS = 0x00000008, + /** + * Japan. + */ + AREA_CODE_JP = 0x00000010, + /** + * India. + */ + AREA_CODE_IN = 0x00000020, + /** + * (Default) Global. + */ + AREA_CODE_GLOB = (0xFFFFFFFF) }; +/** + Extra region code + @technical preview +*/ enum AREA_CODE_EX { /** * Oceania @@ -5301,6 +5957,10 @@ enum AREA_CODE_EX { * United States */ AREA_CODE_US = 0x00000800, + /** + * Russia + */ + AREA_CODE_RU = 0x00001000, /** * The global area (except China) */ @@ -5319,8 +5979,9 @@ enum CHANNEL_MEDIA_RELAY_ERROR { RELAY_ERROR_SERVER_ERROR_RESPONSE = 1, /** 2: No server response. You can call the `leaveChannel` method to leave the channel. * - * This error can also occur if your project has not enabled co-host token authentication. You can contact technical - * support to enable the service for cohosting across channels before starting a channel media relay. + * This error can also occur if your project has not enabled co-host token authentication. You can + * contact technical support to enable the service for cohosting across channels before starting a + * channel media relay. */ RELAY_ERROR_SERVER_NO_RESPONSE = 2, /** 3: The SDK fails to access the service, probably due to limited resources of the server. @@ -5338,8 +5999,8 @@ enum CHANNEL_MEDIA_RELAY_ERROR { /** 7: The server fails to send the media stream. */ RELAY_ERROR_FAILED_PACKET_SENT_TO_DEST = 7, - /** 8: The SDK disconnects from the server due to poor network connections. 
You can call the `leaveChannel` method to - * leave the channel. + /** 8: The SDK disconnects from the server due to poor network connections. You can call the + * `leaveChannel` method to leave the channel. */ RELAY_ERROR_SERVER_CONNECTION_LOST = 8, /** 9: An internal error occurs in the server. @@ -5357,8 +6018,8 @@ enum CHANNEL_MEDIA_RELAY_ERROR { * The state code of the channel media relay. */ enum CHANNEL_MEDIA_RELAY_STATE { - /** 0: The initial state. After you successfully stop the channel media relay by calling `stopChannelMediaRelay`, - * the `onChannelMediaRelayStateChanged` callback returns this state. + /** 0: The initial state. After you successfully stop the channel media relay by calling + * `stopChannelMediaRelay`, the `onChannelMediaRelayStateChanged` callback returns this state. */ RELAY_STATE_IDLE = 0, /** 1: The SDK tries to relay the media stream to the destination channel. @@ -5376,15 +6037,15 @@ enum CHANNEL_MEDIA_RELAY_STATE { */ struct ChannelMediaInfo { /** The user ID. - */ + */ uid_t uid; /** The channel name. The default value is NULL, which means that the SDK - * applies the current channel name. - */ + * applies the current channel name. + */ const char* channelName; /** The token that enables the user to join the channel. The default value - * is NULL, which means that the SDK applies the current token. - */ + * is NULL, which means that the SDK applies the current token. + */ const char* token; ChannelMediaInfo() : uid(0), channelName(NULL), token(NULL) {} @@ -5395,31 +6056,32 @@ struct ChannelMediaInfo { */ struct ChannelMediaRelayConfiguration { /** The information of the source channel `ChannelMediaInfo`. It contains the following members: - * - `channelName`: The name of the source channel. The default value is `NULL`, which means the SDK applies the name - * of the current channel. - * - `uid`: The unique ID to identify the relay stream in the source channel. 
The default value is 0, which means the - * SDK generates a random UID. You must set it as 0. - * - `token`: The token for joining the source channel. It is generated with the `channelName` and `uid` you set in - * `srcInfo`. - * - If you have not enabled the App Certificate, set this parameter as the default value `NULL`, which means the - * SDK applies the App ID. - * - If you have enabled the App Certificate, you must use the token generated with the `channelName` and `uid`, and - * the `uid` must be set as 0. + * - `channelName`: The name of the source channel. The default value is `NULL`, which means the + * SDK applies the name of the current channel. + * - `uid`: The unique ID to identify the relay stream in the source channel. The default value is + * 0, which means the SDK generates a random UID. You must set it as 0. + * - `token`: The token for joining the source channel. It is generated with the `channelName` and + * `uid` you set in `srcInfo`. + * - If you have not enabled the App Certificate, set this parameter as the default value + * `NULL`, which means the SDK applies the App ID. + * - If you have enabled the App Certificate, you must use the token generated with the + * `channelName` and `uid`, and the `uid` must be set as 0. */ ChannelMediaInfo* srcInfo; - /** The information of the destination channel `ChannelMediaInfo`. It contains the following members: + /** The information of the destination channel `ChannelMediaInfo`. It contains the following + * members: * - `channelName`: The name of the destination channel. * - `uid`: The unique ID to identify the relay stream in the destination channel. The value * ranges from 0 to (2^32-1). To avoid UID conflicts, this `UID` must be different from any * other `UID` in the destination channel. The default value is 0, which means the SDK generates * a random `UID`. 
Do not set this parameter as the `UID` of the host in the destination channel, * and ensure that this `UID` is different from any other `UID` in the channel. - * - `token`: The token for joining the destination channel. It is generated with the `channelName` - * and `uid` you set in `destInfos`. + * - `token`: The token for joining the destination channel. It is generated with the + * `channelName` and `uid` you set in `destInfos`. * - If you have not enabled the App Certificate, set this parameter as the default value NULL, * which means the SDK applies the App ID. - * If you have enabled the App Certificate, you must use the token generated with the `channelName` - * and `uid`. + * If you have enabled the App Certificate, you must use the token generated with the + * `channelName` and `uid`. */ ChannelMediaInfo* destInfos; /** The number of destination channels. The default value is 0, and the value range is from 0 to @@ -5428,7 +6090,8 @@ struct ChannelMediaRelayConfiguration { */ int destCount; - ChannelMediaRelayConfiguration() : srcInfo(OPTIONAL_NULLPTR), destInfos(OPTIONAL_NULLPTR), destCount(0) {} + ChannelMediaRelayConfiguration() + : srcInfo(OPTIONAL_NULLPTR), destInfos(OPTIONAL_NULLPTR), destCount(0) {} }; /** @@ -5473,11 +6136,11 @@ struct DownlinkNetworkInfo { expected_bitrate_bps(-1) {} PeerDownlinkInfo(const PeerDownlinkInfo& rhs) - : stream_type(rhs.stream_type), + : stream_type(rhs.stream_type), current_downscale_level(rhs.current_downscale_level), expected_bitrate_bps(rhs.expected_bitrate_bps) { if (rhs.userId != OPTIONAL_NULLPTR) { - const int len = std::strlen(rhs.userId); + const size_t len = std::strlen(rhs.userId); char* buf = new char[len + 1]; std::memcpy(buf, rhs.userId, len); buf[len] = '\0'; @@ -5492,7 +6155,7 @@ struct DownlinkNetworkInfo { current_downscale_level = rhs.current_downscale_level; expected_bitrate_bps = rhs.expected_bitrate_bps; if (rhs.userId != OPTIONAL_NULLPTR) { - const int len = std::strlen(rhs.userId); + const size_t 
len = std::strlen(rhs.userId); char* buf = new char[len + 1]; std::memcpy(buf, rhs.userId, len); buf[len] = '\0'; @@ -5526,18 +6189,18 @@ struct DownlinkNetworkInfo { int total_received_video_count; DownlinkNetworkInfo() - : lastmile_buffer_delay_time_ms(-1), - bandwidth_estimation_bps(-1), - total_downscale_level_count(-1), - peer_downlink_info(OPTIONAL_NULLPTR), - total_received_video_count(-1) {} + : lastmile_buffer_delay_time_ms(-1), + bandwidth_estimation_bps(-1), + total_downscale_level_count(-1), + peer_downlink_info(OPTIONAL_NULLPTR), + total_received_video_count(-1) {} DownlinkNetworkInfo(const DownlinkNetworkInfo& info) - : lastmile_buffer_delay_time_ms(info.lastmile_buffer_delay_time_ms), - bandwidth_estimation_bps(info.bandwidth_estimation_bps), - total_downscale_level_count(info.total_downscale_level_count), - peer_downlink_info(OPTIONAL_NULLPTR), - total_received_video_count(info.total_received_video_count) { + : lastmile_buffer_delay_time_ms(info.lastmile_buffer_delay_time_ms), + bandwidth_estimation_bps(info.bandwidth_estimation_bps), + total_downscale_level_count(info.total_downscale_level_count), + peer_downlink_info(OPTIONAL_NULLPTR), + total_received_video_count(info.total_received_video_count) { if (total_received_video_count <= 0) return; peer_downlink_info = new PeerDownlinkInfo[total_received_video_count]; for (int i = 0; i < total_received_video_count; ++i) @@ -5591,7 +6254,8 @@ enum ENCRYPTION_MODE { * salt (`encryptionKdfSalt`). */ AES_128_GCM2 = 7, - /** 8: 256-bit AES encryption, GCM mode. This encryption mode requires the setting of salt (`encryptionKdfSalt`). + /** 8: 256-bit AES encryption, GCM mode. This encryption mode requires the setting of salt + * (`encryptionKdfSalt`). */ AES_256_GCM2 = 8, /** Enumerator boundary. @@ -5609,30 +6273,31 @@ struct EncryptionConfig { /** * Encryption key in string type with unlimited length. Agora recommends using a 32-byte key. 
* - * @note If you do not set an encryption key or set it as NULL, you cannot use the built-in encryption, and the SDK returns #ERR_INVALID_ARGUMENT (-2). + * @note If you do not set an encryption key or set it as NULL, you cannot use the built-in + * encryption, and the SDK returns #ERR_INVALID_ARGUMENT (-2). */ const char* encryptionKey; /** - * Salt, 32 bytes in length. Agora recommends that you use OpenSSL to generate salt on the server side. + * Salt, 32 bytes in length. Agora recommends that you use OpenSSL to generate salt on the server + * side. * * @note This parameter takes effect only in `AES_128_GCM2` or `AES_256_GCM2` encrypted mode. * In this case, ensure that this parameter is not 0. */ uint8_t encryptionKdfSalt[32]; - + bool datastreamEncryptionEnabled; EncryptionConfig() - : encryptionMode(AES_128_GCM2), - encryptionKey(OPTIONAL_NULLPTR), - datastreamEncryptionEnabled(false) - { + : encryptionMode(AES_128_GCM2), + encryptionKey(OPTIONAL_NULLPTR), + datastreamEncryptionEnabled(false) { memset(encryptionKdfSalt, 0, sizeof(encryptionKdfSalt)); } /// @cond const char* getEncryptionString() const { - switch(encryptionMode) { + switch (encryptionMode) { case AES_128_XTS: return "aes-128-xts"; case AES_128_ECB: @@ -5660,30 +6325,31 @@ struct EncryptionConfig { /** Encryption error type. */ enum ENCRYPTION_ERROR_TYPE { - /** - * 0: Internal reason. - */ - ENCRYPTION_ERROR_INTERNAL_FAILURE = 0, - /** - * 1: MediaStream decryption errors. Ensure that the receiver and the sender use the same encryption mode and key. - */ - ENCRYPTION_ERROR_DECRYPTION_FAILURE = 1, - /** - * 2: MediaStream encryption errors. - */ - ENCRYPTION_ERROR_ENCRYPTION_FAILURE = 2, - /** - * 3: DataStream decryption errors. Ensure that the receiver and the sender use the same encryption mode and key. - */ - ENCRYPTION_ERROR_DATASTREAM_DECRYPTION_FAILURE = 3, - /** - * 4: DataStream encryption errors. - */ - ENCRYPTION_ERROR_DATASTREAM_ENCRYPTION_FAILURE = 4, + /** + * 0: Internal reason. 
+ */ + ENCRYPTION_ERROR_INTERNAL_FAILURE = 0, + /** + * 1: MediaStream decryption errors. Ensure that the receiver and the sender use the same + * encryption mode and key. + */ + ENCRYPTION_ERROR_DECRYPTION_FAILURE = 1, + /** + * 2: MediaStream encryption errors. + */ + ENCRYPTION_ERROR_ENCRYPTION_FAILURE = 2, + /** + * 3: DataStream decryption errors. Ensure that the receiver and the sender use the same + * encryption mode and key. + */ + ENCRYPTION_ERROR_DATASTREAM_DECRYPTION_FAILURE = 3, + /** + * 4: DataStream encryption errors. + */ + ENCRYPTION_ERROR_DATASTREAM_ENCRYPTION_FAILURE = 4, }; -enum UPLOAD_ERROR_REASON -{ +enum UPLOAD_ERROR_REASON { UPLOAD_SUCCESS = 0, UPLOAD_NET_ERROR = 1, UPLOAD_SERVER_ERROR = 2, @@ -5718,11 +6384,12 @@ enum STREAM_SUBSCRIBE_STATE { * - Calls `muteLocalAudioStream(true)` or `muteLocalVideoStream(true)` to stop sending local * media stream. * - Calls `disableAudio` or `disableVideo `to disable the local audio or video module. - * - Calls `enableLocalAudio(false)` or `enableLocalVideo(false)` to disable the local audio or video capture. + * - Calls `enableLocalAudio(false)` or `enableLocalVideo(false)` to disable the local audio or + * video capture. * - The role of the remote user is audience. * - The local user calls the following methods to stop receiving remote streams: - * - Calls `muteRemoteAudioStream(true)`, `muteAllRemoteAudioStreams(true)` or `setDefaultMuteAllRemoteAudioStreams(true)` to stop receiving the remote audio streams. - * - Calls `muteRemoteVideoStream(true)`, `muteAllRemoteVideoStreams(true)` or `setDefaultMuteAllRemoteVideoStreams(true)` to stop receiving the remote video streams. + * - Calls `muteRemoteAudioStream(true)`, `muteAllRemoteAudioStreams(true)` to stop receiving the remote audio streams. + * - Calls `muteRemoteVideoStream(true)`, `muteAllRemoteVideoStreams(true)` to stop receiving the remote video streams. 
*/ SUB_STATE_NO_SUBSCRIBED = 1, /** @@ -5745,9 +6412,12 @@ enum STREAM_PUBLISH_STATE { PUB_STATE_IDLE = 0, /** * 1: Fails to publish the local stream. Possible reasons: - * - The local user calls `muteLocalAudioStream(true)` or `muteLocalVideoStream(true)` to stop sending the local media stream. - * - The local user calls `disableAudio` or `disableVideo` to disable the local audio or video module. - * - The local user calls `enableLocalAudio(false)` or `enableLocalVideo(false)` to disable the local audio or video capture. + * - The local user calls `muteLocalAudioStream(true)` or `muteLocalVideoStream(true)` to stop + * sending the local media stream. + * - The local user calls `disableAudio` or `disableVideo` to disable the local audio or video + * module. + * - The local user calls `enableLocalAudio(false)` or `enableLocalVideo(false)` to disable the + * local audio or video capture. * - The role of the local user is audience. */ PUB_STATE_NO_PUBLISHED = 1, @@ -5773,10 +6443,15 @@ struct EchoTestConfiguration { int intervalInSeconds; EchoTestConfiguration(view_t v, bool ea, bool ev, const char* t, const char* c, const int is) - : view(v), enableAudio(ea), enableVideo(ev), token(t), channelId(c), intervalInSeconds(is) {} + : view(v), enableAudio(ea), enableVideo(ev), token(t), channelId(c), intervalInSeconds(is) {} EchoTestConfiguration() - : view(OPTIONAL_NULLPTR), enableAudio(true), enableVideo(true), token(OPTIONAL_NULLPTR), channelId(OPTIONAL_NULLPTR), intervalInSeconds(2) {} + : view(OPTIONAL_NULLPTR), + enableAudio(true), + enableVideo(true), + token(OPTIONAL_NULLPTR), + channelId(OPTIONAL_NULLPTR), + intervalInSeconds(2) {} }; /** @@ -5792,9 +6467,7 @@ struct UserInfo { */ char userAccount[MAX_USER_ACCOUNT_LENGTH]; - UserInfo() : uid(0) { - userAccount[0] = '\0'; - } + UserInfo() : uid(0) { userAccount[0] = '\0'; } }; /** @@ -5804,21 +6477,22 @@ enum EAR_MONITORING_FILTER_TYPE { /** * 1: Do not add an audio filter to the in-ear monitor. 
*/ - EAR_MONITORING_FILTER_NONE = (1<<0), + EAR_MONITORING_FILTER_NONE = (1 << 0), /** * 2: Enable audio filters to the in-ear monitor. If you implement functions such as voice * beautifier and audio effect, users can hear the voice after adding these effects. */ - EAR_MONITORING_FILTER_BUILT_IN_AUDIO_FILTERS = (1<<1), + EAR_MONITORING_FILTER_BUILT_IN_AUDIO_FILTERS = (1 << 1), /** * 4: Enable noise suppression to the in-ear monitor. */ - EAR_MONITORING_FILTER_NOISE_SUPPRESSION = (1<<2), + EAR_MONITORING_FILTER_NOISE_SUPPRESSION = (1 << 2), /** * 32768: Enable audio filters by reuse post-processing filter to the in-ear monitor. - * This bit is intended to be used in exclusive mode, which means, if this bit is set, all other bits will be disregarded. + * This bit is intended to be used in exclusive mode, which means, if this bit is set, all other + * bits will be disregarded. */ - EAR_MONITORING_FILTER_REUSE_POST_PROCESSING_FILTER = (1<<15), + EAR_MONITORING_FILTER_REUSE_POST_PROCESSING_FILTER = (1 << 15), }; /** @@ -5890,7 +6564,7 @@ struct ScreenVideoParameters { * profiles](https://docs.agora.io/en/Interactive%20Broadcast/game_streaming_video_profile?platform=Android#recommended-video-profiles). */ int frameRate = 15; - /** + /** * The video encoding bitrate (Kbps). For recommended values, see [Recommended video * profiles](https://docs.agora.io/en/Interactive%20Broadcast/game_streaming_video_profile?platform=Android#recommended-video-profiles). */ @@ -5981,7 +6655,7 @@ struct VideoRenderingTracingInfo { int elapsedTime; /** * Elapsed time from the start tracing time to the time when join channel. - * + * * **Note** * If the start tracing time is behind the time when join channel, this value will be negative. */ @@ -5992,7 +6666,7 @@ struct VideoRenderingTracingInfo { int join2JoinSuccess; /** * Elapsed time from finishing joining channel to remote user joined. 
- * + * * **Note** * If the start tracing time is after the time finishing join channel, this value will be * the elapsed time from the start tracing time to remote user joined. The minimum value is 0. @@ -6000,7 +6674,7 @@ struct VideoRenderingTracingInfo { int joinSuccess2RemoteJoined; /** * Elapsed time from remote user joined to set the view. - * + * * **Note** * If the start tracing time is after the time when remote user joined, this value will be * the elapsed time from the start tracing time to set the view. The minimum value is 0. @@ -6008,7 +6682,7 @@ struct VideoRenderingTracingInfo { int remoteJoined2SetView; /** * Elapsed time from remote user joined to the time subscribing remote video stream. - * + * * **Note** * If the start tracing time is after the time when remote user joined, this value will be * the elapsed time from the start tracing time to the time subscribing remote video stream. @@ -6017,7 +6691,7 @@ struct VideoRenderingTracingInfo { int remoteJoined2UnmuteVideo; /** * Elapsed time from remote user joined to the remote video packet received. - * + * * **Note** * If the start tracing time is after the time when remote user joined, this value will be * the elapsed time from the start tracing time to the time subscribing remote video stream. @@ -6037,7 +6711,6 @@ enum CONFIG_FETCH_TYPE { CONFIG_FETCH_TYPE_JOIN_CHANNEL = 2, }; - /** The local proxy mode type. */ enum LOCAL_PROXY_MODE { /** 0: Connect local proxy with high priority, if not connected to local proxy, fallback to sdrtn. 
@@ -6066,7 +6739,8 @@ struct LogUploadServerInfo { LogUploadServerInfo() : serverDomain(NULL), serverPath(NULL), serverPort(0), serverHttps(true) {} - LogUploadServerInfo(const char* domain, const char* path, int port, bool https) : serverDomain(domain), serverPath(path), serverPort(port), serverHttps(https) {} + LogUploadServerInfo(const char* domain, const char* path, int port, bool https) + : serverDomain(domain), serverPath(path), serverPort(port), serverHttps(https) {} }; struct AdvancedConfigInfo { @@ -6088,8 +6762,9 @@ struct LocalAccessPointConfiguration { /** The number of local access point domain. */ int domainListSize; - /** Certificate domain name installed on specific local access point. pass "" means using sni domain on specific local access point - * SNI(Server Name Indication) is an extension to the TLS protocol. + /** Certificate domain name installed on specific local access point. pass "" means using sni + * domain on specific local access point SNI(Server Name Indication) is an extension to the TLS + * protocol. */ const char* verifyDomainName; /** Local proxy connection mode, connectivity first or local only. @@ -6104,23 +6779,42 @@ struct LocalAccessPointConfiguration { - false: not disable vos-aut */ bool disableAut; - LocalAccessPointConfiguration() : ipList(NULL), ipListSize(0), domainList(NULL), domainListSize(0), verifyDomainName(NULL), mode(ConnectivityFirst), disableAut(true) {} + LocalAccessPointConfiguration() + : ipList(NULL), + ipListSize(0), + domainList(NULL), + domainListSize(0), + verifyDomainName(NULL), + mode(ConnectivityFirst), + disableAut(true) {} +}; + +enum RecorderStreamType { + RTC, + PREVIEW, }; /** * The information about recorded media streams. */ struct RecorderStreamInfo { - const char* channelId; - /** - * The user ID. - */ - uid_t uid; - /** - * The channel ID of the audio/video stream needs to be recorded. 
- */ - RecorderStreamInfo() : channelId(NULL), uid(0) {} - RecorderStreamInfo(const char* channelId, uid_t uid) : channelId(channelId), uid(uid) {} + /** + * The channel ID of the audio/video stream needs to be recorded. + */ + const char* channelId; + /** + * The user ID. + */ + uid_t uid; + /** + * The Recoder Stream type. + */ + RecorderStreamType type; + RecorderStreamInfo() : channelId(NULL), uid(0), type(RTC) {} + RecorderStreamInfo(const char* channelId, uid_t uid) + : channelId(channelId), uid(uid), type(RTC) {} + RecorderStreamInfo(const char* channelId, uid_t uid, RecorderStreamType type) + : channelId(channelId), uid(uid), type(type) {} }; } // namespace rtc @@ -6147,12 +6841,12 @@ class AParameter : public agora::util::AutoPtr { }; class LicenseCallback { - public: - virtual ~LicenseCallback() {} - virtual void onCertificateRequired() = 0; - virtual void onLicenseRequest() = 0; - virtual void onLicenseValidated() = 0; - virtual void onLicenseError(int result) = 0; + public: + virtual ~LicenseCallback() {} + virtual void onCertificateRequired() = 0; + virtual void onLicenseRequest() = 0; + virtual void onLicenseValidated() = 0; + virtual void onLicenseError(int result) = 0; }; } // namespace base @@ -6196,44 +6890,51 @@ struct SpatialAudioParams { }; /** * Layout info of video stream which compose a transcoder video stream. -*/ -struct VideoLayout -{ + */ +struct VideoLayout { /** * Channel Id from which this video stream come from. - */ + */ const char* channelId; /** * User id of video stream. - */ + */ rtc::uid_t uid; /** * User account of video stream. - */ + */ user_id_t strUid; /** * x coordinate of video stream on a transcoded video stream canvas. - */ + */ uint32_t x; /** * y coordinate of video stream on a transcoded video stream canvas. - */ + */ uint32_t y; /** * width of video stream on a transcoded video stream canvas. - */ + */ uint32_t width; /** * height of video stream on a transcoded video stream canvas. 
- */ + */ uint32_t height; /** * video state of video stream on a transcoded video stream canvas. * 0 for normal video , 1 for placeholder image showed , 2 for black image. - */ - uint32_t videoState; + */ + uint32_t videoState; - VideoLayout() : channelId(OPTIONAL_NULLPTR), uid(0), strUid(OPTIONAL_NULLPTR), x(0), y(0), width(0), height(0), videoState(0) {} + VideoLayout() + : channelId(OPTIONAL_NULLPTR), + uid(0), + strUid(OPTIONAL_NULLPTR), + x(0), + y(0), + width(0), + height(0), + videoState(0) {} }; } // namespace agora @@ -6260,7 +6961,7 @@ AGORA_API int AGORA_CALL setAgoraSdkExternalSymbolLoader(void* (*func)(const cha * @note For license only, everytime will generate a different credential. * So, just need to call once for a device, and then save the credential */ -AGORA_API int AGORA_CALL createAgoraCredential(agora::util::AString &credential); +AGORA_API int AGORA_CALL createAgoraCredential(agora::util::AString& credential); /** * Verify given certificate and return the result @@ -6275,8 +6976,10 @@ AGORA_API int AGORA_CALL createAgoraCredential(agora::util::AString &credential) * @return The description of the error code. * @note For license only. */ -AGORA_API int AGORA_CALL getAgoraCertificateVerifyResult(const char *credential_buf, int credential_len, - const char *certificate_buf, int certificate_len); +AGORA_API int AGORA_CALL getAgoraCertificateVerifyResult(const char* credential_buf, + int credential_len, + const char* certificate_buf, + int certificate_len); /** * @brief Implement the agora::base::LicenseCallback, @@ -6285,7 +6988,7 @@ AGORA_API int AGORA_CALL getAgoraCertificateVerifyResult(const char *credential_ * @param [in] callback The object of agora::LiceseCallback, * set the callback to null before delete it. 
*/ -AGORA_API void setAgoraLicenseCallback(agora::base::LicenseCallback *callback); +AGORA_API void setAgoraLicenseCallback(agora::base::LicenseCallback* callback); /** * @brief Get the LicenseCallback pointer if already setup, @@ -6301,18 +7004,15 @@ AGORA_API agora::base::LicenseCallback* getAgoraLicenseCallback(); * typical scenario is as follows: * * ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - * | // custom audio/video base capture time, e.g. the first audio/video capture time. | - * | int64_t custom_capture_time_base; | - * | | - * | int64_t agora_monotonic_time = getAgoraCurrentMonotonicTimeInMs(); | - * | | - * | // offset is fixed once calculated in the begining. | - * | const int64_t offset = agora_monotonic_time - custom_capture_time_base; | - * | | - * | // realtime_custom_audio/video_capture_time is the origin capture time that customer provided.| - * | // actual_audio/video_capture_time is the actual capture time transfered to sdk. | - * | int64_t actual_audio_capture_time = realtime_custom_audio_capture_time + offset; | - * | int64_t actual_video_capture_time = realtime_custom_video_capture_time + offset; | + * | // custom audio/video base capture time, e.g. the first audio/video capture time. | | int64_t + * custom_capture_time_base; | | | | + * int64_t agora_monotonic_time = getAgoraCurrentMonotonicTimeInMs(); | + * | | | // offset is fixed once calculated in the begining. | | const int64_t offset = + * agora_monotonic_time - custom_capture_time_base; | | | | // + * realtime_custom_audio/video_capture_time is the origin capture time that customer provided.| | // + * actual_audio/video_capture_time is the actual capture time transfered to sdk. 
| | + * int64_t actual_audio_capture_time = realtime_custom_audio_capture_time + offset; | + * | int64_t actual_video_capture_time = realtime_custom_video_capture_time + offset; | * ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ * * @return diff --git a/android/src/main/cpp/third_party/include/agora_rtc/AgoraMediaBase.h b/android/src/main/cpp/third_party/include/agora_rtc/AgoraMediaBase.h index 8c24b5203..6e7d45357 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/AgoraMediaBase.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/AgoraMediaBase.h @@ -35,10 +35,36 @@ static const unsigned int DEFAULT_CONNECTION_ID = 0; static const unsigned int DUMMY_CONNECTION_ID = (std::numeric_limits::max)(); struct EncodedVideoFrameInfo; - /** -* Video source types definition. +* The definition of extension context types. **/ +struct ExtensionContext { + /** + * Whether the uid is valid. + * - true: The uid is valid. + * - false: The uid is invalid. + */ + bool isValid; + /** + * The ID of the user. + * A uid of 0 indicates the local user, and a uid greater than 0 represents a remote user. + */ + uid_t uid; + /** + * The provider name of the current extension. + */ + const char *providerName; + /** + * The extension name of the current extension. + */ + const char *extensionName; + ExtensionContext():isValid(false), uid(0), providerName(NULL), extensionName(NULL) {} +}; + + +/** + * Video source types definition. + **/ enum VIDEO_SOURCE_TYPE { /** Video captured by the camera. */ @@ -89,17 +115,45 @@ enum VIDEO_SOURCE_TYPE { */ VIDEO_SOURCE_SCREEN_FOURTH = 14, /** Video for voice drive. - */ - VIDEO_SOURCE_SPEECH_DRIVEN = 15, + */ + VIDEO_SOURCE_SPEECH_DRIVEN = 15, VIDEO_SOURCE_UNKNOWN = 100 }; +/** +* Audio source types definition. +**/ +enum AUDIO_SOURCE_TYPE { + /** Audio captured by the mic. + */ + AUDIO_SOURCE_MICROPHONE = 0, + /** Not define. 
+ */ + AUDIO_SOURCE_CUSTOM = 1, + /** Audio for media player sharing. + */ + AUDIO_SOURCE_MEDIA_PLAYER = 2, + /** Audio for screen audio. + */ + AUDIO_SOURCE_LOOPBACK_RECORDING = 3, + /** Audio captured by mixed source. + */ + AUDIO_SOURCE_MIXED_STREAM = 4, + /** Remote audio received from network. + */ + AUDIO_SOURCE_REMOTE_USER = 5, + /** Remote audio received from network by channel. + */ + AUDIO_SOURCE_REMOTE_CHANNEL = 6, + + AUDIO_SOURCE_UNKNOWN = 100 +}; + /** * Audio routes. */ -enum AudioRoute -{ +enum AudioRoute { /** * -1: The default audio route. */ @@ -165,23 +219,21 @@ struct AudioParameters { size_t channels; size_t frames_per_buffer; - AudioParameters() - : sample_rate(0), - channels(0), - frames_per_buffer(0) {} + AudioParameters() : sample_rate(0), channels(0), frames_per_buffer(0) {} }; /** * The use mode of the audio data. */ enum RAW_AUDIO_FRAME_OP_MODE_TYPE { - /** 0: Read-only mode: Users only read the data from `AudioFrame` without modifying anything. + /** 0: Read-only mode: Users only read the data from `AudioFrame` without modifying anything. * For example, when users acquire the data with the Agora SDK, then start the media push. */ RAW_AUDIO_FRAME_OP_MODE_READ_ONLY = 0, - /** 2: Read and write mode: Users read the data from `AudioFrame`, modify it, and then play it. - * For example, when users have their own audio-effect processing module and perform some voice pre-processing, such as a voice change. + /** 2: Read and write mode: Users read the data from `AudioFrame`, modify it, and then play it. + * For example, when users have their own audio-effect processing module and perform some voice + * pre-processing, such as a voice change. */ RAW_AUDIO_FRAME_OP_MODE_READ_WRITE = 2, }; @@ -189,7 +241,7 @@ enum RAW_AUDIO_FRAME_OP_MODE_TYPE { } // namespace rtc namespace media { - /** +/** * The type of media device. 
*/ enum MEDIA_SOURCE_TYPE { @@ -264,23 +316,23 @@ enum CONTENT_INSPECT_RESULT { }; enum CONTENT_INSPECT_TYPE { -/** - * (Default) content inspect type invalid - */ -CONTENT_INSPECT_INVALID = 0, -/** - * @deprecated - * Content inspect type moderation - */ -CONTENT_INSPECT_MODERATION __deprecated = 1, -/** - * Content inspect type supervise - */ -CONTENT_INSPECT_SUPERVISION = 2, -/** - * Content inspect type image moderation - */ -CONTENT_INSPECT_IMAGE_MODERATION = 3 + /** + * (Default) content inspect type invalid + */ + CONTENT_INSPECT_INVALID = 0, + /** + * @deprecated + * Content inspect type moderation + */ + CONTENT_INSPECT_MODERATION __deprecated = 1, + /** + * Content inspect type supervise + */ + CONTENT_INSPECT_SUPERVISION = 2, + /** + * Content inspect type image moderation + */ + CONTENT_INSPECT_IMAGE_MODERATION = 3 }; struct ContentInspectModule { @@ -312,15 +364,14 @@ struct ContentInspectConfig { /**The content inspect module count. */ int moduleCount; - ContentInspectConfig& operator=(const ContentInspectConfig& rth) - { - extraInfo = rth.extraInfo; - serverConfig = rth.serverConfig; - moduleCount = rth.moduleCount; - memcpy(&modules, &rth.modules, MAX_CONTENT_INSPECT_MODULE_COUNT * sizeof(ContentInspectModule)); - return *this; - } - ContentInspectConfig() :extraInfo(NULL), serverConfig(NULL), moduleCount(0){} + ContentInspectConfig& operator=(const ContentInspectConfig& rth) { + extraInfo = rth.extraInfo; + serverConfig = rth.serverConfig; + moduleCount = rth.moduleCount; + memcpy(&modules, &rth.modules, MAX_CONTENT_INSPECT_MODULE_COUNT * sizeof(ContentInspectModule)); + return *this; + } + ContentInspectConfig() : extraInfo(NULL), serverConfig(NULL), moduleCount(0) {} }; namespace base { @@ -342,9 +393,7 @@ struct PacketOptions { uint32_t timestamp; // Audio level indication. 
uint8_t audioLevelIndication; - PacketOptions() - : timestamp(0), - audioLevelIndication(127) {} + PacketOptions() : timestamp(0), audioLevelIndication(127) {} }; /** @@ -360,9 +409,7 @@ struct AudioEncodedFrameInfo { * The codec of the packet. */ uint8_t codec; - AudioEncodedFrameInfo() - : sendTs(0), - codec(0) {} + AudioEncodedFrameInfo() : sendTs(0), codec(0) {} }; /** @@ -372,17 +419,18 @@ struct AudioPcmFrame { /** * The buffer size of the PCM audio frame. */ - OPTIONAL_ENUM_SIZE_T { - // Stereo, 32 kHz, 60 ms (2 * 32 * 60) - /** - * The max number of the samples of the data. - * - * When the number of audio channel is two, the sample rate is 32 kHZ, - * the buffer length of the data is 60 ms, the number of the samples of the data is 3840 (2 x 32 x 60). - */ - kMaxDataSizeSamples = 3840, - /** The max number of the bytes of the data. */ - kMaxDataSizeBytes = kMaxDataSizeSamples * sizeof(int16_t), + OPTIONAL_ENUM_SIZE_T{ + // Stereo, 32 kHz, 60 ms (2 * 32 * 60) + /** + * The max number of the samples of the data. + * + * When the number of audio channel is two, the sample rate is 32 kHZ, + * the buffer length of the data is 60 ms, the number of the samples of the data is 3840 (2 x + * 32 x 60). + */ + kMaxDataSizeSamples = 3840, + /** The max number of the bytes of the data. */ + kMaxDataSizeBytes = kMaxDataSizeSamples * sizeof(int16_t), }; /** The timestamp (ms) of the audio frame. @@ -402,24 +450,31 @@ struct AudioPcmFrame { rtc::BYTES_PER_SAMPLE bytes_per_sample; /** The audio frame data. 
*/ int16_t data_[kMaxDataSizeSamples]; + + /** + * @technical preview + * data_[kMaxDataSizeSamples] is real stereo data + */ + bool is_stereo_; AudioPcmFrame& operator=(const AudioPcmFrame& src) { - if(this == &src) { + if (this == &src) { return *this; } - this->capture_timestamp = src.capture_timestamp; - this->samples_per_channel_ = src.samples_per_channel_; - this->sample_rate_hz_ = src.sample_rate_hz_; - this->bytes_per_sample = src.bytes_per_sample; - this->num_channels_ = src.num_channels_; + capture_timestamp = src.capture_timestamp; + samples_per_channel_ = src.samples_per_channel_; + sample_rate_hz_ = src.sample_rate_hz_; + bytes_per_sample = src.bytes_per_sample; + num_channels_ = src.num_channels_; + is_stereo_ = src.is_stereo_; size_t length = src.samples_per_channel_ * src.num_channels_; if (length > kMaxDataSizeSamples) { length = kMaxDataSizeSamples; } - memcpy(this->data_, src.data_, length * sizeof(int16_t)); + memcpy(data_, src.data_, length * sizeof(int16_t)); return *this; } @@ -429,7 +484,8 @@ struct AudioPcmFrame { samples_per_channel_(0), sample_rate_hz_(0), num_channels_(0), - bytes_per_sample(rtc::TWO_BYTES_PER_SAMPLE) { + bytes_per_sample(rtc::TWO_BYTES_PER_SAMPLE), + is_stereo_(false) { memset(data_, 0, sizeof(data_)); } @@ -438,7 +494,8 @@ struct AudioPcmFrame { samples_per_channel_(src.samples_per_channel_), sample_rate_hz_(src.sample_rate_hz_), num_channels_(src.num_channels_), - bytes_per_sample(src.bytes_per_sample) { + bytes_per_sample(src.bytes_per_sample), + is_stereo_(src.is_stereo_) { size_t length = src.samples_per_channel_ * src.num_channels_; if (length > kMaxDataSizeSamples) { length = kMaxDataSizeSamples; @@ -509,12 +566,17 @@ enum VIDEO_PIXEL_FORMAT { 14: pixel format for iOS CVPixelBuffer BGRA */ VIDEO_CVPIXEL_BGRA = 14, + /** + 15: pixel format for iOS CVPixelBuffer P010(10bit NV12) + */ + VIDEO_CVPIXEL_P010 = 15, /** * 16: I422. 
*/ VIDEO_PIXEL_I422 = 16, /** - * 17: ID3D11Texture2D, only support DXGI_FORMAT_B8G8R8A8_UNORM, DXGI_FORMAT_B8G8R8A8_TYPELESS, DXGI_FORMAT_NV12 texture format + * 17: ID3D11Texture2D, only support DXGI_FORMAT_B8G8R8A8_UNORM, DXGI_FORMAT_B8G8R8A8_TYPELESS, + * DXGI_FORMAT_NV12 texture format */ VIDEO_TEXTURE_ID3D11TEXTURE2D = 17, /** @@ -569,14 +631,199 @@ enum CAMERA_VIDEO_SOURCE_TYPE { * This interface provides access to metadata information. */ class IVideoFrameMetaInfo { - public: - enum META_INFO_KEY { - KEY_FACE_CAPTURE = 0, - }; - virtual ~IVideoFrameMetaInfo() {}; - virtual const char* getMetaInfoStr(META_INFO_KEY key) const = 0; + public: + enum META_INFO_KEY { + KEY_FACE_CAPTURE = 0, + }; + virtual ~IVideoFrameMetaInfo(){}; + virtual const char* getMetaInfoStr(META_INFO_KEY key) const = 0; +}; + +struct ColorSpace { + enum PrimaryID { + // The indices are equal to the values specified in T-REC H.273 Table 2. + PRIMARYID_BT709 = 1, + PRIMARYID_UNSPECIFIED = 2, + PRIMARYID_BT470M = 4, + PRIMARYID_BT470BG = 5, + PRIMARYID_SMPTE170M = 6, // Identical to BT601 + PRIMARYID_SMPTE240M = 7, + PRIMARYID_FILM = 8, + PRIMARYID_BT2020 = 9, + PRIMARYID_SMPTEST428 = 10, + PRIMARYID_SMPTEST431 = 11, + PRIMARYID_SMPTEST432 = 12, + PRIMARYID_JEDECP22 = 22, // Identical to EBU3213-E + }; + + enum RangeID { + // The indices are equal to the values specified at + // https://www.webmproject.org/docs/container/#colour for the element Range. + RANGEID_INVALID = 0, + // Limited Rec. 709 color range with RGB values ranging from 16 to 235. + RANGEID_LIMITED = 1, + // Full RGB color range with RGB valees from 0 to 255. + RANGEID_FULL = 2, + // Range is defined by MatrixCoefficients/TransferCharacteristics. + RANGEID_DERIVED = 3, + }; + + enum MatrixID { + // The indices are equal to the values specified in T-REC H.273 Table 4. 
+ MATRIXID_RGB = 0, + MATRIXID_BT709 = 1, + MATRIXID_UNSPECIFIED = 2, + MATRIXID_FCC = 4, + MATRIXID_BT470BG = 5, + MATRIXID_SMPTE170M = 6, + MATRIXID_SMPTE240M = 7, + MATRIXID_YCOCG = 8, + MATRIXID_BT2020_NCL = 9, + MATRIXID_BT2020_CL = 10, + MATRIXID_SMPTE2085 = 11, + MATRIXID_CDNCLS = 12, + MATRIXID_CDCLS = 13, + MATRIXID_BT2100_ICTCP = 14, + }; + + enum TransferID { + // The indices are equal to the values specified in T-REC H.273 Table 3. + TRANSFERID_BT709 = 1, + TRANSFERID_UNSPECIFIED = 2, + TRANSFERID_GAMMA22 = 4, + TRANSFERID_GAMMA28 = 5, + TRANSFERID_SMPTE170M = 6, + TRANSFERID_SMPTE240M = 7, + TRANSFERID_LINEAR = 8, + TRANSFERID_LOG = 9, + TRANSFERID_LOG_SQRT = 10, + TRANSFERID_IEC61966_2_4 = 11, + TRANSFERID_BT1361_ECG = 12, + TRANSFERID_IEC61966_2_1 = 13, + TRANSFERID_BT2020_10 = 14, + TRANSFERID_BT2020_12 = 15, + TRANSFERID_SMPTEST2084 = 16, + TRANSFERID_SMPTEST428 = 17, + TRANSFERID_ARIB_STD_B67 = 18, + }; + + PrimaryID primaries; + TransferID transfer; + MatrixID matrix; + RangeID range; + + ColorSpace() + : primaries(PRIMARYID_UNSPECIFIED), transfer(TRANSFERID_UNSPECIFIED), + matrix(MATRIXID_UNSPECIFIED), range(RANGEID_INVALID) {} + + bool validate() const { + return primaries != PRIMARYID_UNSPECIFIED || transfer != TRANSFERID_UNSPECIFIED || + matrix != MATRIXID_UNSPECIFIED || + range != RANGEID_INVALID; + } +}; + +/** + * The definition of the Hdr10MetadataInfo struct. + */ +struct Hdr10MetadataInfo { + /** + * The x coordinates of the red value in the CIE1931 color space. The values need to normalized to 50,000. + */ + uint16_t redPrimaryX; + /** + * The y coordinates of the red value in the CIE1931 color space. The values need to normalized to 50,000. + */ + uint16_t redPrimaryY; + /** + * The x coordinates of the green value in the CIE1931 color space. The values need to normalized to 50,000. + */ + uint16_t greenPrimaryX; + /** + * The y coordinates of the green value in the CIE1931 color space. The values need to normalized to 50,000. 
+ */ + uint16_t greenPrimaryY; + /** + * The x coordinates of the blue value in the CIE1931 color space. The values need to normalized to 50,000. + */ + uint16_t bluePrimaryX; + /** + * The y coordinates of the blue value in the CIE1931 color space. The values need to normalized to 50,000. + */ + uint16_t bluePrimaryY; + /** + * The x coordinates of the white point in the CIE1931 color space.The values need to normalized to 50,000. + */ + uint16_t whitePointX; + /** + * The y coordinates of the white point in the CIE1931 color space.The values need to normalized to 50,000. + */ + uint16_t whitePointY; + /** + * The maximum number of nits of the display used to master the content. The values need to normalized to 10,000. + */ + unsigned int maxMasteringLuminance; + /** + * The minimum number of nits of the display used to master the content. The values need to normalized to 10,000. + */ + unsigned int minMasteringLuminance; + /** + * The maximum content light level (MaxCLL). This is the nit value corresponding to the brightest pixel used anywhere in the content. + */ + uint16_t maxContentLightLevel; + /** + * The maximum frame average light level (MaxFALL). This is the nit value corresponding to the average luminance of the frame which has the brightest average luminance anywhere in the content. + */ + uint16_t maxFrameAverageLightLevel; + + Hdr10MetadataInfo() + : redPrimaryX(0), + redPrimaryY(0), + greenPrimaryX(0), + greenPrimaryY(0), + bluePrimaryX(0), + bluePrimaryY(0), + whitePointX(0), + whitePointY(0), + maxMasteringLuminance(0), + minMasteringLuminance(0), + maxContentLightLevel(0), + maxFrameAverageLightLevel(0){} + + bool validate() const { + return maxContentLightLevel >= 0 && maxContentLightLevel <= 20000 && + maxFrameAverageLightLevel >= 0 && + maxFrameAverageLightLevel <= 20000; + } }; +/** + * The relative position between alphabuffer and the frame. 
+ */ +enum ALPHA_STITCH_MODE { + /** + * 0: Normal frame without alphabuffer stitched + */ + NO_ALPHA_STITCH = 0, + /** + * 1: Alphabuffer is above the frame + */ + ALPHA_STITCH_UP = 1, + /** + * 2: Alphabuffer is below the frame + */ + ALPHA_STITCH_BELOW = 2, + /** + * 3: Alphabuffer is on the left of frame + */ + ALPHA_STITCH_LEFT = 3, + /** + * 4: Alphabuffer is on the right of frame + */ + ALPHA_STITCH_RIGHT = 4, +}; + + /** * The definition of the ExternalVideoFrame struct. */ @@ -596,14 +843,16 @@ struct ExternalVideoFrame { eglContext(NULL), eglType(EGL_CONTEXT10), textureId(0), - metadata_buffer(NULL), - metadata_size(0), + fenceObject(0), + metadataBuffer(NULL), + metadataSize(0), alphaBuffer(NULL), fillAlphaBuffer(false), - d3d11_texture_2d(NULL), - texture_slice_index(0){} + alphaStitchMode(NO_ALPHA_STITCH), + d3d11Texture2d(NULL), + textureSliceIndex(0){} - /** + /** * The EGL context type. */ enum EGL_CONTEXT_TYPE { @@ -643,6 +892,7 @@ struct ExternalVideoFrame { * The pixel format: #VIDEO_PIXEL_FORMAT */ VIDEO_PIXEL_FORMAT format; + /** * The video buffer. */ @@ -677,32 +927,39 @@ struct ExternalVideoFrame { */ int cropBottom; /** - * [Raw data related parameter] The clockwise rotation information of the video frame. You can set the - * rotation angle as 0, 90, 180, or 270. The default value is 0. + * [Raw data related parameter] The clockwise rotation information of the video frame. You can set + * the rotation angle as 0, 90, 180, or 270. The default value is 0. */ int rotation; /** - * The timestamp (ms) of the incoming video frame. An incorrect timestamp results in a frame loss or - * unsynchronized audio and video. - * + * The timestamp (ms) of the incoming video frame. An incorrect timestamp results in a frame loss + * or unsynchronized audio and video. + * * Please refer to getAgoraCurrentMonotonicTimeInMs or getCurrentMonotonicTimeInMs * to determine how to fill this filed. 
*/ long long timestamp; /** * [Texture-related parameter] - * When using the OpenGL interface (javax.microedition.khronos.egl.*) defined by Khronos, set EGLContext to this field. - * When using the OpenGL interface (android.opengl.*) defined by Android, set EGLContext to this field. + * When using the OpenGL interface (javax.microedition.khronos.egl.*) defined by Khronos, set + * EGLContext to this field. When using the OpenGL interface (android.opengl.*) defined by + * Android, set EGLContext to this field. */ - void *eglContext; + void* eglContext; /** * [Texture related parameter] Texture ID used by the video frame. */ EGL_CONTEXT_TYPE eglType; /** - * [Texture related parameter] Incoming 4 × 4 transformational matrix. The typical value is a unit matrix. + * [Texture related parameter] Incoming 4 × 4 transformational matrix. The typical value is + * a unit matrix. */ int textureId; + /** + * [Texture related parameter] The fence object related to the textureId parameter, indicating the synchronization status of the video data in Texture format. + * The default value is 0 + */ + long long fenceObject; /** * [Texture related parameter] Incoming 4 × 4 transformational matrix. The typical value is a unit matrix. */ @@ -711,35 +968,53 @@ struct ExternalVideoFrame { * [Texture related parameter] The MetaData buffer. * The default value is NULL */ - uint8_t* metadata_buffer; + uint8_t* metadataBuffer; /** * [Texture related parameter] The MetaData size. * The default value is 0 */ - int metadata_size; + int metadataSize; /** * Indicates the alpha channel of current frame, which is consistent with the dimension of the video frame. * The value range of each pixel is [0,255], where 0 represents the background; 255 represents the foreground. * The default value is NULL. - * @technical preview */ uint8_t* alphaBuffer; /** - * Extract alphaBuffer from bgra or rgba data. Set it true if you do not explicitly specify the alphabuffer. 
+ * [For bgra or rgba only] Extract alphaBuffer from bgra or rgba data. Set it true if you do not explicitly specify the alphabuffer. * The default value is false - * @technical preview */ bool fillAlphaBuffer; + /** + * The relative position between alphabuffer and the frame. + * 0: Normal frame; + * 1: Alphabuffer is above the frame; + * 2: Alphabuffer is below the frame; + * 3: Alphabuffer is on the left of frame; + * 4: Alphabuffer is on the right of frame; + * The default value is 0. + */ + ALPHA_STITCH_MODE alphaStitchMode; /** * [For Windows only] The pointer of ID3D11Texture2D used by the video frame. */ - void *d3d11_texture_2d; + void *d3d11Texture2d; /** * [For Windows only] The index of ID3D11Texture2D array used by the video frame. */ - int texture_slice_index; + int textureSliceIndex; + + /** + * metadata info used for hdr video data + */ + Hdr10MetadataInfo hdr10MetadataInfo; + + /** + * The ColorSpace of the video frame. + */ + ColorSpace colorSpace; }; /** @@ -765,6 +1040,7 @@ struct VideoFrame { textureId(0), d3d11Texture2d(NULL), alphaBuffer(NULL), + alphaStitchMode(NO_ALPHA_STITCH), pixelBuffer(NULL), metaInfo(NULL){ memset(matrix, 0, sizeof(matrix)); @@ -810,8 +1086,8 @@ struct VideoFrame { */ int rotation; /** - * The timestamp to render the video stream. Use this parameter for audio-video synchronization when - * rendering the video. + * The timestamp to render the video stream. Use this parameter for audio-video synchronization + * when rendering the video. * * @note This parameter is for rendering the video, not capturing the video. */ @@ -839,7 +1115,8 @@ struct VideoFrame { */ int textureId; /** - * [Texture related parameter] The pointer of ID3D11Texture2D used by the video frame,for Windows only. + * [Texture related parameter] The pointer of ID3D11Texture2D used by the video frame,for Windows + * only. 
*/ void* d3d11Texture2d; /** @@ -850,17 +1127,37 @@ struct VideoFrame { * Indicates the alpha channel of current frame, which is consistent with the dimension of the video frame. * The value range of each pixel is [0,255], where 0 represents the background; 255 represents the foreground. * The default value is NULL. - * @technical preview */ uint8_t* alphaBuffer; + /** + * The relative position between alphabuffer and the frame. + * 0: Normal frame; + * 1: Alphabuffer is above the frame; + * 2: Alphabuffer is below the frame; + * 3: Alphabuffer is on the left of frame; + * 4: Alphabuffer is on the right of frame; + * The default value is 0. + */ + ALPHA_STITCH_MODE alphaStitchMode; /** *The type of CVPixelBufferRef, for iOS and macOS only. */ void* pixelBuffer; /** - * The pointer to IVideoFrameMetaInfo, which is the interface to get metainfo contents from VideoFrame. + * The pointer to IVideoFrameMetaInfo, which is the interface to get metainfo contents from + * VideoFrame. */ IVideoFrameMetaInfo* metaInfo; + + /** + * metadata info used for hdr video data + */ + Hdr10MetadataInfo hdr10MetadataInfo; + + /** + * The ColorSpace of the video frame + */ + ColorSpace colorSpace; }; /** @@ -872,7 +1169,8 @@ class IVideoFrameObserver { * Occurs each time the player receives a video frame. * * After registering the video frame observer, - * the callback occurs each time the player receives a video frame to report the detailed information of the video frame. + * the callback occurs each time the player receives a video frame to report the detailed + * information of the video frame. * @param frame The detailed information of the video frame. See {@link VideoFrame}. */ virtual void onFrame(const VideoFrame* frame) = 0; @@ -910,6 +1208,30 @@ enum VIDEO_MODULE_POSITION { } // namespace base +/** Definition of SnapshotConfig. + */ +struct SnapshotConfig { + /** + * The local path (including filename extensions) of the snapshot. 
For example: + * - Windows: `C:\Users\\AppData\Local\Agora\\example.jpg` + * - iOS: `/App Sandbox/Library/Caches/example.jpg` + * - macOS: `ļ½ž/Library/Logs/example.jpg` + * - Android: `/storage/emulated/0/Android/data//files/example.jpg` + */ + const char* filePath; + + /** + * The position of the video observation. See VIDEO_MODULE_POSITION. + * + * Allowed values vary depending on the `uid` parameter passed in `takeSnapshot` or `takeSnapshotEx`: + * - uid = 0: Position 2, 4 and 8 are allowed. + * - uid != 0: Only position 2 is allowed. + * + */ + media::base::VIDEO_MODULE_POSITION position; + SnapshotConfig() :filePath(NULL), position(media::base::POSITION_PRE_ENCODER) {} +}; + /** * The audio frame observer. */ @@ -969,7 +1291,7 @@ class IAudioFrameObserverBase { */ int samplesPerSec; /** - * The data buffer of the audio frame. When the audio frame uses a stereo channel, the data + * The data buffer of the audio frame. When the audio frame uses a stereo channel, the data * buffer is interleaved. * * Buffer data size: buffer = samplesPerChannel Ɨ channels Ɨ bytesPerSample. @@ -978,14 +1300,14 @@ class IAudioFrameObserverBase { /** * The timestamp to render the audio data. * - * You can use this timestamp to restore the order of the captured audio frame, and synchronize - * audio and video frames in video scenarios, including scenarios where external video sources + * You can use this timestamp to restore the order of the captured audio frame, and synchronize + * audio and video frames in video scenarios, including scenarios where external video sources * are used. */ int64_t renderTimeMs; /** * A reserved parameter. - * + * * You can use this presentationMs parameter to indicate the presenation milisecond timestamp, * this will then filled into audio4 extension part, the remote side could use this pts in av * sync process with video frame. @@ -994,11 +1316,11 @@ class IAudioFrameObserverBase { /** * The pts timestamp of this audio frame. 
* - * This timestamp is used to indicate the origin pts time of the frame, and sync with video frame by - * the pts time stamp + * This timestamp is used to indicate the origin pts time of the frame, and sync with video + * frame by the pts time stamp */ int64_t presentationMs; - /** + /** * The number of the audio track. */ int audioTrackNumber; @@ -1007,17 +1329,18 @@ class IAudioFrameObserverBase { */ uint32_t rtpTimestamp; - AudioFrame() : type(FRAME_TYPE_PCM16), - samplesPerChannel(0), - bytesPerSample(rtc::TWO_BYTES_PER_SAMPLE), - channels(0), - samplesPerSec(0), - buffer(NULL), - renderTimeMs(0), - avsync_type(0), - presentationMs(0), - audioTrackNumber(0), - rtpTimestamp(0) {} + AudioFrame() + : type(FRAME_TYPE_PCM16), + samplesPerChannel(0), + bytesPerSample(rtc::TWO_BYTES_PER_SAMPLE), + channels(0), + samplesPerSec(0), + buffer(NULL), + renderTimeMs(0), + avsync_type(0), + presentationMs(0), + audioTrackNumber(0), + rtpTimestamp(0) {} }; enum AUDIO_FRAME_POSITION { @@ -1066,8 +1389,17 @@ class IAudioFrameObserverBase { */ int samples_per_call; - AudioParams() : sample_rate(0), channels(0), mode(rtc::RAW_AUDIO_FRAME_OP_MODE_READ_ONLY), samples_per_call(0) {} - AudioParams(int samplerate, int channel, rtc::RAW_AUDIO_FRAME_OP_MODE_TYPE type, int samplesPerCall) : sample_rate(samplerate), channels(channel), mode(type), samples_per_call(samplesPerCall) {} + AudioParams() + : sample_rate(0), + channels(0), + mode(rtc::RAW_AUDIO_FRAME_OP_MODE_READ_ONLY), + samples_per_call(0) {} + AudioParams(int samplerate, int channel, rtc::RAW_AUDIO_FRAME_OP_MODE_TYPE type, + int samplesPerCall) + : sample_rate(samplerate), + channels(channel), + mode(type), + samples_per_call(samplesPerCall) {} }; public: @@ -1117,10 +1449,11 @@ class IAudioFrameObserverBase { * - true: The before-mixing playback audio frame is valid and is encoded and sent. * - false: The before-mixing playback audio frame is invalid and is not encoded or sent. 
*/ - virtual bool onPlaybackAudioFrameBeforeMixing(const char* channelId, base::user_id_t userId, AudioFrame& audioFrame) { - (void) channelId; - (void) userId; - (void) audioFrame; + virtual bool onPlaybackAudioFrameBeforeMixing(const char* channelId, base::user_id_t userId, + AudioFrame& audioFrame) { + (void)channelId; + (void)userId; + (void)audioFrame; return true; } @@ -1129,12 +1462,19 @@ class IAudioFrameObserverBase { * @return A bit mask that controls the frame position of the audio observer. * @note - Use '|' (the OR operator) to observe multiple frame positions. *

- * After you successfully register the audio observer, the SDK triggers this callback each time it receives a audio frame. You can determine which position to observe by setting the return value. - * The SDK provides 4 positions for observer. Each position corresponds to a callback function: - * - `AUDIO_FRAME_POSITION_PLAYBACK (1 << 0)`: The position for playback audio frame is received, which corresponds to the \ref onPlaybackFrame "onPlaybackFrame" callback. - * - `AUDIO_FRAME_POSITION_RECORD (1 << 1)`: The position for record audio frame is received, which corresponds to the \ref onRecordFrame "onRecordFrame" callback. - * - `AUDIO_FRAME_POSITION_MIXED (1 << 2)`: The position for mixed audio frame is received, which corresponds to the \ref onMixedFrame "onMixedFrame" callback. - * - `AUDIO_FRAME_POSITION_BEFORE_MIXING (1 << 3)`: The position for playback audio frame before mixing is received, which corresponds to the \ref onPlaybackFrameBeforeMixing "onPlaybackFrameBeforeMixing" callback. + * After you successfully register the audio observer, the SDK triggers this callback each time it + * receives a audio frame. You can determine which position to observe by setting the return + * value. The SDK provides 4 positions for observer. Each position corresponds to a callback + * function: + * - `AUDIO_FRAME_POSITION_PLAYBACK (1 << 0)`: The position for playback audio frame is received, + * which corresponds to the \ref onPlaybackFrame "onPlaybackFrame" callback. + * - `AUDIO_FRAME_POSITION_RECORD (1 << 1)`: The position for record audio frame is received, + * which corresponds to the \ref onRecordFrame "onRecordFrame" callback. + * - `AUDIO_FRAME_POSITION_MIXED (1 << 2)`: The position for mixed audio frame is received, which + * corresponds to the \ref onMixedFrame "onMixedFrame" callback. 
+ * - `AUDIO_FRAME_POSITION_BEFORE_MIXING (1 << 3)`: The position for playback audio frame before + * mixing is received, which corresponds to the \ref onPlaybackFrameBeforeMixing + * "onPlaybackFrameBeforeMixing" callback. * @return The bit mask that controls the audio observation positions. * See AUDIO_FRAME_POSITION. */ @@ -1206,25 +1546,25 @@ class IAudioFrameObserver : public IAudioFrameObserverBase { * - true: The before-mixing playback audio frame is valid and is encoded and sent. * - false: The before-mixing playback audio frame is invalid and is not encoded or sent. */ - virtual bool onPlaybackAudioFrameBeforeMixing(const char* channelId, rtc::uid_t uid, AudioFrame& audioFrame) = 0; + virtual bool onPlaybackAudioFrameBeforeMixing(const char* channelId, rtc::uid_t uid, + AudioFrame& audioFrame) = 0; }; struct AudioSpectrumData { /** * The audio spectrum data of audio. */ - const float *audioSpectrumData; + const float* audioSpectrumData; /** * The data length of audio spectrum data. */ int dataLength; AudioSpectrumData() : audioSpectrumData(NULL), dataLength(0) {} - AudioSpectrumData(const float *data, int length) : - audioSpectrumData(data), dataLength(length) {} + AudioSpectrumData(const float* data, int length) : audioSpectrumData(data), dataLength(length) {} }; -struct UserAudioSpectrumInfo { +struct UserAudioSpectrumInfo { /** * User ID of the speaker. */ @@ -1236,14 +1576,15 @@ struct UserAudioSpectrumInfo { UserAudioSpectrumInfo() : uid(0) {} - UserAudioSpectrumInfo(agora::rtc::uid_t uid, const float* data, int length) : uid(uid), spectrumData(data, length) {} + UserAudioSpectrumInfo(agora::rtc::uid_t uid, const float* data, int length) + : uid(uid), spectrumData(data, length) {} }; /** * The IAudioSpectrumObserver class. 
*/ class IAudioSpectrumObserver { -public: + public: virtual ~IAudioSpectrumObserver() {} /** @@ -1252,7 +1593,8 @@ class IAudioSpectrumObserver { * This callback reports the audio spectrum data of the local audio at the moment * in the channel. * - * You can set the time interval of this callback using \ref ILocalUser::enableAudioSpectrumMonitor "enableAudioSpectrumMonitor". + * You can set the time interval of this callback using \ref + * ILocalUser::enableAudioSpectrumMonitor "enableAudioSpectrumMonitor". * * @param data The audio spectrum data of local audio. * - true: Processed. @@ -1265,10 +1607,12 @@ class IAudioSpectrumObserver { * This callback reports the IDs and audio spectrum data of the loudest speakers at the moment * in the channel. * - * You can set the time interval of this callback using \ref ILocalUser::enableAudioSpectrumMonitor "enableAudioSpectrumMonitor". + * You can set the time interval of this callback using \ref + * ILocalUser::enableAudioSpectrumMonitor "enableAudioSpectrumMonitor". * - * @param spectrums The pointer to \ref agora::media::UserAudioSpectrumInfo "UserAudioSpectrumInfo", which is an array containing - * the user ID and audio spectrum data for each speaker. + * @param spectrums The pointer to \ref agora::media::UserAudioSpectrumInfo + * "UserAudioSpectrumInfo", which is an array containing the user ID and audio spectrum data for + * each speaker. * - This array contains the following members: * - `uid`, which is the UID of each remote speaker * - `spectrumData`, which reports the audio spectrum of each remote speaker. @@ -1276,7 +1620,8 @@ class IAudioSpectrumObserver { * - true: Processed. * - false: Not processed. */ - virtual bool onRemoteAudioSpectrum(const UserAudioSpectrumInfo* spectrums, unsigned int spectrumNumber) = 0; + virtual bool onRemoteAudioSpectrum(const UserAudioSpectrumInfo* spectrums, + unsigned int spectrumNumber) = 0; }; /** @@ -1294,8 +1639,9 @@ class IVideoEncodedFrameObserver { * - true: Accept. 
* - false: Do not accept. */ - virtual bool onEncodedVideoFrameReceived(rtc::uid_t uid, const uint8_t* imageBuffer, size_t length, - const rtc::EncodedVideoFrameInfo& videoEncodedFrameInfo) = 0; + virtual bool onEncodedVideoFrameReceived( + rtc::uid_t uid, const uint8_t* imageBuffer, size_t length, + const rtc::EncodedVideoFrameInfo& videoEncodedFrameInfo) = 0; virtual ~IVideoEncodedFrameObserver() {} }; @@ -1312,16 +1658,18 @@ class IVideoFrameObserver { enum VIDEO_FRAME_PROCESS_MODE { /** * Read-only mode. - * + * * In this mode, you do not modify the video frame. The video frame observer is a renderer. */ - PROCESS_MODE_READ_ONLY, // Observer works as a pure renderer and will not modify the original frame. + PROCESS_MODE_READ_ONLY, // Observer works as a pure renderer and will not modify the original + // frame. /** * Read and write mode. - * + * * In this mode, you modify the video frame. The video frame observer is a video filter. */ - PROCESS_MODE_READ_WRITE, // Observer works as a filter that will process the video frame and affect the following frame processing in SDK. + PROCESS_MODE_READ_WRITE, // Observer works as a filter that will process the video frame and + // affect the following frame processing in SDK. }; public: @@ -1330,38 +1678,43 @@ class IVideoFrameObserver { /** * Occurs each time the SDK receives a video frame captured by the local camera. * - * After you successfully register the video frame observer, the SDK triggers this callback each time - * a video frame is received. In this callback, you can get the video data captured by the local - * camera. You can then pre-process the data according to your scenarios. + * After you successfully register the video frame observer, the SDK triggers this callback each + * time a video frame is received. In this callback, you can get the video data captured by the + * local camera. You can then pre-process the data according to your scenarios. 
* * After pre-processing, you can send the processed video data back to the SDK by setting the * `videoFrame` parameter in this callback. * * @note - * - If you get the video data in RGBA color encoding format, Agora does not support using this callback to send the processed data in RGBA color encoding format back to the SDK. - * - The video data that this callback gets has not been pre-processed, such as watermarking, cropping content, rotating, or image enhancement. + * - If you get the video data in RGBA color encoding format, Agora does not support using this + * callback to send the processed data in RGBA color encoding format back to the SDK. + * - The video data that this callback gets has not been pre-processed, such as watermarking, + * cropping content, rotating, or image enhancement. * * @param videoFrame A pointer to the video frame: VideoFrame * @param sourceType source type of video frame. See #VIDEO_SOURCE_TYPE. * @return Determines whether to ignore the current video frame if the pre-processing fails: * - true: Do not ignore. * - false: Ignore, in which case this method does not sent the current video frame to the SDK. - */ - virtual bool onCaptureVideoFrame(agora::rtc::VIDEO_SOURCE_TYPE sourceType, VideoFrame& videoFrame) = 0; + */ + virtual bool onCaptureVideoFrame(agora::rtc::VIDEO_SOURCE_TYPE sourceType, + VideoFrame& videoFrame) = 0; /** * Occurs each time the SDK receives a video frame before encoding. * - * After you successfully register the video frame observer, the SDK triggers this callback each time - * when it receives a video frame. In this callback, you can get the video data before encoding. You can then - * process the data according to your particular scenarios. + * After you successfully register the video frame observer, the SDK triggers this callback each + * time when it receives a video frame. In this callback, you can get the video data before + * encoding. You can then process the data according to your particular scenarios. 
* * After processing, you can send the processed video data back to the SDK by setting the * `videoFrame` parameter in this callback. * * @note - * - To get the video data captured from the second screen before encoding, you need to set (1 << 2) as a frame position through `getObservedFramePosition`. - * - The video data that this callback gets has been pre-processed, such as watermarking, cropping content, rotating, or image enhancement. + * - To get the video data captured from the second screen before encoding, you need to set (1 << + * 2) as a frame position through `getObservedFramePosition`. + * - The video data that this callback gets has been pre-processed, such as watermarking, cropping + * content, rotating, or image enhancement. * - This callback does not support sending processed RGBA video data back to the SDK. * * @param videoFrame A pointer to the video frame: VideoFrame @@ -1370,7 +1723,8 @@ class IVideoFrameObserver { * - true: Do not ignore. * - false: Ignore, in which case this method does not sent the current video frame to the SDK. */ - virtual bool onPreEncodeVideoFrame(agora::rtc::VIDEO_SOURCE_TYPE sourceType, VideoFrame& videoFrame) = 0; + virtual bool onPreEncodeVideoFrame(agora::rtc::VIDEO_SOURCE_TYPE sourceType, + VideoFrame& videoFrame) = 0; /** * Occurs each time the SDK receives a video frame decoded by the MediaPlayer. @@ -1381,10 +1735,13 @@ class IVideoFrameObserver { * * After pre-processing, you can send the processed video data back to the SDK by setting the * `videoFrame` parameter in this callback. - * + * * @note - * - This callback will not be affected by the return values of \ref getVideoFrameProcessMode "getVideoFrameProcessMode", \ref getRotationApplied "getRotationApplied", \ref getMirrorApplied "getMirrorApplied", \ref getObservedFramePosition "getObservedFramePosition". 
- * - On Android, this callback is not affected by the return value of \ref getVideoFormatPreference "getVideoFormatPreference" + * - This callback will not be affected by the return values of \ref getVideoFrameProcessMode + * "getVideoFrameProcessMode", \ref getRotationApplied "getRotationApplied", \ref getMirrorApplied + * "getMirrorApplied", \ref getObservedFramePosition "getObservedFramePosition". + * - On Android, this callback is not affected by the return value of \ref + * getVideoFormatPreference "getVideoFormatPreference" * * @param videoFrame A pointer to the video frame: VideoFrame * @param mediaPlayerId ID of the mediaPlayer. @@ -1397,13 +1754,13 @@ class IVideoFrameObserver { /** * Occurs each time the SDK receives a video frame sent by the remote user. * - * After you successfully register the video frame observer, the SDK triggers this callback each time a - * video frame is received. In this callback, you can get the video data sent by the remote user. You - * can then post-process the data according to your scenarios. + * After you successfully register the video frame observer, the SDK triggers this callback each + * time a video frame is received. In this callback, you can get the video data sent by the remote + * user. You can then post-process the data according to your scenarios. + * + * After post-processing, you can send the processed data back to the SDK by setting the + * `videoFrame` parameter in this callback. * - * After post-processing, you can send the processed data back to the SDK by setting the `videoFrame` - * parameter in this callback. - * * @note This callback does not support sending processed RGBA video data back to the SDK. * * @param channelId The channel name @@ -1413,45 +1770,48 @@ class IVideoFrameObserver { * - true: Do not ignore. * - false: Ignore, in which case this method does not sent the current video frame to the SDK. 
*/ - virtual bool onRenderVideoFrame(const char* channelId, rtc::uid_t remoteUid, VideoFrame& videoFrame) = 0; + virtual bool onRenderVideoFrame(const char* channelId, rtc::uid_t remoteUid, + VideoFrame& videoFrame) = 0; virtual bool onTranscodedVideoFrame(VideoFrame& videoFrame) = 0; /** - * Occurs each time the SDK receives a video frame and prompts you to set the process mode of the video frame. - * - * After you successfully register the video frame observer, the SDK triggers this callback each time it receives - * a video frame. You need to set your preferred process mode in the return value of this callback. + * Occurs each time the SDK receives a video frame and prompts you to set the process mode of the + * video frame. + * + * After you successfully register the video frame observer, the SDK triggers this callback each + * time it receives a video frame. You need to set your preferred process mode in the return value + * of this callback. * @return VIDEO_FRAME_PROCESS_MODE. */ - virtual VIDEO_FRAME_PROCESS_MODE getVideoFrameProcessMode() { - return PROCESS_MODE_READ_ONLY; - } + virtual VIDEO_FRAME_PROCESS_MODE getVideoFrameProcessMode() { return PROCESS_MODE_READ_ONLY; } /** * Sets the format of the raw video data output by the SDK. * - * If you want to get raw video data in a color encoding format other than YUV 420, register this callback when - * calling `registerVideoFrameObserver`. After you successfully register the video frame observer, the SDK triggers - * this callback each time it receives a video frame. You need to set your preferred video data in the return value - * of this callback. - * - * @note If you want the video captured by the sender to be the original format, set the original video data format - * to VIDEO_PIXEL_DEFAULT in the return value. On different platforms, the original video pixel format is also - * different, for the actual video pixel format, see `VideoFrame`. 
- * + * If you want to get raw video data in a color encoding format other than YUV 420, register this + * callback when calling `registerVideoFrameObserver`. After you successfully register the video + * frame observer, the SDK triggers this callback each time it receives a video frame. You need to + * set your preferred video data in the return value of this callback. + * + * @note If you want the video captured by the sender to be the original format, set the original + * video data format to VIDEO_PIXEL_DEFAULT in the return value. On different platforms, the + * original video pixel format is also different, for the actual video pixel format, see + * `VideoFrame`. + * * @return Sets the video format. See VIDEO_PIXEL_FORMAT. */ virtual base::VIDEO_PIXEL_FORMAT getVideoFormatPreference() { return base::VIDEO_PIXEL_DEFAULT; } /** - * Occurs each time the SDK receives a video frame, and prompts you whether to rotate the captured video. - * - * If you want to rotate the captured video according to the rotation member in the `VideoFrame` class, register this - * callback by calling `registerVideoFrameObserver`. After you successfully register the video frame observer, the - * SDK triggers this callback each time it receives a video frame. You need to set whether to rotate the video frame - * in the return value of this callback. - * + * Occurs each time the SDK receives a video frame, and prompts you whether to rotate the captured + * video. + * + * If you want to rotate the captured video according to the rotation member in the `VideoFrame` + * class, register this callback by calling `registerVideoFrameObserver`. After you successfully + * register the video frame observer, the SDK triggers this callback each time it receives a video + * frame. You need to set whether to rotate the video frame in the return value of this callback. + * * @note This function only supports video data in RGBA or YUV420. * * @return Determines whether to rotate. 
@@ -1461,13 +1821,15 @@ class IVideoFrameObserver { virtual bool getRotationApplied() { return false; } /** - * Occurs each time the SDK receives a video frame and prompts you whether or not to mirror the captured video. - * - * If the video data you want to obtain is a mirror image of the original video, you need to register this callback - * when calling `registerVideoFrameObserver`. After you successfully register the video frame observer, the SDK - * triggers this callback each time it receives a video frame. You need to set whether or not to mirror the video - * frame in the return value of this callback. - * + * Occurs each time the SDK receives a video frame and prompts you whether or not to mirror the + * captured video. + * + * If the video data you want to obtain is a mirror image of the original video, you need to + * register this callback when calling `registerVideoFrameObserver`. After you successfully + * register the video frame observer, the SDK triggers this callback each time it receives a video + * frame. You need to set whether or not to mirror the video frame in the return value of this + * callback. + * * @note This function only supports video data in RGBA and YUV420 formats. * * @return Determines whether to mirror. @@ -1479,19 +1841,24 @@ class IVideoFrameObserver { /** * Sets the frame position for the video observer. * - * After you successfully register the video observer, the SDK triggers this callback each time it receives - * a video frame. You can determine which position to observe by setting the return value. The SDK provides - * 3 positions for observer. Each position corresponds to a callback function: + * After you successfully register the video observer, the SDK triggers this callback each time it + * receives a video frame. You can determine which position to observe by setting the return + * value. The SDK provides 3 positions for observer. 
Each position corresponds to a callback + * function: * - * POSITION_POST_CAPTURER(1 << 0): The position after capturing the video data, which corresponds to the onCaptureVideoFrame callback. - * POSITION_PRE_RENDERER(1 << 1): The position before receiving the remote video data, which corresponds to the onRenderVideoFrame callback. - * POSITION_PRE_ENCODER(1 << 2): The position before encoding the video data, which corresponds to the onPreEncodeVideoFrame callback. + * POSITION_POST_CAPTURER(1 << 0): The position after capturing the video data, which corresponds + * to the onCaptureVideoFrame callback. POSITION_PRE_RENDERER(1 << 1): The position before + * receiving the remote video data, which corresponds to the onRenderVideoFrame callback. + * POSITION_PRE_ENCODER(1 << 2): The position before encoding the video data, which corresponds to + * the onPreEncodeVideoFrame callback. * * To observe multiple frame positions, use '|' (the OR operator). - * This callback observes POSITION_POST_CAPTURER(1 << 0) and POSITION_PRE_RENDERER(1 << 1) by default. - * To conserve the system consumption, you can reduce the number of frame positions that you want to observe. + * This callback observes POSITION_POST_CAPTURER(1 << 0) and POSITION_PRE_RENDERER(1 << 1) by + * default. To conserve the system consumption, you can reduce the number of frame positions that + * you want to observe. * - * @return A bit mask that controls the frame position of the video observer: VIDEO_OBSERVER_POSITION. + * @return A bit mask that controls the frame position of the video observer: + * VIDEO_OBSERVER_POSITION. */ virtual uint32_t getObservedFramePosition() { return base::POSITION_POST_CAPTURER | base::POSITION_PRE_RENDERER; @@ -1585,7 +1952,8 @@ enum RecorderReasonCode { */ RECORDER_REASON_WRITE_FAILED = 1, /** - * 2: The SDK does not detect audio and video streams to be recorded, or audio and video streams are interrupted for more than five seconds during recording. 
+ * 2: The SDK does not detect audio and video streams to be recorded, or audio and video streams + * are interrupted for more than five seconds during recording. */ RECORDER_REASON_NO_STREAM = 2, /** @@ -1613,7 +1981,8 @@ struct MediaRecorderConfiguration { */ const char* storagePath; /** - * The format of the recording file. See \ref agora::rtc::MediaRecorderContainerFormat "MediaRecorderContainerFormat". + * The format of the recording file. See \ref agora::rtc::MediaRecorderContainerFormat + * "MediaRecorderContainerFormat". */ MediaRecorderContainerFormat containerFormat; /** @@ -1631,23 +2000,70 @@ struct MediaRecorderConfiguration { * callback to report the updated recording information. */ int recorderInfoUpdateInterval; - - MediaRecorderConfiguration() : storagePath(NULL), containerFormat(FORMAT_MP4), streamType(STREAM_TYPE_BOTH), maxDurationMs(120000), recorderInfoUpdateInterval(0) {} - MediaRecorderConfiguration(const char* path, MediaRecorderContainerFormat format, MediaRecorderStreamType type, int duration, int interval) : storagePath(path), containerFormat(format), streamType(type), maxDurationMs(duration), recorderInfoUpdateInterval(interval) {} + /** + * The video width + */ + int width; + /** + * The video height + */ + int height; + /** + * The video fps + */ + int fps; + /** + * The audio sample rate + */ + int sample_rate; + /** + * The audio channel nums + */ + int channel_num; + /** + * The video source just for out channel recoder + */ + agora::rtc::VIDEO_SOURCE_TYPE videoSourceType; + + MediaRecorderConfiguration() + : storagePath(NULL), + containerFormat(FORMAT_MP4), + streamType(STREAM_TYPE_BOTH), + maxDurationMs(120000), + recorderInfoUpdateInterval(0), + width(1280), + height(720), + fps(30), + sample_rate(48000), + channel_num(1), + videoSourceType(rtc::VIDEO_SOURCE_CAMERA_PRIMARY) {} + MediaRecorderConfiguration(const char* path, MediaRecorderContainerFormat format, + MediaRecorderStreamType type, int duration, int interval) + : 
storagePath(path), + containerFormat(format), + streamType(type), + maxDurationMs(duration), + recorderInfoUpdateInterval(interval), + width(1280), + height(720), + fps(30), + sample_rate(48000), + channel_num(1), + videoSourceType(rtc::VIDEO_SOURCE_CAMERA_PRIMARY) {} }; class IFaceInfoObserver { -public: - /** - * Occurs when the face info is received. - * @param outFaceInfo The output face info. - * @return - * - true: The face info is valid. - * - false: The face info is invalid. + public: + /** + * Occurs when the face info is received. + * @param outFaceInfo The output face info. + * @return + * - true: The face info is valid. + * - false: The face info is invalid. */ - virtual bool onFaceInfo(const char* outFaceInfo) = 0; - - virtual ~IFaceInfoObserver() {} + virtual bool onFaceInfo(const char* outFaceInfo) = 0; + + virtual ~IFaceInfoObserver() {} }; /** @@ -1670,7 +2086,8 @@ struct RecorderInfo { unsigned int fileSize; RecorderInfo() : fileName(NULL), durationMs(0), fileSize(0) {} - RecorderInfo(const char* name, unsigned int dur, unsigned int size) : fileName(name), durationMs(dur), fileSize(size) {} + RecorderInfo(const char* name, unsigned int dur, unsigned int size) + : fileName(name), durationMs(dur), fileSize(size) {} }; class IMediaRecorderObserver { @@ -1680,30 +2097,35 @@ class IMediaRecorderObserver { * * @since v4.0.0 * - * When the local audio and video recording state changes, the SDK triggers this callback to report the current - * recording state and the reason for the change. + * When the local audio and video recording state changes, the SDK triggers this callback to + * report the current recording state and the reason for the change. * * @param channelId The channel name. * @param uid ID of the user. * @param state The current recording state. See \ref agora::media::RecorderState "RecorderState". - * @param reason The reason for the state change. See \ref agora::media::RecorderReasonCode "RecorderReasonCode". 
+ * @param reason The reason for the state change. See \ref agora::media::RecorderReasonCode + * "RecorderReasonCode". */ - virtual void onRecorderStateChanged(const char* channelId, rtc::uid_t uid, RecorderState state, RecorderReasonCode reason) = 0; + virtual void onRecorderStateChanged(const char* channelId, rtc::uid_t uid, RecorderState state, + RecorderReasonCode reason) = 0; /** * Occurs when the recording information is updated. * * @since v4.0.0 * - * After you successfully register this callback and enable the local audio and video recording, the SDK periodically triggers - * the `onRecorderInfoUpdated` callback based on the set value of `recorderInfoUpdateInterval`. This callback reports the - * filename, duration, and size of the current recording file. + * After you successfully register this callback and enable the local audio and video recording, + * the SDK periodically triggers the `onRecorderInfoUpdated` callback based on the set value of + * `recorderInfoUpdateInterval`. This callback reports the filename, duration, and size of the + * current recording file. * * @param channelId The channel name. * @param uid ID of the user. - * @param info Information about the recording file. See \ref agora::media::RecorderInfo "RecorderInfo". + * @param info Information about the recording file. See \ref agora::media::RecorderInfo + * "RecorderInfo". 
* */ - virtual void onRecorderInfoUpdated(const char* channelId, rtc::uid_t uid, const RecorderInfo& info) = 0; + virtual void onRecorderInfoUpdated(const char* channelId, rtc::uid_t uid, + const RecorderInfo& info) = 0; virtual ~IMediaRecorderObserver() {} }; diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraLog.h b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraLog.h index 2fae3aa13..20b6416ef 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraLog.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraLog.h @@ -37,6 +37,7 @@ OPTIONAL_ENUM_CLASS LOG_LEVEL { LOG_LEVEL_ERROR = 0x0004, LOG_LEVEL_FATAL = 0x0008, LOG_LEVEL_API_CALL = 0x0010, + LOG_LEVEL_DEBUG = 0x0020, }; /* diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaEngine.h b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaEngine.h index b3b92e9e4..44975bfe9 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaEngine.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaEngine.h @@ -141,6 +141,24 @@ class IMediaEngine { bool enabled, bool useTexture, EXTERNAL_VIDEO_SOURCE_TYPE sourceType = VIDEO_FRAME, rtc::SenderOptions encodedVideoOption = rtc::SenderOptions()) = 0; +#if defined(__ANDROID__) + /** + * Sets the remote eglContext. + * + * When the engine is destroyed, the SDK will automatically release the eglContext. + * + * @param eglContext. + * + * @note + * setExternalRemoteEglContext needs to be called before joining the channel. + * + * @return + * - 0: Success. + * - < 0: Failure. + */ + virtual int setExternalRemoteEglContext(void* eglContext) = 0; +#endif + /** * Sets the external audio source. 
* diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaPlayer.h b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaPlayer.h index bd3c7597c..25f48a4a2 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaPlayer.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaPlayer.h @@ -49,17 +49,6 @@ class IMediaPlayer : public RefCountInterface { */ virtual int open(const char* url, int64_t startPos) = 0; - /** - * @deprecated - * @brief Open media file or stream with custom soucrce. - * @param startPos Set the starting position for playback, in seconds - * @param observer dataProvider object - * @return - * - 0: Success. - * - < 0: Failure. - */ - virtual int openWithCustomSource(int64_t startPos, media::base::IMediaPlayerCustomDataProvider* provider) __deprecated = 0; - /** * @brief Open a media file with a media file source. * @param source Media file source that you want to play, see `MediaSource` diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaPlayerSource.h b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaPlayerSource.h index 00be02233..99da405bc 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaPlayerSource.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaPlayerSource.h @@ -42,17 +42,6 @@ class IMediaPlayerSource : public RefCountInterface { * - < 0: Failure. */ virtual int open(const char* url, int64_t startPos) = 0; - - /** - * @deprecated - * @brief Open media file or stream with custom soucrce. - * @param startPos Set the starting position for playback, in seconds - * @param observer dataProvider object - * @return - * - 0: Success. - * - < 0: Failure. - */ - virtual int openWithCustomSource(int64_t startPos, media::base::IMediaPlayerCustomDataProvider* provider) __deprecated = 0; /** * Opens a media file with a media file source. 
diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaRecorder.h b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaRecorder.h index 17375607c..79a8db35e 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaRecorder.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaRecorder.h @@ -7,7 +7,6 @@ #include "AgoraBase.h" #include "AgoraMediaBase.h" -#include "IAgoraRtcEngineEx.h" namespace agora { namespace rtc { diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraParameter.h b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraParameter.h index 08c51dd22..f50afe9b5 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraParameter.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraParameter.h @@ -39,8 +39,7 @@ * set the video encoder mode (hardware or software) */ #define KEY_RTC_VIDEO_ENABLED_HW_ENCODER "engine.video.enable_hw_encoder" -#define KEY_RTC_VIDEO_HARDWARE_ENCODEING "che.hardware_encoding" -#define KEY_RTC_VIDEO_H264_HWENC "che.video.h264.hwenc" +#define KEY_RTC_VIDEO_HARDWARE_ENCODEING "che.hardware_encoding" // deprecated, please use engine.video.enable_hw_encoder /** * set the hardware video encoder provider (nv for nvidia or qsv for intel) */ @@ -50,7 +49,7 @@ * set the video decoder mode (hardware or software) */ #define KEY_RTC_VIDEO_ENABLED_HW_DECODER "engine.video.enable_hw_decoder" -#define KEY_RTC_VIDEO_HARDWARE_DECODING "che.hardware_decoding" +#define KEY_RTC_VIDEO_HARDWARE_DECODING "che.hardware_decoding" // deprecated, please use engine.video.enable_hw_decoder /** * set the hardware video decoder provider (h264_cuvid(default) or h264_qsv) diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngine.h b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngine.h index 55bb71e08..70c87f818 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngine.h +++ 
b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngine.h @@ -97,12 +97,14 @@ enum AUDIO_MIXING_REASON_TYPE { AUDIO_MIXING_REASON_TOO_FREQUENT_CALL = 702, /** 703: The audio mixing file playback is interrupted. */ AUDIO_MIXING_REASON_INTERRUPTED_EOF = 703, - /** 715: The audio mixing file is played once. */ + /** 721: The audio mixing file is played once. */ AUDIO_MIXING_REASON_ONE_LOOP_COMPLETED = 721, - /** 716: The audio mixing file is all played out. */ + /** 723: The audio mixing file is all played out. */ AUDIO_MIXING_REASON_ALL_LOOPS_COMPLETED = 723, - /** 716: The audio mixing file stopped by user */ + /** 724: The audio mixing file stopped by user */ AUDIO_MIXING_REASON_STOPPED_BY_USER = 724, + /** 726: The audio mixing playback has resumed by user */ + AUDIO_MIXING_REASON_RESUMED_BY_USER = 726, /** 0: The SDK can open the audio mixing file. */ AUDIO_MIXING_REASON_OK = 0, }; @@ -230,19 +232,30 @@ enum AUDIO_REVERB_TYPE { }; enum STREAM_FALLBACK_OPTIONS { - /** 0: No fallback operation for the stream when the network - condition is poor. The stream quality cannot be guaranteed. */ - + /** 0: No fallback operation to a lower resolution stream when the network + condition is poor. Fallback to Scalable Video Coding (e.g. SVC) + is still possible, but the resolution remains in high stream. + The stream quality cannot be guaranteed. */ STREAM_FALLBACK_OPTION_DISABLED = 0, - /** 1: (Default) Under poor network conditions, the SDK will send or receive + /** 1: (Default) Under poor network conditions, the receiver SDK will receive agora::rtc::VIDEO_STREAM_LOW. You can only set this option in RtcEngineParameters::setRemoteSubscribeFallbackOption. Nothing happens when you set this in RtcEngineParameters::setLocalPublishFallbackOption. 
*/ STREAM_FALLBACK_OPTION_VIDEO_STREAM_LOW = 1, - /** 2: Under poor network conditions, the SDK may receive - agora::rtc::VIDEO_STREAM_LOW first, but if the network still does - not allow displaying the video, the SDK will send or receive audio only. */ + /** 2: Under poor network conditions, the SDK may receive agora::rtc::VIDEO_STREAM_LOW first, + then agora::rtc::VIDEO_STREAM_LAYER_1 to agora::rtc::VIDEO_STREAM_LAYER_6 if the related layer exists. + If the network still does not allow displaying the video, the SDK will receive audio only. */ STREAM_FALLBACK_OPTION_AUDIO_ONLY = 2, + /** 3~8: If the receiver SDK uses RtcEngineParameters::setRemoteSubscribeFallbackOptionļ¼Œit will receive + one of the streams from agora::rtc::VIDEO_STREAM_LAYER_1 to agora::rtc::VIDEO_STREAM_LAYER_6 + if the related layer exists when the network condition is poor. The lower bound of fallback depends on + the STREAM_FALLBACK_OPTION_VIDEO_STREAM_LAYER_X. */ + STREAM_FALLBACK_OPTION_VIDEO_STREAM_LAYER_1 = 3, + STREAM_FALLBACK_OPTION_VIDEO_STREAM_LAYER_2 = 4, + STREAM_FALLBACK_OPTION_VIDEO_STREAM_LAYER_3 = 5, + STREAM_FALLBACK_OPTION_VIDEO_STREAM_LAYER_4 = 6, + STREAM_FALLBACK_OPTION_VIDEO_STREAM_LAYER_5 = 7, + STREAM_FALLBACK_OPTION_VIDEO_STREAM_LAYER_6 = 8, }; enum PRIORITY_TYPE { @@ -341,6 +354,9 @@ struct LocalVideoStats * - hardware = 1. */ int hwEncoderAccelerating; + /** The dimensions of the simulcast streams's encoding frame. + */ + VideoDimensions simulcastDimensions[SimulcastConfig::STREAM_LAYER_COUNT_MAX]; }; /** @@ -502,6 +518,9 @@ struct RemoteVideoStats { * Bitrate (Kbps) received since the last count. */ int receivedBitrate; + /** The decoder input frame rate (fps) of the remote video. + */ + int decoderInputFrameRate; /** The decoder output frame rate (fps) of the remote video. */ int decoderOutputFrameRate; @@ -837,7 +856,7 @@ struct ScreenCaptureConfiguration { /** * (macOS only) The display ID of the screen. 
*/ - uint32_t displayId; + int64_t displayId; /** * (Windows only) The relative position of the shared screen to the virtual screen. * @note This parameter takes effect only when you want to capture the screen on Windows. @@ -847,7 +866,7 @@ struct ScreenCaptureConfiguration { * (For Windows and macOS only) The window ID. * @note This parameter takes effect only when you want to capture the window. */ - view_t windowId; + int64_t windowId; /** * (For Windows and macOS only) The screen capture configuration. For details, see ScreenCaptureParameters. */ @@ -927,7 +946,7 @@ struct ScreenCaptureSourceInfo { /** * The window ID for a window or the display ID for a screen. */ - view_t sourceId; + int64_t sourceId; /** * The name of the window or screen. UTF-8 encoding. */ @@ -970,11 +989,11 @@ struct ScreenCaptureSourceInfo { * ID to the display monitor that has the largest area of intersection with the window, Otherwise * the return value is -2. */ - view_t sourceDisplayId; - ScreenCaptureSourceInfo() : type(ScreenCaptureSourceType_Unknown), sourceId(nullptr), sourceName(nullptr), - processPath(nullptr), sourceTitle(nullptr), primaryMonitor(false), isOccluded(false), minimizeWindow(false), sourceDisplayId((view_t)-2) {} + int64_t sourceDisplayId; + ScreenCaptureSourceInfo() : type(ScreenCaptureSourceType_Unknown), sourceId(0), sourceName(nullptr), + processPath(nullptr), sourceTitle(nullptr), primaryMonitor(false), isOccluded(false), minimizeWindow(false), sourceDisplayId(-2) {} #else - ScreenCaptureSourceInfo() : type(ScreenCaptureSourceType_Unknown), sourceId(nullptr), sourceName(nullptr), processPath(nullptr), sourceTitle(nullptr), primaryMonitor(false), isOccluded(false) {} + ScreenCaptureSourceInfo() : type(ScreenCaptureSourceType_Unknown), sourceId(0), sourceName(nullptr), processPath(nullptr), sourceTitle(nullptr), primaryMonitor(false), isOccluded(false) {} #endif }; /** @@ -1256,6 +1275,12 @@ struct ChannelMediaOptions { */ Optional isAudioFilterable; + /** 
Provides the technical preview functionalities or special customizations by configuring the SDK with JSON options. + Pointer to the set parameters in a JSON string. + * @technical preview + */ + Optional parameters; + ChannelMediaOptions() {} ~ChannelMediaOptions() {} @@ -1301,6 +1326,7 @@ struct ChannelMediaOptions { SET_FROM(customVideoTrackId); SET_FROM(isAudioFilterable); SET_FROM(isInteractiveAudience); + SET_FROM(parameters); #undef SET_FROM } @@ -1349,6 +1375,7 @@ struct ChannelMediaOptions { ADD_COMPARE(customVideoTrackId); ADD_COMPARE(isAudioFilterable); ADD_COMPARE(isInteractiveAudience); + ADD_COMPARE(parameters); END_COMPARE(); #undef BEGIN_COMPARE @@ -1400,6 +1427,7 @@ struct ChannelMediaOptions { REPLACE_BY(customVideoTrackId); REPLACE_BY(isAudioFilterable); REPLACE_BY(isInteractiveAudience); + REPLACE_BY(parameters); #undef REPLACE_BY } return *this; @@ -2864,64 +2892,56 @@ class IRtcEngineEventHandler { (void)metadata; (void)length; } - + /** * The event callback of the extension. * * To listen for events while the extension is running, you need to register this callback. - * - * @param provider The name of the extension provider. - * @param extension The name of the extension. + * + * @param context The context of the extension. * @param key The key of the extension. * @param value The value of the extension key. */ - virtual void onExtensionEvent(const char* provider, const char* extension, const char* key, const char* value) { - (void)provider; - (void)extension; + virtual void onExtensionEventWithContext(const ExtensionContext &context, const char* key, const char* value) { + (void)context; (void)key; (void)value; } /** * Occurs when the extension is enabled. - * - * After a successful call of `enableExtension(true)`, the extension triggers this callback. - * - * @param provider The name of the extension provider. - * @param extension The name of the extension. + * + * After a successful creation of filter , the extension triggers this callback. 
+ * + * @param context The context of the extension. */ - virtual void onExtensionStarted(const char* provider, const char* extension) { - (void)provider; - (void)extension; + virtual void onExtensionStartedWithContext(const ExtensionContext &context) { + (void)context; } /** * Occurs when the extension is disabled. - * - * After a successful call of `enableExtension(false)`, the extension triggers this callback. - * - * @param provider The name of the extension provider. - * @param extension The name of the extension. + * + * After a successful destroy filter, the extension triggers this callback. + * + * @param context The context of the extension. */ - virtual void onExtensionStopped(const char* provider, const char* extension) { - (void)provider; - (void)extension; + virtual void onExtensionStoppedWithContext(const ExtensionContext &context) { + (void)context; } /** * Occurs when the extension runs incorrectly. - * - * When calling `enableExtension(true)` fails or the extension runs in error, the extension triggers + * + * When the extension runs in error, the extension triggers * this callback and reports the error code and reason. * - * @param provider The name of the extension provider. - * @param extension The name of the extension. + * @param context The context of the extension. * @param error The error code. For details, see the extension documentation provided by the extension provider. * @param message The error message. For details, see the extension documentation provided by the extension provider. */ - virtual void onExtensionError(const char* provider, const char* extension, int error, const char* message) { - (void)provider; - (void)extension; + virtual void onExtensionErrorWithContext(const ExtensionContext &context, int error, const char* message) { + (void)context; (void)error; (void)message; } @@ -3224,23 +3244,26 @@ class IMetadataObserver { */ struct Metadata { - /** The User ID that sent the metadata. 
- * - For the receiver: The user ID of the user who sent the `metadata`. - * - For the sender: Ignore this value. - */ - unsigned int uid; - /** The buffer size of the sent or received `metadata`. - */ - unsigned int size; - /** The buffer address of the sent or received `metadata`. - */ - unsigned char* buffer; - /** The timestamp (ms) of the `metadata`. - * - */ - long long timeStampMs; - - Metadata() : uid(0), size(0), buffer(NULL), timeStampMs(0) {} + /** The channel ID of the `metadata`. + */ + const char* channelId; + /** The User ID that sent the metadata. + * - For the receiver: The user ID of the user who sent the `metadata`. + * - For the sender: Ignore this value. + */ + unsigned int uid; + /** The buffer size of the sent or received `metadata`. + */ + unsigned int size; + /** The buffer address of the sent or received `metadata`. + */ + unsigned char *buffer; + /** The NTP timestamp (ms) when the metadata is sent. + * @note If the receiver is audience, the receiver cannot get the NTP timestamp (ms). + */ + long long timeStampMs; + + Metadata() : channelId(NULL), uid(0), size(0), buffer(NULL), timeStampMs(0) {} }; /** Occurs when the SDK requests the maximum size of the metadata. @@ -3914,6 +3937,7 @@ class IRtcEngine : public agora::base::IEngineBase { * @return * - 0: Success. * - < 0: Failure. + * - -8(ERR_INVALID_STATE): The current status is invalid, only allowed to be called when the connection is disconnected. */ virtual int setChannelProfile(CHANNEL_PROFILE_TYPE profile) = 0; @@ -3962,48 +3986,10 @@ class IRtcEngine : public agora::base::IEngineBase { * - -1(ERR_FAILED): A general error occurs (no specified reason). * - -2(ERR_INALID_ARGUMENT): The parameter is invalid. * - -7(ERR_NOT_INITIALIZED): The SDK is not initialized. + * - -8(ERR_INVALID_STATE): The channel profile is not `LIVE_BROADCASTING`. */ virtual int setClientRole(CLIENT_ROLE_TYPE role, const ClientRoleOptions& options) = 0; - /** Starts an audio call test. 
- - This method launches an audio call test to determine whether the audio devices - (for example, headset and speaker) and the network connection are working - properly. - - In the test, the user first speaks, and the recording is played back - in 10 seconds. If the user can hear the recording in 10 seconds, it indicates - that the audio devices and network connection work properly. - - @note - After calling the startEchoTest() method, always call stopEchoTest() to end - the test. Otherwise, the app cannot run the next echo test, nor can - it call the joinChannel() method. - - @return - - 0: Success. - - < 0: Failure. - */ - virtual int startEchoTest() = 0; - - /** Starts an audio call test. - - This method starts an audio call test to determine whether the audio devices (for example, headset and speaker) and the network connection are working properly. - - In the audio call test, you record your voice. If the recording plays back within the set time interval, the audio devices and the network connection are working properly. - - @note - - Call this method before joining a channel. - - After calling this method, call the \ref IRtcEngine::stopEchoTest "stopEchoTest" method to end the test. Otherwise, the app cannot run the next echo test, or call the \ref IRtcEngine::joinChannel "joinChannel" method. - - In the `LIVE_BROADCASTING` profile, only a host can call this method. - @param intervalInSeconds The time interval (s) between when you speak and when the recording plays back. - - @return - - 0: Success. - - < 0: Failure. - */ - virtual int startEchoTest(int intervalInSeconds) = 0; - /** Starts a video call test. * * @param config: configuration for video call test. @@ -4168,6 +4154,67 @@ class IRtcEngine : public agora::base::IEngineBase { * @param options Sets the image enhancement option. See BeautyOptions. 
*/ virtual int setBeautyEffectOptions(bool enabled, const BeautyOptions& options, agora::media::MEDIA_SOURCE_TYPE type = agora::media::PRIMARY_CAMERA_SOURCE) = 0; + /** Enables/Disables face shape and sets the beauty options. + * + * @note Call this method after calling the \ref IRtcEngine::enableVideo "enableVideo" method. + * + * @param enabled Sets whether or not to enable face shape: + * - true: enables face shape. + * - false: disables face shape. + * @param options Sets the face shape beauty option. See FaceShapeBeautyOptions. + */ + virtual int setFaceShapeBeautyOptions(bool enabled, const FaceShapeBeautyOptions& options, agora::media::MEDIA_SOURCE_TYPE type = agora::media::PRIMARY_CAMERA_SOURCE) = 0; + /** Enables/Disables face shape and sets the area options. + * + * @note Call this method after calling the \ref IRtcEngine::setFaceShapeBeautyOptions "setFaceShapeBeautyOptions" method. + * + * @param options Sets the face shape area option. See FaceShapeAreaOptions. + */ + virtual int setFaceShapeAreaOptions(const FaceShapeAreaOptions& options, agora::media::MEDIA_SOURCE_TYPE type = agora::media::PRIMARY_CAMERA_SOURCE) = 0; + + /** Gets the face shape beauty options. + * + * @note Call this method after calling the \ref IRtcEngine::enableVideo "enableVideo" method. + * + * @param options Gets the face shape beauty option. See FaceShapeBeautyOptions. + */ + virtual int getFaceShapeBeautyOptions(FaceShapeBeautyOptions& options, agora::media::MEDIA_SOURCE_TYPE type = agora::media::PRIMARY_CAMERA_SOURCE) = 0; + + /** Gets the face shape area options. + * + * @note Call this method after calling the \ref IRtcEngine::enableVideo "enableVideo" method. + * + * @param shapeArea The face area. See FaceShapeAreaOptions::FACE_SHAPE_AREA. + * @param options Gets the face area beauty option. See FaceShapeAreaOptions. 
+ */ + virtual int getFaceShapeAreaOptions(agora::rtc::FaceShapeAreaOptions::FACE_SHAPE_AREA shapeArea, FaceShapeAreaOptions& options, agora::media::MEDIA_SOURCE_TYPE type = agora::media::PRIMARY_CAMERA_SOURCE) = 0; + + /** + * Sets filter effect options. + * + * @since v4.4.1 + * You can call this method to enable the filter effect feature and set the options of the filter effect. + * + * @note + * - Before calling this method, ensure that you have integrated the following dynamic library into your project: + * - Android: `libagora_clear_vision_extension.so` + * - iOS/macOS: `AgoraClearVisionExtension.xcframework` + * - Windows: `libagora_clear_vision_extension.dll` + * - Call this method after calling the \ref IRtcEngine::enableVideo "enableVideo" method. + * - You can call this method either before or after joining a channel. + * - The filter effect feature has specific performance requirements for devices. If your device overheats after enabling the filter effect, Agora recommends disabling it entirely. + * + * @param enabled. Whether to enable filter effect: + * - `true`: Enable. + * - `false`: (Default) Disable. + * @param options. Set the filter effect options. See FilterEffectOptions. + * + * @return + * - 0: Success. + * - < 0: Failure. + */ + virtual int setFilterEffectOptions(bool enabled, const FilterEffectOptions& options, agora::media::MEDIA_SOURCE_TYPE type = agora::media::PRIMARY_CAMERA_SOURCE) = 0; + /** * Sets low-light enhancement. 
* @@ -4179,9 +4226,9 @@ class IRtcEngine : public agora::base::IEngineBase { * * @note * - Before calling this method, ensure that you have integrated the following dynamic library into your project: - * - Android: `libagora_segmentation_extension.so` - * - iOS/macOS: `AgoraVideoSegmentationExtension.xcframework` - * - Windows: `libagora_segmentation_extension.dll` + * - Android: `libagora_clear_vision_extension.so` + * - iOS/macOS: `AgoraClearVisionExtension.xcframework` + * - Windows: `libagora_clear_vision_extension.dll` * - Call this method after \ref IRtcEngine::enableVideo "enableVideo". * - The low-light enhancement feature has certain performance requirements on devices. If your device overheats after you enable low-light enhancement, Agora recommends modifying the low-light enhancement options to a less performance-consuming level or disabling low-light enhancement entirely. * @@ -4206,9 +4253,9 @@ class IRtcEngine : public agora::base::IEngineBase { * * @note * - Before calling this method, ensure that you have integrated the following dynamic library into your project: - * - Android: `libagora_segmentation_extension.so` - * - iOS/macOS: `AgoraVideoSegmentationExtension.xcframework` - * - Windows: `libagora_segmentation_extension.dll` + * - Android: `libagora_clear_vision_extension.so` + * - iOS/macOS: `AgoraClearVisionExtension.xcframework` + * - Windows: `libagora_clear_vision_extension.dll` * - Call this method after \ref IRtcEngine::enableVideo "enableVideo". * - The video noise reduction feature has certain performance requirements on devices. If your device overheats after you enable video noise reduction, Agora recommends modifying the video noise reduction options to a less performance-consuming level or disabling video noise reduction entirely. 
* @@ -4233,9 +4280,9 @@ class IRtcEngine : public agora::base::IEngineBase { * * @note * - Before calling this method, ensure that you have integrated the following dynamic library into your project: - * - Android: `libagora_segmentation_extension.so` - * - iOS/macOS: `AgoraVideoSegmentationExtension.xcframework` - * - Windows: `libagora_segmentation_extension.dll` + * - Android: `libagora_clear_vision_extension.so` + * - iOS/macOS: `AgoraClearVisionExtension.xcframework` + * - Windows: `libagora_clear_vision_extension.dll` * - Call this method after \ref IRtcEngine::enableVideo "enableVideo". * - The color enhancement feature has certain performance requirements on devices. If your device overheats after you enable color enhancement, Agora recommends modifying the color enhancement options to a less performance-consuming level or disabling color enhancement entirely. * @@ -4536,29 +4583,6 @@ class IRtcEngine : public agora::base::IEngineBase { */ virtual int muteAllRemoteAudioStreams(bool mute) = 0; - /** - * Determines whether to receive all remote audio streams by default. - * - * @deprecated This method is deprecated. To set whether to receive remote - * audio streams by default, call - * \ref IRtcEngine::muteAllRemoteAudioStreams "muteAllRemoteAudioStreams" - * before calling `joinChannel` - * - * Use this method to set whether to receive audio streams of subsequent peer - * users. Agora recommends calling it before joining a channel. - * - * A successful call of setDefaultMuteAllRemoteAudioStreams(true) results in - * that the local user not receiving any audio stream after joining a channel. - * @param mute Whether to receive remote audio streams by default: - * - true: Do not receive any remote audio stream by default. - * - false: (Default) Receive remote audio streams by default. - * - * @return int - * - 0: Success. - * - < 0: Failure. 
- */ - virtual int setDefaultMuteAllRemoteAudioStreams(bool mute) __deprecated = 0; - /** * Stops or resumes receiving the audio stream of a specified user. * @@ -4657,29 +4681,6 @@ class IRtcEngine : public agora::base::IEngineBase { */ virtual int muteAllRemoteVideoStreams(bool mute) = 0; - /** - Determines whether to receive all remote video streams by default. - - @deprecated This method is deprecated. To set whether to receive remote - video streams by default, call - \ref IRtcEngine::muteAllRemoteVideoStreams "muteAllRemoteVideoStreams" - before calling `joinChannel`. - - Use this method to set whether to receive video streams of subsequent peer - users. Agora recommends calling it before joining a channel. - - A successful call of setDefaultMuteAllRemoteVideoStreams(true) results in - that the local user not receiving any video stream after joining a channel. - - @param mute Whether to receive remote video streams by default: - - true: Do not receive any remote video stream by default. - - false: (Default) Receive remote video streams by default. - @return int - - 0: Success. - - < 0: Failure. - */ - virtual int setDefaultMuteAllRemoteVideoStreams(bool mute) __deprecated = 0; - /** * Sets the default stream type of the remote video if the remote user has enabled dual-stream. * @@ -5870,6 +5871,19 @@ class IRtcEngine : public agora::base::IEngineBase { */ virtual int setHeadphoneEQParameters(int lowGain, int highGain) = 0; + /** Enables or disables the voice AI tuner. + * + * @param enabled Determines whether to enable the voice AI tuner: + * - true: Enable the voice AI tuner + * - false: (default) Disable the voice AI tuner. + * + * @param type. The options for SDK voice AI tuner types. See #VOICE_AI_TUNER_TYPE. + * @return + * - 0: Success. + * - < 0: Failure. + */ + virtual int enableVoiceAITuner(bool enabled, VOICE_AI_TUNER_TYPE type) = 0; + /** **DEPRECATED** Specifies an SDK output log file. 
* * The log file records all log data for the SDKā€™s operation. Ensure that the @@ -6014,7 +6028,26 @@ class IRtcEngine : public agora::base::IEngineBase { */ virtual int setRemoteRenderMode(uid_t uid, media::base::RENDER_MODE_TYPE renderMode, VIDEO_MIRROR_MODE_TYPE mirrorMode) = 0; - + /** + * Sets the target frames per second (FPS) for the local render target. + * + * @param sourceType The type of video source. + * @param targetFps The target frames per second to be set. + * + * @return + * - 0: Success. + * - < 0: Failure. + */ + virtual int setLocalRenderTargetFps(VIDEO_SOURCE_TYPE sourceType, int targetFps) = 0; + /** + * Sets the target frames per second (FPS) for the remote render target. + * + * @param targetFps The target frames per second to be set for the remote render target. + * @return + * - 0: Success. + * - < 0: Failure. + */ + virtual int setRemoteRenderTargetFps(int targetFps) = 0; // The following APIs are either deprecated and going to deleted. /** @@ -6099,6 +6132,23 @@ class IRtcEngine : public agora::base::IEngineBase { */ virtual int setDualStreamMode(SIMULCAST_STREAM_MODE mode) = 0; + /** + * Sets the multi-layer video stream configuration. + * + * If multi-layer is configured, the subscriber can choose to receive the coresponding layer + * of video stream using {@link setRemoteVideoStreamType setRemoteVideoStreamType}. + * + * @param simulcastConfig + * - The configuration for multi-layer video stream. It includes seven layers, ranging from + * STREAM_LAYER_1 to STREAM_LOW. A maximum of 3 layers can be enabled simultaneously. + * + * @return + * - 0: Success. + * - < 0: Failure. + * @technical preview + */ + virtual int setSimulcastConfig(const SimulcastConfig& simulcastConfig) = 0; + /** * Enables, disables or auto enable the dual video stream mode. * @@ -7018,7 +7068,7 @@ class IRtcEngine : public agora::base::IEngineBase { - ERR_INVALID_ARGUMENT (2): The argument is invalid. 
- ERR_NOT_INITIALIZED (7): You have not initialized IRtcEngine when try to start screen capture. */ - virtual int startScreenCaptureByDisplayId(uint32_t displayId, const Rectangle& regionRect, + virtual int startScreenCaptureByDisplayId(int64_t displayId, const Rectangle& regionRect, const ScreenCaptureParameters& captureParams) = 0; #endif // __APPLE__ && TARGET_OS_MAC && !TARGET_OS_IPHONE @@ -7081,7 +7131,7 @@ class IRtcEngine : public agora::base::IEngineBase { * - ERR_INVALID_ARGUMENT (2): The argument is invalid. * - ERR_NOT_INITIALIZED (7): You have not initialized IRtcEngine when try to start screen capture. */ - virtual int startScreenCaptureByWindowId(view_t windowId, const Rectangle& regionRect, + virtual int startScreenCaptureByWindowId(int64_t windowId, const Rectangle& regionRect, const ScreenCaptureParameters& captureParams) = 0; /** @@ -7183,6 +7233,26 @@ class IRtcEngine : public agora::base::IEngineBase { * - < 0: Failure.. */ virtual int queryCameraFocalLengthCapability(agora::rtc::FocalLengthInfo* focalLengthInfos, int& size) = 0; + +#if defined(__ANDROID__) + /** + * Sets screen sharing using the Android native class MediaProjection. + * + * When screen capture is stopped, the SDK will automatically release the MediaProjection internally. + * + * @param mediaProjection MediaProjection is an Android class that provides access to screen capture and recording capabilities. + * + * @note + * Additional MediaProjection is primarily used for specific scenarios, + * such as IOT custom devices or subprocess screen sharing. + * + * @return + * - 0: Success. + * - < 0: Failure.
+ * @technical preview + */ + virtual int setExternalMediaProjection(void* mediaProjection) = 0; +#endif #endif #if defined(_WIN32) || defined(__APPLE__) || defined(__ANDROID__) @@ -7343,6 +7413,40 @@ class IRtcEngine : public agora::base::IEngineBase { virtual int stopRtmpStream(const char* url) = 0; virtual int stopLocalVideoTranscoder() = 0; + + /** + * Starts the local audio with a mixed audio stream. + * @param config Sets the mixed audio stream source settings. + * @return + * - 0: Success. + * - < 0: Failure. + * - #ERR_NOT_INITIALIZED (7): You have not initialized the RTC engine when publishing the + * stream. + */ + virtual int startLocalAudioMixer(const LocalAudioMixerConfiguration& config) = 0; + + /** + * Update the source stream settings for the mixed audio stream. + * @param config Update the source audio stream settings. See + * @return + * - 0: Success. + * - < 0: Failure. + * - #ERR_NOT_INITIALIZED (7): You have not initialized the RTC engine when publishing the + stream. + */ + virtual int updateLocalAudioMixerConfiguration(const LocalAudioMixerConfiguration& config) = 0; + + /** + * Stops a mixed audio track. + * + * @return + * - 0: Success. + * - < 0: Failure. + * - #ERR_NOT_INITIALIZED (7): You have not initialized the RTC engine when publishing the + * stream. + */ + virtual int stopLocalAudioMixer() = 0; + /** * Starts video capture with a camera. * @@ -7437,47 +7541,6 @@ class IRtcEngine : public agora::base::IEngineBase { */ virtual int registerPacketObserver(IPacketObserver* observer) = 0; - /** - * Sets the built-in encryption mode. - * - * @deprecated This method is deprecated. Use enableEncryption(bool enabled, const EncryptionConfig&) instead. - * - * The Agora Native SDK supports built-in encryption. - * Call this API to set the encryption mode. - * - * All users in the same channel must use the same encryption mode and password. 
- * Refer to information related to the encryption algorithm on the differences - * between encryption modes. - * - * @note - * Call \ref setEncryptionSecret "setEncryptionSecret" to enable the built-in encryption function - * before calling this API. - * @param encryptionMode Encryption mode: - * - "sm4-128-ecb": 128-bit SM4 encryption, ECB mode. - * - * @return - * - 0: Success. - * - < 0: Failure. - */ - virtual int setEncryptionMode(const char* encryptionMode) __deprecated = 0; - - /** - * Enables built-in encryption. - * - * @deprecated This method is deprecated. Use enableEncryption(bool enabled, const EncryptionConfig&) instead. - * - * Use this method to specify an encryption password to enable built-in - * encryption before joining a channel. All users in a channel must set the same - * encryption password. The encryption password is automatically cleared once a - * user has left the channel. If the encryption password is not specified or set to - * empty, the encryption function will be disabled. - * - * @param secret The encryption password. - * @return - * - 0: Success. - * - < 0: Failure. - */ - virtual int setEncryptionSecret(const char* secret) __deprecated = 0; /** Enables/Disables the built-in encryption. * @@ -8144,6 +8207,32 @@ class IRtcEngine : public agora::base::IEngineBase { */ virtual int takeSnapshot(uid_t uid, const char* filePath) = 0; + /** + * Takes a snapshot of a video stream. + * + * This method takes a snapshot of a video stream from the specified user, generates a JPG + * image, and saves it to the specified path. + * + * The method is asynchronous, and the SDK has not taken the snapshot when the method call + * returns. After a successful method call, the SDK triggers the `onSnapshotTaken` callback + * to report whether the snapshot is successfully taken, as well as the details for that + * snapshot. + * + * @note + * - Call this method after joining a channel. 
+ * - This method takes a snapshot of the published video stream specified in `ChannelMediaOptions`. + * + * @param uid The user ID. Set uid as 0 if you want to take a snapshot of the local user's video. + * @param config The configuration for taking the snapshot. See SnapshotConfig. + * + * Ensure that the path you specify exists and is writable. + * @return + * - 0: Success. + * - < 0: Failure. + * - -4: Incorrect observation position. Modify the input observation position according to the requirements specified in SnapshotConfig. + */ + virtual int takeSnapshot(uid_t uid, const media::SnapshotConfig& config) = 0; + /** Enables the content inspect. @param enabled Whether to enable content inspect: - `true`: Yes. @@ -8370,6 +8459,17 @@ class IRtcEngine : public agora::base::IEngineBase { * @technical preview */ virtual int sendAudioMetadata(const char* metadata, size_t length) = 0; + + /** + * @brief Queries the HDR capability of the video module + * @param videoModule The video module. See VIDEO_MODULE_TYPE + * @param capability HDR capability of video module. See HDR_CAPABILITY + * @return + * - 0: success + * - <0: failure + * @technical preview + */ + virtual int queryHDRCapability(VIDEO_MODULE_TYPE videoModule, HDR_CAPABILITY& capability) = 0; }; // The following types are either deprecated or not implmented yet. @@ -8393,6 +8493,11 @@ enum MEDIA_DEVICE_STATE_TYPE { /** 2: The device is disabled. */ MEDIA_DEVICE_STATE_DISABLED = 2, + + /** 3: The device is plugged in. + */ + MEDIA_DEVICE_STATE_PLUGGED_IN = 3, + /** 4: The device is not present.
*/ MEDIA_DEVICE_STATE_NOT_PRESENT = 4, diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngineEx.h b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngineEx.h index 8d21e8f97..bd0e816df 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngineEx.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngineEx.h @@ -318,40 +318,6 @@ class IRtcEngineEventHandlerEx : public IRtcEngineEventHandler { (void)rotation; } - /** Occurs when the local video stream state changes. - * - * When the state of the local video stream changes (including the state of the video capture and - * encoding), the SDK triggers this callback to report the current state. This callback indicates - * the state of the local video stream, including camera capturing and video encoding, and allows - * you to troubleshoot issues when exceptions occur. - * - * The SDK triggers the onLocalVideoStateChanged callback with the state code of `LOCAL_VIDEO_STREAM_STATE_FAILED` - * and error code of `LOCAL_VIDEO_STREAM_REASON_CAPTURE_FAILURE` in the following situations: - * - The app switches to the background, and the system gets the camera resource. - * - The camera starts normally, but does not output video for four consecutive seconds. - * - * When the camera outputs the captured video frames, if the video frames are the same for 15 - * consecutive frames, the SDK triggers the `onLocalVideoStateChanged` callback with the state code - * of `LOCAL_VIDEO_STREAM_STATE_CAPTURING` and error code of `LOCAL_VIDEO_STREAM_REASON_CAPTURE_FAILURE`. - * Note that the video frame duplication detection is only available for video frames with a resolution - * greater than 200 Ɨ 200, a frame rate greater than or equal to 10 fps, and a bitrate less than 20 Kbps. 
- * - * @note For some device models, the SDK does not trigger this callback when the state of the local - * video changes while the local video capturing device is in use, so you have to make your own - * timeout judgment. - * - * @param connection The RtcConnection object. - * @param state The state of the local video. See #LOCAL_VIDEO_STREAM_STATE. - * @param reason The detailed error information. See #LOCAL_VIDEO_STREAM_REASON. - */ - virtual void onLocalVideoStateChanged(const RtcConnection& connection, - LOCAL_VIDEO_STREAM_STATE state, - LOCAL_VIDEO_STREAM_REASON reason) { - (void)connection; - (void)state; - (void)reason; - } - /** * Occurs when the remote video state changes. * @@ -1161,6 +1127,55 @@ class IRtcEngineEx : public IRtcEngine { */ virtual int leaveChannelEx(const RtcConnection& connection, const LeaveChannelOptions& options) = 0; + /** + * Leaves a channel with the channel ID and user account. + * + * This method allows a user to leave the channel, for example, by hanging up or exiting a call. + * + * This method is an asynchronous call, which means that the result of this method returns even before + * the user has not actually left the channel. Once the user successfully leaves the channel, the + * SDK triggers the \ref IRtcEngineEventHandler::onLeaveChannel "onLeaveChannel" callback. + * + * @param channelId The channel name. The maximum length of this parameter is 64 bytes. Supported character scopes are: + * - All lowercase English letters: a to z. + * - All uppercase English letters: A to Z. + * - All numeric characters: 0 to 9. + * - The space character. + * - Punctuation characters and other symbols, including: "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", " {", "}", "|", "~", ",". + * @param userAccount The user account. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as null. 
Supported character scopes are: + * - All lowercase English letters: a to z. + * - All uppercase English letters: A to Z. + * - All numeric characters: 0 to 9. + * - The space character. + * - Punctuation characters and other symbols, including: "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", " {", "}", "|", "~", ",". + * @return + * - 0: Success. + * - < 0: Failure. + */ + virtual int leaveChannelWithUserAccountEx(const char* channelId, const char* userAccount) = 0; + + /** + * Leaves a channel with the channel ID and user account and sets the options for leaving. + * + * @param channelId The channel name. The maximum length of this parameter is 64 bytes. Supported character scopes are: + * - All lowercase English letters: a to z. + * - All uppercase English letters: A to Z. + * - All numeric characters: 0 to 9. + * - The space character. + * - Punctuation characters and other symbols, including: "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", " {", "}", "|", "~", ",". + * @param userAccount The user account. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as null. Supported character scopes are: + * - All lowercase English letters: a to z. + * - All uppercase English letters: A to Z. + * - All numeric characters: 0 to 9. + * - The space character. + * - Punctuation characters and other symbols, including: "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", " {", "}", "|", "~", ",". + * @param options The options for leaving the channel. See #LeaveChannelOptions. + * @return int + * - 0: Success. + * - < 0: Failure. + */ + virtual int leaveChannelWithUserAccountEx(const char* channelId, const char* userAccount, const LeaveChannelOptions& options) = 0; + /** * Updates the channel media options after joining the channel. 
* @@ -1879,6 +1894,24 @@ class IRtcEngineEx : public IRtcEngine { virtual int setDualStreamModeEx(SIMULCAST_STREAM_MODE mode, const SimulcastStreamConfig& streamConfig, const RtcConnection& connection) = 0; + + /** + * Set the multi-layer video stream configuration. + * + * If multi-layer is configured, the subscriber can choose to receive the corresponding layer + * of video stream using {@link setRemoteVideoStreamType setRemoteVideoStreamType}. + * + * @param simulcastConfig + * - The configuration for multi-layer video stream. It includes seven layers, ranging from + * STREAM_LAYER_1 to STREAM_LOW. A maximum of 3 layers can be enabled simultaneously. + * @param connection The RtcConnection object. + * @return + * - 0: Success. + * - < 0: Failure. + * @technical preview + */ + virtual int setSimulcastConfigEx(const SimulcastConfig& simulcastConfig, + const RtcConnection& connection) = 0; /** * Set the high priority user list and their fallback level in weak network condition. @@ -1931,6 +1964,33 @@ class IRtcEngineEx : public IRtcEngine { */ virtual int takeSnapshotEx(const RtcConnection& connection, uid_t uid, const char* filePath) = 0; + /** + * Takes a snapshot of a video stream. + * + * This method takes a snapshot of a video stream from the specified user, generates a JPG + * image, and saves it to the specified path. + * + * The method is asynchronous, and the SDK has not taken the snapshot when the method call + * returns. After a successful method call, the SDK triggers the `onSnapshotTaken` callback + * to report whether the snapshot is successfully taken, as well as the details for that + * snapshot. + * + * @note + * - Call this method after joining a channel. + * - This method takes a snapshot of the published video stream specified in `ChannelMediaOptions`. + * + * @param connection The RtcConnection object. + * @param uid The user ID. Set uid as 0 if you want to take a snapshot of the local user's video.
+ * @param config The configuration for the take snapshot. See SnapshotConfig. + * + * Ensure that the path you specify exists and is writable. + * @return + * - 0 : Success. + * - < 0: Failure. + * - -4: Incorrect observation position. Modify the input observation position according to the reqiurements specified in SnapshotConfig. + */ + virtual int takeSnapshotEx(const RtcConnection& connection, uid_t uid, const media::SnapshotConfig& config) = 0; + /** Enables video screenshot and upload with the connection ID. @param enabled Whether to enable video screenshot and upload: - `true`: Yes. diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/bridge.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/bridge.h new file mode 100644 index 000000000..675c4f5a1 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/bridge.h @@ -0,0 +1,25 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include "handle.h" +#include "common.h" +#include "./stream/stream.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +AGORA_RTE_API_C Rte RteGetFromBridge(RteError *err); + +AGORA_RTE_API_C void RteChannelAndStreamGetFromBridge( + Rte *rte, const char *channel_name, int uid, RteChannel *channel, + RteStream *stream, RteError *error); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/c_error.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/c_error.h new file mode 100644 index 000000000..686c6603d --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/c_error.h @@ -0,0 +1,48 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include + +#include "common.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteString RteString; + +typedef enum RteErrorCode { + kRteOk, + kRteErrorDefault, + kRteErrorInvalidArgument, + kRteErrorInvalidOperation, + kRteErrorNetworkError, + kRteErrorAuthenticationFailed, + kRteErrorStreamNotFound, +} RteErrorCode; + +typedef struct RteError { + RteErrorCode code; + RteString *message; +} RteError; + +AGORA_RTE_API_C RteError *RteErrorCreate(); +AGORA_RTE_API_C bool RteErrorDestroy(RteError *err); + +AGORA_RTE_API_C bool RteErrorInit(RteError *err); +AGORA_RTE_API_C bool RteErrorDeinit(RteError *err); + +AGORA_RTE_API_C bool RteErrorCopy(RteError *dest, RteError *src); + +AGORA_RTE_API_C bool RteErrorSet(RteError *err, RteErrorCode code, + const char *fmt, ...); +AGORA_RTE_API_C bool RteErrorOccurred(RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/c_player.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/c_player.h new file mode 100644 index 000000000..b87b88af0 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/c_player.h @@ -0,0 +1,615 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include + +#include "handle.h" +#include "observer.h" +#include "utils/string.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct Rte Rte; +typedef struct RteStream RteStream; +typedef struct RtePlayerInternal RtePlayerInternal; + +/** + * Player states. + * When the player state changes, the state will be notified through the PlayerObserver::onStateChanged callback interface. + * @since v4.4.0 + */ +typedef enum RtePlayerState { + /** + * 0: Idle state. + */ + kRtePlayerStateIdle, + /** + * 1: Opening state. This state is notified after calling rte::Player::OpenWithUrl(). 
+ */ + kRtePlayerStateOpening, + /** + * 2: Open completed state. This state is notified after successfully calling rte::Player::OpenWithUrl(). + */ + kRtePlayerStateOpenCompleted, + /** + * 3: Playing state. This state is notified when audience members successfully subscribe to the broadcaster after opening an RTE URL. + */ + kRtePlayerStatePlaying, + /** + * 4: Paused state. This state is notified when playback is paused. + */ + kRtePlayerStatePaused, + /** + * 5: Playback completed state. This state is notified when the broadcaster stops streaming and leaves the live streaming room after playing the rte URL. + */ + kRtePlayerStatePlaybackCompleted, + /** + * 6: Stopped state. This state is entered after the user calls Player::stop. + */ + kRtePlayerStateStopped, + /** + * 7: Failed state. This state is entered when an internal error occurs. + */ + kRtePlayerStateFailed +} RtePlayerState; + +/** + * Player events. + * When an event occurs, it will be notified through the PlayerObserver::onEvent callback interface. + * @since v4.4.0 + */ +typedef enum RtePlayerEvent { + /** + * 0: Start seeking to a specified position for playback. + */ + kRtePlayerEventSeekBegin, + /** + * 1: Seeking completes. + */ + kRtePlayerEventSeekComplete, + /** + * 2: An error occurs when seeking to a new playback position. + */ + kRtePlayerEventSeekError, + /** + * 3: The currently buffered data is not enough to support playback. + */ + kRtePlayerEventBufferLow, + /** + * 4: The currently buffered data is just enough to support playback. + */ + kRtePlayerEventBufferRecover, + /** + * 5: Audio or video playback starts freezing. + */ + kRtePlayerEventFreezeStart, + /** + * 6: The audio or video playback resumes without freezing. + */ + kRtePlayerEventFreezeStop, + /** + * 7: One loop playback completed. + */ + kRtePlayerEventOneLoopPlaybackCompleted, + /** + * 8: URL authentication will expire. 
+ */ + kRtePlayerEventAuthenticationWillExpire, + /** + * 9: When the fallback option is enabled, ABR revert to the audio-only layer due to poor network. + */ + kRtePlayerEventAbrFallbackToAudioOnlyLayer, + /** + * 10: ABR recovers from audio-only layer to video layer when fallback option is enabled. + */ + kRtePlayerEventAbrRecoverFromAudioOnlyLayer +} RtePlayerEvent; + +/** + * ABR subscription layer. + * This enumeration can be used to set the value of the abr_subscription_layer query parameter in the rte URL. + * It can also be used in the PlayerConfig::SetAbrSubscriptionLayer setting interface. + * @since v4.4.0 + */ +typedef enum RteAbrSubscriptionLayer { + /** + * 0: High-quality video stream, this layer has the highest resolution and bitrate. + */ + kRteAbrSubscriptionHigh = 0, + /** + * 1: Low-quality video stream, this layer has the lowest resolution and bitrate. + */ + kRteAbrSubscriptionLow = 1, + /** + * 2: Layer1 video stream, this layer has lower resolution and bitrate than that of the high-quality video stream. + */ + kRteAbrSubscriptionLayer1 = 2, + /** + * 3: Layer2 video stream, this layer has lower resolution and bitrate than layer1. + */ + kRteAbrSubscriptionLayer2 = 3, + /** + * 4: Layer3 video stream, this layer has lower resolution and bitrate than layer2. + */ + kRteAbrSubscriptionLayer3 = 4, + /** + * 5: Layer4 video stream, this layer has lower resolution and bitrate than layer3. + */ + kRteAbrSubscriptionLayer4 = 5, + /** + * 6: Layer5 video stream, this layer has lower resolution and bitrate than layer4. + */ + kRteAbrSubscriptionLayer5 = 6, + /** + * 7: Layer6 video stream, this layer has lower resolution and bitrate than layer5. + */ + kRteAbrSubscriptionLayer6 = 7, +} RteAbrSubscriptionLayer; + + +/** + * ABR fallback layer. + * This enumeration can be used to set the value of the abr_fallback_layer query parameter in the rte URL. + * It can also be used in the PlayerConfig::SetAbrFallbackLayer setting interface. 
+ * @since v4.4.0 + */ +typedef enum RteAbrFallbackLayer { + /** + * 0: When the network quality is poor, it will not revert to a lower resolution stream. + * It may still revert to scalable video coding but will maintain the high-quality video resolution. + */ + kRteAbrFallbackDisabled = 0, + /** + * 1: (Default) In a poor network environment, the receiver's SDK will receive the kRteAbrSubscriptionLow layer video stream. + */ + kRteAbrFallbackLow = 1, + /** + * 2: In a poor network environment, the SDK may first receive the kRteAbrSubscriptionLow layer, + * and if the relevant layer exists, it will revert to kRteAbrSubscriptionLayer1 to kRteAbrSubscriptionLayer6. + * If the network environment is too poor to play video, the SDK will only receive audio. + */ + kRteAbrFallbackAudioOnly = 2, + /** + * 3~8: If the receiving end sets the fallback option, the SDK will receive one of the layers from kRteAbrSubscriptionLayer1 to kRteAbrSubscriptionLayer6. + * The lower boundary of the fallback video stream is determined by the configured fallback option. + */ + kRteAbrFallbackLayer1 = 3, + kRteAbrFallbackLayer2 = 4, + kRteAbrFallbackLayer3 = 5, + kRteAbrFallbackLayer4 = 6, + kRteAbrFallbackLayer5 = 7, + kRteAbrFallbackLayer6 = 8, +} RteAbrFallbackLayer; + +/** + * Player information. + * When playerInfo changes, it will be notified through the PlayerObserver::onPlayerInfoUpdated callback interface. + * It can also be actively obtained through the Player::GetInfo interface. + * @since v4.4.0 + */ +typedef struct RtePlayerInfo { + /** + * Current player state + */ + RtePlayerState state; + /** + * Reserved parameter. + */ + size_t duration; + /** + * Reserved parameter. + */ + size_t stream_count; + /** + * Whether there is an audio stream. When opening an rte URL, it indicates whether the broadcaster has pushed audio. + */ + bool has_audio; + /** + * Whether there is a video stream. When opening an rte URL, it indicates whether the broadcaster has pushed video. 
+ */ + bool has_video; + /** + * Whether the audio is muted. Indicates whether the audience has subscribed to the audio stream. + */ + bool is_audio_muted; + /** + * Whether the video is muted. Indicates whether the audience has subscribed to the video stream. + */ + bool is_video_muted; + /** + * Video resolution height + */ + int video_height; + /** + * Video resolution width + */ + int video_width; + /** + * The currently subscribed video layer + */ + RteAbrSubscriptionLayer abr_subscription_layer; + /** + * Audio sample rate + */ + int audio_sample_rate; + /** + * Number of audio channels + */ + int audio_channels; + /** + * Reserved parameter. + */ + int audio_bits_per_sample; +} RtePlayerInfo; + +/** + * Player statistics. + * Can be actively obtained through the Player::GetStats interface. + * @since v4.4.0 + */ +typedef struct RtePlayerStats { + /** + * Decoding frame rate + */ + int video_decode_frame_rate; + /** + * Rendering frame rate + */ + int video_render_frame_rate; + /** + * Video bitrate + */ + int video_bitrate; + + /** + * Audio bitrate + */ + int audio_bitrate; +} RtePlayerStats; + +typedef struct RteMediaTrackInfo { + void *placeholder; +} RteMediaTrackInfo; + +typedef enum RtePlayerMetadataType { + kRtePlayerMetadataTypeSei +} RtePlayerMetadataType; + +typedef enum RteAudioDualMonoMode { + RteAudioDualMonoStereo, + RteAudioDualMonoLeft, + RteAudioDualMonoRight, + RteAudioDualMonoMix, +} RteAudioDualMonoMode; + +typedef struct RtePlayerInitialConfig { + bool enable_cache; + bool _enable_cache_is_set; + + bool enable_multiple_audio_track; + bool _enable_multiple_audio_track_is_set; + + bool is_agora_source; + bool _is_agora_source_is_set; + + bool is_live_source; + bool _is_live_source_is_set; +} RtePlayerInitialConfig; + +typedef struct RtePlayerConfig { + bool auto_play; + bool _auto_play_is_set; + + int32_t speed; + bool _speed_is_set; + + int32_t playout_audio_track_idx; + bool _playout_audio_track_idx_is_set; + + int32_t 
publish_audio_track_idx; + bool _publish_audio_track_idx_is_set; + + int32_t subtitle_track_idx; + bool _subtitle_track_idx_is_set; + + int32_t external_subtitle_track_idx; + bool _external_subtitle_track_idx_is_set; + + int32_t audio_pitch; + bool _audio_pitch_is_set; + + int32_t playout_volume; + bool _playout_volume_is_set; + + int32_t audio_playback_delay; + bool _audio_playback_delay_is_set; + + RteAudioDualMonoMode audio_dual_mono_mode; + bool _audio_dual_mono_mode_is_set; + + int32_t publish_volume; + bool _publish_volume_is_set; + + int32_t loop_count; + bool _loop_count_is_set; + + RteString *json_parameter; + bool _json_parameter_is_set; + + // live player options + RteAbrSubscriptionLayer abr_subscription_layer; + bool _abr_subscription_layer_is_set; + + RteAbrFallbackLayer abr_fallback_layer; + bool _abr_fallback_layer_is_set; + +} RtePlayerConfig; + +typedef struct RtePlayerCustomSourceProvider RtePlayerCustomSourceProvider; +struct RtePlayerCustomSourceProvider { + void (*on_read_data)(RtePlayerCustomSourceProvider *self); + void (*on_seek)(RtePlayerCustomSourceProvider *self); +}; + +typedef struct RtePlayerObserver RtePlayerObserver; +struct RtePlayerObserver { + RteBaseObserver base_observer; + + void (*on_state_changed)(RtePlayerObserver *observer, + RtePlayerState old_state, RtePlayerState new_state, + RteError *err); + void (*on_position_changed)(RtePlayerObserver *observer, uint64_t curr_time, + uint64_t utc_time); + + void (*on_resolution_changed)(RtePlayerObserver *observer, int width, int height); + + void (*on_event)(RtePlayerObserver *observer, RtePlayerEvent event); + void (*on_metadata)(RtePlayerObserver *observer, RtePlayerMetadataType type, + const uint8_t *data, size_t length); + + void (*on_player_info_updated)(RtePlayerObserver *observer, + const RtePlayerInfo *info); + + void (*on_audio_volume_indication)(RtePlayerObserver *observer, + int32_t volume); +}; + +AGORA_RTE_API_C void RtePlayerInfoInit(RtePlayerInfo *info, RteError 
*err); +AGORA_RTE_API_C void RtePlayerInfoDeinit(RtePlayerInfo *info, RteError *err); + +AGORA_RTE_API_C void RtePlayerStatsInit(RtePlayerStats *stats, RteError *err); +AGORA_RTE_API_C void RtePlayerStatsDeinit(RtePlayerStats *stats, RteError *err); + +AGORA_RTE_API_C void RteMediaTrackInfoInit(RteMediaTrackInfo *info, + RteError *err); +AGORA_RTE_API_C void RteMediaTrackInfoDeinit(RteMediaTrackInfo *info, + RteError *err); + +AGORA_RTE_API_C void RtePlayerInitialConfigInit(RtePlayerInitialConfig *config, + RteError *err); + +AGORA_RTE_API_C void RtePlayerInitialConfigDeinit(RtePlayerInitialConfig *config, + RteError *err); + +AGORA_RTE_API_C void RtePlayerInitialConfigSetEnableCache( + RtePlayerInitialConfig *config, bool enable_cache, RteError *err); + +AGORA_RTE_API_C void RtePlayerInitialConfigGetEnableCache( + RtePlayerInitialConfig *config, bool *enable_cache, RteError *err); + +AGORA_RTE_API_C void RtePlayerInitialConfigSetEnableMultipleAudioTrack( + RtePlayerInitialConfig *config, bool enable_multiple_audio_track, + RteError *err); + +AGORA_RTE_API_C void RtePlayerInitialConfigGetEnableMultipleAudioTrack( + RtePlayerInitialConfig *config, bool *enable_multiple_audio_track, + RteError *err); + +AGORA_RTE_API_C void RtePlayerInitialConfigSetIsAgoraSource( + RtePlayerInitialConfig *config, bool is_agora_source, RteError *err); + +AGORA_RTE_API_C void RtePlayerInitialConfigGetIsAgoraSource( + RtePlayerInitialConfig *config, bool *is_agora_source, RteError *err); + +AGORA_RTE_API_C void RtePlayerInitialConfigSetIsLiveSource( + RtePlayerInitialConfig *config, bool is_live_source, RteError *err); + +AGORA_RTE_API_C void RtePlayerInitialConfigGetIsLiveSource( + RtePlayerInitialConfig *config, bool *is_live_source, RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigInit(RtePlayerConfig *config, RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigDeinit(RtePlayerConfig *config, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigCopy(RtePlayerConfig
*dst, + const RtePlayerConfig *src, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigSetAutoPlay(RtePlayerConfig *config, + bool auto_play, RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetAutoPlay(RtePlayerConfig *config, + bool *auto_play, RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigSetPlaybackSpeed(RtePlayerConfig *config, + int32_t speed, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetPlaybackSpeed(RtePlayerConfig *config, + int32_t *speed, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigSetPlayoutAudioTrackIdx( + RtePlayerConfig *config, int32_t idx, RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetPlayoutAudioTrackIdx( + RtePlayerConfig *config, int32_t *idx, RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigSetPublishAudioTrackIdx( + RtePlayerConfig *config, int32_t idx, RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetPublishAudioTrackIdx( + RtePlayerConfig *config, int32_t *idx, RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigSetAudioTrackIdx(RtePlayerConfig *config, + int32_t idx, RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetAudioTrackIdx(RtePlayerConfig *config, + int32_t *idx, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigSetSubtitleTrackIdx(RtePlayerConfig *config, + int32_t idx, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetSubtitleTrackIdx(RtePlayerConfig *config, + int32_t *idx, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigSetExternalSubtitleTrackIdx( + RtePlayerConfig *config, int32_t idx, RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetExternalSubtitleTrackIdx( + RtePlayerConfig *config, int32_t *idx, RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigSetAudioPitch(RtePlayerConfig *config, + int32_t audio_pitch, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetAudioPitch(RtePlayerConfig *config, + int32_t *audio_pitch, + RteError *err); + +AGORA_RTE_API_C void 
RtePlayerConfigSetPlayoutVolume(RtePlayerConfig *config, + int32_t volume, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetPlayoutVolume(RtePlayerConfig *config, + int32_t *volume, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigSetAudioPlaybackDelay( + RtePlayerConfig *config, int32_t delay, RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetAudioPlaybackDelay( + RtePlayerConfig *config, int32_t *delay, RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigSetAudioDualMonoMode( + RtePlayerConfig *config, RteAudioDualMonoMode mode, RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetAudioDualMonoMode( + RtePlayerConfig *config, RteAudioDualMonoMode *mode, RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigSetPublishVolume(RtePlayerConfig *config, + int32_t volume, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetPublishVolume(RtePlayerConfig *config, + int32_t *volume, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigSetLoopCount(RtePlayerConfig *config, + int32_t loop_count, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetLoopCount(RtePlayerConfig *config, + int32_t *loop_count, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigSetJsonParameter(RtePlayerConfig *config, + RteString *json_parameter, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetJsonParameter(RtePlayerConfig *config, + RteString *json_parameter, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigSetAbrSubscriptionLayer(RtePlayerConfig *config, + RteAbrSubscriptionLayer abr_subscription_layer, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetAbrSubscriptionLayer(RtePlayerConfig *config, + RteAbrSubscriptionLayer *abr_subscription_layer, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigSetAbrFallbackLayer(RtePlayerConfig *config, + RteAbrFallbackLayer abr_fallback_layer, + RteError *err); + +AGORA_RTE_API_C void RtePlayerConfigGetAbrFallbackLayer(RtePlayerConfig *config, + 
RteAbrFallbackLayer *abr_fallback_layer, + RteError *err); + +AGORA_RTE_API_C RtePlayer RtePlayerCreate(Rte *self, RtePlayerInitialConfig *config, + RteError *err); +AGORA_RTE_API_C void RtePlayerDestroy(RtePlayer *self, RteError *err); + +AGORA_RTE_API_C bool RtePlayerPreloadWithUrl(RtePlayer *self, const char *url, + RteError *err); + +AGORA_RTE_API_C void RtePlayerOpenWithUrl( + RtePlayer *self, const char *url, uint64_t start_time, + void (*cb)(RtePlayer *self, void *cb_data, RteError *err), void *cb_data); + +AGORA_RTE_API_C void RtePlayerOpenWithCustomSourceProvider( + RtePlayer *self, RtePlayerCustomSourceProvider *provider, + uint64_t start_time, + void (*cb)(RtePlayer *self, void *cb_data, RteError *err), void *cb_data); + +AGORA_RTE_API_C void RtePlayerOpenWithStream(RtePlayer *self, RteStream *stream, + void (*cb)(RtePlayer *self, void *cb_data, + RteError *err), + void *cb_data); + + +AGORA_RTE_API_C void RtePlayerGetStats(RtePlayer *self, void (*cb)(RtePlayer *player, RtePlayerStats *stats, void *cb_data, RteError *err), void *cb_data); + +AGORA_RTE_API_C bool RtePlayerSetCanvas(RtePlayer *self, RteCanvas *canvas, RteError *err); + +AGORA_RTE_API_C bool RtePlayerPlay(RtePlayer *self, RteError *err); +AGORA_RTE_API_C bool RtePlayerStop(RtePlayer *self, RteError *err); +AGORA_RTE_API_C bool RtePlayerPause(RtePlayer *self, RteError *err); +AGORA_RTE_API_C bool RtePlayerSeek(RtePlayer *self, uint64_t new_time, + RteError *err); + +AGORA_RTE_API_C bool RtePlayerMuteAudio(RtePlayer *self, bool mute, RteError *err); +AGORA_RTE_API_C bool RtePlayerMuteVideo(RtePlayer *self, bool mute, RteError *err); + +AGORA_RTE_API_C uint64_t RtePlayerGetPosition(RtePlayer *self, RteError *err); + +AGORA_RTE_API_C bool RtePlayerGetInfo(RtePlayer *self, RtePlayerInfo *info, RteError *err); + +AGORA_RTE_API_C bool RtePlayerGetConfigs(RtePlayer *self, + RtePlayerConfig *config, RteError *err); +AGORA_RTE_API_C bool RtePlayerSetConfigs(RtePlayer *self, RtePlayerConfig *config, 
RteError *err); + + +AGORA_RTE_API_C bool RtePlayerRegisterObserver( + RtePlayer *self, RtePlayerObserver *observer, RteError *err); +AGORA_RTE_API_C bool RtePlayerUnregisterObserver(RtePlayer *self, + RtePlayerObserver *observer, + RteError *err); + +AGORA_RTE_API_C RtePlayerObserver *RtePlayerObserverCreate(RteError *err); +AGORA_RTE_API_C void RtePlayerObserverDestroy(RtePlayerObserver *observer, + RteError *err); +AGORA_RTE_API_C RtePlayer +RtePlayerObserverGetEventSrc(RtePlayerObserver *observer, RteError *err); + +AGORA_RTE_API_C RtePlayerCustomSourceProvider +RtePlayerCustomSourceProviderCreate(Rte *self, RteError *err); + +AGORA_RTE_API_C void RtePlayerCustomSourceProviderDestroy( + RtePlayerCustomSourceProvider *self, RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/c_rte.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/c_rte.h new file mode 100644 index 000000000..fd39f376f --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/c_rte.h @@ -0,0 +1,142 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include + +#include "common.h" +#include "handle.h" +#include "observer.h" +#include "utils/string.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteRemoteStream RteRemoteStream; +typedef struct RteLocalStream RteLocalStream; + +typedef struct RteInitialConfig { + void *placeholder; +} RteInitialConfig; + +typedef struct RteConfig { + RteString *app_id; + bool has_app_id; + + RteString *log_folder; + bool has_log_folder; + + size_t log_file_size; + bool has_log_file_size; + + int32_t area_code; + bool has_area_code; + + RteString *cloud_proxy; + bool has_cloud_proxy; + + RteString *json_parameter; + bool has_json_parameter; +} RteConfig; + +typedef struct RteObserver { + RteBaseObserver base_observer; +} RteObserver; + +AGORA_RTE_API_C void RteInitialConfigInit(RteInitialConfig *config, RteError *err); +AGORA_RTE_API_C void RteInitialConfigDeinit(RteInitialConfig *config, RteError *err); + +AGORA_RTE_API_C void RteConfigInit(RteConfig *config, RteError *err); +AGORA_RTE_API_C void RteConfigDeinit(RteConfig *config, RteError *err); +AGORA_RTE_API_C void RteConfigCopy(RteConfig *dst, const RteConfig *src, + RteError *err); + +AGORA_RTE_API_C void RteConfigSetAppId(RteConfig *config, RteString *app_id, + RteError *err); +AGORA_RTE_API_C void RteConfigGetAppId(RteConfig *config, RteString *app_id, + RteError *err); + +AGORA_RTE_API_C void RteConfigSetLogFolder(RteConfig *config, + RteString *log_folder, RteError *err); +AGORA_RTE_API_C void RteConfigGetLogFolder(RteConfig *config, + RteString *log_folder, RteError *err); + +AGORA_RTE_API_C void RteConfigSetLogFileSize(RteConfig *config, + size_t log_file_size, + RteError *err); +AGORA_RTE_API_C void RteConfigGetLogFileSize(RteConfig *config, + size_t *log_file_size, + RteError *err); + +AGORA_RTE_API_C void RteConfigSetAreaCode(RteConfig *config, int32_t area_code, + RteError *err); +AGORA_RTE_API_C void RteConfigGetAreaCode(RteConfig *config, int32_t 
*area_code, + RteError *err); + +AGORA_RTE_API_C void RteConfigSetCloudProxy(RteConfig *config, + RteString *cloud_proxy, + RteError *err); +AGORA_RTE_API_C void RteConfigGetCloudProxy(RteConfig *config, + RteString *cloud_proxy, + RteError *err); + +AGORA_RTE_API_C void RteConfigSetJsonParameter(RteConfig *config, + RteString *json_parameter, + RteError *err); +AGORA_RTE_API_C void RteConfigGetJsonParameter(RteConfig *config, + RteString *json_parameter, + RteError *err); + +AGORA_RTE_API_C Rte RteCreate(RteInitialConfig *config, RteError *err); +AGORA_RTE_API_C bool RteDestroy(Rte *self, RteError *err); + +AGORA_RTE_API_C bool RteInitMediaEngine(Rte *self, + void (*cb)(Rte *self, void *cb_data, + RteError *err), + void *cb_data, RteError *err); + +AGORA_RTE_API_C bool RteGetConfigs(Rte *self, RteConfig *config, RteError *err); +AGORA_RTE_API_C bool RteSetConfigs(Rte *self, RteConfig *config, RteError *err); + +AGORA_RTE_API_C void RteRelayStream(RteChannel *src_channel, + RteRemoteStream *src_stream, + RteChannel *dest_channel, + RteLocalStream *dest_stream, + void (*cb)(void *cb_data, RteError *err), + void *cb_data); + +AGORA_RTE_API_C void RteRegisterExtension( + Rte *self, RteString *provider_name, RteString *extension_name, + void (*cb)(Rte *self, RteString *provider_name, RteString *extension_name, + void *cb_data, RteError *err), + void *cb_data); + +AGORA_RTE_API_C void RteReportMessage(Rte *self, RteError *err, const char *fmt, + ...); + +AGORA_RTE_API_C bool RteRegisterObserver(Rte *self, RteObserver *observer, RteError *err); +AGORA_RTE_API_C bool RteUnregisterObserver(Rte *self, RteObserver *observer, + RteError *err); + +AGORA_RTE_API_C void RteStartLastMileProbeTest(Rte *self, + void (*cb)(RteError *err)); + +AGORA_RTE_API_C RteObserver *RteObserverCreate(RteError *err); + +AGORA_RTE_API_C void RteObserverDestroy(RteObserver *observer, RteError *err); + +AGORA_RTE_API_C Rte RteObserverGetEventSrc(RteObserver *observer, RteError *err); + +// @{ +// 
Internal use only. +AGORA_RTE_API_C bool RteGlobalListIsEmpty(); +// @} + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/channel.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/channel.h new file mode 100644 index 000000000..4e553d6a2 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/channel.h @@ -0,0 +1,383 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include "handle.h" +#include "common.h" +#include "metadata.h" +#include "observer.h" +#include "utils/string.h" +#include "track/track.h" +#include "utils/buf.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteTrack RteTrack; +typedef struct RteUser RteUser; +typedef struct RteStream RteStream; +typedef struct Rte Rte; +typedef struct RteLocalStream RteLocalStream; +typedef struct RteLocalStreamInfo RteLocalStreamInfo; +typedef struct RteRemoteStream RteRemoteStream; +typedef struct RteRemoteStreamInfo RteRemoteStreamInfo; + +typedef enum RteChannelType { + kRteChannelTypeDefault, +} RteChannelType; + +typedef enum RteLocalUserRtcRole { + kRteLocalUserRtcRoleBroadcaster, + kRteLocalUserRtcRoleAudience, +} RteLocalUserRtcRole; + +typedef enum RteChannelConnectionState { + kRteChannelConnectionStateDisconnected, + kRteChannelConnectionStateDisconnecting, + kRteChannelConnectionStateConnecting, + kRteChannelConnectionStateConnected, + kRteChannelConnectionStateReconnecting, + kRteChannelConnectionStateFailed +} RteChannelConnectionState; + +typedef enum RteChannelConnectionStateChangedReason { + kRteChannelConnectionStateChangedReasonConnecting, + kRteChannelConnectionStateChangedReasonSuccess, + kRteChannelConnectionStateChangedReasonInterrupted, + kRteChannelConnectionStateChangedReasonBannedByServer, + kRteChannelConnectionStateChangedReasonJoinFailed, + 
kRteChannelConnectionStateChangedReasonLeaveChannel, + kRteChannelConnectionStateChangedReasonInvalidAppId, + kRteChannelConnectionStateChangedReasonInvalidChannelName, + kRteChannelConnectionStateChangedReasonInvalidToken, + kRteChannelConnectionStateChangedReasonTokenExpired, + kRteChannelConnectionStateChangedReasonRejectedByServer, + kRteChannelConnectionStateChangedReasonSettingProxyServer, + kRteChannelConnectionStateChangedReasonRenewToken, + kRteChannelConnectionStateChangedReasonClientIpChanged, + kRteChannelConnectionStateChangedReasonKeepAliveTimeout, + kRteChannelConnectionStateChangedReasonRejoinSuccess, + kRteChannelConnectionStateChangedReasonLost, + kRteChannelConnectionStateChangedReasonEchoTest, + kRteChannelConnectionStateChangedReasonClientIpChangedByUser, + kRteChannelConnectionStateChangedReasonSameUidLogin, + kRteChannelConnectionStateChangedReasonTooManyBroadcasters, + kRteChannelConnectionStateChangedReasonLicenseValidationFailure, + kRteChannelConnectionStateChangedReasonCertificationVerifyFailure, + kRteChannelConnectionStateChangedReasonStreamChannelNotAvailable, + kRteChannelConnectionStateChangedReasonInconsistentAppId +} RteChannelConnectionStateChangedReason; + +typedef enum RteTrackSubState { + kRteTrackSubStateSubscribing, + kRteTrackSubStateSubscribed, + kRteTrackSubStateNotSubscribed +} RteTrackSubState; + +typedef enum RteTrackSubStateChangedReason { + kRteTrackSubStateChangedReasonRemotePublished, + kRteTrackSubStateChangedReasonRemoteUnpublished, + kRteTrackSubStateChangedReasonLocalSubscribed, + kRteTrackSubStateChangedReasonLocalUnsubscribed +} RteTrackSubStateChangedReason; + +typedef enum RteTrackPubState { + kRteTrackPubStatePublishing, + kRteTrackPubStatePublished, + kRteTrackPubStateNotPublished +} RteTrackPubState; + +typedef enum RteTrackPubStateChangedReason { + kRteTrackPubStateChangedReasonLocalPublished, + kRteTrackPubStateChangedReasonLocalUnpublished +} RteTrackPubStateChangedReason; + +typedef struct 
RteStateItem { + RteString *key; + RteString *value; +} RteStateItem; + +typedef struct RteState { + RteString *name; + RteStateItem *items; + size_t items_cnt; +} RteState; + +typedef struct RteLock { + RteString *lock_name; + RteString *owner; + uint32_t ttl; +} RteLock; + +typedef enum RteLockChangedEvent { + kRteLockChangedEventSet, + kRteLockChangedEventRemoved, + kRteLockChangedEventAcquired, + kRteLockChangedEventReleased, + kRteLockChangedEventExpired +} RteLockChangedEvent; + +typedef struct RteChannelConfig { + RteString *channel_id; + bool has_channel_id; + + RteChannelType type; + bool has_type; + + bool is_user_id_integer_only; + bool has_is_user_id_integer_only; + + bool is_user_id_same_as_stream_id; + bool has_is_user_id_same_as_stream_id; + + RteLocalUserRtcRole local_user_rtc_role; + bool has_local_user_rtc_role; + + bool auto_subscribe_audio; + bool has_auto_subscribe_audio; + + bool auto_subscribe_video; + bool has_auto_subscribe_video; + + RteString *json_parameter; + bool has_json_parameter; +} RteChannelConfig; + +typedef struct RteChannelObserver RteChannelObserver; +struct RteChannelObserver { + RteBaseObserver base_observer; + + void (*on_remote_stream_added)(RteChannelObserver *self, + RteRemoteStream *stream, RteRemoteUser *user); + void (*on_local_stream_info)(RteChannelObserver *self, RteLocalStream *stream, + RteLocalStreamInfo *info); + void (*on_remote_stream_info)(RteChannelObserver *self, + RteRemoteStream *stream, + RteRemoteStreamInfo *info); + void (*on_channel_message_received)(RteChannelObserver *self, + RteString publisher, RteBuf *message); +}; + +typedef struct RteSubscribeOptions { + RteTrackMediaType track_media_type; + RteString *data_track_topic; +} RteSubscribeOptions; + +AGORA_RTE_API_C void RteStateItemInit(RteStateItem *self, RteError *err); +AGORA_RTE_API_C void RteStateItemDeinit(RteStateItem *self, RteError *err); +// @} + +// @{ +// Config +AGORA_RTE_API_C void RteChannelConfigInit(RteChannelConfig *config, +
RteError *err); +AGORA_RTE_API_C void RteChannelConfigDeinit(RteChannelConfig *config, + RteError *err); + +AGORA_RTE_API_C void RteChannelConfigSetChannelId(RteChannelConfig *self, + const char *channel_id, + RteError *err); +AGORA_RTE_API_C void RteChannelConfigGetChannelId(RteChannelConfig *self, + RteString *channel_id, + RteError *err); + +AGORA_RTE_API_C void RteChannelConfigSetChannelType(RteChannelConfig *self, + RteChannelType type, + RteError *err); +AGORA_RTE_API_C void RteChannelConfigGetChannelType(RteChannelConfig *self, + RteChannelType *type, + RteError *err); + +AGORA_RTE_API_C void RteChannelConfigSetIsUserIdIntegerOnly( + RteChannelConfig *self, bool is_user_id_integer_only, RteError *err); + +AGORA_RTE_API_C void RteChannelConfigGetIsUserIdIntegerOnly( + RteChannelConfig *self, bool *is_user_id_integer_only, RteError *err); + +AGORA_RTE_API_C void RteChannelConfigSetIsUserIdSameAsStreamId( + RteChannelConfig *self, bool is_user_id_same_as_stream_id, RteError *err); +AGORA_RTE_API_C void RteChannelConfigGetIsUserIdSameAsStreamId( + RteChannelConfig *self, bool *is_user_id_same_as_stream_id, RteError *err); + +AGORA_RTE_API_C void RteChannelConfigSetLocalUserRtcRole( + RteChannelConfig *self, RteLocalUserRtcRole local_user_rtc_role, + RteError *err); +AGORA_RTE_API_C void RteChannelConfigGetLocalUserRtcRole( + RteChannelConfig *self, RteLocalUserRtcRole *local_user_rtc_role, + RteError *err); + +AGORA_RTE_API_C void RteChannelConfigSetAutoSubscribeAudio( + RteChannelConfig *self, bool auto_subscribe_audio, RteError *err); +AGORA_RTE_API_C void RteChannelConfigGetAutoSubscribeAudio( + RteChannelConfig *self, bool *auto_subscribe_audio, RteError *err); + +AGORA_RTE_API_C void RteChannelConfigSetAutoSubscribeVideo( + RteChannelConfig *self, bool auto_subscribe_video, RteError *err); +AGORA_RTE_API_C void RteChannelConfigGetAutoSubscribeVideo( + RteChannelConfig *self, bool *auto_subscribe_video, RteError *err); + +AGORA_RTE_API_C void 
RteChannelConfigSetJsonParameter(RteChannelConfig *self, + RteString *json_parameter, + RteError *err); +AGORA_RTE_API_C void RteChannelConfigGetJsonParameter(RteChannelConfig *self, + RteString *json_parameter, + RteError *err); +// @} + +// @{ +// Observer +AGORA_RTE_API_C RteChannelObserver *RteChannelObserverCreate(RteError *err); +AGORA_RTE_API_C void RteChannelObserverDestroy(RteChannelObserver *self, + RteError *err); +AGORA_RTE_API_C RteChannel +RteChannelObserverGetEventSrc(RteChannelObserver *self, RteError *err); +// @} + +AGORA_RTE_API_C RteChannel RteChannelCreate(Rte *self, RteChannelConfig *config, + RteError *err); +AGORA_RTE_API_C void RteChannelDestroy(RteChannel *channel, RteError *err); + +AGORA_RTE_API_C void RteChannelGetConfigs(RteChannel *self, + RteChannelConfig *config, + RteError *err); +AGORA_RTE_API_C void RteChannelSetConfigs( + RteChannel *self, RteChannelConfig *config, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); + +AGORA_RTE_API_C void RteChannelPublishStream( + RteChannel *self, RteLocalUser *user, RteLocalStream *stream, + void (*cb)(RteChannel *self, RteLocalStream *stream, void *cb_data, + RteError *err), + void *cb_data); +AGORA_RTE_API_C void RteChannelUnpublishStream( + RteChannel *self, RteLocalStream *stream, + void (*cb)(RteChannel *self, RteLocalStream *stream, void *cb_data, + RteError *err), + void *cb_data); + +AGORA_RTE_API_C void RteChannelSubscribeTrack( + RteChannel *self, RteRemoteStream *stream, RteSubscribeOptions *options, + void (*cb)(RteChannel *self, RteTrack *track, void *cb_data, RteError *err), + void *cb_data); + +AGORA_RTE_API_C bool RteChannelRegisterObserver( + RteChannel *self, RteChannelObserver *observer, RteError *err); + +AGORA_RTE_API_C RteUser RteChannelGetLocalUser(RteChannel *self, RteError *err); +AGORA_RTE_API_C size_t RteChannelGetRemoteUsersCount(RteChannel *self, + RteError *err); +AGORA_RTE_API_C void RteChannelGetRemoteUsers(RteChannel *self, + 
RteRemoteUser *remote_users, + size_t start_idx, + size_t remote_users_cnt, + RteError *err); + +AGORA_RTE_API_C void RteChannelJoin(RteChannel *self, RteLocalUser *user, + RteString *channel_token, + void (*cb)(RteChannel *self, + RteLocalUser *user, void *cb_data, + RteError *err), + void *cb_data); +AGORA_RTE_API_C void RteChannelLeave(RteChannel *self, RteLocalUser *user, + void (*cb)(RteChannel *self, + RteLocalUser *user, + void *cb_data, RteError *err), + void *cb_data); + +AGORA_RTE_API_C void RteChannelRenewToken( + RteChannel *self, RteString *channel_token, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); + +AGORA_RTE_API_C void RteChannelPublishMessage( + RteChannel *self, RteBuf *message, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); +AGORA_RTE_API_C void RteChannelSubscribeMessage( + RteChannel *self, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); +AGORA_RTE_API_C void RteChannelUnsubscribeMessage( + RteChannel *self, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); + +AGORA_RTE_API_C void RteChannelGetMetadata(RteChannel *self, + void (*cb)(RteChannel *self, + RteMetadata *items, + void *cb_data, + RteError *err), + void *cb_data); +AGORA_RTE_API_C void RteChannelSubscribeMetadata( + RteChannel *self, + void (*cb)(RteChannel *self, RteMetadata *items, size_t items_cnt, + void *cb_data, RteError *err), + void *cb_data); +AGORA_RTE_API_C void RteChannelUnsubscribeMetadata( + RteChannel *self, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); +AGORA_RTE_API_C void RteChannelRemoveMetadata( + RteChannel *self, RteMetadata *items, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); + +AGORA_RTE_API_C void RteChannelGetUserState( + RteChannel *self, RteString *user_name, + void (*cb)(RteChannel *self, RteState *state, void *cb_data, RteError *err), + void *cb_data); +AGORA_RTE_API_C 
void RteChannelSetUserState( + RteChannel *self, RteState *state, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); +AGORA_RTE_API_C void RteChannelRemoveUserState( + RteChannel *self, RteString *key, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); +AGORA_RTE_API_C void RteChannelSubscribeUserState( + RteChannel *self, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); +AGORA_RTE_API_C void RteChannelUnsubscribeUserState( + RteChannel *self, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); + +AGORA_RTE_API_C void RteChannelSetLock( + RteChannel *self, RteString *lock_name, uint32_t ttl, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); +AGORA_RTE_API_C void RteChannelRemoveLock( + RteChannel *self, RteString *lock_name, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); +AGORA_RTE_API_C void RteChannelGetLocks(RteChannel *self, + void (*cb)(RteChannel *self, + RteLock *locks, + size_t locks_cnt, + void *cb_data, RteError *err), + void *cb_data); +AGORA_RTE_API_C void RteChannelSubscribeLocks( + RteChannel *self, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); +AGORA_RTE_API_C void RteChannelUnsubscribeLocks( + RteChannel *self, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); +AGORA_RTE_API_C void RteChannelAcquireLock( + RteChannel *self, RteString *lock_name, bool retry, + void (*cb)(RteChannel *self, RteString owner, void *cb_data, RteError *err), + void *cb_data); +AGORA_RTE_API_C void RteChannelReleaseLock( + RteChannel *self, RteString *lock_name, + void (*cb)(RteChannel *self, void *cb_data, RteError *err), void *cb_data); + +AGORA_RTE_API_C size_t RteChannelGetLocalStreamsCount(RteChannel *self, + RteError *err); +AGORA_RTE_API_C void RteChannelGetLocalStreams(RteChannel *self, + RteLocalStream *streams, + size_t start_idx, + size_t 
streams_cnt, + RteError *err); + +AGORA_RTE_API_C size_t RteChannelGetRemoteStreamsCount(RteChannel *self, + RteError *err); +AGORA_RTE_API_C void RteChannelGetRemoteStreams( + RteChannel *self, RteRemoteUser *user, RteRemoteStream *streams, + size_t start_idx, size_t streams_cnt, RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/common.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/common.h new file mode 100644 index 000000000..991c8d635 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/common.h @@ -0,0 +1,59 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#if defined(_WIN32) + +#ifndef WIN32_LEAN_AND_MEAN +#define WIN32_LEAN_AND_MEAN +#endif // !WIN32_LEAN_AND_MEAN +#if defined(__aarch64__) +#include +#endif +#include + +#ifdef AGORARTC_EXPORT +#define AGORA_RTE_API_C __declspec(dllexport) +#else +#define AGORA_RTE_API_C __declspec(dllimport) +#endif // AGORARTC_EXPORT + +#define AGORA_CALL_C __cdecl + +#elif defined(__APPLE__) + +#include + +#define AGORA_RTE_API_C __attribute__((visibility("default"))) + +#elif defined(__ANDROID__) || defined(__linux__) + +#define AGORA_RTE_API_C __attribute__((visibility("default"))) + +#else // !_WIN32 && !__APPLE__ && !(__ANDROID__ || __linux__) + +#define AGORA_RTE_API_C + +#endif // _WIN32 + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef enum RteAreaCode { + kRteAreaCodeCn = 0x00000001, + kRteAreaCodeNa = 0x00000002, + kRteAreaCodeEu = 0x00000004, + kRteAreaCodeAs = 0x00000008, + kRteAreaCodeJp = 0x00000010, + kRteAreaCodeIn = 0x00000020, + kRteAreaCodeGlob = 0xFFFFFFFF +} RteAreaCode; + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/audio.h 
b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/audio.h new file mode 100644 index 000000000..8349eba52 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/audio.h @@ -0,0 +1,40 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include "rte_base/c/device/device.h" +#include "rte_base/c/handle.h" +#include "rte_base/c/common.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteAudioDevice RteAudioDevice; + +typedef enum RteAudioDeviceType { + kRteAudioDeviceTypePlayout, + kRteAudioDeviceTypeRecording, +} RteAudioDeviceType; + +typedef struct RteAudioDeviceInfo { + RteDeviceInfo info; + + RteAudioDeviceType type; +} RteAudioDeviceInfo; + +AGORA_RTE_API_C void RteAudioDeviceInfoInit(RteAudioDeviceInfo *info, + RteError *err); +AGORA_RTE_API_C void RteAudioDeviceInfoDeinit(RteAudioDeviceInfo *info, + RteError *err); + +AGORA_RTE_API_C void RteAudioDeviceGetInfo(RteAudioDevice *self, + RteAudioDeviceInfo *info, + RteError *err); +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/audio_device_manager.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/audio_device_manager.h new file mode 100644 index 000000000..7f5cdf03a --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/audio_device_manager.h @@ -0,0 +1,97 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include + +#include "rte_base/c/common.h" +#include "rte_base/c/c_error.h" +#include "rte_base/c/device/audio.h" +#include "rte_base/c/handle.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteAudioDeviceManagerConfig { + char placeholder; +} RteAudioDeviceManagerConfig; + +typedef struct RteAudioDeviceManager { + char placeholder; +} RteAudioDeviceManager; + +AGORA_RTE_API_C void RteAudioDeviceManagerConfigInit( + RteAudioDeviceManagerConfig *config, RteError *err); + +AGORA_RTE_API_C void RteAudioDeviceManagerConfigDeInit( + RteAudioDeviceManagerConfig *config, RteError *err); + +AGORA_RTE_API_C void RteAudioDeviceManagerConfigSetJsonParameter( + RteAudioDeviceManagerConfig *self, RteString *json_parameter, + RteError *err); + +AGORA_RTE_API_C void RteAudioDeviceManagerConfigGetJsonParameter( + RteAudioDeviceManagerConfig *config, RteString *json_parameter, + RteError *err); + +typedef void (*RteAudioDeviceManagerSetConfigsCallback)( + RteAudioDeviceManager *device_manager, RteAudioDeviceManagerConfig *config, + void *cb_data, RteError *err); + +AGORA_RTE_API_C RteAudioDeviceManager RteAudioDeviceManagerCreate( + Rte *rte, RteAudioDeviceManagerConfig *config, RteError *err); +AGORA_RTE_API_C void RteAudioDeviceManagerDestroy(RteAudioDeviceManager *self, + RteError *err); + +AGORA_RTE_API_C void RteAudioDeviceManagerGetConfigs( + RteAudioDeviceManager *self, RteAudioDeviceManagerConfig *config, + RteError *err); + +AGORA_RTE_API_C void RteAudioDeviceManagerSetConfigs( + RteAudioDeviceManager *self, RteAudioDeviceManagerConfig *config, + RteAudioDeviceManagerSetConfigsCallback cb, void *cb_data); + +typedef void (*RteAudioDeviceManagerEnumerateDevicesCallback)( + RteAudioDeviceManager *mgr, RteAudioDevice *devices, size_t devices_cnt, + void *cb_data, RteError *err); +AGORA_RTE_API_C void RteAudioDeviceManagerEnumerateDevices( + RteAudioDeviceManager *self, RteAudioDeviceType type, + 
RteAudioDeviceManagerEnumerateDevicesCallback cb, void *cb_data); + +AGORA_RTE_API_C void RteAudioDeviceManagerSetVolume(RteAudioDeviceManager *self, + RteAudioDeviceType type, + uint32_t volume, + RteError *err); + +AGORA_RTE_API_C void RteAudioDeviceManagerGetVolume(RteAudioDeviceManager *self, + RteAudioDeviceType type, + uint32_t *volume, + RteError *err); + +typedef void (*RteAudioDeviceManagerSetCurrentDeviceCallback)( + RteAudioDeviceManager *self, RteAudioDeviceType type, + RteAudioDevice *device, void *cb_data, RteError *err); +AGORA_RTE_API_C void RteAudioDeviceManagerSetCurrentDevice( + RteAudioDeviceManager *self, RteAudioDeviceType type, + RteAudioDevice *device, RteAudioDeviceManagerSetCurrentDeviceCallback cb, + void *cb_data); + +AGORA_RTE_API_C RteAudioDevice RteAudioDeviceManagerGetCurrentDevice( + RteAudioDeviceManager *self, RteAudioDeviceType type, RteError *err); + +#if defined(__ANDROID__) || defined(TARGET_OS_IOS) || defined(UNIT_TEST_MOCK) +AGORA_RTE_API_C void RteAudioDeviceManagerUseSpeakerphoneByDefault( + RteAudioDeviceManager *self, bool enable, RteError *err); + +AGORA_RTE_API_C void RteAudioDeviceManagerUseSpeakerphone( + RteAudioDeviceManager *self, bool enable, RteError *err); +#endif + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/device.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/device.h new file mode 100644 index 000000000..f85fe4d00 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/device.h @@ -0,0 +1,27 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include "rte_base/c/common.h" +#include "rte_base/c/utils/string.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteDeviceInfo { + RteString *device_name; + RteString *device_id; +} RteDeviceInfo; + +AGORA_RTE_API_C void RteDeviceInfoInit(RteDeviceInfo *info, RteError *err); + +AGORA_RTE_API_C void RteDeviceInfoDeinit(RteDeviceInfo *info, RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/video.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/video.h new file mode 100644 index 000000000..a2650bd7c --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/video.h @@ -0,0 +1,37 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include "rte_base/c/device/device.h" +#include "rte_base/c/common.h" +#include "rte_base/c/handle.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef enum RteVideoDeviceType { + kRteVideoDeviceTypeCapturing +} RteVideoDeviceType; + +typedef struct RteVideoDeviceInfo { + RteDeviceInfo base; + + RteVideoDeviceType type; +} RteVideoDeviceInfo; + +AGORA_RTE_API_C void RteVideoDeviceInfoInit(RteVideoDeviceInfo *info, + RteError *err); +AGORA_RTE_API_C void RteVideoDeviceInfoDeinit(RteVideoDeviceInfo *info, + RteError *err); + +AGORA_RTE_API_C void RteVideoDeviceGetInfo(RteVideoDevice *self, + RteVideoDeviceInfo *info, + RteError *err); +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/video_device_manager.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/video_device_manager.h new file mode 100644 index 000000000..e2ae19643 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/device/video_device_manager.h @@ -0,0 +1,67 @@ 
+/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include + +#include "rte_base/c/c_error.h" +#include "rte_base/c/device/video.h" +#include "rte_base/c/handle.h" +#include "rte_base/c/common.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteVideoDeviceManagerConfig { + char placeholder; +} RteVideoDeviceManagerConfig; + +typedef struct RteVideoDeviceManager { + char placeholder; +} RteVideoDeviceManager; + +AGORA_RTE_API_C void RteVideoDeviceManagerConfigInit( + RteVideoDeviceManagerConfig *config, RteError *err); +AGORA_RTE_API_C void RteVideoDeviceManagerConfigDeinit( + RteVideoDeviceManagerConfig *config, RteError *err); +AGORA_RTE_API_C void RteVideoDeviceManagerConfigSetJsonParameter( + RteVideoDeviceManagerConfig *config, RteString *json_parameter, + RteError *err); +AGORA_RTE_API_C void RteVideoDeviceManagerConfigGetJsonParameter( + RteVideoDeviceManagerConfig *config, RteString *json_parameter, + RteError *err); + +typedef void (*RteVideoDeviceManagerSetConfigsCallback)( + RteVideoDeviceManager *device_manager, RteVideoDeviceManagerConfig *config, + void *cb_data, RteError *err); + +typedef void (*RteVideoDeviceManagerEnumerateDevicesCallback)( + RteVideoDeviceManager *device_manager, RteVideoDevice *devices, + size_t devices_cnt, void *cb_data, RteError *err); + +AGORA_RTE_API_C RteVideoDeviceManager RteVideoDeviceManagerCreate( + Rte *rte, RteVideoDeviceManagerConfig *config, RteError *err); +AGORA_RTE_API_C void RteVideoDeviceManagerDestroy(RteVideoDeviceManager *self, + RteError *err); +AGORA_RTE_API_C void RteVideoDeviceManagerGetConfigs( + RteVideoDeviceManager *self, RteVideoDeviceManagerConfig *config, + RteError *err); +AGORA_RTE_API_C void RteVideoDeviceManagerSetConfigs( + RteVideoDeviceManager *self, RteVideoDeviceManagerConfig *config, + RteVideoDeviceManagerSetConfigsCallback callback, void *cb_data); +AGORA_RTE_API_C void 
RteVideoDeviceManagerEnumerateDevices( + RteVideoDeviceManager *self, RteVideoDeviceType type, + RteVideoDeviceManagerEnumerateDevicesCallback cb, void *cb_data); +AGORA_RTE_API_C void RteVideoDeviceManagerSetCurrentDevice( + RteVideoDeviceManager *self, RteVideoDevice *device, RteError *err); +AGORA_RTE_API_C RteVideoDevice RteVideoDeviceManagerGetCurrentDevice( + RteVideoDeviceManager *self, RteVideoDeviceType type, RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/handle.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/handle.h new file mode 100644 index 000000000..a7ff4ac0f --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/handle.h @@ -0,0 +1,155 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include + +#include "utils/uuid.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteHandle { + RteUuid uuid; +} RteHandle; + +typedef struct Rte { + RteHandle handle; +} Rte; + +typedef struct RtePlayer { + RteHandle handle; +} RtePlayer; + +typedef struct RteChannel { + RteHandle handle; +} RteChannel; + +typedef struct RteUser { + RteHandle handle; +} RteUser; + +typedef struct RteLocalUser { + RteHandle handle; +} RteLocalUser; + +typedef struct RteRemoteUser { + RteHandle handle; +} RteRemoteUser; + +typedef struct RteTrack { + RteHandle handle; +} RteTrack; + +typedef struct RteLocalTrack { + RteHandle handle; +} RteLocalTrack; + +typedef struct RteRemoteTrack { + RteHandle handle; +} RteRemoteTrack; + +typedef struct RteVideoTrack { + RteHandle handle; +} RteVideoTrack; + +typedef struct RteAudioTrack { + RteHandle handle; +} RteAudioTrack; + +typedef struct RteDataTrack { + RteHandle handle; +} RteDataTrack; + +typedef struct RteLocalVideoTrack { + RteHandle handle; +} RteLocalVideoTrack; + +typedef struct 
RteRemoteVideoTrack { + RteHandle handle; +} RteRemoteVideoTrack; + +typedef struct RteCameraVideoTrack { + RteHandle handle; +} RteCameraVideoTrack; + +typedef struct RteMixedVideoTrack { + RteHandle handle; +} RteMixedVideoTrack; + +typedef struct RteScreenVideoTrack { + RteHandle handle; +} RteScreenVideoTrack; + +typedef struct RteLocalAudioTrack { + RteHandle handle; +} RteLocalAudioTrack; + +typedef struct RteRemoteAudioTrack { + RteHandle handle; +} RteRemoteAudioTrack; + +typedef struct RteMicAudioTrack { + RteHandle handle; +} RteMicAudioTrack; + +typedef struct RteLocalDataTrack { + RteHandle handle; +} RteLocalDataTrack; + +typedef struct RteRemoteDataTrack { + RteHandle handle; +} RteRemoteDataTrack; + +typedef struct RteCanvas { + RteHandle handle; +} RteCanvas; + +typedef struct RteAudioDevice { + RteHandle handle; +} RteAudioDevice; + +typedef struct RteVideoDevice { + RteHandle handle; +} RteVideoDevice; + +typedef struct RteRealTimeStream { + RteHandle handle; +} RteRealTimeStream; + +typedef struct RteCdnStream { + RteHandle handle; +} RteCdnStream; + +typedef struct RteLocalStream { + RteHandle handle; +} RteLocalStream; + +typedef struct RteRemoteStream { + RteHandle handle; +} RteRemoteStream; + +typedef struct RteLocalCdnStream { + RteHandle handle; +} RteLocalCdnStream; + +typedef struct RteLocalRealTimeStream { + RteHandle handle; +} RteLocalRealTimeStream; + +typedef struct RteRemoteCdnStream { + RteHandle handle; +} RteRemoteCdnStream; + +typedef struct RteRemoteRealTimeStream { + RteHandle handle; +} RteRemoteRealTimeStream; + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/info.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/info.h new file mode 100644 index 000000000..eff255b9b --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/info.h @@ -0,0 +1,27 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 
Agora IO. All rights reserved. + * + */ +#pragma once + +#include "c_error.h" +#include "common.h" +#include "handle.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteBaseInfo { + Rte rte; +} RteBaseInfo; + +AGORA_RTE_API_C void RteBaseInfoInit(RteBaseInfo *info, RteError *err); + +AGORA_RTE_API_C void RteBaseInfoDeinit(RteBaseInfo *info, RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/log.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/log.h new file mode 100644 index 000000000..8cd54c65b --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/log.h @@ -0,0 +1,48 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include + +#include "common.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct Rte Rte; +typedef struct RteError RteError; + +typedef enum RteLogLevel { + kRteLogInfo, + kRteLogWarn, + kRteLogError, + kRteLogFatal, +} RteLogLevel; + +AGORA_RTE_API_C void RteLog(Rte *self, RteLogLevel level, const char *func, + const char *file, size_t line, RteError *err, + const char *fmt, ...); + +#define RTE_LOG(level, ...) \ + RteLog(NULL, level, __func__, __FILE__, __LINE__, NULL, __VA_ARGS__) + +#define RTE_LOGI(...) \ + RteLog(NULL, kRteLogInfo, __func__, __FILE__, __LINE__, NULL, __VA_ARGS__) + +#define RTE_LOGW(...) \ + RteLog(NULL, kRteLogWarn, __func__, __FILE__, __LINE__, NULL, __VA_ARGS__) + +#define RTE_LOGE(...) \ + RteLog(NULL, kRteLogError, __func__, __FILE__, __LINE__, NULL, __VA_ARGS__) + +#define RTE_LOGF(...) 
\ + RteLog(NULL, kRteLogFatal, __func__, __FILE__, __LINE__, NULL, __VA_ARGS__) + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/metadata.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/metadata.h new file mode 100644 index 000000000..4ed3ea89b --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/metadata.h @@ -0,0 +1,92 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include +#include +#include + +#include "c_error.h" +#include "common.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteMetadataConfig { + bool record_ts; + bool has_record_ts; + + bool record_owner; + bool has_record_owner; + + RteString *lock_name; + bool has_lock_name; + + RteString *json_parameter; + bool has_json_parameter; +} RteMetadataConfig; + +typedef struct RteMetadataItem { + RteString *key; + RteString *value; + RteString *author; + int64_t revision; + int64_t update_timestamp; +} RteMetadataItem; + +typedef struct RteMetadata { + char placeholder; +} RteMetadata; + +AGORA_RTE_API_C void RteMetadataConfigInit(RteMetadataConfig *config, + RteError *err); +AGORA_RTE_API_C void RteMetadataConfigDeinit(RteMetadataConfig *config, + RteError *err); + +AGORA_RTE_API_C void RteMetadataConfigSetRecordTs(RteMetadataConfig *self, + bool record_ts, RteError *err); +AGORA_RTE_API_C void RteMetadataConfigGetRecordTs(RteMetadataConfig *self, + bool *record_ts, + RteError *err); + +AGORA_RTE_API_C void RteMetadataConfigSetRecordOwner(RteMetadataConfig *self, + bool record_owner, + RteError *err); +AGORA_RTE_API_C void RteMetadataConfigGetRecordOwner(RteMetadataConfig *self, + bool *record_owner, + RteError *err); + +AGORA_RTE_API_C void RteMetadataConfigSetJsonParameter(RteMetadataConfig *self, + RteString *json_parameter, + RteError *err); +AGORA_RTE_API_C void 
RteMetadataConfigGetJsonParameter(RteMetadataConfig *self, + RteString *json_parameter, + RteError *err); + +AGORA_RTE_API_C void RteMetadataItemInit(RteMetadataItem *self, RteError *err); +AGORA_RTE_API_C void RteMetadataItemDeinit(RteMetadataItem *self, RteError *err); + +AGORA_RTE_API_C void RteMetadataInit(RteMetadata *self, RteError *err); +AGORA_RTE_API_C void RteMetadataDeinit(RteMetadata *self, RteError *err); + +AGORA_RTE_API_C void RteMetadataSetRevision(RteMetadata *self, int64_t revision, + RteError *err); +AGORA_RTE_API_C void RteMetadataGetRevision(RteMetadata *self, int64_t *revision, + RteError *err); + +AGORA_RTE_API_C void RteMetadataClear(RteMetadata *self, RteError *err); + +AGORA_RTE_API_C void RteMetadataAddItem(RteMetadata *self, RteMetadataItem *item, + RteError *err); +AGORA_RTE_API_C size_t RteMetadataGetItems(RteMetadata *self, + RteMetadataItem *items, + size_t items_cnt, RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/observer.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/observer.h new file mode 100644 index 000000000..bae44c60c --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/observer.h @@ -0,0 +1,20 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteBaseObserver { + void *event_src; + void *me_in_target_lang; +} RteBaseObserver; + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/old.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/old.h new file mode 100644 index 000000000..b09168071 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/old.h @@ -0,0 +1,112 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. 
All rights reserved. + * + */ +#pragma once + +#include + +#include "common.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +struct RtcStats {}; + +enum USER_OFFLINE_REASON_TYPE {}; + +struct IRtcEngineEventHandler { + // When a local user successfully joins the channel, this callback is + // triggered. + virtual void onJoinChannelSuccess(const char *channel, uid_t uid, + int elapsed); + + // When the local host successfully leaves the channel, this callback is + // triggered. + virtual void onLeaveChannel(const RtcStats &stat); + + // When a remote host successfully joins the channel, this callback is + // triggered. Upon receiving this callback, you need to immediately call + // setupRemoteVideo to set up the remote host's view. + virtual void onUserJoined(uid_t uid, int elapsed); + + // When a remote host leaves the channel or disconnects, this callback is + // triggered. + virtual void onUserOffline(uid_t uid, USER_OFFLINE_REASON_TYPE reason); +}; + +// ē¹¼ę‰æ IRtcEngineEventHandler 锞äø­ēš„å›žå‘¼čˆ‡äŗ‹ä»¶ +struct SampleEngineEventHandler : public IRtcEngineEventHandler { + SampleEngineEventHandler() = default; + virtual ~SampleEngineEventHandler() = default; + + SampleEngineEventHandler(const SampleEngineEventHandler &) = delete; + SampleEngineEventHandler &operator=(const SampleEngineEventHandler &) = + delete; + + SampleEngineEventHandler(SampleEngineEventHandler &&) = delete; + SampleEngineEventHandler &operator=(SampleEngineEventHandler &&) = delete; +}; + +struct RtcEngineContext { + IRtcEngineEventHandler *eventHandler; + std::string appId; +}; + +enum RENDER_MODE_TYPE { + RENDER_MODE_HIDDEN, + RENDER_MODE_FIT, +}; + +struct VideoCanvas { + uid_t uid; + void *view; + RENDER_MODE_TYPE renderMode; +}; + +enum VIDEO_CODEC_TYPE { VIDEO_CODEC_SOME }; + +struct VideoEncoderConfiguration { + VIDEO_CODEC_TYPE codecType; +}; + +enum CHANNEL_PROFILE { CHANNEL_PROFILE_LIVE_BROADCASTING }; + +enum CLIENT_ROLE_TYPE { CLIENT_ROLE_TYPE_BROADCASTER }; + 
+struct ChannelMediaOptions { + CHANNEL_PROFILE channelProfile; + CLIENT_ROLE_TYPE clientRoleType; + + bool autoSubscribeAudio; + bool autoSubscribeVideo; +}; + +struct IRtcEngine { + void initialize(RtcEngineContext &ctx); + void release(bool some_param); + + void enableVideo(); + void disableVideo(); + + void startPreview(); + void stopPreview(); + + void setupLocalVideo(VideoCanvas &canvas); + void setupRemoteVideo(VideoCanvas &canvas); + + void setVideoEncoderConfiguration(VideoEncoderConfiguration &config); + + int joinChannel(const char *app_id, const char *channel_name, int uid, + ChannelMediaOptions &options); + int leaveChannel(); +}; + +AGORA_RTE_API_C IRtcEngine *createAgoraRtcEngine(); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/options.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/options.h new file mode 100644 index 000000000..5c1cc3aaa --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/options.h @@ -0,0 +1,22 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +/** + * @note This document lists functions that are for internal use and not part of + * the public API. + */ + +namespace rte_api_internal {} // namespace rte_api_internal + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/cdn_stream.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/cdn_stream.h new file mode 100644 index 000000000..21577f4b2 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/cdn_stream.h @@ -0,0 +1,36 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include "rte_base/c/c_error.h" +#include "rte_base/c/common.h" +#include "rte_base/c/stream/stream.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteCdnStreamConfig { + RteStreamConfig stream_config; +} RteCdnStreamConfig; + +typedef struct RteCdnStreamStats { + RteStreamStats stream_stats; +} RteCdnStreamStats; + +typedef struct RteCdnStreamInfo { + RteStreamInfo stream_info; +} RteCdnStreamInfo; + +AGORA_RTE_API_C void RteCdnStreamStatsInit(RteCdnStreamStats *stats, + RteError *err); +AGORA_RTE_API_C void RteCdnStreamStatsDeinit(RteCdnStreamStats *stats, + RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/local_cdn_stream.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/local_cdn_stream.h new file mode 100644 index 000000000..463c37310 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/local_cdn_stream.h @@ -0,0 +1,59 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include "rte_base/c/common.h" +#include "rte_base/c/stream/cdn_stream.h" +#include "rte_base/c/stream/local_stream.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteUser RteUser; +typedef struct RteTrack RteTrack; +typedef struct Rte Rte; +typedef struct RteChannel RteChannel; + +typedef struct RteLocalCdnStreamConfig { + RteCdnStreamConfig cdn_stream_config; + RteLocalStreamConfig local_stream_config; +} RteLocalCdnStreamConfig; + +AGORA_RTE_API_C void RteLocalCdnStreamConfigInit(RteLocalCdnStreamConfig *config, + RteError *err); +AGORA_RTE_API_C void RteLocalCdnStreamConfigDeinit( + RteLocalCdnStreamConfig *config, RteError *err); + +AGORA_RTE_API_C void RteLocalCdnStreamConfigSetUrl(RteLocalCdnStreamConfig *self, + RteString *url, + RteError *err); +AGORA_RTE_API_C void RteLocalCdnStreamConfigGetUrl(RteLocalCdnStreamConfig *self, + RteString *url, + RteError *err); + +AGORA_RTE_API_C void RteLocalCdnStreamConfigSetJsonParameter( + RteLocalCdnStreamConfig *self, RteString *json_parameter, RteError *err); +AGORA_RTE_API_C void RteLocalCdnStreamConfigGetJsonParameter( + RteLocalCdnStreamConfig *self, RteString *json_parameter, RteError *err); + +AGORA_RTE_API_C RteLocalCdnStream RteLocalCdnStreamCreate( + Rte *rte, RteLocalCdnStreamConfig *config, RteError *err); +AGORA_RTE_API_C void RteLocalCdnStreamDestroy(RteLocalCdnStream *self, + RteError *err); + +AGORA_RTE_API_C void RteLocalCdnStreamGetConfigs(RteLocalCdnStream *self, + RteLocalCdnStreamConfig *config, + RteError *err); +AGORA_RTE_API_C void RteLocalCdnStreamSetConfigs( + RteLocalCdnStream *self, RteLocalCdnStreamConfig *config, + void (*cb)(RteLocalCdnStream *stream, void *cb_data, RteError *err), + void *cb_data); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/local_realtime_stream.h 
b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/local_realtime_stream.h new file mode 100644 index 000000000..754a626d0 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/local_realtime_stream.h @@ -0,0 +1,33 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include "rte_base/c/common.h" +#include "rte_base/c/stream/local_stream.h" +#include "rte_base/c/stream/realtime_stream.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct Rte Rte; +typedef struct RteChannel RteChannel; + +typedef struct RteLocalRealTimeStreamConfig { + RteRealTimeStreamConfig realtime_stream_config; + RteLocalStreamConfig local_stream_config; +} RteLocalRealTimeStreamConfig; + +AGORA_RTE_API_C RteLocalRealTimeStream RteLocalRealTimeStreamCreate( + Rte *rte, RteLocalRealTimeStreamConfig *config, RteError *err); + +AGORA_RTE_API_C void RteLocalRealTimeStreamDestroy(RteLocalRealTimeStream *self, + RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/local_stream.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/local_stream.h new file mode 100644 index 000000000..c07abb643 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/local_stream.h @@ -0,0 +1,38 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include "rte_base/c/c_error.h" +#include "rte_base/c/common.h" +#include "rte_base/c/stream/stream.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteLocalStreamConfig { + RteStreamConfig stream_config; +} RteLocalStreamConfig; + +typedef struct RteLocalStreamStats { + RteStreamStats stream_stats; +} RteLocalStreamStats; + +typedef struct RteLocalStreamInfo { + RteStreamInfo stream_info; + bool has_subscribed; +} RteLocalStreamInfo; + +AGORA_RTE_API_C void RteLocalStreamStatsInit(RteLocalStreamStats *stats, + RteError *err); + +AGORA_RTE_API_C void RteLocalStreamStatsDeinit(RteLocalStreamStats *stats, + RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/realtime_stream.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/realtime_stream.h new file mode 100644 index 000000000..2f36bae12 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/realtime_stream.h @@ -0,0 +1,37 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include "rte_base/c/c_error.h" +#include "rte_base/c/common.h" +#include "rte_base/c/stream/stream.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteRealTimeStreamConfig { + RteStreamConfig stream_config; +} RteRealTimeStreamConfig; + +typedef struct RteRealTimeStreamStats { + RteStreamStats stream_stats; +} RteRealTimeStreamStats; + +typedef struct RteRealTimeStreamInfo { + RteStreamInfo stream_info; +} RteRealTimeStreamInfo; + +AGORA_RTE_API_C void RteRealTimeStreamStatsInit(RteRealTimeStreamStats *stats, + RteError *err); + +AGORA_RTE_API_C void RteRealTimeStreamStatsDeinit(RteRealTimeStreamStats *stats, + RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/remote_cdn_stream.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/remote_cdn_stream.h new file mode 100644 index 000000000..d935f2eea --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/remote_cdn_stream.h @@ -0,0 +1,54 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include "rte_base/c/common.h" +#include "rte_base/c/stream/cdn_stream.h" +#include "rte_base/c/stream/remote_stream.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteUser RteUser; +typedef struct RteTrack RteTrack; + +typedef struct RteRemoteCdnStreamConfig { + RteRemoteStreamConfig remote_stream_config; + RteCdnStreamConfig cdn_stream_config; +} RteRemoteCdnStreamConfig; + +AGORA_RTE_API_C void RteRemoteCdnStreamConfigInit( + RteRemoteCdnStreamConfig *config, RteError *err); +AGORA_RTE_API_C void RteRemoteCdnStreamConfigDeinit( + RteRemoteCdnStreamConfig *config, RteError *err); + +AGORA_RTE_API_C void RteRemoteCdnStreamConfigSetUrl( + RteRemoteCdnStreamConfig *self, RteString *url, RteError *err); +AGORA_RTE_API_C void RteRemoteCdnStreamConfigGetUrl( + RteRemoteCdnStreamConfig *self, RteString *url, RteError *err); + +AGORA_RTE_API_C void RteRemoteCdnStreamConfigSetJsonParameter( + RteRemoteCdnStreamConfig *self, RteString *json_parameter, RteError *err); +AGORA_RTE_API_C void RteRemoteCdnStreamConfigGetJsonParameter( + RteRemoteCdnStreamConfig *self, RteString *json_parameter, RteError *err); + +AGORA_RTE_API_C RteRemoteCdnStream RteRemoteCdnStreamCreate(Rte *self, + RteError *err); +AGORA_RTE_API_C void RteRemoteCdnStreamDestroy(RteRemoteCdnStream *self, + RteError *err); + +AGORA_RTE_API_C void RteRemoteCdnStreamGetConfigs( + RteRemoteCdnStream *self, RteRemoteCdnStreamConfig *config, RteError *err); +AGORA_RTE_API_C void RteRemoteCdnStreamSetConfigs( + RteRemoteCdnStream *self, RteRemoteCdnStreamConfig *config, + void (*cb)(RteRemoteCdnStream *stream, void *cb_data, RteError *err), + void *cb_data); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/remote_realtime_stream.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/remote_realtime_stream.h new file mode 100644 index 000000000..0062d69e8 --- 
/dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/remote_realtime_stream.h @@ -0,0 +1,32 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include "rte_base/c/common.h" +#include "rte_base/c/stream/realtime_stream.h" +#include "rte_base/c/stream/remote_stream.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct Rte Rte; +typedef struct RteChannel RteChannel; + +typedef struct RteRemoteRealTimeStreamConfig { + RteRemoteStreamConfig remote_stream_config; + RteRealTimeStreamConfig realtime_stream_config; +} RteRemoteRealTimeStreamConfig; + +AGORA_RTE_API_C RteRemoteRealTimeStream RteRemoteRealTimeStreamCreate( + Rte *rte, RteRemoteRealTimeStreamConfig *config, RteError *err); +AGORA_RTE_API_C void RteRemoteRealTimeStreamDestroy( + RteRemoteRealTimeStream *self, RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/remote_stream.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/remote_stream.h new file mode 100644 index 000000000..3564604fe --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/remote_stream.h @@ -0,0 +1,49 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include <stddef.h> + +#include "rte_base/c/c_error.h" +#include "rte_base/c/common.h" +#include "rte_base/c/stream/stream.h" +#include "rte_base/c/track/track.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteRemoteStreamConfig { + RteStreamConfig stream_config; +} RteRemoteStreamConfig; + +typedef struct RteRemoteStreamStats { + RteStreamStats stream_stats; +} RteRemoteStreamStats; + +typedef struct RteRemoteStreamInfo { + RteStreamInfo stream_info; + + bool has_audio; + bool has_video; + bool has_data; + RteTrackSrc audio_track_src; + RteTrackSrc video_track_src; + RteTrackSrc audio_track_src_original; + RteTrackSrc video_track_src_original; + RteString *data_track_topics; + size_t data_track_topic_cnt; +} RteRemoteStreamInfo; + +AGORA_RTE_API_C void RteRemoteStreamStatsInit(RteRemoteStreamStats *stats, + RteError *err); +AGORA_RTE_API_C void RteRemoteStreamStatsDeinit(RteRemoteStreamStats *stats, + RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/stream.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/stream.h new file mode 100644 index 000000000..ab1f035d4 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/stream/stream.h @@ -0,0 +1,289 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved.
+ * + */ +#pragma once + +#include + +#include "rte_base/c/c_error.h" +#include "rte_base/c/handle.h" +#include "rte_base/c/observer.h" +#include "rte_base/c/common.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteChannel RteChannel; +typedef struct Rte Rte; +typedef struct RteStream RteStream; + +typedef struct RteStreamObserver RteStreamObserver; +struct RteStreamObserver { + RteBaseObserver base_observer; +}; + +typedef enum RteStreamType { + kRteStreamTypeRealTime, + kRteStreamTypeCdn, +} RteStreamType; + +typedef enum RteEncryptionMode { + kRteEncryptionModeAes128Xts, + kRteEncryptionModeAes128Ecb, + kRteEncryptionModeAes128Gcm, + kRteEncryptionModeAes128Gcm2, + kRteEncryptionModeAes256Xts, + kRteEncryptionModeAes256Gcm, + kRteEncryptionModeAes256Gcm2, + kRteEncryptionModeSm4128Ecb, +} RteEncryptionMode; + +typedef enum RteAudioEncoderProfile { + kRteAudioEncoderProfileDefault, + kRteAudioEncoderProfileStdSpeech, + kRteAudioEncoderProfileStdMusic, + kRteAudioEncoderProfileStdStereoMusic, + kRteAudioEncoderProfileHighQualityMusic, + kRteAudioEncoderProfileHighQualityStereoMusic, + kRteAudioEncoderProfileIot, +} RteAudioEncoderProfile; + +typedef enum RteOrientationMode { + kRteOrientationModeAdaptive, + kRteOrientationModeFixedLandscape, + kRteOrientationModeFixedPortrait, +} RteOrientationMode; + +typedef enum RteVideoDegradationPreference { + kRteVideoDegradationPreferenceMaintainFramerate, + kRteVideoDegradationPreferenceMaintainBalanced, + kRteVideoDegradationPreferenceMaintainResolution, + kRteVideoDegradationPreferenceDisabled, +} RteVideoDegradationPreference; + +typedef enum RteVideoMirrorMode { + kRteVideoMirrorModeAuto, + kRteVideoMirrorModeEnabled, + kRteVideoMirrorModeDisabled, +} RteVideoMirrorMode; + +typedef struct RteStreamConfig { + RteStreamType type; + bool has_type; + + RteString *stream_id; + bool has_stream_id; + + RteEncryptionMode encryption_mode; + bool has_encryption_mode; + + RteString 
*encryption_key; + bool has_encryption_key; + + uint8_t encryption_kdf_salt[32]; + bool has_encryption_kdf_salt; + + RteAudioEncoderProfile audio_encoder_profile; + bool has_audio_encoder_profile; + + uint32_t width; + bool has_width; + + uint32_t height; + bool has_height; + + uint32_t frame_rate; + bool has_frame_rate; + + uint32_t min_bitrate; + bool has_min_bitrate; + + RteOrientationMode orientation_mode; + bool has_orientation_mode; + + RteVideoDegradationPreference degradation_preference; + bool has_degradation_preference; + + RteVideoMirrorMode mirror_mode; + bool has_mirror_mode; + + bool che_hw_decoding; + bool has_che_hw_decoding; + + RteString *json_parameter; + bool has_json_parameter; +} RteStreamConfig; + +typedef struct RteStreamStats { + int placeholder; +} RteStreamStats; + +typedef struct RteStreamInfo { + RteChannel channel; + Rte rte; +} RteStreamInfo; + +typedef struct RteStream { + RteHandle handle; +} RteStream; + +// @{ +// Observer +AGORA_RTE_API_C RteStreamObserver *RteStreamObserverCreate(RteError *err); +AGORA_RTE_API_C void RteStreamObserverDestroy(RteStreamObserver *observer, + RteError *err); +// @} + +// @{ +// Config +AGORA_RTE_API_C void RteStreamConfigInit(RteStreamConfig *config, RteError *err); +AGORA_RTE_API_C void RteStreamConfigDeinit(RteStreamConfig *config, + RteError *err); + +AGORA_RTE_API_C void RteStreamConfigSetChannel(RteStreamConfig *self, + RteChannel *channel, + RteError *err); + +AGORA_RTE_API_C void RteStreamConfigSetStreamType(RteStreamConfig *self, + RteStreamType type, + RteError *err); +AGORA_RTE_API_C void RteStreamConfigGetStreamType(RteStreamConfig *self, + RteStreamType *type, + RteError *err); + +AGORA_RTE_API_C void RteStreamConfigSetStreamId(RteStreamConfig *self, + RteString *stream_id, + RteError *err); +AGORA_RTE_API_C void RteStreamConfigGetStreamId(RteStreamConfig *self, + RteString *stream_id, + RteError *err); + +AGORA_RTE_API_C void RteStreamConfigSetEncryptionMode(RteStreamConfig *self, + 
RteEncryptionMode mode, + RteError *err); +AGORA_RTE_API_C void RteStreamConfigGetEncryptionMode(RteStreamConfig *self, + RteEncryptionMode *mode, + RteError *err); + +AGORA_RTE_API_C void RteStreamConfigSetEncryptionKey(RteStreamConfig *self, + RteString *encryption_key, + RteError *err); +AGORA_RTE_API_C void RteStreamConfigGetEncryptionKey(RteStreamConfig *self, + RteString *encryption_key, + RteError *err); + +AGORA_RTE_API_C void RteStreamConfigSetEncryptionKdfSalt( + RteStreamConfig *self, uint8_t *encryption_kdf_salt, RteError *err); +AGORA_RTE_API_C void RteStreamConfigGetEncryptionKdfSalt( + RteStreamConfig *self, uint8_t *encryption_kdf_salt, RteError *err); + +AGORA_RTE_API_C void RteStreamConfigSetAudioEncoderProfile( + RteStreamConfig *self, RteAudioEncoderProfile profile, RteError *err); +AGORA_RTE_API_C void RteStreamConfigGetAudioEncoderProfile( + RteStreamConfig *self, RteAudioEncoderProfile *profile, RteError *err); + +AGORA_RTE_API_C void RteStreamConfigSetWidth(RteStreamConfig *self, + uint32_t width, RteError *err); +AGORA_RTE_API_C void RteStreamConfigGetWidth(RteStreamConfig *self, + uint32_t *width, RteError *err); +AGORA_RTE_API_C void RteStreamConfigSetHeight(RteStreamConfig *self, + uint32_t height, RteError *err); +AGORA_RTE_API_C void RteStreamConfigGetHeight(RteStreamConfig *self, + uint32_t *height, RteError *err); + +AGORA_RTE_API_C void RteStreamConfigSetFrameRate(RteStreamConfig *self, + uint32_t frame_rate, + RteError *err); +AGORA_RTE_API_C void RteStreamConfigGetFrameRate(RteStreamConfig *self, + uint32_t *frame_rate, + RteError *err); + +AGORA_RTE_API_C void RteStreamConfigSetMinBitRate(RteStreamConfig *self, + uint32_t min_bitrate, + RteError *err); +AGORA_RTE_API_C void RteStreamConfigGetMinBitRate(RteStreamConfig *self, + uint32_t *min_bitrate, + RteError *err); + +AGORA_RTE_API_C void RteStreamConfigSetOrientationMode( + RteStreamConfig *self, RteOrientationMode orientation_mode, RteError *err); +AGORA_RTE_API_C void 
RteStreamConfigGetOrientationMode( + RteStreamConfig *self, RteOrientationMode *orientation_mode, RteError *err); + +AGORA_RTE_API_C void RteStreamConfigSetVideoDegradationPreference( + RteStreamConfig *self, RteVideoDegradationPreference degradation_preference, + RteError *err); +AGORA_RTE_API_C void RteStreamConfigGetVideoDegradationPreference( + RteStreamConfig *self, + RteVideoDegradationPreference *degradation_preference, RteError *err); + +AGORA_RTE_API_C void RteStreamConfigSetVideoMirrorMode( + RteStreamConfig *self, RteVideoMirrorMode mirror_mode, RteError *err); +AGORA_RTE_API_C void RteStreamConfigGetVideoMirrorMode( + RteStreamConfig *self, RteVideoMirrorMode *mirror_mode, RteError *err); + +AGORA_RTE_API_C void RteStreamConfigSetCheHwDecoding(RteStreamConfig *self, + bool che_hw_decoding, + RteError *err); +AGORA_RTE_API_C void RteStreamConfigGetCheHwDecoding(RteStreamConfig *self, + bool *che_hw_decoding, + RteError *err); + +AGORA_RTE_API_C void RteStreamConfigSetJsonParameter(RteStreamConfig *self, + RteString *json_parameter, + RteError *err); +AGORA_RTE_API_C void RteStreamConfigGetJsonParameter(RteStreamConfig *self, + RteString *json_parameter, + RteError *err); +// @} + +// @{ +// Info +AGORA_RTE_API_C void RteStreamInfoInit(RteStreamInfo *info, RteError *err); +AGORA_RTE_API_C void RteStreamInfoDeinit(RteStreamInfo *info, RteError *err); +// @} + +AGORA_RTE_API_C void RteStreamGetInfo(RteStream *self, RteStreamInfo *info, + RteError *err); + +AGORA_RTE_API_C void RteStreamSetConfigs( + RteStream *self, RteStreamConfig *config, + void (*cb)(RteStream *stream, void *cb_data, RteError *err), void *cb_data); + +AGORA_RTE_API_C bool RteStreamRegisterObserver( + RteStream *self, RteStreamObserver *observer, RteError *err); + +AGORA_RTE_API_C RteAudioTrack RteStreamGetAudioTrack(RteStream *self, + RteError *err); +AGORA_RTE_API_C void RteStreamAddAudioTrack(RteStream *self, + RteAudioTrack *audio_track, + RteError *err); +AGORA_RTE_API_C void 
RteStreamDelAudioTrack(RteStream *self, + RteAudioTrack *audio_track, + RteError *err); + +AGORA_RTE_API_C RteVideoTrack RteStreamGetVideoTrack(RteStream *self, + RteError *err); +AGORA_RTE_API_C void RteStreamAddVideoTrack(RteStream *self, + RteVideoTrack *video_track, + RteError *err); +AGORA_RTE_API_C void RteStreamDelVideoTrack(RteStream *self, + RteVideoTrack *video_track, + RteError *err); + +AGORA_RTE_API_C RteDataTrack RteStreamGetDataTrack(RteStream *self, + RteError *err); +AGORA_RTE_API_C void RteStreamAddDataTrack(RteStream *self, + RteDataTrack *data_track, + RteError *err); +AGORA_RTE_API_C void RteStreamDelDataTrack(RteStream *self, + RteDataTrack *data_track, + RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/camera_video_track.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/camera_video_track.h new file mode 100644 index 000000000..bc96461f8 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/camera_video_track.h @@ -0,0 +1,34 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include "rte_base/c/handle.h" +#include "rte_base/c/common.h" +#include "rte_base/c/track/local_video_track.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct Rte Rte; + +typedef struct RteCameraVideoTrackInitialConfig { + RteLocalVideoTrackInitialConfig local_video_track_initial_config; +} RteCameraVideoTrackInitialConfig; + +typedef struct RteCameraVideoTrackConfig { + RteLocalVideoTrackConfig local_video_track_config; +} RteCameraVideoTrackConfig; + +AGORA_RTE_API_C RteCameraVideoTrack RteCameraVideoTrackCreate( + Rte *rte, RteCameraVideoTrackConfig *config, RteError *err); +AGORA_RTE_API_C void RteCameraVideoTrackDestroy(RteCameraVideoTrack *self, + RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/canvas.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/canvas.h new file mode 100644 index 000000000..d4358fb6e --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/canvas.h @@ -0,0 +1,87 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include "rte_base/c/handle.h" +#include "rte_base/c/common.h" +#include "rte_base/c/track/view.h" +#include "rte_base/c/stream/stream.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteViewConfig RteViewConfig; + + +typedef enum RteVideoRenderMode { + kRteVideoRenderModeHidden, + kRteVideoRenderModeFit +} RteVideoRenderMode; + +typedef struct RteCanvasInitialConfig { + char placeholder; +} RteCanvasInitialConfig; + + +typedef struct RteCanvasConfig { + + RteVideoRenderMode render_mode; + bool has_render_mode; + + RteVideoMirrorMode mirror_mode; + bool has_mirror_mode; + + RteRect crop_area; + bool has_crop_area; +} RteCanvasConfig; + + +// @{ +// InitialConfig +AGORA_RTE_API_C void RteCanvasInitialConfigInit(RteCanvasInitialConfig *config, + RteError *err); +AGORA_RTE_API_C void RteCanvasInitialConfigDeinit(RteCanvasInitialConfig *config, + RteError *err); +// @} + +// @{ +// Config +AGORA_RTE_API_C void RteCanvasConfigInit(RteCanvasConfig *config, RteError *err); +AGORA_RTE_API_C void RteCanvasConfigDeinit(RteCanvasConfig *config, + RteError *err); + +AGORA_RTE_API_C void RteCanvasConfigSetVideoRenderMode(RteCanvasConfig *self, RteVideoRenderMode render_mode, RteError *err); + +AGORA_RTE_API_C void RteCanvasConfigGetVideoRenderMode(RteCanvasConfig *self, RteVideoRenderMode *render_mode, RteError *err); + +AGORA_RTE_API_C void RteCanvasConfigSetVideoMirrorMode(RteCanvasConfig *self, RteVideoMirrorMode mirror_mode, RteError *err); + +AGORA_RTE_API_C void RteCanvasConfigGetVideoMirrorMode(RteCanvasConfig *self, RteVideoMirrorMode *mirror_mode, RteError *err); + +AGORA_RTE_API_C void RteCanvasConfigSetCropArea(RteCanvasConfig *self, RteRect crop_area, RteError *err); + +AGORA_RTE_API_C void RteCanvasConfigGetCropArea(RteCanvasConfig *self, RteRect *crop_area, RteError *err); + +// @} + +AGORA_RTE_API_C RteCanvas RteCanvasCreate(::Rte *rte, RteCanvasInitialConfig *config, + RteError *err); +AGORA_RTE_API_C void 
RteCanvasDestroy(RteCanvas *self, RteError *err); + +AGORA_RTE_API_C bool RteCanvasGetConfigs(RteCanvas *self, + RteCanvasConfig *config, RteError *err); +AGORA_RTE_API_C bool RteCanvasSetConfigs(RteCanvas *self, RteCanvasConfig *config, RteError *err); + +AGORA_RTE_API_C bool RteCanvasAddView( + RteCanvas *self, RteView *view, RteViewConfig *config, RteError *err); + +AGORA_RTE_API_C bool RteCanvasRemoveView(RteCanvas *self, RteView *view, RteViewConfig *config, RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/layout.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/layout.h new file mode 100644 index 000000000..7ff31ee76 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/layout.h @@ -0,0 +1,19 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteLayout { + char placeholder; +} RteLayout; + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/local_audio_track.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/local_audio_track.h new file mode 100644 index 000000000..4c76891d5 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/local_audio_track.h @@ -0,0 +1,101 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include "rte_base/c/common.h" +#include "rte_base/c/track/local_track.h" +#include "rte_base/c/utils/frame.h" +#include "rte_base/c/utils/string.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteLocalAudioTrackConfig { + RteLocalTrackConfig local_track_config; +} RteLocalAudioTrackConfig; + +typedef struct RteLocalAudioTrackObserver RteLocalAudioTrackObserver; +struct RteLocalAudioTrackObserver { + RteLocalTrackObserver local_track_observer; + + void (*on_frame)(RteLocalAudioTrackObserver *self, + RteAudioFrame *audio_frame); +}; + +typedef struct RteLocalAudioTrackInfo { + RteLocalTrackInfo local_track_info; +} RteLocalAudioTrackInfo; + +// @{ +// Config +AGORA_RTE_API_C void RteLocalAudioTrackConfigInit( + RteLocalAudioTrackConfig *config, RteError *err); +AGORA_RTE_API_C void RteLocalAudioTrackConfigDeinit( + RteLocalAudioTrackConfig *config, RteError *err); + +AGORA_RTE_API_C void RteLocalAudioTrackConfigSetPublishVolume( + RteLocalAudioTrackConfig *self, uint32_t volume, RteError *err); +AGORA_RTE_API_C void RteLocalAudioTrackConfigGetPublishVolume( + RteLocalAudioTrackConfig *self, uint32_t *volume, RteError *err); + +AGORA_RTE_API_C void RteLocalAudioTrackConfigSetLoopbackVolume( + RteLocalAudioTrackConfig *self, uint32_t volume, RteError *err); +AGORA_RTE_API_C void RteLocalAudioTrackConfigGetLoopbackVolume( + RteLocalAudioTrackConfig *self, uint32_t *volume, RteError *err); + +AGORA_RTE_API_C void RteLocalAudioTrackConfigSetEnableLoopbackFilter( + RteLocalAudioTrackConfig *self, bool enable_loopback_filter, RteError *err); +AGORA_RTE_API_C void RteLocalAudioTrackConfigGetEnableLoopbackFilter( + RteLocalAudioTrackConfig *self, bool *enable_loopback_filter, + RteError *err); + +AGORA_RTE_API_C void RteLocalAudioTrackConfigSetEnablePublishFilter( + RteLocalAudioTrackConfig *self, bool enable_publish_filter, RteError *err); +AGORA_RTE_API_C void RteLocalAudioTrackConfigGetEnablePublishFilter( + 
RteLocalAudioTrackConfig *self, bool *enable_publish_filter, RteError *err); + +AGORA_RTE_API_C void RteLocalAudioTrackConfigSetJsonParameter( + RteLocalAudioTrackConfig *self, RteString *json_parameter, RteError *err); +AGORA_RTE_API_C void RteLocalAudioTrackConfigGetJsonParameter( + RteLocalAudioTrackConfig *self, RteString *json_parameter, RteError *err); +// @} + +// @{ +// Track observer +AGORA_RTE_API_C RteLocalAudioTrackObserver *RteLocalAudioTrackObserverCreate( + RteError *err); +AGORA_RTE_API_C void RteLocalAudioTrackObserverDestroy( + RteLocalAudioTrackObserver *self, RteError *err); +//} + +AGORA_RTE_API_C void RteLocalAudioTrackInit(RteLocalAudioTrack *self, + RteError *err); +AGORA_RTE_API_C void RteLocalAudioTrackDeinit(RteLocalAudioTrack *self, + RteError *err); + +AGORA_RTE_API_C void RteLocalAudioTrackGetConfigs( + RteLocalAudioTrack *self, RteLocalAudioTrackConfig *config, RteError *err); +AGORA_RTE_API_C void RteLocalAudioTrackSetConfigs( + RteLocalAudioTrack *self, RteLocalAudioTrackConfig *config, + void (*cb)(RteLocalAudioTrack *track, void *cb_data, RteError *err), + void *cb_data); + +AGORA_RTE_API_C void RteLocalAudioTrackEnableLoopback(RteLocalAudioTrack *self, + RteError *err); + +AGORA_RTE_API_C void RteLocalAudioTrackRegisterTrackObserver( + RteLocalAudioTrack *self, RteLocalAudioTrackObserver *observer, + void (*destroyer)(RteLocalAudioTrackObserver *self, RteError *err), + RteError *err); +AGORA_RTE_API_C void RteLocalAudioTrackUnregisterTrackObserver( + RteLocalAudioTrack *self, RteLocalAudioTrackObserver *observer, + RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/local_track.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/local_track.h new file mode 100644 index 000000000..bbba90ee1 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/local_track.h @@ -0,0 +1,56 @@ +/** + * + * 
Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include "rte_base/c/c_error.h" +#include "rte_base/c/common.h" +#include "rte_base/c/handle.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteLocalTrackInitialConfig { + char placeholder; +} RteLocalTrackInitialConfig; + +typedef struct RteLocalTrackConfig { + char placeholder; +} RteLocalTrackConfig; + +typedef struct RteLocalTrackInfo { + char placeholder; +} RteLocalTrackInfo; + +typedef struct RteLocalTrackObserver { + char placeholder; +} RteLocalTrackObserver; + +// @{ +// Info +AGORA_RTE_API_C void RteLocalTrackInfoInit(RteLocalTrackInfo *info, + RteError *err); +AGORA_RTE_API_C void RteLocalTrackInfoDeinit(RteLocalTrackInfo *info, + RteError *err); +//} + +AGORA_RTE_API_C void RteLocalTrackStart(RteLocalTrack *self, + void (*cb)(RteLocalTrack *self, + void *cb_data, RteError *err), + void *cb_data); +AGORA_RTE_API_C void RteLocalTrackStop(RteLocalTrack *self, + void (*cb)(RteLocalTrack *self, + void *cb_data, RteError *err), + void *cb_data); + +AGORA_RTE_API_C void RteLocalTrackGetInfo(RteLocalTrack *self, + RteLocalTrackInfo *info, + RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/local_video_track.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/local_video_track.h new file mode 100644 index 000000000..134347a73 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/local_video_track.h @@ -0,0 +1,25 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include "rte_base/c/track/local_track.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteLocalVideoTrackInitialConfig { + RteLocalTrackInitialConfig local_track_initial_config; +} RteLocalVideoTrackInitialConfig; + +typedef struct RteLocalVideoTrackConfig { + RteLocalTrackConfig local_track_config; +} RteLocalVideoTrackConfig; + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/mic_audio_track.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/mic_audio_track.h new file mode 100644 index 000000000..b73330fd5 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/mic_audio_track.h @@ -0,0 +1,121 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include "rte_base/c/handle.h" +#include "rte_base/c/common.h" +#include "rte_base/c/track/local_audio_track.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef enum RteVoiceBeautifierPreset { + kRteVoiceBeautifierPresetOff, + kRteVoiceBeautifierPresetChatBeautifierMagnetic, + kRteVoiceBeautifierPresetChatBeautifierFresh, + kRteVoiceBeautifierPresetChatBeautifierVitality, + kRteVoiceBeautifierPresetSingingBeautifier, + kRteVoiceBeautifierPresetTimbreTransformationVigorous, + kRteVoiceBeautifierPresetTimbreTransformationDeep, + kRteVoiceBeautifierPresetTimbreTransformationMellow, + kRteVoiceBeautifierPresetTimbreTransformationFalsetto, + kRteVoiceBeautifierPresetTimbreTransformationFull, + kRteVoiceBeautifierPresetTimbreTransformationClear, + kRteVoiceBeautifierPresetTimbreTransformationResounding, + kRteVoiceBeautifierPresetTimbreTransformationRinging, + kRteVoiceBeautifierPresetUltraHighQualityVoice +} RteVoiceBeautifierPreset; + +typedef enum RteAudioEffectPreset { + kRteAudioEffectPresetOff, + kRteAudioEffectPresetKtv, + 
kRteAudioEffectPresetVocalConcert, + kRteAudioEffectPresetStudio, + kRteAudioEffectPresetPhonograph, + kRteAudioEffectPresetVirtualStereo, + kRteAudioEffectPresetSpecial, + kRteAudioEffectPresetEthereal, + kRteAudioEffectPresetAcoustics3DVoice, + kRteAudioEffectPresetVirtualSurroundSound, + kRteAudioEffectPresetVoiceChangerEffectUncle, + kRteAudioEffectPresetVoiceChangerEffectOldMan, + kRteAudioEffectPresetVoiceChangerEffectBoy, + kRteAudioEffectPresetVoiceChangerEffectSister, + kRteAudioEffectPresetVoiceChangerEffectGirl, + kRteAudioEffectPresetVoiceChangerEffectPigKing, + kRteAudioEffectPresetVoiceChangerEffectHulk, + kRteAudioEffectPresetStyleTransformationRnb, + kRteAudioEffectPresetStyleTransformationPopular, + kRteAudioEffectPresetPitchCorrection +} RteAudioEffectPreset; + +typedef enum RteVoiceConversionPreset { + kRteVoiceConversionPresetOff, + kRteVoiceConversionPresetNeutral, + kRteVoiceConversionPresetSweet, + kRteVoiceConversionPresetSolid, + kRteVoiceConversionPresetBass, + kRteVoiceConversionPresetCartoon, + kRteVoiceConversionPresetChildlike, + kRteVoiceConversionPresetPhoneOperator, + kRteVoiceConversionPresetMonster, + kRteVoiceConversionPresetTransformers, + kRteVoiceConversionPresetGroot, + kRteVoiceConversionPresetDarthVader, + kRteVoiceConversionPresetIronLady, + kRteVoiceConversionPresetShinChan, + kRteVoiceConversionPresetGirlishMan, + kRteVoiceConversionPresetChipMunk +} RteVoiceConversionPreset; + +typedef struct RteMicAudioTrackConfig { + RteLocalAudioTrackConfig local_audio_track_config; +} RteMicAudioTrackConfig; + +typedef struct RteMicAudioTrackInfo { + RteLocalAudioTrackInfo local_audio_track_info; +} RteMicAudioTrackInfo; + +// @{ +// Info +AGORA_RTE_API_C void RteMicAudioTrackInfoInit(RteMicAudioTrackInfo *self, + RteError *err); +AGORA_RTE_API_C void RteMicAudioTrackInfoDeinit(RteMicAudioTrackInfo *self, + RteError *err); +//} + +AGORA_RTE_API_C RteMicAudioTrack RteMicAudioTrackCreate( + Rte *self, RteMicAudioTrackConfig *config, 
RteError *err); +AGORA_RTE_API_C void RteMicAudioTrackDestroy(RteMicAudioTrack *self, + RteError *err); + +AGORA_RTE_API_C void RteMicAudioTrackGetConfigs(RteMicAudioTrack *self, + RteMicAudioTrackConfig *config, + RteError *err); +AGORA_RTE_API_C void RteMicAudioTrackSetConfigs( + RteMicAudioTrack *self, RteMicAudioTrackConfig *config, + void (*cb)(RteMicAudioTrack *track, void *cb_data, RteError *err), + void *cb_data); + +AGORA_RTE_API_C void RteMicAudioTrackSetVoiceBeautifierPreset( + RteMicAudioTrack *self, RteVoiceBeautifierPreset preset, + void (*cb)(RteError *err), void *cb_data); +AGORA_RTE_API_C void RteMicAudioTrackSetAudioEffectPreset( + RteMicAudioTrack *self, RteAudioEffectPreset preset, + void (*cb)(RteError *err), void *cb_data); +AGORA_RTE_API_C void RteMicAudioTrackSetVoiceConversionPreset( + RteMicAudioTrack *self, RteVoiceConversionPreset preset, + void (*cb)(RteError *err), void *cb_data); + +AGORA_RTE_API_C void RteMicAudioTrackGetInfo(RteMicAudioTrack *self, + RteMicAudioTrackInfo *info, + RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/mixed_video_track.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/mixed_video_track.h new file mode 100644 index 000000000..bcb98b9cc --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/mixed_video_track.h @@ -0,0 +1,35 @@ +#pragma once + +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#include <stddef.h> + +#include "rte_base/c/handle.h" +#include "rte_base/c/common.h" +#include "rte_base/c/track/local_video_track.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteLayout RteLayout; + +typedef struct RteMixedVideoTrackConfig { + RteLocalVideoTrackConfig local_video_track_config; +} RteMixedVideoTrackConfig; + +AGORA_RTE_API_C size_t +RteMixedVideoTrackGetLayoutsCount(RteMixedVideoTrack *self, RteError *err); +AGORA_RTE_API_C void RteMixedVideoTrackGetLayouts(RteMixedVideoTrack *self, + RteLayout *layouts, + size_t start_idx, + size_t layouts_cnt, + RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/remote_audio_track.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/remote_audio_track.h new file mode 100644 index 000000000..3f163516e --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/remote_audio_track.h @@ -0,0 +1,87 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved.
+ * + */ +#pragma once + +#include <stdint.h> + +#include "rte_base/c/common.h" +#include "rte_base/c/track/remote_track.h" +#include "rte_base/c/utils/frame.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteRemoteAudioTrackConfig { + RteRemoteTrackConfig remote_track_config; + + uint32_t playback_volume; + bool has_playback_volume; +} RteRemoteAudioTrackConfig; + +typedef struct RteRemoteAudioTrackObserver RteRemoteAudioTrackObserver; +struct RteRemoteAudioTrackObserver { + RteRemoteTrackObserver remote_track_observer; + + void (*on_frame)(RteRemoteAudioTrackObserver *self, + RteAudioFrame *audio_frame); +}; + +typedef struct RteRemoteAudioTrackInfo { + RteRemoteTrackInfo remote_track_info; +} RteRemoteAudioTrackInfo; + +// @{ +// Config +AGORA_RTE_API_C void RteRemoteAudioTrackConfigInit( + RteRemoteAudioTrackConfig *config, RteError *err); +AGORA_RTE_API_C void RteRemoteAudioTrackConfigDeinit( + RteRemoteAudioTrackConfig *config, RteError *err); + +AGORA_RTE_API_C void RteRemoteAudioTrackConfigSetPlaybackVolume( + RteRemoteAudioTrackConfig *self, uint32_t volume, RteError *err); +AGORA_RTE_API_C void RteRemoteAudioTrackConfigGetPlaybackVolume( + RteRemoteAudioTrackConfig *self, uint32_t *volume, RteError *err); + +AGORA_RTE_API_C void RteRemoteAudioTrackConfigSetJsonParameter( + RteRemoteAudioTrackConfig *self, RteString *json_parameter, RteError *err); +AGORA_RTE_API_C void RteRemoteAudioTrackConfigGetJsonParameter( + RteRemoteAudioTrackConfig *self, RteString *json_parameter, RteError *err); +// @} + +// @{ +// Track observer +AGORA_RTE_API_C RteRemoteAudioTrackObserver *RteRemoteAudioTrackObserverCreate( + RteError *err); +AGORA_RTE_API_C void RteRemoteAudioTrackObserverDestroy( + RteRemoteAudioTrackObserver *self, RteError *err); +//} + +AGORA_RTE_API_C void RteRemoteAudioTrackInit(RteRemoteAudioTrack *self, + RteError *err); +AGORA_RTE_API_C void RteRemoteAudioTrackDeinit(RteRemoteAudioTrack *self, + RteError *err); + +AGORA_RTE_API_C 
void RteRemoteAudioTrackGetConfigs( + RteRemoteAudioTrack *self, RteRemoteAudioTrackConfig *config, + RteError *err); +AGORA_RTE_API_C void RteRemoteAudioTrackSetConfigs( + RteRemoteAudioTrack *self, RteRemoteAudioTrackConfig *config, + void (*cb)(RteRemoteAudioTrack *track, void *cb_data, RteError *err), + void *cb_data); + +AGORA_RTE_API_C void RteRemoteAudioTrackRegisterTrackObserver( + RteRemoteAudioTrack *self, RteRemoteAudioTrackObserver *observer, + void (*destroyer)(RteRemoteAudioTrackObserver *self, RteError *err), + RteError *err); +AGORA_RTE_API_C void RteRemoteAudioTrackUnregisterTrackObserver( + RteRemoteAudioTrack *self, RteRemoteAudioTrackObserver *observer, + RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/remote_track.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/remote_track.h new file mode 100644 index 000000000..ca7f6cc47 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/remote_track.h @@ -0,0 +1,43 @@ +#pragma once + +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#include "rte_base/c/handle.h" +#include "rte_base/c/common.h" +#include "rte_base/c/c_error.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteRemoteTrackConfig { + char placeholder; +} RteRemoteTrackConfig; + +typedef struct RteRemoteTrackInfo { + char placeholder; +} RteRemoteTrackInfo; + +typedef struct RteRemoteTrackObserver { + char placeholder; +} RteRemoteTrackObserver; + +// @{ +// Info +AGORA_RTE_API_C void RteRemoteTrackInfoInit(RteRemoteTrackInfo *info, + RteError *err); +AGORA_RTE_API_C void RteRemoteTrackInfoDeinit(RteRemoteTrackInfo *info, + RteError *err); +//} + +AGORA_RTE_API_C void RteRemoteTrackGetInfo(RteRemoteTrack *self, + RteRemoteTrackInfo *info, + RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/remote_video_track.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/remote_video_track.h new file mode 100644 index 000000000..273c1c0a3 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/remote_video_track.h @@ -0,0 +1,21 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include "rte_base/c/track/remote_track.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteRemoteVideoTrackConfig { + RteRemoteTrackConfig remote_track_config; +} RteRemoteVideoTrackConfig; + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/screen_video_track.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/screen_video_track.h new file mode 100644 index 000000000..73de05e74 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/screen_video_track.h @@ -0,0 +1,61 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. 
All rights reserved. + * + */ +#pragma once + +#include "rte_base/c/handle.h" +#include "rte_base/c/common.h" +#include "rte_base/c/track/local_video_track.h" +#include "rte_base/c/utils/rect.h" +#include "rte_base/c/utils/string.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef enum RteScreenCaptureType { + kRteScreenCaptureTypeScreen, + kRteScreenCaptureTypeWindow, +} RteScreenCaptureType; + +typedef void *RteWindow; +typedef void *RteMonitor; + +typedef struct RteScreenVideoTrackConfig { + RteLocalVideoTrackConfig local_video_track_config; + + RteScreenCaptureType type; + bool has_type; + + RteWindow window; + bool has_window; + + RteMonitor monitor; + bool has_monitor; + + RteRect rect; + bool has_rect; + + RteString *json_parameter; + bool has_json_parameter; +} RteScreenVideoTrackConfig; + +AGORA_RTE_API_C RteScreenVideoTrack RteScreenVideoTrackCreate( + Rte *rte, RteScreenVideoTrackConfig *config, RteError *err); +AGORA_RTE_API_C void RteScreenVideoTrackDestroy(RteScreenVideoTrack *self, + RteError *err); + +AGORA_RTE_API_C void RteScreenVideoTrackGetConfigs( + RteScreenVideoTrack *self, RteScreenVideoTrackConfig *config, + RteError *err); +AGORA_RTE_API_C void RteScreenVideoTrackSetConfigs( + RteScreenVideoTrack *self, RteScreenVideoTrackConfig *config, + void (*cb)(RteScreenVideoTrack *track, void *cb_data, RteError *err), + void *cb_data); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/track.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/track.h new file mode 100644 index 000000000..3d45a2017 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/track.h @@ -0,0 +1,54 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include "rte_base/c/handle.h" +#include "rte_base/c/common.h" +#include "rte_base/c/c_error.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteStream RteStream; + +typedef enum RteTrackMediaType { + kRteTrackMediaTypeAudio, + kRteTrackMediaTypeVideo, + kRteTrackMediaTypeData +} RteTrackMediaType; + +typedef enum RteTrackSrc { + kRteTrackSrcUnknown, + kRteTrackSrcMix, + kRteTrackSrcNetwork, + kRteTrackSrcMicrophone, + kRteTrackSrcLoopbackRecording, + kRteTrackSrcCamera, + kRteTrackSrcScreen, + kRteTrackSrcCustom, +} RteTrackSrc; + +typedef struct RteTrackConfig { + char placeholder; +} RteTrackConfig; + +typedef struct RteTrackInfo { + RteStream *stream; +} RteTrackInfo; + +// @{ +// Info +AGORA_RTE_API_C void RteTrackInfoInit(RteTrackInfo *info, RteError *err); +AGORA_RTE_API_C void RteTrackInfoDeinit(RteTrackInfo *info, RteError *err); +//} + +AGORA_RTE_API_C void RteTrackGetInfo(RteTrack *self, RteTrackInfo *info, RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/video_track.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/video_track.h new file mode 100644 index 000000000..9559651e2 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/video_track.h @@ -0,0 +1,37 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include "rte_base/c/handle.h" +#include "rte_base/c/common.h" +#include "rte_base/c/track/track.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef enum RteVideoPipelinePosition { + kRteVideoPipelinePositionLocalPostCapturer, + kRteVideoPipelinePositionLocalPostFilters, + kRteVideoPipelinePositionLocalPreEncoder, + kRteVideoPipelinePositionRemotePreRenderer +} RteVideoPipelinePosition; + +typedef struct RteVideoTrackConfig { + RteTrackConfig track_config; +} RteVideoTrackConfig; + +AGORA_RTE_API_C void RteVideoTrackSetCanvas(RteVideoTrack *self, RteCanvas *canvas, + RteVideoPipelinePosition position, + void (*cb)(RteVideoTrack *self, + RteCanvas *canvas, void *cb_data, + RteError *err), + void *cb_data); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/view.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/view.h new file mode 100644 index 000000000..7042eaa97 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/track/view.h @@ -0,0 +1,55 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include "rte_base/c/c_error.h" +#include "rte_base/c/common.h" +#include "rte_base/c/utils/rect.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct Rte Rte; +typedef struct RteCanvas RteCanvas; +typedef void *RteView; + +typedef struct RteViewConfig { + RteRect crop_area; +} RteViewConfig; + +typedef struct RteViewInfo { + RteCanvas *attached_canvas; +} RteViewInfo; + +// @{ +// Config +AGORA_RTE_API_C void RteViewConfigInit(RteViewConfig *config, RteError *err); +AGORA_RTE_API_C void RteViewConfigDeinit(RteViewConfig *config, RteError *err); + +AGORA_RTE_API_C void RteViewConfigSetCropArea(RteViewConfig *self, + RteRect crop_area, RteError *err); +AGORA_RTE_API_C void RteViewConfigGetCropArea(RteViewConfig *self, + RteRect *crop_area, RteError *err); +// @} + +// @{ +// Info +AGORA_RTE_API_C void RteViewInfoInit(RteViewInfo *info, RteError *err); +AGORA_RTE_API_C void RteViewInfoDeinit(RteViewInfo *info, RteError *err); +// @} + +AGORA_RTE_API_C RteView RteViewCreate(Rte *self, RteViewConfig *config, + RteError *err); +AGORA_RTE_API_C void RteViewDestroy(RteView *self, RteError *err); + +AGORA_RTE_API_C void RteViewGetInfo(RteView *self, RteViewInfo *info, + RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/user/local_user.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/user/local_user.h new file mode 100644 index 000000000..b15c7bea6 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/user/local_user.h @@ -0,0 +1,120 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include "rte_base/c/handle.h" +#include "rte_base/c/common.h" +#include "rte_base/c/user/user.h" +#include "rte_base/c/utils/buf.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteLocalUserConfig { + RteUserConfig user_config; +} RteLocalUserConfig; + +typedef struct RteLocalUserInfo { + RteUserInfo user_info; +} RteLocalUserInfo; + +typedef struct RteLocalUserObserver RteLocalUserObserver; +struct RteLocalUserObserver { + RteUserObserver base_observer; + + void (*on_user_message_received)(RteLocalUserObserver *self, + RteString *publisher, RteBuf *message); +}; + +// @{ +// Config +AGORA_RTE_API_C void RteLocalUserConfigInit(RteLocalUserConfig *config, + RteError *err); +AGORA_RTE_API_C void RteLocalUserConfigDeinit(RteLocalUserConfig *config, + RteError *err); + +AGORA_RTE_API_C void RteLocalUserConfigSetUserId(RteLocalUserConfig *self, + RteString *user_id, + RteError *err); +AGORA_RTE_API_C void RteLocalUserConfigGetUserId(RteLocalUserConfig *self, + RteString *user_id, + RteError *err); + +AGORA_RTE_API_C void RteLocalUserConfigSetUserName(RteLocalUserConfig *self, + RteString *user_name, + RteError *err); +AGORA_RTE_API_C void RteLocalUserConfigGetUserName(RteLocalUserConfig *self, + RteString *user_name, + RteError *err); + +AGORA_RTE_API_C void RteLocalUserConfigSetUserToken(RteLocalUserConfig *self, + RteString *user_token, + RteError *err); +AGORA_RTE_API_C void RteLocalUserConfigGetUserToken(RteLocalUserConfig *self, + RteString *user_token, + RteError *err); + +AGORA_RTE_API_C void RteLocalUserConfigSetJsonParameter( + RteLocalUserConfig *self, RteString *json_parameter, RteError *err); +AGORA_RTE_API_C void RteLocalUserConfigGetJsonParameter( + RteLocalUserConfig *self, RteString *json_parameter, RteError *err); +// @} + +// @{ +// Info +AGORA_RTE_API_C void RteLocalUserInfoInit(RteLocalUserInfo *info, RteError *err); +AGORA_RTE_API_C void RteLocalUserInfoDeinit(RteLocalUserInfo *info, + 
RteError *err); +// @} + +// @{ +// Observer +AGORA_RTE_API_C RteLocalUserObserver *RteLocalUserObserverCreate(RteError *err); +AGORA_RTE_API_C void RteLocalUserObserverDestroy(RteLocalUserObserver *self, + RteError *err); + +AGORA_RTE_API_C RteLocalUser +RteLocalUserObserverGetEventSrc(RteLocalUserObserver *self, RteError *err); +// @} + +RteLocalUser RteLocalUserCreate(Rte *self, RteLocalUserConfig *config, + RteError *err); +AGORA_RTE_API_C void RteLocalUserDestroy(RteLocalUser *self, RteError *err); + +AGORA_RTE_API_C void RteLocalUserGetConfigs(RteLocalUser *self, + RteLocalUserConfig *config, + RteError *err); +AGORA_RTE_API_C void RteLocalUserSetConfigs( + RteLocalUser *self, RteLocalUserConfig *config, + void (*cb)(RteLocalUser *user, void *cb_data, RteError *err), + void *cb_data); + +AGORA_RTE_API_C void RteLocalUserGetInfo(RteLocalUser *self, + RteLocalUserInfo *info, RteError *err); + +AGORA_RTE_API_C void RteLocalUserLogin(RteLocalUser *self, + void (*cb)(void *cb_data, RteError *err), + void *cb_data); +AGORA_RTE_API_C void RteLocalUserLogout(RteLocalUser *self, + void (*cb)(void *cb_data, RteError *err), + void *cb_data); +AGORA_RTE_API_C bool RteLocalUserIsLogin(RteLocalUser *self); + +AGORA_RTE_API_C void RteLocalUserPublishMessage( + RteLocalUser *self, const char *user_name, RteBuf *message, + void (*cb)(RteLocalUser *self, void *cb_data, RteError *err), + void *cb_data); + +AGORA_RTE_API_C bool RteLocalUserRegisterObserver( + RteLocalUser *self, RteLocalUserObserver *observer, RteError *err); +AGORA_RTE_API_C bool RteLocalUserUnregisterObserver( + RteLocalUser *self, RteLocalUserObserver *observer, RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/user/remote_user.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/user/remote_user.h new file mode 100644 index 000000000..b4a81379b --- /dev/null +++ 
b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/user/remote_user.h @@ -0,0 +1,77 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include "rte_base/c/handle.h" +#include "rte_base/c/common.h" +#include "rte_base/c/user/user.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteRemoteUserConfig { + RteUserConfig user_config; +} RteRemoteUserConfig; + +typedef struct RteRemoteUserInfo { + RteUserInfo user_info; +} RteRemoteUserInfo; + +typedef struct RteRemoteUserObserver { + RteUserObserver user_observer; +} RteRemoteUserObserver; + +// @{ +// Config +AGORA_RTE_API_C void RteRemoteUserConfigInit(RteRemoteUserConfig *config, + RteError *err); +AGORA_RTE_API_C void RteRemoteUserConfigDeinit(RteRemoteUserConfig *config, + RteError *err); + +AGORA_RTE_API_C void RteRemoteUserConfigSetJsonParameter( + RteRemoteUserConfig *self, RteString *json_parameter, RteError *err); +AGORA_RTE_API_C void RteRemoteUserConfigGetJsonParameter( + RteRemoteUserConfig *self, RteString *json_parameter, RteError *err); +// @} + +// @{ +// Info +AGORA_RTE_API_C void RteRemoteUserInfoInit(RteRemoteUserInfo *info, + RteError *err); +AGORA_RTE_API_C void RteRemoteUserInfoDeinit(RteRemoteUserInfo *info, + RteError *err); +// @} + +AGORA_RTE_API_C void RteRemoteUserGetConfigs(RteRemoteUser *self, + RteRemoteUserConfig *config, + RteError *err); +AGORA_RTE_API_C void RteRemoteUserSetConfigs( + RteRemoteUser *self, RteRemoteUserConfig *config, + void (*cb)(RteRemoteUser *user, void *cb_data, RteError *err), + void *cb_data); + +AGORA_RTE_API_C void RteRemoteUserGetInfo(RteRemoteUser *self, + RteRemoteUserInfo *info, + RteError *err); + +AGORA_RTE_API_C bool RteRemoteUserRegisterObserver( + RteRemoteUser *self, RteRemoteUserObserver *observer, RteError *err); +AGORA_RTE_API_C bool RteRemoteUserUnregisterObserver( + RteRemoteUser *self, RteRemoteUserObserver *observer, RteError 
*err); + +AGORA_RTE_API_C RteRemoteUserObserver *RteRemoteUserObserverCreate( + RteError *err); +AGORA_RTE_API_C void RteRemoteUserObserverDestroy(RteRemoteUserObserver *self, + RteError *err); + +AGORA_RTE_API_C RteRemoteUser +RteRemoteUserObserverGetEventSrc(RteRemoteUserObserver *self, RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/user/user.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/user/user.h new file mode 100644 index 000000000..fe8f13202 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/user/user.h @@ -0,0 +1,84 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include "rte_base/c/c_error.h" +#include "rte_base/c/handle.h" +#include "rte_base/c/info.h" +#include "rte_base/c/metadata.h" +#include "rte_base/c/observer.h" +#include "rte_base/c/common.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteUserConfig { + char placeholder; +} RteUserConfig; + +typedef struct RteUserInfo { + RteBaseInfo base_info; + + RteString *user_name; + RteString *user_id; +} RteUserInfo; + +typedef struct RteUserObserver { + RteBaseObserver base_observer; +} RteUserObserver; + +AGORA_RTE_API_C void RteUserConfigInit(RteUserConfig *config, RteError *err); +AGORA_RTE_API_C void RteUserConfigDeinit(RteUserConfig *config, RteError *err); + +AGORA_RTE_API_C void RteUserConfigSetJsonParameter(RteUserConfig *self, + RteString *json_parameter, + RteError *err); +AGORA_RTE_API_C void RteUserConfigGetJsonParameter(RteUserConfig *self, + RteString *json_parameter, + RteError *err); + +AGORA_RTE_API_C void RteUserInit(RteUser *self, RteUserConfig *config, + RteError *err); +AGORA_RTE_API_C void RteUserDeinit(RteUser *self, RteError *err); + +AGORA_RTE_API_C void RteUserGetConfigs(RteUser *self, RteUserConfig *config, + RteError *err); 
+AGORA_RTE_API_C void RteUserSetConfigs(RteUser *self, RteUserConfig *config, + void (*cb)(RteUser *user, void *cb_data, + RteError *err), + void *cb_data); + +AGORA_RTE_API_C bool RteUserRegisterObserver( + RteUser *self, RteUserObserver *observer, + RteError *err); +AGORA_RTE_API_C bool RteUserUnregisterObserver(RteUser *self, + RteUserObserver *observer, + RteError *err); + +AGORA_RTE_API_C void RteUserGetMetadata(RteUser *self, const char *user_name, + void (*cb)(RteUser *self, + RteMetadata *items, + void *cb_data, RteError *err), + void *cb_data); +AGORA_RTE_API_C void RteUserSubscribeMetadata( + RteUser *self, const char *user_name, + void (*cb)(RteUser *self, void *cb_data, RteError *err), void *cb_data); +AGORA_RTE_API_C void RteUserUnsubscribeMetadata( + RteUser *self, const char *user_name, + void (*cb)(RteUser *self, void *cb_data, RteError *err), void *cb_data); +AGORA_RTE_API_C void RteUserSetMetadata( + RteUser *self, const char *user_name, RteMetadata *items, + RteMetadataConfig *config, + void (*cb)(RteUser *self, void *cb_data, RteError *err), void *cb_data); +AGORA_RTE_API_C void RteUserRemoveMetadata( + RteUser *self, const char *user_name, RteMetadata *items, + void (*cb)(RteUser *self, void *cb_data, RteError *err), void *cb_data); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/buf.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/buf.h new file mode 100644 index 000000000..c3a6c0bb6 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/buf.h @@ -0,0 +1,65 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include +#include + +#include "rte_base/c/c_error.h" +#include "rte_base/c/common.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteBuf { + void *data; + size_t size; + size_t capacity; + bool own; + void *user_data; +} RteBuf; + +AGORA_RTE_API_C void RteBufReset(RteBuf *self, RteError *err); + +AGORA_RTE_API_C void RteBufInit(RteBuf *self, size_t capacity, RteError *err); +AGORA_RTE_API_C void RteBufInitFromBuffer(RteBuf *self, void *buf, size_t size, + RteError *err); +AGORA_RTE_API_C void RteBufInitFromBufferWithOffset(RteBuf *self, size_t offset, + void *buf, size_t size, + RteError *err); +AGORA_RTE_API_C void RteBufInitFromString(RteBuf *self, RteString *str, + RteError *err); +AGORA_RTE_API_C void RteBufInitWithBuffer(RteBuf *self, void *buf, size_t size, + int own, RteError *err); + +AGORA_RTE_API_C RteBuf *RteBufCreate(RteError *err); +AGORA_RTE_API_C RteBuf *RteBufCreateWithCapacity(size_t capacity, RteError *err); + +AGORA_RTE_API_C void RteBufDeinit(RteBuf *self, RteError *err); +AGORA_RTE_API_C void RteBufDestroy(RteBuf *self, RteError *err); + +AGORA_RTE_API_C void RteBufReserve(RteBuf *self, size_t len, RteError *err); + +AGORA_RTE_API_C void RteBufPush(RteBuf *self, const void *src, size_t size, + RteError *err); +AGORA_RTE_API_C void RteBufPop(RteBuf *self, void *dest, size_t size, + RteError *err); + +AGORA_RTE_API_C void RteBufPeek(RteBuf *self, void *dest, size_t size, + RteError *err); + +AGORA_RTE_API_C void RteBufDisown(RteBuf *self, RteError *err); + +AGORA_RTE_API_C void RteBufAppendNullTerminator(RteBuf *self, RteError *err); + +AGORA_RTE_API_C size_t RteBufGetSize(RteBuf *self, RteError *err); +AGORA_RTE_API_C size_t RteBufGetCapacity(RteBuf *self, RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/frame.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/frame.h new 
file mode 100644 index 000000000..a68efcb6c --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/frame.h @@ -0,0 +1,35 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef enum RteAudioFrameType { + kRteAudioFrameTypePcm16, +} RteAudioFrameType; + +typedef struct RteAudioFrame { + RteAudioFrameType type; + int samples_per_channel; + int bytes_per_sample; + int channels; + int samples_per_sec; + void *buffer; + int64_t render_time_in_ms; + int avsync_type; + int64_t presentation_in_ms; + size_t audio_track_number; +} RteAudioFrame; + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/rect.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/rect.h new file mode 100644 index 000000000..2c67bdcf4 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/rect.h @@ -0,0 +1,24 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteRect { + int32_t x; + int32_t y; + int32_t width; + int32_t height; +} RteRect; + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/string.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/string.h new file mode 100644 index 000000000..3a3cbd8f1 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/string.h @@ -0,0 +1,52 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include +#include +#include + +#include "rte_base/c/common.h" + +#define RTE_STRING_PRE_BUF_SIZE 256 + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef struct RteError RteError; +typedef struct RteString RteString; + +AGORA_RTE_API_C RteString *RteStringCreate(RteError *err); +AGORA_RTE_API_C void RteStringDestroy(RteString *self, RteError *err); + +AGORA_RTE_API_C void RteStringInit(RteString *self, RteError *err); +AGORA_RTE_API_C void RteStringInitWithCStr(RteString *self, const char *c_str, + RteError *err); +AGORA_RTE_API_C void RteStringInitWithValue(RteString *self, RteError *err, + const char *fmt, ...); +AGORA_RTE_API_C void RteStringDeinit(RteString *self, RteError *err); + +AGORA_RTE_API_C void RteStringVSet(RteString *self, RteError *err, + const char *fmt, va_list ap); + +AGORA_RTE_API_C void RteStringReserve(RteString *self, size_t extra, + RteError *err); + +AGORA_RTE_API_C void RteStringCopy(RteString *self, const RteString *other, + RteError *err); + +AGORA_RTE_API_C bool RteStringIsEqual(const RteString *self, + const RteString *other, RteError *err); +AGORA_RTE_API_C bool RteStringIsEqualCStr(const RteString *self, + const char *other, RteError *err); + +AGORA_RTE_API_C const char *RteStringCStr(const RteString *self, RteError *err); + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/uuid.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/uuid.h new file mode 100644 index 000000000..7132acd3b --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/c/utils/uuid.h @@ -0,0 +1,23 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +typedef union RteUuid { + uint8_t bytes[16]; + uint32_t dwords[4]; + uint64_t qwords[2]; +} RteUuid; + +#ifdef __cplusplus +} +#endif // __cplusplus diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_callback_utils.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_callback_utils.h new file mode 100644 index 000000000..27e1e36bc --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_callback_utils.h @@ -0,0 +1,200 @@ +#pragma once +#include +#include "rte_cpp_error.h" +#include "rte_base/c/handle.h" + +/** + * @technical preview + */ +namespace rte { + +template +class SingleUseCallback { + + public: + using CallbackType = std::function; + + SingleUseCallback(){ + cb_ = nullptr; + cb_data_ = nullptr; + self_ = nullptr; + }; + + SingleUseCallback(SingleUseCallback&& other){ + cb_ = other.cb_; + cb_data_ = other.cb_data_; + self_ = other.self_; + + other.cb_ = nullptr; + other.cb_data_ = nullptr; + other.self_ = nullptr; + } + + void Store(T* self, CallbackType cb, void* cb_data){ + self_ = self; + cb_ = cb; + cb_data_ = cb_data; + } + + void Invoke(RteError* err){ + if(cb_ != nullptr){ + cb_(self_, cb_data_, err); + + self_ = nullptr; + cb_ = nullptr; + cb_data_ = nullptr; + } + } + + bool Invalid(){ + return cb_ == nullptr; + } + + void Clear(){ + self_ = nullptr; + cb_ = nullptr; + cb_data_ = nullptr; + } + + CallbackType cb_; + void* cb_data_; + T* self_; +}; // class SingleUseCallback + +template +class CallbackContext { + public: + + using CallbackTypeOnlyError = std::function; + using CallbackTypeOnlyErrorWithCppError = std::function; + + using CallbackType = std::function; + using CallbackTypeWithCppError = std::function; + + CallbackContext(T* self, CallbackTypeOnlyError cb) + :self_(self), cb_only_error_(cb) {} + + CallbackContext(T* self, CallbackTypeOnlyErrorWithCppError cb) + 
:self_(self), cb_only_error_with_cpp_error_(cb) {} + + CallbackContext(T* self, CallbackType cb, void* cb_data) + :self_(self), cb_(cb), cb_data_(cb_data) {} + + CallbackContext(T* self, CallbackTypeWithCppError cb, void* cb_data) + :self_(self), cb_with_cpp_error_(cb), cb_data_(cb_data) {} + + CallbackTypeOnlyError cb_only_error_; + CallbackTypeOnlyErrorWithCppError cb_only_error_with_cpp_error_; + + CallbackType cb_; + CallbackTypeWithCppError cb_with_cpp_error_; + + void* cb_data_; + T* self_; +}; + +template +void CallbackFunc(FromeType* self, void* cb_data, RteError* err){ + auto *ctx = static_cast*>(cb_data); + + if(ctx->cb_only_error_ != nullptr){ + ctx->cb_only_error_(err); + } + + if(ctx->cb_only_error_with_cpp_error_ != nullptr){ + rte::Error cpp_err(err); + ctx->cb_only_error_with_cpp_error_(&cpp_err); + } + + if(ctx->cb_with_cpp_error_ != nullptr){ + rte::Error cpp_err(err); + ctx->cb_with_cpp_error_( self != nullptr ? ctx->self_ : nullptr, ctx->cb_data_, &cpp_err); + } + + if(ctx->cb_ != nullptr){ + ctx->cb_(self != nullptr ? 
ctx->self_ : nullptr, ctx->cb_data_, err); + } + + delete ctx; +} + +template +class CallbackContextWithArgs { + public: + + + using CallbackTypeOnlyError = std::function; + using CallbackTypeOnlyErrorWithCppError = std::function; + + using CallbackType = std::function; + using CallbackTypeWithCppError = std::function; + + CallbackContextWithArgs(T* self, CallbackTypeOnlyError cb) + :self_(self), cb_only_error_(cb) {} + + CallbackContextWithArgs(T* self, CallbackTypeOnlyErrorWithCppError cb) + :self_(self), cb_only_error_with_cpp_error_(cb) {} + + CallbackContextWithArgs(T* self, CallbackType cb, void* cb_data) + :self_(self), cb_(cb), cb_data_(cb_data) {} + + CallbackContextWithArgs(T* self, CallbackTypeWithCppError cb, void* cb_data) + :self_(self), cb_with_cpp_error_(cb), cb_data_(cb_data) {} + + + CallbackTypeOnlyError cb_only_error_; + CallbackTypeOnlyErrorWithCppError cb_only_error_with_cpp_error_; + + CallbackType cb_; + CallbackTypeWithCppError cb_with_cpp_error_; + void* cb_data_; + T* self_; +}; + +template +void CallbackFuncWithArgs(FromeType* self, Args... args, void* cb_data, RteError* err){ + auto *ctx = static_cast*>(cb_data); + + if(ctx->cb_only_error_ != nullptr){ + ctx->cb_only_error_(args..., err); + } + + if(ctx->cb_only_error_with_cpp_error_ != nullptr){ + rte::Error cpp_err(err); + ctx->cb_only_error_with_cpp_error_(args..., &cpp_err); + } + + if(ctx->cb_with_cpp_error_ != nullptr){ + Error cpp_err(err); + ctx->cb_with_cpp_error_( self != nullptr ? ctx->self_ : nullptr, args..., ctx->cb_data_, &cpp_err); + } + + if(ctx->cb_ != nullptr){ + ctx->cb_(self != nullptr ? 
ctx->self_ : nullptr, args..., ctx->cb_data_, err); + } + delete ctx; +} + +template +class ObserverDestroyContext { + public: + + using ObserverDestroyer = std::function; + + ObserverDestroyContext(ObserverDestroyer destroyer, void* cb_data) + :destroyer_(destroyer), cb_data_(cb_data) {} + + ObserverDestroyer destroyer_; + void* cb_data_; +}; + +template +void ObserverDestroyProxy(FromeType* observer, void* cb_data){ + auto *ctx = static_cast*>(cb_data); + if(ctx->destroyer_ != nullptr){ + ctx->destroyer_(static_cast(observer->base_observer.me_in_target_lang), ctx->cb_data_); + } + delete ctx; +} + +} // namespace rte diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_canvas.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_canvas.h new file mode 100644 index 000000000..c35ce3df6 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_canvas.h @@ -0,0 +1,227 @@ +#pragma once + +#include "rte_base/c/c_player.h" +#include "rte_base/c/handle.h" +#include "rte_base/c/track/canvas.h" + +#include "rte_cpp_error.h" +#include "rte_cpp_rte.h" +#include "rte_cpp_callback_utils.h" + + +namespace rte { + +using VideoRenderMode = ::RteVideoRenderMode; +using VideoMirrorMode = ::RteVideoMirrorMode; +using ViewConfig = ::RteViewConfig; +using View = ::RteView; +using Rect = ::RteRect; + +/** + * The CanvasInitialConfig class is used to initialize the Canvas object. + * @since v4.4.0 + * @technical preview + */ +class CanvasInitialConfig { + public: + CanvasInitialConfig() {RteCanvasInitialConfigInit(&c_canvas_initial_config, nullptr);} + ~CanvasInitialConfig() {RteCanvasInitialConfigDeinit(&c_canvas_initial_config, nullptr);} + + private: + friend class Canvas; + ::RteCanvasInitialConfig c_canvas_initial_config; +}; + +/** + * The CanvasConfig class is used to configure the Canvas object. 
+ * @since v4.4.0 + */ +class CanvasConfig { + public: + CanvasConfig() {RteCanvasConfigInit(&c_canvas_config, nullptr);} + ~CanvasConfig() {RteCanvasConfigDeinit(&c_canvas_config, nullptr);} + + /** + * Set the video render mode. + * @since v4.4.0 + * @param mode The render mode to set. Refer to the rte::VideoRenderMode type, default is kRteVideoRenderModeHidden. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidArgument: The mode parameter is set to an illegal value. + * @return void + */ + void SetRenderMode(VideoRenderMode mode, Error *err = nullptr) { + RteCanvasConfigSetVideoRenderMode(&c_canvas_config, mode, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the render mode. + * @since v4.4.0 + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * @return VideoRenderMode + */ + VideoRenderMode GetRenderMode(Error *err = nullptr) { + VideoRenderMode mode; + RteCanvasConfigGetVideoRenderMode(&c_canvas_config, &mode, err != nullptr ? err->get_underlying_impl() : nullptr); + return mode; + } + + /** + * Set the video mirror mode. + * @since v4.4.0 + * @param mode The mirror mode to set. Refer to the rte::VideoMirrorMode type, default is kRteVideoMirrorModeAuto. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidArgument: The mode parameter is set to an illegal value. + * @return void + */ + void SetMirrorMode(VideoMirrorMode mode, Error *err = nullptr) { + RteCanvasConfigSetVideoMirrorMode(&c_canvas_config, mode, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the video mirror mode. + * @since v4.4.0 + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * @return VideoMirrorMode The current video mirror mode. + */ + VideoMirrorMode GetMirrorMode(Error *err = nullptr) { + VideoMirrorMode mode; + RteCanvasConfigGetVideoMirrorMode(&c_canvas_config, &mode, err != nullptr ? 
err->get_underlying_impl() : nullptr); + return mode; + } + + /** + * Set the Crop Area. + * @since v4.4.0 + * @param crop_area + * @param err + * @return void + * @technical preview + */ + void SetCropArea(RteRect &crop_area, Error *err = nullptr) { + RteCanvasConfigSetCropArea(&c_canvas_config, crop_area, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the Crop Area. + * @since v4.4.0 + * @param err + * @return RteRect + * @technical preview + */ + RteRect GetCropArea(Error *err = nullptr) { + RteRect crop_area; + RteCanvasConfigGetCropArea(&c_canvas_config, &crop_area, err != nullptr ? err->get_underlying_impl() : nullptr); + return crop_area; + } + + private: + friend class Canvas; + ::RteCanvasConfig c_canvas_config; +}; + +/** + * The Canvas class is used to render the video stream. + * @since v4.4.0 + */ +class Canvas { + public: + /** + * Construct a Canvas object. + * @since v4.4.0 + * @param rte Rte object. + * @param initial_config CanvasInitialConfig initialization configuration object. Currently, a null pointer can be passed. + */ + Canvas(Rte *rte, CanvasInitialConfig *initial_config = nullptr) { + c_canvas = ::RteCanvasCreate(&rte->c_rte, initial_config != nullptr ? &initial_config->c_canvas_initial_config : nullptr, nullptr); + }; + ~Canvas() { RteCanvasDestroy(&c_canvas, nullptr); }; + + Canvas(Canvas&& other) : c_canvas(other.c_canvas) { + other.c_canvas = {}; + } + + //@{ + Canvas(const Canvas& other) = delete; + Canvas& operator=(const Canvas& other) = delete; + Canvas& operator=(Canvas&& other) = delete; + //@} + + + /** + * Get the configuration of Canvas object. + * @since v4.4.0 + * @param config The object used to get the canvas config configuration. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Canvas object has been destroyed or is invalid. + * - kRteErrorInvalidArgument: The passed config object is null. 
+ * @return bool Returns the result of getting the configuration information. + * - true: Successfully retrieved. + * - false: Failed to retrieve. + */ + bool GetConfigs(CanvasConfig *config, Error *err = nullptr) { + return RteCanvasGetConfigs(&c_canvas, &config->c_canvas_config, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Configure the Canvas object. + * @since v4.4.0 + * @param config The object used to set the canvas config configuration. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Canvas object has been destroyed or is invalid. + * - kRteErrorInvalidArgument: The passed config object is null. + * @return bool Returns the result of setting the configuration information. + * - true: Successfully set the configuration. + * - false: Failed to set the configuration. + */ + bool SetConfigs(CanvasConfig *config, Error *err = nullptr) { + return RteCanvasSetConfigs(&c_canvas, &config->c_canvas_config, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Add a rendering view. Currently, only one view is supported. + * @since v4.4.0 + * @param view Pointer to the View object. On the Windows platform, you can assign an HWND window handle to a View type variable and pass it to the interface. + * @param config View-related configuration. Currently, nullptr can be passed. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Canvas object has been destroyed or is invalid. + * - kRteErrorInvalidArgument: The passed view is null. + * @return bool Returns the result of adding the View. + * - true: Successfully add the View. + * - false: Failed to add the View. + */ + bool AddView(View *view, ViewConfig *config, rte::Error *err = nullptr) { + return RteCanvasAddView(&c_canvas, view, config, err != nullptr ? 
err->get_underlying_impl() : nullptr); + } + + /** + * Remove a rendering view. + * @since v4.4.0 + * @param view Pointer to the View object. + * @param config View-related configuration. Currently, nullptr can be passed. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Canvas object has been destroyed or is invalid. + * - kRteErrorInvalidArgument: The passed view is null. + * @return bool Returns the result of removing the View. + * - true: Successfully removed the View. + * - false: Failed to remove the View. + */ + bool RemoveView(View *view, ViewConfig *config, rte::Error *err = nullptr) { + return RteCanvasRemoveView(&c_canvas, view, config, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + private: + + friend class Player; + + ::RteCanvas c_canvas; +}; + +} // namespace rte \ No newline at end of file diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_error.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_error.h new file mode 100644 index 000000000..31afaae88 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_error.h @@ -0,0 +1,83 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once + +#include +#include + +#include "rte_base/c/c_error.h" +#include "rte_base/c/utils/string.h" + +namespace rte { + + +class Rte; +class Player; +class Canvas; +class Config; +class PlayerConfig; +class CanvasConfig; + +using ErrorCode = ::RteErrorCode; + +/** + * Error class. Used to record the execution result of an interface call. 
+ * @since v4.4.0 + */ +class Error { + public: + + Error() : c_error(RteErrorCreate()) {} + explicit Error(::RteError *error) : c_error(error), c_error_owned(false) {} + + ~Error() { + if (c_error != nullptr && c_error_owned) { + RteErrorDestroy(c_error); + } + } + + // @{ + Error(Error &other) = delete; + Error(Error &&other) = delete; + Error &operator=(const Error &cmd) = delete; + Error &operator=(Error &&cmd) = delete; + // @} + + void Set(ErrorCode code, const char *message) { + if(c_error != nullptr){ + RteErrorSet(c_error, code, "%s", message ? message : ""); + } + } + + /** + * This interface is used to get the specific error code. + * @since v4.4.0 + * @return ErrorCode Error code,Refer to the ErrorCode type for details. + */ + ErrorCode Code() const { return c_error != nullptr ? c_error->code : kRteErrorDefault; } + + /** + * This interface is used to get the specific error description. + * @since v4.4.0 + * @return const char* Error description + */ + const char *Message() const { + if(c_error != nullptr && c_error->message != nullptr){ + return RteStringCStr(c_error->message, nullptr); + } + return ""; + } + + ::RteError *get_underlying_impl() const { return c_error; } + + private: + + ::RteError *c_error; + bool c_error_owned = true; +}; + +} // namespace rte diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_player.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_player.h new file mode 100644 index 000000000..213250176 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_player.h @@ -0,0 +1,1007 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once +#include +#include + +#include "rte_base/c/c_rte.h" +#include "rte_base/c/c_player.h" + +#include "rte_cpp_error.h" +#include "rte_cpp_callback_utils.h" +#include "rte_cpp_canvas.h" +#include "rte_cpp_string.h" +#include "rte_cpp_stream.h" + +namespace rte { + +using PlayerState = ::RtePlayerState; +using PlayerEvent = ::RtePlayerEvent; +using PlayerMetadataType = ::RtePlayerMetadataType; +using PlayerInfo = ::RtePlayerInfo; +using PlayerStats = ::RtePlayerStats; +using PlayerCustomSourceProvider = ::RtePlayerCustomSourceProvider; +using AbrSubscriptionLayer = ::RteAbrSubscriptionLayer; +using AbrFallbackLayer = ::RteAbrFallbackLayer; + +class PlayerInitialConfig {}; + +static void onStateChanged(::RtePlayerObserver *observer, + RtePlayerState old_state, RtePlayerState new_state, + RteError *err); + +static void onPositionChanged(::RtePlayerObserver *observer, uint64_t curr_time, + uint64_t utc_time); + +static void onResolutionChanged(::RtePlayerObserver *observer, int width, int height); + +static void onEvent(::RtePlayerObserver *observer, RtePlayerEvent event); + +static void onMetadata(::RtePlayerObserver *observer, ::RtePlayerMetadataType type, + const uint8_t *data, size_t length); + +static void onPlayerInfoUpdated(::RtePlayerObserver *observer, const RtePlayerInfo *info); + +static void onAudioVolumeIndication(::RtePlayerObserver *observer, int32_t volume); + + +/** + * The PlayerObserver class is used to observe the event of Player object. 
+ * @since v4.4.0 + */ +class PlayerObserver { + public: + PlayerObserver() : c_player_observer(::RtePlayerObserverCreate(nullptr)) { + + c_player_observer->base_observer.me_in_target_lang = this; + + c_player_observer->on_state_changed = rte::onStateChanged; + c_player_observer->on_position_changed = rte::onPositionChanged; + c_player_observer->on_resolution_changed = rte::onResolutionChanged; + c_player_observer->on_event = rte::onEvent; + c_player_observer->on_metadata = rte::onMetadata; + c_player_observer->on_player_info_updated = rte::onPlayerInfoUpdated; + c_player_observer->on_audio_volume_indication = rte::onAudioVolumeIndication; + } + virtual ~PlayerObserver(){ RtePlayerObserverDestroy(c_player_observer, nullptr); } + + // @{ + PlayerObserver(PlayerObserver &other) = delete; + PlayerObserver(PlayerObserver &&other) = delete; + PlayerObserver &operator=(const PlayerObserver &cmd) = delete; + PlayerObserver &operator=(PlayerObserver &&cmd) = delete; + // @} + + /** + * Player state callback. This function is called when the player state changes. + * @since v4.4.0 + * @param old_state The old state. + * @param new_state The new state. + * @param err Possible return values for ErrorCode. Only when the new_state value is kRtePlayerStateFailed, you need to check the value of this parameter. + * - kRteErrorDefault. For specific reasons, see Error.Message, including the following situations: + * - Failed to connect to the channel. + * - kRteErrorInvalidArgument. + * - Invalid appid. + * - Invalid channelid. + * - Invalid uid. + * - kRteErrorAuthenticationFailed. + * - Invalid token. + * - Token expired. + * - kRteErrorStreamNotFound. After entering the channel, no stream was received from the broadcaster for more than 10 seconds. + * @return void + */ + virtual void onStateChanged(PlayerState old_state, PlayerState new_state, + rte::Error *err) {}; + + /** + * This callback will be triggered when the playback position changed. 
+ * @since v4.4.0 + * @param curr_time + * @param utc_time + */ + virtual void onPositionChanged(uint64_t curr_time, + uint64_t utc_time) {}; + + /** + * Video resolution change callback. + * @since v4.4.0 + * @param width The width of the video frame. + * @param height The height of the video frame. + * @return void + */ + virtual void onResolutionChanged(int width, int height) {}; + + /** + * Event callback. + * @since v4.4.0 + * @param event The event notified by the callback. Refer to the rte::PlayerEvent type. Currently, the following events can be handled accordingly: + * - kRtePlayerEventFreezeStart: Indicates that stuttering has occurred or shows a loading animation. + * - kRtePlayerEventFreezeStop: Indicates that stuttering has ended or stops the loading animation. + * - kRtePlayerEventAuthenticationWillExpire: Regenerate the token, use the new token to construct the RTE URL, and call Player::OpenWithUrl to refresh the token. + * - kRtePlayerEventAbrFallbackToAudioOnlyLayer: Indicates that due to network reasons, it has fallen back to audio-only mode. + * - kRtePlayerEventAbrRecoverFromAudioOnlyLayer: Indicates that it has recovered from audio-only mode to video mode. + * @return void + */ + virtual void onEvent(PlayerEvent event) {}; + + /** + * Metadata callback. + * @since v4.4.0 + * @param type The type of metadata. + * @param data The metadata buffer. + * @param length The length of the metadata. + * @return void + */ + virtual void onMetadata(PlayerMetadataType type, + const uint8_t *data, size_t length) {}; + + /** + * Player information update callback. This is called when fields in rte::PlayerInfo are updated. + * @since v4.4.0 + * @param info The current PlayerInfo information. + * @return void + */ + virtual void onPlayerInfoUpdated(const PlayerInfo *info) {}; + + /** + * Broadcaster audio volume update callback. + * @since v4.4.0 + * @param volume The current volume of the Broadcaster. The value range is [0, 255]. 
+ * @return void + */ + virtual void onAudioVolumeIndication(int32_t volume) {}; + + private: + friend class Player; + + ::RtePlayerObserver *c_player_observer; +}; + +void onStateChanged(::RtePlayerObserver *observer, + RtePlayerState old_state, RtePlayerState new_state, + RteError *err){ + auto *player_observer = static_cast(observer->base_observer.me_in_target_lang); + if (player_observer != nullptr){ + Error cpp_err(err); + player_observer->onStateChanged(old_state, new_state, &cpp_err); + } +} +void onPositionChanged(::RtePlayerObserver *observer, uint64_t curr_time, + uint64_t utc_time){ + auto *player_observer = static_cast(observer->base_observer.me_in_target_lang); + if (player_observer != nullptr){ + player_observer->onPositionChanged(curr_time, utc_time); + } +} + +void onResolutionChanged(::RtePlayerObserver *observer, int width, int height){ + auto *player_observer = static_cast(observer->base_observer.me_in_target_lang); + if (player_observer != nullptr){ + player_observer->onResolutionChanged(width, height); + } +} + +void onEvent(::RtePlayerObserver *observer, RtePlayerEvent event){ + auto *player_observer = static_cast(observer->base_observer.me_in_target_lang); + if (player_observer != nullptr){ + player_observer->onEvent(event); + } +} + +void onMetadata(::RtePlayerObserver *observer, RtePlayerMetadataType type, + const uint8_t *data, size_t length){ + auto *player_observer = static_cast(observer->base_observer.me_in_target_lang); + if (player_observer != nullptr){ + player_observer->onMetadata(type, data, length); + } +} + +void onPlayerInfoUpdated(::RtePlayerObserver *observer, const RtePlayerInfo *info){ + auto *player_observer = static_cast(observer->base_observer.me_in_target_lang); + if (player_observer != nullptr){ + player_observer->onPlayerInfoUpdated(info); + } +} + +void onAudioVolumeIndication(::RtePlayerObserver *observer, int32_t volume){ + auto *player_observer = static_cast(observer->base_observer.me_in_target_lang); + if 
(player_observer != nullptr){ + player_observer->onAudioVolumeIndication(volume); + } +} + +/** + * The PlayerConfig class is used to configure the Player object. + * @since v4.4.0 + */ +class PlayerConfig { + public: + PlayerConfig() { RtePlayerConfigInit(&c_player_config, nullptr); } + ~PlayerConfig() { RtePlayerConfigDeinit(&c_player_config, nullptr); } + + // @{ + PlayerConfig(PlayerConfig &other) = delete; + PlayerConfig(PlayerConfig &&other) = delete; + PlayerConfig &operator=(PlayerConfig &&cmd) = delete; + + PlayerConfig &operator=(const PlayerConfig &other) { + RtePlayerConfigCopy(&c_player_config, &other.c_player_config, nullptr); + return *this; + }; + + PlayerConfig &operator=(const RtePlayerConfig* other) { + RtePlayerConfigCopy(&c_player_config, other, nullptr); + return *this; + }; + // @} + + /** + * Whether to automatically play after a successful call to Player::OpenWithUrl. + * If not set, the default value is true. + * @since v4.4.0 + * @param auto_play + * - true: Automatically start streaming and playing after a successful opening. + * - false: After a successful open with OpenWithUrl, you need to actively call Player::Play() to play the audio and video stream. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * @return void + */ + void SetAutoPlay(bool auto_play, Error *err = nullptr) { + RtePlayerConfigSetAutoPlay(&c_player_config, auto_play, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the auto-play setting. + * @since v4.4.0 + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * @return bool Returns whether auto-play is enabled. + */ + bool GetAutoPlay(Error *err = nullptr) { + bool auto_play; + RtePlayerConfigGetAutoPlay(&c_player_config, &auto_play, + err != nullptr ? err->get_underlying_impl() : nullptr); + return auto_play; + } + + /** + * Set the playback speed parameter. 
+ * @since v4.4.0 + * @param speed + * @param err + * @return void + * @technical preview + */ + void SetPlaybackSpeed(int32_t speed, Error *err = nullptr) { + RtePlayerConfigSetPlaybackSpeed(&c_player_config, speed, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the playback speed parameter. + * @since v4.4.0 + * @param err + * @return int32_t + * @technical preview + */ + int32_t GetPlaybackSpeed(Error *err = nullptr) { + int32_t speed; + RtePlayerConfigGetPlaybackSpeed(&c_player_config, &speed, + err != nullptr ? err->get_underlying_impl() : nullptr); + return speed; + } + + /** + * Set the playout audio track index parameter. + * @since v4.4.0 + * @param idx + * @param err + * @return void + * @technical preview + */ + void SetPlayoutAudioTrackIdx(int idx, Error *err = nullptr) { + RtePlayerConfigSetPlayoutAudioTrackIdx(&c_player_config, idx, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the playout audio track index parameter. + * @since v4.4.0 + * @param err + * @return int32_t + * @technical preview + */ + int32_t GetPlayoutAudioTrackIdx(Error *err = nullptr) { + int32_t idx; + RtePlayerConfigGetPlayoutAudioTrackIdx(&c_player_config, &idx, + err != nullptr ? err->get_underlying_impl() : nullptr); + return idx; + } + + /** + * Set the publish audio track index parameter. + * @since v4.4.0 + * @param idx + * @param err + * @return void + * @technical preview + */ + void SetPublishAudioTrackIdx(int32_t idx, Error *err = nullptr) { + RtePlayerConfigSetPublishAudioTrackIdx(&c_player_config, idx, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the publish audio track index parameter. + * @since v4.4.0 + * @param err + * @return int32_t + * @technical preview + */ + int32_t GetPublishAudioTrackIdx(Error *err = nullptr) { + int32_t idx; + RtePlayerConfigGetPublishAudioTrackIdx(&c_player_config, &idx, + err != nullptr ? 
err->get_underlying_impl() : nullptr); + return idx; + } + + /** + * Set the audio track index parameter. + * @since v4.4.0 + * @param idx + * @param err + * @return void + * @technical preview + */ + void SetAudioTrackIdx(int32_t idx, Error *err = nullptr) { + RtePlayerConfigSetAudioTrackIdx(&c_player_config, idx, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the audio track index parameter. + * @since v4.4.0 + * @param err + * @return int32_t + * @technical preview + */ + int32_t GetAudioTrackIdx(Error *err = nullptr) { + int32_t idx; + RtePlayerConfigGetAudioTrackIdx(&c_player_config, &idx, + err != nullptr ? err->get_underlying_impl() : nullptr); + return idx; + } + + /** + * Set the subtitle track index parameter. + * @since v4.4.0 + * @param idx + * @param err + * @return void + * @technical preview + */ + void SetSubtitleTrackIdx(int32_t idx, Error *err = nullptr) { + RtePlayerConfigSetSubtitleTrackIdx(&c_player_config, idx, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the subtitle track index parameter. + * @since v4.4.0 + * @param err + * @return int32_t + * @technical preview + */ + int32_t GetSubtitleTrackIdx(Error *err = nullptr) { + int32_t idx; + RtePlayerConfigGetSubtitleTrackIdx(&c_player_config, &idx, + err != nullptr ? err->get_underlying_impl() : nullptr); + return idx; + } + + /** + * Set the external subtitle track index parameter. + * @since v4.4.0 + * @param idx + * @param err + * @return void + * @technical preview + */ + void SetExternalSubtitleTrackIdx(int32_t idx, Error *err = nullptr) { + RtePlayerConfigSetExternalSubtitleTrackIdx(&c_player_config, idx, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the external subtitle track index parameter. 
+ * @since v4.4.0 + * @param err + * @return int32_t + * @technical preview + */ + int32_t GetExternalSubtitleTrackIdx(Error *err = nullptr) { + int32_t idx; + RtePlayerConfigGetExternalSubtitleTrackIdx(&c_player_config, &idx, + err != nullptr ? err->get_underlying_impl() : nullptr); + return idx; + } + + /** + * Set the audio pitch parameter. + * @since v4.4.0 + * @param audio_pitch + * @param err + * @return void + * @technical preview + */ + void SetAudioPitch(int32_t audio_pitch, Error *err = nullptr) { + RtePlayerConfigSetAudioPitch(&c_player_config, audio_pitch, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the audio pitch parameter. + * @since v4.4.0 + * @param err + * @return int32_t + * @technical preview + */ + int32_t GetAudioPitch(Error *err = nullptr) { + int32_t audio_pitch; + RtePlayerConfigGetAudioPitch(&c_player_config, &audio_pitch, + err != nullptr ? err->get_underlying_impl() : nullptr); + return audio_pitch; + } + + /** + * Set the playout volume parameter. + * @since v4.4.0 + * @param volume + * @param err + * @return void + * @technical preview + */ + void SetPlayoutVolume(int32_t volume, Error *err = nullptr) { + RtePlayerConfigSetPlayoutVolume(&c_player_config, volume, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the playout volume parameter. + * @since v4.4.0 + * @param err + * @return int32_t + * @technical preview + */ + int32_t GetPlayoutVolume(Error *err = nullptr) { + int32_t volume; + RtePlayerConfigGetPlayoutVolume(&c_player_config, &volume, + err != nullptr ? err->get_underlying_impl() : nullptr); + return volume; + } + + /** + * Set the audio playback delay parameter. + * @since v4.4.0 + * @param volume + * @param err + * @return void + * @technical preview + */ + void SetAudioPlaybackDelay(int32_t delay, Error *err = nullptr) { + RtePlayerConfigSetAudioPlaybackDelay(&c_player_config, delay, + err != nullptr ? 
err->get_underlying_impl() : nullptr); + } + + /** + * Get the audio playback delay parameter. + * @since v4.4.0 + * @param err + * @return int32_t + * @technical preview + */ + int32_t GetAudioPlaybackDelay(Error *err = nullptr) { + int32_t delay; + RtePlayerConfigGetAudioPlaybackDelay(&c_player_config, &delay, + err != nullptr ? err->get_underlying_impl() : nullptr); + return delay; + } + + /** + * Set the audio dual mono mode parameter. + * @since v4.4.0 + * @param mode + * @param err + * @return void + * @technical preview + */ + void SetAudioDualMonoMode(RteAudioDualMonoMode mode, Error *err = nullptr) { + RtePlayerConfigSetAudioDualMonoMode(&c_player_config, mode, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the audio dual mono mode parameter. + * @since v4.4.0 + * @param err + * @return RteAudioDualMonoMode + * @technical preview + */ + RteAudioDualMonoMode GetAudioDualMonoMode(Error *err = nullptr) { + RteAudioDualMonoMode mode; + RtePlayerConfigGetAudioDualMonoMode(&c_player_config, &mode, + err != nullptr ? err->get_underlying_impl() : nullptr); + return mode; + } + + /** + * Set the publish volume parameter. + * @since v4.4.0 + * @param volume + * @param err + * @return void + * @technical preview + */ + void SetPublishVolume(int32_t volume, Error *err = nullptr) { + RtePlayerConfigSetPublishVolume(&c_player_config, volume, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the publish volume parameter. + * @since v4.4.0 + * @param err + * @return int32_t + * @technical preview + */ + int32_t GetPublishVolume(Error *err = nullptr) { + int32_t volume; + RtePlayerConfigGetPublishVolume(&c_player_config, &volume, + err != nullptr ? err->get_underlying_impl() : nullptr); + return volume; + } + + /** + * Set the loop count parameter. 
+ * @since v4.4.0 + * @param count + * @param err + * @return void + * @technical preview + */ + void SetLoopCount(int32_t count, Error *err = nullptr) { + RtePlayerConfigSetLoopCount(&c_player_config, count, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the loop count parameter. + * @since v4.4.0 + * @param err + * @return int32_t + * @technical preview + */ + int32_t GetLoopCount(Error *err = nullptr) { + int32_t count; + RtePlayerConfigGetLoopCount(&c_player_config, &count, + err != nullptr ? err->get_underlying_impl() : nullptr); + return count; + } + + /** + * Set player private parameters. This parameter setting can be done according to actual needs, referring to the suggestions of Agora SA. + * @since v4.4.0 + * @param json_parameter JSON formatted string + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidArgument: Indicates that the json_parameter parameter is empty. + * @return void + */ + void SetJsonParameter(const char *json_parameter, Error *err = nullptr) { + String str(json_parameter); + RtePlayerConfigSetJsonParameter(&c_player_config, str.get_underlying_impl(), + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the currently configured private parameters of the player. + * @since v4.4.0 + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * @return std::string + */ + std::string GetJsonParameter(Error *err = nullptr) { + String str; + RtePlayerConfigGetJsonParameter(&c_player_config, str.get_underlying_impl(), + err != nullptr ? err->get_underlying_impl() : nullptr); + return std::string(str.Cstr()); + } + + /** + * Set the ABR subscription layer. + * If ABR is not enabled, the audience can only switch the high and low video stream in the origin channel. After enabling it, the audience can switch any layer in the abr channel. + * @since v4.4.0 + * @param abr_subscription_layer The layer to subscribe to. 
Refer to the rte::AbrSubscriptionLayer enumeration values for details. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidArgument: An illegal AbrSubscriptionLayer value was set. + * @return void + */ + void SetAbrSubscriptionLayer(AbrSubscriptionLayer abr_subscription_layer, Error *err = nullptr) { + RtePlayerConfigSetAbrSubscriptionLayer(&c_player_config, abr_subscription_layer, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the ABR subscription layer. + * @since v4.4.0 + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * @return AbrSubscriptionLayer The currently set subscription layer. + */ + AbrSubscriptionLayer GetAbrSubscriptionLayer(Error *err = nullptr) { + AbrSubscriptionLayer abr_subscription_layer; + RtePlayerConfigGetAbrSubscriptionLayer(&c_player_config, &abr_subscription_layer, + err != nullptr ? err->get_underlying_impl() : nullptr); + return abr_subscription_layer; + } + + /** + * Set the ABR fallback layer option. + * If ABR is not enabled, after calling this method, the audience can only set kRteAbrFallbackDisabled ~ kRteAbrFallbackAudioOnly in the original channel. + * After enabling it, the audience can switch all values of AbrFallbackLayer in the abr channel. + * @since v4.4.0 + * @param abr_fallback_layer The ABR fallback option to set. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidArgument: An illegal AbrFallbackLayer value was set. Check the value of the passed abr_fallback_layer parameter. + * @return void + */ + void SetAbrFallbackLayer(AbrFallbackLayer abr_fallback_layer, Error *err = nullptr) { + RtePlayerConfigSetAbrFallbackLayer(&c_player_config, abr_fallback_layer, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + + /** + * Get the ABR fallback layer option. 
+ * @since v4.4.0 + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * @return AbrFallbackLayer The currently set ABR fallback option. + */ + AbrFallbackLayer GetAbrFallbackLayer(Error *err = nullptr) { + AbrFallbackLayer abr_fallback_layer; + RtePlayerConfigGetAbrFallbackLayer(&c_player_config, &abr_fallback_layer, + err != nullptr ? err->get_underlying_impl() : nullptr); + return abr_fallback_layer; + } + + + ::RtePlayerConfig* get_underlying_impl() { return &c_player_config; } + + private: + friend class Player; + + ::RtePlayerConfig c_player_config; +}; + +/** + * The Player class can be used to play URL resources. + * @since v4.4.0 + */ +class Player { + public: +/** + * Construct a Player object. + * @since v4.4.0 + * @param rte Rte object. + * @param config PlayerInitialConfig initialization configuration object. Currently, a null pointer can be passed. + */ + explicit Player(Rte *self, PlayerInitialConfig *config = nullptr) + : c_player(::RtePlayerCreate(&self->c_rte, nullptr, nullptr)) {}; + ~Player() { + RtePlayerDestroy(&c_player, nullptr); + }; + + Player(Player &other) = default; + Player(Player &&other) = default; + + // @{ + Player &operator=(const Player &cmd) = delete; + Player &operator=(Player &&cmd) = delete; + // @} + + /** + * Preload URL, only valid for rte type URLs. This interface can speed up the OpenWithUrl operation. Up to 20 URLs can be preloaded. If the limit is exceeded, new preloads will replace old ones. + * @since v4.4.0 + * @param url rte type URL + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidArgument: The passed URL is empty or has an invalid format. + * @return bool Whether the preload operation was successful. + * - true: Successfully preload the Rte URL. + * - false: Failed to preload the Rte URL. + */ + static bool PreloadWithUrl(const char* url, Error *err = nullptr) { + return RtePlayerPreloadWithUrl(nullptr, url, err != nullptr ? 
err->get_underlying_impl() : nullptr); + }; + + /** + * Open URL resource. Currently, only rte URLs are supported, and cdn URLs and files are not supported. + * This interface can also be used to refresh the token of an already opened URL. + * For URL format definition and token refresh method description, refer to the doc: + * https://doc.shengwang.cn/doc/rtc/android/best-practice/playing-url + * @since v4.4.0 + * @param url The URL resource to open + * @param start_time Start time [currently not supported] + * @param cb Callback to asynchronously notify the result of the open operation. If an error occurs during open, it will enter the kRtePlayerStateFailed state. You need to call the Stop method before calling OpenWithUrl again. + * @param err Possible return values for ErrorCode. At this time, the new_state value corresponds to kRtePlayerStateFailed. + * - kRteOk: Success + * - kRteErrorDefault: For specific reasons, see Error.Message, including the following situations: + * - Failed to connect to the channel + * - kRteErrorInvalidArgument: + * - Invalid appid + * - Invalid channelid + * - Invalid uid + * - kRteErrorAuthenticationFailed: + * - Invalid token + * - Token expired + * - kRteErrorInvalidOperation: + * - Engine not initialized + * @return void + */ + void OpenWithUrl(const char* url, uint64_t start_time, std::function cb) { + CallbackContext* callbackCtx = new CallbackContext(this, cb); + RtePlayerOpenWithUrl(&c_player, url, start_time, &CallbackFunc<::RtePlayer, Player>, callbackCtx); + }; + + /** + * Open a custom source provider. 
+ * @since v4.4.0 + * @param provider + * @param start_time + * @param cb + * @return void + * @technical preview + */ + void OpenWithCustomSourceProvider(PlayerCustomSourceProvider* provider, uint64_t start_time, + std::function cb) { + CallbackContext* callbackCtx = new CallbackContext(this, cb); + RtePlayerOpenWithCustomSourceProvider(&c_player, provider, start_time, &CallbackFunc<::RtePlayer, Player>, callbackCtx); + }; + + /** + * Open a stream. + * @since v4.4.0 + * @param stream + * @param cb + * @return void + * @technical preview + */ + void OpenWithStream(Stream* stream, std::function cb) { + CallbackContext* callbackCtx = new CallbackContext(this, cb); + RtePlayerOpenWithStream(&c_player, stream != nullptr ? &stream->c_rte_stream : nullptr, &CallbackFunc<::RtePlayer, Player>, callbackCtx); + }; + + /** + * Get player playback statistics. + * @since v4.4.0 + * @param cb Asynchronous callback for statistical data. + * @param stats Statistical values. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * @return void + */ + void GetStats(std::function cb){ + CallbackContextWithArgs *ctx = new CallbackContextWithArgs(this, cb); + RtePlayerGetStats(&c_player, &CallbackFuncWithArgs<::RtePlayer, Player, rte::PlayerStats*>, ctx); + } + + /** + * Set canvas. After the stream is successfully pulled, the video frame will be rendered on the set canvas. + * @since v4.4.0 + * @param canvas The canvas object used to render video frames. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidArgument: The canvas is null. + * - kRteErrorInvalidOperation: The corresponding internal Player object has been destroyed or is invalid. + * @return bool The result of the SetCanvas operation. If it fails, you can check the specific error through err. + * - true: Successfully set the canvas. + * - false: Failed to set the canvas. 
+ */ + bool SetCanvas(Canvas *canvas, Error *err = nullptr) { + return RtePlayerSetCanvas(&c_player, canvas != nullptr ? &canvas->c_canvas : nullptr, err != nullptr ? err->get_underlying_impl() : nullptr); + }; + + /** + * Start stream playback. + * @since v4.4.0 + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Player object has been destroyed or is invalid. + * @return bool The result of the Play operation. If it fails, you can check the specific error through err. + * - true: Successfully play. + * - false: Failed to play. + */ + bool Play(Error *err = nullptr) { + return RtePlayerPlay(&c_player, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Stop playback. + * @since v4.4.0 + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Player object has been destroyed or is invalid. + * @return bool The result of the Stop operation. If it fails, you can check the specific error through err. + * - true: Successfully stop. + * - false: Failed to stop. + */ + bool Stop(Error *err = nullptr) { + return RtePlayerStop(&c_player, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Pause playback. + * @since v4.4.0 + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Player object has been destroyed or is invalid. + * @return bool The result of the Pause operation. If it fails, you can check the specific error through err. + * - true: Successfully pause. + * - false: Failed to pause. + */ + bool Pause(Error *err = nullptr) { + return RtePlayerPause(&c_player, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Seek the playback position. + * @since v4.4.0 + * @param new_time The new playback position to seek to. 
+ * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Player object has been destroyed or is invalid. + * @return bool The result of the Seek operation. If it fails, you can check the specific error through err. + * - true: Successfully Seek. + * - false: Failed to Seek. + * @technical preview + */ + bool Seek(uint64_t new_time, Error *err = nullptr) { + return RtePlayerSeek(&c_player, new_time, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Mute/unmute audio separately. + * @since v4.4.0 + * @param mute Whether to mute. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Player object has been destroyed or is invalid. + * @return bool The result of the MuteAudio operation. If it fails, you can check the specific error through err. + * - true: The mute operation was successful. + * - false: The mute operation failed. + */ + bool MuteAudio(bool mute, Error *err = nullptr) { + return RtePlayerMuteAudio(&c_player, mute, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Mute/unmute video separately. + * @since v4.4.0 + * @param mute Whether to mute. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Player object has been destroyed or is invalid. + * @return bool The result of the MuteVideo operation. If it fails, you can check the specific error through err. + * - true: The mute operation was successful. + * - false: The mute operation failed. + */ + bool MuteVideo(bool mute, Error *err = nullptr) { + return RtePlayerMuteVideo(&c_player, mute, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the playback position. 
+ * @since v4.4.0 + * @param err + * @return uint64_t + * @technical preview + */ + uint64_t GetPosition(Error *err = nullptr){ + return RtePlayerGetPosition(&c_player, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get player information. + * @since v4.4.0 + * @param info The object used to receive player information. After the interface call is successful, the player information will be copied to the info object. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Player object has been destroyed or is invalid. + * - kRteErrorInvalidArgument: The info object is null. + * @return bool The result of the GetInfo operation. If it fails, you can check the specific error through err. + * - true: Successfully get the player information. + * - false: Failed to get the player information. + */ + bool GetInfo(PlayerInfo *info, Error *err = nullptr){ + return RtePlayerGetInfo(&c_player, info, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the configuration of Player object. + * @since v4.4.0 + * @param config The object used to receive PlayerConfig information. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Player object has been destroyed or is invalid. + * - kRteErrorInvalidArgument: The config object is null. + * @return bool The result of the GetConfigs operation. If it fails, you can check the specific error through err. + * - true: Successfully get the configuration. + * - false: Failed to get the configuration. + */ + bool GetConfigs(PlayerConfig* config, Error *err = nullptr) { + return RtePlayerGetConfigs(&c_player, config->get_underlying_impl(), err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Configure the Player object. + * @since v4.4.0 + * @param config The object used to change the player configuration. 
+ * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Player object has been destroyed or is invalid. + * - kRteErrorInvalidArgument: The config object is null. + * @return bool The result of the SetConfigs operation. If it fails, you can check the specific error through err. + * - true: Successfully set the configuration. + * - false: Failed to set the configuration. + */ + bool SetConfigs(PlayerConfig* config, Error *err = nullptr) { + return RtePlayerSetConfigs(&c_player, config->get_underlying_impl(), err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Register player observer. + * @since v4.4.0 + * @param observer The object used to receive player-related callbacks. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Player object has been destroyed or is invalid. + * - kRteErrorInvalidArgument: The observer object is null. + * @return bool The result of the RegisterObserver operation. If it fails, you can check the specific error through err. + * - true: Successfully register the observer. + * - false: Failed to register the observer. + */ + bool RegisterObserver(PlayerObserver *observer, Error *err = nullptr) { + return RtePlayerRegisterObserver( + &c_player, observer != nullptr ? observer->c_player_observer : nullptr, err != nullptr ? err->get_underlying_impl() : nullptr); + } + +/** + * Unregister player observer. + * @since v4.4.0 + * @param observer The object used to receive player-related callbacks. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Player object has been destroyed or is invalid. + * - kRteErrorInvalidArgument: The observer object is null. + * @return bool The result of the UnregisterObserver operation. If it fails, you can check the specific error through err. 
+ * - true: Successfully unregister the observer. + * - false: Failed to unregister the observer. + */ + bool UnregisterObserver(PlayerObserver *observer, Error *err = nullptr){ + return RtePlayerUnregisterObserver(&c_player, observer != nullptr ? observer->c_player_observer : nullptr, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + private: + ::RtePlayer c_player; +}; + +} // namespace rte diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_rte.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_rte.h new file mode 100644 index 000000000..7c51e346d --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_rte.h @@ -0,0 +1,403 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once +#include +#include "rte_base/c/c_rte.h" +#include "rte_base/c/bridge.h" + +#include "rte_cpp_error.h" +#include "rte_cpp_callback_utils.h" +#include "rte_cpp_string.h" + +struct RteObserver; +struct RteInitialConfig; +struct RteConfig; + +namespace rte { + +class Player; + +/** + * The InitialConfig class is used to initialize the Rte object. + * @since v4.4.0 + * @technical preview + */ +class InitialConfig { + public: + InitialConfig() { RteInitialConfigInit(&c_rte_init_cfg, nullptr); } + ~InitialConfig() { RteInitialConfigDeinit(&c_rte_init_cfg, nullptr);} + + private: + friend class Rte; + ::RteInitialConfig c_rte_init_cfg; +}; + +/** + * The Observer class is used to observe the event of Rte object. 
+ * @since v4.4.0 + * @technical preview + */ +class Observer { + public: + Observer(): c_rte_observer(::RteObserverCreate(nullptr)) { + c_rte_observer->base_observer.me_in_target_lang = this;} + ~Observer() { RteObserverDestroy(c_rte_observer, nullptr); } + + // @{ + Observer(Observer &other) = delete; + Observer(Observer &&other) = delete; + Observer &operator=(const Observer &cmd) = delete; + Observer &operator=(Observer &&cmd) = delete; + // @} + + private: + friend class Rte; + + ::RteObserver *c_rte_observer; +}; + +/** + * The Config class is used to configure the Rte object. + * @since v4.4.0 + */ +class Config { + public: + Config() {RteConfigInit(&c_rte_config, nullptr);} + ~Config() {RteConfigDeinit(&c_rte_config, nullptr);} + + // @{ + Config(Config &other) = delete; + Config(Config &&other) = delete; + Config &operator=(const Config &cmd) = delete; + Config &operator=(Config &&cmd) = delete; + // @} + + /** + * Set the App ID Parameter, which is used to initialize the engine. This field value needs to be set before calling Rte::InitMediaEngine to initialize the engine. + * If not set, the default value is an empty string. + * @since v4.4.0 + * @param app_id Your project's App ID + * @param err Possible return of the following ErrorCode + * - kRteOk: Success + * - kRteErrorInvalidArgument: Indicates that the app_id parameter is empty. + * @return void + */ + void SetAppId(const char *app_id, Error *err = nullptr){ + String str(app_id); + RteConfigSetAppId(&c_rte_config, str.get_underlying_impl(), err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the App ID Parameter. + * @since v4.4.0 + * @param err Possible return of the following ErrorCode + * - kRteOk: Success + * @return std::string The AppId value + */ + std::string GetAppId(Error *err = nullptr){ + String str; + RteConfigGetAppId(&c_rte_config, str.get_underlying_impl(), err != nullptr ? 
err->get_underlying_impl() : nullptr); + return std::string(str.Cstr()); + } + + + /** + * Set the Log Folder Parameter + * @since v4.4.0 + * @param log_folder + * @param err + * @technical preview + */ + void SetLogFolder(const char *log_folder, Error *err = nullptr){ + String str(log_folder); + RteConfigSetLogFolder(&c_rte_config, str.get_underlying_impl(), err != nullptr ? err->get_underlying_impl() : nullptr); + } + + + /** + * Get the Log Folder Parameter + * @since v4.4.0 + * @param err + * @return const char* + * @technical preview + */ + std::string GetLogFolder(Error *err = nullptr){ + String str; + RteConfigGetLogFolder(&c_rte_config, str.get_underlying_impl(), err != nullptr ? err->get_underlying_impl() : nullptr); + return std::string(str.Cstr()); + } + + /** + * Set the Log File Size Parameter + * @since v4.4.0 + * @param log_file_size + * @param err + * @technical preview + */ + void SetLogFileSize(size_t log_file_size, Error *err = nullptr){ + RteConfigSetLogFileSize(&c_rte_config, log_file_size, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the Log File Size Parameter + * @since v4.4.0 + * @param err + * @return size_t + * @technical preview + */ + size_t GetLogFileSize(Error *err = nullptr){ + size_t log_file_size; + RteConfigGetLogFileSize(&c_rte_config, &log_file_size, err != nullptr ? err->get_underlying_impl() : nullptr); + return log_file_size; + } + + /** + * Set the Area Code Parameter + * @since v4.4.0 + * @param area_code + * @param err + * @technical preview + */ + void SetAreaCode(int32_t area_code, Error *err = nullptr){ + RteConfigSetAreaCode(&c_rte_config, area_code, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the Area Code Parameter + * @since v4.4.0 + * @param err + * @return int32_t + * @technical preview + */ + int32_t GetAreaCode(Error *err = nullptr){ + int32_t area_code; + RteConfigGetAreaCode(&c_rte_config, &area_code, err != nullptr ? 
err->get_underlying_impl() : nullptr); + return area_code; + } + + /** + * Set the Cloud Proxy Parameter + * @since v4.4.0 + * @param cloud_proxy + * @param err + * @technical preview + */ + void SetCloudProxy(const char *cloud_proxy, Error *err = nullptr){ + String str(cloud_proxy); + RteConfigSetCloudProxy(&c_rte_config, str.get_underlying_impl(), err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Get the Cloud Proxy Parameter + * @since v4.4.0 + * @param err + * @return std::string + * @technical preview + */ + std::string GetCloudProxy(Error *err = nullptr){ + String str; + RteConfigGetCloudProxy(&c_rte_config, str.get_underlying_impl(), err != nullptr ? err->get_underlying_impl() : nullptr); + return std::string(str.Cstr()); + } + + /** + * Set Json format parameters, usually used to set some private parameters supported by rte. + * @since v4.4.0 + * @param json_parameter json format parameter set + * @param err Possible return of the following ErrorCode + * - kRteOk: Success + * - kRteErrorInvalidArgument: Indicates that the json_parameter parameter is empty. + * @return void + */ + + void SetJsonParameter(const char *json_parameter, Error *err = nullptr){ + String str(json_parameter); + RteConfigSetJsonParameter(&c_rte_config, str.get_underlying_impl(), err != nullptr ? 
err->get_underlying_impl() : nullptr); + return std::string(str.Cstr()); + } + + private: + ::RteConfig* get_underlying_impl() { return &c_rte_config; } + + private: + friend class Rte; + ::RteConfig c_rte_config; +}; + +/** + * The Rte class, which is the base interface of the Agora Real Time Engagement SDK. + * @since v4.4.0 + */ +class Rte { + public: + + /** + * Create an Rte object from the rtc bridge. Used in scenarios where the rtc engine has already been initialized, + * which can save the operation of initializing the rte engine. + * @since v4.4.0 + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: Indicates that the rtc engine instance has not been created or the rtc engine has not been initialized. + * Unable to bridge rte from rtc engine. + * @return Rte object. If the Rte object is invalid, subsequent operations on Rte will return an error. + */ + static Rte GetFromBridge(Error* err = nullptr){ + Rte rte( RteGetFromBridge(err != nullptr ? err->get_underlying_impl() : nullptr)); + return rte; + } + + /** + * Construct an Rte object. + * @since v4.4.0 + * @param config Rte object initialization configuration object. + */ + explicit Rte(InitialConfig *config = nullptr): c_rte(::RteCreate(config != nullptr ? &config->c_rte_init_cfg : nullptr, nullptr)) {} + ~Rte(){Destroy();}; + + /** + * Construct a new Rte object. + * + * @param other + */ + Rte(Rte &&other) : c_rte(other.c_rte) { + other.c_rte = {}; + } + + // @{ + Rte(Rte &other) = delete; + Rte &operator=(const Rte &other) = delete; + Rte &operator=(Rte &&other) = delete; + // @} + + /** + * Register an RTE observer. + * @since v4.4.0 + * @param observer The object that observes RTE callback events. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal RTE object has been destroyed or is invalid. + * - kRteErrorInvalidArgument: The registered observer object is null. 
+ * @return bool + * - true: Registration is successful. + * - false: Registration failed. + * @technical preview + */ + bool RegisterObserver(Observer *observer, Error *err = nullptr){ + return RteRegisterObserver(&c_rte, observer != nullptr ? observer->c_rte_observer : nullptr, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Unregister the RTE observer object. + * @since v4.4.0 + * @param observer The object that observes RTE callback events. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal RTE object has been destroyed or is invalid. + * - kRteErrorInvalidArgument: The unregistered observer object is null. + * @return bool + * - true: Unregistration is successful. + * - false: Unregistration failed. + * @technical preview + */ + bool UnregisterObserver(Observer *observer, Error *err = nullptr){ + return RteUnregisterObserver(&c_rte, observer != nullptr ? observer->c_rte_observer : nullptr, + err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Initialize the media engine. + * + * @param cb Asynchronous callback function that returns the result of engine initialization. + * - @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorDefault: Engine initialization failed, specific error reason can be obtained through Error.Message(). + * - kRteErrorInvalidOperation: Rte object created through GetFromBridge, initialization is not allowed. + * + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorDefault: Engine initialization failed, specific error description can be obtained through Error.Message(). + * - kRteErrorInvalidOperation: The corresponding internal Rte object has been destroyed or is invalid. + * @return bool Returns whether the asynchronous operation was successfully placed in the asynchronous operation queue, not whether the initialization action was successful. 
+ * - true: Asynchronous initialization action executed normally. + * - false: Asynchronous initialization action did not execute normally. + */ + bool InitMediaEngine(std::function cb, Error *err = nullptr){ + auto* ctx = new CallbackContext(this, cb); + return RteInitMediaEngine(&c_rte, &CallbackFunc<::Rte, Rte>, ctx, err != nullptr ? err->get_underlying_impl() : nullptr); + } + +/** + * Get the configuration of Rte object. + * @since v4.4.0 + * @param config The object used to get the rte config configuration. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Rte object has been destroyed or is invalid. + * - kRteErrorInvalidArgument: The passed config object is null. + * @return bool Returns the result of getting the configuration information. + * - true: Successfully retrieved. + * - false: Failed to retrieve. + */ + bool GetConfigs(Config *config, Error *err = nullptr){ + return RteGetConfigs(&c_rte, config != nullptr ? config->get_underlying_impl(): nullptr, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Configure the Rte object. + * @since v4.4.0 + * @param config The object used to set the rte config configuration. + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Rte object has been destroyed or is invalid. + * - kRteErrorInvalidArgument: The passed config object is null. + * @return bool Returns the result of setting the configuration information. + * - true: Successfully set the configuration. + * - false: Failed to set the configuration. + */ + bool SetConfigs(Config *config, Error *err = nullptr){ + return RteSetConfigs(&c_rte, config != nullptr ? config->get_underlying_impl(): nullptr, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + /** + * Destroy the Rte object. The operation will release all resources used by the Rte object. 
+ * @since v4.4.0 + * @param err Possible return values for ErrorCode: + * - kRteOk: Success + * - kRteErrorInvalidOperation: The corresponding internal Rte object has been destroyed or is invalid. + * @return bool Returns the result of destroying the Rte object. + * - true: Successfully destroyed. + * - false: Failed to destroy. + */ + bool Destroy(Error *err = nullptr){ + return RteDestroy(&c_rte, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + private: + + explicit Rte(::Rte other) { c_rte = other; } + + private: + friend class Player; + friend class Canvas; + + ::Rte c_rte; +}; + +} // namespace rte diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_stream.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_stream.h new file mode 100644 index 000000000..1f646ad16 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_stream.h @@ -0,0 +1,30 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. + * + */ +#pragma once +#include "rte_base/c/stream/stream.h" + +namespace rte { + +/** + * The Stream class is used to manage the stream. + * @since v4.4.0 + * @technical preview + */ +class Stream { + + public: + Stream() = default; + ~Stream() = default; + + private: + friend class Rte; + friend class Player; + + ::RteStream c_rte_stream; +}; + +} // namespace rte \ No newline at end of file diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_string.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_string.h new file mode 100644 index 000000000..106891d62 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_base/rte_cpp_string.h @@ -0,0 +1,66 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once +#include "rte_base/c/utils/string.h" + +namespace rte { + +class Config; +class PlayerConfig; + +/** + * The String class is used to manage the string. + * @since v4.4.0 + * @technical preview + */ +class String { + public: + + String(){ + c_rte_string = RteStringCreate(nullptr); + RteStringInit(c_rte_string, nullptr); + } + + String(const char* str) { + c_rte_string = RteStringCreate(nullptr); + RteStringInit(c_rte_string, nullptr); + if(nullptr != str){ + RteStringInitWithCStr(c_rte_string, str, nullptr); + } + } + + ~String() { + RteStringDeinit(c_rte_string, nullptr); + RteStringDestroy(c_rte_string, nullptr); + } + + void Format(const char* fmt, ...) { + va_list args; + va_start(args, fmt); + RteStringInitWithValue(c_rte_string, nullptr, fmt, args); + va_end(args); + } + + void Copy(const String &other) { + RteStringCopy(c_rte_string, other.c_rte_string, nullptr); + } + + const char* Cstr() const { + return RteStringCStr(c_rte_string, nullptr); + } + + friend class Config; + friend class PlayerConfig; + + private: + ::RteString* get_underlying_impl() const { return c_rte_string; } + + private: + ::RteString *c_rte_string; +}; + +} // namespace rte diff --git a/android/src/main/cpp/third_party/include/agora_rtc/rte_cpp.h b/android/src/main/cpp/third_party/include/agora_rtc/rte_cpp.h new file mode 100644 index 000000000..4ea178b95 --- /dev/null +++ b/android/src/main/cpp/third_party/include/agora_rtc/rte_cpp.h @@ -0,0 +1,14 @@ +/** + * + * Agora Real Time Engagement + * Copyright (c) 2024 Agora IO. All rights reserved. 
+ * + */ +#pragma once + +#include "rte_base/rte_cpp_error.h" // IWYU pragma: export +#include "rte_base/rte_cpp_player.h" // IWYU pragma: export +#include "rte_base/rte_cpp_rte.h" // IWYU pragma: export +#include "rte_base/rte_cpp_canvas.h" // IWYU pragma: export +#include "rte_base/rte_cpp_string.h" // IWYU pragma: export +#include "rte_base/rte_cpp_callback_utils.h" // IWYU pragma: export diff --git a/android/src/main/cpp/third_party/include/iris/iris_base.h b/android/src/main/cpp/third_party/include/iris/iris_base.h index 437e1e54f..e3069d8fc 100644 --- a/android/src/main/cpp/third_party/include/iris/iris_base.h +++ b/android/src/main/cpp/third_party/include/iris/iris_base.h @@ -45,6 +45,7 @@ typedef enum IrisError { ERR_FAILED = 1, ERR_INVALID_ARGUMENT = 2, ERR_NOT_SUPPORTED = 4, + ERR_BUFFER_TOO_SMALL = 6, ERR_NOT_INITIALIZED = 7, /*base from IRIS_VIDEO_PROCESS_ERR::ERR_NULL_POINTER=1*/ @@ -59,11 +60,19 @@ typedef enum IrisError { IRIS_API void enableUseJsonArray(bool enable); +bool getEnableUseJsonArray(); + void saveAppType(IrisAppType type); IrisAppType getAppType(); -IRIS_API void InitIrisLogger(const char *path, int maxSize, IrisLogLevel level); +IRIS_API void IRIS_CALL InitIrisLogger(const char *path, int maxSize, + IrisLogLevel level); + +IRIS_API void IRIS_CALL WriteIrisLogInternal(IrisLogLevel level, + const char *fmt, ...); + +IRIS_API void IRIS_CALL WriteIrisLog(IrisLogLevel level, const char *content); typedef struct EventParam { const char *event; diff --git a/android/src/main/cpp/third_party/include/iris/iris_rtc_high_performance_c_api.h b/android/src/main/cpp/third_party/include/iris/iris_rtc_high_performance_c_api.h index 6d0621fe6..bb1a4b37a 100644 --- a/android/src/main/cpp/third_party/include/iris/iris_rtc_high_performance_c_api.h +++ b/android/src/main/cpp/third_party/include/iris/iris_rtc_high_performance_c_api.h @@ -49,6 +49,40 @@ struct IrisAudioFrame { uint32_t rtpTimestamp; }; +struct IrisHdr10MetadataInfo { + //The x coordinates of 
the red value in the CIE1931 color space. The values need to be normalized to 50,000. + uint16_t redPrimaryX; + // The y coordinates of the red value in the CIE1931 color space. The values need to be normalized to 50,000. + uint16_t redPrimaryY; + // The x coordinates of the green value in the CIE1931 color space. The values need to be normalized to 50,000. + uint16_t greenPrimaryX; + // The y coordinates of the green value in the CIE1931 color space. The values need to be normalized to 50,000. + uint16_t greenPrimaryY; + // The x coordinates of the blue value in the CIE1931 color space. The values need to be normalized to 50,000. + uint16_t bluePrimaryX; + // The y coordinates of the blue value in the CIE1931 color space. The values need to be normalized to 50,000. + uint16_t bluePrimaryY; + // The x coordinates of the white point in the CIE1931 color space. The values need to be normalized to 50,000. + uint16_t whitePointX; + // The y coordinates of the white point in the CIE1931 color space. The values need to be normalized to 50,000. + uint16_t whitePointY; + // The maximum number of nits of the display used to master the content. The values need to be normalized to 10,000. + unsigned int maxMasteringLuminance; + // The minimum number of nits of the display used to master the content. The values need to be normalized to 10,000. + unsigned int minMasteringLuminance; + // The maximum content light level (MaxCLL). This is the nit value corresponding to the brightest pixel used anywhere in the content. + uint16_t maxContentLightLevel; + // The maximum frame average light level (MaxFALL). This is the nit value corresponding to the average luminance of the frame which has the brightest average luminance anywhere in the content. + uint16_t maxFrameAverageLightLevel; +}; + +struct IrisColorSpace { + int primaries; + int transfer; + int matrix; + int range; +}; + +struct IrisExternalVideoFrame { + //The buffer type: #VIDEO_BUFFER_TYPE. 
int type; @@ -82,20 +116,39 @@ struct IrisExternalVideoFrame { int eglType; // [Texture related parameter] Incoming 4 × 4 transformational matrix. The typical value is a unit matrix. int textureId; + /** + * [Texture related parameter] The fence object related to the textureId parameter, indicating the synchronization status of the video data in Texture format. + * The default value is 0 + */ + long long fenceObject; // [Texture related parameter] Incoming 4 × 4 transformational matrix. The typical value is a unit matrix. float matrix[16]; // [Texture related parameter] The MetaData buffer. The default value is NULL - uint8_t *metadata_buffer; + uint8_t *metadataBuffer; // [Texture related parameter] The MetaData size.The default value is 0 - int metadata_size; + int metadataSize; // Indicates the alpha channel of current frame, which is consistent with the dimension of the video frame. uint8_t *alphaBuffer; // Extract alphaBuffer from bgra or rgba data. Set it true if you do not explicitly specify the alphabuffer. bool fillAlphaBuffer; + /** + * The relative position between alphabuffer and the frame. + * 0: Normal frame; + * 1: Alphabuffer is above the frame; + * 2: Alphabuffer is below the frame; + * 3: Alphabuffer is on the left of frame; + * 4: Alphabuffer is on the right of frame; + * The default value is 0. + */ + int alphaStitchMode; //[For Windows only] The pointer of ID3D11Texture2D used by the video frame. - void *d3d11_texture_2d; + void *d3d11Texture2d; // [For Windows only] The index of ID3D11Texture2D array used by the video frame. - int texture_slice_index; + int textureSliceIndex; + // metadata info used for hdr video data + IrisHdr10MetadataInfo hdr10MetadataInfo; + // The ColorSpace of the video frame. 
+ IrisColorSpace colorSpace; }; struct IrisEncodedVideoFrameInfo { @@ -126,86 +179,82 @@ struct IrisEncodedVideoFrameInfo { }; IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_SetMaxAudioRecvCount( - IrisApiEnginePtr enginePtr, int maxCount); + IrisHandle enginePtr, int maxCount); -IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_SetAudioRecvRange( - IrisApiEnginePtr enginePtr, float range); +IRIS_API int IRIS_CALL +ILocalSpatialAudioEngine_SetAudioRecvRange(IrisHandle enginePtr, float range); -IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_SetDistanceUnit( - IrisApiEnginePtr enginePtr, float unit); +IRIS_API int IRIS_CALL +ILocalSpatialAudioEngine_SetDistanceUnit(IrisHandle enginePtr, float unit); IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_UpdateSelfPosition( - IrisApiEnginePtr enginePtr, float positionX, float positionY, - float positionZ, float axisForwardX, float axisForwardY, float axisForwardZ, + IrisHandle enginePtr, float positionX, float positionY, float positionZ, + float axisForwardX, float axisForwardY, float axisForwardZ, float axisRightX, float axisRightY, float axisRightZ, float axisUpX, float axisUpY, float axisUpZ); IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_UpdateSelfPositionEx( - IrisApiEnginePtr enginePtr, float positionX, float positionY, - float positionZ, float axisForwardX, float axisForwardY, float axisForwardZ, + IrisHandle enginePtr, float positionX, float positionY, float positionZ, + float axisForwardX, float axisForwardY, float axisForwardZ, float axisRightX, float axisRightY, float axisRightZ, float axisUpX, - float axisUpY, float axisUpZ, char *channelId, unsigned int localUid); + float axisUpY, float axisUpZ, const char *channelId, unsigned int localUid); IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_UpdatePlayerPositionInfo( - IrisApiEnginePtr enginePtr, int playerId, float positionX, float positionY, + IrisHandle enginePtr, int playerId, float positionX, float positionY, float positionZ, float forwardX, float forwardY, 
float forwardZ); -IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_MuteLocalAudioStream( - IrisApiEnginePtr enginePtr, bool mute); +IRIS_API int IRIS_CALL +ILocalSpatialAudioEngine_MuteLocalAudioStream(IrisHandle enginePtr, bool mute); IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_MuteAllRemoteAudioStreams( - IrisApiEnginePtr enginePtr, bool mute); + IrisHandle enginePtr, bool mute); IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_SetZones( - IrisApiEnginePtr enginePtr, IrisSpatialAudioZone *zones, - unsigned int zoneCount); + IrisHandle enginePtr, IrisSpatialAudioZone *zones, unsigned int zoneCount); IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_SetPlayerAttenuation( - IrisApiEnginePtr enginePtr, int playerId, double attenuation, - bool forceSet); + IrisHandle enginePtr, int playerId, double attenuation, bool forceSet); IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_MuteRemoteAudioStream( - IrisApiEnginePtr enginePtr, unsigned int uid, bool mute); + IrisHandle enginePtr, unsigned int uid, bool mute); IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_UpdateRemotePosition( - IrisApiEnginePtr enginePtr, unsigned int uid, float positionX, - float positionY, float positionZ, float forwardX, float forwardY, - float forwardZ); + IrisHandle enginePtr, unsigned int uid, float positionX, float positionY, + float positionZ, float forwardX, float forwardY, float forwardZ); IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_UpdateRemotePositionEx( - IrisApiEnginePtr enginePtr, unsigned int uid, float positionX, - float positionY, float positionZ, float forwardX, float forwardY, - float forwardZ, char *channelId, unsigned int localUid); + IrisHandle enginePtr, unsigned int uid, float positionX, float positionY, + float positionZ, float forwardX, float forwardY, float forwardZ, + const char *channelId, unsigned int localUid); IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_RemoveRemotePosition( - IrisApiEnginePtr enginePtr, unsigned int uid); + IrisHandle enginePtr, unsigned int 
uid); IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_RemoveRemotePositionEx( - IrisApiEnginePtr enginePtr, unsigned int uid, char *channelId, + IrisHandle enginePtr, unsigned int uid, const char *channelId, unsigned int localUid); IRIS_API int IRIS_CALL -ILocalSpatialAudioEngine_ClearRemotePositions(IrisApiEnginePtr enginePtr); +ILocalSpatialAudioEngine_ClearRemotePositions(IrisHandle enginePtr); IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_ClearRemotePositionsEx( - IrisApiEnginePtr enginePtr, char *channelId, unsigned int localUid); + IrisHandle enginePtr, const char *channelId, unsigned int localUid); IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_SetRemoteAudioAttenuation( - IrisApiEnginePtr enginePtr, unsigned int uid, double attenuation, - bool forceSet); + IrisHandle enginePtr, unsigned int uid, double attenuation, bool forceSet); -IRIS_API int IRIS_CALL IMediaEngine_PushAudioFrame(IrisApiEnginePtr enginePtr, +IRIS_API int IRIS_CALL IMediaEngine_PushAudioFrame(IrisHandle enginePtr, IrisAudioFrame *frame, unsigned int trackId); -IRIS_API int IRIS_CALL IMediaEngine_PullAudioFrame(IrisApiEnginePtr enginePtr, +IRIS_API int IRIS_CALL IMediaEngine_PullAudioFrame(IrisHandle enginePtr, IrisAudioFrame *frame); -IRIS_API int IRIS_CALL IMediaEngine_PushVideoFrame( - IrisApiEnginePtr enginePtr, IrisExternalVideoFrame *frame, - unsigned int videoTrackId); +IRIS_API int IRIS_CALL +IMediaEngine_PushVideoFrame(IrisHandle enginePtr, IrisExternalVideoFrame *frame, + unsigned int videoTrackId); IRIS_API int IRIS_CALL IMediaEngine_PushEncodedVideoImage( - IrisApiEnginePtr enginePtr, const unsigned char *imageBuffer, + IrisHandle enginePtr, const unsigned char *imageBuffer, unsigned long long length, IrisEncodedVideoFrameInfo &videoEncodedFrameInfo, unsigned int videoTrackId); \ No newline at end of file diff --git a/android/src/main/cpp/third_party/include/iris/iris_rtc_rendering_cxx.h b/android/src/main/cpp/third_party/include/iris/iris_rtc_rendering_cxx.h index 
646f52e14..a6c158a48 100644 --- a/android/src/main/cpp/third_party/include/iris/iris_rtc_rendering_cxx.h +++ b/android/src/main/cpp/third_party/include/iris/iris_rtc_rendering_cxx.h @@ -4,10 +4,6 @@ #include "iris_base.h" #include "iris_rtc_rendering_c.h" -//////// operators for IrisRtcVideoFrameConfig /////////// -IRIS_CPP_API bool operator<(const IrisRtcVideoFrameConfig &lhs, - const IrisRtcVideoFrameConfig &rhs); - namespace agora { namespace iris { diff --git a/example/android/build.gradle b/example/android/build.gradle index 27deaadb0..fec634e19 100644 --- a/example/android/build.gradle +++ b/example/android/build.gradle @@ -1,6 +1,6 @@ buildscript { // Min kotlin version for Flutter SDK 3.24 - ext.kotlin_version = '1.7.0' + ext.kotlin_version = '1.9.10' repositories { google() mavenCentral() diff --git a/example/lib/components/stats_monitoring_widget.dart b/example/lib/components/stats_monitoring_widget.dart index 55b116baa..749bc936a 100644 --- a/example/lib/components/stats_monitoring_widget.dart +++ b/example/lib/components/stats_monitoring_widget.dart @@ -87,7 +87,7 @@ class __StatsMonitoringInternalWidgetState _localAudioStats = stats; }); }, - onLocalVideoStats: (source, stats) { + onLocalVideoStats: (connection, stats) { setState(() { _localVideoStats = stats; }); diff --git a/example/lib/examples/advanced/set_beauty_effect/set_beauty_effect.dart b/example/lib/examples/advanced/set_beauty_effect/set_beauty_effect.dart index 49795e1f4..509b5bd61 100644 --- a/example/lib/examples/advanced/set_beauty_effect/set_beauty_effect.dart +++ b/example/lib/examples/advanced/set_beauty_effect/set_beauty_effect.dart @@ -85,23 +85,23 @@ class _State extends State with KeepRemoteVideoViewsMixin { isJoined = false; }); }, - onExtensionError: - (String provider, String extName, int error, String msg) { + onExtensionErrorWithContext: + (ExtensionContext context, int error, String message) { logSink.log( - '[onExtensionErrored] provider: $provider, extName: $extName, 
error: $error, msg: $msg'); + '[onExtensionErrored] provider: ${context.providerName}, extName: ${context.extensionName}, error: $error, message: $message'); }, - onExtensionStarted: (String provider, String extName) { - logSink - .log('[onExtensionStarted] provider: $provider, extName: $extName'); + onExtensionStartedWithContext: (ExtensionContext context) { + logSink.log( + '[onExtensionStarted] provider: ${context.providerName}, extName: ${context.extensionName}'); }, - onExtensionEvent: - (String provider, String extName, String key, String value) { - logSink - .log('[onExtensionEvent] provider: $provider, extName: $extName'); + onExtensionEventWithContext: + (ExtensionContext context, String key, String value) { + logSink.log( + '[onExtensionEvent] provider: ${context.providerName}, extName: ${context.extensionName}'); }, - onExtensionStopped: (String provider, String extName) { - logSink - .log('[onExtensionStopped] provider: $provider, extName: $extName'); + onExtensionStoppedWithContext: (ExtensionContext context) { + logSink.log( + '[onExtensionStopped] provider: ${context.providerName}, extName: ${context.extensionName}'); }, )); diff --git a/example/lib/main.dart b/example/lib/main.dart index ff8c23089..6dc61733c 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -22,7 +22,7 @@ void main() { // `PlatformDispatcher.instance.onError` but not `runZonedGuarded` to // handle "Errors not caught by Flutter", // see: https://docs.flutter.dev/testing/errors#handling-all-types-of-errors, - // follow the Flutter SDK doc after we can bump the mini supported Flutter SDK (currently 2.10.x) + // follow the Flutter SDK doc after we can bump the mini supported Flutter SDK (currently 2.10.x) // to the newer version of Flutter SDK. 
runZonedGuarded(() { runApp(const MyApp()); @@ -64,7 +64,8 @@ class _MyAppState extends State { Future _requestPermissionIfNeed() async { if (defaultTargetPlatform == TargetPlatform.android) { - await [Permission.audio, Permission.microphone, Permission.camera].request(); + await [Permission.audio, Permission.microphone, Permission.camera] + .request(); } } diff --git a/example/web/index.html b/example/web/index.html index a99f7f52e..5494265ad 100644 --- a/example/web/index.html +++ b/example/web/index.html @@ -100,6 +100,6 @@ loadMainDartJs(); } - + diff --git a/internal/deps_summary.txt b/internal/deps_summary.txt index db0574bb9..214a0a682 100644 --- a/internal/deps_summary.txt +++ b/internal/deps_summary.txt @@ -1,18 +1,18 @@ Iris: -https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_Android_Video_20240604_0456_504.zip -https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_iOS_Video_20240604_0459_409.zip -https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_Mac_Video_20240604_0500_404.zip -https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_Windows_Video_20240604_0456_441.zip -implementation 'io.agora.rtc:iris-rtc:4.3.2-build.1' -pod 'AgoraIrisRTC_iOS', '4.3.2-build.1' -pod 'AgoraIrisRTC_macOS', '4.3.2-build.1' +https://download.agora.io/sdk/release/iris_4.5.0-build.1_DCG_Android_Video_16K_20241203_0322_701.zip +https://download.agora.io/sdk/release/iris_4.5.0-build.1_DCG_iOS_Video_20241203_0325_575.zip +https://download.agora.io/sdk/release/iris_4.5.0-build.1_DCG_Mac_Video_20241203_0322_534.zip +https://download.agora.io/sdk/release/iris_4.5.0-build.1_DCG_Windows_Video_20241203_0322_577.zip +implementation 'io.agora.rtc:iris-rtc:4.5.0-build.1' +pod 'AgoraIrisRTC_iOS', '4.5.0-build.1' +pod 'AgoraIrisRTC_macOS', '4.5.0-build.1' Native: -implementation 'io.agora.rtc:full-sdk:4.3.2' -implementation 'io.agora.rtc:full-screen-sharing:4.3.2' -pod 'AgoraRtcEngine_iOS', '4.3.2' -pod 'AgoraRtcEngine_macOS', '4.3.2' \ No newline at end of 
file +implementation 'io.agora.rtc:agora-special-full:4.5.0.1' +implementation 'io.agora.rtc:full-screen-sharing:4.5.0.1' +pod 'AgoraRtcEngine_iOS', '4.5.0' +pod 'AgoraRtcEngine_macOS', '4.5.0' \ No newline at end of file diff --git a/ios/agora_rtc_engine.podspec b/ios/agora_rtc_engine.podspec index 926f6d303..858c8e513 100644 --- a/ios/agora_rtc_engine.podspec +++ b/ios/agora_rtc_engine.podspec @@ -23,8 +23,8 @@ Pod::Spec.new do |s| puts '[plugin_dev] Found .plugin_dev file, use vendored_frameworks instead.' s.vendored_frameworks = 'libs/*.xcframework' else - s.dependency 'AgoraIrisRTC_iOS', '4.3.2-build.1' - s.dependency 'AgoraRtcEngine_iOS', '4.3.2' + s.dependency 'AgoraIrisRTC_iOS', '4.5.0-build.1' + s.dependency 'AgoraRtcEngine_iOS', '4.5.0' end s.platform = :ios, '9.0' diff --git a/lib/src/agora_base.dart b/lib/src/agora_base.dart index 0a8a35a08..dd0f10942 100644 --- a/lib/src/agora_base.dart +++ b/lib/src/agora_base.dart @@ -692,7 +692,7 @@ enum QualityType { @JsonValue(7) qualityUnsupported, - /// 8: Detecting the network quality. + /// 8: The last-mile network probe test is in progress. @JsonValue(8) qualityDetecting, } @@ -930,7 +930,11 @@ extension OrientationModeExt on OrientationMode { /// Video degradation preferences when the bandwidth is a constraint. @JsonEnum(alwaysCreate: true) enum DegradationPreference { - /// 0: (Default) Prefers to reduce the video frame rate while maintaining video resolution during video encoding under limited bandwidth. This degradation preference is suitable for scenarios where video quality is prioritized. + /// 0: (Default) Automatic mode. The SDK will automatically select maintainFramerate, maintainBalanced or maintainResolution based on the video scenario you set, in order to achieve the best overall quality of experience (QoE). + @JsonValue(-1) + maintainAuto, + + /// 0: Prefers to reduce the video frame rate while maintaining video resolution during video encoding under limited bandwidth. 
This degradation preference is suitable for scenarios where video quality is prioritized. Deprecated: This enumerator is deprecated. Use other enumerations instead. @JsonValue(0) maintainQuality, @@ -1241,6 +1245,10 @@ enum AudioCodecType { /// @nodoc @JsonValue(12) audioCodecLpcnet, + + /// @nodoc + @JsonValue(13) + audioCodecOpusmc, } /// @nodoc @@ -1484,6 +1492,30 @@ enum VideoStreamType { /// 1: Low-quality video stream. @JsonValue(1) videoStreamLow, + + /// @nodoc + @JsonValue(4) + videoStreamLayer1, + + /// @nodoc + @JsonValue(5) + videoStreamLayer2, + + /// @nodoc + @JsonValue(6) + videoStreamLayer3, + + /// @nodoc + @JsonValue(7) + videoStreamLayer4, + + /// @nodoc + @JsonValue(8) + videoStreamLayer5, + + /// @nodoc + @JsonValue(9) + videoStreamLayer6, } /// @nodoc @@ -1619,11 +1651,15 @@ class EncodedVideoFrameInfo { /// Compression preference for video encoding. @JsonEnum(alwaysCreate: true) enum CompressionPreference { + /// -1: (Default) Automatic mode. The SDK will automatically select preferLowLatency or preferQuality based on the video scenario you set to achieve the best user experience. + @JsonValue(-1) + preferCompressionAuto, + /// 0: Low latency preference. The SDK compresses video frames to reduce latency. This preference is suitable for scenarios where smoothness is prioritized and reduced video quality is acceptable. @JsonValue(0) preferLowLatency, - /// 1: (Default) High quality preference. The SDK compresses video frames while maintaining video quality. This preference is suitable for scenarios where video quality is prioritized. + /// 1: High quality preference. The SDK compresses video frames while maintaining video quality. This preference is suitable for scenarios where video quality is prioritized. 
@JsonValue(1) preferQuality, } @@ -1674,7 +1710,8 @@ extension EncodingPreferenceExt on EncodingPreference { @JsonSerializable(explicitToJson: true, includeIfNull: false) class AdvanceOptions { /// @nodoc - const AdvanceOptions({this.encodingPreference, this.compressionPreference}); + const AdvanceOptions( + {this.encodingPreference, this.compressionPreference, this.encodeAlpha}); /// Video encoder preference. See EncodingPreference. @JsonKey(name: 'encodingPreference') @@ -1684,6 +1721,10 @@ class AdvanceOptions { @JsonKey(name: 'compressionPreference') final CompressionPreference? compressionPreference; + /// Whether to encode and send the Alpha data present in the video frame to the remote end: true : Encode and send Alpha data. false : (Default) Do not encode and send Alpha data. + @JsonKey(name: 'encodeAlpha') + final bool? encodeAlpha; + /// @nodoc factory AdvanceOptions.fromJson(Map json) => _$AdvanceOptionsFromJson(json); @@ -1723,6 +1764,101 @@ extension VideoMirrorModeTypeExt on VideoMirrorModeType { } } +/// @nodoc +@JsonEnum(alwaysCreate: true) +enum CameraFormatType { + /// @nodoc + @JsonValue(0) + cameraFormatNv12, + + /// @nodoc + @JsonValue(1) + cameraFormatBgra, +} + +/// @nodoc +extension CameraFormatTypeExt on CameraFormatType { + /// @nodoc + static CameraFormatType fromValue(int value) { + return $enumDecode(_$CameraFormatTypeEnumMap, value); + } + + /// @nodoc + int value() { + return _$CameraFormatTypeEnumMap[this]!; + } +} + +/// @nodoc +@JsonEnum(alwaysCreate: true) +enum VideoModuleType { + /// @nodoc + @JsonValue(0) + videoModuleCapturer, + + /// @nodoc + @JsonValue(1) + videoModuleSoftwareEncoder, + + /// @nodoc + @JsonValue(2) + videoModuleHardwareEncoder, + + /// @nodoc + @JsonValue(3) + videoModuleSoftwareDecoder, + + /// @nodoc + @JsonValue(4) + videoModuleHardwareDecoder, + + /// @nodoc + @JsonValue(5) + videoModuleRenderer, +} + +/// @nodoc +extension VideoModuleTypeExt on VideoModuleType { + /// @nodoc + static VideoModuleType 
fromValue(int value) { + return $enumDecode(_$VideoModuleTypeEnumMap, value); + } + + /// @nodoc + int value() { + return _$VideoModuleTypeEnumMap[this]!; + } +} + +/// @nodoc +@JsonEnum(alwaysCreate: true) +enum HdrCapability { + /// @nodoc + @JsonValue(-1) + hdrCapabilityUnknown, + + /// @nodoc + @JsonValue(0) + hdrCapabilityUnsupported, + + /// @nodoc + @JsonValue(1) + hdrCapabilitySupported, +} + +/// @nodoc +extension HdrCapabilityExt on HdrCapability { + /// @nodoc + static HdrCapability fromValue(int value) { + return $enumDecode(_$HdrCapabilityEnumMap, value); + } + + /// @nodoc + int value() { + return _$HdrCapabilityEnumMap[this]!; + } +} + /// The bit mask of the codec type. @JsonEnum(alwaysCreate: true) enum CodecCapMask { @@ -1970,6 +2106,99 @@ class SimulcastStreamConfig { Map toJson() => _$SimulcastStreamConfigToJson(this); } +/// @nodoc +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class SimulcastConfig { + /// @nodoc + const SimulcastConfig({this.configs}); + + /// @nodoc + @JsonKey(name: 'configs') + final List? 
configs; + + /// @nodoc + factory SimulcastConfig.fromJson(Map json) => + _$SimulcastConfigFromJson(json); + + /// @nodoc + Map toJson() => _$SimulcastConfigToJson(this); +} + +/// @nodoc +@JsonEnum(alwaysCreate: true) +enum StreamLayerIndex { + /// @nodoc + @JsonValue(0) + streamLayer1, + + /// @nodoc + @JsonValue(1) + streamLayer2, + + /// @nodoc + @JsonValue(2) + streamLayer3, + + /// @nodoc + @JsonValue(3) + streamLayer4, + + /// @nodoc + @JsonValue(4) + streamLayer5, + + /// @nodoc + @JsonValue(5) + streamLayer6, + + /// @nodoc + @JsonValue(6) + streamLow, + + /// @nodoc + @JsonValue(7) + streamLayerCountMax, +} + +/// @nodoc +extension StreamLayerIndexExt on StreamLayerIndex { + /// @nodoc + static StreamLayerIndex fromValue(int value) { + return $enumDecode(_$StreamLayerIndexEnumMap, value); + } + + /// @nodoc + int value() { + return _$StreamLayerIndexEnumMap[this]!; + } +} + +/// @nodoc +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class StreamLayerConfig { + /// @nodoc + const StreamLayerConfig({this.dimensions, this.framerate, this.enable}); + + /// @nodoc + @JsonKey(name: 'dimensions') + final VideoDimensions? dimensions; + + /// @nodoc + @JsonKey(name: 'framerate') + final int? framerate; + + /// @nodoc + @JsonKey(name: 'enable') + final bool? enable; + + /// @nodoc + factory StreamLayerConfig.fromJson(Map json) => + _$StreamLayerConfigFromJson(json); + + /// @nodoc + Map toJson() => _$StreamLayerConfigToJson(this); +} + /// The location of the target area relative to the screen or window. If you do not set this parameter, the SDK selects the whole screen or window. @JsonSerializable(explicitToJson: true, includeIfNull: false) class Rectangle { @@ -2643,6 +2872,14 @@ enum VideoApplicationScenarioType { /// Bitrate: 500 Kbps 1: The meeting scenario. @JsonValue(1) applicationScenarioMeeting, + + /// applicationScenario1v1 (2) This is applicable to the scenario. 
To meet the requirements for low latency and high-quality video in this scenario, the SDK optimizes its strategies, improving performance in terms of video quality, first frame rendering, latency on mid-to-low-end devices, and smoothness under weak network conditions. 2: 1v1 video call scenario. + @JsonValue(2) + applicationScenario1v1, + + /// applicationScenarioLiveshow (3) This is applicable to the scenario. In this scenario, fast video rendering and high image quality are crucial. The SDK implements several performance optimizations, including automatically enabling accelerated audio and video frame rendering to minimize first-frame latency (no need to call enableInstantMediaRendering), and B-frame encoding to achieve better image quality and bandwidth efficiency. The SDK also provides enhanced video quality and smooth playback, even in poor network conditions or on lower-end devices. 3. Live show scenario. + @JsonValue(3) + applicationScenarioLiveshow, } /// @nodoc @@ -2933,7 +3170,7 @@ enum LocalVideoStreamReason { @JsonValue(8) localVideoStreamReasonDeviceNotFound, - /// 9: (macOS only) The video capture device currently in use is disconnected (such as being unplugged). + /// 9: (macOS and Windows only) The video capture device currently in use is disconnected (such as being unplugged). @JsonValue(9) localVideoStreamReasonDeviceDisconnected, @@ -2974,7 +3211,7 @@ enum LocalVideoStreamReason { @JsonValue(20) localVideoStreamReasonScreenCaptureWindowNotSupported, - /// 21: (Windows only) The screen has not captured any data available for window sharing. + /// 21: (Windows and Android only) The currently captured window has no data. @JsonValue(21) localVideoStreamReasonScreenCaptureFailure, @@ -3005,6 +3242,10 @@ enum LocalVideoStreamReason { /// 29: (Windows only) Screen capture has resumed from paused state. @JsonValue(29) localVideoStreamReasonScreenCaptureResumed, + + /// 30: (Windows and macOS only) The displayer used for screen capture is disconnected. 
+ @JsonValue(30) + localVideoStreamReasonScreenCaptureDisplayDisconnected, } /// @nodoc @@ -4223,6 +4464,68 @@ extension VideoTranscoderErrorExt on VideoTranscoderError { } } +/// The source of the audio streams that are mixed locally. +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class MixedAudioStream { + /// @nodoc + const MixedAudioStream( + {this.sourceType, this.remoteUserUid, this.channelId, this.trackId}); + + /// The type of the audio source. See AudioSourceType. + @JsonKey(name: 'sourceType') + final AudioSourceType? sourceType; + + /// The user ID of the remote user. Set this parameter if the source type of the locally mixed audio steams is audioSourceRemoteUser. + @JsonKey(name: 'remoteUserUid') + final int? remoteUserUid; + + /// The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters (89 characters in total): + /// All lowercase English letters: a to z. + /// All uppercase English letters: A to Z. + /// All numeric characters: 0 to 9. + /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," Set this parameter if the source type of the locally mixed audio streams is audioSourceRemoteChannel or audioSourceRemoteUser. + @JsonKey(name: 'channelId') + final String? channelId; + + /// The audio track ID. Set this parameter to the custom audio track ID returned in createCustomAudioTrack. Set this parameter if the source type of the locally mixed audio steams is audioSourceCustom. + @JsonKey(name: 'trackId') + final int? 
trackId; + + /// @nodoc + factory MixedAudioStream.fromJson(Map json) => + _$MixedAudioStreamFromJson(json); + + /// @nodoc + Map toJson() => _$MixedAudioStreamToJson(this); +} + +/// The configurations for mixing the lcoal audio. +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class LocalAudioMixerConfiguration { + /// @nodoc + const LocalAudioMixerConfiguration( + {this.streamCount, this.audioInputStreams, this.syncWithLocalMic}); + + /// The number of the audio streams that are mixed locally. + @JsonKey(name: 'streamCount') + final int? streamCount; + + /// The source of the audio streams that are mixed locally. See MixedAudioStream. + @JsonKey(name: 'audioInputStreams') + final List? audioInputStreams; + + /// Whether the mxied audio stream uses the timestamp of the audio frames captured by the local microphone. true : (Default) Yes. Set to this value if you want all locally captured audio streams synchronized. false : No. The SDK uses the timestamp of the audio frames at the time when they are mixed. + @JsonKey(name: 'syncWithLocalMic') + final bool? syncWithLocalMic; + + /// @nodoc + factory LocalAudioMixerConfiguration.fromJson(Map json) => + _$LocalAudioMixerConfigurationFromJson(json); + + /// @nodoc + Map toJson() => _$LocalAudioMixerConfigurationToJson(this); +} + /// Configurations of the last-mile network test. @JsonSerializable(explicitToJson: true, includeIfNull: false) class LastmileProbeConfig { @@ -4696,7 +4999,7 @@ class VideoCanvas { final int? subviewUid; /// The video display window. In one VideoCanvas, you can only choose to set either view or surfaceTexture. If both are set, only the settings in view take effect. - @JsonKey(name: 'view') + @JsonKey(name: 'view', readValue: readIntPtr) final int? view; /// The background color of the video canvas in RGBA format. The default value is 0x00000000, which represents completely transparent black. 
@@ -4815,6 +5118,173 @@ extension LighteningContrastLevelExt on LighteningContrastLevel { } } +/// @nodoc +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class FaceShapeAreaOptions { + /// @nodoc + const FaceShapeAreaOptions({this.shapeArea, this.shapeIntensity}); + + /// @nodoc + @JsonKey(name: 'shapeArea') + final FaceShapeArea? shapeArea; + + /// @nodoc + @JsonKey(name: 'shapeIntensity') + final int? shapeIntensity; + + /// @nodoc + factory FaceShapeAreaOptions.fromJson(Map json) => + _$FaceShapeAreaOptionsFromJson(json); + + /// @nodoc + Map toJson() => _$FaceShapeAreaOptionsToJson(this); +} + +/// @nodoc +@JsonEnum(alwaysCreate: true) +enum FaceShapeArea { + /// @nodoc + @JsonValue(-1) + faceShapeAreaNone, + + /// @nodoc + @JsonValue(0) + faceShapeAreaHeadscale, + + /// @nodoc + @JsonValue(1) + faceShapeAreaForehead, + + /// @nodoc + @JsonValue(2) + faceShapeAreaFacecontour, + + /// @nodoc + @JsonValue(3) + faceShapeAreaFacelength, + + /// @nodoc + @JsonValue(4) + faceShapeAreaFacewidth, + + /// @nodoc + @JsonValue(5) + faceShapeAreaCheekbone, + + /// @nodoc + @JsonValue(6) + faceShapeAreaCheek, + + /// @nodoc + @JsonValue(7) + faceShapeAreaChin, + + /// @nodoc + @JsonValue(8) + faceShapeAreaEyescale, + + /// @nodoc + @JsonValue(9) + faceShapeAreaNoselength, + + /// @nodoc + @JsonValue(10) + faceShapeAreaNosewidth, + + /// @nodoc + @JsonValue(11) + faceShapeAreaMouthscale, +} + +/// @nodoc +extension FaceShapeAreaExt on FaceShapeArea { + /// @nodoc + static FaceShapeArea fromValue(int value) { + return $enumDecode(_$FaceShapeAreaEnumMap, value); + } + + /// @nodoc + int value() { + return _$FaceShapeAreaEnumMap[this]!; + } +} + +/// @nodoc +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class FaceShapeBeautyOptions { + /// @nodoc + const FaceShapeBeautyOptions({this.shapeStyle, this.styleIntensity}); + + /// @nodoc + @JsonKey(name: 'shapeStyle') + final FaceShapeBeautyStyle? 
shapeStyle; + + /// @nodoc + @JsonKey(name: 'styleIntensity') + final int? styleIntensity; + + /// @nodoc + factory FaceShapeBeautyOptions.fromJson(Map json) => + _$FaceShapeBeautyOptionsFromJson(json); + + /// @nodoc + Map toJson() => _$FaceShapeBeautyOptionsToJson(this); +} + +/// @nodoc +@JsonEnum(alwaysCreate: true) +enum FaceShapeBeautyStyle { + /// @nodoc + @JsonValue(0) + faceShapeBeautyStyleFemale, + + /// @nodoc + @JsonValue(1) + faceShapeBeautyStyleMale, +} + +/// @nodoc +extension FaceShapeBeautyStyleExt on FaceShapeBeautyStyle { + /// @nodoc + static FaceShapeBeautyStyle fromValue(int value) { + return $enumDecode(_$FaceShapeBeautyStyleEnumMap, value); + } + + /// @nodoc + int value() { + return _$FaceShapeBeautyStyleEnumMap[this]!; + } +} + +/// Filter effect options. +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class FilterEffectOptions { + /// @nodoc + const FilterEffectOptions({this.path, this.strength}); + + /// The absolute path to the local cube map texture file, which can be used to customize the filter effect. The specified .cude file should strictly follow the Cube LUT Format Specification; otherwise, the filter options do not take effect. The following is a sample of the .cude file: + /// LUT_3D_SIZE 32 + /// 0.0039215689 0 0.0039215682 + /// 0.0086021447 0.0037950677 0 + /// ... + /// 0.0728652592 0.0039215689 0 + /// The identifier LUT_3D_SIZE on the first line of the cube map file represents the size of the three-dimensional lookup table. The LUT size for filter effect can only be set to 32. + /// The SDK provides a built-in built_in_whiten_filter.cube file. You can pass the absolute path of this file to get the whitening filter effect. + @JsonKey(name: 'path') + final String? path; + + /// The intensity of the filter effect, with a range value of [0.0,1.0], in which 0.0 represents no filter effect. The default value is 0.5. The higher the value, the stronger the filter effect. 
+ @JsonKey(name: 'strength') + final double? strength; + + /// @nodoc + factory FilterEffectOptions.fromJson(Map json) => + _$FilterEffectOptionsFromJson(json); + + /// @nodoc + Map toJson() => _$FilterEffectOptionsToJson(this); +} + /// The low-light enhancement options. @JsonSerializable(explicitToJson: true, includeIfNull: false) class LowlightEnhanceOptions { @@ -4934,20 +5404,16 @@ extension VideoDenoiserModeExt on VideoDenoiserMode { } } -/// The video noise reduction level. +/// Video noise reduction level. @JsonEnum(alwaysCreate: true) enum VideoDenoiserLevel { /// 0: (Default) Promotes video quality during video noise reduction. balances performance consumption and video noise reduction quality. The performance consumption is moderate, the video noise reduction speed is moderate, and the overall video quality is optimal. @JsonValue(0) videoDenoiserLevelHighQuality, - /// 1: Promotes reducing performance consumption during video noise reduction. prioritizes reducing performance consumption over video noise reduction quality. The performance consumption is lower, and the video noise reduction speed is faster. To avoid a noticeable shadowing effect (shadows trailing behind moving objects) in the processed video, Agora recommends that you use this settinging when the camera is fixed. + /// 1: Promotes reducing performance consumption during video noise reduction. It prioritizes reducing performance consumption over video noise reduction quality. The performance consumption is lower, and the video noise reduction speed is faster. To avoid a noticeable shadowing effect (shadows trailing behind moving objects) in the processed video, Agora recommends that you use this setting when the camera is fixed. @JsonValue(1) videoDenoiserLevelFast, - - /// 2: Enhanced video noise reduction. prioritizes video noise reduction quality over reducing performance consumption. 
The performance consumption is higher, the video noise reduction speed is slower, and the video noise reduction quality is better. If videoDenoiserLevelHighQuality is not enough for your video noise reduction needs, you can use this enumerator. - @JsonValue(2) - videoDenoiserLevelStrength, } /// @nodoc @@ -5164,12 +5630,17 @@ extension AudioTrackTypeExt on AudioTrackType { @JsonSerializable(explicitToJson: true, includeIfNull: false) class AudioTrackConfig { /// @nodoc - const AudioTrackConfig({this.enableLocalPlayback}); + const AudioTrackConfig( + {this.enableLocalPlayback, this.enableAudioProcessing}); /// Whether to enable the local audio-playback device: true : (Default) Enable the local audio-playback device. false : Do not enable the local audio-playback device. @JsonKey(name: 'enableLocalPlayback') final bool? enableLocalPlayback; + /// Whether to enable audio processing module: true Enable the audio processing module to apply the Automatic Echo Cancellation (AEC), Automatic Noise Suppression (ANS), and Automatic Gain Control (AGC) effects. false : (Default) Do not enable the audio processing module. This parameter only takes effect on audioTrackDirect in custom audio capturing. + @JsonKey(name: 'enableAudioProcessing') + final bool? enableAudioProcessing; + /// @nodoc factory AudioTrackConfig.fromJson(Map json) => _$AudioTrackConfigFromJson(json); @@ -5468,6 +5939,63 @@ extension HeadphoneEqualizerPresetExt on HeadphoneEqualizerPreset { } } +/// Voice AI tuner sound types. +@JsonEnum(alwaysCreate: true) +enum VoiceAiTunerType { + /// 0: Mature male voice. A deep and magnetic male voice. + @JsonValue(0) + voiceAiTunerMatureMale, + + /// 1: Fresh male voice. A fresh and slightly sweet male voice. + @JsonValue(1) + voiceAiTunerFreshMale, + + /// 2: Elegant female voice. A deep and charming female voice. + @JsonValue(2) + voiceAiTunerElegantFemale, + + /// 3: Sweet female voice. A high-pitched and cute female voice. 
+ @JsonValue(3) + voiceAiTunerSweetFemale, + + /// 4: Warm male singing. A warm and melodious male voice. + @JsonValue(4) + voiceAiTunerWarmMaleSinging, + + /// 5: Gentle female singing. A soft and delicate female voice. + @JsonValue(5) + voiceAiTunerGentleFemaleSinging, + + /// 6: Husky male singing. A unique husky male voice. + @JsonValue(6) + voiceAiTunerHuskyMaleSinging, + + /// 7: Warm elegant female singing. A warm and mature female voice. + @JsonValue(7) + voiceAiTunerWarmElegantFemaleSinging, + + /// 8: Powerful male singing. A strong and powerful male voice. + @JsonValue(8) + voiceAiTunerPowerfulMaleSinging, + + /// 9: Dreamy female singing. A dreamy and soft female voice. + @JsonValue(9) + voiceAiTunerDreamyFemaleSinging, +} + +/// @nodoc +extension VoiceAiTunerTypeExt on VoiceAiTunerType { + /// @nodoc + static VoiceAiTunerType fromValue(int value) { + return $enumDecode(_$VoiceAiTunerTypeEnumMap, value); + } + + /// @nodoc + int value() { + return _$VoiceAiTunerTypeEnumMap[this]!; + } +} + /// Screen sharing configurations. @JsonSerializable(explicitToJson: true, includeIfNull: false) class ScreenCaptureParameters { @@ -5511,7 +6039,7 @@ class ScreenCaptureParameters { final bool? windowFocus; /// The ID list of the windows to be blocked. When calling startScreenCaptureByDisplayId to start screen sharing, you can use this parameter to block a specified window. When calling updateScreenCaptureParameters to update screen sharing configurations, you can use this parameter to dynamically block a specified window. - @JsonKey(name: 'excludeWindowList') + @JsonKey(name: 'excludeWindowList', readValue: readIntPtrList) final List? excludeWindowList; /// The number of windows to be excluded. On the Windows platform, the maximum value of this parameter is 24; if this value is exceeded, excluding the window fails. 
@@ -5831,6 +6359,10 @@ enum AreaCodeEx { @JsonValue(0x00000800) areaCodeUs, + /// @nodoc + @JsonValue(0x00001000) + areaCodeRu, + /// @nodoc @JsonValue(0xFFFFFFFE) areaCodeOvs, @@ -5988,7 +6520,7 @@ class ChannelMediaRelayConfiguration { /// The information of the target channel ChannelMediaInfo. It contains the following members: channelName : The name of the target channel. token : The token for joining the target channel. It is generated with the channelName and uid you set in destInfos. /// If you have not enabled the App Certificate, set this parameter as the default value NULL, which means the SDK applies the App ID. - /// If you have enabled the App Certificate, you must use the token generated with the channelName and uid. If the token of any target channel expires, the whole media relay stops; hence Agora recommends that you specify the same expiration time for the tokens of all the target channels. uid : The unique user ID to identify the relay stream in the target channel. The value ranges from 0 to (2 32 -1). To avoid user ID conflicts, this user ID must be different from any other user ID in the target channel. The default value is 0, which means the SDK generates a random user ID. + /// If you have enabled the App Certificate, you must use the token generated with the channelName and uid. If the token of any target channel expires, the whole media relay stops; hence Agora recommends that you specify the same expiration time for the tokens of all the target channels. uid : The unique user ID to identify the relay stream in the target channel. The value ranges from 0 to (2 32 -1). To avoid user ID conflicts, this user ID must be different from any other user ID in the target channel. The default value is 0, which means the SDK generates a random UID. @JsonKey(name: 'destInfos') final List? destInfos; @@ -6370,7 +6902,7 @@ class EchoTestConfiguration { this.intervalInSeconds}); /// The view used to render the local user's video. 
This parameter is only applicable to scenarios testing video devices, that is, when enableVideo is true. - @JsonKey(name: 'view') + @JsonKey(name: 'view', readValue: readIntPtr) final int? view; /// Whether to enable the audio device for the loop test: true : (Default) Enable the audio device. To test the audio device, set this parameter as true. false : Disable the audio device. @@ -6834,11 +7366,36 @@ class LocalAccessPointConfiguration { Map toJson() => _$LocalAccessPointConfigurationToJson(this); } +/// @nodoc +@JsonEnum(alwaysCreate: true) +enum RecorderStreamType { + /// @nodoc + @JsonValue(0) + rtc, + + /// @nodoc + @JsonValue(1) + preview, +} + +/// @nodoc +extension RecorderStreamTypeExt on RecorderStreamType { + /// @nodoc + static RecorderStreamType fromValue(int value) { + return $enumDecode(_$RecorderStreamTypeEnumMap, value); + } + + /// @nodoc + int value() { + return _$RecorderStreamTypeEnumMap[this]!; + } +} + /// @nodoc @JsonSerializable(explicitToJson: true, includeIfNull: false) class RecorderStreamInfo { /// @nodoc - const RecorderStreamInfo({this.channelId, this.uid}); + const RecorderStreamInfo({this.channelId, this.uid, this.type}); /// @nodoc @JsonKey(name: 'channelId') @@ -6848,6 +7405,10 @@ class RecorderStreamInfo { @JsonKey(name: 'uid') final int? uid; + /// @nodoc + @JsonKey(name: 'type') + final RecorderStreamType? 
type; + /// @nodoc factory RecorderStreamInfo.fromJson(Map json) => _$RecorderStreamInfoFromJson(json); diff --git a/lib/src/agora_base.g.dart b/lib/src/agora_base.g.dart index 3b6728e30..ffae9f4e4 100644 --- a/lib/src/agora_base.g.dart +++ b/lib/src/agora_base.g.dart @@ -133,6 +133,7 @@ const _$AudioCodecTypeEnumMap = { AudioCodecType.audioCodecJc1: 10, AudioCodecType.audioCodecHeaac2: 11, AudioCodecType.audioCodecLpcnet: 12, + AudioCodecType.audioCodecOpusmc: 13, }; AudioPcmDataInfo _$AudioPcmDataInfoFromJson(Map json) => @@ -186,6 +187,12 @@ Map _$VideoSubscriptionOptionsToJson( const _$VideoStreamTypeEnumMap = { VideoStreamType.videoStreamHigh: 0, VideoStreamType.videoStreamLow: 1, + VideoStreamType.videoStreamLayer1: 4, + VideoStreamType.videoStreamLayer2: 5, + VideoStreamType.videoStreamLayer3: 6, + VideoStreamType.videoStreamLayer4: 7, + VideoStreamType.videoStreamLayer5: 8, + VideoStreamType.videoStreamLayer6: 9, }; EncodedVideoFrameInfo _$EncodedVideoFrameInfoFromJson( @@ -256,6 +263,7 @@ AdvanceOptions _$AdvanceOptionsFromJson(Map json) => _$EncodingPreferenceEnumMap, json['encodingPreference']), compressionPreference: $enumDecodeNullable( _$CompressionPreferenceEnumMap, json['compressionPreference']), + encodeAlpha: json['encodeAlpha'] as bool?, ); Map _$AdvanceOptionsToJson(AdvanceOptions instance) { @@ -271,6 +279,7 @@ Map _$AdvanceOptionsToJson(AdvanceOptions instance) { _$EncodingPreferenceEnumMap[instance.encodingPreference]); writeNotNull('compressionPreference', _$CompressionPreferenceEnumMap[instance.compressionPreference]); + writeNotNull('encodeAlpha', instance.encodeAlpha); return val; } @@ -281,6 +290,7 @@ const _$EncodingPreferenceEnumMap = { }; const _$CompressionPreferenceEnumMap = { + CompressionPreference.preferCompressionAuto: -1, CompressionPreference.preferLowLatency: 0, CompressionPreference.preferQuality: 1, }; @@ -426,6 +436,7 @@ const _$OrientationModeEnumMap = { }; const _$DegradationPreferenceEnumMap = { + 
DegradationPreference.maintainAuto: -1, DegradationPreference.maintainQuality: 0, DegradationPreference.maintainFramerate: 1, DegradationPreference.maintainBalanced: 2, @@ -486,6 +497,51 @@ Map _$SimulcastStreamConfigToJson( return val; } +SimulcastConfig _$SimulcastConfigFromJson(Map json) => + SimulcastConfig( + configs: (json['configs'] as List?) + ?.map((e) => StreamLayerConfig.fromJson(e as Map)) + .toList(), + ); + +Map _$SimulcastConfigToJson(SimulcastConfig instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('configs', instance.configs?.map((e) => e.toJson()).toList()); + return val; +} + +StreamLayerConfig _$StreamLayerConfigFromJson(Map json) => + StreamLayerConfig( + dimensions: json['dimensions'] == null + ? null + : VideoDimensions.fromJson( + json['dimensions'] as Map), + framerate: (json['framerate'] as num?)?.toInt(), + enable: json['enable'] as bool?, + ); + +Map _$StreamLayerConfigToJson(StreamLayerConfig instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('dimensions', instance.dimensions?.toJson()); + writeNotNull('framerate', instance.framerate); + writeNotNull('enable', instance.enable); + return val; +} + Rectangle _$RectangleFromJson(Map json) => Rectangle( x: (json['x'] as num?)?.toInt(), y: (json['y'] as num?)?.toInt(), @@ -1133,6 +1189,69 @@ Map _$LocalTranscoderConfigurationToJson( return val; } +MixedAudioStream _$MixedAudioStreamFromJson(Map json) => + MixedAudioStream( + sourceType: + $enumDecodeNullable(_$AudioSourceTypeEnumMap, json['sourceType']), + remoteUserUid: (json['remoteUserUid'] as num?)?.toInt(), + channelId: json['channelId'] as String?, + trackId: (json['trackId'] as num?)?.toInt(), + ); + +Map _$MixedAudioStreamToJson(MixedAudioStream instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != 
null) { + val[key] = value; + } + } + + writeNotNull('sourceType', _$AudioSourceTypeEnumMap[instance.sourceType]); + writeNotNull('remoteUserUid', instance.remoteUserUid); + writeNotNull('channelId', instance.channelId); + writeNotNull('trackId', instance.trackId); + return val; +} + +const _$AudioSourceTypeEnumMap = { + AudioSourceType.audioSourceMicrophone: 0, + AudioSourceType.audioSourceCustom: 1, + AudioSourceType.audioSourceMediaPlayer: 2, + AudioSourceType.audioSourceLoopbackRecording: 3, + AudioSourceType.audioSourceMixedStream: 4, + AudioSourceType.audioSourceRemoteUser: 5, + AudioSourceType.audioSourceRemoteChannel: 6, + AudioSourceType.audioSourceUnknown: 100, +}; + +LocalAudioMixerConfiguration _$LocalAudioMixerConfigurationFromJson( + Map json) => + LocalAudioMixerConfiguration( + streamCount: (json['streamCount'] as num?)?.toInt(), + audioInputStreams: (json['audioInputStreams'] as List?) + ?.map((e) => MixedAudioStream.fromJson(e as Map)) + .toList(), + syncWithLocalMic: json['syncWithLocalMic'] as bool?, + ); + +Map _$LocalAudioMixerConfigurationToJson( + LocalAudioMixerConfiguration instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('streamCount', instance.streamCount); + writeNotNull('audioInputStreams', + instance.audioInputStreams?.map((e) => e.toJson()).toList()); + writeNotNull('syncWithLocalMic', instance.syncWithLocalMic); + return val; +} + LastmileProbeConfig _$LastmileProbeConfigFromJson(Map json) => LastmileProbeConfig( probeUplink: json['probeUplink'] as bool?, @@ -1243,7 +1362,7 @@ Map _$WlAccStatsToJson(WlAccStats instance) { VideoCanvas _$VideoCanvasFromJson(Map json) => VideoCanvas( uid: (json['uid'] as num?)?.toInt(), subviewUid: (json['subviewUid'] as num?)?.toInt(), - view: (json['view'] as num?)?.toInt(), + view: (readIntPtr(json, 'view') as num?)?.toInt(), backgroundColor: (json['backgroundColor'] as num?)?.toInt(), renderMode: 
$enumDecodeNullable(_$RenderModeTypeEnumMap, json['renderMode']), @@ -1339,6 +1458,93 @@ const _$LighteningContrastLevelEnumMap = { LighteningContrastLevel.lighteningContrastHigh: 2, }; +FaceShapeAreaOptions _$FaceShapeAreaOptionsFromJson( + Map json) => + FaceShapeAreaOptions( + shapeArea: $enumDecodeNullable(_$FaceShapeAreaEnumMap, json['shapeArea']), + shapeIntensity: (json['shapeIntensity'] as num?)?.toInt(), + ); + +Map _$FaceShapeAreaOptionsToJson( + FaceShapeAreaOptions instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('shapeArea', _$FaceShapeAreaEnumMap[instance.shapeArea]); + writeNotNull('shapeIntensity', instance.shapeIntensity); + return val; +} + +const _$FaceShapeAreaEnumMap = { + FaceShapeArea.faceShapeAreaNone: -1, + FaceShapeArea.faceShapeAreaHeadscale: 0, + FaceShapeArea.faceShapeAreaForehead: 1, + FaceShapeArea.faceShapeAreaFacecontour: 2, + FaceShapeArea.faceShapeAreaFacelength: 3, + FaceShapeArea.faceShapeAreaFacewidth: 4, + FaceShapeArea.faceShapeAreaCheekbone: 5, + FaceShapeArea.faceShapeAreaCheek: 6, + FaceShapeArea.faceShapeAreaChin: 7, + FaceShapeArea.faceShapeAreaEyescale: 8, + FaceShapeArea.faceShapeAreaNoselength: 9, + FaceShapeArea.faceShapeAreaNosewidth: 10, + FaceShapeArea.faceShapeAreaMouthscale: 11, +}; + +FaceShapeBeautyOptions _$FaceShapeBeautyOptionsFromJson( + Map json) => + FaceShapeBeautyOptions( + shapeStyle: $enumDecodeNullable( + _$FaceShapeBeautyStyleEnumMap, json['shapeStyle']), + styleIntensity: (json['styleIntensity'] as num?)?.toInt(), + ); + +Map _$FaceShapeBeautyOptionsToJson( + FaceShapeBeautyOptions instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull( + 'shapeStyle', _$FaceShapeBeautyStyleEnumMap[instance.shapeStyle]); + writeNotNull('styleIntensity', instance.styleIntensity); + return val; +} + +const 
_$FaceShapeBeautyStyleEnumMap = { + FaceShapeBeautyStyle.faceShapeBeautyStyleFemale: 0, + FaceShapeBeautyStyle.faceShapeBeautyStyleMale: 1, +}; + +FilterEffectOptions _$FilterEffectOptionsFromJson(Map json) => + FilterEffectOptions( + path: json['path'] as String?, + strength: (json['strength'] as num?)?.toDouble(), + ); + +Map _$FilterEffectOptionsToJson(FilterEffectOptions instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('path', instance.path); + writeNotNull('strength', instance.strength); + return val; +} + LowlightEnhanceOptions _$LowlightEnhanceOptionsFromJson( Map json) => LowlightEnhanceOptions( @@ -1401,7 +1607,6 @@ const _$VideoDenoiserModeEnumMap = { const _$VideoDenoiserLevelEnumMap = { VideoDenoiserLevel.videoDenoiserLevelHighQuality: 0, VideoDenoiserLevel.videoDenoiserLevelFast: 1, - VideoDenoiserLevel.videoDenoiserLevelStrength: 2, }; ColorEnhanceOptions _$ColorEnhanceOptionsFromJson(Map json) => @@ -1498,6 +1703,7 @@ const _$SegModelTypeEnumMap = { AudioTrackConfig _$AudioTrackConfigFromJson(Map json) => AudioTrackConfig( enableLocalPlayback: json['enableLocalPlayback'] as bool?, + enableAudioProcessing: json['enableAudioProcessing'] as bool?, ); Map _$AudioTrackConfigToJson(AudioTrackConfig instance) { @@ -1510,6 +1716,7 @@ Map _$AudioTrackConfigToJson(AudioTrackConfig instance) { } writeNotNull('enableLocalPlayback', instance.enableLocalPlayback); + writeNotNull('enableAudioProcessing', instance.enableAudioProcessing); return val; } @@ -1524,9 +1731,10 @@ ScreenCaptureParameters _$ScreenCaptureParametersFromJson( bitrate: (json['bitrate'] as num?)?.toInt(), captureMouseCursor: json['captureMouseCursor'] as bool?, windowFocus: json['windowFocus'] as bool?, - excludeWindowList: (json['excludeWindowList'] as List?) - ?.map((e) => (e as num).toInt()) - .toList(), + excludeWindowList: + (readIntPtrList(json, 'excludeWindowList') as List?) 
+ ?.map((e) => (e as num).toInt()) + .toList(), excludeWindowCount: (json['excludeWindowCount'] as num?)?.toInt(), highLightWidth: (json['highLightWidth'] as num?)?.toInt(), highLightColor: (json['highLightColor'] as num?)?.toInt(), @@ -1830,7 +2038,7 @@ const _$EncryptionModeEnumMap = { EchoTestConfiguration _$EchoTestConfigurationFromJson( Map json) => EchoTestConfiguration( - view: (json['view'] as num?)?.toInt(), + view: (readIntPtr(json, 'view') as num?)?.toInt(), enableAudio: json['enableAudio'] as bool?, enableVideo: json['enableVideo'] as bool?, token: json['token'] as String?, @@ -2098,6 +2306,7 @@ RecorderStreamInfo _$RecorderStreamInfoFromJson(Map json) => RecorderStreamInfo( channelId: json['channelId'] as String?, uid: (json['uid'] as num?)?.toInt(), + type: $enumDecodeNullable(_$RecorderStreamTypeEnumMap, json['type']), ); Map _$RecorderStreamInfoToJson(RecorderStreamInfo instance) { @@ -2111,9 +2320,15 @@ Map _$RecorderStreamInfoToJson(RecorderStreamInfo instance) { writeNotNull('channelId', instance.channelId); writeNotNull('uid', instance.uid); + writeNotNull('type', _$RecorderStreamTypeEnumMap[instance.type]); return val; } +const _$RecorderStreamTypeEnumMap = { + RecorderStreamType.rtc: 0, + RecorderStreamType.preview: 1, +}; + SpatialAudioParams _$SpatialAudioParamsFromJson(Map json) => SpatialAudioParams( speakerAzimuth: (json['speaker_azimuth'] as num?)?.toDouble(), @@ -2381,6 +2596,26 @@ const _$MaxUserAccountLengthTypeEnumMap = { MaxUserAccountLengthType.maxUserAccountLength: 256, }; +const _$CameraFormatTypeEnumMap = { + CameraFormatType.cameraFormatNv12: 0, + CameraFormatType.cameraFormatBgra: 1, +}; + +const _$VideoModuleTypeEnumMap = { + VideoModuleType.videoModuleCapturer: 0, + VideoModuleType.videoModuleSoftwareEncoder: 1, + VideoModuleType.videoModuleHardwareEncoder: 2, + VideoModuleType.videoModuleSoftwareDecoder: 3, + VideoModuleType.videoModuleHardwareDecoder: 4, + VideoModuleType.videoModuleRenderer: 5, +}; + +const 
_$HdrCapabilityEnumMap = { + HdrCapability.hdrCapabilityUnknown: -1, + HdrCapability.hdrCapabilityUnsupported: 0, + HdrCapability.hdrCapabilitySupported: 1, +}; + const _$CodecCapMaskEnumMap = { CodecCapMask.codecCapMaskNone: 0, CodecCapMask.codecCapMaskHwDec: 1, @@ -2395,6 +2630,17 @@ const _$SimulcastStreamModeEnumMap = { SimulcastStreamMode.enableSimulcastStream: 1, }; +const _$StreamLayerIndexEnumMap = { + StreamLayerIndex.streamLayer1: 0, + StreamLayerIndex.streamLayer2: 1, + StreamLayerIndex.streamLayer3: 2, + StreamLayerIndex.streamLayer4: 3, + StreamLayerIndex.streamLayer5: 4, + StreamLayerIndex.streamLayer6: 5, + StreamLayerIndex.streamLow: 6, + StreamLayerIndex.streamLayerCountMax: 7, +}; + const _$ClientRoleTypeEnumMap = { ClientRoleType.clientRoleBroadcaster: 1, ClientRoleType.clientRoleAudience: 2, @@ -2455,6 +2701,8 @@ const _$ScreenScenarioTypeEnumMap = { const _$VideoApplicationScenarioTypeEnumMap = { VideoApplicationScenarioType.applicationScenarioGeneral: 0, VideoApplicationScenarioType.applicationScenarioMeeting: 1, + VideoApplicationScenarioType.applicationScenario1v1: 2, + VideoApplicationScenarioType.applicationScenarioLiveshow: 3, }; const _$VideoQoePreferenceTypeEnumMap = { @@ -2538,6 +2786,8 @@ const _$LocalVideoStreamReasonEnumMap = { .localVideoStreamReasonScreenCaptureWindowRecoverFromMinimized: 27, LocalVideoStreamReason.localVideoStreamReasonScreenCapturePaused: 28, LocalVideoStreamReason.localVideoStreamReasonScreenCaptureResumed: 29, + LocalVideoStreamReason.localVideoStreamReasonScreenCaptureDisplayDisconnected: + 30, }; const _$RemoteAudioStateEnumMap = { @@ -2776,6 +3026,19 @@ const _$HeadphoneEqualizerPresetEnumMap = { HeadphoneEqualizerPreset.headphoneEqualizerInear: 67108866, }; +const _$VoiceAiTunerTypeEnumMap = { + VoiceAiTunerType.voiceAiTunerMatureMale: 0, + VoiceAiTunerType.voiceAiTunerFreshMale: 1, + VoiceAiTunerType.voiceAiTunerElegantFemale: 2, + VoiceAiTunerType.voiceAiTunerSweetFemale: 3, + 
VoiceAiTunerType.voiceAiTunerWarmMaleSinging: 4, + VoiceAiTunerType.voiceAiTunerGentleFemaleSinging: 5, + VoiceAiTunerType.voiceAiTunerHuskyMaleSinging: 6, + VoiceAiTunerType.voiceAiTunerWarmElegantFemaleSinging: 7, + VoiceAiTunerType.voiceAiTunerPowerfulMaleSinging: 8, + VoiceAiTunerType.voiceAiTunerDreamyFemaleSinging: 9, +}; + const _$AreaCodeEnumMap = { AreaCode.areaCodeCn: 1, AreaCode.areaCodeNa: 2, @@ -2793,6 +3056,7 @@ const _$AreaCodeExEnumMap = { AreaCodeEx.areaCodeKr: 512, AreaCodeEx.areaCodeHkmc: 1024, AreaCodeEx.areaCodeUs: 2048, + AreaCodeEx.areaCodeRu: 4096, AreaCodeEx.areaCodeOvs: 4294967294, }; diff --git a/lib/src/agora_log.dart b/lib/src/agora_log.dart index 01ca40c78..519b0994a 100644 --- a/lib/src/agora_log.dart +++ b/lib/src/agora_log.dart @@ -27,6 +27,10 @@ enum LogLevel { /// @nodoc @JsonValue(0x0010) logLevelApiCall, + + /// @nodoc + @JsonValue(0x0020) + logLevelDebug, } /// @nodoc diff --git a/lib/src/agora_log.g.dart b/lib/src/agora_log.g.dart index 0eea06f5b..4e1239a5b 100644 --- a/lib/src/agora_log.g.dart +++ b/lib/src/agora_log.g.dart @@ -36,6 +36,7 @@ const _$LogLevelEnumMap = { LogLevel.logLevelError: 4, LogLevel.logLevelFatal: 8, LogLevel.logLevelApiCall: 16, + LogLevel.logLevelDebug: 32, }; const _$LogFilterTypeEnumMap = { diff --git a/lib/src/agora_media_base.dart b/lib/src/agora_media_base.dart index e9323f3f9..40d4b5ed9 100644 --- a/lib/src/agora_media_base.dart +++ b/lib/src/agora_media_base.dart @@ -10,6 +10,37 @@ const defaultConnectionId = 0; /// @nodoc const dummyConnectionId = 4294967295; +/// The context information of the extension. +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class ExtensionContext { + /// @nodoc + const ExtensionContext( + {this.isValid, this.uid, this.providerName, this.extensionName}); + + /// Whether the uid in ExtensionContext is valid: true : The uid is valid. false : The uid is invalid. + @JsonKey(name: 'isValid') + final bool? isValid; + + /// The user ID. 
0 represents a local user, while greater than 0 represents a remote user. + @JsonKey(name: 'uid') + final int? uid; + + /// The name of the extension provider. + @JsonKey(name: 'providerName') + final String? providerName; + + /// The name of the extension. + @JsonKey(name: 'extensionName') + final String? extensionName; + + /// @nodoc + factory ExtensionContext.fromJson(Map json) => + _$ExtensionContextFromJson(json); + + /// @nodoc + Map toJson() => _$ExtensionContextToJson(this); +} + /// The type of the video source. @JsonEnum(alwaysCreate: true) enum VideoSourceType { @@ -103,6 +134,55 @@ extension VideoSourceTypeExt on VideoSourceType { } } +/// The audio source type. +@JsonEnum(alwaysCreate: true) +enum AudioSourceType { + /// 0: (Default) Microphone. + @JsonValue(0) + audioSourceMicrophone, + + /// 1: Custom audio stream. + @JsonValue(1) + audioSourceCustom, + + /// 2: Media player. + @JsonValue(2) + audioSourceMediaPlayer, + + /// 3: System audio stream captured during screen sharing. + @JsonValue(3) + audioSourceLoopbackRecording, + + /// @nodoc + @JsonValue(4) + audioSourceMixedStream, + + /// 5: Audio stream from a specified remote user. + @JsonValue(5) + audioSourceRemoteUser, + + /// 6: Mixed audio streams from all users in the current channel. + @JsonValue(6) + audioSourceRemoteChannel, + + /// 100: An unknown audio source. + @JsonValue(100) + audioSourceUnknown, +} + +/// @nodoc +extension AudioSourceTypeExt on AudioSourceType { + /// @nodoc + static AudioSourceType fromValue(int value) { + return $enumDecode(_$AudioSourceTypeEnumMap, value); + } + + /// @nodoc + int value() { + return _$AudioSourceTypeEnumMap[this]!; + } +} + /// The type of the audio route. @JsonEnum(alwaysCreate: true) enum AudioRoute { @@ -489,7 +569,8 @@ class AudioPcmFrame { this.sampleRateHz, this.numChannels, this.bytesPerSample, - this.data}); + this.data, + this.isStereo}); /// The timestamp (ms) of the audio frame. 
@JsonKey(name: 'capture_timestamp') @@ -515,6 +596,10 @@ class AudioPcmFrame { @JsonKey(name: 'data_') final List? data; + /// @nodoc + @JsonKey(name: 'is_stereo_') + final bool? isStereo; + /// @nodoc factory AudioPcmFrame.fromJson(Map json) => _$AudioPcmFrameFromJson(json); @@ -603,6 +688,10 @@ enum VideoPixelFormat { @JsonValue(14) videoCvpixelBgra, + /// @nodoc + @JsonValue(15) + videoCvpixelP010, + /// 16: The format is I422. @JsonValue(16) videoPixelI422, @@ -632,11 +721,11 @@ extension VideoPixelFormatExt on VideoPixelFormat { /// Video display modes. @JsonEnum(alwaysCreate: true) enum RenderModeType { - /// 1: Hidden mode. Uniformly scale the video until one of its dimension fits the boundary (zoomed to fit). One dimension of the video may have clipped contents. + /// 1: Hidden mode. The priority is to fill the window. Any excess video that does not match the window size will be cropped. @JsonValue(1) renderModeHidden, - /// 2: Fit mode. Uniformly scale the video until one of its dimension fits the boundary (zoomed to fit). Areas that are not filled due to disparity in the aspect ratio are filled with black. + /// 2: Fit mode. The priority is to ensure that all video content is displayed. Any areas of the window that are not filled due to the mismatch between video size and window size will be filled with black. @JsonValue(2) renderModeFit, @@ -714,6 +803,403 @@ extension MetaInfoKeyExt on MetaInfoKey { } } +/// @nodoc +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class ColorSpace { + /// @nodoc + const ColorSpace({this.primaries, this.transfer, this.matrix, this.range}); + + /// @nodoc + @JsonKey(name: 'primaries') + final PrimaryID? primaries; + + /// @nodoc + @JsonKey(name: 'transfer') + final TransferID? transfer; + + /// @nodoc + @JsonKey(name: 'matrix') + final MatrixID? matrix; + + /// @nodoc + @JsonKey(name: 'range') + final RangeID? 
range; + + /// @nodoc + factory ColorSpace.fromJson(Map json) => + _$ColorSpaceFromJson(json); + + /// @nodoc + Map toJson() => _$ColorSpaceToJson(this); +} + +/// @nodoc +@JsonEnum(alwaysCreate: true) +enum PrimaryID { + /// @nodoc + @JsonValue(1) + primaryidBt709, + + /// @nodoc + @JsonValue(2) + primaryidUnspecified, + + /// @nodoc + @JsonValue(4) + primaryidBt470m, + + /// @nodoc + @JsonValue(5) + primaryidBt470bg, + + /// @nodoc + @JsonValue(6) + primaryidSmpte170m, + + /// @nodoc + @JsonValue(7) + primaryidSmpte240m, + + /// @nodoc + @JsonValue(8) + primaryidFilm, + + /// @nodoc + @JsonValue(9) + primaryidBt2020, + + /// @nodoc + @JsonValue(10) + primaryidSmptest428, + + /// @nodoc + @JsonValue(11) + primaryidSmptest431, + + /// @nodoc + @JsonValue(12) + primaryidSmptest432, + + /// @nodoc + @JsonValue(22) + primaryidJedecp22, +} + +/// @nodoc +extension PrimaryIDExt on PrimaryID { + /// @nodoc + static PrimaryID fromValue(int value) { + return $enumDecode(_$PrimaryIDEnumMap, value); + } + + /// @nodoc + int value() { + return _$PrimaryIDEnumMap[this]!; + } +} + +/// @nodoc +@JsonEnum(alwaysCreate: true) +enum RangeID { + /// @nodoc + @JsonValue(0) + rangeidInvalid, + + /// @nodoc + @JsonValue(1) + rangeidLimited, + + /// @nodoc + @JsonValue(2) + rangeidFull, + + /// @nodoc + @JsonValue(3) + rangeidDerived, +} + +/// @nodoc +extension RangeIDExt on RangeID { + /// @nodoc + static RangeID fromValue(int value) { + return $enumDecode(_$RangeIDEnumMap, value); + } + + /// @nodoc + int value() { + return _$RangeIDEnumMap[this]!; + } +} + +/// @nodoc +@JsonEnum(alwaysCreate: true) +enum MatrixID { + /// @nodoc + @JsonValue(0) + matrixidRgb, + + /// @nodoc + @JsonValue(1) + matrixidBt709, + + /// @nodoc + @JsonValue(2) + matrixidUnspecified, + + /// @nodoc + @JsonValue(4) + matrixidFcc, + + /// @nodoc + @JsonValue(5) + matrixidBt470bg, + + /// @nodoc + @JsonValue(6) + matrixidSmpte170m, + + /// @nodoc + @JsonValue(7) + matrixidSmpte240m, + + /// @nodoc + 
@JsonValue(8) + matrixidYcocg, + + /// @nodoc + @JsonValue(9) + matrixidBt2020Ncl, + + /// @nodoc + @JsonValue(10) + matrixidBt2020Cl, + + /// @nodoc + @JsonValue(11) + matrixidSmpte2085, + + /// @nodoc + @JsonValue(12) + matrixidCdncls, + + /// @nodoc + @JsonValue(13) + matrixidCdcls, + + /// @nodoc + @JsonValue(14) + matrixidBt2100Ictcp, +} + +/// @nodoc +extension MatrixIDExt on MatrixID { + /// @nodoc + static MatrixID fromValue(int value) { + return $enumDecode(_$MatrixIDEnumMap, value); + } + + /// @nodoc + int value() { + return _$MatrixIDEnumMap[this]!; + } +} + +/// @nodoc +@JsonEnum(alwaysCreate: true) +enum TransferID { + /// @nodoc + @JsonValue(1) + transferidBt709, + + /// @nodoc + @JsonValue(2) + transferidUnspecified, + + /// @nodoc + @JsonValue(4) + transferidGamma22, + + /// @nodoc + @JsonValue(5) + transferidGamma28, + + /// @nodoc + @JsonValue(6) + transferidSmpte170m, + + /// @nodoc + @JsonValue(7) + transferidSmpte240m, + + /// @nodoc + @JsonValue(8) + transferidLinear, + + /// @nodoc + @JsonValue(9) + transferidLog, + + /// @nodoc + @JsonValue(10) + transferidLogSqrt, + + /// @nodoc + @JsonValue(11) + transferidIec6196624, + + /// @nodoc + @JsonValue(12) + transferidBt1361Ecg, + + /// @nodoc + @JsonValue(13) + transferidIec6196621, + + /// @nodoc + @JsonValue(14) + transferidBt202010, + + /// @nodoc + @JsonValue(15) + transferidBt202012, + + /// @nodoc + @JsonValue(16) + transferidSmptest2084, + + /// @nodoc + @JsonValue(17) + transferidSmptest428, + + /// @nodoc + @JsonValue(18) + transferidAribStdB67, +} + +/// @nodoc +extension TransferIDExt on TransferID { + /// @nodoc + static TransferID fromValue(int value) { + return $enumDecode(_$TransferIDEnumMap, value); + } + + /// @nodoc + int value() { + return _$TransferIDEnumMap[this]!; + } +} + +/// @nodoc +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class Hdr10MetadataInfo { + /// @nodoc + const Hdr10MetadataInfo( + {this.redPrimaryX, + this.redPrimaryY, + 
this.greenPrimaryX, + this.greenPrimaryY, + this.bluePrimaryX, + this.bluePrimaryY, + this.whitePointX, + this.whitePointY, + this.maxMasteringLuminance, + this.minMasteringLuminance, + this.maxContentLightLevel, + this.maxFrameAverageLightLevel}); + + /// @nodoc + @JsonKey(name: 'redPrimaryX') + final int? redPrimaryX; + + /// @nodoc + @JsonKey(name: 'redPrimaryY') + final int? redPrimaryY; + + /// @nodoc + @JsonKey(name: 'greenPrimaryX') + final int? greenPrimaryX; + + /// @nodoc + @JsonKey(name: 'greenPrimaryY') + final int? greenPrimaryY; + + /// @nodoc + @JsonKey(name: 'bluePrimaryX') + final int? bluePrimaryX; + + /// @nodoc + @JsonKey(name: 'bluePrimaryY') + final int? bluePrimaryY; + + /// @nodoc + @JsonKey(name: 'whitePointX') + final int? whitePointX; + + /// @nodoc + @JsonKey(name: 'whitePointY') + final int? whitePointY; + + /// @nodoc + @JsonKey(name: 'maxMasteringLuminance') + final int? maxMasteringLuminance; + + /// @nodoc + @JsonKey(name: 'minMasteringLuminance') + final int? minMasteringLuminance; + + /// @nodoc + @JsonKey(name: 'maxContentLightLevel') + final int? maxContentLightLevel; + + /// @nodoc + @JsonKey(name: 'maxFrameAverageLightLevel') + final int? maxFrameAverageLightLevel; + + /// @nodoc + factory Hdr10MetadataInfo.fromJson(Map json) => + _$Hdr10MetadataInfoFromJson(json); + + /// @nodoc + Map toJson() => _$Hdr10MetadataInfoToJson(this); +} + +/// The relative position of alphaBuffer and video frames. +@JsonEnum(alwaysCreate: true) +enum AlphaStitchMode { + /// 0: (Default) Only video frame, that is, alphaBuffer is not stitched with the video frame. + @JsonValue(0) + noAlphaStitch, + + /// 1: alphaBuffer is above the video frame. + @JsonValue(1) + alphaStitchUp, + + /// 2: alphaBuffer is below the video frame. + @JsonValue(2) + alphaStitchBelow, + + /// 3: alphaBuffer is to the left of the video frame. + @JsonValue(3) + alphaStitchLeft, + + /// 4: alphaBuffer is to the right of the video frame. 
+ @JsonValue(4) + alphaStitchRight, +} + +/// @nodoc +extension AlphaStitchModeExt on AlphaStitchMode { + /// @nodoc + static AlphaStitchMode fromValue(int value) { + return $enumDecode(_$AlphaStitchModeEnumMap, value); + } + + /// @nodoc + int value() { + return _$AlphaStitchModeEnumMap[this]!; + } +} + /// The external video frame. @JsonSerializable(explicitToJson: true, includeIfNull: false) class ExternalVideoFrame { @@ -732,12 +1218,17 @@ class ExternalVideoFrame { this.timestamp, this.eglType, this.textureId, + this.fenceObject, this.matrix, this.metadataBuffer, this.metadataSize, this.alphaBuffer, this.fillAlphaBuffer, - this.textureSliceIndex}); + this.alphaStitchMode, + this.d3d11Texture2d, + this.textureSliceIndex, + this.hdr10MetadataInfo, + this.colorSpace}); /// The video type. See VideoBufferType. @JsonKey(name: 'type') @@ -791,16 +1282,20 @@ class ExternalVideoFrame { @JsonKey(name: 'textureId') final int? textureId; + /// @nodoc + @JsonKey(name: 'fenceObject') + final int? fenceObject; + /// This parameter only applies to video data in Texture format. Incoming 4 Ɨ 4 transformational matrix. The typical value is a unit matrix. @JsonKey(name: 'matrix') final List? matrix; /// This parameter only applies to video data in Texture format. The MetaData buffer. The default value is NULL. - @JsonKey(name: 'metadata_buffer', ignore: true) + @JsonKey(name: 'metadataBuffer', ignore: true) final Uint8List? metadataBuffer; /// This parameter only applies to video data in Texture format. The MetaData size. The default value is 0. - @JsonKey(name: 'metadata_size') + @JsonKey(name: 'metadataSize') final int? metadataSize; /// The alpha channel data output by using portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0,255], where 0 represents the background and 255 represents the foreground (portrait). 
By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, video, etc. In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering. @@ -813,10 +1308,26 @@ class ExternalVideoFrame { @JsonKey(name: 'fillAlphaBuffer') final bool? fillAlphaBuffer; + /// When the video frame contains alpha channel data, it represents the relative position of alphaBuffer and the video frame. See AlphaStitchMode. + @JsonKey(name: 'alphaStitchMode') + final AlphaStitchMode? alphaStitchMode; + + /// This parameter only applies to video data in Windows Texture format. It represents a pointer to an object of type ID3D11Texture2D, which is used by a video frame. + @JsonKey(name: 'd3d11Texture2d', readValue: readIntPtr) + final int? d3d11Texture2d; + /// This parameter only applies to video data in Windows Texture format. It represents an index of an ID3D11Texture2D texture object used by the video frame in the ID3D11Texture2D array. - @JsonKey(name: 'texture_slice_index') + @JsonKey(name: 'textureSliceIndex') final int? textureSliceIndex; + /// @nodoc + @JsonKey(name: 'hdr10MetadataInfo') + final Hdr10MetadataInfo? hdr10MetadataInfo; + + /// By default, the color space properties of video frames will apply the Full Range and BT.709 standard configurations. You can configure the settings according your needs for custom video capturing and rendering. + @JsonKey(name: 'colorSpace') + final ColorSpace? colorSpace; + /// @nodoc factory ExternalVideoFrame.fromJson(Map json) => _$ExternalVideoFrameFromJson(json); @@ -903,8 +1414,11 @@ class VideoFrame { this.textureId, this.matrix, this.alphaBuffer, + this.alphaStitchMode, this.pixelBuffer, - this.metaInfo}); + this.metaInfo, + this.hdr10MetadataInfo, + this.colorSpace}); /// The pixel format. See VideoPixelFormat. 
@JsonKey(name: 'type') @@ -970,19 +1484,33 @@ class VideoFrame { @JsonKey(name: 'matrix') final List? matrix; - /// The alpha channel data output by using portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0,255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, video, etc. In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering. + /// The alpha channel data output by using portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0,255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, video, etc. + /// In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering. + /// Make sure that alphaBuffer is exactly the same size as the video frame (width Ɨ height), otherwise it may cause the app to crash. @JsonKey(name: 'alphaBuffer', ignore: true) final Uint8List? alphaBuffer; + /// When the video frame contains alpha channel data, it represents the relative position of alphaBuffer and the video frame. See AlphaStitchMode. + @JsonKey(name: 'alphaStitchMode') + final AlphaStitchMode? alphaStitchMode; + /// @nodoc @JsonKey(name: 'pixelBuffer', ignore: true) final Uint8List? pixelBuffer; - /// The meta information in the video frame. To use this parameter, please contact. + /// The meta information in the video frame. To use this parameter, contact. @VideoFrameMetaInfoConverter() @JsonKey(name: 'metaInfo') final VideoFrameMetaInfo? 
metaInfo; + /// @nodoc + @JsonKey(name: 'hdr10MetadataInfo') + final Hdr10MetadataInfo? hdr10MetadataInfo; + + /// By default, the color space properties of video frames will apply the Full Range and BT.709 standard configurations. You can configure the settings according your needs for custom video capturing and rendering. + @JsonKey(name: 'colorSpace') + final ColorSpace? colorSpace; + /// @nodoc factory VideoFrame.fromJson(Map json) => _$VideoFrameFromJson(json); @@ -1055,6 +1583,32 @@ extension VideoModulePositionExt on VideoModulePosition { } } +/// The snapshot configuration. +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class SnapshotConfig { + /// @nodoc + const SnapshotConfig({this.filePath, this.position}); + + /// The local path (including filename extensions) of the snapshot. For example: + /// Windows: C:\Users\\AppData\Local\Agora\\example.jpg + /// iOS: /App Sandbox/Library/Caches/example.jpg + /// macOS: ļ½ž/Library/Logs/example.jpg + /// Android: /storage/emulated/0/Android/data//files/example.jpg Ensure that the path you specify exists and is writable. + @JsonKey(name: 'filePath') + final String? filePath; + + /// The position of the snapshot video frame in the video pipeline. See VideoModulePosition. + @JsonKey(name: 'position') + final VideoModulePosition? position; + + /// @nodoc + factory SnapshotConfig.fromJson(Map json) => + _$SnapshotConfigFromJson(json); + + /// @nodoc + Map toJson() => _$SnapshotConfigToJson(this); +} + /// This class is used to get raw PCM audio. /// /// You can inherit this class and implement the onFrame callback to get raw PCM audio. 
@@ -1393,7 +1947,7 @@ class AudioSpectrumObserver { /// /// After successfully calling registerAudioSpectrumObserver to implement the onRemoteAudioSpectrum callback in the AudioSpectrumObserver and calling enableAudioSpectrumMonitor to enable audio spectrum monitoring, the SDK will trigger the callback as the time interval you set to report the received remote audio data spectrum. /// - /// * [spectrums] The audio spectrum information of the remote user, see UserAudioSpectrumInfo. The number of arrays is the number of remote users monitored by the SDK. If the array is null, it means that no audio spectrum of remote users is detected. + /// * [spectrums] The audio spectrum information of the remote user. See UserAudioSpectrumInfo. The number of arrays is the number of remote users monitored by the SDK. If the array is null, it means that no audio spectrum of remote users is detected. /// * [spectrumNumber] The number of remote users. final void Function( List spectrums, int spectrumNumber)? @@ -1659,7 +2213,13 @@ class MediaRecorderConfiguration { this.containerFormat, this.streamType, this.maxDurationMs, - this.recorderInfoUpdateInterval}); + this.recorderInfoUpdateInterval, + this.width, + this.height, + this.fps, + this.sampleRate, + this.channelNum, + this.videoSourceType}); /// @nodoc @JsonKey(name: 'storagePath') @@ -1681,6 +2241,30 @@ class MediaRecorderConfiguration { @JsonKey(name: 'recorderInfoUpdateInterval') final int? recorderInfoUpdateInterval; + /// @nodoc + @JsonKey(name: 'width') + final int? width; + + /// @nodoc + @JsonKey(name: 'height') + final int? height; + + /// @nodoc + @JsonKey(name: 'fps') + final int? fps; + + /// @nodoc + @JsonKey(name: 'sample_rate') + final int? sampleRate; + + /// @nodoc + @JsonKey(name: 'channel_num') + final int? channelNum; + + /// @nodoc + @JsonKey(name: 'videoSourceType') + final VideoSourceType? 
videoSourceType; + /// @nodoc factory MediaRecorderConfiguration.fromJson(Map json) => _$MediaRecorderConfigurationFromJson(json); diff --git a/lib/src/agora_media_base.g.dart b/lib/src/agora_media_base.g.dart index 81491b552..b79b7ed67 100644 --- a/lib/src/agora_media_base.g.dart +++ b/lib/src/agora_media_base.g.dart @@ -8,6 +8,30 @@ part of 'agora_media_base.dart'; // JsonSerializableGenerator // ************************************************************************** +ExtensionContext _$ExtensionContextFromJson(Map json) => + ExtensionContext( + isValid: json['isValid'] as bool?, + uid: (json['uid'] as num?)?.toInt(), + providerName: json['providerName'] as String?, + extensionName: json['extensionName'] as String?, + ); + +Map _$ExtensionContextToJson(ExtensionContext instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('isValid', instance.isValid); + writeNotNull('uid', instance.uid); + writeNotNull('providerName', instance.providerName); + writeNotNull('extensionName', instance.extensionName); + return val; +} + AudioParameters _$AudioParametersFromJson(Map json) => AudioParameters( sampleRate: (json['sample_rate'] as num?)?.toInt(), @@ -140,6 +164,7 @@ AudioPcmFrame _$AudioPcmFrameFromJson(Map json) => data: (json['data_'] as List?) 
?.map((e) => (e as num).toInt()) .toList(), + isStereo: json['is_stereo_'] as bool?, ); Map _$AudioPcmFrameToJson(AudioPcmFrame instance) { @@ -158,6 +183,7 @@ Map _$AudioPcmFrameToJson(AudioPcmFrame instance) { writeNotNull( 'bytes_per_sample', _$BytesPerSampleEnumMap[instance.bytesPerSample]); writeNotNull('data_', instance.data); + writeNotNull('is_stereo_', instance.isStereo); return val; } @@ -165,6 +191,129 @@ const _$BytesPerSampleEnumMap = { BytesPerSample.twoBytesPerSample: 2, }; +ColorSpace _$ColorSpaceFromJson(Map json) => ColorSpace( + primaries: $enumDecodeNullable(_$PrimaryIDEnumMap, json['primaries']), + transfer: $enumDecodeNullable(_$TransferIDEnumMap, json['transfer']), + matrix: $enumDecodeNullable(_$MatrixIDEnumMap, json['matrix']), + range: $enumDecodeNullable(_$RangeIDEnumMap, json['range']), + ); + +Map _$ColorSpaceToJson(ColorSpace instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('primaries', _$PrimaryIDEnumMap[instance.primaries]); + writeNotNull('transfer', _$TransferIDEnumMap[instance.transfer]); + writeNotNull('matrix', _$MatrixIDEnumMap[instance.matrix]); + writeNotNull('range', _$RangeIDEnumMap[instance.range]); + return val; +} + +const _$PrimaryIDEnumMap = { + PrimaryID.primaryidBt709: 1, + PrimaryID.primaryidUnspecified: 2, + PrimaryID.primaryidBt470m: 4, + PrimaryID.primaryidBt470bg: 5, + PrimaryID.primaryidSmpte170m: 6, + PrimaryID.primaryidSmpte240m: 7, + PrimaryID.primaryidFilm: 8, + PrimaryID.primaryidBt2020: 9, + PrimaryID.primaryidSmptest428: 10, + PrimaryID.primaryidSmptest431: 11, + PrimaryID.primaryidSmptest432: 12, + PrimaryID.primaryidJedecp22: 22, +}; + +const _$TransferIDEnumMap = { + TransferID.transferidBt709: 1, + TransferID.transferidUnspecified: 2, + TransferID.transferidGamma22: 4, + TransferID.transferidGamma28: 5, + TransferID.transferidSmpte170m: 6, + TransferID.transferidSmpte240m: 7, + 
TransferID.transferidLinear: 8, + TransferID.transferidLog: 9, + TransferID.transferidLogSqrt: 10, + TransferID.transferidIec6196624: 11, + TransferID.transferidBt1361Ecg: 12, + TransferID.transferidIec6196621: 13, + TransferID.transferidBt202010: 14, + TransferID.transferidBt202012: 15, + TransferID.transferidSmptest2084: 16, + TransferID.transferidSmptest428: 17, + TransferID.transferidAribStdB67: 18, +}; + +const _$MatrixIDEnumMap = { + MatrixID.matrixidRgb: 0, + MatrixID.matrixidBt709: 1, + MatrixID.matrixidUnspecified: 2, + MatrixID.matrixidFcc: 4, + MatrixID.matrixidBt470bg: 5, + MatrixID.matrixidSmpte170m: 6, + MatrixID.matrixidSmpte240m: 7, + MatrixID.matrixidYcocg: 8, + MatrixID.matrixidBt2020Ncl: 9, + MatrixID.matrixidBt2020Cl: 10, + MatrixID.matrixidSmpte2085: 11, + MatrixID.matrixidCdncls: 12, + MatrixID.matrixidCdcls: 13, + MatrixID.matrixidBt2100Ictcp: 14, +}; + +const _$RangeIDEnumMap = { + RangeID.rangeidInvalid: 0, + RangeID.rangeidLimited: 1, + RangeID.rangeidFull: 2, + RangeID.rangeidDerived: 3, +}; + +Hdr10MetadataInfo _$Hdr10MetadataInfoFromJson(Map json) => + Hdr10MetadataInfo( + redPrimaryX: (json['redPrimaryX'] as num?)?.toInt(), + redPrimaryY: (json['redPrimaryY'] as num?)?.toInt(), + greenPrimaryX: (json['greenPrimaryX'] as num?)?.toInt(), + greenPrimaryY: (json['greenPrimaryY'] as num?)?.toInt(), + bluePrimaryX: (json['bluePrimaryX'] as num?)?.toInt(), + bluePrimaryY: (json['bluePrimaryY'] as num?)?.toInt(), + whitePointX: (json['whitePointX'] as num?)?.toInt(), + whitePointY: (json['whitePointY'] as num?)?.toInt(), + maxMasteringLuminance: (json['maxMasteringLuminance'] as num?)?.toInt(), + minMasteringLuminance: (json['minMasteringLuminance'] as num?)?.toInt(), + maxContentLightLevel: (json['maxContentLightLevel'] as num?)?.toInt(), + maxFrameAverageLightLevel: + (json['maxFrameAverageLightLevel'] as num?)?.toInt(), + ); + +Map _$Hdr10MetadataInfoToJson(Hdr10MetadataInfo instance) { + final val = {}; + + void writeNotNull(String key, 
dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('redPrimaryX', instance.redPrimaryX); + writeNotNull('redPrimaryY', instance.redPrimaryY); + writeNotNull('greenPrimaryX', instance.greenPrimaryX); + writeNotNull('greenPrimaryY', instance.greenPrimaryY); + writeNotNull('bluePrimaryX', instance.bluePrimaryX); + writeNotNull('bluePrimaryY', instance.bluePrimaryY); + writeNotNull('whitePointX', instance.whitePointX); + writeNotNull('whitePointY', instance.whitePointY); + writeNotNull('maxMasteringLuminance', instance.maxMasteringLuminance); + writeNotNull('minMasteringLuminance', instance.minMasteringLuminance); + writeNotNull('maxContentLightLevel', instance.maxContentLightLevel); + writeNotNull('maxFrameAverageLightLevel', instance.maxFrameAverageLightLevel); + return val; +} + ExternalVideoFrame _$ExternalVideoFrameFromJson(Map json) => ExternalVideoFrame( type: $enumDecodeNullable(_$VideoBufferTypeEnumMap, json['type']), @@ -179,12 +328,23 @@ ExternalVideoFrame _$ExternalVideoFrameFromJson(Map json) => timestamp: (json['timestamp'] as num?)?.toInt(), eglType: $enumDecodeNullable(_$EglContextTypeEnumMap, json['eglType']), textureId: (json['textureId'] as num?)?.toInt(), + fenceObject: (json['fenceObject'] as num?)?.toInt(), matrix: (json['matrix'] as List?) ?.map((e) => (e as num).toDouble()) .toList(), - metadataSize: (json['metadata_size'] as num?)?.toInt(), + metadataSize: (json['metadataSize'] as num?)?.toInt(), fillAlphaBuffer: json['fillAlphaBuffer'] as bool?, - textureSliceIndex: (json['texture_slice_index'] as num?)?.toInt(), + alphaStitchMode: $enumDecodeNullable( + _$AlphaStitchModeEnumMap, json['alphaStitchMode']), + d3d11Texture2d: (readIntPtr(json, 'd3d11Texture2d') as num?)?.toInt(), + textureSliceIndex: (json['textureSliceIndex'] as num?)?.toInt(), + hdr10MetadataInfo: json['hdr10MetadataInfo'] == null + ? 
null + : Hdr10MetadataInfo.fromJson( + json['hdr10MetadataInfo'] as Map), + colorSpace: json['colorSpace'] == null + ? null + : ColorSpace.fromJson(json['colorSpace'] as Map), ); Map _$ExternalVideoFrameToJson(ExternalVideoFrame instance) { @@ -208,10 +368,16 @@ Map _$ExternalVideoFrameToJson(ExternalVideoFrame instance) { writeNotNull('timestamp', instance.timestamp); writeNotNull('eglType', _$EglContextTypeEnumMap[instance.eglType]); writeNotNull('textureId', instance.textureId); + writeNotNull('fenceObject', instance.fenceObject); writeNotNull('matrix', instance.matrix); - writeNotNull('metadata_size', instance.metadataSize); + writeNotNull('metadataSize', instance.metadataSize); writeNotNull('fillAlphaBuffer', instance.fillAlphaBuffer); - writeNotNull('texture_slice_index', instance.textureSliceIndex); + writeNotNull( + 'alphaStitchMode', _$AlphaStitchModeEnumMap[instance.alphaStitchMode]); + writeNotNull('d3d11Texture2d', instance.d3d11Texture2d); + writeNotNull('textureSliceIndex', instance.textureSliceIndex); + writeNotNull('hdr10MetadataInfo', instance.hdr10MetadataInfo?.toJson()); + writeNotNull('colorSpace', instance.colorSpace?.toJson()); return val; } @@ -233,6 +399,7 @@ const _$VideoPixelFormatEnumMap = { VideoPixelFormat.videoCvpixelNv12: 12, VideoPixelFormat.videoCvpixelI420: 13, VideoPixelFormat.videoCvpixelBgra: 14, + VideoPixelFormat.videoCvpixelP010: 15, VideoPixelFormat.videoPixelI422: 16, VideoPixelFormat.videoTextureId3d11texture2d: 17, VideoPixelFormat.videoPixelI010: 18, @@ -243,6 +410,14 @@ const _$EglContextTypeEnumMap = { EglContextType.eglContext14: 1, }; +const _$AlphaStitchModeEnumMap = { + AlphaStitchMode.noAlphaStitch: 0, + AlphaStitchMode.alphaStitchUp: 1, + AlphaStitchMode.alphaStitchBelow: 2, + AlphaStitchMode.alphaStitchLeft: 3, + AlphaStitchMode.alphaStitchRight: 4, +}; + VideoFrame _$VideoFrameFromJson(Map json) => VideoFrame( type: $enumDecodeNullable(_$VideoPixelFormatEnumMap, json['type']), width: (json['width'] as 
num?)?.toInt(), @@ -258,7 +433,16 @@ VideoFrame _$VideoFrameFromJson(Map json) => VideoFrame( matrix: (json['matrix'] as List?) ?.map((e) => (e as num).toDouble()) .toList(), + alphaStitchMode: $enumDecodeNullable( + _$AlphaStitchModeEnumMap, json['alphaStitchMode']), metaInfo: const VideoFrameMetaInfoConverter().fromJson(json['metaInfo']), + hdr10MetadataInfo: json['hdr10MetadataInfo'] == null + ? null + : Hdr10MetadataInfo.fromJson( + json['hdr10MetadataInfo'] as Map), + colorSpace: json['colorSpace'] == null + ? null + : ColorSpace.fromJson(json['colorSpace'] as Map), ); Map _$VideoFrameToJson(VideoFrame instance) { @@ -282,11 +466,43 @@ Map _$VideoFrameToJson(VideoFrame instance) { writeNotNull('metadata_size', instance.metadataSize); writeNotNull('textureId', instance.textureId); writeNotNull('matrix', instance.matrix); + writeNotNull( + 'alphaStitchMode', _$AlphaStitchModeEnumMap[instance.alphaStitchMode]); writeNotNull('metaInfo', const VideoFrameMetaInfoConverter().toJson(instance.metaInfo)); + writeNotNull('hdr10MetadataInfo', instance.hdr10MetadataInfo?.toJson()); + writeNotNull('colorSpace', instance.colorSpace?.toJson()); return val; } +SnapshotConfig _$SnapshotConfigFromJson(Map json) => + SnapshotConfig( + filePath: json['filePath'] as String?, + position: + $enumDecodeNullable(_$VideoModulePositionEnumMap, json['position']), + ); + +Map _$SnapshotConfigToJson(SnapshotConfig instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('filePath', instance.filePath); + writeNotNull('position', _$VideoModulePositionEnumMap[instance.position]); + return val; +} + +const _$VideoModulePositionEnumMap = { + VideoModulePosition.positionPostCapturer: 1, + VideoModulePosition.positionPreRenderer: 2, + VideoModulePosition.positionPreEncoder: 4, + VideoModulePosition.positionPostCapturerOrigin: 8, +}; + AudioFrame _$AudioFrameFromJson(Map json) => AudioFrame( type: 
$enumDecodeNullable(_$AudioFrameTypeEnumMap, json['type']), samplesPerChannel: (json['samplesPerChannel'] as num?)?.toInt(), @@ -414,6 +630,13 @@ MediaRecorderConfiguration _$MediaRecorderConfigurationFromJson( maxDurationMs: (json['maxDurationMs'] as num?)?.toInt(), recorderInfoUpdateInterval: (json['recorderInfoUpdateInterval'] as num?)?.toInt(), + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + fps: (json['fps'] as num?)?.toInt(), + sampleRate: (json['sample_rate'] as num?)?.toInt(), + channelNum: (json['channel_num'] as num?)?.toInt(), + videoSourceType: $enumDecodeNullable( + _$VideoSourceTypeEnumMap, json['videoSourceType']), ); Map _$MediaRecorderConfigurationToJson( @@ -434,6 +657,13 @@ Map _$MediaRecorderConfigurationToJson( writeNotNull('maxDurationMs', instance.maxDurationMs); writeNotNull( 'recorderInfoUpdateInterval', instance.recorderInfoUpdateInterval); + writeNotNull('width', instance.width); + writeNotNull('height', instance.height); + writeNotNull('fps', instance.fps); + writeNotNull('sample_rate', instance.sampleRate); + writeNotNull('channel_num', instance.channelNum); + writeNotNull( + 'videoSourceType', _$VideoSourceTypeEnumMap[instance.videoSourceType]); return val; } @@ -447,6 +677,28 @@ const _$MediaRecorderStreamTypeEnumMap = { MediaRecorderStreamType.streamTypeBoth: 3, }; +const _$VideoSourceTypeEnumMap = { + VideoSourceType.videoSourceCameraPrimary: 0, + VideoSourceType.videoSourceCamera: 0, + VideoSourceType.videoSourceCameraSecondary: 1, + VideoSourceType.videoSourceScreenPrimary: 2, + VideoSourceType.videoSourceScreen: 2, + VideoSourceType.videoSourceScreenSecondary: 3, + VideoSourceType.videoSourceCustom: 4, + VideoSourceType.videoSourceMediaPlayer: 5, + VideoSourceType.videoSourceRtcImagePng: 6, + VideoSourceType.videoSourceRtcImageJpeg: 7, + VideoSourceType.videoSourceRtcImageGif: 8, + VideoSourceType.videoSourceRemote: 9, + VideoSourceType.videoSourceTranscoded: 10, + 
VideoSourceType.videoSourceCameraThird: 11, + VideoSourceType.videoSourceCameraFourth: 12, + VideoSourceType.videoSourceScreenThird: 13, + VideoSourceType.videoSourceScreenFourth: 14, + VideoSourceType.videoSourceSpeechDriven: 15, + VideoSourceType.videoSourceUnknown: 100, +}; + RecorderInfo _$RecorderInfoFromJson(Map json) => RecorderInfo( fileName: json['fileName'] as String?, durationMs: (json['durationMs'] as num?)?.toInt(), @@ -468,26 +720,15 @@ Map _$RecorderInfoToJson(RecorderInfo instance) { return val; } -const _$VideoSourceTypeEnumMap = { - VideoSourceType.videoSourceCameraPrimary: 0, - VideoSourceType.videoSourceCamera: 0, - VideoSourceType.videoSourceCameraSecondary: 1, - VideoSourceType.videoSourceScreenPrimary: 2, - VideoSourceType.videoSourceScreen: 2, - VideoSourceType.videoSourceScreenSecondary: 3, - VideoSourceType.videoSourceCustom: 4, - VideoSourceType.videoSourceMediaPlayer: 5, - VideoSourceType.videoSourceRtcImagePng: 6, - VideoSourceType.videoSourceRtcImageJpeg: 7, - VideoSourceType.videoSourceRtcImageGif: 8, - VideoSourceType.videoSourceRemote: 9, - VideoSourceType.videoSourceTranscoded: 10, - VideoSourceType.videoSourceCameraThird: 11, - VideoSourceType.videoSourceCameraFourth: 12, - VideoSourceType.videoSourceScreenThird: 13, - VideoSourceType.videoSourceScreenFourth: 14, - VideoSourceType.videoSourceSpeechDriven: 15, - VideoSourceType.videoSourceUnknown: 100, +const _$AudioSourceTypeEnumMap = { + AudioSourceType.audioSourceMicrophone: 0, + AudioSourceType.audioSourceCustom: 1, + AudioSourceType.audioSourceMediaPlayer: 2, + AudioSourceType.audioSourceLoopbackRecording: 3, + AudioSourceType.audioSourceMixedStream: 4, + AudioSourceType.audioSourceRemoteUser: 5, + AudioSourceType.audioSourceRemoteChannel: 6, + AudioSourceType.audioSourceUnknown: 100, }; const _$AudioRouteEnumMap = { @@ -558,13 +799,6 @@ const _$MediaPlayerSourceTypeEnumMap = { MediaPlayerSourceType.mediaPlayerSourceSimple: 2, }; -const _$VideoModulePositionEnumMap = { - 
VideoModulePosition.positionPostCapturer: 1, - VideoModulePosition.positionPreRenderer: 2, - VideoModulePosition.positionPreEncoder: 4, - VideoModulePosition.positionPostCapturerOrigin: 8, -}; - const _$AudioFramePositionEnumMap = { AudioFramePosition.audioFramePositionNone: 0, AudioFramePosition.audioFramePositionPlayback: 1, diff --git a/lib/src/agora_media_engine.dart b/lib/src/agora_media_engine.dart index 0db692f67..da76413b8 100644 --- a/lib/src/agora_media_engine.dart +++ b/lib/src/agora_media_engine.dart @@ -48,13 +48,7 @@ abstract class MediaEngine { /// Registers a raw video frame observer object. /// - /// If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps: - /// Call registerVideoFrameObserver to register the raw video frame observer before joining the channel. - /// Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel. - /// After joining the channel, get the user IDs of group B users through onUserJoined, and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true. - /// Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then: - /// The raw video data of group A users can be obtained through the callback in VideoFrameObserver, and the SDK renders the data by default. - /// The encoded video data of group B users can be obtained through the callback in VideoEncodedFrameObserver. If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one VideoFrameObserver class with this method. When calling this method to register a video observer, you can register callbacks in the VideoFrameObserver class as needed. 
After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received. + /// If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one VideoFrameObserver class with this method. When calling this method to register a video observer, you can register callbacks in the VideoFrameObserver class as needed. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received. /// /// * [observer] The observer instance. See VideoFrameObserver. /// @@ -65,14 +59,7 @@ abstract class MediaEngine { /// Registers a receiver object for the encoded video image. /// - /// If you only want to observe encoded video frames (such as h.264 format) without decoding and rendering the video, Agora recommends that you implement one VideoEncodedFrameObserver class through this method. If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps: - /// Call registerVideoFrameObserver to register the raw video frame observer before joining the channel. - /// Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel. - /// After joining the channel, get the user IDs of group B users through onUserJoined, and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true. - /// Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then: - /// The raw video data of group A users can be obtained through the callback in VideoFrameObserver, and the SDK renders the data by default. - /// The encoded video data of group B users can be obtained through the callback in VideoEncodedFrameObserver. - /// Call this method before joining a channel. 
+ /// If you only want to observe encoded video frames (such as H.264 format) without decoding and rendering the video, Agora recommends that you implement one VideoEncodedFrameObserver class through this method. Call this method before joining a channel. /// /// * [observer] The video frame observer object. See VideoEncodedFrameObserver. /// @@ -130,6 +117,9 @@ abstract class MediaEngine { ExternalVideoSourceType sourceType = ExternalVideoSourceType.videoFrame, SenderOptions encodedVideoOption = const SenderOptions()}); + /// @nodoc + Future setExternalRemoteEglContext(int eglContext); + /// Sets the external audio source parameters. /// /// Deprecated: This method is deprecated, use createCustomAudioTrack instead. diff --git a/lib/src/agora_media_player.dart b/lib/src/agora_media_player.dart index f93b0f1c9..4eaf31414 100644 --- a/lib/src/agora_media_player.dart +++ b/lib/src/agora_media_player.dart @@ -105,8 +105,7 @@ abstract class MediaPlayer { /// * [index] The index of the media stream. This parameter must be less than the return value of getStreamCount. /// /// Returns - /// If the call succeeds, returns the detailed information of the media stream. See PlayerStreamInfo. - /// If the call fails, returns NULL. + /// If the call succeeds, returns the detailed information of the media stream. See PlayerStreamInfo. NULL, if the method call fails. Future getStreamInfo(int index); /// Sets the loop playback. @@ -125,8 +124,8 @@ abstract class MediaPlayer { /// /// Call this method after calling open. /// - /// * [speed] The playback speed. Agora recommends that you limit this value to a range between 50 and 400, which is defined as follows: - /// 50: Half the original speed. + /// * [speed] The playback speed. Agora recommends that you set this to a value between 30 and 400, defined as follows: + /// 30: 0.3 times the original speed. /// 100: The original speed. /// 400: 4 times the original speed. 
/// @@ -351,7 +350,7 @@ abstract class MediaPlayer { /// /// You can call this method to switch the media resource to be played according to the current network status. For example: /// When the network is poor, the media resource to be played is switched to a media resource address with a lower bitrate. - /// When the network is good, the media resource to be played is switched to a media resource address with a higher bitrate. After calling this method, if you receive the playerEventSwitchComplete event in the onPlayerEvent callback, the switch is successful; If you receive the playerEventSwitchError event in the onPlayerEvent callback, the switch fails. + /// When the network is good, the media resource to be played is switched to a media resource address with a higher bitrate. After calling this method, if you receive the onPlayerEvent callback report the playerEventSwitchComplete event, the switching is successful. If the switching fails, the SDK will automatically retry 3 times. If it still fails, you will receive the onPlayerEvent callback reporting the playerEventSwitchError event indicating an error occurred during media resource switching. /// Ensure that you call this method after open. /// To ensure normal playback, pay attention to the following when calling this method: /// Do not call this method when playback is paused. @@ -359,7 +358,7 @@ abstract class MediaPlayer { /// Before switching the media resource, make sure that the playback position does not exceed the total duration of the media resource to be switched. /// /// * [src] The URL of the media resource. - /// * [syncPts] Whether to synchronize the playback position (ms) before and after the switch: true : Synchronize the playback position before and after the switch. false : (Default) Do not synchronize the playback position before and after the switch. Make sure to set this parameter as false if you need to play live streams, or the switch fails. 
If you need to play on-demand streams, you can set the value of this parameter according to your scenarios. + /// * [syncPts] Whether to synchronize the playback position (ms) before and after the switch: true : Synchronize the playback position before and after the switch. false : (Default) Do not synchronize the playback position before and after the switch. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -367,7 +366,9 @@ abstract class MediaPlayer { /// Preloads a media resource. /// - /// You can call this method to preload a media resource into the playlist. If you need to preload multiple media resources, you can call this method multiple times. If the preload is successful and you want to play the media resource, call playPreloadedSrc; if you want to clear the playlist, call stop. Agora does not support preloading duplicate media resources to the playlist. However, you can preload the media resources that are being played to the playlist again. + /// You can call this method to preload a media resource into the playlist. If you need to preload multiple media resources, you can call this method multiple times. If the preload is successful and you want to play the media resource, call playPreloadedSrc; if you want to clear the playlist, call stop. + /// Before calling this method, ensure that you have called open or openWithMediaSource to open the media resource successfully. + /// Agora does not support preloading duplicate media resources to the playlist. However, you can preload the media resources that are being played to the playlist again. /// /// * [src] The URL of the media resource. /// * [startPos] The starting position (ms) for playing after the media resource is preloaded to the playlist. When preloading a live stream, set this parameter to 0. 
diff --git a/lib/src/agora_media_player_source.dart b/lib/src/agora_media_player_source.dart index 3042da599..d772c08ea 100644 --- a/lib/src/agora_media_player_source.dart +++ b/lib/src/agora_media_player_source.dart @@ -40,7 +40,7 @@ class MediaPlayerSourceObserver { /// /// After calling the seek method, the SDK triggers the callback to report the results of the seek operation. /// - /// * [eventCode] The player events. See MediaPlayerEvent. + /// * [eventCode] The player event. See MediaPlayerEvent. /// * [elapsedTime] The time (ms) when the event occurs. /// * [message] Information about the event. final void Function( @@ -58,8 +58,8 @@ class MediaPlayerSourceObserver { /// Reports the playback duration that the buffered data can support. /// /// When playing online media resources, the SDK triggers this callback every two seconds to report the playback duration that the currently buffered data can support. - /// When the playback duration supported by the buffered data is less than the threshold (0 by default), the SDK returns playerEventBufferLow. - /// When the playback duration supported by the buffered data is greater than the threshold (0 by default), the SDK returns playerEventBufferRecover. + /// When the playback duration supported by the buffered data is less than the threshold (0 by default), the SDK returns playerEventBufferLow (6). + /// When the playback duration supported by the buffered data is greater than the threshold (0 by default), the SDK returns playerEventBufferRecover (7). /// /// * [playCachedBuffer] The playback duration (ms) that the buffered data can support. final void Function(int playCachedBuffer)? onPlayBufferUpdated; diff --git a/lib/src/agora_media_player_types.dart b/lib/src/agora_media_player_types.dart index 7c74ce72b..4d695d4b5 100644 --- a/lib/src/agora_media_player_types.dart +++ b/lib/src/agora_media_player_types.dart @@ -582,7 +582,7 @@ class MediaSource { @JsonKey(name: 'startPos') final int? 
startPos; - /// Whether to enable autoplay once the media file is opened: true : (Default) Enables autoplay. false : Disables autoplay. If autoplay is disabled, you need to call the play method to play a media file after it is opened. + /// Whether to enable autoplay once the media file is opened: true : (Default) Yes. false : No. If autoplay is disabled, you need to call the play method to play a media file after it is opened. @JsonKey(name: 'autoPlay') final bool? autoPlay; diff --git a/lib/src/agora_rtc_engine.dart b/lib/src/agora_rtc_engine.dart index e09c6b292..8383846d6 100644 --- a/lib/src/agora_rtc_engine.dart +++ b/lib/src/agora_rtc_engine.dart @@ -110,6 +110,10 @@ enum AudioMixingReasonType { @JsonValue(724) audioMixingReasonStoppedByUser, + /// @nodoc + @JsonValue(726) + audioMixingReasonResumedByUser, + /// 0: The SDK opens music file successfully. @JsonValue(0) audioMixingReasonOk, @@ -297,6 +301,30 @@ enum StreamFallbackOptions { /// 2: When the network conditions are weak, try to receive the low-quality video stream first. If the video cannot be displayed due to extremely weak network environment, then fall back to receiving audio-only stream. @JsonValue(2) streamFallbackOptionAudioOnly, + + /// @nodoc + @JsonValue(3) + streamFallbackOptionVideoStreamLayer1, + + /// @nodoc + @JsonValue(4) + streamFallbackOptionVideoStreamLayer2, + + /// @nodoc + @JsonValue(5) + streamFallbackOptionVideoStreamLayer3, + + /// @nodoc + @JsonValue(6) + streamFallbackOptionVideoStreamLayer4, + + /// @nodoc + @JsonValue(7) + streamFallbackOptionVideoStreamLayer5, + + /// @nodoc + @JsonValue(8) + streamFallbackOptionVideoStreamLayer6, } /// @nodoc @@ -364,7 +392,8 @@ class LocalVideoStats { this.txPacketLossRate, this.captureBrightnessLevel, this.dualStreamEnabled, - this.hwEncoderAccelerating}); + this.hwEncoderAccelerating, + this.simulcastDimensions}); /// The ID of the local user. 
@JsonKey(name: 'uid') @@ -460,6 +489,10 @@ class LocalVideoStats { @JsonKey(name: 'hwEncoderAccelerating') final int? hwEncoderAccelerating; + /// @nodoc + @JsonKey(name: 'simulcastDimensions') + final List? simulcastDimensions; + /// @nodoc factory LocalVideoStats.fromJson(Map json) => _$LocalVideoStatsFromJson(json); @@ -588,6 +621,7 @@ class RemoteVideoStats { this.width, this.height, this.receivedBitrate, + this.decoderInputFrameRate, this.decoderOutputFrameRate, this.rendererOutputFrameRate, this.frameLossRate, @@ -625,6 +659,10 @@ class RemoteVideoStats { @JsonKey(name: 'receivedBitrate') final int? receivedBitrate; + /// @nodoc + @JsonKey(name: 'decoderInputFrameRate') + final int? decoderInputFrameRate; + /// The frame rate (fps) of decoding the remote video. @JsonKey(name: 'decoderOutputFrameRate') final int? decoderOutputFrameRate; @@ -1340,7 +1378,8 @@ class ChannelMediaOptions { this.publishRhythmPlayerTrack, this.isInteractiveAudience, this.customVideoTrackId, - this.isAudioFilterable}); + this.isAudioFilterable, + this.parameters}); /// Whether to publish the video captured by the camera: true : Publish the video captured by the camera. false : Do not publish the video captured by the camera. @JsonKey(name: 'publishCameraTrack') @@ -1390,7 +1429,7 @@ class ChannelMediaOptions { @JsonKey(name: 'publishCustomAudioTrack') final bool? publishCustomAudioTrack; - /// The ID of the custom audio source to publish. The default value is 0. If you have set sourceNumber in setExternalAudioSource to a value greater than 1, the SDK creates the corresponding number of custom audio tracks and assigns an ID to each audio track, starting from 0. + /// The ID of the custom audio track to be published. The default value is 0. You can obtain the custom audio track ID through the createCustomAudioTrack method. @JsonKey(name: 'publishCustomAudioTrackId') final int? 
publishCustomAudioTrackId; @@ -1414,7 +1453,7 @@ class ChannelMediaOptions { @JsonKey(name: 'publishTranscodedVideoTrack') final bool? publishTranscodedVideoTrack; - /// @nodoc + /// Whether to publish the mixed audio track: true : Publish the mixed audio track. false : Do not publish the mixed audio track. @JsonKey(name: 'publishMixedAudioTrack') final bool? publishMixedAudioTrack; @@ -1490,6 +1529,10 @@ class ChannelMediaOptions { @JsonKey(name: 'isAudioFilterable') final bool? isAudioFilterable; + /// @nodoc + @JsonKey(name: 'parameters') + final String? parameters; + /// @nodoc factory ChannelMediaOptions.fromJson(Map json) => _$ChannelMediaOptionsFromJson(json); @@ -1693,10 +1736,10 @@ class RtcEngineEventHandler { this.onVideoPublishStateChanged, this.onTranscodedStreamLayoutInfo, this.onAudioMetadataReceived, - this.onExtensionEvent, - this.onExtensionStarted, - this.onExtensionStopped, - this.onExtensionError, + this.onExtensionEventWithContext, + this.onExtensionStartedWithContext, + this.onExtensionStoppedWithContext, + this.onExtensionErrorWithContext, this.onSetRtmFlagResult, }); @@ -1825,7 +1868,7 @@ class RtcEngineEventHandler { /// Reports the last mile network quality of each user in the channel. /// - /// This callback reports the last mile network conditions of each user in the channel. Last mile refers to the connection between the local device and Agora's edge server. The SDK triggers this callback once every two seconds. If a channel includes multiple users, the SDK triggers this callback as many times. This callback provides feedback on network quality through sending and receiving broadcast packets within the channel. Excessive broadcast packets can lead to broadcast storms. To prevent broadcast storms from causing a large amount of data transmission within the channel, this callback supports feedback on the network quality of up to 4 remote hosts simultaneously by default. 
txQuality is when the user is not sending a stream; rxQuality is when the user is not receiving a stream. + /// This callback reports the last mile network conditions of each user in the channel. Last mile refers to the connection between the local device and Agora's edge server. The SDK triggers this callback once every two seconds. If a channel includes multiple users, the SDK triggers this callback as many times. This callback provides feedback on network quality through sending and receiving broadcast packets within the channel. Excessive broadcast packets can lead to broadcast storms. To prevent broadcast storms from causing a large amount of data transmission within the channel, this callback supports feedback on the network quality of up to 4 remote hosts simultaneously by default. txQuality is Unknown when the user is not sending a stream; rxQuality is Unknown when the user is not receiving a stream. /// /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The user ID. The network quality of the user with this user ID is reported. If the uid is 0, the local network quality is reported. @@ -1851,7 +1894,7 @@ class RtcEngineEventHandler { /// /// This callback reports the last-mile network conditions of the local user before the user joins the channel. Last mile refers to the connection between the local device and Agora's edge server. Before the user joins the channel, this callback is triggered by the SDK once startLastmileProbeTest is called and reports the last-mile network conditions of the local user. /// - /// * [quality] The last-mile network quality. qualityUnknown (0): The quality is unknown. qualityExcellent (1): The quality is excellent. qualityGood (2): The network quality seems excellent, but the bitrate can be slightly lower than excellent. qualityPoor (3): Users can feel the communication is slightly impaired. qualityBad (4): Users cannot communicate smoothly. 
qualityVbad (5): The quality is so bad that users can barely communicate. qualityDown (6): The network is down, and users cannot communicate at all. See QualityType. + /// * [quality] The last-mile network quality. qualityUnknown (0): The quality is unknown. qualityExcellent (1): The quality is excellent. qualityGood (2): The network quality seems excellent, but the bitrate can be slightly lower than excellent. qualityPoor (3): Users can feel the communication is slightly impaired. qualityBad (4): Users cannot communicate smoothly. qualityVbad (5): The quality is so bad that users can barely communicate. qualityDown (6): The network is down, and users cannot communicate at all. qualityDetecting (8): The last-mile probe test is in progress. See QualityType. final void Function(QualityType quality)? onLastmileQuality; /// Occurs when the first local video frame is displayed on the local video view. @@ -2143,7 +2186,7 @@ class RtcEngineEventHandler { /// The SDK triggers this callback when the local user receives the stream message that the remote user sends by calling the sendStreamMessage method. /// /// * [connection] The connection information. See RtcConnection. - /// * [uid] The ID of the remote user sending the message. + /// * [remoteUid] The ID of the remote user sending the message. /// * [streamId] The stream ID of the received message. /// * [data] The data received. /// * [length] The data length (byte). @@ -2158,7 +2201,7 @@ class RtcEngineEventHandler { /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The ID of the remote user sending the message. /// * [streamId] The stream ID of the received message. - /// * [code] The error code. See ErrorCodeType. + /// * [code] Error code. See ErrorCodeType. /// * [missed] The number of lost messages. /// * [cached] Number of incoming cached messages when the data stream is interrupted. 
final void Function(RtcConnection connection, int remoteUid, int streamId, @@ -2465,7 +2508,7 @@ class RtcEngineEventHandler { /// Video frame rendering event callback. /// - /// After calling the startMediaRenderingTracing method or joining the channel, the SDK triggers this callback to report the events of video frame rendering and the indicators during the rendering process. Developers can optimize the indicators to improve the efficiency of the first video frame rendering. + /// After calling the startMediaRenderingTracing method or joining a channel, the SDK triggers this callback to report the events of video frame rendering and the indicators during the rendering process. Developers can optimize the indicators to improve the efficiency of the first video frame rendering. /// /// * [connection] The connection information. See RtcConnection. /// * [uid] The user ID. @@ -2568,21 +2611,39 @@ RtcConnection connection, int uid, Uint8List metadata, int length)? onAudioMetadataReceived; - /// @nodoc - final void Function( - String provider, String extension, String key, String value)? - onExtensionEvent; + /// The event callback of the extension. + /// + /// To listen for events while the extension is running, you need to register this callback. + /// + /// * [value] The value of the extension key. + /// * [key] The key of the extension. + /// * [context] The context information of the extension, see ExtensionContext. + final void Function(ExtensionContext context, String key, String value)? + onExtensionEventWithContext; - /// @nodoc - final void Function(String provider, String extension)? onExtensionStarted; + /// Occurs when the extension is enabled. + /// + /// The callback is triggered after the extension is successfully enabled. + /// + /// * [context] The context information of the extension, see ExtensionContext. + final void Function(ExtensionContext context)?
onExtensionStartedWithContext; - /// @nodoc - final void Function(String provider, String extension)? onExtensionStopped; + /// Occurs when the extension is disabled. + /// + /// The callback is triggered after the extension is successfully disabled. + /// + /// * [context] The context information of the extension, see ExtensionContext. + final void Function(ExtensionContext context)? onExtensionStoppedWithContext; - /// @nodoc - final void Function( - String provider, String extension, int error, String message)? - onExtensionError; + /// Occurs when the extension runs incorrectly. + /// + /// In case of extension enabling failure or runtime errors, the extension triggers this callback and reports the error code along with the reasons. + /// + /// * [context] The context information of the extension, see ExtensionContext. + /// * [error] Error code. For details, see the extension documentation provided by the extension provider. + /// * [message] Reason. For details, see the extension documentation provided by the extension provider. + final void Function(ExtensionContext context, int error, String message)? + onExtensionErrorWithContext; /// @nodoc final void Function(RtcConnection connection, int code)? onSetRtmFlagResult; @@ -2798,7 +2859,12 @@ extension MaxMetadataSizeTypeExt on MaxMetadataSizeType { @JsonSerializable(explicitToJson: true, includeIfNull: false) class Metadata { /// @nodoc - const Metadata({this.uid, this.size, this.buffer, this.timeStampMs}); + const Metadata( + {this.channelId, this.uid, this.size, this.buffer, this.timeStampMs}); + + /// The channel name. + @JsonKey(name: 'channelId') + final String? channelId; /// The user ID. /// For the recipient: The ID of the remote user who sent the Metadata. @@ -3079,10 +3145,10 @@ abstract class RtcEngine { /// Gets the warning or error description. /// - /// * [code] The error code or warning code reported by the SDK. + /// * [code] The error code reported by the SDK. 
/// /// Returns - /// The specific error or warning description. + /// The specific error description. Future getErrorDescription(int code); /// Queries the video codec capabilities of the SDK. @@ -3364,7 +3430,9 @@ abstract class RtcEngine { /// /// * [enabled] Whether to enable the image enhancement function: true : Enable the image enhancement function. false : (Default) Disable the image enhancement function. /// * [options] The image enhancement options. See BeautyOptions. - /// * [type] Source type of the extension. See MediaSourceType. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. + /// Set this parameter to customVideoSource if you use custom video source. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3373,19 +3441,50 @@ abstract class RtcEngine { required BeautyOptions options, MediaSourceType type = MediaSourceType.primaryCameraSource}); + /// @nodoc + Future setFaceShapeBeautyOptions( + {required bool enabled, + required FaceShapeBeautyOptions options, + MediaSourceType type = MediaSourceType.primaryCameraSource}); + + /// @nodoc + Future setFaceShapeAreaOptions( + {required FaceShapeAreaOptions options, + MediaSourceType type = MediaSourceType.primaryCameraSource}); + + /// @nodoc + Future getFaceShapeBeautyOptions( + {MediaSourceType type = MediaSourceType.primaryCameraSource}); + + /// @nodoc + Future getFaceShapeAreaOptions( + {required FaceShapeArea shapeArea, + MediaSourceType type = MediaSourceType.primaryCameraSource}); + + /// Sets the filter effect options and specifies the media source. + /// + /// * [enabled] Whether to enable the filter effect: true : Yes. false : (Default) No. 
+ /// * [options] The filter effect options. See FilterEffectOptions. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. + /// Set this parameter to customVideoSource if you use custom video source. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + Future setFilterEffectOptions( + {required bool enabled, + required FilterEffectOptions options, + MediaSourceType type = MediaSourceType.primaryCameraSource}); + /// Sets low-light enhancement. /// - /// The low-light enhancement feature can adaptively adjust the brightness value of the video captured in situations with low or uneven lighting, such as backlit, cloudy, or dark scenes. It restores or highlights the image details and improves the overall visual effect of the video. You can call this method to enable the color enhancement feature and set the options of the color enhancement effect. - /// Call this method after calling enableVideo. - /// Dark light enhancement has certain requirements for equipment performance. The low-light enhancement feature has certain performance requirements on devices. If your device overheats after you enable low-light enhancement, Agora recommends modifying the low-light enhancement options to a less performance-consuming level or disabling low-light enhancement entirely. - /// Both this method and setExtensionProperty can turn on low-light enhancement: - /// When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK). 
- /// When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty. - /// This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// You can call this method to enable the color enhancement feature and set the options of the color enhancement effect. /// /// * [enabled] Whether to enable low-light enhancement: true : Enable low-light enhancement. false : (Default) Disable low-light enhancement. /// * [options] The low-light enhancement options. See LowlightEnhanceOptions. - /// * [type] The type of the video source. See MediaSourceType. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. + /// Set this parameter to customVideoSource if you use custom video source. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3396,17 +3495,13 @@ abstract class RtcEngine { /// Sets video noise reduction. /// - /// Underlit environments and low-end video capture devices can cause video images to contain significant noise, which affects video quality. In real-time interactive scenarios, video noise also consumes bitstream resources and reduces encoding efficiency during encoding. You can call this method to enable the video noise reduction feature and set the options of the video noise reduction effect. - /// Call this method after calling enableVideo. - /// Video noise reduction has certain requirements for equipment performance. 
If your device overheats after you enable video noise reduction, Agora recommends modifying the video noise reduction options to a less performance-consuming level or disabling video noise reduction entirely. - /// Both this method and setExtensionProperty can turn on video noise reduction function: - /// When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK). - /// When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty. - /// This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// You can call this method to enable the video noise reduction feature and set the options of the video noise reduction effect. If the noise reduction implemented by this method does not meet your needs, Agora recommends that you call the setBeautyEffectOptions method to enable the beauty and skin smoothing function to achieve better video noise reduction effects. The recommended BeautyOptions settings for intense noise reduction effect are as follows: lighteningContrastLevel lighteningContrastNormal lighteningLevel : 0.0 smoothnessLevel : 0.5 rednessLevel : 0.0 sharpnessLevel : 0.1 /// /// * [enabled] Whether to enable video noise reduction: true : Enable video noise reduction. false : (Default) Disable video noise reduction. /// * [options] The video noise reduction options. See VideoDenoiserOptions. - /// * [type] The type of the video source. See MediaSourceType. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. 
+ /// Set this parameter to customVideoSource if you use custom video source. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3420,14 +3515,13 @@ abstract class RtcEngine { /// The video images captured by the camera can have color distortion. The color enhancement feature intelligently adjusts video characteristics such as saturation and contrast to enhance the video color richness and color reproduction, making the video more vivid. You can call this method to enable the color enhancement feature and set the options of the color enhancement effect. /// Call this method after calling enableVideo. /// The color enhancement feature has certain performance requirements on devices. With color enhancement turned on, Agora recommends that you change the color enhancement level to one that consumes less performance or turn off color enhancement if your device is experiencing severe heat problems. - /// Both this method and setExtensionProperty can enable color enhancement: - /// When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK). - /// When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty. /// This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. /// /// * [enabled] Whether to enable color enhancement: true Enable color enhancement. false : (Default) Disable color enhancement. /// * [options] The color enhancement options. See ColorEnhanceOptions. - /// * [type] The type of the video source. See MediaSourceType. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. 
In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. + /// Set this parameter to customVideoSource if you use custom video source. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3459,9 +3553,9 @@ abstract class RtcEngine { /// * [enabled] Whether to enable virtual background: true : Enable virtual background. false : Disable virtual background. /// * [backgroundSource] The custom background. See VirtualBackgroundSource. To adapt the resolution of the custom background image to that of the video captured by the SDK, the SDK scales and crops the custom background image while ensuring that the content of the custom background image is not distorted. /// * [segproperty] Processing properties for background images. See SegmentationProperty. - /// * [type] The type of the video source. See MediaSourceType. In this method, this parameter supports only the following two settings: - /// The default value is primaryCameraSource. - /// If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. + /// Set this parameter to customVideoSource if you use custom video source. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. 
@@ -3515,7 +3609,7 @@ abstract class RtcEngine { /// If someone subscribes to the low-quality stream, the SDK enables the low-quality stream and resets it to the SimulcastStreamConfig configuration used in the most recent calling of setDualStreamMode. If no configuration has been set by the user previously, the following values are used: /// Resolution: 480 Ɨ 272 /// Frame rate: 15 fps - /// Bitrate: 500 Kbps applicationScenario1v1 (2) is suitable for 1v1 video call scenarios. To meet the requirements for low latency and high-quality video in this scenario, the SDK optimizes its strategies, improving performance in terms of video quality, first frame rendering, latency on mid-to-low-end devices, and smoothness under weak network conditions. + /// Bitrate: 500 Kbps applicationScenario1v1 (2) This is applicable to the 1v1 video call scenario. To meet the requirements for low latency and high-quality video in this scenario, the SDK optimizes its strategies, improving performance in terms of video quality, first frame rendering, latency on mid-to-low-end devices, and smoothness under weak network conditions. applicationScenarioLiveshow (3) This is applicable to the live show scenario. In this scenario, fast video rendering and high image quality are crucial. The SDK implements several performance optimizations, including automatically enabling accelerated audio and video frame rendering to minimize first-frame latency (no need to call enableInstantMediaRendering), and B-frame encoding to achieve better image quality and bandwidth efficiency. The SDK also provides enhanced video quality and smooth playback, even in poor network conditions or on lower-end devices. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
@@ -3589,9 +3683,6 @@ abstract class RtcEngine { /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future muteAllRemoteAudioStreams(bool mute); - /// @nodoc - Future setDefaultMuteAllRemoteAudioStreams(bool mute); - /// Stops or resumes subscribing to the audio stream of a specified user. /// /// * [uid] The user ID of the specified user. @@ -3633,9 +3724,6 @@ abstract class RtcEngine { /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future muteAllRemoteVideoStreams(bool mute); - /// @nodoc - Future setDefaultMuteAllRemoteVideoStreams(bool mute); - /// Sets the default video stream type to subscribe to. /// /// The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. Depending on the default behavior of the sender and the specific settings when calling setDualStreamMode, the scenarios for the receiver calling this method are as follows: @@ -3678,14 +3766,10 @@ abstract class RtcEngine { /// Options for subscribing to remote video streams. /// - /// When a remote user has enabled dual-stream mode, you can call this method to choose the option for subscribing to the video streams sent by the remote user. - /// If you only register one VideoFrameObserver object, the SDK subscribes to the raw video data and encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false). 
- /// If you only register one VideoEncodedFrameObserver object, the SDK only subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to true). - /// If you register one VideoFrameObserver object and one VideoEncodedFrameObserver object successively, the SDK subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false). - /// If you call this method first with the options parameter set, and then register one VideoFrameObserver or VideoEncodedFrameObserver object, you need to call this method again and set the options parameter as described in the above two items to get the desired results. Agora recommends the following steps: - /// Set autoSubscribeVideo to false when calling joinChannel to join a channel. - /// Call this method after receiving the onUserJoined callback to set the subscription options for the specified remote user's video stream. - /// Call the muteRemoteVideoStream method to resume subscribing to the video stream of the specified remote user. If you set encodedFrameOnly to true in the previous step, the SDK triggers the onEncodedVideoFrameReceived callback locally to report the received encoded video frame information. + /// When a remote user has enabled dual-stream mode, you can call this method to choose the option for subscribing to the video streams sent by the remote user. The default subscription behavior of the SDK for remote video streams depends on the type of registered video observer: + /// If the VideoFrameObserver observer is registered, the default is to subscribe to both raw data and encoded data. + /// If the VideoEncodedFrameObserver observer is registered, the default is to subscribe only to the encoded data. + /// If both types of observers are registered, the default behavior follows the last registered video observer. 
For example, if the last registered observer is the VideoFrameObserver observer, the default is to subscribe to both raw data and encoded data. If you want to modify the default behavior, or set different subscription options for different uids, you can call this method to set it. /// /// * [uid] The user ID of the remote user. /// * [options] The video subscription options. See VideoSubscriptionOptions. @@ -3897,7 +3981,7 @@ abstract class RtcEngine { /// Adjusts the volume during audio mixing. /// - /// This method adjusts the audio mixing volume on both the local client and remote clients. + /// This method adjusts the audio mixing volume on both the local client and remote clients. This method does not affect the volume of the audio file set in the playEffect method. /// /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. /// @@ -4413,6 +4497,18 @@ abstract class RtcEngine { Future setHeadphoneEQParameters( {required int lowGain, required int highGain}); + /// Enables or disables the voice AI tuner. + /// + /// The voice AI tuner supports enhancing sound quality and adjusting tone style. + /// + /// * [enabled] Whether to enable the voice AI tuner: true : Enables the voice AI tuner. false : (Default) Disable the voice AI tuner. + /// * [type] Voice AI tuner sound types, see VoiceAiTunerType. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + Future enableVoiceAITuner( + {required bool enabled, required VoiceAiTunerType type}); + /// Sets the log file. /// /// Deprecated: This method is deprecated. Set the log file path by configuring the context parameter when calling initialize. Specifies an SDK output log file. The log file records all log data for the SDK’s operation.
@@ -4502,6 +4598,24 @@ abstract class RtcEngine { required RenderModeType renderMode, required VideoMirrorModeType mirrorMode}); + /// Sets the maximum frame rate for rendering local video. + /// + /// * [sourceType] The type of the video source. See VideoSourceType. + /// * [targetFps] The capture frame rate (fps) of the local video. Supported values are: 1, 7, 10, 15, 24, 30, 60. Set this parameter to a value lower than the actual video frame rate; otherwise, the settings do not take effect. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + Future setLocalRenderTargetFps( + {required VideoSourceType sourceType, required int targetFps}); + + /// Sets the maximum frame rate for rendering remote video. + /// + /// * [targetFps] The capture frame rate (fps) of the local video. Supported values are: 1, 7, 10, 15, 24, 30, 60. Set this parameter to a value lower than the actual video frame rate; otherwise, the settings do not take effect. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + Future setRemoteRenderTargetFps(int targetFps); + /// Sets the local video mirror mode. /// /// Deprecated: This method is deprecated. Use setupLocalVideo or setLocalRenderMode instead. @@ -4546,6 +4660,9 @@ abstract class RtcEngine { Future setDualStreamMode( {required SimulcastStreamMode mode, SimulcastStreamConfig? streamConfig}); + /// @nodoc + Future setSimulcastConfig(SimulcastConfig simulcastConfig); + /// Sets whether to enable the local playback of external audio source. /// /// Ensure you have called the createCustomAudioTrack method to create a custom audio track before calling this method. 
After calling this method to enable the local playback of external audio source, if you need to stop local playback, you can call this method again and set enabled to false. You can call adjustCustomAudioPlayoutVolume to adjust the local playback volume of the custom audio track. @@ -4815,11 +4932,11 @@ abstract class RtcEngine { /// /// If you enable loopback audio capturing, the output of the sound card is mixed into the audio stream sent to the other end. /// This method applies to the macOS and Windows only. - /// macOS does not support loopback audio capture of the default sound card. If you need to use this function, use a virtual sound card and pass its name to the deviceName parameter. Agora recommends using AgoraALD as the virtual sound card for audio capturing. + /// The macOS system's default sound card does not support recording functionality. As of v4.5.0, when you call this method for the first time, the SDK will automatically install the built-in AgoraALD virtual sound card developed by Agora. After successful installation, the audio routing will automatically switch to the virtual sound card and use it for audio capturing. /// You can call this method either before or after joining a channel. /// If you call the disableAudio method to disable the audio module, audio capturing will be disabled as well. If you need to enable audio capturing, call the enableAudio method to enable the audio module and then call the enableLoopbackRecording method. /// - /// * [enabled] Sets whether to enable loopback audio capturing. true : Enable loopback audio capturing. false : (Default) Disable loopback audio capturing. + /// * [enabled] Sets whether to enable loopback audio capturing. true : Enable sound card capturing. You can find the name of the virtual sound card in your system's Audio Devices > Output. false : Disable sound card capturing. The name of the virtual sound card will not be shown in your system's Audio Devices > Output. 
/// * [deviceName] macOS: The device name of the virtual sound card. The default value is set to NULL, which means using AgoraALD for loopback audio capturing. /// Windows: The device name of the sound card. The default is set to NULL, which means the SDK uses the sound card of your device for loopback audio capturing. /// @@ -4960,7 +5077,7 @@ abstract class RtcEngine { /// Checks whether the device camera supports face detection. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// /// Returns @@ -4969,7 +5086,7 @@ abstract class RtcEngine { /// Checks whether the device supports camera flash. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// The app enables the front camera by default. If your front camera does not support flash, this method returns false. If you want to check whether the rear camera supports the flash function, call switchCamera before this method. /// On iPads with system version 15, even if isCameraTorchSupported returns true, you might fail to successfully enable the flash by calling setCameraTorchOn due to system issues. @@ -4980,7 +5097,7 @@ abstract class RtcEngine { /// Check whether the device supports the manual focus function. 
/// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// /// Returns @@ -4989,7 +5106,7 @@ abstract class RtcEngine { /// Checks whether the device supports the face auto-focus function. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// /// Returns @@ -5021,7 +5138,7 @@ abstract class RtcEngine { /// Gets the maximum zoom ratio supported by the camera. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// /// Returns @@ -5065,7 +5182,7 @@ abstract class RtcEngine { /// Checks whether the device supports manual exposure. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. 
/// /// Returns @@ -5089,7 +5206,7 @@ abstract class RtcEngine { /// Queries whether the current camera supports adjusting exposure value. /// /// This method is for Android and iOS only. - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// Before calling setCameraExposureFactor, Agora recommends that you call this method to query whether the current camera supports adjusting the exposure value. /// By calling this method, you adjust the exposure value of the currently active camera, that is, the camera specified when calling setCameraCapturerConfiguration. /// @@ -5113,7 +5230,7 @@ abstract class RtcEngine { /// Checks whether the device supports auto exposure. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method applies to iOS only. /// /// Returns @@ -5321,7 +5438,7 @@ abstract class RtcEngine { /// This method is for Windows and macOS only. /// Call this method after starting screen sharing or window sharing. /// - /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters. The video properties of the screen sharing stream only need to be set through this parameter, and are unrelated to setVideoEncoderConfiguration. + /// * [captureParams] The screen sharing encoding parameters. See ScreenCaptureParameters. 
The video properties of the screen sharing stream only need to be set through this parameter, and are unrelated to setVideoEncoderConfiguration. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -5335,7 +5452,7 @@ abstract class RtcEngine { /// When you do not pass in a value, Agora bills you at 1280 × 720. /// When you pass in a value, Agora bills you at that value. /// - /// * [captureParams] The screen sharing encoding parameters. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2. + /// * [captureParams] The screen sharing encoding parameters. See ScreenCaptureParameters2. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -5349,7 +5466,7 @@ abstract class RtcEngine { /// This method is for Android and iOS only. /// On the iOS platform, screen sharing is only available on iOS 12.0 and later. /// - /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2. + /// * [captureParams] The screen sharing encoding parameters. See ScreenCaptureParameters2. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -5370,6 +5487,16 @@ abstract class RtcEngine { /// Returns an array of FocalLengthInfo objects, which contain the camera's orientation and focal length type. 
Future> queryCameraFocalLengthCapability(); + /// Configures MediaProjection outside of the SDK to capture screen video streams. + /// + /// This method is for Android only. After successfully calling this method, the external MediaProjection you set will replace the MediaProjection requested by the SDK to capture the screen video stream. When the screen sharing is stopped or RtcEngine is destroyed, the SDK will automatically release the MediaProjection. + /// + /// * [mediaProjection] An object used to capture screen video streams. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + Future setExternalMediaProjection(int mediaProjection); + /// Sets the screen sharing scenario. /// /// When you start screen sharing or window sharing, you can call this method to set the screen sharing scenario. The SDK adjusts the video quality and experience of the sharing according to the scenario. Agora recommends that you call this method before joining a channel. @@ -5493,6 +5620,37 @@ abstract class RtcEngine { /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopLocalVideoTranscoder(); + /// Starts local audio mixing. + /// + /// This method supports merging multiple audio streams into one audio stream locally. For example, merging the audio streams captured from the local microphone, and that from the media player, the sound card, and the remote users into one audio stream, and then publish the merged audio stream to the channel. + /// If you want to mix the locally captured audio streams, you can set publishMixedAudioTrack in ChannelMediaOptions to true, and then publish the mixed audio stream to the channel. 
+ /// If you want to mix the remote audio stream, ensure that the remote audio stream has been published in the channel and you have subscribed to the audio stream that you need to mix. + /// + /// * [config] The configurations for mixing the local audio. See LocalAudioMixerConfiguration. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + Future startLocalAudioMixer(LocalAudioMixerConfiguration config); + + /// Updates the configurations for mixing audio streams locally. + /// + /// After calling startLocalAudioMixer, call this method if you want to update the local audio mixing configuration. + /// + /// * [config] The configurations for mixing the local audio. See LocalAudioMixerConfiguration. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + Future updateLocalAudioMixerConfiguration( + LocalAudioMixerConfiguration config); + + /// Stops the local audio mixing. + /// + /// After calling startLocalAudioMixer, call this method if you want to stop the local audio mixing. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + Future stopLocalAudioMixer(); + /// Starts camera capture. /// /// You can call this method to start capturing video from one or more cameras by specifying sourceType. On the iOS platform, if you want to enable multi-camera capture, you need to call enableMultiCamera and set enabled to true before calling this method. 
@@ -5567,12 +5725,6 @@ abstract class RtcEngine { Future setRemoteUserPriority( {required int uid, required PriorityType userPriority}); - /// @nodoc - Future setEncryptionMode(String encryptionMode); - - /// @nodoc - Future setEncryptionSecret(String secret); - /// Enables or disables the built-in encryption. /// /// After the user leaves the channel, the SDK automatically disables the built-in encryption. To enable the built-in encryption, call this method before the user joins the channel again. @@ -5597,9 +5749,8 @@ abstract class RtcEngine { /// Sends data stream messages. /// /// After calling createDataStream, you can call this method to send data stream messages to all users in the channel. The SDK has the following restrictions on this method: - /// Each user can have up to five data streams simultaneously. - /// Up to 60 packets can be sent per second in a data stream with each packet having a maximum size of 1 KB. - /// Up to 30 KB of data can be sent per second in a data stream. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client. + /// Each client within the channel can have up to 5 data channels simultaneously, with a total shared packet bitrate limit of 30 KB/s for all data channels. + /// Each data channel can send up to 60 packets per second, with each packet being a maximum of 1 KB. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client. /// This method needs to be called after createDataStream and joining the channel. /// In live streaming scenarios, this method only applies to hosts. 
/// @@ -5990,7 +6141,7 @@ abstract class RtcEngine { /// When video screenshot and upload function is enabled, the SDK takes screenshots and uploads videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig. After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service. /// /// * [enabled] Whether to enable video screenshot and upload: true : Enables video screenshot and upload. false : Disables video screenshot and upload. - /// * [config] Screenshot and upload configuration. See ContentInspectConfig. When the video moderation module is set to video moderation via Agora self-developed extension(contentInspectSupervision), the video screenshot and upload dynamic library libagora_content_inspect_extension.dll is required. Deleting this library disables the screenshot and upload feature. + /// * [config] Screenshot and upload configuration. See ContentInspectConfig. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -6026,7 +6177,7 @@ abstract class RtcEngine { /// Sets up cloud proxy service. /// /// When users' network access is restricted by a firewall, configure the firewall to allow specific IP addresses and ports provided by Agora; then, call this method to enable the cloud proxyType and set the cloud proxy type with the proxyType parameter. After successfully connecting to the cloud proxy, the SDK triggers the onConnectionStateChanged (connectionStateConnecting, connectionChangedSettingProxyServer) callback. To disable the cloud proxy that has been set, call the setCloudProxy (noneProxy). To change the cloud proxy type that has been set, call the setCloudProxy (noneProxy) first, and then call the setCloudProxy to set the proxyType you want. 
- /// Agora recommends that you call this method after joining a channel. + /// Agora recommends that you call this method before joining a channel. /// When a user is behind a firewall and uses the Force UDP cloud proxy, the services for Media Push and cohosting across channels are not available. /// When you use the Force TCP cloud proxy, note that an error would occur when calling the startAudioMixing method to play online music files in the HTTP protocol. The services for Media Push and cohosting across channels use the cloud proxy with the TCP protocol. /// @@ -6143,6 +6294,9 @@ abstract class RtcEngine { Future sendAudioMetadata( {required Uint8List metadata, required int length}); + /// @nodoc + Future queryHDRCapability(VideoModuleType videoModule); + /// Starts screen capture from the specified video source. /// /// This method applies to the macOS and Windows only. @@ -6259,6 +6413,18 @@ abstract class RtcEngine { /// Returns /// The native handle of the SDK. Future getNativeHandle(); + + /// Takes a screenshot of the video at the specified observation point. + /// + /// This method takes a snapshot of a video stream from the specified user, generates a JPG image, and saves it to the specified path. + /// + /// * [uid] The user ID. Set uid as 0 if you want to take a snapshot of the local user's video. + /// * [config] The configuration of the snapshot. See SnapshotConfig. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + Future takeSnapshotWithConfig( + {required int uid, required SnapshotConfig config}); } /// @nodoc @@ -6301,6 +6467,10 @@ enum MediaDeviceStateType { @JsonValue(2) mediaDeviceStateDisabled, + /// 3: The device is plugged in. + @JsonValue(3) + mediaDeviceStatePluggedIn, + /// 4: The device is not found. 
@JsonValue(4) mediaDeviceStateNotPresent, diff --git a/lib/src/agora_rtc_engine.g.dart b/lib/src/agora_rtc_engine.g.dart index 0d18a4bd6..555752915 100644 --- a/lib/src/agora_rtc_engine.g.dart +++ b/lib/src/agora_rtc_engine.g.dart @@ -40,6 +40,9 @@ LocalVideoStats _$LocalVideoStatsFromJson(Map json) => _$CaptureBrightnessLevelTypeEnumMap, json['captureBrightnessLevel']), dualStreamEnabled: json['dualStreamEnabled'] as bool?, hwEncoderAccelerating: (json['hwEncoderAccelerating'] as num?)?.toInt(), + simulcastDimensions: (json['simulcastDimensions'] as List?) + ?.map((e) => VideoDimensions.fromJson(e as Map)) + .toList(), ); Map _$LocalVideoStatsToJson(LocalVideoStats instance) { @@ -78,6 +81,8 @@ Map _$LocalVideoStatsToJson(LocalVideoStats instance) { _$CaptureBrightnessLevelTypeEnumMap[instance.captureBrightnessLevel]); writeNotNull('dualStreamEnabled', instance.dualStreamEnabled); writeNotNull('hwEncoderAccelerating', instance.hwEncoderAccelerating); + writeNotNull('simulcastDimensions', + instance.simulcastDimensions?.map((e) => e.toJson()).toList()); return val; } @@ -170,6 +175,7 @@ RemoteVideoStats _$RemoteVideoStatsFromJson(Map json) => width: (json['width'] as num?)?.toInt(), height: (json['height'] as num?)?.toInt(), receivedBitrate: (json['receivedBitrate'] as num?)?.toInt(), + decoderInputFrameRate: (json['decoderInputFrameRate'] as num?)?.toInt(), decoderOutputFrameRate: (json['decoderOutputFrameRate'] as num?)?.toInt(), rendererOutputFrameRate: (json['rendererOutputFrameRate'] as num?)?.toInt(), @@ -201,6 +207,7 @@ Map _$RemoteVideoStatsToJson(RemoteVideoStats instance) { writeNotNull('width', instance.width); writeNotNull('height', instance.height); writeNotNull('receivedBitrate', instance.receivedBitrate); + writeNotNull('decoderInputFrameRate', instance.decoderInputFrameRate); writeNotNull('decoderOutputFrameRate', instance.decoderOutputFrameRate); writeNotNull('rendererOutputFrameRate', instance.rendererOutputFrameRate); 
writeNotNull('frameLossRate', instance.frameLossRate); @@ -219,6 +226,12 @@ Map _$RemoteVideoStatsToJson(RemoteVideoStats instance) { const _$VideoStreamTypeEnumMap = { VideoStreamType.videoStreamHigh: 0, VideoStreamType.videoStreamLow: 1, + VideoStreamType.videoStreamLayer1: 4, + VideoStreamType.videoStreamLayer2: 5, + VideoStreamType.videoStreamLayer3: 6, + VideoStreamType.videoStreamLayer4: 7, + VideoStreamType.videoStreamLayer5: 8, + VideoStreamType.videoStreamLayer6: 9, }; VideoCompositingLayout _$VideoCompositingLayoutFromJson( @@ -657,6 +670,7 @@ ChannelMediaOptions _$ChannelMediaOptionsFromJson(Map json) => isInteractiveAudience: json['isInteractiveAudience'] as bool?, customVideoTrackId: (json['customVideoTrackId'] as num?)?.toInt(), isAudioFilterable: json['isAudioFilterable'] as bool?, + parameters: json['parameters'] as String?, ); Map _$ChannelMediaOptionsToJson(ChannelMediaOptions instance) { @@ -715,6 +729,7 @@ Map _$ChannelMediaOptionsToJson(ChannelMediaOptions instance) { writeNotNull('isInteractiveAudience', instance.isInteractiveAudience); writeNotNull('customVideoTrackId', instance.customVideoTrackId); writeNotNull('isAudioFilterable', instance.isAudioFilterable); + writeNotNull('parameters', instance.parameters); return val; } @@ -822,6 +837,7 @@ const _$ThreadPriorityTypeEnumMap = { }; Metadata _$MetadataFromJson(Map json) => Metadata( + channelId: json['channelId'] as String?, uid: (json['uid'] as num?)?.toInt(), size: (json['size'] as num?)?.toInt(), timeStampMs: (json['timeStampMs'] as num?)?.toInt(), @@ -836,6 +852,7 @@ Map _$MetadataToJson(Metadata instance) { } } + writeNotNull('channelId', instance.channelId); writeNotNull('uid', instance.uid); writeNotNull('size', instance.size); writeNotNull('timeStampMs', instance.timeStampMs); @@ -1034,6 +1051,7 @@ const _$AudioMixingReasonTypeEnumMap = { AudioMixingReasonType.audioMixingReasonOneLoopCompleted: 721, AudioMixingReasonType.audioMixingReasonAllLoopsCompleted: 723, 
AudioMixingReasonType.audioMixingReasonStoppedByUser: 724, + AudioMixingReasonType.audioMixingReasonResumedByUser: 726, AudioMixingReasonType.audioMixingReasonOk: 0, }; @@ -1076,6 +1094,12 @@ const _$StreamFallbackOptionsEnumMap = { StreamFallbackOptions.streamFallbackOptionDisabled: 0, StreamFallbackOptions.streamFallbackOptionVideoStreamLow: 1, StreamFallbackOptions.streamFallbackOptionAudioOnly: 2, + StreamFallbackOptions.streamFallbackOptionVideoStreamLayer1: 3, + StreamFallbackOptions.streamFallbackOptionVideoStreamLayer2: 4, + StreamFallbackOptions.streamFallbackOptionVideoStreamLayer3: 5, + StreamFallbackOptions.streamFallbackOptionVideoStreamLayer4: 6, + StreamFallbackOptions.streamFallbackOptionVideoStreamLayer5: 7, + StreamFallbackOptions.streamFallbackOptionVideoStreamLayer6: 8, }; const _$PriorityTypeEnumMap = { @@ -1146,6 +1170,7 @@ const _$MediaDeviceStateTypeEnumMap = { MediaDeviceStateType.mediaDeviceStateIdle: 0, MediaDeviceStateType.mediaDeviceStateActive: 1, MediaDeviceStateType.mediaDeviceStateDisabled: 2, + MediaDeviceStateType.mediaDeviceStatePluggedIn: 3, MediaDeviceStateType.mediaDeviceStateNotPresent: 4, MediaDeviceStateType.mediaDeviceStateUnplugged: 8, }; diff --git a/lib/src/agora_rtc_engine_ex.dart b/lib/src/agora_rtc_engine_ex.dart index d155f28f9..b8e94ce49 100644 --- a/lib/src/agora_rtc_engine_ex.dart +++ b/lib/src/agora_rtc_engine_ex.dart @@ -67,6 +67,12 @@ abstract class RtcEngineEx implements RtcEngine { Future leaveChannelEx( {required RtcConnection connection, LeaveChannelOptions? options}); + /// @nodoc + Future leaveChannelWithUserAccountEx( + {required String channelId, + required String userAccount, + LeaveChannelOptions? options}); + /// Updates the channel media options after joining the channel. /// /// * [options] The channel media options. See ChannelMediaOptions. @@ -427,9 +433,8 @@ abstract class RtcEngineEx implements RtcEngine { /// Sends data stream messages. 
/// /// A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client. The SDK has the following restrictions on this method: - /// Each user can have up to five data streams simultaneously. - /// Up to 60 packets can be sent per second in a data stream with each packet having a maximum size of 1 KB. - /// Up to 30 KB of data can be sent per second in a data stream. After calling createDataStreamEx, you can call this method to send data stream messages to all users in the channel. + /// Each client within the channel can have up to 5 data channels simultaneously, with a total shared packet bitrate limit of 30 KB/s for all data channels. + /// Each data channel can send up to 60 packets per second, with each packet being a maximum of 1 KB. After calling createDataStreamEx, you can call this method to send data stream messages to all users in the channel. /// Call this method after joinChannelEx. /// Ensure that you call createDataStreamEx to create a data channel before calling this method. /// This method applies only to the COMMUNICATION profile or to the hosts in the LIVE_BROADCASTING profile. If an audience in the LIVE_BROADCASTING profile calls this method, the audience may be switched to a host. @@ -668,6 +673,11 @@ abstract class RtcEngineEx implements RtcEngine { required SimulcastStreamConfig streamConfig, required RtcConnection connection}); + /// @nodoc + Future setSimulcastConfigEx( + {required SimulcastConfig simulcastConfig, + required RtcConnection connection}); + /// @nodoc Future setHighPriorityUserListEx( {required List uidList, @@ -699,7 +709,7 @@ abstract class RtcEngineEx implements RtcEngine { /// This method can take screenshots for multiple video streams and upload them. 
When video screenshot and upload function is enabled, the SDK takes screenshots and uploads videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig. After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service. /// /// * [enabled] Whether to enalbe video screenshot and upload: true : Enables video screenshot and upload. false : Disables video screenshot and upload. - /// * [config] Screenshot and upload configuration. See ContentInspectConfig. When the video moderation module is set to video moderation via Agora self-developed extension(contentInspectSupervision), the video screenshot and upload dynamic library libagora_content_inspect_extension.dll is required. Deleting this library disables the screenshot and upload feature. + /// * [config] Screenshot and upload configuration. See ContentInspectConfig. /// * [connection] The connection information. See RtcConnection. /// /// Returns @@ -726,7 +736,7 @@ abstract class RtcEngineEx implements RtcEngine { /// Gets the call ID with the connection ID. /// - /// When a user joins a channel on a client, a callId is generated to identify the call from the client. You can call this method to get the callId parameter, and pass it in when calling methods such as rate and complain. + /// When a user joins a channel on a client, a callId is generated to identify the call from the client. You can call this method to get callId, and pass it in when calling methods such as rate and complain. /// /// * [connection] The connection information. See RtcConnection. /// @@ -739,4 +749,19 @@ abstract class RtcEngineEx implements RtcEngine { {required RtcConnection connection, required Uint8List metadata, required int length}); + + /// Gets a video screenshot of the specified observation point using the connection ID. 
+ /// + /// This method takes a snapshot of a video stream from the specified user, generates a JPG image, and saves it to the specified path. + /// + /// * [connection] The connection information. See RtcConnection. + /// * [uid] The user ID. Set uid as 0 if you want to take a snapshot of the local user's video. + /// * [config] The configuration of the snaptshot. See SnapshotConfig. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + Future takeSnapshotWithConfigEx( + {required RtcConnection connection, + required int uid, + required SnapshotConfig config}); } diff --git a/lib/src/audio_device_manager.dart b/lib/src/audio_device_manager.dart index 0a94bb61c..ebcab7694 100644 --- a/lib/src/audio_device_manager.dart +++ b/lib/src/audio_device_manager.dart @@ -163,10 +163,18 @@ abstract class AudioDeviceManager { /// true : The audio playback device is muted. false : The audio playback device is unmuted. Future getPlaybackDeviceMute(); - /// @nodoc + /// Sets the mute status of the audio capture device. + /// + /// * [mute] Whether to mute the audio recording device: true : Mute the audio capture device. false : Unmute the audio capture device. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setRecordingDeviceMute(bool mute); - /// @nodoc + /// Gets whether the audio capture device is muted. + /// + /// Returns + /// true : The microphone is muted. false : The microphone is unmuted. Future getRecordingDeviceMute(); /// Starts the audio playback device test. 
diff --git a/lib/src/binding/agora_media_engine_impl.dart b/lib/src/binding/agora_media_engine_impl.dart index 16b06cb3a..485093ed7 100644 --- a/lib/src/binding/agora_media_engine_impl.dart +++ b/lib/src/binding/agora_media_engine_impl.dart @@ -159,6 +159,23 @@ class MediaEngineImpl implements MediaEngine { } } + @override + Future setExternalRemoteEglContext(int eglContext) async { + final apiType = + '${isOverrideClassName ? className : 'MediaEngine'}_setExternalRemoteEglContext_f337cbf'; + final param = createParams({'eglContext': eglContext}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future setExternalAudioSource( {required bool enabled, diff --git a/lib/src/binding/agora_rtc_engine_event_impl.dart b/lib/src/binding/agora_rtc_engine_event_impl.dart index 86633d767..b3a661bdf 100644 --- a/lib/src/binding/agora_rtc_engine_event_impl.dart +++ b/lib/src/binding/agora_rtc_engine_event_impl.dart @@ -1808,84 +1808,77 @@ class RtcEngineEventHandlerWrapper implements EventLoopEventHandler { connection, uid, metadata, length); return true; - case 'onExtensionEvent_062d13c': - if (rtcEngineEventHandler.onExtensionEvent == null) { + case 'onExtensionEventWithContext_a5fb27a': + if (rtcEngineEventHandler.onExtensionEventWithContext == null) { return true; } final jsonMap = jsonDecode(eventData); - RtcEngineEventHandlerOnExtensionEventJson paramJson = - RtcEngineEventHandlerOnExtensionEventJson.fromJson(jsonMap); + RtcEngineEventHandlerOnExtensionEventWithContextJson paramJson = + RtcEngineEventHandlerOnExtensionEventWithContextJson.fromJson( + jsonMap); paramJson = paramJson.fillBuffers(buffers); - String? 
provider = paramJson.provider; - String? extension = paramJson.extension; + ExtensionContext? context = paramJson.context; String? key = paramJson.key; String? value = paramJson.value; - if (provider == null || - extension == null || - key == null || - value == null) { + if (context == null || key == null || value == null) { return true; } - - rtcEngineEventHandler.onExtensionEvent!( - provider, extension, key, value); + context = context.fillBuffers(buffers); + rtcEngineEventHandler.onExtensionEventWithContext!(context, key, value); return true; - case 'onExtensionStarted_ccad422': - if (rtcEngineEventHandler.onExtensionStarted == null) { + case 'onExtensionStartedWithContext_67c38e3': + if (rtcEngineEventHandler.onExtensionStartedWithContext == null) { return true; } final jsonMap = jsonDecode(eventData); - RtcEngineEventHandlerOnExtensionStartedJson paramJson = - RtcEngineEventHandlerOnExtensionStartedJson.fromJson(jsonMap); + RtcEngineEventHandlerOnExtensionStartedWithContextJson paramJson = + RtcEngineEventHandlerOnExtensionStartedWithContextJson.fromJson( + jsonMap); paramJson = paramJson.fillBuffers(buffers); - String? provider = paramJson.provider; - String? extension = paramJson.extension; - if (provider == null || extension == null) { + ExtensionContext? 
context = paramJson.context; + if (context == null) { return true; } - - rtcEngineEventHandler.onExtensionStarted!(provider, extension); + context = context.fillBuffers(buffers); + rtcEngineEventHandler.onExtensionStartedWithContext!(context); return true; - case 'onExtensionStopped_ccad422': - if (rtcEngineEventHandler.onExtensionStopped == null) { + case 'onExtensionStoppedWithContext_67c38e3': + if (rtcEngineEventHandler.onExtensionStoppedWithContext == null) { return true; } final jsonMap = jsonDecode(eventData); - RtcEngineEventHandlerOnExtensionStoppedJson paramJson = - RtcEngineEventHandlerOnExtensionStoppedJson.fromJson(jsonMap); + RtcEngineEventHandlerOnExtensionStoppedWithContextJson paramJson = + RtcEngineEventHandlerOnExtensionStoppedWithContextJson.fromJson( + jsonMap); paramJson = paramJson.fillBuffers(buffers); - String? provider = paramJson.provider; - String? extension = paramJson.extension; - if (provider == null || extension == null) { + ExtensionContext? context = paramJson.context; + if (context == null) { return true; } - - rtcEngineEventHandler.onExtensionStopped!(provider, extension); + context = context.fillBuffers(buffers); + rtcEngineEventHandler.onExtensionStoppedWithContext!(context); return true; - case 'onExtensionError_bd3489b': - if (rtcEngineEventHandler.onExtensionError == null) { + case 'onExtensionErrorWithContext_a452f11': + if (rtcEngineEventHandler.onExtensionErrorWithContext == null) { return true; } final jsonMap = jsonDecode(eventData); - RtcEngineEventHandlerOnExtensionErrorJson paramJson = - RtcEngineEventHandlerOnExtensionErrorJson.fromJson(jsonMap); + RtcEngineEventHandlerOnExtensionErrorWithContextJson paramJson = + RtcEngineEventHandlerOnExtensionErrorWithContextJson.fromJson( + jsonMap); paramJson = paramJson.fillBuffers(buffers); - String? provider = paramJson.provider; - String? extension = paramJson.extension; + ExtensionContext? context = paramJson.context; int? error = paramJson.error; String? 
message = paramJson.message; - if (provider == null || - extension == null || - error == null || - message == null) { + if (context == null || error == null || message == null) { return true; } - - rtcEngineEventHandler.onExtensionError!( - provider, extension, error, message); + context = context.fillBuffers(buffers); + rtcEngineEventHandler.onExtensionErrorWithContext!( + context, error, message); return true; case 'onSetRtmFlagResult_263e4cd': diff --git a/lib/src/binding/agora_rtc_engine_ex_impl.dart b/lib/src/binding/agora_rtc_engine_ex_impl.dart index 3980d429c..0867b3c17 100644 --- a/lib/src/binding/agora_rtc_engine_ex_impl.dart +++ b/lib/src/binding/agora_rtc_engine_ex_impl.dart @@ -75,6 +75,34 @@ class RtcEngineExImpl extends RtcEngineImpl implements RtcEngineEx { } } + @override + Future leaveChannelWithUserAccountEx( + {required String channelId, + required String userAccount, + LeaveChannelOptions? options}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngineEx'}_leaveChannelWithUserAccountEx_8bbe372'; + final param = createParams({ + 'channelId': channelId, + 'userAccount': userAccount, + 'options': options?.toJson() + }); + final List buffers = []; + if (options != null) { + buffers.addAll(options.collectBufferList()); + } + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future updateChannelMediaOptionsEx( {required ChannelMediaOptions options, @@ -1084,6 +1112,31 @@ class RtcEngineExImpl extends RtcEngineImpl implements RtcEngineEx { } } + @override + Future setSimulcastConfigEx( + {required SimulcastConfig simulcastConfig, + required RtcConnection connection}) async { + final apiType = + 
'${isOverrideClassName ? className : 'RtcEngineEx'}_setSimulcastConfigEx_bd8d7d0'; + final param = createParams({ + 'simulcastConfig': simulcastConfig.toJson(), + 'connection': connection.toJson() + }); + final List buffers = []; + buffers.addAll(simulcastConfig.collectBufferList()); + buffers.addAll(connection.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future setHighPriorityUserListEx( {required List uidList, @@ -1246,4 +1299,31 @@ class RtcEngineExImpl extends RtcEngineImpl implements RtcEngineEx { throw AgoraRtcException(code: result); } } + + @override + Future takeSnapshotWithConfigEx( + {required RtcConnection connection, + required int uid, + required SnapshotConfig config}) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngineEx'}_takeSnapshotEx_b856417'; + final param = createParams({ + 'connection': connection.toJson(), + 'uid': uid, + 'config': config.toJson() + }); + final List buffers = []; + buffers.addAll(connection.collectBufferList()); + buffers.addAll(config.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } } diff --git a/lib/src/binding/agora_rtc_engine_impl.dart b/lib/src/binding/agora_rtc_engine_impl.dart index 401106606..ff4f79e81 100644 --- a/lib/src/binding/agora_rtc_engine_impl.dart +++ b/lib/src/binding/agora_rtc_engine_impl.dart @@ -661,6 +661,124 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future setFaceShapeBeautyOptions( + {required bool enabled, + required FaceShapeBeautyOptions options, + MediaSourceType type = MediaSourceType.primaryCameraSource}) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_setFaceShapeBeautyOptions_a862ce7'; + final param = createParams({ + 'enabled': enabled, + 'options': options.toJson(), + 'type': type.value() + }); + final List buffers = []; + buffers.addAll(options.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + + @override + Future setFaceShapeAreaOptions( + {required FaceShapeAreaOptions options, + MediaSourceType type = MediaSourceType.primaryCameraSource}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_setFaceShapeAreaOptions_2e242a3'; + final param = + createParams({'options': options.toJson(), 'type': type.value()}); + final List buffers = []; + buffers.addAll(options.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + + @override + Future getFaceShapeBeautyOptions( + {MediaSourceType type = MediaSourceType.primaryCameraSource}) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_getFaceShapeBeautyOptions_8382895'; + final param = createParams({'type': type.value()}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + final getFaceShapeBeautyOptionsJson = + RtcEngineGetFaceShapeBeautyOptionsJson.fromJson(rm); + return getFaceShapeBeautyOptionsJson.options; + } + + @override + Future getFaceShapeAreaOptions( + {required FaceShapeArea shapeArea, + MediaSourceType type = MediaSourceType.primaryCameraSource}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_getFaceShapeAreaOptions_0783e2c'; + final param = + createParams({'shapeArea': shapeArea.value(), 'type': type.value()}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + final getFaceShapeAreaOptionsJson = + RtcEngineGetFaceShapeAreaOptionsJson.fromJson(rm); + return getFaceShapeAreaOptionsJson.options; + } + + @override + Future setFilterEffectOptions( + {required bool enabled, + required FilterEffectOptions options, + MediaSourceType type = MediaSourceType.primaryCameraSource}) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_setFilterEffectOptions_53b4be3'; + final param = createParams({ + 'enabled': enabled, + 'options': options.toJson(), + 'type': type.value() + }); + final List buffers = []; + buffers.addAll(options.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future setLowlightEnhanceOptions( {required bool enabled, @@ -965,23 +1083,6 @@ class RtcEngineImpl implements RtcEngine { } } - @override - Future setDefaultMuteAllRemoteAudioStreams(bool mute) async { - final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_setDefaultMuteAllRemoteAudioStreams_5039d15'; - final param = createParams({'mute': mute}); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: null)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - @override Future muteRemoteAudioStream( {required int uid, required bool mute}) async { @@ -1051,23 +1152,6 @@ class RtcEngineImpl implements RtcEngine { } } - @override - Future setDefaultMuteAllRemoteVideoStreams(bool mute) async { - final apiType = - '${isOverrideClassName ? 
className : 'RtcEngine'}_setDefaultMuteAllRemoteVideoStreams_5039d15'; - final param = createParams({'mute': mute}); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: null)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - @override Future setRemoteDefaultVideoStreamType( VideoStreamType streamType) async { @@ -2273,6 +2357,24 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future enableVoiceAITuner( + {required bool enabled, required VoiceAiTunerType type}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_enableVoiceAITuner_28f5d5b'; + final param = createParams({'enabled': enabled, 'type': type.value()}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future setLogFile(String filePath) async { final apiType = @@ -2422,6 +2524,42 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future setLocalRenderTargetFps( + {required VideoSourceType sourceType, required int targetFps}) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_setLocalRenderTargetFps_2ad83d8'; + final param = createParams( + {'sourceType': sourceType.value(), 'targetFps': targetFps}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + + @override + Future setRemoteRenderTargetFps(int targetFps) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_setRemoteRenderTargetFps_46f8ab7'; + final param = createParams({'targetFps': targetFps}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future setLocalVideoMirrorMode(VideoMirrorModeType mirrorMode) async { final apiType = @@ -2486,6 +2624,25 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future setSimulcastConfig(SimulcastConfig simulcastConfig) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_setSimulcastConfig_3dcdfd7'; + final param = createParams({'simulcastConfig': simulcastConfig.toJson()}); + final List buffers = []; + buffers.addAll(simulcastConfig.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future enableCustomAudioLocalPlayback( {required int trackId, required bool enabled}) async { @@ -3599,7 +3756,7 @@ class RtcEngineImpl implements RtcEngine { required Rectangle regionRect, required ScreenCaptureParameters captureParams}) async { final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_startScreenCaptureByDisplayId_7cf6800'; + '${isOverrideClassName ? className : 'RtcEngine'}_startScreenCaptureByDisplayId_ce89867'; final param = createParams({ 'displayId': displayId, 'regionRect': regionRect.toJson(), @@ -3673,7 +3830,7 @@ class RtcEngineImpl implements RtcEngine { required Rectangle regionRect, required ScreenCaptureParameters captureParams}) async { final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_startScreenCaptureByWindowId_5ab7e59'; + '${isOverrideClassName ? className : 'RtcEngine'}_startScreenCaptureByWindowId_ce89867'; final param = createParams({ 'windowId': windowId, 'regionRect': regionRect.toJson(), @@ -3825,6 +3982,23 @@ class RtcEngineImpl implements RtcEngine { return queryCameraFocalLengthCapabilityJson.focalLengthInfos; } + @override + Future setExternalMediaProjection(int mediaProjection) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_setExternalMediaProjection_f337cbf'; + final param = createParams({'mediaProjection': mediaProjection}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future setScreenCaptureScenario( ScreenScenarioType screenScenario) async { @@ -4049,6 +4223,62 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future startLocalAudioMixer(LocalAudioMixerConfiguration config) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_startLocalAudioMixer_a7ff78e'; + final param = createParams({'config': config.toJson()}); + final List buffers = []; + buffers.addAll(config.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + + @override + Future updateLocalAudioMixerConfiguration( + LocalAudioMixerConfiguration config) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_updateLocalAudioMixerConfiguration_a7ff78e'; + final param = createParams({'config': config.toJson()}); + final List buffers = []; + buffers.addAll(config.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + + @override + Future stopLocalAudioMixer() async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_stopLocalAudioMixer'; + final param = createParams({}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future startCameraCapture( {required VideoSourceType sourceType, @@ -4196,40 +4426,6 @@ class RtcEngineImpl implements RtcEngine { } } - @override - Future setEncryptionMode(String encryptionMode) async { - final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_setEncryptionMode_3a2037f'; - final param = createParams({'encryptionMode': encryptionMode}); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: null)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - - @override - Future setEncryptionSecret(String secret) async { - final apiType = - '${isOverrideClassName ? 
className : 'RtcEngine'}_setEncryptionSecret_3a2037f'; - final param = createParams({'secret': secret}); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: null)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - @override Future enableEncryption( {required bool enabled, required EncryptionConfig config}) async { @@ -5180,6 +5376,25 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future queryHDRCapability(VideoModuleType videoModule) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_queryHDRCapability_bebdacb'; + final param = createParams({'videoModule': videoModule.value()}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + final queryHDRCapabilityJson = RtcEngineQueryHDRCapabilityJson.fromJson(rm); + return queryHDRCapabilityJson.capability; + } + @override Future startScreenCaptureBySourceType( {required VideoSourceType sourceType, @@ -5418,4 +5633,24 @@ class RtcEngineImpl implements RtcEngine { final result = rm['result']; return result as int; } + + @override + Future takeSnapshotWithConfig( + {required int uid, required SnapshotConfig config}) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_takeSnapshot_5669ea6'; + final param = createParams({'uid': uid, 'config': config.toJson()}); + final List buffers = []; + buffers.addAll(config.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } } diff --git a/lib/src/binding/call_api_event_handler_buffer_ext.dart b/lib/src/binding/call_api_event_handler_buffer_ext.dart index 891b6e9ee..26d5b5592 100644 --- a/lib/src/binding/call_api_event_handler_buffer_ext.dart +++ b/lib/src/binding/call_api_event_handler_buffer_ext.dart @@ -173,6 +173,30 @@ extension SimulcastStreamConfigBufferExt on SimulcastStreamConfig { } } +extension SimulcastConfigBufferExt on SimulcastConfig { + SimulcastConfig fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + +extension StreamLayerConfigBufferExt on StreamLayerConfig { + StreamLayerConfig fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + extension RectangleBufferExt on Rectangle { Rectangle fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; @@ -385,6 +409,31 @@ extension LocalTranscoderConfigurationBufferExt } } +extension MixedAudioStreamBufferExt on MixedAudioStream { + MixedAudioStream fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + +extension LocalAudioMixerConfigurationBufferExt + on LocalAudioMixerConfiguration { + 
LocalAudioMixerConfiguration fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + extension LastmileProbeConfigBufferExt on LastmileProbeConfig { LastmileProbeConfig fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; @@ -457,6 +506,42 @@ extension BeautyOptionsBufferExt on BeautyOptions { } } +extension FaceShapeAreaOptionsBufferExt on FaceShapeAreaOptions { + FaceShapeAreaOptions fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + +extension FaceShapeBeautyOptionsBufferExt on FaceShapeBeautyOptions { + FaceShapeBeautyOptions fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + +extension FilterEffectOptionsBufferExt on FilterEffectOptions { + FilterEffectOptions fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + extension LowlightEnhanceOptionsBufferExt on LowlightEnhanceOptions { LowlightEnhanceOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; @@ -795,6 +880,18 @@ extension VideoLayoutBufferExt on VideoLayout { } } +extension ExtensionContextBufferExt on ExtensionContext { + ExtensionContext fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + extension AudioParametersBufferExt on AudioParameters { AudioParameters fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; @@ -867,6 +964,30 @@ extension AudioPcmFrameBufferExt on AudioPcmFrame { } } +extension ColorSpaceBufferExt on ColorSpace { + ColorSpace 
fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + +extension Hdr10MetadataInfoBufferExt on Hdr10MetadataInfo { + Hdr10MetadataInfo fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + extension ExternalVideoFrameBufferExt on ExternalVideoFrame { ExternalVideoFrame fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; @@ -896,12 +1017,17 @@ extension ExternalVideoFrameBufferExt on ExternalVideoFrame { timestamp: timestamp, eglType: eglType, textureId: textureId, + fenceObject: fenceObject, matrix: matrix, metadataBuffer: metadataBuffer, metadataSize: metadataSize, alphaBuffer: alphaBuffer, fillAlphaBuffer: fillAlphaBuffer, - textureSliceIndex: textureSliceIndex); + alphaStitchMode: alphaStitchMode, + d3d11Texture2d: d3d11Texture2d, + textureSliceIndex: textureSliceIndex, + hdr10MetadataInfo: hdr10MetadataInfo, + colorSpace: colorSpace); } List collectBufferList() { @@ -964,8 +1090,11 @@ extension VideoFrameBufferExt on VideoFrame { textureId: textureId, matrix: matrix, alphaBuffer: alphaBuffer, + alphaStitchMode: alphaStitchMode, pixelBuffer: pixelBuffer, - metaInfo: metaInfo); + metaInfo: metaInfo, + hdr10MetadataInfo: hdr10MetadataInfo, + colorSpace: colorSpace); } List collectBufferList() { @@ -992,6 +1121,18 @@ extension VideoFrameBufferExt on VideoFrame { } } +extension SnapshotConfigBufferExt on SnapshotConfig { + SnapshotConfig fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + extension AudioFrameBufferExt on AudioFrame { AudioFrame fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; @@ -1509,7 +1650,11 @@ extension MetadataBufferExt on Metadata { buffer = 
bufferList[0]; } return Metadata( - uid: uid, size: size, buffer: buffer, timeStampMs: timeStampMs); + channelId: channelId, + uid: uid, + size: size, + buffer: buffer, + timeStampMs: timeStampMs); } List collectBufferList() { diff --git a/lib/src/binding/call_api_impl_params_json.dart b/lib/src/binding/call_api_impl_params_json.dart index 196cfa8d4..340d8ff81 100644 --- a/lib/src/binding/call_api_impl_params_json.dart +++ b/lib/src/binding/call_api_impl_params_json.dart @@ -273,6 +273,36 @@ class RtcEngineQueryCodecCapabilityJson { _$RtcEngineQueryCodecCapabilityJsonToJson(this); } +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class RtcEngineGetFaceShapeBeautyOptionsJson { + const RtcEngineGetFaceShapeBeautyOptionsJson(this.options); + + @JsonKey(name: 'options') + final FaceShapeBeautyOptions options; + + factory RtcEngineGetFaceShapeBeautyOptionsJson.fromJson( + Map json) => + _$RtcEngineGetFaceShapeBeautyOptionsJsonFromJson(json); + + Map toJson() => + _$RtcEngineGetFaceShapeBeautyOptionsJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class RtcEngineGetFaceShapeAreaOptionsJson { + const RtcEngineGetFaceShapeAreaOptionsJson(this.options); + + @JsonKey(name: 'options') + final FaceShapeAreaOptions options; + + factory RtcEngineGetFaceShapeAreaOptionsJson.fromJson( + Map json) => + _$RtcEngineGetFaceShapeAreaOptionsJsonFromJson(json); + + Map toJson() => + _$RtcEngineGetFaceShapeAreaOptionsJsonToJson(this); +} + @JsonSerializable(explicitToJson: true, includeIfNull: false) class RtcEngineUploadLogFileJson { const RtcEngineUploadLogFileJson(this.requestId); @@ -384,6 +414,20 @@ class RtcEngineGetUserInfoByUidJson { Map toJson() => _$RtcEngineGetUserInfoByUidJsonToJson(this); } +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class RtcEngineQueryHDRCapabilityJson { + const RtcEngineQueryHDRCapabilityJson(this.capability); + + @JsonKey(name: 'capability') + final HdrCapability capability; + + 
factory RtcEngineQueryHDRCapabilityJson.fromJson(Map json) => + _$RtcEngineQueryHDRCapabilityJsonFromJson(json); + + Map toJson() => + _$RtcEngineQueryHDRCapabilityJsonToJson(this); +} + @JsonSerializable(explicitToJson: true, includeIfNull: false) class RtcEngineExCreateDataStreamExJson { const RtcEngineExCreateDataStreamExJson(this.streamId); diff --git a/lib/src/binding/call_api_impl_params_json.g.dart b/lib/src/binding/call_api_impl_params_json.g.dart index 34d356efc..919265014 100644 --- a/lib/src/binding/call_api_impl_params_json.g.dart +++ b/lib/src/binding/call_api_impl_params_json.g.dart @@ -246,6 +246,33 @@ Map _$RtcEngineQueryCodecCapabilityJsonToJson( 'codecInfo': instance.codecInfo.map((e) => e.toJson()).toList(), }; +RtcEngineGetFaceShapeBeautyOptionsJson + _$RtcEngineGetFaceShapeBeautyOptionsJsonFromJson( + Map json) => + RtcEngineGetFaceShapeBeautyOptionsJson( + FaceShapeBeautyOptions.fromJson( + json['options'] as Map), + ); + +Map _$RtcEngineGetFaceShapeBeautyOptionsJsonToJson( + RtcEngineGetFaceShapeBeautyOptionsJson instance) => + { + 'options': instance.options.toJson(), + }; + +RtcEngineGetFaceShapeAreaOptionsJson + _$RtcEngineGetFaceShapeAreaOptionsJsonFromJson(Map json) => + RtcEngineGetFaceShapeAreaOptionsJson( + FaceShapeAreaOptions.fromJson( + json['options'] as Map), + ); + +Map _$RtcEngineGetFaceShapeAreaOptionsJsonToJson( + RtcEngineGetFaceShapeAreaOptionsJson instance) => + { + 'options': instance.options.toJson(), + }; + RtcEngineUploadLogFileJson _$RtcEngineUploadLogFileJsonFromJson( Map json) => RtcEngineUploadLogFileJson( @@ -347,6 +374,24 @@ Map _$RtcEngineGetUserInfoByUidJsonToJson( 'userInfo': instance.userInfo.toJson(), }; +RtcEngineQueryHDRCapabilityJson _$RtcEngineQueryHDRCapabilityJsonFromJson( + Map json) => + RtcEngineQueryHDRCapabilityJson( + $enumDecode(_$HdrCapabilityEnumMap, json['capability']), + ); + +Map _$RtcEngineQueryHDRCapabilityJsonToJson( + RtcEngineQueryHDRCapabilityJson instance) => + { + 'capability': 
_$HdrCapabilityEnumMap[instance.capability]!, + }; + +const _$HdrCapabilityEnumMap = { + HdrCapability.hdrCapabilityUnknown: -1, + HdrCapability.hdrCapabilityUnsupported: 0, + HdrCapability.hdrCapabilitySupported: 1, +}; + RtcEngineExCreateDataStreamExJson _$RtcEngineExCreateDataStreamExJsonFromJson( Map json) => RtcEngineExCreateDataStreamExJson( diff --git a/lib/src/binding/event_handler_param_json.dart b/lib/src/binding/event_handler_param_json.dart index c799b9ccf..c83da9a40 100644 --- a/lib/src/binding/event_handler_param_json.dart +++ b/lib/src/binding/event_handler_param_json.dart @@ -4682,15 +4682,12 @@ extension RtcEngineEventHandlerOnAudioMetadataReceivedJsonBufferExt } @JsonSerializable(explicitToJson: true, includeIfNull: false) -class RtcEngineEventHandlerOnExtensionEventJson { - const RtcEngineEventHandlerOnExtensionEventJson( - {this.provider, this.extension, this.key, this.value}); +class RtcEngineEventHandlerOnExtensionEventWithContextJson { + const RtcEngineEventHandlerOnExtensionEventWithContextJson( + {this.context, this.key, this.value}); - @JsonKey(name: 'provider') - final String? provider; - - @JsonKey(name: 'extension') - final String? extension; + @JsonKey(name: 'context') + final ExtensionContext? context; @JsonKey(name: 'key') final String? key; @@ -4698,17 +4695,17 @@ class RtcEngineEventHandlerOnExtensionEventJson { @JsonKey(name: 'value') final String? 
value; - factory RtcEngineEventHandlerOnExtensionEventJson.fromJson( + factory RtcEngineEventHandlerOnExtensionEventWithContextJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnExtensionEventJsonFromJson(json); + _$RtcEngineEventHandlerOnExtensionEventWithContextJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnExtensionEventJsonToJson(this); + _$RtcEngineEventHandlerOnExtensionEventWithContextJsonToJson(this); } -extension RtcEngineEventHandlerOnExtensionEventJsonBufferExt - on RtcEngineEventHandlerOnExtensionEventJson { - RtcEngineEventHandlerOnExtensionEventJson fillBuffers( +extension RtcEngineEventHandlerOnExtensionEventWithContextJsonBufferExt + on RtcEngineEventHandlerOnExtensionEventWithContextJson { + RtcEngineEventHandlerOnExtensionEventWithContextJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -4721,27 +4718,23 @@ extension RtcEngineEventHandlerOnExtensionEventJsonBufferExt } @JsonSerializable(explicitToJson: true, includeIfNull: false) -class RtcEngineEventHandlerOnExtensionStartedJson { - const RtcEngineEventHandlerOnExtensionStartedJson( - {this.provider, this.extension}); - - @JsonKey(name: 'provider') - final String? provider; +class RtcEngineEventHandlerOnExtensionStartedWithContextJson { + const RtcEngineEventHandlerOnExtensionStartedWithContextJson({this.context}); - @JsonKey(name: 'extension') - final String? extension; + @JsonKey(name: 'context') + final ExtensionContext? 
context; - factory RtcEngineEventHandlerOnExtensionStartedJson.fromJson( + factory RtcEngineEventHandlerOnExtensionStartedWithContextJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnExtensionStartedJsonFromJson(json); + _$RtcEngineEventHandlerOnExtensionStartedWithContextJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnExtensionStartedJsonToJson(this); + _$RtcEngineEventHandlerOnExtensionStartedWithContextJsonToJson(this); } -extension RtcEngineEventHandlerOnExtensionStartedJsonBufferExt - on RtcEngineEventHandlerOnExtensionStartedJson { - RtcEngineEventHandlerOnExtensionStartedJson fillBuffers( +extension RtcEngineEventHandlerOnExtensionStartedWithContextJsonBufferExt + on RtcEngineEventHandlerOnExtensionStartedWithContextJson { + RtcEngineEventHandlerOnExtensionStartedWithContextJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -4754,27 +4747,23 @@ extension RtcEngineEventHandlerOnExtensionStartedJsonBufferExt } @JsonSerializable(explicitToJson: true, includeIfNull: false) -class RtcEngineEventHandlerOnExtensionStoppedJson { - const RtcEngineEventHandlerOnExtensionStoppedJson( - {this.provider, this.extension}); +class RtcEngineEventHandlerOnExtensionStoppedWithContextJson { + const RtcEngineEventHandlerOnExtensionStoppedWithContextJson({this.context}); - @JsonKey(name: 'provider') - final String? provider; + @JsonKey(name: 'context') + final ExtensionContext? context; - @JsonKey(name: 'extension') - final String? 
extension; - - factory RtcEngineEventHandlerOnExtensionStoppedJson.fromJson( + factory RtcEngineEventHandlerOnExtensionStoppedWithContextJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnExtensionStoppedJsonFromJson(json); + _$RtcEngineEventHandlerOnExtensionStoppedWithContextJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnExtensionStoppedJsonToJson(this); + _$RtcEngineEventHandlerOnExtensionStoppedWithContextJsonToJson(this); } -extension RtcEngineEventHandlerOnExtensionStoppedJsonBufferExt - on RtcEngineEventHandlerOnExtensionStoppedJson { - RtcEngineEventHandlerOnExtensionStoppedJson fillBuffers( +extension RtcEngineEventHandlerOnExtensionStoppedWithContextJsonBufferExt + on RtcEngineEventHandlerOnExtensionStoppedWithContextJson { + RtcEngineEventHandlerOnExtensionStoppedWithContextJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -4787,15 +4776,12 @@ extension RtcEngineEventHandlerOnExtensionStoppedJsonBufferExt } @JsonSerializable(explicitToJson: true, includeIfNull: false) -class RtcEngineEventHandlerOnExtensionErrorJson { - const RtcEngineEventHandlerOnExtensionErrorJson( - {this.provider, this.extension, this.error, this.message}); - - @JsonKey(name: 'provider') - final String? provider; +class RtcEngineEventHandlerOnExtensionErrorWithContextJson { + const RtcEngineEventHandlerOnExtensionErrorWithContextJson( + {this.context, this.error, this.message}); - @JsonKey(name: 'extension') - final String? extension; + @JsonKey(name: 'context') + final ExtensionContext? context; @JsonKey(name: 'error') final int? error; @@ -4803,17 +4789,17 @@ class RtcEngineEventHandlerOnExtensionErrorJson { @JsonKey(name: 'message') final String? 
message; - factory RtcEngineEventHandlerOnExtensionErrorJson.fromJson( + factory RtcEngineEventHandlerOnExtensionErrorWithContextJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnExtensionErrorJsonFromJson(json); + _$RtcEngineEventHandlerOnExtensionErrorWithContextJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnExtensionErrorJsonToJson(this); + _$RtcEngineEventHandlerOnExtensionErrorWithContextJsonToJson(this); } -extension RtcEngineEventHandlerOnExtensionErrorJsonBufferExt - on RtcEngineEventHandlerOnExtensionErrorJson { - RtcEngineEventHandlerOnExtensionErrorJson fillBuffers( +extension RtcEngineEventHandlerOnExtensionErrorWithContextJsonBufferExt + on RtcEngineEventHandlerOnExtensionErrorWithContextJson { + RtcEngineEventHandlerOnExtensionErrorWithContextJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; diff --git a/lib/src/binding/event_handler_param_json.g.dart b/lib/src/binding/event_handler_param_json.g.dart index c50a42e0b..0a4292c10 100644 --- a/lib/src/binding/event_handler_param_json.g.dart +++ b/lib/src/binding/event_handler_param_json.g.dart @@ -1561,6 +1561,7 @@ const _$MediaDeviceStateTypeEnumMap = { MediaDeviceStateType.mediaDeviceStateIdle: 0, MediaDeviceStateType.mediaDeviceStateActive: 1, MediaDeviceStateType.mediaDeviceStateDisabled: 2, + MediaDeviceStateType.mediaDeviceStatePluggedIn: 3, MediaDeviceStateType.mediaDeviceStateNotPresent: 4, MediaDeviceStateType.mediaDeviceStateUnplugged: 8, }; @@ -1957,6 +1958,8 @@ const _$LocalVideoStreamReasonEnumMap = { .localVideoStreamReasonScreenCaptureWindowRecoverFromMinimized: 27, LocalVideoStreamReason.localVideoStreamReasonScreenCapturePaused: 28, LocalVideoStreamReason.localVideoStreamReasonScreenCaptureResumed: 29, + LocalVideoStreamReason.localVideoStreamReasonScreenCaptureDisplayDisconnected: + 30, }; RtcEngineEventHandlerOnRemoteVideoStateChangedJson @@ -2512,6 +2515,7 @@ const _$AudioMixingReasonTypeEnumMap = { 
AudioMixingReasonType.audioMixingReasonOneLoopCompleted: 721, AudioMixingReasonType.audioMixingReasonAllLoopsCompleted: 723, AudioMixingReasonType.audioMixingReasonStoppedByUser: 724, + AudioMixingReasonType.audioMixingReasonResumedByUser: 726, AudioMixingReasonType.audioMixingReasonOk: 0, }; @@ -4084,18 +4088,21 @@ Map _$RtcEngineEventHandlerOnAudioMetadataReceivedJsonToJson( return val; } -RtcEngineEventHandlerOnExtensionEventJson - _$RtcEngineEventHandlerOnExtensionEventJsonFromJson( +RtcEngineEventHandlerOnExtensionEventWithContextJson + _$RtcEngineEventHandlerOnExtensionEventWithContextJsonFromJson( Map json) => - RtcEngineEventHandlerOnExtensionEventJson( - provider: json['provider'] as String?, - extension: json['extension'] as String?, + RtcEngineEventHandlerOnExtensionEventWithContextJson( + context: json['context'] == null + ? null + : ExtensionContext.fromJson( + json['context'] as Map), key: json['key'] as String?, value: json['value'] as String?, ); -Map _$RtcEngineEventHandlerOnExtensionEventJsonToJson( - RtcEngineEventHandlerOnExtensionEventJson instance) { +Map + _$RtcEngineEventHandlerOnExtensionEventWithContextJsonToJson( + RtcEngineEventHandlerOnExtensionEventWithContextJson instance) { final val = {}; void writeNotNull(String key, dynamic value) { @@ -4104,23 +4111,25 @@ Map _$RtcEngineEventHandlerOnExtensionEventJsonToJson( } } - writeNotNull('provider', instance.provider); - writeNotNull('extension', instance.extension); + writeNotNull('context', instance.context?.toJson()); writeNotNull('key', instance.key); writeNotNull('value', instance.value); return val; } -RtcEngineEventHandlerOnExtensionStartedJson - _$RtcEngineEventHandlerOnExtensionStartedJsonFromJson( +RtcEngineEventHandlerOnExtensionStartedWithContextJson + _$RtcEngineEventHandlerOnExtensionStartedWithContextJsonFromJson( Map json) => - RtcEngineEventHandlerOnExtensionStartedJson( - provider: json['provider'] as String?, - extension: json['extension'] as String?, + 
RtcEngineEventHandlerOnExtensionStartedWithContextJson( + context: json['context'] == null + ? null + : ExtensionContext.fromJson( + json['context'] as Map), ); -Map _$RtcEngineEventHandlerOnExtensionStartedJsonToJson( - RtcEngineEventHandlerOnExtensionStartedJson instance) { +Map + _$RtcEngineEventHandlerOnExtensionStartedWithContextJsonToJson( + RtcEngineEventHandlerOnExtensionStartedWithContextJson instance) { final val = {}; void writeNotNull(String key, dynamic value) { @@ -4129,21 +4138,23 @@ Map _$RtcEngineEventHandlerOnExtensionStartedJsonToJson( } } - writeNotNull('provider', instance.provider); - writeNotNull('extension', instance.extension); + writeNotNull('context', instance.context?.toJson()); return val; } -RtcEngineEventHandlerOnExtensionStoppedJson - _$RtcEngineEventHandlerOnExtensionStoppedJsonFromJson( +RtcEngineEventHandlerOnExtensionStoppedWithContextJson + _$RtcEngineEventHandlerOnExtensionStoppedWithContextJsonFromJson( Map json) => - RtcEngineEventHandlerOnExtensionStoppedJson( - provider: json['provider'] as String?, - extension: json['extension'] as String?, + RtcEngineEventHandlerOnExtensionStoppedWithContextJson( + context: json['context'] == null + ? 
null + : ExtensionContext.fromJson( + json['context'] as Map), ); -Map _$RtcEngineEventHandlerOnExtensionStoppedJsonToJson( - RtcEngineEventHandlerOnExtensionStoppedJson instance) { +Map + _$RtcEngineEventHandlerOnExtensionStoppedWithContextJsonToJson( + RtcEngineEventHandlerOnExtensionStoppedWithContextJson instance) { final val = {}; void writeNotNull(String key, dynamic value) { @@ -4152,23 +4163,25 @@ Map _$RtcEngineEventHandlerOnExtensionStoppedJsonToJson( } } - writeNotNull('provider', instance.provider); - writeNotNull('extension', instance.extension); + writeNotNull('context', instance.context?.toJson()); return val; } -RtcEngineEventHandlerOnExtensionErrorJson - _$RtcEngineEventHandlerOnExtensionErrorJsonFromJson( +RtcEngineEventHandlerOnExtensionErrorWithContextJson + _$RtcEngineEventHandlerOnExtensionErrorWithContextJsonFromJson( Map json) => - RtcEngineEventHandlerOnExtensionErrorJson( - provider: json['provider'] as String?, - extension: json['extension'] as String?, + RtcEngineEventHandlerOnExtensionErrorWithContextJson( + context: json['context'] == null + ? 
null + : ExtensionContext.fromJson( + json['context'] as Map), error: (json['error'] as num?)?.toInt(), message: json['message'] as String?, ); -Map _$RtcEngineEventHandlerOnExtensionErrorJsonToJson( - RtcEngineEventHandlerOnExtensionErrorJson instance) { +Map + _$RtcEngineEventHandlerOnExtensionErrorWithContextJsonToJson( + RtcEngineEventHandlerOnExtensionErrorWithContextJson instance) { final val = {}; void writeNotNull(String key, dynamic value) { @@ -4177,8 +4190,7 @@ Map _$RtcEngineEventHandlerOnExtensionErrorJsonToJson( } } - writeNotNull('provider', instance.provider); - writeNotNull('extension', instance.extension); + writeNotNull('context', instance.context?.toJson()); writeNotNull('error', instance.error); writeNotNull('message', instance.message); return val; diff --git a/macos/agora_rtc_engine.podspec b/macos/agora_rtc_engine.podspec index 54f681f9c..f036fb7e1 100644 --- a/macos/agora_rtc_engine.podspec +++ b/macos/agora_rtc_engine.podspec @@ -21,8 +21,8 @@ A new flutter plugin project. puts '[plugin_dev] Found .plugin_dev file, use vendored_frameworks instead.' s.vendored_frameworks = 'libs/*.xcframework', 'libs/*.framework' else - s.dependency 'AgoraRtcEngine_macOS', '4.3.2' - s.dependency 'AgoraIrisRTC_macOS', '4.3.2-build.1' + s.dependency 'AgoraRtcEngine_macOS', '4.5.0' + s.dependency 'AgoraIrisRTC_macOS', '4.5.0-build.1' end s.platform = :osx, '10.11' diff --git a/pubspec.yaml b/pubspec.yaml index e284e36da..5b6add32f 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -2,7 +2,7 @@ name: agora_rtc_engine description: >- Flutter plugin of Agora RTC SDK, allow you to simply integrate Agora Video Calling or Live Video Streaming to your app with just a few lines of code. 
-version: 6.3.2 +version: 6.4.0 homepage: https://www.agora.io repository: https://github.com/AgoraIO-Extensions/Agora-Flutter-SDK/tree/main environment: diff --git a/scripts/artifacts_version.sh b/scripts/artifacts_version.sh index 2c766684a..29c48de42 100644 --- a/scripts/artifacts_version.sh +++ b/scripts/artifacts_version.sh @@ -1,6 +1,6 @@ set -e -export IRIS_CDN_URL_ANDROID="https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_Android_Video_20240604_0456_504.zip" -export IRIS_CDN_URL_IOS="https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_iOS_Video_20240604_0459_409.zip" -export IRIS_CDN_URL_MACOS="https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_Mac_Video_20240604_0500_404.zip" -export IRIS_CDN_URL_WINDOWS="https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_Windows_Video_20240604_0456_441.zip" +export IRIS_CDN_URL_ANDROID="https://download.agora.io/sdk/release/iris_4.5.0-build.1_DCG_Android_Video_16K_20241203_0322_701.zip" +export IRIS_CDN_URL_IOS="https://download.agora.io/sdk/release/iris_4.5.0-build.1_DCG_iOS_Video_20241203_0325_575.zip" +export IRIS_CDN_URL_MACOS="https://download.agora.io/sdk/release/iris_4.5.0-build.1_DCG_Mac_Video_20241203_0322_534.zip" +export IRIS_CDN_URL_WINDOWS="https://download.agora.io/sdk/release/iris_4.5.0-build.1_DCG_Windows_Video_20241203_0322_577.zip" diff --git a/scripts/iris_web_version.js b/scripts/iris_web_version.js index 9c6ff8464..adccd4009 100644 --- a/scripts/iris_web_version.js +++ b/scripts/iris_web_version.js @@ -1,8 +1,8 @@ // Share the iris web url to all the tests // This url should be same as the url inside the `example/web/index.html` -const irisWebUrl = 'https://download.agora.io/sdk/release/iris-web-rtc_n440_w4220_0.8.0.js'; -const irisWebFakeUrl = 'https://download.agora.io/sdk/release/iris-web-rtc-fake_n440_w4220_0.8.0.js'; +const irisWebUrl = 'https://download.agora.io/sdk/release/iris-web-rtc_n450_w4220_0.8.6.js'; +const irisWebFakeUrl = 
'https://download.agora.io/sdk/release/iris-web-rtc-fake_n450_w4220_0.8.6.js'; (function() { var scriptLoaded = false; diff --git a/test_shard/fake_test_app/android/build.gradle b/test_shard/fake_test_app/android/build.gradle index db6122536..f0df1b78b 100644 --- a/test_shard/fake_test_app/android/build.gradle +++ b/test_shard/fake_test_app/android/build.gradle @@ -1,6 +1,6 @@ buildscript { // Min kotlin version for Flutter SDK 3.24 - ext.kotlin_version = '1.7.0' + ext.kotlin_version = '1.9.10' repositories { google() mavenCentral() diff --git a/test_shard/fake_test_app/integration_test/generated/event_ids_mapping_gen.dart b/test_shard/fake_test_app/integration_test/generated/event_ids_mapping_gen.dart index 179cd0886..66215cec9 100644 --- a/test_shard/fake_test_app/integration_test/generated/event_ids_mapping_gen.dart +++ b/test_shard/fake_test_app/integration_test/generated/event_ids_mapping_gen.dart @@ -387,17 +387,17 @@ const eventIdsMapping = { "RtcEngineEventHandler_onAudioMetadataReceived": [ "RtcEngineEventHandler_onAudioMetadataReceived_0d4eb96" ], - "RtcEngineEventHandler_onExtensionEvent": [ - "RtcEngineEventHandler_onExtensionEvent_062d13c" + "RtcEngineEventHandler_onExtensionEventWithContext": [ + "RtcEngineEventHandler_onExtensionEventWithContext_a5fb27a" ], - "RtcEngineEventHandler_onExtensionStarted": [ - "RtcEngineEventHandler_onExtensionStarted_ccad422" + "RtcEngineEventHandler_onExtensionStartedWithContext": [ + "RtcEngineEventHandler_onExtensionStartedWithContext_67c38e3" ], - "RtcEngineEventHandler_onExtensionStopped": [ - "RtcEngineEventHandler_onExtensionStopped_ccad422" + "RtcEngineEventHandler_onExtensionStoppedWithContext": [ + "RtcEngineEventHandler_onExtensionStoppedWithContext_67c38e3" ], - "RtcEngineEventHandler_onExtensionError": [ - "RtcEngineEventHandler_onExtensionError_bd3489b" + "RtcEngineEventHandler_onExtensionErrorWithContext": [ + "RtcEngineEventHandler_onExtensionErrorWithContext_a452f11" ], 
"RtcEngineEventHandler_onSetRtmFlagResult": [ "RtcEngineEventHandler_onSetRtmFlagResult_263e4cd" diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart index a72fec8b7..43bb4c951 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart @@ -366,6 +366,45 @@ void mediaEngineSmokeTestCases() { // skip: !(), ); + testWidgets( + 'MediaEngine.setExternalRemoteEglContext', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + final mediaEngine = rtcEngine.getMediaEngine(); + + try { + int eglContext = 5; + await mediaEngine.setExternalRemoteEglContext( + eglContext, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[MediaEngine.setExternalRemoteEglContext] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await mediaEngine.release(); + await rtcEngine.release(); + }, +// skip: !(), + ); + testWidgets( 'MediaEngine.setExternalAudioSource', (WidgetTester tester) async { @@ -555,6 +594,43 @@ void mediaEngineSmokeTestCases() { VideoBufferType frameType = VideoBufferType.videoBufferRawData; VideoPixelFormat frameFormat = VideoPixelFormat.videoPixelDefault; EglContextType frameEglType = EglContextType.eglContext10; + AlphaStitchMode frameAlphaStitchMode = AlphaStitchMode.noAlphaStitch; + int hdr10MetadataInfoRedPrimaryX = 5; + int hdr10MetadataInfoRedPrimaryY = 5; + int hdr10MetadataInfoGreenPrimaryX = 5; + int hdr10MetadataInfoGreenPrimaryY = 5; + int hdr10MetadataInfoBluePrimaryX = 5; + int hdr10MetadataInfoBluePrimaryY = 5; + int hdr10MetadataInfoWhitePointX = 5; + int hdr10MetadataInfoWhitePointY = 5; + int hdr10MetadataInfoMaxMasteringLuminance = 5; + int hdr10MetadataInfoMinMasteringLuminance = 5; + int hdr10MetadataInfoMaxContentLightLevel = 5; + int hdr10MetadataInfoMaxFrameAverageLightLevel = 5; + Hdr10MetadataInfo frameHdr10MetadataInfo = Hdr10MetadataInfo( + redPrimaryX: hdr10MetadataInfoRedPrimaryX, + redPrimaryY: hdr10MetadataInfoRedPrimaryY, + greenPrimaryX: hdr10MetadataInfoGreenPrimaryX, + greenPrimaryY: hdr10MetadataInfoGreenPrimaryY, + bluePrimaryX: hdr10MetadataInfoBluePrimaryX, + bluePrimaryY: hdr10MetadataInfoBluePrimaryY, + whitePointX: hdr10MetadataInfoWhitePointX, + whitePointY: hdr10MetadataInfoWhitePointY, + maxMasteringLuminance: hdr10MetadataInfoMaxMasteringLuminance, + minMasteringLuminance: hdr10MetadataInfoMinMasteringLuminance, + maxContentLightLevel: hdr10MetadataInfoMaxContentLightLevel, + maxFrameAverageLightLevel: hdr10MetadataInfoMaxFrameAverageLightLevel, + ); + PrimaryID colorSpacePrimaries = PrimaryID.primaryidBt709; + TransferID colorSpaceTransfer = TransferID.transferidBt709; + MatrixID colorSpaceMatrix = MatrixID.matrixidRgb; + RangeID colorSpaceRange = RangeID.rangeidInvalid; + ColorSpace frameColorSpace = 
ColorSpace( + primaries: colorSpacePrimaries, + transfer: colorSpaceTransfer, + matrix: colorSpaceMatrix, + range: colorSpaceRange, + ); Uint8List frameBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); int frameStride = 5; int frameHeight = 5; @@ -565,11 +641,13 @@ void mediaEngineSmokeTestCases() { int frameRotation = 5; int frameTimestamp = 5; int frameTextureId = 5; + int frameFenceObject = 5; List frameMatrix = List.filled(5, 5.0); Uint8List frameMetadataBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); int frameMetadataSize = 5; Uint8List frameAlphaBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); bool frameFillAlphaBuffer = true; + int frameD3d11Texture2d = 5; int frameTextureSliceIndex = 5; ExternalVideoFrame frame = ExternalVideoFrame( type: frameType, @@ -585,12 +663,17 @@ void mediaEngineSmokeTestCases() { timestamp: frameTimestamp, eglType: frameEglType, textureId: frameTextureId, + fenceObject: frameFenceObject, matrix: frameMatrix, metadataBuffer: frameMetadataBuffer, metadataSize: frameMetadataSize, alphaBuffer: frameAlphaBuffer, fillAlphaBuffer: frameFillAlphaBuffer, + alphaStitchMode: frameAlphaStitchMode, + d3d11Texture2d: frameD3d11Texture2d, textureSliceIndex: frameTextureSliceIndex, + hdr10MetadataInfo: frameHdr10MetadataInfo, + colorSpace: frameColorSpace, ); int videoTrackId = 5; await mediaEngine.pushVideoFrame( diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_videoframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_videoframeobserver_testcases.generated.dart index 2de559fa9..6ababda7c 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_videoframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_videoframeobserver_testcases.generated.dart @@ -43,7 +43,45 @@ void generatedTestCases(ValueGetter irisTester) { { VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary; VideoPixelFormat 
videoFrameType = VideoPixelFormat.videoPixelDefault; + AlphaStitchMode videoFrameAlphaStitchMode = + AlphaStitchMode.noAlphaStitch; VideoFrameMetaInfo? videoFrameMetaInfo = null; + int hdr10MetadataInfoRedPrimaryX = 5; + int hdr10MetadataInfoRedPrimaryY = 5; + int hdr10MetadataInfoGreenPrimaryX = 5; + int hdr10MetadataInfoGreenPrimaryY = 5; + int hdr10MetadataInfoBluePrimaryX = 5; + int hdr10MetadataInfoBluePrimaryY = 5; + int hdr10MetadataInfoWhitePointX = 5; + int hdr10MetadataInfoWhitePointY = 5; + int hdr10MetadataInfoMaxMasteringLuminance = 5; + int hdr10MetadataInfoMinMasteringLuminance = 5; + int hdr10MetadataInfoMaxContentLightLevel = 5; + int hdr10MetadataInfoMaxFrameAverageLightLevel = 5; + Hdr10MetadataInfo videoFrameHdr10MetadataInfo = Hdr10MetadataInfo( + redPrimaryX: hdr10MetadataInfoRedPrimaryX, + redPrimaryY: hdr10MetadataInfoRedPrimaryY, + greenPrimaryX: hdr10MetadataInfoGreenPrimaryX, + greenPrimaryY: hdr10MetadataInfoGreenPrimaryY, + bluePrimaryX: hdr10MetadataInfoBluePrimaryX, + bluePrimaryY: hdr10MetadataInfoBluePrimaryY, + whitePointX: hdr10MetadataInfoWhitePointX, + whitePointY: hdr10MetadataInfoWhitePointY, + maxMasteringLuminance: hdr10MetadataInfoMaxMasteringLuminance, + minMasteringLuminance: hdr10MetadataInfoMinMasteringLuminance, + maxContentLightLevel: hdr10MetadataInfoMaxContentLightLevel, + maxFrameAverageLightLevel: hdr10MetadataInfoMaxFrameAverageLightLevel, + ); + PrimaryID colorSpacePrimaries = PrimaryID.primaryidBt709; + TransferID colorSpaceTransfer = TransferID.transferidBt709; + MatrixID colorSpaceMatrix = MatrixID.matrixidRgb; + RangeID colorSpaceRange = RangeID.rangeidInvalid; + ColorSpace videoFrameColorSpace = ColorSpace( + primaries: colorSpacePrimaries, + transfer: colorSpaceTransfer, + matrix: colorSpaceMatrix, + range: colorSpaceRange, + ); int videoFrameWidth = 5; int videoFrameHeight = 5; int videoFrameYStride = 5; @@ -80,8 +118,11 @@ void generatedTestCases(ValueGetter irisTester) { textureId: videoFrameTextureId, 
matrix: videoFrameMatrix, alphaBuffer: videoFrameAlphaBuffer, + alphaStitchMode: videoFrameAlphaStitchMode, pixelBuffer: videoFramePixelBuffer, metaInfo: videoFrameMetaInfo, + hdr10MetadataInfo: videoFrameHdr10MetadataInfo, + colorSpace: videoFrameColorSpace, ); final eventJson = { @@ -149,7 +190,45 @@ void generatedTestCases(ValueGetter irisTester) { { VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary; VideoPixelFormat videoFrameType = VideoPixelFormat.videoPixelDefault; + AlphaStitchMode videoFrameAlphaStitchMode = + AlphaStitchMode.noAlphaStitch; VideoFrameMetaInfo? videoFrameMetaInfo = null; + int hdr10MetadataInfoRedPrimaryX = 5; + int hdr10MetadataInfoRedPrimaryY = 5; + int hdr10MetadataInfoGreenPrimaryX = 5; + int hdr10MetadataInfoGreenPrimaryY = 5; + int hdr10MetadataInfoBluePrimaryX = 5; + int hdr10MetadataInfoBluePrimaryY = 5; + int hdr10MetadataInfoWhitePointX = 5; + int hdr10MetadataInfoWhitePointY = 5; + int hdr10MetadataInfoMaxMasteringLuminance = 5; + int hdr10MetadataInfoMinMasteringLuminance = 5; + int hdr10MetadataInfoMaxContentLightLevel = 5; + int hdr10MetadataInfoMaxFrameAverageLightLevel = 5; + Hdr10MetadataInfo videoFrameHdr10MetadataInfo = Hdr10MetadataInfo( + redPrimaryX: hdr10MetadataInfoRedPrimaryX, + redPrimaryY: hdr10MetadataInfoRedPrimaryY, + greenPrimaryX: hdr10MetadataInfoGreenPrimaryX, + greenPrimaryY: hdr10MetadataInfoGreenPrimaryY, + bluePrimaryX: hdr10MetadataInfoBluePrimaryX, + bluePrimaryY: hdr10MetadataInfoBluePrimaryY, + whitePointX: hdr10MetadataInfoWhitePointX, + whitePointY: hdr10MetadataInfoWhitePointY, + maxMasteringLuminance: hdr10MetadataInfoMaxMasteringLuminance, + minMasteringLuminance: hdr10MetadataInfoMinMasteringLuminance, + maxContentLightLevel: hdr10MetadataInfoMaxContentLightLevel, + maxFrameAverageLightLevel: hdr10MetadataInfoMaxFrameAverageLightLevel, + ); + PrimaryID colorSpacePrimaries = PrimaryID.primaryidBt709; + TransferID colorSpaceTransfer = TransferID.transferidBt709; + MatrixID 
colorSpaceMatrix = MatrixID.matrixidRgb; + RangeID colorSpaceRange = RangeID.rangeidInvalid; + ColorSpace videoFrameColorSpace = ColorSpace( + primaries: colorSpacePrimaries, + transfer: colorSpaceTransfer, + matrix: colorSpaceMatrix, + range: colorSpaceRange, + ); int videoFrameWidth = 5; int videoFrameHeight = 5; int videoFrameYStride = 5; @@ -186,8 +265,11 @@ void generatedTestCases(ValueGetter irisTester) { textureId: videoFrameTextureId, matrix: videoFrameMatrix, alphaBuffer: videoFrameAlphaBuffer, + alphaStitchMode: videoFrameAlphaStitchMode, pixelBuffer: videoFramePixelBuffer, metaInfo: videoFrameMetaInfo, + hdr10MetadataInfo: videoFrameHdr10MetadataInfo, + colorSpace: videoFrameColorSpace, ); final eventJson = { @@ -253,7 +335,45 @@ void generatedTestCases(ValueGetter irisTester) { { VideoPixelFormat videoFrameType = VideoPixelFormat.videoPixelDefault; + AlphaStitchMode videoFrameAlphaStitchMode = + AlphaStitchMode.noAlphaStitch; VideoFrameMetaInfo? videoFrameMetaInfo = null; + int hdr10MetadataInfoRedPrimaryX = 5; + int hdr10MetadataInfoRedPrimaryY = 5; + int hdr10MetadataInfoGreenPrimaryX = 5; + int hdr10MetadataInfoGreenPrimaryY = 5; + int hdr10MetadataInfoBluePrimaryX = 5; + int hdr10MetadataInfoBluePrimaryY = 5; + int hdr10MetadataInfoWhitePointX = 5; + int hdr10MetadataInfoWhitePointY = 5; + int hdr10MetadataInfoMaxMasteringLuminance = 5; + int hdr10MetadataInfoMinMasteringLuminance = 5; + int hdr10MetadataInfoMaxContentLightLevel = 5; + int hdr10MetadataInfoMaxFrameAverageLightLevel = 5; + Hdr10MetadataInfo videoFrameHdr10MetadataInfo = Hdr10MetadataInfo( + redPrimaryX: hdr10MetadataInfoRedPrimaryX, + redPrimaryY: hdr10MetadataInfoRedPrimaryY, + greenPrimaryX: hdr10MetadataInfoGreenPrimaryX, + greenPrimaryY: hdr10MetadataInfoGreenPrimaryY, + bluePrimaryX: hdr10MetadataInfoBluePrimaryX, + bluePrimaryY: hdr10MetadataInfoBluePrimaryY, + whitePointX: hdr10MetadataInfoWhitePointX, + whitePointY: hdr10MetadataInfoWhitePointY, + maxMasteringLuminance: 
hdr10MetadataInfoMaxMasteringLuminance, + minMasteringLuminance: hdr10MetadataInfoMinMasteringLuminance, + maxContentLightLevel: hdr10MetadataInfoMaxContentLightLevel, + maxFrameAverageLightLevel: hdr10MetadataInfoMaxFrameAverageLightLevel, + ); + PrimaryID colorSpacePrimaries = PrimaryID.primaryidBt709; + TransferID colorSpaceTransfer = TransferID.transferidBt709; + MatrixID colorSpaceMatrix = MatrixID.matrixidRgb; + RangeID colorSpaceRange = RangeID.rangeidInvalid; + ColorSpace videoFrameColorSpace = ColorSpace( + primaries: colorSpacePrimaries, + transfer: colorSpaceTransfer, + matrix: colorSpaceMatrix, + range: colorSpaceRange, + ); int videoFrameWidth = 5; int videoFrameHeight = 5; int videoFrameYStride = 5; @@ -290,8 +410,11 @@ void generatedTestCases(ValueGetter irisTester) { textureId: videoFrameTextureId, matrix: videoFrameMatrix, alphaBuffer: videoFrameAlphaBuffer, + alphaStitchMode: videoFrameAlphaStitchMode, pixelBuffer: videoFramePixelBuffer, metaInfo: videoFrameMetaInfo, + hdr10MetadataInfo: videoFrameHdr10MetadataInfo, + colorSpace: videoFrameColorSpace, ); int mediaPlayerId = 5; @@ -361,7 +484,45 @@ void generatedTestCases(ValueGetter irisTester) { String channelId = "hello"; int remoteUid = 5; VideoPixelFormat videoFrameType = VideoPixelFormat.videoPixelDefault; + AlphaStitchMode videoFrameAlphaStitchMode = + AlphaStitchMode.noAlphaStitch; VideoFrameMetaInfo? 
videoFrameMetaInfo = null; + int hdr10MetadataInfoRedPrimaryX = 5; + int hdr10MetadataInfoRedPrimaryY = 5; + int hdr10MetadataInfoGreenPrimaryX = 5; + int hdr10MetadataInfoGreenPrimaryY = 5; + int hdr10MetadataInfoBluePrimaryX = 5; + int hdr10MetadataInfoBluePrimaryY = 5; + int hdr10MetadataInfoWhitePointX = 5; + int hdr10MetadataInfoWhitePointY = 5; + int hdr10MetadataInfoMaxMasteringLuminance = 5; + int hdr10MetadataInfoMinMasteringLuminance = 5; + int hdr10MetadataInfoMaxContentLightLevel = 5; + int hdr10MetadataInfoMaxFrameAverageLightLevel = 5; + Hdr10MetadataInfo videoFrameHdr10MetadataInfo = Hdr10MetadataInfo( + redPrimaryX: hdr10MetadataInfoRedPrimaryX, + redPrimaryY: hdr10MetadataInfoRedPrimaryY, + greenPrimaryX: hdr10MetadataInfoGreenPrimaryX, + greenPrimaryY: hdr10MetadataInfoGreenPrimaryY, + bluePrimaryX: hdr10MetadataInfoBluePrimaryX, + bluePrimaryY: hdr10MetadataInfoBluePrimaryY, + whitePointX: hdr10MetadataInfoWhitePointX, + whitePointY: hdr10MetadataInfoWhitePointY, + maxMasteringLuminance: hdr10MetadataInfoMaxMasteringLuminance, + minMasteringLuminance: hdr10MetadataInfoMinMasteringLuminance, + maxContentLightLevel: hdr10MetadataInfoMaxContentLightLevel, + maxFrameAverageLightLevel: hdr10MetadataInfoMaxFrameAverageLightLevel, + ); + PrimaryID colorSpacePrimaries = PrimaryID.primaryidBt709; + TransferID colorSpaceTransfer = TransferID.transferidBt709; + MatrixID colorSpaceMatrix = MatrixID.matrixidRgb; + RangeID colorSpaceRange = RangeID.rangeidInvalid; + ColorSpace videoFrameColorSpace = ColorSpace( + primaries: colorSpacePrimaries, + transfer: colorSpaceTransfer, + matrix: colorSpaceMatrix, + range: colorSpaceRange, + ); int videoFrameWidth = 5; int videoFrameHeight = 5; int videoFrameYStride = 5; @@ -398,8 +559,11 @@ void generatedTestCases(ValueGetter irisTester) { textureId: videoFrameTextureId, matrix: videoFrameMatrix, alphaBuffer: videoFrameAlphaBuffer, + alphaStitchMode: videoFrameAlphaStitchMode, pixelBuffer: videoFramePixelBuffer, 
metaInfo: videoFrameMetaInfo, + hdr10MetadataInfo: videoFrameHdr10MetadataInfo, + colorSpace: videoFrameColorSpace, ); final eventJson = { @@ -466,7 +630,45 @@ void generatedTestCases(ValueGetter irisTester) { { VideoPixelFormat videoFrameType = VideoPixelFormat.videoPixelDefault; + AlphaStitchMode videoFrameAlphaStitchMode = + AlphaStitchMode.noAlphaStitch; VideoFrameMetaInfo? videoFrameMetaInfo = null; + int hdr10MetadataInfoRedPrimaryX = 5; + int hdr10MetadataInfoRedPrimaryY = 5; + int hdr10MetadataInfoGreenPrimaryX = 5; + int hdr10MetadataInfoGreenPrimaryY = 5; + int hdr10MetadataInfoBluePrimaryX = 5; + int hdr10MetadataInfoBluePrimaryY = 5; + int hdr10MetadataInfoWhitePointX = 5; + int hdr10MetadataInfoWhitePointY = 5; + int hdr10MetadataInfoMaxMasteringLuminance = 5; + int hdr10MetadataInfoMinMasteringLuminance = 5; + int hdr10MetadataInfoMaxContentLightLevel = 5; + int hdr10MetadataInfoMaxFrameAverageLightLevel = 5; + Hdr10MetadataInfo videoFrameHdr10MetadataInfo = Hdr10MetadataInfo( + redPrimaryX: hdr10MetadataInfoRedPrimaryX, + redPrimaryY: hdr10MetadataInfoRedPrimaryY, + greenPrimaryX: hdr10MetadataInfoGreenPrimaryX, + greenPrimaryY: hdr10MetadataInfoGreenPrimaryY, + bluePrimaryX: hdr10MetadataInfoBluePrimaryX, + bluePrimaryY: hdr10MetadataInfoBluePrimaryY, + whitePointX: hdr10MetadataInfoWhitePointX, + whitePointY: hdr10MetadataInfoWhitePointY, + maxMasteringLuminance: hdr10MetadataInfoMaxMasteringLuminance, + minMasteringLuminance: hdr10MetadataInfoMinMasteringLuminance, + maxContentLightLevel: hdr10MetadataInfoMaxContentLightLevel, + maxFrameAverageLightLevel: hdr10MetadataInfoMaxFrameAverageLightLevel, + ); + PrimaryID colorSpacePrimaries = PrimaryID.primaryidBt709; + TransferID colorSpaceTransfer = TransferID.transferidBt709; + MatrixID colorSpaceMatrix = MatrixID.matrixidRgb; + RangeID colorSpaceRange = RangeID.rangeidInvalid; + ColorSpace videoFrameColorSpace = ColorSpace( + primaries: colorSpacePrimaries, + transfer: colorSpaceTransfer, + matrix: 
colorSpaceMatrix, + range: colorSpaceRange, + ); int videoFrameWidth = 5; int videoFrameHeight = 5; int videoFrameYStride = 5; @@ -503,8 +705,11 @@ void generatedTestCases(ValueGetter irisTester) { textureId: videoFrameTextureId, matrix: videoFrameMatrix, alphaBuffer: videoFrameAlphaBuffer, + alphaStitchMode: videoFrameAlphaStitchMode, pixelBuffer: videoFramePixelBuffer, metaInfo: videoFrameMetaInfo, + hdr10MetadataInfo: videoFrameHdr10MetadataInfo, + colorSpace: videoFrameColorSpace, ); final eventJson = { diff --git a/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiopcmframesink_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiopcmframesink_testcases.generated.dart index 06ea82b10..038edb4dd 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiopcmframesink_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiopcmframesink_testcases.generated.dart @@ -52,6 +52,7 @@ void generatedTestCases(ValueGetter irisTester) { int frameSampleRateHz = 5; int frameNumChannels = 5; List frameData = List.filled(5, 5); + bool frameIsStereo = true; AudioPcmFrame frame = AudioPcmFrame( captureTimestamp: frameCaptureTimestamp, samplesPerChannel: frameSamplesPerChannel, @@ -59,6 +60,7 @@ void generatedTestCases(ValueGetter irisTester) { numChannels: frameNumChannels, bytesPerSample: frameBytesPerSample, data: frameData, + isStereo: frameIsStereo, ); final eventJson = { diff --git a/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayervideoframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayervideoframeobserver_testcases.generated.dart index ad45b529b..3e068be1b 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayervideoframeobserver_testcases.generated.dart +++ 
b/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayervideoframeobserver_testcases.generated.dart @@ -43,7 +43,44 @@ void generatedTestCases(ValueGetter irisTester) { { VideoPixelFormat frameType = VideoPixelFormat.videoPixelDefault; + AlphaStitchMode frameAlphaStitchMode = AlphaStitchMode.noAlphaStitch; VideoFrameMetaInfo? frameMetaInfo = null; + int hdr10MetadataInfoRedPrimaryX = 5; + int hdr10MetadataInfoRedPrimaryY = 5; + int hdr10MetadataInfoGreenPrimaryX = 5; + int hdr10MetadataInfoGreenPrimaryY = 5; + int hdr10MetadataInfoBluePrimaryX = 5; + int hdr10MetadataInfoBluePrimaryY = 5; + int hdr10MetadataInfoWhitePointX = 5; + int hdr10MetadataInfoWhitePointY = 5; + int hdr10MetadataInfoMaxMasteringLuminance = 5; + int hdr10MetadataInfoMinMasteringLuminance = 5; + int hdr10MetadataInfoMaxContentLightLevel = 5; + int hdr10MetadataInfoMaxFrameAverageLightLevel = 5; + Hdr10MetadataInfo frameHdr10MetadataInfo = Hdr10MetadataInfo( + redPrimaryX: hdr10MetadataInfoRedPrimaryX, + redPrimaryY: hdr10MetadataInfoRedPrimaryY, + greenPrimaryX: hdr10MetadataInfoGreenPrimaryX, + greenPrimaryY: hdr10MetadataInfoGreenPrimaryY, + bluePrimaryX: hdr10MetadataInfoBluePrimaryX, + bluePrimaryY: hdr10MetadataInfoBluePrimaryY, + whitePointX: hdr10MetadataInfoWhitePointX, + whitePointY: hdr10MetadataInfoWhitePointY, + maxMasteringLuminance: hdr10MetadataInfoMaxMasteringLuminance, + minMasteringLuminance: hdr10MetadataInfoMinMasteringLuminance, + maxContentLightLevel: hdr10MetadataInfoMaxContentLightLevel, + maxFrameAverageLightLevel: hdr10MetadataInfoMaxFrameAverageLightLevel, + ); + PrimaryID colorSpacePrimaries = PrimaryID.primaryidBt709; + TransferID colorSpaceTransfer = TransferID.transferidBt709; + MatrixID colorSpaceMatrix = MatrixID.matrixidRgb; + RangeID colorSpaceRange = RangeID.rangeidInvalid; + ColorSpace frameColorSpace = ColorSpace( + primaries: colorSpacePrimaries, + transfer: colorSpaceTransfer, + matrix: colorSpaceMatrix, + range: colorSpaceRange, + 
); int frameWidth = 5; int frameHeight = 5; int frameYStride = 5; @@ -79,8 +116,11 @@ void generatedTestCases(ValueGetter irisTester) { textureId: frameTextureId, matrix: frameMatrix, alphaBuffer: frameAlphaBuffer, + alphaStitchMode: frameAlphaStitchMode, pixelBuffer: framePixelBuffer, metaInfo: frameMetaInfo, + hdr10MetadataInfo: frameHdr10MetadataInfo, + colorSpace: frameColorSpace, ); final eventJson = { diff --git a/test_shard/fake_test_app/integration_test/generated/mediarecorder_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediarecorder_fake_test.generated.dart index 2838e5898..fefb1b1c2 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediarecorder_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediarecorder_fake_test.generated.dart @@ -76,15 +76,28 @@ void mediaRecorderSmokeTestCases() { MediaRecorderContainerFormat.formatMp4; MediaRecorderStreamType configStreamType = MediaRecorderStreamType.streamTypeAudio; + VideoSourceType configVideoSourceType = + VideoSourceType.videoSourceCameraPrimary; String configStoragePath = "hello"; int configMaxDurationMs = 5; int configRecorderInfoUpdateInterval = 5; + int configWidth = 5; + int configHeight = 5; + int configFps = 5; + int configSampleRate = 5; + int configChannelNum = 5; MediaRecorderConfiguration config = MediaRecorderConfiguration( storagePath: configStoragePath, containerFormat: configContainerFormat, streamType: configStreamType, maxDurationMs: configMaxDurationMs, recorderInfoUpdateInterval: configRecorderInfoUpdateInterval, + width: configWidth, + height: configHeight, + fps: configFps, + sampleRate: configSampleRate, + channelNum: configChannelNum, + videoSourceType: configVideoSourceType, ); await mediaRecorder.startRecording( config, diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart 
b/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart index 11d76ef60..badb47733 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart @@ -340,6 +340,7 @@ void rtcEngineSmokeTestCases() { bool optionsIsInteractiveAudience = true; int optionsCustomVideoTrackId = 5; bool optionsIsAudioFilterable = true; + String optionsParameters = "hello"; ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, @@ -377,6 +378,7 @@ void rtcEngineSmokeTestCases() { isInteractiveAudience: optionsIsInteractiveAudience, customVideoTrackId: optionsCustomVideoTrackId, isAudioFilterable: optionsIsAudioFilterable, + parameters: optionsParameters, ); await rtcEngine.joinChannel( token: token, @@ -454,6 +456,7 @@ void rtcEngineSmokeTestCases() { bool optionsIsInteractiveAudience = true; int optionsCustomVideoTrackId = 5; bool optionsIsAudioFilterable = true; + String optionsParameters = "hello"; ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, @@ -491,6 +494,7 @@ void rtcEngineSmokeTestCases() { isInteractiveAudience: optionsIsInteractiveAudience, customVideoTrackId: optionsCustomVideoTrackId, isAudioFilterable: optionsIsAudioFilterable, + parameters: optionsParameters, ); await rtcEngine.updateChannelMediaOptions( options, @@ -1027,16 +1031,18 @@ void rtcEngineSmokeTestCases() { OrientationMode configOrientationMode = OrientationMode.orientationModeAdaptive; DegradationPreference configDegradationPreference = - DegradationPreference.maintainQuality; + DegradationPreference.maintainAuto; VideoMirrorModeType configMirrorMode = VideoMirrorModeType.videoMirrorModeAuto; EncodingPreference 
advanceOptionsEncodingPreference = EncodingPreference.preferAuto; CompressionPreference advanceOptionsCompressionPreference = - CompressionPreference.preferLowLatency; + CompressionPreference.preferCompressionAuto; + bool advanceOptionsEncodeAlpha = true; AdvanceOptions configAdvanceOptions = AdvanceOptions( encodingPreference: advanceOptionsEncodingPreference, compressionPreference: advanceOptionsCompressionPreference, + encodeAlpha: advanceOptionsEncodeAlpha, ); int configFrameRate = 5; int configBitrate = 5; @@ -1123,6 +1129,209 @@ void rtcEngineSmokeTestCases() { }, ); + testWidgets( + 'RtcEngine.setFaceShapeBeautyOptions', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + bool enabled = true; + FaceShapeBeautyStyle optionsShapeStyle = + FaceShapeBeautyStyle.faceShapeBeautyStyleFemale; + int optionsStyleIntensity = 5; + FaceShapeBeautyOptions options = FaceShapeBeautyOptions( + shapeStyle: optionsShapeStyle, + styleIntensity: optionsStyleIntensity, + ); + MediaSourceType type = MediaSourceType.audioPlayoutSource; + await rtcEngine.setFaceShapeBeautyOptions( + enabled: enabled, + options: options, + type: type, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngine.setFaceShapeBeautyOptions] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'RtcEngine.setFaceShapeAreaOptions', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + FaceShapeArea optionsShapeArea = FaceShapeArea.faceShapeAreaNone; + int optionsShapeIntensity = 5; + FaceShapeAreaOptions options = FaceShapeAreaOptions( + shapeArea: optionsShapeArea, + shapeIntensity: optionsShapeIntensity, + ); + MediaSourceType type = MediaSourceType.audioPlayoutSource; + await rtcEngine.setFaceShapeAreaOptions( + options: options, + type: type, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngine.setFaceShapeAreaOptions] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'RtcEngine.getFaceShapeBeautyOptions', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + MediaSourceType type = MediaSourceType.audioPlayoutSource; + await rtcEngine.getFaceShapeBeautyOptions( + type: type, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngine.getFaceShapeBeautyOptions] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'RtcEngine.getFaceShapeAreaOptions', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + FaceShapeArea shapeArea = FaceShapeArea.faceShapeAreaNone; + MediaSourceType type = MediaSourceType.audioPlayoutSource; + await rtcEngine.getFaceShapeAreaOptions( + shapeArea: shapeArea, + type: type, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngine.getFaceShapeAreaOptions] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'RtcEngine.setFilterEffectOptions', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + bool enabled = true; + String optionsPath = "hello"; + double optionsStrength = 5.0; + FilterEffectOptions options = FilterEffectOptions( + path: optionsPath, + strength: optionsStrength, + ); + MediaSourceType type = MediaSourceType.audioPlayoutSource; + await rtcEngine.setFilterEffectOptions( + enabled: enabled, + options: options, + type: type, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngine.setFilterEffectOptions] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( 'RtcEngine.setLowlightEnhanceOptions', (WidgetTester tester) async { @@ -1765,41 +1974,6 @@ void rtcEngineSmokeTestCases() { }, ); - testWidgets( - 'RtcEngine.setDefaultMuteAllRemoteAudioStreams', - (WidgetTester tester) async { - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); - - try { - bool mute = true; - await rtcEngine.setDefaultMuteAllRemoteAudioStreams( - mute, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint( - '[RtcEngine.setDefaultMuteAllRemoteAudioStreams] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. - rethrow; - } - } - - await rtcEngine.release(); - }, - ); - testWidgets( 'RtcEngine.muteRemoteAudioStream', (WidgetTester tester) async { @@ -1940,41 +2114,6 @@ void rtcEngineSmokeTestCases() { }, ); - testWidgets( - 'RtcEngine.setDefaultMuteAllRemoteVideoStreams', - (WidgetTester tester) async { - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); - - try { - bool mute = true; - await rtcEngine.setDefaultMuteAllRemoteVideoStreams( - mute, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint( - '[RtcEngine.setDefaultMuteAllRemoteVideoStreams] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. 
- rethrow; - } - } - - await rtcEngine.release(); - }, - ); - testWidgets( 'RtcEngine.setRemoteDefaultVideoStreamType', (WidgetTester tester) async { @@ -4094,6 +4233,42 @@ void rtcEngineSmokeTestCases() { }, ); + testWidgets( + 'RtcEngine.enableVoiceAITuner', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + bool enabled = true; + VoiceAiTunerType type = VoiceAiTunerType.voiceAiTunerMatureMale; + await rtcEngine.enableVoiceAITuner( + enabled: enabled, + type: type, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[RtcEngine.enableVoiceAITuner] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( 'RtcEngine.setLogFile', (WidgetTester tester) async { @@ -4373,6 +4548,78 @@ void rtcEngineSmokeTestCases() { }, ); + testWidgets( + 'RtcEngine.setLocalRenderTargetFps', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary; + int targetFps = 5; + await rtcEngine.setLocalRenderTargetFps( + sourceType: sourceType, + targetFps: targetFps, + ); + } catch (e) { + if (e is! 
AgoraRtcException) { + debugPrint( + '[RtcEngine.setLocalRenderTargetFps] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'RtcEngine.setRemoteRenderTargetFps', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + int targetFps = 5; + await rtcEngine.setRemoteRenderTargetFps( + targetFps, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngine.setRemoteRenderTargetFps] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( 'RtcEngine.setLocalVideoMirrorMode', (WidgetTester tester) async { @@ -4485,13 +4732,50 @@ void rtcEngineSmokeTestCases() { kBitrate: streamConfigKBitrate, framerate: streamConfigFramerate, ); - await rtcEngine.setDualStreamMode( - mode: mode, - streamConfig: streamConfig, + await rtcEngine.setDualStreamMode( + mode: mode, + streamConfig: streamConfig, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[RtcEngine.setDualStreamMode] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'RtcEngine.setSimulcastConfig', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + List simulcastConfigConfigs = []; + SimulcastConfig simulcastConfig = SimulcastConfig( + configs: simulcastConfigConfigs, + ); + await rtcEngine.setSimulcastConfig( + simulcastConfig, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[RtcEngine.setDualStreamMode] error: ${e.toString()}'); + debugPrint('[RtcEngine.setSimulcastConfig] error: ${e.toString()}'); rethrow; } @@ -7163,6 +7447,41 @@ void rtcEngineSmokeTestCases() { }, ); + testWidgets( + 'RtcEngine.setExternalMediaProjection', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + int mediaProjection = 5; + await rtcEngine.setExternalMediaProjection( + mediaProjection, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngine.setExternalMediaProjection] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( 'RtcEngine.setScreenCaptureScenario', (WidgetTester tester) async { @@ -7569,16 +7888,18 @@ void rtcEngineSmokeTestCases() { OrientationMode videoOutputConfigurationOrientationMode = OrientationMode.orientationModeAdaptive; DegradationPreference videoOutputConfigurationDegradationPreference = - DegradationPreference.maintainQuality; + DegradationPreference.maintainAuto; VideoMirrorModeType videoOutputConfigurationMirrorMode = VideoMirrorModeType.videoMirrorModeAuto; EncodingPreference advanceOptionsEncodingPreference = EncodingPreference.preferAuto; CompressionPreference advanceOptionsCompressionPreference = - CompressionPreference.preferLowLatency; + CompressionPreference.preferCompressionAuto; + bool advanceOptionsEncodeAlpha = true; AdvanceOptions videoOutputConfigurationAdvanceOptions = AdvanceOptions( encodingPreference: advanceOptionsEncodingPreference, compressionPreference: advanceOptionsCompressionPreference, + encodeAlpha: advanceOptionsEncodeAlpha, ); int videoOutputConfigurationFrameRate = 5; int videoOutputConfigurationBitrate = 5; @@ -7649,16 +7970,18 @@ void rtcEngineSmokeTestCases() { OrientationMode videoOutputConfigurationOrientationMode = OrientationMode.orientationModeAdaptive; DegradationPreference videoOutputConfigurationDegradationPreference = - DegradationPreference.maintainQuality; + DegradationPreference.maintainAuto; VideoMirrorModeType videoOutputConfigurationMirrorMode = VideoMirrorModeType.videoMirrorModeAuto; EncodingPreference advanceOptionsEncodingPreference = EncodingPreference.preferAuto; CompressionPreference advanceOptionsCompressionPreference = - CompressionPreference.preferLowLatency; + CompressionPreference.preferCompressionAuto; + bool advanceOptionsEncodeAlpha = true; AdvanceOptions videoOutputConfigurationAdvanceOptions = AdvanceOptions( encodingPreference: advanceOptionsEncodingPreference, compressionPreference: advanceOptionsCompressionPreference, 
+ encodeAlpha: advanceOptionsEncodeAlpha, ); int videoOutputConfigurationFrameRate = 5; int videoOutputConfigurationBitrate = 5; @@ -7770,6 +8093,120 @@ void rtcEngineSmokeTestCases() { }, ); + testWidgets( + 'RtcEngine.startLocalAudioMixer', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + int configStreamCount = 5; + List configAudioInputStreams = []; + bool configSyncWithLocalMic = true; + LocalAudioMixerConfiguration config = LocalAudioMixerConfiguration( + streamCount: configStreamCount, + audioInputStreams: configAudioInputStreams, + syncWithLocalMic: configSyncWithLocalMic, + ); + await rtcEngine.startLocalAudioMixer( + config, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[RtcEngine.startLocalAudioMixer] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'RtcEngine.updateLocalAudioMixerConfiguration', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + int configStreamCount = 5; + List configAudioInputStreams = []; + bool configSyncWithLocalMic = true; + LocalAudioMixerConfiguration config = LocalAudioMixerConfiguration( + streamCount: configStreamCount, + audioInputStreams: configAudioInputStreams, + syncWithLocalMic: configSyncWithLocalMic, + ); + await rtcEngine.updateLocalAudioMixerConfiguration( + config, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngine.updateLocalAudioMixerConfiguration] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'RtcEngine.stopLocalAudioMixer', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + await rtcEngine.stopLocalAudioMixer(); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[RtcEngine.stopLocalAudioMixer] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( 'RtcEngine.startCameraCapture', (WidgetTester tester) async { @@ -8156,12 +8593,12 @@ void rtcEngineSmokeTestCases() { int width, int height, int layoutCount, List layoutlist) {}, onAudioMetadataReceived: (RtcConnection connection, int uid, Uint8List metadata, int length) {}, - onExtensionEvent: - (String provider, String extension, String key, String value) {}, - onExtensionStarted: (String provider, String extension) {}, - onExtensionStopped: (String provider, String extension) {}, - onExtensionError: - (String provider, String extension, int error, String message) {}, + onExtensionEventWithContext: + (ExtensionContext context, String key, String value) {}, + onExtensionStartedWithContext: (ExtensionContext context) {}, + onExtensionStoppedWithContext: (ExtensionContext context) {}, + onExtensionErrorWithContext: + (ExtensionContext context, int error, String message) {}, onSetRtmFlagResult: (RtcConnection connection, int code) {}, ); rtcEngine.registerEventHandler( @@ -8373,12 +8810,12 @@ void rtcEngineSmokeTestCases() { int width, int height, int layoutCount, List layoutlist) {}, onAudioMetadataReceived: (RtcConnection connection, int uid, Uint8List metadata, int length) {}, - onExtensionEvent: - (String provider, String extension, String key, String value) {}, - onExtensionStarted: (String provider, String extension) {}, - onExtensionStopped: (String provider, String extension) {}, - onExtensionError: - (String provider, String extension, int error, String message) {}, + onExtensionEventWithContext: + (ExtensionContext context, String key, String value) {}, + onExtensionStartedWithContext: (ExtensionContext context) {}, + onExtensionStoppedWithContext: (ExtensionContext context) {}, + onExtensionErrorWithContext: + (ExtensionContext context, int error, String message) {}, onSetRtmFlagResult: (RtcConnection connection, int code) {}, ); rtcEngine.unregisterEventHandler( @@ -8438,74 
+8875,6 @@ void rtcEngineSmokeTestCases() { }, ); - testWidgets( - 'RtcEngine.setEncryptionMode', - (WidgetTester tester) async { - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); - - try { - String encryptionMode = "hello"; - await rtcEngine.setEncryptionMode( - encryptionMode, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[RtcEngine.setEncryptionMode] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. - rethrow; - } - } - - await rtcEngine.release(); - }, - ); - - testWidgets( - 'RtcEngine.setEncryptionSecret', - (WidgetTester tester) async { - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); - - try { - String secret = "hello"; - await rtcEngine.setEncryptionSecret( - secret, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[RtcEngine.setEncryptionSecret] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. 
- rethrow; - } - } - - await rtcEngine.release(); - }, - ); - testWidgets( 'RtcEngine.enableEncryption', (WidgetTester tester) async { @@ -9124,6 +9493,7 @@ void rtcEngineSmokeTestCases() { bool optionsIsInteractiveAudience = true; int optionsCustomVideoTrackId = 5; bool optionsIsAudioFilterable = true; + String optionsParameters = "hello"; ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, @@ -9161,6 +9531,7 @@ void rtcEngineSmokeTestCases() { isInteractiveAudience: optionsIsInteractiveAudience, customVideoTrackId: optionsCustomVideoTrackId, isAudioFilterable: optionsIsAudioFilterable, + parameters: optionsParameters, ); await rtcEngine.joinChannelWithUserAccount( token: token, @@ -9242,6 +9613,7 @@ void rtcEngineSmokeTestCases() { bool optionsIsInteractiveAudience = true; int optionsCustomVideoTrackId = 5; bool optionsIsAudioFilterable = true; + String optionsParameters = "hello"; ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, @@ -9279,6 +9651,7 @@ void rtcEngineSmokeTestCases() { isInteractiveAudience: optionsIsInteractiveAudience, customVideoTrackId: optionsCustomVideoTrackId, isAudioFilterable: optionsIsAudioFilterable, + parameters: optionsParameters, ); await rtcEngine.joinChannelWithUserAccountEx( token: token, @@ -9527,16 +9900,18 @@ void rtcEngineSmokeTestCases() { OrientationMode configOrientationMode = OrientationMode.orientationModeAdaptive; DegradationPreference configDegradationPreference = - DegradationPreference.maintainQuality; + DegradationPreference.maintainAuto; VideoMirrorModeType configMirrorMode = VideoMirrorModeType.videoMirrorModeAuto; EncodingPreference advanceOptionsEncodingPreference = EncodingPreference.preferAuto; CompressionPreference advanceOptionsCompressionPreference = - 
CompressionPreference.preferLowLatency; + CompressionPreference.preferCompressionAuto; + bool advanceOptionsEncodeAlpha = true; AdvanceOptions configAdvanceOptions = AdvanceOptions( encodingPreference: advanceOptionsEncodingPreference, compressionPreference: advanceOptionsCompressionPreference, + encodeAlpha: advanceOptionsEncodeAlpha, ); int configFrameRate = 5; int configBitrate = 5; @@ -10434,6 +10809,40 @@ void rtcEngineSmokeTestCases() { }, ); + testWidgets( + 'RtcEngine.queryHDRCapability', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + VideoModuleType videoModule = VideoModuleType.videoModuleCapturer; + await rtcEngine.queryHDRCapability( + videoModule, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[RtcEngine.queryHDRCapability] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( 'RtcEngine.startScreenCaptureBySourceType', (WidgetTester tester) async { @@ -10833,11 +11242,13 @@ void rtcEngineSmokeTestCases() { await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); try { + String metadataChannelId = "hello"; int metadataUid = 5; int metadataSize = 5; Uint8List metadataBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); int metadataTimeStampMs = 5; Metadata metadata = Metadata( + channelId: metadataChannelId, uid: metadataUid, size: metadataSize, buffer: metadataBuffer, @@ -10970,4 +11381,47 @@ void rtcEngineSmokeTestCases() { await rtcEngine.release(); }, ); + + testWidgets( + 'RtcEngine.takeSnapshotWithConfig', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + int uid = 5; + VideoModulePosition configPosition = + VideoModulePosition.positionPostCapturer; + String configFilePath = "hello"; + SnapshotConfig config = SnapshotConfig( + filePath: configFilePath, + position: configPosition, + ); + await rtcEngine.takeSnapshotWithConfig( + uid: uid, + config: config, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngine.takeSnapshotWithConfig] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); } diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_metadataobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_metadataobserver_testcases.generated.dart index c8fef3989..80273c574 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_metadataobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_metadataobserver_testcases.generated.dart @@ -42,11 +42,13 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { + String metadataChannelId = "hello"; int metadataUid = 5; int metadataSize = 5; Uint8List metadataBuffer = Uint8List.fromList([1, 1, 1, 1, 1]); int metadataTimeStampMs = 5; Metadata metadata = Metadata( + channelId: metadataChannelId, uid: metadataUid, size: metadataSize, buffer: metadataBuffer, diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart index 62f3ccde1..4f18910ed 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart @@ -2702,6 +2702,7 @@ void generatedTestCases(ValueGetter irisTester) { int statsTxPacketLossRate = 5; bool statsDualStreamEnabled = true; int statsHwEncoderAccelerating = 5; + List statsSimulcastDimensions = []; LocalVideoStats stats = LocalVideoStats( uid: statsUid, sentBitrate: statsSentBitrate, @@ -2726,6 +2727,7 @@ void generatedTestCases(ValueGetter irisTester) { captureBrightnessLevel: statsCaptureBrightnessLevel, dualStreamEnabled: statsDualStreamEnabled, hwEncoderAccelerating: statsHwEncoderAccelerating, + 
simulcastDimensions: statsSimulcastDimensions, ); final eventJson = { @@ -2802,6 +2804,7 @@ void generatedTestCases(ValueGetter irisTester) { int statsWidth = 5; int statsHeight = 5; int statsReceivedBitrate = 5; + int statsDecoderInputFrameRate = 5; int statsDecoderOutputFrameRate = 5; int statsRendererOutputFrameRate = 5; int statsFrameLossRate = 5; @@ -2820,6 +2823,7 @@ void generatedTestCases(ValueGetter irisTester) { width: statsWidth, height: statsHeight, receivedBitrate: statsReceivedBitrate, + decoderInputFrameRate: statsDecoderInputFrameRate, decoderOutputFrameRate: statsDecoderOutputFrameRate, rendererOutputFrameRate: statsRendererOutputFrameRate, frameLossRate: statsFrameLossRate, @@ -6534,7 +6538,7 @@ void generatedTestCases(ValueGetter irisTester) { ); testWidgets( - 'RtcEngineEventHandler.onExtensionEvent', + 'RtcEngineEventHandler.onExtensionEventWithContext', (WidgetTester tester) async { RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( @@ -6543,11 +6547,11 @@ void generatedTestCases(ValueGetter irisTester) { )); await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); - final onExtensionEventCompleter = Completer(); + final onExtensionEventWithContextCompleter = Completer(); final theRtcEngineEventHandler = RtcEngineEventHandler( - onExtensionEvent: - (String provider, String extension, String key, String value) { - onExtensionEventCompleter.complete(true); + onExtensionEventWithContext: + (ExtensionContext context, String key, String value) { + onExtensionEventWithContextCompleter.complete(true); }, ); @@ -6559,34 +6563,42 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - String provider = "hello"; - String extension = "hello"; + bool contextIsValid = true; + int contextUid = 5; + String contextProviderName = "hello"; + String contextExtensionName = "hello"; + ExtensionContext context = ExtensionContext( + isValid: contextIsValid, + 
uid: contextUid, + providerName: contextProviderName, + extensionName: contextExtensionName, + ); String key = "hello"; String value = "hello"; final eventJson = { - 'provider': provider, - 'extension': extension, + 'context': context.toJson(), 'key': key, 'value': value, }; - final eventIds = - eventIdsMapping['RtcEngineEventHandler_onExtensionEvent'] ?? []; + final eventIds = eventIdsMapping[ + 'RtcEngineEventHandler_onExtensionEventWithContext'] ?? + []; for (final event in eventIds) { final ret = irisTester().fireEvent(event, params: eventJson); // Delay 200 milliseconds to ensure the callback is called. await Future.delayed(const Duration(milliseconds: 200)); // TODO(littlegnal): Most of callbacks on web are not implemented, we're temporarily skip these callbacks at this time. if (kIsWeb && ret) { - if (!onExtensionEventCompleter.isCompleted) { - onExtensionEventCompleter.complete(true); + if (!onExtensionEventWithContextCompleter.isCompleted) { + onExtensionEventWithContextCompleter.complete(true); } } } } - final eventCalled = await onExtensionEventCompleter.future; + final eventCalled = await onExtensionEventWithContextCompleter.future; expect(eventCalled, isTrue); { @@ -6603,7 +6615,7 @@ void generatedTestCases(ValueGetter irisTester) { ); testWidgets( - 'RtcEngineEventHandler.onExtensionStarted', + 'RtcEngineEventHandler.onExtensionStartedWithContext', (WidgetTester tester) async { RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( @@ -6612,10 +6624,10 @@ void generatedTestCases(ValueGetter irisTester) { )); await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); - final onExtensionStartedCompleter = Completer(); + final onExtensionStartedWithContextCompleter = Completer(); final theRtcEngineEventHandler = RtcEngineEventHandler( - onExtensionStarted: (String provider, String extension) { - onExtensionStartedCompleter.complete(true); + onExtensionStartedWithContext: (ExtensionContext context) { + 
onExtensionStartedWithContextCompleter.complete(true); }, ); @@ -6627,30 +6639,38 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - String provider = "hello"; - String extension = "hello"; + bool contextIsValid = true; + int contextUid = 5; + String contextProviderName = "hello"; + String contextExtensionName = "hello"; + ExtensionContext context = ExtensionContext( + isValid: contextIsValid, + uid: contextUid, + providerName: contextProviderName, + extensionName: contextExtensionName, + ); final eventJson = { - 'provider': provider, - 'extension': extension, + 'context': context.toJson(), }; - final eventIds = - eventIdsMapping['RtcEngineEventHandler_onExtensionStarted'] ?? []; + final eventIds = eventIdsMapping[ + 'RtcEngineEventHandler_onExtensionStartedWithContext'] ?? + []; for (final event in eventIds) { final ret = irisTester().fireEvent(event, params: eventJson); // Delay 200 milliseconds to ensure the callback is called. await Future.delayed(const Duration(milliseconds: 200)); // TODO(littlegnal): Most of callbacks on web are not implemented, we're temporarily skip these callbacks at this time. 
if (kIsWeb && ret) { - if (!onExtensionStartedCompleter.isCompleted) { - onExtensionStartedCompleter.complete(true); + if (!onExtensionStartedWithContextCompleter.isCompleted) { + onExtensionStartedWithContextCompleter.complete(true); } } } } - final eventCalled = await onExtensionStartedCompleter.future; + final eventCalled = await onExtensionStartedWithContextCompleter.future; expect(eventCalled, isTrue); { @@ -6667,7 +6687,7 @@ void generatedTestCases(ValueGetter irisTester) { ); testWidgets( - 'RtcEngineEventHandler.onExtensionStopped', + 'RtcEngineEventHandler.onExtensionStoppedWithContext', (WidgetTester tester) async { RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( @@ -6676,10 +6696,10 @@ void generatedTestCases(ValueGetter irisTester) { )); await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); - final onExtensionStoppedCompleter = Completer(); + final onExtensionStoppedWithContextCompleter = Completer(); final theRtcEngineEventHandler = RtcEngineEventHandler( - onExtensionStopped: (String provider, String extension) { - onExtensionStoppedCompleter.complete(true); + onExtensionStoppedWithContext: (ExtensionContext context) { + onExtensionStoppedWithContextCompleter.complete(true); }, ); @@ -6691,30 +6711,38 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - String provider = "hello"; - String extension = "hello"; + bool contextIsValid = true; + int contextUid = 5; + String contextProviderName = "hello"; + String contextExtensionName = "hello"; + ExtensionContext context = ExtensionContext( + isValid: contextIsValid, + uid: contextUid, + providerName: contextProviderName, + extensionName: contextExtensionName, + ); final eventJson = { - 'provider': provider, - 'extension': extension, + 'context': context.toJson(), }; - final eventIds = - eventIdsMapping['RtcEngineEventHandler_onExtensionStopped'] ?? 
[]; + final eventIds = eventIdsMapping[ + 'RtcEngineEventHandler_onExtensionStoppedWithContext'] ?? + []; for (final event in eventIds) { final ret = irisTester().fireEvent(event, params: eventJson); // Delay 200 milliseconds to ensure the callback is called. await Future.delayed(const Duration(milliseconds: 200)); // TODO(littlegnal): Most of callbacks on web are not implemented, we're temporarily skip these callbacks at this time. if (kIsWeb && ret) { - if (!onExtensionStoppedCompleter.isCompleted) { - onExtensionStoppedCompleter.complete(true); + if (!onExtensionStoppedWithContextCompleter.isCompleted) { + onExtensionStoppedWithContextCompleter.complete(true); } } } } - final eventCalled = await onExtensionStoppedCompleter.future; + final eventCalled = await onExtensionStoppedWithContextCompleter.future; expect(eventCalled, isTrue); { @@ -6731,7 +6759,7 @@ void generatedTestCases(ValueGetter irisTester) { ); testWidgets( - 'RtcEngineEventHandler.onExtensionError', + 'RtcEngineEventHandler.onExtensionErrorWithContext', (WidgetTester tester) async { RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( @@ -6740,11 +6768,11 @@ void generatedTestCases(ValueGetter irisTester) { )); await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); - final onExtensionErrorCompleter = Completer(); + final onExtensionErrorWithContextCompleter = Completer(); final theRtcEngineEventHandler = RtcEngineEventHandler( - onExtensionError: - (String provider, String extension, int error, String message) { - onExtensionErrorCompleter.complete(true); + onExtensionErrorWithContext: + (ExtensionContext context, int error, String message) { + onExtensionErrorWithContextCompleter.complete(true); }, ); @@ -6756,34 +6784,42 @@ void generatedTestCases(ValueGetter irisTester) { await Future.delayed(const Duration(milliseconds: 500)); { - String provider = "hello"; - String extension = "hello"; + bool contextIsValid = true; + int contextUid = 5; + String 
contextProviderName = "hello"; + String contextExtensionName = "hello"; + ExtensionContext context = ExtensionContext( + isValid: contextIsValid, + uid: contextUid, + providerName: contextProviderName, + extensionName: contextExtensionName, + ); int error = 5; String message = "hello"; final eventJson = { - 'provider': provider, - 'extension': extension, + 'context': context.toJson(), 'error': error, 'message': message, }; - final eventIds = - eventIdsMapping['RtcEngineEventHandler_onExtensionError'] ?? []; + final eventIds = eventIdsMapping[ + 'RtcEngineEventHandler_onExtensionErrorWithContext'] ?? + []; for (final event in eventIds) { final ret = irisTester().fireEvent(event, params: eventJson); // Delay 200 milliseconds to ensure the callback is called. await Future.delayed(const Duration(milliseconds: 200)); // TODO(littlegnal): Most of callbacks on web are not implemented, we're temporarily skip these callbacks at this time. if (kIsWeb && ret) { - if (!onExtensionErrorCompleter.isCompleted) { - onExtensionErrorCompleter.complete(true); + if (!onExtensionErrorWithContextCompleter.isCompleted) { + onExtensionErrorWithContextCompleter.complete(true); } } } } - final eventCalled = await onExtensionErrorCompleter.future; + final eventCalled = await onExtensionErrorWithContextCompleter.future; expect(eventCalled, isTrue); { diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart index 3edf082f9..cf29ceab7 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart @@ -72,6 +72,7 @@ void rtcEngineExSmokeTestCases() { bool optionsIsInteractiveAudience = true; int optionsCustomVideoTrackId = 5; bool optionsIsAudioFilterable = true; + String optionsParameters = "hello"; ChannelMediaOptions options = 
ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, @@ -109,6 +110,7 @@ void rtcEngineExSmokeTestCases() { isInteractiveAudience: optionsIsInteractiveAudience, customVideoTrackId: optionsCustomVideoTrackId, isAudioFilterable: optionsIsAudioFilterable, + parameters: optionsParameters, ); await rtcEngineEx.joinChannelEx( token: token, @@ -181,6 +183,53 @@ void rtcEngineExSmokeTestCases() { // skip: !(), ); + testWidgets( + 'RtcEngineEx.leaveChannelWithUserAccountEx', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngineEx rtcEngineEx = createAgoraRtcEngineEx(); + await rtcEngineEx.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); + + try { + String channelId = "hello"; + String userAccount = "hello"; + bool optionsStopAudioMixing = true; + bool optionsStopAllEffect = true; + bool optionsStopMicrophoneRecording = true; + LeaveChannelOptions options = LeaveChannelOptions( + stopAudioMixing: optionsStopAudioMixing, + stopAllEffect: optionsStopAllEffect, + stopMicrophoneRecording: optionsStopMicrophoneRecording, + ); + await rtcEngineEx.leaveChannelWithUserAccountEx( + channelId: channelId, + userAccount: userAccount, + options: options, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngineEx.leaveChannelWithUserAccountEx] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await rtcEngineEx.release(); + }, +// skip: !(), + ); + testWidgets( 'RtcEngineEx.updateChannelMediaOptionsEx', (WidgetTester tester) async { @@ -235,6 +284,7 @@ void rtcEngineExSmokeTestCases() { bool optionsIsInteractiveAudience = true; int optionsCustomVideoTrackId = 5; bool optionsIsAudioFilterable = true; + String optionsParameters = "hello"; ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, @@ -272,6 +322,7 @@ void rtcEngineExSmokeTestCases() { isInteractiveAudience: optionsIsInteractiveAudience, customVideoTrackId: optionsCustomVideoTrackId, isAudioFilterable: optionsIsAudioFilterable, + parameters: optionsParameters, ); String connectionChannelId = "hello"; int connectionLocalUid = 5; @@ -325,16 +376,18 @@ void rtcEngineExSmokeTestCases() { OrientationMode configOrientationMode = OrientationMode.orientationModeAdaptive; DegradationPreference configDegradationPreference = - DegradationPreference.maintainQuality; + DegradationPreference.maintainAuto; VideoMirrorModeType configMirrorMode = VideoMirrorModeType.videoMirrorModeAuto; EncodingPreference advanceOptionsEncodingPreference = EncodingPreference.preferAuto; CompressionPreference advanceOptionsCompressionPreference = - CompressionPreference.preferLowLatency; + CompressionPreference.preferCompressionAuto; + bool advanceOptionsEncodeAlpha = true; AdvanceOptions configAdvanceOptions = AdvanceOptions( encodingPreference: advanceOptionsEncodingPreference, compressionPreference: advanceOptionsCompressionPreference, + encodeAlpha: advanceOptionsEncodeAlpha, ); int configFrameRate = 5; int configBitrate = 5; @@ -2106,6 +2159,52 @@ void rtcEngineExSmokeTestCases() { // skip: !(), ); + testWidgets( + 'RtcEngineEx.setSimulcastConfigEx', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngineEx rtcEngineEx 
= createAgoraRtcEngineEx(); + await rtcEngineEx.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); + + try { + List simulcastConfigConfigs = []; + SimulcastConfig simulcastConfig = SimulcastConfig( + configs: simulcastConfigConfigs, + ); + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( + channelId: connectionChannelId, + localUid: connectionLocalUid, + ); + await rtcEngineEx.setSimulcastConfigEx( + simulcastConfig: simulcastConfig, + connection: connection, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngineEx.setSimulcastConfigEx] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngineEx.release(); + }, +// skip: !(), + ); + testWidgets( 'RtcEngineEx.takeSnapshotEx', (WidgetTester tester) async { @@ -2371,4 +2470,55 @@ void rtcEngineExSmokeTestCases() { }, // skip: !(), ); + + testWidgets( + 'RtcEngineEx.takeSnapshotWithConfigEx', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngineEx rtcEngineEx = createAgoraRtcEngineEx(); + await rtcEngineEx.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); + + try { + String connectionChannelId = "hello"; + int connectionLocalUid = 5; + RtcConnection connection = RtcConnection( + channelId: connectionChannelId, + localUid: connectionLocalUid, + ); + int uid = 5; + VideoModulePosition configPosition = + VideoModulePosition.positionPostCapturer; + String configFilePath = "hello"; + SnapshotConfig config = SnapshotConfig( + filePath: configFilePath, + position: configPosition, + ); + await rtcEngineEx.takeSnapshotWithConfigEx( + 
connection: connection, + uid: uid, + config: config, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngineEx.takeSnapshotWithConfigEx] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngineEx.release(); + }, +// skip: !(), + ); } diff --git a/test_shard/integration_test_app/android/build.gradle b/test_shard/integration_test_app/android/build.gradle index db6122536..f0df1b78b 100644 --- a/test_shard/integration_test_app/android/build.gradle +++ b/test_shard/integration_test_app/android/build.gradle @@ -1,6 +1,6 @@ buildscript { // Min kotlin version for Flutter SDK 3.24 - ext.kotlin_version = '1.7.0' + ext.kotlin_version = '1.9.10' repositories { google() mavenCentral() diff --git a/test_shard/integration_test_app/ios/Flutter/AppFrameworkInfo.plist b/test_shard/integration_test_app/ios/Flutter/AppFrameworkInfo.plist index 9625e105d..7c5696400 100644 --- a/test_shard/integration_test_app/ios/Flutter/AppFrameworkInfo.plist +++ b/test_shard/integration_test_app/ios/Flutter/AppFrameworkInfo.plist @@ -21,6 +21,6 @@ CFBundleVersion 1.0 MinimumOSVersion - 11.0 + 12.0 diff --git a/test_shard/integration_test_app/ios/Podfile b/test_shard/integration_test_app/ios/Podfile index 80f20ebef..f1e36b1b5 100644 --- a/test_shard/integration_test_app/ios/Podfile +++ b/test_shard/integration_test_app/ios/Podfile @@ -1,5 +1,5 @@ # Uncomment this line to define a global platform for your project -# platform :ios, '11.0' +# platform :ios, '12.0' # CocoaPods analytics sends network stats synchronously affecting flutter build latency. 
ENV['COCOAPODS_DISABLE_STATS'] = 'true' diff --git a/test_shard/integration_test_app/ios/Runner.xcodeproj/project.pbxproj b/test_shard/integration_test_app/ios/Runner.xcodeproj/project.pbxproj index 192c86b22..62d395549 100644 --- a/test_shard/integration_test_app/ios/Runner.xcodeproj/project.pbxproj +++ b/test_shard/integration_test_app/ios/Runner.xcodeproj/project.pbxproj @@ -3,7 +3,7 @@ archiveVersion = 1; classes = { }; - objectVersion = 50; + objectVersion = 54; objects = { /* Begin PBXBuildFile section */ @@ -76,7 +76,6 @@ 1214EC7B36F09712D06E5572 /* Pods-Runner.release.xcconfig */, 82BEC1199950B363131A87D4 /* Pods-Runner.profile.xcconfig */, ); - name = Pods; path = Pods; sourceTree = ""; }; @@ -156,7 +155,7 @@ 97C146E61CF9000F007C117D /* Project object */ = { isa = PBXProject; attributes = { - LastUpgradeCheck = 1300; + LastUpgradeCheck = 1510; ORGANIZATIONNAME = ""; TargetAttributes = { 97C146ED1CF9000F007C117D = { @@ -239,10 +238,12 @@ }; 3B06AD1E1E4923F5004D2608 /* Thin Binary */ = { isa = PBXShellScriptBuildPhase; + alwaysOutOfDate = 1; buildActionMask = 2147483647; files = ( ); inputPaths = ( + "${TARGET_BUILD_DIR}/${INFOPLIST_PATH}", ); name = "Thin Binary"; outputPaths = ( @@ -253,6 +254,7 @@ }; 9740EEB61CF901F6004384FC /* Run Script */ = { isa = PBXShellScriptBuildPhase; + alwaysOutOfDate = 1; buildActionMask = 2147483647; files = ( ); @@ -340,7 +342,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 11.0; + IPHONEOS_DEPLOYMENT_TARGET = 12.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; @@ -418,7 +420,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 11.0; + IPHONEOS_DEPLOYMENT_TARGET = 12.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; @@ -467,7 +469,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = 
YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 11.0; + IPHONEOS_DEPLOYMENT_TARGET = 12.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; diff --git a/test_shard/integration_test_app/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/test_shard/integration_test_app/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme index c87d15a33..5e31d3d34 100644 --- a/test_shard/integration_test_app/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme +++ b/test_shard/integration_test_app/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -1,6 +1,6 @@ + CADisableMinimumFrameDurationOnPhone + CFBundleDevelopmentRegion $(DEVELOPMENT_LANGUAGE) CFBundleDisplayName @@ -24,6 +26,12 @@ $(FLUTTER_BUILD_NUMBER) LSRequiresIPhoneOS + NSCameraUsageDescription + CAMERA + NSMicrophoneUsageDescription + MIC + UIApplicationSupportsIndirectInputEvents + UILaunchStoryboardName LaunchScreen UIMainStoryboardFile @@ -43,7 +51,5 @@ UIViewControllerBasedStatusBarAppearance - CADisableMinimumFrameDurationOnPhone - diff --git a/test_shard/iris_tester/android/build.gradle b/test_shard/iris_tester/android/build.gradle index 5c7a1c019..9ed52b4e5 100644 --- a/test_shard/iris_tester/android/build.gradle +++ b/test_shard/iris_tester/android/build.gradle @@ -3,7 +3,7 @@ version '1.0-SNAPSHOT' buildscript { // Min kotlin version for Flutter SDK 3.24 - ext.kotlin_version = '1.7.0' + ext.kotlin_version = '1.9.10' repositories { google() mavenCentral() diff --git a/test_shard/iris_tester/example/android/build.gradle b/test_shard/iris_tester/example/android/build.gradle index dcaf1f21f..21a9f2476 100644 --- a/test_shard/iris_tester/example/android/build.gradle +++ b/test_shard/iris_tester/example/android/build.gradle @@ -1,6 +1,6 @@ buildscript { // Min kotlin version for Flutter SDK 3.24 - ext.kotlin_version = '1.7.0' + ext.kotlin_version = '1.9.10' repositories { google() 
mavenCentral() diff --git a/test_shard/rendering_test/android/build.gradle b/test_shard/rendering_test/android/build.gradle index db6122536..f0df1b78b 100644 --- a/test_shard/rendering_test/android/build.gradle +++ b/test_shard/rendering_test/android/build.gradle @@ -1,6 +1,6 @@ buildscript { // Min kotlin version for Flutter SDK 3.24 - ext.kotlin_version = '1.7.0' + ext.kotlin_version = '1.9.10' repositories { google() mavenCentral() diff --git a/tool/terra/package.json b/tool/terra/package.json index 05d896a19..0c03a9ba3 100644 --- a/tool/terra/package.json +++ b/tool/terra/package.json @@ -21,5 +21,5 @@ "ts-node": "^10.9.1", "typescript": "^5.1.6" }, - "packageManager": "yarn@4.3.0" + "packageManager": "yarn@4.5.1" } diff --git a/tool/terra/terra_config_main.yaml b/tool/terra/terra_config_main.yaml index b34148281..0e1f8b7f8 100644 --- a/tool/terra/terra_config_main.yaml +++ b/tool/terra/terra_config_main.yaml @@ -3,17 +3,18 @@ parsers: package: '@agoraio-extensions/cxx-parser' args: includeHeaderDirs: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.5.0/include' parseFiles: include: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/*.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.5.0/include/*.h' exclude: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/AgoraRefPtr.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/time_utils.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/AgoraOptional.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/AgoraRefPtr.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/IAgoraMediaComponentFactory.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/IAgoraParameter.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.5.0/include/AgoraRefPtr.h' + - 
'@agoraio-extensions/terra_shared_configs:headers/rtc_4.5.0/include/time_utils.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.5.0/include/AgoraOptional.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.5.0/include/AgoraRefPtr.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.5.0/include/IAgoraMediaComponentFactory.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.5.0/include/IAgoraParameter.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.5.0/include/rte*.h' - name: IrisApiIdParser package: '@agoraio-extensions/terra_shared_configs' @@ -23,14 +24,15 @@ parsers: args: customHeaderFileNamePrefix: 'Custom' includeHeaderDirs: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.5.0/include' parseFiles: include: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/*.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.5.0/include/*.h' - '@agoraio-extensions/terra_shared_configs:headers/custom_headers/*.h' exclude: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/time_utils.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.2/include/IAgoraMediaComponentFactory.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.5.0/include/time_utils.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.5.0/include/IAgoraMediaComponentFactory.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.5.0/include/rte*.h' - path: parsers/cud_node_parser.ts args: diff --git a/windows/CMakeLists.txt b/windows/CMakeLists.txt index 3c0732e30..a9bc1abe8 100644 --- a/windows/CMakeLists.txt +++ b/windows/CMakeLists.txt @@ -12,8 +12,8 @@ project(${PROJECT_NAME} LANGUAGES CXX) # not be changed set(PLUGIN_NAME "agora_rtc_engine_plugin") -set(IRIS_SDK_DOWNLOAD_URL "https://download.agora.io/sdk/release/iris_4.3.2-build.1_DCG_Windows_Video_20240604_0456_441.zip") 
-set(IRIS_SDK_DOWNLOAD_NAME "iris_4.3.2-build.1_DCG_Windows") +set(IRIS_SDK_DOWNLOAD_URL "https://download.agora.io/sdk/release/iris_4.5.0-build.1_DCG_Windows_Video_20241203_0322_577.zip") +set(IRIS_SDK_DOWNLOAD_NAME "iris_4.5.0-build.1_DCG_Windows") set(RTC_SDK_DOWNLOAD_NAME "Agora_Native_SDK_for_Windows_FULL") set(IRIS_SDK_VERSION "v3_6_2_fix.1")