diff --git a/dita/RTC-NG/API/api_rte_destroy.dita b/dita/RTC-NG/API/api_rte_destroy.dita
new file mode 100644
index 00000000000..47cf09e782e
--- /dev/null
+++ b/dita/RTC-NG/API/api_rte_destroy.dita
@@ -0,0 +1,58 @@
+
+
+
+
+ 销毁 RTE 对象。
+
+
+
+
+
+
+
+
+
+
+ public synchronized void destroy() throws RteException;
+ - (BOOL)destroy:(AgoraRteError * _Nullable)error;
+ bool Destroy(Error *err = nullptr){
+ return RteDestroy(&c_rte, err != nullptr ? err->get_underlying_impl() : nullptr);
+ }
+
+
+
+
+
+
+
+
+
+ - 自从
+ - v4.5.0
+
+
+ 该方法释放 RTE 对象使用的所有资源。
+
+
+
+
+
+ 方法成功调用时,无返回值;方法调用失败时,会抛出 异常,你需要捕获异常并进行处理。详见 ,了解详情和解决建议。
+ 销毁 RTE 对象是否成功:
+
+
+
+
diff --git a/dita/RTC-NG/API/class_videoframe.dita b/dita/RTC-NG/API/class_videoframe.dita
index 4d67fedea84..41b754df902 100644
--- a/dita/RTC-NG/API/class_videoframe.dita
+++ b/dita/RTC-NG/API/class_videoframe.dita
@@ -7,45 +7,45 @@
public class VideoFrame implements RefCounted {
-
+
public interface Buffer extends RefCounted {
-
+
@CalledByNative("Buffer") int getWidth();
-
+
@CalledByNative("Buffer") int getHeight();
-
+
@CalledByNative("Buffer") I420Buffer toI420();
-
+
@Override @CalledByNative("Buffer") void release();
-
+
@Override @CalledByNative("Buffer") void retain();
-
+
@CalledByNative("Buffer")
Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight);
-
+
@CalledByNative("Buffer") @Nullable Buffer mirror(int frameRotation);
-
+
@CalledByNative("Buffer") @Nullable Buffer rotate(int frameRotation);
-
+
@CalledByNative("Buffer")
@Nullable
Buffer transform(int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth,
int scaleHeight, int frameRotation);
}
-
+
public interface I420Buffer extends Buffer {
-
+
@CalledByNative("I420Buffer") ByteBuffer getDataY();
-
+
@CalledByNative("I420Buffer") ByteBuffer getDataU();
-
+
@CalledByNative("I420Buffer") ByteBuffer getDataV();
@CalledByNative("I420Buffer") int getStrideY();
@CalledByNative("I420Buffer") int getStrideU();
@CalledByNative("I420Buffer") int getStrideV();
}
-
+
public interface I422Buffer extends Buffer {
@CalledByNative("I422Buffer") ByteBuffer getDataY();
@CalledByNative("I422Buffer") ByteBuffer getDataU();
@@ -55,13 +55,13 @@
@CalledByNative("I422Buffer") int getStrideV();
}
public interface RgbaBuffer extends Buffer { @CalledByNative("RgbaBuffer") ByteBuffer getData(); }
-
+
public interface TextureBuffer extends Buffer {
-
+
enum Type {
-
+
OES(GLES11Ext.GL_TEXTURE_EXTERNAL_OES),
-
+
RGB(GLES20.GL_TEXTURE_2D);
private final int glTarget;
private Type(final int glTarget) {
@@ -76,17 +76,17 @@
EGL_CONTEXT_14;
}
Type getType();
-
+
@CalledByNative("TextureBuffer") int getTextureId();
-
+
Matrix getTransformMatrix();
-
+
@CalledByNative("TextureBuffer") EglBase.Context getEglBaseContext();
@CalledByNative("TextureBuffer") Object getSourceTexturePool();
@CalledByNative("TextureBuffer") long getNativeEglContext();
@CalledByNative("TextureBuffer") int getEglContextType();
@CalledByNative("TextureBuffer") float[] getTransformMatrixArray();
-
+
@CalledByNative("TextureBuffer") int getSequence();
@CalledByNative("TextureBuffer") long getFenceObject();
@CalledByNative("TextureBuffer") boolean is10BitTexture();
@@ -198,22 +198,22 @@
return stitchMode;
}
}
-
+
private Buffer buffer;
-
+
private int rotation;
-
+
private long timestampNs;
private ColorSpace colorSpace;
private SourceType sourceType;
private float sampleAspectRatio;
-
+
private AlphaStitchMode alphaStitchMode = AlphaStitchMode.ALPHA_NO_STITCH;
private VideoFrameMetaInfo metaInfo = new VideoFrameMetaInfo();
-
+
private @Nullable ByteBuffer alphaBuffer;
private long nativeAlphaBuffer;
-
+
public VideoFrame(Buffer buffer, int rotation, long timestampNs) {
this(buffer, rotation, timestampNs, new WrappedNativeColorSpace(), null, 0L, 1.0f,
SourceType.kUnspecified.ordinal());
@@ -243,12 +243,12 @@
public float getSampleAspectRatio() {
return sampleAspectRatio;
}
-
+
@CalledByNative
public Buffer getBuffer() {
return buffer;
}
-
+
@CalledByNative
public int getRotation() {
return rotation;
@@ -261,7 +261,7 @@
public void setAlphaStitchMode(int stitchMode) {
alphaStitchMode = AlphaStitchMode.values()[stitchMode];
}
-
+
@CalledByNative
public long getTimestampNs() {
return timestampNs;
@@ -270,7 +270,7 @@
public VideoFrameMetaInfo getMetaInfo() {
return metaInfo;
}
-
+
public int getRotatedWidth() {
if (rotation % 180 == 0) {
return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_LEFT
@@ -283,7 +283,7 @@
? buffer.getHeight() / 2
: buffer.getHeight();
}
-
+
public int getRotatedHeight() {
if (rotation % 180 == 0) {
return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_UP
@@ -296,7 +296,7 @@
? buffer.getWidth() / 2
: buffer.getWidth();
}
-
+
public void replaceBuffer(Buffer buffer, int rotation, long timestampNs) {
release();
this.buffer = buffer;
@@ -351,12 +351,12 @@
public void fillAlphaData(ByteBuffer buffer) {
alphaBuffer = buffer;
}
-
+
@Override
public void retain() {
buffer.retain();
}
-
+
@Override
@CalledByNative
public void release() {
@@ -431,51 +431,51 @@ struct VideoFrame {
metaInfo(NULL){
memset(matrix, 0, sizeof(matrix));
}
-
+
VIDEO_PIXEL_FORMAT type;
-
+
int width;
-
+
int height;
-
+
int yStride;
-
+
int uStride;
-
+
int vStride;
-
+
uint8_t* yBuffer;
-
+
uint8_t* uBuffer;
-
+
uint8_t* vBuffer;
-
+
int rotation;
-
+
int64_t renderTimeMs;
-
+
int avsync_type;
-
+
uint8_t* metadata_buffer;
-
+
int metadata_size;
-
+
void* sharedContext;
-
+
int textureId;
-
+
void* d3d11Texture2d;
-
+
float matrix[16];
-
+
uint8_t* alphaBuffer;
-
+
ALPHA_STITCH_MODE alphaStitchMode;
-
+
void* pixelBuffer;
-
+
IVideoFrameMetaInfo* metaInfo;
-
+
ColorSpace colorSpace;
};
USTRUCT(BlueprintType)
@@ -752,19 +752,6 @@ class VideoFrame {
缓冲区给出的是指向指针的指针,该接口不能修改缓冲区的指针,只能修改缓冲区的内容。
-
- 方法
-
-
- getColorSpace
- 获取视频帧的色彩空间属性。
-
-
- setColorSpace
- 设置视频帧的色彩空间属性。
-
-
-
@@ -891,7 +878,14 @@ class VideoFrame {
colorSpace
- 视频帧的色彩空间属性,默认情况下会应用 Full Range 和 BT.709 标准配置。你可以根据自定义采集、自定义渲染的业务需求进行自定义设置,详见 。
+ 视频帧的色彩空间属性,默认情况下会应用 Full Range 和 BT.709 标准配置。你可以根据自定义采集、自定义渲染的业务需求进行自定义设置,详见 。
+ 视频帧的色彩空间属性,默认情况下会应用 Full Range 和 BT.709 标准配置。你可以根据自定义采集、自定义渲染的业务需求进行自定义设置,详见 。与该参数相关的方法如下所示:
+
+ - getColorSpace:获取视频帧的色彩空间属性。
+ - setColorSpace:设置视频帧的色彩空间属性。
+
sourceType
diff --git a/dita/RTC-NG/RTC_NG_API_Android.ditamap b/dita/RTC-NG/RTC_NG_API_Android.ditamap
index 7bb07b2d790..96b00956087 100644
--- a/dita/RTC-NG/RTC_NG_API_Android.ditamap
+++ b/dita/RTC-NG/RTC_NG_API_Android.ditamap
@@ -469,6 +469,7 @@
+
diff --git a/dita/RTC-NG/RTC_NG_API_CPP.ditamap b/dita/RTC-NG/RTC_NG_API_CPP.ditamap
index 44343521f9e..fbeb9c2d459 100644
--- a/dita/RTC-NG/RTC_NG_API_CPP.ditamap
+++ b/dita/RTC-NG/RTC_NG_API_CPP.ditamap
@@ -469,6 +469,7 @@
+
diff --git a/dita/RTC-NG/RTC_NG_API_iOS.ditamap b/dita/RTC-NG/RTC_NG_API_iOS.ditamap
index 8a4c15f005e..4b5ccef98a7 100644
--- a/dita/RTC-NG/RTC_NG_API_iOS.ditamap
+++ b/dita/RTC-NG/RTC_NG_API_iOS.ditamap
@@ -467,6 +467,7 @@
+
diff --git a/dita/RTC-NG/RTC_NG_API_macOS.ditamap b/dita/RTC-NG/RTC_NG_API_macOS.ditamap
index 684718635ac..9f2737cca53 100644
--- a/dita/RTC-NG/RTC_NG_API_macOS.ditamap
+++ b/dita/RTC-NG/RTC_NG_API_macOS.ditamap
@@ -459,6 +459,7 @@
+
diff --git a/dita/RTC-NG/config/keys-rtc-ng-api-cpp.ditamap b/dita/RTC-NG/config/keys-rtc-ng-api-cpp.ditamap
index 13f50fb975d..8be293ba31c 100644
--- a/dita/RTC-NG/config/keys-rtc-ng-api-cpp.ditamap
+++ b/dita/RTC-NG/config/keys-rtc-ng-api-cpp.ditamap
@@ -1008,6 +1008,13 @@
+
+
+
+ Destroy
+
+
+
diff --git a/dita/RTC-NG/config/keys-rtc-ng-api-ios.ditamap b/dita/RTC-NG/config/keys-rtc-ng-api-ios.ditamap
index 5c4990327cf..f8a329a2ddb 100644
--- a/dita/RTC-NG/config/keys-rtc-ng-api-ios.ditamap
+++ b/dita/RTC-NG/config/keys-rtc-ng-api-ios.ditamap
@@ -905,6 +905,13 @@
+
+
+
+ destroy:
+
+
+
diff --git a/dita/RTC-NG/config/keys-rtc-ng-api-java.ditamap b/dita/RTC-NG/config/keys-rtc-ng-api-java.ditamap
index 00946ab6624..baf188eaf26 100644
--- a/dita/RTC-NG/config/keys-rtc-ng-api-java.ditamap
+++ b/dita/RTC-NG/config/keys-rtc-ng-api-java.ditamap
@@ -945,6 +945,13 @@
+
+
+
+ destroy
+
+
+
diff --git a/dita/RTC-NG/config/keys-rtc-ng-api-macos.ditamap b/dita/RTC-NG/config/keys-rtc-ng-api-macos.ditamap
index 11e23b989a4..8029e0f5244 100644
--- a/dita/RTC-NG/config/keys-rtc-ng-api-macos.ditamap
+++ b/dita/RTC-NG/config/keys-rtc-ng-api-macos.ditamap
@@ -862,6 +862,13 @@
+
+
+
+ destroy:
+
+
+
diff --git a/dita/RTC-NG/config/relations-rtc-ng-api.ditamap b/dita/RTC-NG/config/relations-rtc-ng-api.ditamap
index 5fb4608f5ee..d4207a2265c 100644
--- a/dita/RTC-NG/config/relations-rtc-ng-api.ditamap
+++ b/dita/RTC-NG/config/relations-rtc-ng-api.ditamap
@@ -3312,6 +3312,7 @@
+
diff --git a/dita/RTC-NG/release/release_notes.dita b/dita/RTC-NG/release/release_notes.dita
index 6f29cabb064..fe93f018b77 100644
--- a/dita/RTC-NG/release/release_notes.dita
+++ b/dita/RTC-NG/release/release_notes.dita
@@ -67,7 +67,8 @@
自该版本起,SDK 支持自动安装虚拟声卡功能,当你第一次调用 时,SDK 会自动安装内置的声网自研虚拟声卡 AgoraALD。安装成功后,音频路由会自动切换为虚拟声卡,同时使用虚拟声卡进行采集。
强效视频降噪实现变更
- 该版本调整了强效视频降噪的实现方式,在 中删除了 ,改为调用 启用视频降噪后,调用 方法启用美颜磨皮功能,二者搭配使用以获得更好的视频降噪效果。实现强效降噪时,推荐磨皮参数设置详见 。
+ 该版本调整了强效视频降噪的实现方式,在 中删除了 ,改为调用 启用视频降噪后,调用 方法启用美颜磨皮功能,二者搭配使用以获得更好的视频降噪效果。实现强效降噪时,推荐磨皮参数设置详见 。
+ 该版本调整了强效视频降噪的实现方式,删除了 ,改为调用 启用视频降噪后,调用 方法启用美颜磨皮功能,二者搭配使用以获得更好的视频降噪效果。实现强效降噪时,推荐磨皮参数设置详见 。
此外,受该调整影响,实现画质优先的暗光增强效果时,需要先开启视频降噪并采用特定设置,详见 。
摄像头插拔状态变更 (macOS, Windows)
@@ -118,7 +119,7 @@
本地音频合流
该版本新增本地音频合流功能,你可以调用 方法,将本地麦克风采集的音频流、媒体播放器中的音频流、声卡采集的音频流、远端音频流等合并为一路音频流,你可以将合并后的音频流发布到频道中。不再需要音频合流时,你可以调用 方法停止本地音频合流;在合流过程中,可以调用 方法更新正在合流的音频流配置。
- 该功能的使用场景示例如下:
+
该功能的使用场景示例如下:
- 结合本地合图功能一起使用,可将合图视频流相关的音频流同步采集和发布。
- 在直播场景下,用户接收频道内的音频流,在本地进行多路音频流合流后转发到其他频道。
@@ -133,8 +134,7 @@
该版本新增了 方法,用于设置远端视频流渲染的 EGL 环境上下文。使用 Texture 格式的视频数据进行远端视频自渲染时,你可以通过该方法替换 SDK 默认的远端 EGL 环境上下文,实现统一的 EGL 上下文管理。
色彩空间设置
- 该版本在 中新增了 和 ,你可以通过 获取视频帧的色彩空间属性,并通过 对其进行自定义设置。默认情况下,色彩空间采用 Full Range 和 BT.709 标准配置,开发者可以根据自采集或自渲染的需求灵活调整,进一步提升视频处理的定制化能力。
- 该版本在 和 中新增了 colorSpace,你可以通过该参数设置视频帧的色彩空间属性。默认情况下,色彩空间采用 Full Range 和 BT.709 标准配置,你可以根据自采集或自渲染的需求灵活调整,进一步提升视频处理的定制化能力。
+ 该版本在 和 中新增了 colorSpace,你可以通过该参数设置视频帧的色彩空间属性。默认情况下,色彩空间采用 Full Range 和 BT.709 标准配置,你可以根据自采集或自渲染的需求灵活调整,进一步提升视频处理的定制化能力。
其他
diff --git a/en-US/dita/RTC-NG/API/api_rte_destroy.dita b/en-US/dita/RTC-NG/API/api_rte_destroy.dita
new file mode 100644
index 00000000000..32a0e83d9e5
--- /dev/null
+++ b/en-US/dita/RTC-NG/API/api_rte_destroy.dita
@@ -0,0 +1,58 @@
+
+
+
+
+ Destroys an RTE object.
+
+
+
+
+
+
+
+
+
+
+ public synchronized void destroy() throws RteException;
+ - (BOOL)destroy:(AgoraRteError * _Nullable)error;
+ bool Destroy(Error *err = nullptr){
+ return RteDestroy(&c_rte, err != nullptr ? err->get_underlying_impl() : nullptr);
+ }
+
+
+
+
+
+
+
+
+
+ - Since
+ - v4.5.0
+
+
+ This method releases all resources used by the RTE object.
+
+
+
+
+
+ When the method call succeeds, there is no return value; when it fails, an exception is thrown. You need to catch the exception and handle it accordingly. See for details and resolution suggestions.
+ Whether the RTE object is successfully destroyed:
+ - : The RTE object is destroyed.
+ - : The RTE object is not destroyed.
+
+
+
+
+
diff --git a/en-US/dita/RTC-NG/API/class_videoframe.dita b/en-US/dita/RTC-NG/API/class_videoframe.dita
index 9a1bb22e2e1..675475f9c7b 100644
--- a/en-US/dita/RTC-NG/API/class_videoframe.dita
+++ b/en-US/dita/RTC-NG/API/class_videoframe.dita
@@ -7,45 +7,45 @@
public class VideoFrame implements RefCounted {
-
+
public interface Buffer extends RefCounted {
-
+
@CalledByNative("Buffer") int getWidth();
-
+
@CalledByNative("Buffer") int getHeight();
-
+
@CalledByNative("Buffer") I420Buffer toI420();
-
+
@Override @CalledByNative("Buffer") void release();
-
+
@Override @CalledByNative("Buffer") void retain();
-
+
@CalledByNative("Buffer")
Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight);
-
+
@CalledByNative("Buffer") @Nullable Buffer mirror(int frameRotation);
-
+
@CalledByNative("Buffer") @Nullable Buffer rotate(int frameRotation);
-
+
@CalledByNative("Buffer")
@Nullable
Buffer transform(int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth,
int scaleHeight, int frameRotation);
}
-
+
public interface I420Buffer extends Buffer {
-
+
@CalledByNative("I420Buffer") ByteBuffer getDataY();
-
+
@CalledByNative("I420Buffer") ByteBuffer getDataU();
-
+
@CalledByNative("I420Buffer") ByteBuffer getDataV();
@CalledByNative("I420Buffer") int getStrideY();
@CalledByNative("I420Buffer") int getStrideU();
@CalledByNative("I420Buffer") int getStrideV();
}
-
+
public interface I422Buffer extends Buffer {
@CalledByNative("I422Buffer") ByteBuffer getDataY();
@CalledByNative("I422Buffer") ByteBuffer getDataU();
@@ -55,13 +55,13 @@
@CalledByNative("I422Buffer") int getStrideV();
}
public interface RgbaBuffer extends Buffer { @CalledByNative("RgbaBuffer") ByteBuffer getData(); }
-
+
public interface TextureBuffer extends Buffer {
-
+
enum Type {
-
+
OES(GLES11Ext.GL_TEXTURE_EXTERNAL_OES),
-
+
RGB(GLES20.GL_TEXTURE_2D);
private final int glTarget;
private Type(final int glTarget) {
@@ -76,17 +76,17 @@
EGL_CONTEXT_14;
}
Type getType();
-
+
@CalledByNative("TextureBuffer") int getTextureId();
-
+
Matrix getTransformMatrix();
-
+
@CalledByNative("TextureBuffer") EglBase.Context getEglBaseContext();
@CalledByNative("TextureBuffer") Object getSourceTexturePool();
@CalledByNative("TextureBuffer") long getNativeEglContext();
@CalledByNative("TextureBuffer") int getEglContextType();
@CalledByNative("TextureBuffer") float[] getTransformMatrixArray();
-
+
@CalledByNative("TextureBuffer") int getSequence();
@CalledByNative("TextureBuffer") long getFenceObject();
@CalledByNative("TextureBuffer") boolean is10BitTexture();
@@ -198,22 +198,22 @@
return stitchMode;
}
}
-
+
private Buffer buffer;
-
+
private int rotation;
-
+
private long timestampNs;
private ColorSpace colorSpace;
private SourceType sourceType;
private float sampleAspectRatio;
-
+
private AlphaStitchMode alphaStitchMode = AlphaStitchMode.ALPHA_NO_STITCH;
private VideoFrameMetaInfo metaInfo = new VideoFrameMetaInfo();
-
+
private @Nullable ByteBuffer alphaBuffer;
private long nativeAlphaBuffer;
-
+
public VideoFrame(Buffer buffer, int rotation, long timestampNs) {
this(buffer, rotation, timestampNs, new WrappedNativeColorSpace(), null, 0L, 1.0f,
SourceType.kUnspecified.ordinal());
@@ -243,12 +243,12 @@
public float getSampleAspectRatio() {
return sampleAspectRatio;
}
-
+
@CalledByNative
public Buffer getBuffer() {
return buffer;
}
-
+
@CalledByNative
public int getRotation() {
return rotation;
@@ -261,7 +261,7 @@
public void setAlphaStitchMode(int stitchMode) {
alphaStitchMode = AlphaStitchMode.values()[stitchMode];
}
-
+
@CalledByNative
public long getTimestampNs() {
return timestampNs;
@@ -270,7 +270,7 @@
public VideoFrameMetaInfo getMetaInfo() {
return metaInfo;
}
-
+
public int getRotatedWidth() {
if (rotation % 180 == 0) {
return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_LEFT
@@ -283,7 +283,7 @@
? buffer.getHeight() / 2
: buffer.getHeight();
}
-
+
public int getRotatedHeight() {
if (rotation % 180 == 0) {
return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_UP
@@ -296,7 +296,7 @@
? buffer.getWidth() / 2
: buffer.getWidth();
}
-
+
public void replaceBuffer(Buffer buffer, int rotation, long timestampNs) {
release();
this.buffer = buffer;
@@ -351,12 +351,12 @@
public void fillAlphaData(ByteBuffer buffer) {
alphaBuffer = buffer;
}
-
+
@Override
public void retain() {
buffer.retain();
}
-
+
@Override
@CalledByNative
public void release() {
@@ -431,51 +431,51 @@ struct VideoFrame {
metaInfo(NULL){
memset(matrix, 0, sizeof(matrix));
}
-
+
VIDEO_PIXEL_FORMAT type;
-
+
int width;
-
+
int height;
-
+
int yStride;
-
+
int uStride;
-
+
int vStride;
-
+
uint8_t* yBuffer;
-
+
uint8_t* uBuffer;
-
+
uint8_t* vBuffer;
-
+
int rotation;
-
+
int64_t renderTimeMs;
-
+
int avsync_type;
-
+
uint8_t* metadata_buffer;
-
+
int metadata_size;
-
+
void* sharedContext;
-
+
int textureId;
-
+
void* d3d11Texture2d;
-
+
float matrix[16];
-
+
uint8_t* alphaBuffer;
-
+
ALPHA_STITCH_MODE alphaStitchMode;
-
+
void* pixelBuffer;
-
+
IVideoFrameMetaInfo* metaInfo;
-
+
ColorSpace colorSpace;
};
USTRUCT(BlueprintType)
@@ -742,19 +742,6 @@ class VideoFrame {
Note that the buffer provides a pointer to a pointer. This interface cannot modify the pointer of the buffer, but it can modify the content of the buffer.
-
- Method
-
-
- getColorSpace
- Get the color space properties of the video frame.
-
-
- setColorSpace
- Get the color space properties of the video frame.
-
-
-
@@ -878,7 +865,13 @@ class VideoFrame {
colorSpace
- By default, the color space properties of video frames will apply the Full Range and BT.709 standard configurations. You can configure the settings according your needs for custom video capturing and rendering. See .
+ By default, the color space properties of video frames will apply the Full Range and BT.709 standard configurations. You can configure the settings according to your needs for custom video capturing and rendering. See .
+ By default, the color space properties of video frames will apply the Full Range and BT.709 standard configurations. You can configure the settings according to your needs for custom video capturing and rendering. See . The methods associated with this parameter are as follows:
+ - getColorSpace: Get the color space attribute of the video frame.
+ - setColorSpace: Set the color space attribute of the video frame.
+
sourceType
diff --git a/en-US/dita/RTC-NG/config/keys-rtc-ng-api-cpp.ditamap b/en-US/dita/RTC-NG/config/keys-rtc-ng-api-cpp.ditamap
index b86f51fb442..62f937fcd3e 100644
--- a/en-US/dita/RTC-NG/config/keys-rtc-ng-api-cpp.ditamap
+++ b/en-US/dita/RTC-NG/config/keys-rtc-ng-api-cpp.ditamap
@@ -1008,6 +1008,13 @@
+
+
+
+ Destroy
+
+
+
diff --git a/en-US/dita/RTC-NG/config/keys-rtc-ng-api-ios.ditamap b/en-US/dita/RTC-NG/config/keys-rtc-ng-api-ios.ditamap
index 5c4990327cf..f8a329a2ddb 100644
--- a/en-US/dita/RTC-NG/config/keys-rtc-ng-api-ios.ditamap
+++ b/en-US/dita/RTC-NG/config/keys-rtc-ng-api-ios.ditamap
@@ -905,6 +905,13 @@
+
+
+
+ destroy:
+
+
+
diff --git a/en-US/dita/RTC-NG/config/keys-rtc-ng-api-java.ditamap b/en-US/dita/RTC-NG/config/keys-rtc-ng-api-java.ditamap
index 00946ab6624..baf188eaf26 100644
--- a/en-US/dita/RTC-NG/config/keys-rtc-ng-api-java.ditamap
+++ b/en-US/dita/RTC-NG/config/keys-rtc-ng-api-java.ditamap
@@ -945,6 +945,13 @@
+
+
+
+ destroy
+
+
+
diff --git a/en-US/dita/RTC-NG/config/keys-rtc-ng-api-macos.ditamap b/en-US/dita/RTC-NG/config/keys-rtc-ng-api-macos.ditamap
index 11e23b989a4..8029e0f5244 100644
--- a/en-US/dita/RTC-NG/config/keys-rtc-ng-api-macos.ditamap
+++ b/en-US/dita/RTC-NG/config/keys-rtc-ng-api-macos.ditamap
@@ -862,6 +862,13 @@
+
+
+
+ destroy:
+
+
+
diff --git a/en-US/dita/RTC-NG/release/release_notes.dita b/en-US/dita/RTC-NG/release/release_notes.dita
index 188c55213e2..64ba335eff8 100644
--- a/en-US/dita/RTC-NG/release/release_notes.dita
+++ b/en-US/dita/RTC-NG/release/release_notes.dita
@@ -136,8 +136,7 @@
This version introduces the method, which is used to set the EGL context for rendering remote video streams. When using Texture format video data for remote video self-rendering, you can use this method to replace the SDK's default remote EGL context, achieving unified EGL context management.
Color space settings
- This version adds and to . You can use to obtain the color space properties of the video frame and use to customize the settings. By default, the color space uses Full Range and BT.709 standard configuration. Developers can flexibly adjust according to their own capture or rendering needs, further enhancing the customization capabilities of video processing.
- This version adds the colorSpace parameter to and . You can use this parameter to set the color space properties of the video frame. By default, the color space uses Full Range and BT.709 standard configuration. You can flexibly adjust according to your own capture or rendering needs, further enhancing the customization capabilities of video processing.
+ This version adds the colorSpace parameter to and . You can use this parameter to set the color space properties of the video frame. By default, the color space uses Full Range and BT.709 standard configuration. You can flexibly adjust according to your own capture or rendering needs, further enhancing the customization capabilities of video processing.
Others