diff --git a/en-US/dita/RTC-NG/API/api_rte_destroy.dita b/en-US/dita/RTC-NG/API/api_rte_destroy.dita new file mode 100644 index 00000000000..488245fdf61 --- /dev/null +++ b/en-US/dita/RTC-NG/API/api_rte_destroy.dita @@ -0,0 +1,58 @@ + + + + <ph keyref="Destroy_Rte"/> + Destroys the RTE object. + + + + + + + + 
+

+ public synchronized void destroy() throws RteException; + - (BOOL)destroy:(AgoraRteError * _Nullable)error; + bool Destroy(Error *err = nullptr){ + return RteDestroy(&c_rte, err != nullptr ? err->get_underlying_impl() : nullptr); + } + + + + +

+
+
+
+ +
Since
+
v4.5.0
+
+
+

This method releases all resources used by the RTE object.

+
+
+ Restrictions +

None.

+
+
+ <ph props="cpp apple">Parameters</ph> + + + + + +
+
+ <ph keyref="return-section-title"/> +

When the method call succeeds, there is no return value; when it fails, an exception is thrown. You need to catch the exception and handle it accordingly. See for details and resolution suggestions.

+

Whether the RTE object is destroyed successfully:
  • : The RTE object is destroyed successfully.
  • : The RTE object fails to be destroyed.
+

+
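A minimal usage sketch in Java (illustrative, not taken from the SDK reference): it assumes an RTE object named rte was created earlier in the app; only destroy() and RteException come from the prototype above.

// Hypothetical name: `rte` refers to a previously created RTE object.
try {
    rte.destroy(); // releases all resources held by the RTE object
} catch (RteException e) {
    // Destruction failed; inspect the exception and handle it accordingly.
    e.printStackTrace();
}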
+ + diff --git a/en-US/dita/RTC-NG/API/class_videoframe.dita b/en-US/dita/RTC-NG/API/class_videoframe.dita index 9a1bb22e2e1..33aa6fe6973 100644 --- a/en-US/dita/RTC-NG/API/class_videoframe.dita +++ b/en-US/dita/RTC-NG/API/class_videoframe.dita @@ -7,45 +7,45 @@

public class VideoFrame implements RefCounted { - + public interface Buffer extends RefCounted { - + @CalledByNative("Buffer") int getWidth(); - + @CalledByNative("Buffer") int getHeight(); - + @CalledByNative("Buffer") I420Buffer toI420(); - + @Override @CalledByNative("Buffer") void release(); - + @Override @CalledByNative("Buffer") void retain(); - + @CalledByNative("Buffer") Buffer cropAndScale( int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight); - + @CalledByNative("Buffer") @Nullable Buffer mirror(int frameRotation); - + @CalledByNative("Buffer") @Nullable Buffer rotate(int frameRotation); - + @CalledByNative("Buffer") @Nullable Buffer transform(int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight, int frameRotation); } - + public interface I420Buffer extends Buffer { - + @CalledByNative("I420Buffer") ByteBuffer getDataY(); - + @CalledByNative("I420Buffer") ByteBuffer getDataU(); - + @CalledByNative("I420Buffer") ByteBuffer getDataV(); @CalledByNative("I420Buffer") int getStrideY(); @CalledByNative("I420Buffer") int getStrideU(); @CalledByNative("I420Buffer") int getStrideV(); } - + public interface I422Buffer extends Buffer { @CalledByNative("I422Buffer") ByteBuffer getDataY(); @CalledByNative("I422Buffer") ByteBuffer getDataU(); @@ -55,13 +55,13 @@ @CalledByNative("I422Buffer") int getStrideV(); } public interface RgbaBuffer extends Buffer { @CalledByNative("RgbaBuffer") ByteBuffer getData(); } - + public interface TextureBuffer extends Buffer { - + enum Type { - + OES(GLES11Ext.GL_TEXTURE_EXTERNAL_OES), - + RGB(GLES20.GL_TEXTURE_2D); private final int glTarget; private Type(final int glTarget) { @@ -76,17 +76,17 @@ EGL_CONTEXT_14; } Type getType(); - + @CalledByNative("TextureBuffer") int getTextureId(); - + Matrix getTransformMatrix(); - + @CalledByNative("TextureBuffer") EglBase.Context getEglBaseContext(); @CalledByNative("TextureBuffer") Object getSourceTexturePool(); @CalledByNative("TextureBuffer") long getNativeEglContext(); @CalledByNative("TextureBuffer") int getEglContextType(); @CalledByNative("TextureBuffer") float[] getTransformMatrixArray(); - + @CalledByNative("TextureBuffer") int getSequence(); @CalledByNative("TextureBuffer") long getFenceObject(); @CalledByNative("TextureBuffer") boolean is10BitTexture(); @@ -198,22 +198,22 @@ return stitchMode; } } - + private Buffer buffer; - + private int rotation; - + private long timestampNs; private ColorSpace colorSpace; private SourceType sourceType; private float sampleAspectRatio; - + private AlphaStitchMode alphaStitchMode = AlphaStitchMode.ALPHA_NO_STITCH; private VideoFrameMetaInfo metaInfo = new VideoFrameMetaInfo(); - + private @Nullable ByteBuffer alphaBuffer; private long nativeAlphaBuffer; - + public VideoFrame(Buffer buffer, int rotation, long timestampNs) { this(buffer, rotation, timestampNs, new WrappedNativeColorSpace(), null, 0L, 1.0f, SourceType.kUnspecified.ordinal()); @@ -243,12 +243,12 @@ public float getSampleAspectRatio() { return sampleAspectRatio; } - + @CalledByNative public Buffer getBuffer() { return buffer; } - + @CalledByNative public int getRotation() { return rotation; @@ -261,7 +261,7 @@ public void setAlphaStitchMode(int stitchMode) { alphaStitchMode = AlphaStitchMode.values()[stitchMode]; } - + @CalledByNative public long getTimestampNs() { return timestampNs; @@ -270,7 +270,7 @@ public VideoFrameMetaInfo getMetaInfo() { return metaInfo; } - + public int getRotatedWidth() { if (rotation % 180 == 0) { return (alphaStitchMode 
== AlphaStitchMode.ALPHA_STITCH_LEFT @@ -283,7 +283,7 @@ ? buffer.getHeight() / 2 : buffer.getHeight(); } - + public int getRotatedHeight() { if (rotation % 180 == 0) { return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_UP @@ -296,7 +296,7 @@ ? buffer.getWidth() / 2 : buffer.getWidth(); } - + public void replaceBuffer(Buffer buffer, int rotation, long timestampNs) { release(); this.buffer = buffer; @@ -351,12 +351,12 @@ public void fillAlphaData(ByteBuffer buffer) { alphaBuffer = buffer; } - + @Override public void retain() { buffer.retain(); } - + @Override @CalledByNative public void release() { @@ -431,51 +431,51 @@ struct VideoFrame { metaInfo(NULL){ memset(matrix, 0, sizeof(matrix)); } - + VIDEO_PIXEL_FORMAT type; - + int width; - + int height; - + int yStride; - + int uStride; - + int vStride; - + uint8_t* yBuffer; - + uint8_t* uBuffer; - + uint8_t* vBuffer; - + int rotation; - + int64_t renderTimeMs; - + int avsync_type; - + uint8_t* metadata_buffer; - + int metadata_size; - + void* sharedContext; - + int textureId; - + void* d3d11Texture2d; - + float matrix[16]; - + uint8_t* alphaBuffer; - + ALPHA_STITCH_MODE alphaStitchMode; - + void* pixelBuffer; - + IVideoFrameMetaInfo* metaInfo; - + ColorSpace colorSpace; }; USTRUCT(BlueprintType) @@ -742,19 +742,6 @@ class VideoFrame {

Note that the buffer is provided as a pointer to a pointer. This interface cannot modify the buffer pointer itself, but it can modify the contents of the buffer.

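To illustrate the same in-place idea with the Android VideoFrame class shown above (an assumption, since this note documents the buffer semantics of another language's interface), the sketch below modifies the pixel data of an I420 frame without replacing the buffer reference held by the frame. Here, frame is assumed to be a VideoFrame delivered by the SDK, and ByteBuffer is java.nio.ByteBuffer.

// Illustrative sketch only: halve the luma of every pixel in place.
// The Buffer reference held by the frame is never reassigned; only its contents change.
VideoFrame.I420Buffer i420 = frame.getBuffer().toI420();
ByteBuffer y = i420.getDataY();
for (int row = 0; row < i420.getHeight(); row++) {
    for (int col = 0; col < i420.getWidth(); col++) {
        int idx = row * i420.getStrideY() + col;
        y.put(idx, (byte) ((y.get(idx) & 0xFF) / 2)); // darken the Y plane
    }
}
i420.release(); // toI420() returns a retained buffer; release it when done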
-
- Method - - - getColorSpace - Get the color space properties of the video frame. - - - setColorSpace - Get the color space properties of the video frame. - - -
<text conref="../conref/conref_api_metadata.dita#conref_api_metadata/property"/> @@ -878,7 +865,13 @@ class VideoFrame { colorSpace - By default, the color space properties of video frames will apply the Full Range and BT.709 standard configurations. You can configure the settings according your needs for custom video capturing and rendering. See . + By default, the color space properties of video frames will apply the Full Range and BT.709 standard configurations. You can configure the settings according to your needs for custom video capturing and rendering. See . + By default, the color space properties of video frames will apply the Full Range and BT.709 standard configurations. You can configure the settings according to your needs for custom video capturing and rendering. See . The methods associated with this parameter are as follows:
  • getColorSpace: Gets the color space properties of the video frame.
  • setColorSpace: Sets the color space properties of the video frame.
sourceType