public class AgoraVideoFrame {
+
public static final int FORMAT_NONE = -1;
+
public static final int FORMAT_TEXTURE_2D = 10;
+
public static final int FORMAT_TEXTURE_OES = 11;
+
public static final int FORMAT_I420 = 1;
+
public static final int FORMAT_BGRA = 2;
+
public static final int FORMAT_NV21 = 3;
+
public static final int FORMAT_RGBA = 4;
+
public static final int FORMAT_I422 = 16;
+
public static final int BUFFER_TYPE_NONE = -1;
+
public static final int BUFFER_TYPE_BUFFER = 1;
+
public static final int BUFFER_TYPE_ARRAY = 2;
+
public static final int BUFFER_TYPE_TEXTURE = 3;
public AgoraVideoFrame() {
- format = 10;
+ format = 10;
timeStamp = 0;
stride = 0;
height = 0;
@@ -37,53 +49,78 @@
rotation = 0;
alphaStitchMode = 0;
}
+
public int format;
+
public long timeStamp;
+
public int stride;
+
public int height;
+
public int textureID;
+
public boolean syncMode;
+
public float[] transform;
+
public javax.microedition.khronos.egl.EGLContext eglContext10;
- public android.opengl.EGLContext eglContext14;
+ public android.opengl.EGLContext eglContext14;
+
public byte[] buf;
+
public int cropLeft;
+
public int cropTop;
+
public int cropRight;
+
public int cropBottom;
+
public int rotation;
+
public int alphaStitchMode;
@Override
public String toString() {
- return "AgoraVideoFrame{"
- + "format=" + format + ", timeStamp=" + timeStamp + ", stride=" + stride
- + ", height=" + height + ", textureID=" + textureID
- + ", buf.length=" + (buf != null ? buf.length : 0) + ", cropLeft=" + cropLeft
- + ", cropTop=" + cropTop + ", cropRight=" + cropRight + ", cropBottom=" + cropBottom
- + ", rotation=" + rotation + ", alphaStitchMode=" + alphaStitchMode + '}';
+ return "AgoraVideoFrame{"
+ + "format=" + format + ", timeStamp=" + timeStamp + ", stride=" + stride
+ + ", height=" + height + ", textureID=" + textureID
+ + ", buf.length=" + (buf != null ? buf.length : 0) + ", cropLeft=" + cropLeft
+ + ", cropTop=" + cropTop + ", cropRight=" + cropRight + ", cropBottom=" + cropBottom
+ + ", rotation=" + rotation + ", alphaStitchMode=" + alphaStitchMode + '}';
}
+
}
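
The Java class above is the frame payload for pushing externally captured video. A minimal illustrative sketch follows (not part of this patch); the io.agora.rtc2 package names and the pushExternalVideoFrame overload are assumptions that differ between SDK generations.

import io.agora.rtc2.RtcEngine;
import io.agora.rtc2.video.AgoraVideoFrame;

final class ExternalFrameSender {
    // Hypothetical helper: wrap one raw NV21 buffer and hand it to the engine.
    // Assumes the external video source has already been enabled on the engine.
    static void pushNv21(RtcEngine rtcEngine, byte[] nv21, int width, int height) {
        AgoraVideoFrame frame = new AgoraVideoFrame();
        frame.format = AgoraVideoFrame.FORMAT_NV21;   // raw-buffer path
        frame.buf = nv21;                             // expects width * height * 3 / 2 bytes
        frame.stride = width;                         // stride in pixels
        frame.height = height;
        frame.rotation = 0;
        frame.timeStamp = System.currentTimeMillis(); // ms; wrong values cause drops or A/V desync
        rtcEngine.pushExternalVideoFrame(frame);      // return type varies across versions; ignored here
    }
}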
- __attribute__((visibility("default"))) @interface AgoraVideoFrame : NSObject
+
+__attribute__((visibility("default"))) @interface AgoraVideoFrame : NSObject
@property(assign, nonatomic) NSInteger format;
-@property(assign, nonatomic) CMTime time;
-@property(assign, nonatomic) int stride DEPRECATED_MSG_ATTRIBUTE("use strideInPixels instead");
-@property(assign, nonatomic) int strideInPixels;
-@property(assign, nonatomic) int height;
+
+@property(assign, nonatomic) CMTime time;
+@property(assign, nonatomic) int stride DEPRECATED_MSG_ATTRIBUTE("use strideInPixels instead");
+
+@property(assign, nonatomic) int strideInPixels;
+@property(assign, nonatomic) int height;
@property(assign, nonatomic) CVPixelBufferRef _Nullable textureBuf;
+
@property(strong, nonatomic) IMAGE_CLASS * _Nullable image;
-@property(strong, nonatomic) NSData *_Nullable dataBuf;
+
+@property(strong, nonatomic) NSData *_Nullable dataBuf;
@property(strong, nonatomic) NSData *_Nullable alphaBuf;
@property(assign, nonatomic) AgoraAlphaStitchMode alphaStitchMode;
-@property(assign, nonatomic) int cropLeft;
-@property(assign, nonatomic) int cropTop;
-@property(assign, nonatomic) int cropRight;
-@property(assign, nonatomic) int cropBottom;
-@property(assign, nonatomic) int rotation;
+
+@property(assign, nonatomic) int cropLeft;
+@property(assign, nonatomic) int cropTop;
+@property(assign, nonatomic) int cropRight;
+@property(assign, nonatomic) int cropBottom;
+@property(assign, nonatomic) int rotation;
+@property(strong, nonatomic) AgoraColorSpace *_Nullable colorSpace;
+
- (void)fillAlphaData;
@end
- struct ExternalVideoFrame {
+
+struct ExternalVideoFrame {
ExternalVideoFrame()
: type(VIDEO_BUFFER_RAW_DATA),
format(VIDEO_PIXEL_DEFAULT),
@@ -99,44 +136,78 @@
eglContext(NULL),
eglType(EGL_CONTEXT10),
textureId(0),
+ fenceObject(0),
metadataBuffer(NULL),
metadataSize(0),
alphaBuffer(NULL),
fillAlphaBuffer(false),
- alphaStitchMode(0),
+ alphaStitchMode(NO_ALPHA_STITCH),
d3d11Texture2d(NULL),
textureSliceIndex(0){}
+
enum EGL_CONTEXT_TYPE {
+
EGL_CONTEXT10 = 0,
+
EGL_CONTEXT14 = 1,
};
+
enum VIDEO_BUFFER_TYPE {
+
VIDEO_BUFFER_RAW_DATA = 1,
+
VIDEO_BUFFER_ARRAY = 2,
+
VIDEO_BUFFER_TEXTURE = 3,
};
+
VIDEO_BUFFER_TYPE type;
+
VIDEO_PIXEL_FORMAT format;
+
void* buffer;
+
int stride;
+
int height;
+
int cropLeft;
+
int cropTop;
+
int cropRight;
+
int cropBottom;
+
int rotation;
+
long long timestamp;
- void *eglContext;
+
+ void* eglContext;
+
EGL_CONTEXT_TYPE eglType;
+
int textureId;
+
+ long long fenceObject;
+
float matrix[16];
+
uint8_t* metadataBuffer;
+
int metadataSize;
+
uint8_t* alphaBuffer;
+
bool fillAlphaBuffer;
- int alphaStitchMode;
+
+ ALPHA_STITCH_MODE alphaStitchMode;
+
void *d3d11Texture2d;
+
int textureSliceIndex;
+
+ ColorSpace colorSpace;
};
export class ExternalVideoFrame {
type?: VideoBufferType;
@@ -403,8 +474,8 @@ class ExternalVideoFrame {
Additional transform applied to the Texture frame. This parameter applies only to video data in Texture format.
- eglContext11
- EGLContext11. This parameter applies only to video data in Texture format.
+ eglContext10
+ EGLContext10. This parameter applies only to video data in Texture format.
eglContext14
@@ -414,8 +485,8 @@ class ExternalVideoFrame {
eglContext
This parameter applies only to video data in Texture format.
- - When using the OpenGL interface defined by Khronos (javax.microedition.khronos.egl.*), set eglContext to this field.
- - When using the OpenGL interface defined by Android (android.opengl.*), set eglContext to this field.
+ - When using the OpenGL interface defined by Khronos (javax.microedition.khronos.egl.*), set eglContext to this field.
+ - When using the OpenGL interface defined by Android (android.opengl.*), set eglContext to this field (see the sketch below).
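
In the Java AgoraVideoFrame shown earlier, the same rule maps onto two separate fields: eglContext10 for the Khronos API and eglContext14 for the Android API. A minimal illustrative sketch, assuming a texture created through the Khronos EGL10 API (package names are assumptions):

import javax.microedition.khronos.egl.EGLContext;

import io.agora.rtc2.video.AgoraVideoFrame;

final class TextureFrameFactory {
    // Hypothetical helper for a texture created with the Khronos EGL API (EGL10);
    // for a texture created through android.opengl.* (EGL14), set eglContext14 instead.
    static AgoraVideoFrame wrapOesTexture(int textureId, float[] texMatrix, int width, int height,
            EGLContext khronosContext, long captureTimeMs) {
        AgoraVideoFrame frame = new AgoraVideoFrame();
        frame.format = AgoraVideoFrame.FORMAT_TEXTURE_OES;
        frame.textureID = textureId;
        frame.transform = texMatrix;         // e.g. the matrix filled by SurfaceTexture.getTransformMatrix()
        frame.stride = width;
        frame.height = height;
        frame.eglContext10 = khronosContext; // Khronos (javax.microedition.khronos.egl.*) context
        frame.timeStamp = captureTimeMs;     // milliseconds
        return frame;
    }
}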
@@ -506,6 +577,10 @@ class ExternalVideoFrame {
time
The timestamp of the incoming video frame, in milliseconds. An incorrect timestamp causes dropped frames or audio-video desynchronization.
+
+
+
+
diff --git a/dita/RTC-NG/API/class_videoframe.dita b/dita/RTC-NG/API/class_videoframe.dita
index 0e2de8f9661..6583607bdf2 100644
--- a/dita/RTC-NG/API/class_videoframe.dita
+++ b/dita/RTC-NG/API/class_videoframe.dita
@@ -7,42 +7,61 @@
public class VideoFrame implements RefCounted {
+
public interface Buffer extends RefCounted {
- @CalledByNative("Buffer") int getWidth();
- @CalledByNative("Buffer") int getHeight();
- @CalledByNative("Buffer") I420Buffer toI420();
- @Override @CalledByNative("Buffer") void release();
- @Override @CalledByNative("Buffer") void retain();
- @CalledByNative("Buffer")
+
+ @CalledByNative("Buffer") int getWidth();
+
+ @CalledByNative("Buffer") int getHeight();
+
+ @CalledByNative("Buffer") I420Buffer toI420();
+
+ @Override @CalledByNative("Buffer") void release();
+
+ @Override @CalledByNative("Buffer") void retain();
+
+ @CalledByNative("Buffer")
Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight);
- @CalledByNative("Buffer") @Nullable Buffer mirror(int frameRotation);
- @CalledByNative("Buffer") @Nullable Buffer rotate(int frameRotation);
- @CalledByNative("Buffer")
+
+ @CalledByNative("Buffer") @Nullable Buffer mirror(int frameRotation);
+
+ @CalledByNative("Buffer") @Nullable Buffer rotate(int frameRotation);
+
+ @CalledByNative("Buffer")
@Nullable
Buffer transform(int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth,
int scaleHeight, int frameRotation);
}
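
    // Illustrative sketch, not part of the original class: exercise the cropAndScale()
    // contract declared above to produce a half-resolution buffer. Ownership of the
    // returned buffer follows the RefCounted contract (an assumption here).
    static Buffer downscaleByHalf(Buffer src) {
        int w = src.getWidth();
        int h = src.getHeight();
        return src.cropAndScale(0, 0, w, h, w / 2, h / 2); // full-frame crop, half-size scale
    }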
+
public interface I420Buffer extends Buffer {
- @CalledByNative("I420Buffer") ByteBuffer getDataY();
- @CalledByNative("I420Buffer") ByteBuffer getDataU();
- @CalledByNative("I420Buffer") ByteBuffer getDataV();
- @CalledByNative("I420Buffer") int getStrideY();
- @CalledByNative("I420Buffer") int getStrideU();
- @CalledByNative("I420Buffer") int getStrideV();
+
+ @CalledByNative("I420Buffer") ByteBuffer getDataY();
+
+ @CalledByNative("I420Buffer") ByteBuffer getDataU();
+
+ @CalledByNative("I420Buffer") ByteBuffer getDataV();
+ @CalledByNative("I420Buffer") int getStrideY();
+ @CalledByNative("I420Buffer") int getStrideU();
+ @CalledByNative("I420Buffer") int getStrideV();
}
+
public interface I422Buffer extends Buffer {
- @CalledByNative("I422Buffer") ByteBuffer getDataY();
- @CalledByNative("I422Buffer") ByteBuffer getDataU();
- @CalledByNative("I422Buffer") ByteBuffer getDataV();
- @CalledByNative("I422Buffer") int getStrideY();
- @CalledByNative("I422Buffer") int getStrideU();
- @CalledByNative("I422Buffer") int getStrideV();
- }
- public interface RgbaBuffer extends Buffer { @CalledByNative("RgbaBuffer") ByteBuffer getData(); }
+ @CalledByNative("I422Buffer") ByteBuffer getDataY();
+ @CalledByNative("I422Buffer") ByteBuffer getDataU();
+ @CalledByNative("I422Buffer") ByteBuffer getDataV();
+ @CalledByNative("I422Buffer") int getStrideY();
+ @CalledByNative("I422Buffer") int getStrideU();
+ @CalledByNative("I422Buffer") int getStrideV();
+ }
+ public interface RgbaBuffer extends Buffer { @CalledByNative("RgbaBuffer") ByteBuffer getData(); }
+
public interface TextureBuffer extends Buffer {
+
enum Type {
+
OES(GLES11Ext.GL_TEXTURE_EXTERNAL_OES),
+
RGB(GLES20.GL_TEXTURE_2D);
private final int glTarget;
private Type(final int glTarget) {
@@ -57,22 +76,26 @@
EGL_CONTEXT_14;
}
Type getType();
- @CalledByNative("TextureBuffer") int getTextureId();
+
+ @CalledByNative("TextureBuffer") int getTextureId();
+
Matrix getTransformMatrix();
- @CalledByNative("TextureBuffer") EglBase.Context getEglBaseContext();
- @CalledByNative("TextureBuffer") Object getSourceTexturePool();
- @CalledByNative("TextureBuffer") long getNativeEglContext();
- @CalledByNative("TextureBuffer") int getEglContextType();
- @CalledByNative("TextureBuffer") float[] getTransformMatrixArray();
- @CalledByNative("TextureBuffer") int getSequence();
- @CalledByNative("TextureBuffer") boolean is10BitTexture();
+
+ @CalledByNative("TextureBuffer") EglBase.Context getEglBaseContext();
+ @CalledByNative("TextureBuffer") Object getSourceTexturePool();
+ @CalledByNative("TextureBuffer") long getNativeEglContext();
+ @CalledByNative("TextureBuffer") int getEglContextType();
+ @CalledByNative("TextureBuffer") float[] getTransformMatrixArray();
+
+ @CalledByNative("TextureBuffer") int getSequence();
+ @CalledByNative("TextureBuffer") long getFenceObject();
+ @CalledByNative("TextureBuffer") boolean is10BitTexture();
}
public interface ColorSpace {
enum Range {
Invalid(0),
Limited(1),
- Full(2),
- Derived(3);
+ Full(2);
private final int range;
private Range(int range) {
this.range = range;
@@ -161,32 +184,55 @@
kBackCamera,
kUnspecified,
}
+ public enum AlphaStitchMode {
+ ALPHA_NO_STITCH(0),
+ ALPHA_STITCH_UP(1),
+ ALPHA_STITCH_BELOW(2),
+ ALPHA_STITCH_LEFT(3),
+ ALPHA_STITCH_RIGHT(4);
+ private final int stitchMode;
+ private AlphaStitchMode(int stitchMode) {
+ this.stitchMode = stitchMode;
+ }
+ public int value() {
+ return stitchMode;
+ }
+ }
+
private Buffer buffer;
+
private int rotation;
+
private long timestampNs;
private ColorSpace colorSpace;
private SourceType sourceType;
private float sampleAspectRatio;
+
+ private AlphaStitchMode alphaStitchMode = AlphaStitchMode.ALPHA_NO_STITCH;
private VideoFrameMetaInfo metaInfo = new VideoFrameMetaInfo();
+
private @Nullable ByteBuffer alphaBuffer;
- private int alphaStitchMode;
+ private long nativeAlphaBuffer;
+
public VideoFrame(Buffer buffer, int rotation, long timestampNs) {
- this(buffer, rotation, timestampNs, new WrappedNativeColorSpace(), null, 0L, 1.0f, SourceType.kUnspecified.ordinal());
+ this(buffer, rotation, timestampNs, new WrappedNativeColorSpace(), null, 0L, 1.0f,
+ SourceType.kUnspecified.ordinal());
}
@CalledByNative
public VideoFrame(Buffer buffer, int rotation, long timestampNs, ColorSpace colorSpace,
- ByteBuffer alphaBuffer, float sampleAspectRatio, int sourceType) {
+ ByteBuffer alphaBuffer, long nativeAlphaBuffer, float sampleAspectRatio, int sourceType) {
if (buffer == null) {
- throw new IllegalArgumentException("buffer not allowed to be null");
+ throw new IllegalArgumentException("buffer not allowed to be null");
}
if (rotation % 90 != 0) {
- throw new IllegalArgumentException("rotation must be a multiple of 90");
+ throw new IllegalArgumentException("rotation must be a multiple of 90");
}
this.buffer = buffer;
this.rotation = rotation;
this.timestampNs = timestampNs;
this.colorSpace = colorSpace;
this.alphaBuffer = alphaBuffer;
+ this.nativeAlphaBuffer = nativeAlphaBuffer;
this.sampleAspectRatio = sampleAspectRatio;
this.sourceType = SourceType.values()[sourceType];
}
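
    // Illustrative sketch, not part of the original class: app code typically goes through the
    // public three-argument constructor above; rotation must be a multiple of 90 or it throws.
    static VideoFrame wrapBuffer(Buffer buffer, int rotationDegrees) {
        return new VideoFrame(buffer, rotationDegrees, System.nanoTime()); // monotonic ns timestamp
    }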
@@ -197,18 +243,25 @@
public float getSampleAspectRatio() {
return sampleAspectRatio;
}
+
@CalledByNative
public Buffer getBuffer() {
return buffer;
}
+
@CalledByNative
public int getRotation() {
return rotation;
}
@CalledByNative
public int getAlphaStitchMode() {
- return alphaStitchMode;
+ return alphaStitchMode.value();
}
+ @CalledByNative
+ public void setAlphaStitchMode(int stitchMode) {
+ alphaStitchMode = AlphaStitchMode.values()[stitchMode];
+ }
+
@CalledByNative
public long getTimestampNs() {
return timestampNs;
@@ -217,41 +270,93 @@
public VideoFrameMetaInfo getMetaInfo() {
return metaInfo;
}
+
public int getRotatedWidth() {
if (rotation % 180 == 0) {
- return buffer.getWidth();
+ return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_LEFT
+ || alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_RIGHT)
+ ? buffer.getWidth() / 2
+ : buffer.getWidth();
}
- return buffer.getHeight();
+ return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_UP
+ || alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_BELOW)
+ ? buffer.getHeight() / 2
+ : buffer.getHeight();
}
+
public int getRotatedHeight() {
if (rotation % 180 == 0) {
- return buffer.getHeight();
+ return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_UP
+ || alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_BELOW)
+ ? buffer.getHeight() / 2
+ : buffer.getHeight();
}
- return buffer.getWidth();
+ return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_LEFT
+ || alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_RIGHT)
+ ? buffer.getWidth() / 2
+ : buffer.getWidth();
}
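
    // Illustrative numbers, not part of the original class: for a 720 x 2560 buffer whose alpha
    // plane is stitched below the video (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_BELOW)
    // and rotation == 0:
    //   getRotatedWidth()  -> 720
    //   getRotatedHeight() -> 2560 / 2 = 1280  (the visible video height, excluding the alpha half)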
+
public void replaceBuffer(Buffer buffer, int rotation, long timestampNs) {
release();
this.buffer = buffer;
this.rotation = rotation;
this.timestampNs = timestampNs;
}
+ @CalledByNative
public ColorSpace getColorSpace() {
return colorSpace;
}
+ public void setColorSpace(ColorSpace colorSpace) {
+ this.colorSpace = colorSpace;
+ }
+ @CalledByNative
+ private int getColorSpaceRange() {
+ if (colorSpace == null) {
+ return ColorSpace.Range.Invalid.getRange();
+ }
+ return colorSpace.getRange().getRange();
+ }
+ @CalledByNative
+ private int getColorSpaceMatrix() {
+ if (colorSpace == null) {
+ return ColorSpace.Matrix.Unspecified.getMatrix();
+ }
+ return colorSpace.getMatrix().getMatrix();
+ }
+ @CalledByNative
+ private int getColorSpaceTransfer() {
+ if (colorSpace == null) {
+ return ColorSpace.Transfer.Unspecified.getTransfer();
+ }
+ return colorSpace.getTransfer().getTransfer();
+ }
+ @CalledByNative
+ private int getColorSpacePrimary() {
+ if (colorSpace == null) {
+ return ColorSpace.Primary.Unspecified.getPrimary();
+ }
+ return colorSpace.getPrimary().getPrimary();
+ }
@CalledByNative
public ByteBuffer getAlphaBuffer() {
return alphaBuffer;
}
+ public void retainAlphaBuffer() {
+ JniCommon.nativeAddRef(nativeAlphaBuffer);
+ }
+ public void releaseAlphaBuffer() {
+ JniCommon.nativeReleaseRef(nativeAlphaBuffer);
+ }
public void fillAlphaData(ByteBuffer buffer) {
alphaBuffer = buffer;
}
- public void setAlphaStitchMode(int mode) {
- this.alphaStitchMode = mode;
- }
+
@Override
public void retain() {
buffer.retain();
}
+
@Override
@CalledByNative
public void release() {
@@ -278,7 +383,8 @@
this.format = format;
}
}
- __attribute__((visibility("default"))) @interface AgoraOutputVideoFrame : NSObject
+ __attribute__((visibility("default"))) @interface AgoraOutputVideoFrame : NSObject
+
@property (nonatomic, assign) NSInteger type;
@property (nonatomic, assign) int width;
@property (nonatomic, assign) int height;
@@ -291,12 +397,16 @@
@property (nonatomic, assign) int rotation;
@property (nonatomic, assign) int64_t renderTimeMs;
@property (nonatomic, assign) int avSyncType;
+
@property(assign, nonatomic) CVPixelBufferRef _Nullable pixelBuffer;
@property (nonatomic, assign) uint8_t* _Nullable alphaBuffer;
@property (nonatomic, assign) AgoraAlphaStitchMode alphaStitchMode;
+
@property(nonatomic, strong) NSDictionary *_Nonnull metaInfo;
+@property(nonatomic, strong) AgoraColorSpace* _Nullable colorSpace;
@end
- struct VideoFrame {
+
+struct VideoFrame {
VideoFrame():
type(VIDEO_PIXEL_DEFAULT),
width(0),
@@ -316,33 +426,57 @@
textureId(0),
d3d11Texture2d(NULL),
alphaBuffer(NULL),
- alphaStitchMode(0),
+ alphaStitchMode(NO_ALPHA_STITCH),
pixelBuffer(NULL),
metaInfo(NULL){
memset(matrix, 0, sizeof(matrix));
}
+
VIDEO_PIXEL_FORMAT type;
+
int width;
+
int height;
+
int yStride;
+
int uStride;
+
int vStride;
+
uint8_t* yBuffer;
+
uint8_t* uBuffer;
+
uint8_t* vBuffer;
+
int rotation;
+
int64_t renderTimeMs;
+
int avsync_type;
+
uint8_t* metadata_buffer;
+
int metadata_size;
+
void* sharedContext;
+
int textureId;
+
void* d3d11Texture2d;
+
float matrix[16];
+
uint8_t* alphaBuffer;
- int alphaStitchMode;
+
+ ALPHA_STITCH_MODE alphaStitchMode;
+
void* pixelBuffer;
+
IVideoFrameMetaInfo* metaInfo;
+
+ ColorSpace colorSpace;
};
USTRUCT(BlueprintType)
struct FVideoFrame {
@@ -608,6 +742,19 @@ class VideoFrame {
The buffer is given as a pointer to a pointer; this interface cannot modify the buffer pointer, only the contents of the buffer.
+
+ Method
+
+
+ getColorSpace
+ Gets the color space attributes of the video frame (see the sketch below).
+
+
+ setColorSpace
+ Sets the color space attributes of the video frame.
+
+
+
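
The two rows above correspond to getColorSpace()/setColorSpace() in the Java VideoFrame class earlier in this patch. A minimal illustrative sketch of a read-side check (the io.agora.base package name is an assumption):

import io.agora.base.VideoFrame;

final class ColorSpaceInspector {
    // Hypothetical helper: true when the frame carries an explicit, valid color-space range.
    static boolean hasExplicitRange(VideoFrame frame) {
        VideoFrame.ColorSpace cs = frame.getColorSpace();
        return cs != null && cs.getRange() != null
                && cs.getRange() != VideoFrame.ColorSpace.Range.Invalid;
    }
}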
@@ -727,9 +874,9 @@ class VideoFrame {
pixelBuffer
Fills the data into a CVPixelBuffer.
-
- colorSpace
- Indicates the color space of the video frame. See .
+
+ colorSpace
+ The color space attributes of the video frame. By default, Full Range and the BT.709 standard are applied. You can customize the settings based on your custom capture or custom rendering requirements. See .
sourceType
From 0452b39abfb33367dc10aa3c4368f3a61d6f8a4b Mon Sep 17 00:00:00 2001
From: jinyu
Date: Fri, 18 Oct 2024 17:03:17 +0800
Subject: [PATCH 06/18] encoding -> capturing
---
dita/RTC-NG/API/api_irtcengine_iscameraexposuresupported.dita | 2 +-
dita/RTC-NG/API/api_irtcengine_iscamerazoomsupported.dita | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/dita/RTC-NG/API/api_irtcengine_iscameraexposuresupported.dita b/dita/RTC-NG/API/api_irtcengine_iscameraexposuresupported.dita
index c49d159269e..f9fdf8f143e 100644
--- a/dita/RTC-NG/API/api_irtcengine_iscameraexposuresupported.dita
+++ b/dita/RTC-NG/API/api_irtcengine_iscameraexposuresupported.dita
@@ -36,7 +36,7 @@
- This method applies to Android and iOS only.
- - This method must be called after the SDK triggers the  callback and reports the local video state as  (2).
+ - This method must be called after the SDK triggers the  callback and reports the local video state as  (1) (see the sketch after this list).
- Before calling  to adjust the exposure factor, it is recommended to call this method first to check whether the current camera supports exposure adjustment.
- When you call this method, it queries whether the camera currently in use supports exposure adjustment, that is, the camera specified when calling .
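
A hypothetical sketch of the call-timing rule above; the callback name and signature are simplified stand-ins (they differ across SDK versions), and the availability of isCameraExposureSupported() on RtcEngine is an assumption based on the file name of this topic:

import io.agora.rtc2.RtcEngine;

final class ExposureGate {
    private static final int LOCAL_VIDEO_STREAM_STATE_CAPTURING = 1; // "capturing", per the note above
    private final RtcEngine rtcEngine;
    private boolean exposureAdjustable;

    ExposureGate(RtcEngine rtcEngine) {
        this.rtcEngine = rtcEngine;
    }

    // Simplified stand-in for the SDK's local-video-state callback: query the camera
    // capability only after the state has reached "capturing" (1).
    void onLocalVideoState(int state) {
        if (state == LOCAL_VIDEO_STREAM_STATE_CAPTURING) {
            exposureAdjustable = rtcEngine.isCameraExposureSupported();
        }
    }

    boolean isExposureAdjustable() {
        return exposureAdjustable;
    }
}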
diff --git a/dita/RTC-NG/API/api_irtcengine_iscamerazoomsupported.dita b/dita/RTC-NG/API/api_irtcengine_iscamerazoomsupported.dita
index 68e714401b7..e4625c76f02 100644
--- a/dita/RTC-NG/API/api_irtcengine_iscamerazoomsupported.dita
+++ b/dita/RTC-NG/API/api_irtcengine_iscamerazoomsupported.dita
@@ -30,7 +30,7 @@
When to call
- This method must be called after the SDK triggers the  callback and reports the local video state as  (2).
+ This method must be called after the SDK triggers the  callback and reports the local video state as  (1).
Restrictions
From ad7bbc2e06ded6ed0f52df5c0be5a23e7b0b25f1 Mon Sep 17 00:00:00 2001
From: jinyu
Date: Mon, 21 Oct 2024 11:54:09 +0800
Subject: [PATCH 07/18] resolve comments
---
dita/RTC-NG/config/keys-rtc-ng-links-android.ditamap | 7 -------
dita/RTC-NG/config/keys-rtc-ng-links-harmony.ditamap | 7 -------
dita/RTC-NG/config/keys-rtc-ng-links.ditamap | 7 +++++++
3 files changed, 7 insertions(+), 14 deletions(-)
diff --git a/dita/RTC-NG/config/keys-rtc-ng-links-android.ditamap b/dita/RTC-NG/config/keys-rtc-ng-links-android.ditamap
index 38c5a447f70..6cb00aa1ebc 100644
--- a/dita/RTC-NG/config/keys-rtc-ng-links-android.ditamap
+++ b/dita/RTC-NG/config/keys-rtc-ng-links-android.ditamap
@@ -125,13 +125,6 @@