diff --git a/dita/RTC-NG/API/api_irtcengine_iscameraexposuresupported.dita b/dita/RTC-NG/API/api_irtcengine_iscameraexposuresupported.dita index c49d159269e..f9fdf8f143e 100644 --- a/dita/RTC-NG/API/api_irtcengine_iscameraexposuresupported.dita +++ b/dita/RTC-NG/API/api_irtcengine_iscameraexposuresupported.dita @@ -36,7 +36,7 @@ diff --git a/dita/RTC-NG/API/api_irtcengine_iscamerazoomsupported.dita b/dita/RTC-NG/API/api_irtcengine_iscamerazoomsupported.dita index 68e714401b7..e4625c76f02 100644 --- a/dita/RTC-NG/API/api_irtcengine_iscamerazoomsupported.dita +++ b/dita/RTC-NG/API/api_irtcengine_iscamerazoomsupported.dita @@ -30,7 +30,7 @@
调用时机 -

该方法必须在 SDK 触发 回调,返回本地视频状态为 (2) 之后调用。

+

该方法必须在 SDK 触发 回调,返回本地视频状态为 (1) 之后调用。

调用限制 diff --git a/dita/RTC-NG/API/api_irtcengine_setexternalremoteeglcontext.dita b/dita/RTC-NG/API/api_irtcengine_setexternalremoteeglcontext.dita new file mode 100644 index 00000000000..5a01a581576 --- /dev/null +++ b/dita/RTC-NG/API/api_irtcengine_setexternalremoteeglcontext.dita @@ -0,0 +1,65 @@ + + + + <ph keyref="setExternalRemoteEglContext"/> + 设置远端视频流渲染的 EGL 环境上下文。 + + + + + + + + +
+

+ public abstract int setExternalRemoteEglContext(Object eglContext); + + + virtual int setExternalRemoteEglContext(void* eglContext) = 0; + + + + +

+
+
+
+ +
自从
+
v4.5.0
+
+
+

调用该方法后,开发者可以替换 SDK 内部默认的远端 EGL 环境上下文,便于实现统一的 EGL 上下文管理。

+

引擎销毁时,SDK 会自动释放 EGL 环境上下文。

+ 该方法仅适用于 Android。 +
+
+ 适用场景 +

该方法适用于使用 Texture 格式的视频数据进行远端视频自渲染的场景。

+
+
+ 调用时机 +

该方法需要在加入频道前调用。

+
+
+ 调用限制 +

无。

+
+
+ 参数 + + + eglContext + 用于远端视频流渲染的 EGL 环境上下文对象。 + +
+
+ <ph keyref="return-section-title"/> +

方法成功调用时,无返回值;方法调用失败时,会抛出 异常,你需要捕获异常并进行处理。详见了解详情和解决建议。

+
    +
  • 0: 方法调用成功。
  • +
  • < 0: 方法调用失败。详见了解详情和解决建议。
  • +
+
+
\ No newline at end of file diff --git a/dita/RTC-NG/API/api_irtcengine_setvideoscenario.dita b/dita/RTC-NG/API/api_irtcengine_setvideoscenario.dita index 5142f5585d6..d929855a308 100644 --- a/dita/RTC-NG/API/api_irtcengine_setvideoscenario.dita +++ b/dita/RTC-NG/API/api_irtcengine_setvideoscenario.dita @@ -77,7 +77,8 @@

-

(2) 适用于视频 1v1 通话场景。针对该场景低延迟、高画质的体验要求,SDK 进行了策略调优,提升了画质、首帧出图、中低端机延迟及弱网流畅度等性能表现。

+

(2) 适用于场景。针对该场景低延迟、高画质的体验要求,SDK 进行了策略调优,提升了画质、首帧出图、中低端机延迟及弱网流畅度等性能表现。

+

(3) 适用于场景。针对该场景对首帧出图时间和画质清晰度的高要求,SDK 进行了策略调优,重点提升了首帧出图体验和画质表现,同时增强了在弱网环境和低端设备上的画质和流畅度表现。

diff --git a/dita/RTC-NG/API/class_audiotrackconfig.dita b/dita/RTC-NG/API/class_audiotrackconfig.dita index 95e46f8b937..7c425f5025d 100644 --- a/dita/RTC-NG/API/class_audiotrackconfig.dita +++ b/dita/RTC-NG/API/class_audiotrackconfig.dita @@ -7,27 +7,35 @@

public class AudioTrackConfig { + public boolean enableLocalPlayback; - - + + public boolean enableAudioProcessing; public AudioTrackConfig() { this.enableLocalPlayback = true; + this.enableAudioProcessing = false; + } + @Override + public String toString() { + return "AudioTrackConfig{" + + "enableLocalPlayback=" + enableLocalPlayback + "enableAudioProcessing" + + enableAudioProcessing + '}'; } - } +} export class AudioTrackConfig { public enableLocalPlayback: boolean = true; } - NS_SWIFT_NAME(AgoraAudioTrackConfig) __attribute__((visibility("default"))) @interface AgoraAudioTrackConfig : NSObject + NS_SWIFT_NAME(AgoraAudioTrackConfig) __attribute__((visibility("default"))) @interface AgoraAudioTrackConfig : NSObject @property (assign, nonatomic) BOOL enableLocalPlayback NS_SWIFT_NAME(enableLocalPlayback); - +@property (assign, nonatomic) BOOL enableAudioProcessing NS_SWIFT_NAME(enableAudioProcessing); @end struct AudioTrackConfig { + bool enableLocalPlayback; - - - AudioTrackConfig() - : enableLocalPlayback(true) {} + + bool enableAudioProcessing; + AudioTrackConfig() : enableLocalPlayback(true),enableAudioProcessing(false) {} }; USTRUCT(BlueprintType) struct FAudioTrackConfig @@ -84,6 +92,16 @@ class AudioTrackConfig { + + enableAudioProcessing + 是否启用音频处理模块: +

+ 该参数设置仅对 类型的自定义音频采集轨道生效。 + +
\ No newline at end of file diff --git a/dita/RTC-NG/API/class_externalvideoframe.dita b/dita/RTC-NG/API/class_externalvideoframe.dita index 505c28c9634..b2de1d3b9ae 100644 --- a/dita/RTC-NG/API/class_externalvideoframe.dita +++ b/dita/RTC-NG/API/class_externalvideoframe.dita @@ -20,7 +20,7 @@ public static final int BUFFER_TYPE_ARRAY = 2; public static final int BUFFER_TYPE_TEXTURE = 3; public AgoraVideoFrame() { - format = 10; + format = 10; timeStamp = 0; stride = 0; height = 0; @@ -37,6 +37,7 @@ rotation = 0; alphaStitchMode = 0; } + public int format; public long timeStamp; public int stride; @@ -46,7 +47,6 @@ public float[] transform; public javax.microedition.khronos.egl.EGLContext eglContext10; public android.opengl.EGLContext eglContext14; - public byte[] buf; public int cropLeft; public int cropTop; @@ -56,34 +56,43 @@ public int alphaStitchMode; @Override public String toString() { - return "AgoraVideoFrame{" - + "format=" + format + ", timeStamp=" + timeStamp + ", stride=" + stride - + ", height=" + height + ", textureID=" + textureID - + ", buf.length=" + (buf != null ? buf.length : 0) + ", cropLeft=" + cropLeft - + ", cropTop=" + cropTop + ", cropRight=" + cropRight + ", cropBottom=" + cropBottom - + ", rotation=" + rotation + ", alphaStitchMode=" + alphaStitchMode + '}'; + return "AgoraVideoFrame{" + + "format=" + format + ", timeStamp=" + timeStamp + ", stride=" + stride + + ", height=" + height + ", textureID=" + textureID + + ", buf.length=" + (buf != null ? 
buf.length : 0) + ", cropLeft=" + cropLeft + + ", cropTop=" + cropTop + ", cropRight=" + cropRight + ", cropBottom=" + cropBottom + + ", rotation=" + rotation + ", alphaStitchMode=" + alphaStitchMode + '}'; } } - __attribute__((visibility("default"))) @interface AgoraVideoFrame : NSObject + +__attribute__((visibility("default"))) @interface AgoraVideoFrame : NSObject @property(assign, nonatomic) NSInteger format; -@property(assign, nonatomic) CMTime time; -@property(assign, nonatomic) int stride DEPRECATED_MSG_ATTRIBUTE("use strideInPixels instead"); -@property(assign, nonatomic) int strideInPixels; -@property(assign, nonatomic) int height; + +@property(assign, nonatomic) CMTime time; +@property(assign, nonatomic) int stride DEPRECATED_MSG_ATTRIBUTE("use strideInPixels instead"); + +@property(assign, nonatomic) int strideInPixels; +@property(assign, nonatomic) int height; @property(assign, nonatomic) CVPixelBufferRef _Nullable textureBuf; + @property(strong, nonatomic) IMAGE_CLASS * _Nullable image; -@property(strong, nonatomic) NSData *_Nullable dataBuf; + +@property(strong, nonatomic) NSData *_Nullable dataBuf; @property(strong, nonatomic) NSData *_Nullable alphaBuf; @property(assign, nonatomic) AgoraAlphaStitchMode alphaStitchMode; -@property(assign, nonatomic) int cropLeft; -@property(assign, nonatomic) int cropTop; -@property(assign, nonatomic) int cropRight; -@property(assign, nonatomic) int cropBottom; -@property(assign, nonatomic) int rotation; + +@property(assign, nonatomic) int cropLeft; +@property(assign, nonatomic) int cropTop; +@property(assign, nonatomic) int cropRight; +@property(assign, nonatomic) int cropBottom; +@property(assign, nonatomic) int rotation; +@property(strong, nonatomic) AgoraColorSpace *_Nullable colorSpace; + - (void)fillAlphaData; @end - struct ExternalVideoFrame { + +struct ExternalVideoFrame { ExternalVideoFrame() : type(VIDEO_BUFFER_RAW_DATA), format(VIDEO_PIXEL_DEFAULT), @@ -99,22 +108,26 @@ eglContext(NULL), 
eglType(EGL_CONTEXT10), textureId(0), + fenceObject(0), metadataBuffer(NULL), metadataSize(0), alphaBuffer(NULL), fillAlphaBuffer(false), - alphaStitchMode(0), + alphaStitchMode(NO_ALPHA_STITCH), d3d11Texture2d(NULL), textureSliceIndex(0){} + enum EGL_CONTEXT_TYPE { EGL_CONTEXT10 = 0, EGL_CONTEXT14 = 1, }; + enum VIDEO_BUFFER_TYPE { VIDEO_BUFFER_RAW_DATA = 1, VIDEO_BUFFER_ARRAY = 2, VIDEO_BUFFER_TEXTURE = 3, }; + VIDEO_BUFFER_TYPE type; VIDEO_PIXEL_FORMAT format; void* buffer; @@ -126,17 +139,19 @@ int cropBottom; int rotation; long long timestamp; - void *eglContext; + void* eglContext; EGL_CONTEXT_TYPE eglType; int textureId; + long long fenceObject; float matrix[16]; uint8_t* metadataBuffer; int metadataSize; uint8_t* alphaBuffer; bool fillAlphaBuffer; - int alphaStitchMode; + ALPHA_STITCH_MODE alphaStitchMode; void *d3d11Texture2d; int textureSliceIndex; + ColorSpace colorSpace; }; export class ExternalVideoFrame { type?: VideoBufferType; @@ -402,9 +417,9 @@ class ExternalVideoFrame { transform Texture 帧额外的转换。该参数仅适用于 Texture 格式的视频数据。 - - eglContext11 - EGLContext11。该参数仅适用于 Texture 格式的视频数据。 + + eglContext10 + EGLContext10。该参数仅适用于 Texture 格式的视频数据。 eglContext14 @@ -414,8 +429,8 @@ class ExternalVideoFrame { eglContext 该参数仅适用于 Texture 格式的视频数据。
    -
  • 当使用 Khronos 定义的 OpenGL 接口 (javax.microedition.khronos.egl.*)时,需要将 eglContext 设置给这个字段。
  • -
  • 当使用 Android 定义的 OpenGL 接口 (android.opengl.*)时,需要将 eglContext 设置给这个字段。
  • +
  • 当使用 Khronos 定义的 OpenGL 接口 (javax.microedition.khronos.egl.*) 时,需要将 eglContext 设置给这个字段。
  • +
  • 当使用 Android 定义的 OpenGL 接口 (android.opengl.*) 时,需要将 eglContext 设置给这个字段。
@@ -506,6 +521,10 @@ class ExternalVideoFrame { time 传入的视频帧的时间戳,以毫秒为单位。不正确的时间戳会导致丢帧或者音视频不同步。 + + + +
diff --git a/dita/RTC-NG/API/class_videoframe.dita b/dita/RTC-NG/API/class_videoframe.dita index 0e2de8f9661..d2aca9e6441 100644 --- a/dita/RTC-NG/API/class_videoframe.dita +++ b/dita/RTC-NG/API/class_videoframe.dita @@ -7,42 +7,61 @@

public class VideoFrame implements RefCounted { + public interface Buffer extends RefCounted { - @CalledByNative("Buffer") int getWidth(); - @CalledByNative("Buffer") int getHeight(); - @CalledByNative("Buffer") I420Buffer toI420(); - @Override @CalledByNative("Buffer") void release(); - @Override @CalledByNative("Buffer") void retain(); - @CalledByNative("Buffer") + + @CalledByNative("Buffer") int getWidth(); + + @CalledByNative("Buffer") int getHeight(); + + @CalledByNative("Buffer") I420Buffer toI420(); + + @Override @CalledByNative("Buffer") void release(); + + @Override @CalledByNative("Buffer") void retain(); + + @CalledByNative("Buffer") Buffer cropAndScale( int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight); - @CalledByNative("Buffer") @Nullable Buffer mirror(int frameRotation); - @CalledByNative("Buffer") @Nullable Buffer rotate(int frameRotation); - @CalledByNative("Buffer") + + @CalledByNative("Buffer") @Nullable Buffer mirror(int frameRotation); + + @CalledByNative("Buffer") @Nullable Buffer rotate(int frameRotation); + + @CalledByNative("Buffer") @Nullable Buffer transform(int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight, int frameRotation); } + public interface I420Buffer extends Buffer { - @CalledByNative("I420Buffer") ByteBuffer getDataY(); - @CalledByNative("I420Buffer") ByteBuffer getDataU(); - @CalledByNative("I420Buffer") ByteBuffer getDataV(); - @CalledByNative("I420Buffer") int getStrideY(); - @CalledByNative("I420Buffer") int getStrideU(); - @CalledByNative("I420Buffer") int getStrideV(); + + @CalledByNative("I420Buffer") ByteBuffer getDataY(); + + @CalledByNative("I420Buffer") ByteBuffer getDataU(); + + @CalledByNative("I420Buffer") ByteBuffer getDataV(); + @CalledByNative("I420Buffer") int getStrideY(); + @CalledByNative("I420Buffer") int getStrideU(); + @CalledByNative("I420Buffer") int getStrideV(); } + public interface I422Buffer extends Buffer { - 
@CalledByNative("I422Buffer") ByteBuffer getDataY(); - @CalledByNative("I422Buffer") ByteBuffer getDataU(); - @CalledByNative("I422Buffer") ByteBuffer getDataV(); - @CalledByNative("I422Buffer") int getStrideY(); - @CalledByNative("I422Buffer") int getStrideU(); - @CalledByNative("I422Buffer") int getStrideV(); - } - public interface RgbaBuffer extends Buffer { @CalledByNative("RgbaBuffer") ByteBuffer getData(); } + @CalledByNative("I422Buffer") ByteBuffer getDataY(); + @CalledByNative("I422Buffer") ByteBuffer getDataU(); + @CalledByNative("I422Buffer") ByteBuffer getDataV(); + @CalledByNative("I422Buffer") int getStrideY(); + @CalledByNative("I422Buffer") int getStrideU(); + @CalledByNative("I422Buffer") int getStrideV(); + } + public interface RgbaBuffer extends Buffer { @CalledByNative("RgbaBuffer") ByteBuffer getData(); } + public interface TextureBuffer extends Buffer { + enum Type { + OES(GLES11Ext.GL_TEXTURE_EXTERNAL_OES), + RGB(GLES20.GL_TEXTURE_2D); private final int glTarget; private Type(final int glTarget) { @@ -57,22 +76,26 @@ EGL_CONTEXT_14; } Type getType(); - @CalledByNative("TextureBuffer") int getTextureId(); + + @CalledByNative("TextureBuffer") int getTextureId(); + Matrix getTransformMatrix(); - @CalledByNative("TextureBuffer") EglBase.Context getEglBaseContext(); - @CalledByNative("TextureBuffer") Object getSourceTexturePool(); - @CalledByNative("TextureBuffer") long getNativeEglContext(); - @CalledByNative("TextureBuffer") int getEglContextType(); - @CalledByNative("TextureBuffer") float[] getTransformMatrixArray(); - @CalledByNative("TextureBuffer") int getSequence(); - @CalledByNative("TextureBuffer") boolean is10BitTexture(); + + @CalledByNative("TextureBuffer") EglBase.Context getEglBaseContext(); + @CalledByNative("TextureBuffer") Object getSourceTexturePool(); + @CalledByNative("TextureBuffer") long getNativeEglContext(); + @CalledByNative("TextureBuffer") int getEglContextType(); + @CalledByNative("TextureBuffer") float[] 
getTransformMatrixArray(); + + @CalledByNative("TextureBuffer") int getSequence(); + @CalledByNative("TextureBuffer") long getFenceObject(); + @CalledByNative("TextureBuffer") boolean is10BitTexture(); } public interface ColorSpace { enum Range { Invalid(0), Limited(1), - Full(2), - Derived(3); + Full(2); private final int range; private Range(int range) { this.range = range; @@ -161,32 +184,55 @@ kBackCamera, kUnspecified, } + public enum AlphaStitchMode { + ALPHA_NO_STITCH(0), + ALPHA_STITCH_UP(1), + ALPHA_STITCH_BELOW(2), + ALPHA_STITCH_LEFT(3), + ALPHA_STITCH_RIGHT(4); + private final int stitchMode; + private AlphaStitchMode(int stitchMode) { + this.stitchMode = stitchMode; + } + public int value() { + return stitchMode; + } + } + private Buffer buffer; + private int rotation; + private long timestampNs; private ColorSpace colorSpace; private SourceType sourceType; private float sampleAspectRatio; + + private AlphaStitchMode alphaStitchMode = AlphaStitchMode.ALPHA_NO_STITCH; private VideoFrameMetaInfo metaInfo = new VideoFrameMetaInfo(); + private @Nullable ByteBuffer alphaBuffer; - private int alphaStitchMode; + private long nativeAlphaBuffer; + public VideoFrame(Buffer buffer, int rotation, long timestampNs) { - this(buffer, rotation, timestampNs, new WrappedNativeColorSpace(), null, 0L, 1.0f, SourceType.kUnspecified.ordinal()); + this(buffer, rotation, timestampNs, new WrappedNativeColorSpace(), null, 0L, 1.0f, + SourceType.kUnspecified.ordinal()); } @CalledByNative public VideoFrame(Buffer buffer, int rotation, long timestampNs, ColorSpace colorSpace, - ByteBuffer alphaBuffer, float sampleAspectRatio, int sourceType) { + ByteBuffer alphaBuffer, long nativeAlphaBuffer, float sampleAspectRatio, int sourceType) { if (buffer == null) { - throw new IllegalArgumentException("buffer not allowed to be null"); + throw new IllegalArgumentException("buffer not allowed to be null"); } if (rotation % 90 != 0) { - throw new IllegalArgumentException("rotation must be a 
multiple of 90"); + throw new IllegalArgumentException("rotation must be a multiple of 90"); } this.buffer = buffer; this.rotation = rotation; this.timestampNs = timestampNs; this.colorSpace = colorSpace; this.alphaBuffer = alphaBuffer; + this.nativeAlphaBuffer = nativeAlphaBuffer; this.sampleAspectRatio = sampleAspectRatio; this.sourceType = SourceType.values()[sourceType]; } @@ -197,18 +243,25 @@ public float getSampleAspectRatio() { return sampleAspectRatio; } + @CalledByNative public Buffer getBuffer() { return buffer; } + @CalledByNative public int getRotation() { return rotation; } @CalledByNative public int getAlphaStitchMode() { - return alphaStitchMode; + return alphaStitchMode.value(); } + @CalledByNative + public void setAlphaStitchMode(int stitchMode) { + alphaStitchMode = AlphaStitchMode.values()[stitchMode]; + } + @CalledByNative public long getTimestampNs() { return timestampNs; @@ -217,41 +270,93 @@ public VideoFrameMetaInfo getMetaInfo() { return metaInfo; } + public int getRotatedWidth() { if (rotation % 180 == 0) { - return buffer.getWidth(); + return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_LEFT + || alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_RIGHT) + ? buffer.getWidth() / 2 + : buffer.getWidth(); } - return buffer.getHeight(); + return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_UP + || alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_BELOW) + ? buffer.getHeight() / 2 + : buffer.getHeight(); } + public int getRotatedHeight() { if (rotation % 180 == 0) { - return buffer.getHeight(); + return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_UP + || alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_BELOW) + ? buffer.getHeight() / 2 + : buffer.getHeight(); } - return buffer.getWidth(); + return (alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_LEFT + || alphaStitchMode == AlphaStitchMode.ALPHA_STITCH_RIGHT) + ? 
buffer.getWidth() / 2 + : buffer.getWidth(); } + public void replaceBuffer(Buffer buffer, int rotation, long timestampNs) { release(); this.buffer = buffer; this.rotation = rotation; this.timestampNs = timestampNs; } + @CalledByNative public ColorSpace getColorSpace() { return colorSpace; } + public void setColorSpace(ColorSpace colorSpace) { + this.colorSpace = colorSpace; + } + @CalledByNative + private int getColorSpaceRange() { + if (colorSpace == null) { + return ColorSpace.Range.Invalid.getRange(); + } + return colorSpace.getRange().getRange(); + } + @CalledByNative + private int getColorSpaceMatrix() { + if (colorSpace == null) { + return ColorSpace.Matrix.Unspecified.getMatrix(); + } + return colorSpace.getMatrix().getMatrix(); + } + @CalledByNative + private int getColorSpaceTransfer() { + if (colorSpace == null) { + return ColorSpace.Transfer.Unspecified.getTransfer(); + } + return colorSpace.getTransfer().getTransfer(); + } + @CalledByNative + private int getColorSpacePrimary() { + if (colorSpace == null) { + return ColorSpace.Primary.Unspecified.getPrimary(); + } + return colorSpace.getPrimary().getPrimary(); + } @CalledByNative public ByteBuffer getAlphaBuffer() { return alphaBuffer; } + public void retainAlphaBuffer() { + JniCommon.nativeAddRef(nativeAlphaBuffer); + } + public void releaseAlphaBuffer() { + JniCommon.nativeReleaseRef(nativeAlphaBuffer); + } public void fillAlphaData(ByteBuffer buffer) { alphaBuffer = buffer; } - public void setAlphaStitchMode(int mode) { - this.alphaStitchMode = mode; - } + @Override public void retain() { buffer.retain(); } + @Override @CalledByNative public void release() { @@ -278,7 +383,8 @@ this.format = format; } } - __attribute__((visibility("default"))) @interface AgoraOutputVideoFrame : NSObject + __attribute__((visibility("default"))) @interface AgoraOutputVideoFrame : NSObject + @property (nonatomic, assign) NSInteger type; @property (nonatomic, assign) int width; @property (nonatomic, assign) int height; @@ 
-291,12 +397,16 @@ @property (nonatomic, assign) int rotation; @property (nonatomic, assign) int64_t renderTimeMs; @property (nonatomic, assign) int avSyncType; + @property(assign, nonatomic) CVPixelBufferRef _Nullable pixelBuffer; @property (nonatomic, assign) uint8_t* _Nullable alphaBuffer; @property (nonatomic, assign) AgoraAlphaStitchMode alphaStitchMode; + @property(nonatomic, strong) NSDictionary *_Nonnull metaInfo; +@property(nonatomic, strong) AgoraColorSpace* _Nullable colorSpace; @end - struct VideoFrame { + +struct VideoFrame { VideoFrame(): type(VIDEO_PIXEL_DEFAULT), width(0), @@ -316,33 +426,57 @@ textureId(0), d3d11Texture2d(NULL), alphaBuffer(NULL), - alphaStitchMode(0), + alphaStitchMode(NO_ALPHA_STITCH), pixelBuffer(NULL), metaInfo(NULL){ memset(matrix, 0, sizeof(matrix)); } + VIDEO_PIXEL_FORMAT type; + int width; + int height; + int yStride; + int uStride; + int vStride; + uint8_t* yBuffer; + uint8_t* uBuffer; + uint8_t* vBuffer; + int rotation; + int64_t renderTimeMs; + int avsync_type; + uint8_t* metadata_buffer; + int metadata_size; + void* sharedContext; + int textureId; + void* d3d11Texture2d; + float matrix[16]; + uint8_t* alphaBuffer; - int alphaStitchMode; + + ALPHA_STITCH_MODE alphaStitchMode; + void* pixelBuffer; + IVideoFrameMetaInfo* metaInfo; + + ColorSpace colorSpace; }; USTRUCT(BlueprintType) struct FVideoFrame { @@ -608,6 +742,19 @@ class VideoFrame {

缓冲区给出的是指向指针的指针,该接口不能修改缓冲区的指针,只能修改缓冲区的内容。

+
+ 方法 + + + getColorSpace + 获取视频帧的色彩空间属性。 + + + setColorSpace + 设置视频帧的色彩空间属性。 + + +
<text conref="../conref/conref_api_metadata.dita#conref_api_metadata/property"/> @@ -727,9 +874,9 @@ class VideoFrame { pixelBuffer 将数据填充到 CVPixelBuffer。 - - colorSpace - 表示视频帧的色彩空间。详见 + + colorSpace + 视频帧的色彩空间属性,默认情况下会应用 Full Range 和 BT.709 标准配置。你可以根据自定义采集、自定义渲染的业务需求进行自定义设置,详见 sourceType diff --git a/dita/RTC-NG/API/enum_videoapplicationscenariotype.dita b/dita/RTC-NG/API/enum_videoapplicationscenariotype.dita index a2092fd8af5..6d3aa5285a9 100644 --- a/dita/RTC-NG/API/enum_videoapplicationscenariotype.dita +++ b/dita/RTC-NG/API/enum_videoapplicationscenariotype.dita @@ -33,6 +33,13 @@

+ + + +

3: 秀场直播

+

+ +

diff --git a/dita/RTC-NG/API/rtc_api_data_type.dita b/dita/RTC-NG/API/rtc_api_data_type.dita index d52c798d28e..6fe27c9c5d9 100644 --- a/dita/RTC-NG/API/rtc_api_data_type.dita +++ b/dita/RTC-NG/API/rtc_api_data_type.dita @@ -206,6 +206,7 @@
  • +
  • @@ -216,6 +217,7 @@
  • +
  • @@ -1276,6 +1278,7 @@