From 41e3d9c8e5556c9d75f7a823f51d71b8d610c5b9 Mon Sep 17 00:00:00 2001 From: sda-rob <149643938+sda-rob@users.noreply.github.com> Date: Fri, 25 Oct 2024 17:24:03 +0800 Subject: [PATCH] [AUTO] Generate codes by terra (#833) Co-authored-by: guoxianzhe --- src/AgoraBase.ts | 12 ++++++---- src/AgoraMediaBase.ts | 8 ++++--- src/IAgoraMediaEngine.ts | 17 ++------------ src/IAgoraMediaPlayerSource.ts | 4 ++-- src/IAgoraRtcEngine.ts | 39 ++++++++++++++------------------- src/IAgoraRtcEngineEx.ts | 9 ++++---- src/impl/IAgoraRtcEngineImpl.ts | 21 +++++++++++++----- 7 files changed, 53 insertions(+), 57 deletions(-) diff --git a/src/AgoraBase.ts b/src/AgoraBase.ts index 5f93297f..3ce16831 100644 --- a/src/AgoraBase.ts +++ b/src/AgoraBase.ts @@ -622,7 +622,7 @@ export enum QualityType { */ QualityUnsupported = 7, /** - * 8: Detecting the network quality. + * 8: The last-mile network probe test is in progress. */ QualityDetecting = 8, } @@ -3299,7 +3299,7 @@ export class MixedAudioStream { /** * @ignore */ - channelName?: string; + channelId?: string; /** * @ignore */ @@ -3317,7 +3317,7 @@ export class LocalAudioMixerConfiguration { /** * @ignore */ - sourceStreams?: MixedAudioStream[]; + audioInputStreams?: MixedAudioStream[]; /** * @ignore */ @@ -3973,7 +3973,7 @@ export class ColorEnhanceOptions { */ export enum BackgroundSourceType { /** - * 0: Process the background as alpha data without replacement, only separating the portrait and the background. After setting this value, you can call startLocalVideoTranscoder to implement the picture-in-picture effect. + * @ignore */ BackgroundNone = 0, /** @@ -4088,6 +4088,10 @@ export class AudioTrackConfig { * Whether to enable the local audio-playback device: true : (Default) Enable the local audio-playback device. false : Do not enable the local audio-playback device. */ enableLocalPlayback?: boolean; + /** + * @ignore + */ + enableAudioProcessing?: boolean; } /** diff --git a/src/AgoraMediaBase.ts b/src/AgoraMediaBase.ts index 107a8cf3..19f4ae41 100644 --- a/src/AgoraMediaBase.ts +++ b/src/AgoraMediaBase.ts @@ -1108,7 +1108,9 @@ export class VideoFrame { */ matrix?: number[]; /** - * The alpha channel data output by using portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0,255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, video, etc. In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering. + * The alpha channel data output by using portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0,255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, video, etc. + * In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering. + * Make sure that alphaBuffer is exactly the same size as the video frame (width × height), otherwise it may cause the app to crash. */ alphaBuffer?: Uint8Array; /** @@ -1120,7 +1122,7 @@ export class VideoFrame { */ pixelBuffer?: Uint8Array; /** - * The meta information in the video frame. 
To use this parameter, please contact. + * The meta information in the video frame. To use this parameter, contact. */ metaInfo?: IVideoFrameMetaInfo; /** @@ -1440,7 +1442,7 @@ export interface IAudioSpectrumObserver { * * After successfully calling registerAudioSpectrumObserver to implement the onRemoteAudioSpectrum callback in the IAudioSpectrumObserver and calling enableAudioSpectrumMonitor to enable audio spectrum monitoring, the SDK will trigger the callback as the time interval you set to report the received remote audio data spectrum. * - * @param spectrums The audio spectrum information of the remote user, see UserAudioSpectrumInfo. The number of arrays is the number of remote users monitored by the SDK. If the array is null, it means that no audio spectrum of remote users is detected. + * @param spectrums The audio spectrum information of the remote user. See UserAudioSpectrumInfo. The number of arrays is the number of remote users monitored by the SDK. If the array is null, it means that no audio spectrum of remote users is detected. * @param spectrumNumber The number of remote users. */ onRemoteAudioSpectrum?( diff --git a/src/IAgoraMediaEngine.ts b/src/IAgoraMediaEngine.ts index 540274f2..5de2de98 100644 --- a/src/IAgoraMediaEngine.ts +++ b/src/IAgoraMediaEngine.ts @@ -57,13 +57,7 @@ export abstract class IMediaEngine { /** * Registers a raw video frame observer object. * - * If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps: - * Call registerVideoFrameObserver to register the raw video frame observer before joining the channel. - * Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel. - * After joining the channel, get the user IDs of group B users through onUserJoined, and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true. - * Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then: - * The raw video data of group A users can be obtained through the callback in IVideoFrameObserver, and the SDK renders the data by default. - * The encoded video data of group B users can be obtained through the callback in IVideoEncodedFrameObserver. If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one IVideoFrameObserver class with this method. When calling this method to register a video observer, you can register callbacks in the IVideoFrameObserver class as needed. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received. + * If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one IVideoFrameObserver class with this method. When calling this method to register a video observer, you can register callbacks in the IVideoFrameObserver class as needed. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received. * * @param observer The observer instance. See IVideoFrameObserver. * @@ -76,14 +70,7 @@ export abstract class IMediaEngine { /** * Registers a receiver object for the encoded video image. 
* - * If you only want to observe encoded video frames (such as h.264 format) without decoding and rendering the video, Agora recommends that you implement one IVideoEncodedFrameObserver class through this method. If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps: - * Call registerVideoFrameObserver to register the raw video frame observer before joining the channel. - * Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel. - * After joining the channel, get the user IDs of group B users through onUserJoined, and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true. - * Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then: - * The raw video data of group A users can be obtained through the callback in IVideoFrameObserver, and the SDK renders the data by default. - * The encoded video data of group B users can be obtained through the callback in IVideoEncodedFrameObserver. - * Call this method before joining a channel. + * If you only want to observe encoded video frames (such as H.264 format) without decoding and rendering the video, Agora recommends that you implement one IVideoEncodedFrameObserver class through this method. Call this method before joining a channel. * * @param observer The video frame observer object. See IVideoEncodedFrameObserver. * diff --git a/src/IAgoraMediaPlayerSource.ts b/src/IAgoraMediaPlayerSource.ts index b6c43784..00530af6 100644 --- a/src/IAgoraMediaPlayerSource.ts +++ b/src/IAgoraMediaPlayerSource.ts @@ -66,8 +66,8 @@ export interface IMediaPlayerSourceObserver { * Reports the playback duration that the buffered data can support. * * When playing online media resources, the SDK triggers this callback every two seconds to report the playback duration that the currently buffered data can support. - * When the playback duration supported by the buffered data is less than the threshold (0 by default), the SDK returns PlayerEventBufferLow. - * When the playback duration supported by the buffered data is greater than the threshold (0 by default), the SDK returns PlayerEventBufferRecover. + * When the playback duration supported by the buffered data is less than the threshold (0 by default), the SDK returns PlayerEventBufferLow (6). + * When the playback duration supported by the buffered data is greater than the threshold (0 by default), the SDK returns PlayerEventBufferRecover (7). * * @param playCachedBuffer The playback duration (ms) that the buffered data can support. */ diff --git a/src/IAgoraRtcEngine.ts b/src/IAgoraRtcEngine.ts index 7aa21df3..6d7e9968 100644 --- a/src/IAgoraRtcEngine.ts +++ b/src/IAgoraRtcEngine.ts @@ -1161,7 +1161,7 @@ export class ChannelMediaOptions { */ publishCustomAudioTrack?: boolean; /** - * The ID of the custom audio source to publish. The default value is 0. If you have set sourceNumber in setExternalAudioSource to a value greater than 1, the SDK creates the corresponding number of custom audio tracks and assigns an ID to each audio track, starting from 0. + * The ID of the custom audio track to be published. The default value is 0. You can obtain the custom audio track ID through the createCustomAudioTrack method. 
*/ publishCustomAudioTrackId?: number; /** @@ -1507,8 +1507,8 @@ export interface IRtcEngineEventHandler { * * @param connection The connection information. See RtcConnection. * @param remoteUid The user ID. The network quality of the user with this user ID is reported. If the uid is 0, the local network quality is reported. - * @param txQuality Uplink network quality rating of the user in terms of the transmission bit rate, packet loss rate, average RTT (Round-Trip Time) and jitter of the uplink network. This parameter is a quality rating helping you understand how well the current uplink network conditions can support the selected video encoder configuration. For example, a 1000 Kbps uplink network may be adequate for video frames with a resolution of 640 × 480 and a frame rate of 15 fps in the LIVE_BROADCASTING profile, but might be inadequate for resolutions higher than 1280 × 720. QualityUnknown (0): The quality is unknown. QualityExcellent (1): The quality is excellent. QualityGood (2): The network quality seems excellent, but the bitrate can be slightly lower than excellent. QualityPoor (3): Users can feel the communication is slightly impaired. QualityBad (4): Users cannot communicate smoothly. QualityVbad (5): The quality is so bad that users can barely communicate. QualityDown (6): The network is down, and users cannot communicate at all. - * @param rxQuality Downlink network quality rating of the user in terms of packet loss rate, average RTT, and jitter of the downlink network. QualityUnknown (0): The quality is unknown. QualityExcellent (1): The quality is excellent. QualityGood (2): The network quality seems excellent, but the bitrate can be slightly lower than excellent. QualityPoor (3): Users can feel the communication is slightly impaired. QualityBad (4): Users cannot communicate smoothly. QualityVbad (5): The quality is so bad that users can barely communicate. QualityDown (6): The network is down, and users cannot communicate at all. + * @param txQuality Uplink network quality rating of the user in terms of the transmission bit rate, packet loss rate, average RTT (Round-Trip Time) and jitter of the uplink network. This parameter is a quality rating helping you understand how well the current uplink network conditions can support the selected video encoder configuration. For example, a 1000 Kbps uplink network may be adequate for video frames with a resolution of 640 × 480 and a frame rate of 15 fps in the LIVE_BROADCASTING profile, but might be inadequate for resolutions higher than 1280 × 720. QualityUnknown (0): The quality is unknown. QualityExcellent (1): The quality is excellent. QualityGood (2): The network quality seems excellent, but the bitrate can be slightly lower than excellent. QualityPoor (3): Users can feel the communication is slightly impaired. QualityBad (4): Users cannot communicate smoothly. QualityVbad (5): The quality is so bad that users can barely communicate. QualityDown (6): The network is down, and users cannot communicate at all. QualityDetecting (8): The last-mile probe test is in progress. + * @param rxQuality Downlink network quality rating of the user in terms of packet loss rate, average RTT, and jitter of the downlink network. QualityUnknown (0): The quality is unknown. QualityExcellent (1): The quality is excellent. QualityGood (2): The network quality seems excellent, but the bitrate can be slightly lower than excellent. QualityPoor (3): Users can feel the communication is slightly impaired. 
QualityBad (4): Users cannot communicate smoothly. QualityVbad (5): The quality is so bad that users can barely communicate. QualityDown (6): The network is down, and users cannot communicate at all. QualityDetecting (8): The last-mile probe test is in progress. */ onNetworkQuality?( connection: RtcConnection, @@ -1541,7 +1541,7 @@ export interface IRtcEngineEventHandler { * * This callback reports the last-mile network conditions of the local user before the user joins the channel. Last mile refers to the connection between the local device and Agora's edge server. Before the user joins the channel, this callback is triggered by the SDK once startLastmileProbeTest is called and reports the last-mile network conditions of the local user. * - * @param quality The last-mile network quality. QualityUnknown (0): The quality is unknown. QualityExcellent (1): The quality is excellent. QualityGood (2): The network quality seems excellent, but the bitrate can be slightly lower than excellent. QualityPoor (3): Users can feel the communication is slightly impaired. QualityBad (4): Users cannot communicate smoothly. QualityVbad (5): The quality is so bad that users can barely communicate. QualityDown (6): The network is down, and users cannot communicate at all. See QualityType. + * @param quality The last-mile network quality. QualityUnknown (0): The quality is unknown. QualityExcellent (1): The quality is excellent. QualityGood (2): The network quality seems excellent, but the bitrate can be slightly lower than excellent. QualityPoor (3): Users can feel the communication is slightly impaired. QualityBad (4): Users cannot communicate smoothly. QualityVbad (5): The quality is so bad that users can barely communicate. QualityDown (6): The network is down, and users cannot communicate at all. QualityDetecting (8): The last-mile probe test is in progress. See QualityType. */ onLastmileQuality?(quality: QualityType): void; @@ -1972,7 +1972,7 @@ export interface IRtcEngineEventHandler { * @param connection The connection information. See RtcConnection. * @param remoteUid The ID of the remote user sending the message. * @param streamId The stream ID of the received message. - * @param code The error code. See ErrorCodeType. + * @param code Error code. See ErrorCodeType. * @param missed The number of lost messages. * @param cached Number of incoming cached messages when the data stream is interrupted. */ @@ -2965,10 +2965,10 @@ export abstract class IRtcEngine { /** * Gets the warning or error description. * - * @param code The error code or warning code reported by the SDK. + * @param code The error code reported by the SDK. * * @returns - * The specific error or warning description. + * The specific error description. */ abstract getErrorDescription(code: number): string; @@ -3737,14 +3737,10 @@ export abstract class IRtcEngine { /** * Options for subscribing to remote video streams. * - * When a remote user has enabled dual-stream mode, you can call this method to choose the option for subscribing to the video streams sent by the remote user. - * If you only register one IVideoFrameObserver object, the SDK subscribes to the raw video data and encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false). - * If you only register one IVideoEncodedFrameObserver object, the SDK only subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to true). 
- * If you register one IVideoFrameObserver object and one IVideoEncodedFrameObserver object successively, the SDK subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false). - * If you call this method first with the options parameter set, and then register one IVideoFrameObserver or IVideoEncodedFrameObserver object, you need to call this method again and set the options parameter as described in the above two items to get the desired results. Agora recommends the following steps: - * Set autoSubscribeVideo to false when calling joinChannel to join a channel. - * Call this method after receiving the onUserJoined callback to set the subscription options for the specified remote user's video stream. - * Call the muteRemoteVideoStream method to resume subscribing to the video stream of the specified remote user. If you set encodedFrameOnly to true in the previous step, the SDK triggers the onEncodedVideoFrameReceived callback locally to report the received encoded video frame information. + * When a remote user has enabled dual-stream mode, you can call this method to choose the option for subscribing to the video streams sent by the remote user. The default subscription behavior of the SDK for remote video streams depends on the type of registered video observer: + * If the IVideoFrameObserver observer is registered, the default is to subscribe to both raw data and encoded data. + * If the IVideoEncodedFrameObserver observer is registered, the default is to subscribe only to the encoded data. + * If both types of observers are registered, the default behavior follows the last registered video observer. For example, if the last registered observer is the IVideoFrameObserver observer, the default is to subscribe to both raw data and encoded data. If you want to modify the default behavior, or set different subscription options for different uids, you can call this method to set it. * * @param uid The user ID of the remote user. * @param options The video subscription options. See VideoSubscriptionOptions. @@ -4026,7 +4022,7 @@ export abstract class IRtcEngine { /** * Adjusts the volume during audio mixing. * - * This method adjusts the audio mixing volume on both the local client and remote clients. + * This method adjusts the audio mixing volume on both the local client and remote clients. This method does not affect the volume of the audio file set in the playEffect method. * * @param volume Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. * @@ -5854,7 +5850,7 @@ export abstract class IRtcEngine { /** * @ignore */ - abstract setExternalMediaProjection(): any; + abstract setExternalMediaProjection(mediaProjection: any): number; /** * Sets the screen sharing scenario. @@ -6176,9 +6172,8 @@ export abstract class IRtcEngine { * Sends data stream messages. * * After calling createDataStream, you can call this method to send data stream messages to all users in the channel. The SDK has the following restrictions on this method: - * Each user can have up to five data streams simultaneously. - * Up to 60 packets can be sent per second in a data stream with each packet having a maximum size of 1 KB. - * Up to 30 KB of data can be sent per second in a data stream. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. 
A failed method call triggers the onStreamMessageError callback on the remote client. + * Each client within the channel can have up to 5 data channels simultaneously, with a total shared packet bitrate limit of 30 KB/s for all data channels. + * Each data channel can send up to 60 packets per second, with each packet being a maximum of 1 KB. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client. * This method needs to be called after createDataStream and joining the channel. * In live streaming scenarios, this method only applies to hosts. * @@ -6664,7 +6659,7 @@ export abstract class IRtcEngine { * When video screenshot and upload function is enabled, the SDK takes screenshots and uploads videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig. After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service. * * @param enabled Whether to enalbe video screenshot and upload: true : Enables video screenshot and upload. false : Disables video screenshot and upload. - * @param config Screenshot and upload configuration. See ContentInspectConfig. When the video moderation module is set to video moderation via Agora self-developed extension(ContentInspectSupervision), the video screenshot and upload dynamic library libagora_content_inspect_extension.dll is required. Deleting this library disables the screenshot and upload feature. + * @param config Screenshot and upload configuration. See ContentInspectConfig. * * @returns * 0: Success. @@ -6713,7 +6708,7 @@ export abstract class IRtcEngine { * Sets up cloud proxy service. * * When users' network access is restricted by a firewall, configure the firewall to allow specific IP addresses and ports provided by Agora; then, call this method to enable the cloud proxyType and set the cloud proxy type with the proxyType parameter. After successfully connecting to the cloud proxy, the SDK triggers the onConnectionStateChanged (ConnectionStateConnecting, ConnectionChangedSettingProxyServer) callback. To disable the cloud proxy that has been set, call the setCloudProxy (NoneProxy). To change the cloud proxy type that has been set, call the setCloudProxy (NoneProxy) first, and then call the setCloudProxy to set the proxyType you want. - * Agora recommends that you call this method after joining a channel. + * Agora recommends that you call this method before joining a channel. * When a user is behind a firewall and uses the Force UDP cloud proxy, the services for Media Push and cohosting across channels are not available. * When you use the Force TCP cloud proxy, note that an error would occur when calling the startAudioMixing method to play online music files in the HTTP protocol. The services for Media Push and cohosting across channels use the cloud proxy with the TCP protocol. * diff --git a/src/IAgoraRtcEngineEx.ts b/src/IAgoraRtcEngineEx.ts index 06310325..cf410a4e 100644 --- a/src/IAgoraRtcEngineEx.ts +++ b/src/IAgoraRtcEngineEx.ts @@ -544,9 +544,8 @@ export abstract class IRtcEngineEx extends IRtcEngine { * Sends data stream messages. * * A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. 
A failed method call triggers the onStreamMessageError callback on the remote client. The SDK has the following restrictions on this method: - * Each user can have up to five data streams simultaneously. - * Up to 60 packets can be sent per second in a data stream with each packet having a maximum size of 1 KB. - * Up to 30 KB of data can be sent per second in a data stream. After calling createDataStreamEx, you can call this method to send data stream messages to all users in the channel. + * Each client within the channel can have up to 5 data channels simultaneously, with a total shared packet bitrate limit of 30 KB/s for all data channels. + * Each data channel can send up to 60 packets per second, with each packet being a maximum of 1 KB. After calling createDataStreamEx, you can call this method to send data stream messages to all users in the channel. * Call this method after joinChannelEx. * Ensure that you call createDataStreamEx to create a data channel before calling this method. * This method applies only to the COMMUNICATION profile or to the hosts in the LIVE_BROADCASTING profile. If an audience in the LIVE_BROADCASTING profile calls this method, the audience may be switched to a host. @@ -891,7 +890,7 @@ export abstract class IRtcEngineEx extends IRtcEngine { * This method can take screenshots for multiple video streams and upload them. When video screenshot and upload function is enabled, the SDK takes screenshots and uploads videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig. After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service. * * @param enabled Whether to enalbe video screenshot and upload: true : Enables video screenshot and upload. false : Disables video screenshot and upload. - * @param config Screenshot and upload configuration. See ContentInspectConfig. When the video moderation module is set to video moderation via Agora self-developed extension(ContentInspectSupervision), the video screenshot and upload dynamic library libagora_content_inspect_extension.dll is required. Deleting this library disables the screenshot and upload feature. + * @param config Screenshot and upload configuration. See ContentInspectConfig. * @param connection The connection information. See RtcConnection. * * @returns @@ -929,7 +928,7 @@ export abstract class IRtcEngineEx extends IRtcEngine { /** * Gets the call ID with the connection ID. * - * When a user joins a channel on a client, a callId is generated to identify the call from the client. You can call this method to get the callId parameter, and pass it in when calling methods such as rate and complain. + * When a user joins a channel on a client, a callId is generated to identify the call from the client. You can call this method to get callId, and pass it in when calling methods such as rate and complain. * * @param connection The connection information. See RtcConnection. 
 *

diff --git a/src/impl/IAgoraRtcEngineImpl.ts b/src/impl/IAgoraRtcEngineImpl.ts
index d17ed658..df0f15a2 100644
--- a/src/impl/IAgoraRtcEngineImpl.ts
+++ b/src/impl/IAgoraRtcEngineImpl.ts
@@ -5479,15 +5479,24 @@ export class IRtcEngineImpl implements IRtcEngine {
     return 'RtcEngine_queryCameraFocalLengthCapability_2dee6af';
   }
 
-  setExternalMediaProjection(): any {
-    const apiType = this.getApiTypeFromSetExternalMediaProjection();
-    const jsonParams = {};
+  setExternalMediaProjection(mediaProjection: any): number {
+    const apiType =
+      this.getApiTypeFromSetExternalMediaProjection(mediaProjection);
+    const jsonParams = {
+      mediaProjection: mediaProjection,
+      toJSON: () => {
+        return {
+          mediaProjection: mediaProjection,
+        };
+      },
+    };
     const jsonResults = callIrisApi.call(this, apiType, jsonParams);
-    const mediaProjection = jsonResults.mediaProjection;
-    return mediaProjection;
+    return jsonResults.result;
   }
 
-  protected getApiTypeFromSetExternalMediaProjection(): string {
+  protected getApiTypeFromSetExternalMediaProjection(
+    mediaProjection: any
+  ): string {
     return 'RtcEngine_setExternalMediaProjection_f337cbf';
   }
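
Usage sketch (not part of the generated patch): it illustrates the reworded publishCustomAudioTrackId documentation in this diff — the track ID comes from createCustomAudioTrack — and touches the AudioTrackConfig.enableAudioProcessing field added here (still marked @ignore). The package name and the App ID / token / channel placeholders are assumptions, not something this patch defines.

import {
  createAgoraRtcEngine,
  AudioTrackType,
  ChannelMediaOptions,
} from 'react-native-agora'; // assumed package name for this terra-generated SDK

const engine = createAgoraRtcEngine();
engine.initialize({ appId: '<YOUR_APP_ID>' }); // placeholder App ID

// Create a mixable custom audio track; the returned ID is what
// ChannelMediaOptions.publishCustomAudioTrackId expects.
const customTrackId = engine.createCustomAudioTrack(
  AudioTrackType.AudioTrackMixable,
  {
    enableLocalPlayback: false,
    enableAudioProcessing: false, // field introduced by this patch, currently @ignore
  }
);

// Publish the custom track when joining the channel.
const options: ChannelMediaOptions = {
  publishCustomAudioTrack: true,
  publishCustomAudioTrackId: customTrackId,
};
engine.joinChannel('<TOKEN>', '<CHANNEL_ID>', 0, options); // placeholder token/channel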