From 320844a5863cfd6da099dda98058d0774e75cff5 Mon Sep 17 00:00:00 2001
From: xucz
Date: Thu, 25 May 2023 14:52:49 +0800
Subject: [PATCH] Dev/4.2.0 (#340)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* [Android]fix bytedance beauty mirror bug
* update Scene Beauty
* update Byte Beauty
* update fu Beauty
* Update README.zh.md
* Update README.md
* [Android]fix faceunity beauty bug(CSSA-1070)
* [Android]remove inaccurate comment.
* [Android]perfect sense time beauty implementation.
* [Android]adapt to 4.2.0.
* Adapt PCM acquisition
* Add Record Module
* add voice formant adjustment
* add video background effect
* [Android]add MediaRecorder case
* Adapt multi-channel camera
* modify module name
* [Android]perfect MediaRecorder case
* choose screen share frameRate
* add videoProcess remote alpha mask
* [Android]add LocalVideoTranscoding case.
* [Android]add more voice configs for VoiceEffects
* [Android]add virtual background video source type for VideoProcessExtension.
* [Android]add ScreenScenarioType selector for ScreenSharing.
* [Android]update rtc version to 4.2.0 in config files
* [Android][Audio]adapt to 4.2.0 sdk.
* replace video source
* Adapt to the 4.2.0 Mac SDK
* add local and remote record module
* fix record stop bug
* add virtual video background
* Mac: add local composite graph module
* add screen share fps selection
* enable remote video alpha mask
* Adapt to the 4.2.0 audio-only SDK
* set voice formant
* modify mediaplayer url
* update mac media player url
* [Windows]adapt to 4.2.0 sdk and add media recorder case.
* [Windows]fix bugs of media recorder case.
* update scene resources
* [Windows]perfect local video transcoding case.
* [Windows]add voice conversion config to beauty audio case.
* [Windows]add virtual background type choices to beauty case.
* [Windows]add screen capture scenario config.
* add video resource
* update video resource
* [Android]update test video url and file reader.
* [Android]perfect local video transcoding case
* add sampleBufferRender init method
* add multi video track module
* fix scene beauty change frameRate bug
* fix role bug
* fix rawVideoData snapshot bug
* update save video path
* fix rtmp error bug
* [Android]fix sceneTimeBeauty crash and default video resolution size problem.
* fix voice changer bug
* [Android]perfect third-party beauty cases
* fix pcm play bug
* [Android]fix NMS-11565
* update script
* update script
* update exportPlist file
* update script
* fix scene bug
* update exportPlist file
* update exportPlist file
* update script
* update script
* [Android]perfect third-party beauty cases
* [Android]fix setting crash
* update script
* add log path
* update script
* update script
* update script
* update script
* update script
* pip: replace remote video with an image when muted
* update audio example bundle id
* update script
* update mac exportPlist file
* fix mac raw video data snapshot bug
* update mac script
* update mac script
* update mac script
* update mac script
* update mac script
* update mac script
* update mac script
* update mac script
* update mac script
* update mac script
* update mac script
* update mac script
* [Windows]add PushExternalVideoYUV case.
* [Windows]fix PushExternalVideoYUV bugs.
* [Windows]add MultiVideoSourceTracks case.
* update script
* [Windows]perfect MultiVideoSourceTrack case and fix some bugs.
* [Windows]fix bug - NMS-11562
* [Windows]fix yuv raw data bug.
* [Android]add push video buffer type selector for PushExternalVideoYUV case.
* [Android]update gradle config.
* [Android]fix beauty bug in arm64.
* [Android]fix render bug in arm64
* [Android]remove ndk filter limit.
* [Windows]fix crash of switching between LiveBroadcast and AgoraScreenCapture; fix local render bug of resuming PushExternalVideoYUV.
* [Windows]perfect screen sharing case(NMS-11314) and fix some bugs.
* [Windows]fix sample res bug.
* update mac readme
* update screen share exclude window params
* update README
* update README.
* [Android]update README.
* fix 90° rotation of pip published stream on Android
* fix ci remove sources bug
* update sign mode
* update auto sign
* update script
* update script
* [Android]update byte dance camera facing config.
* fix pip display bug
* update beauty params
* Add pip version tips
* [Android]perfect MultiVideoSourceTracks case and fix some bugs.
* [Windows]fix statistics display bug, etc.
* update recorder api
* update recorder api
* pip: support BGRA encoding
* [Android]adjust MediaRecorder case, etc.
* [Android]fix mirror bug of scene time and face unity.
* update English content
* [Android]fix ci bug.
* [Windows]adjust media recorder api.
* support pip I420
* update new sdk
* update exportPlist file
* [Android]fix token/app id input bug.
* [Android]fix push timestamp bug(NMS-12801)
* [Android]fix sensetime sticker orientation bug.
* enable filters
* fix custom capturer audio crash bug
* fix multi capturer video bug
* fix custom video capturer publish streaming bug
* [Android]fix video filter invalid bug.
* [Android]remove publishCameraTrack=false
* [Windows]fix beauty external bug.
* Adapt to new versions
* [Windows]use the timestamp from the getCurrentMonotonicTimeInMs api for pushVideoFrame.
* update sdk version to 4.2.0
* [Windows]update sdk link.
* update azure ci

---------

Co-authored-by: zhaoyongqiang
Co-authored-by: Qianze Zhang
---
.github/ci/build/build_ios_ipa.sh | 40 +-
.github/ci/build/build_mac.sh | 2 +-
.github/ci/build/build_mac_ipa.sh | 27 +-
Android/APIExample-Audio/app/build.gradle | 2 +-
.../examples/advanced/VoiceEffects.java | 50 +-
.../customaudio/CustomAudioRender.java | 8 +-
.../customaudio/CustomAudioSource.java | 137 +---
.../api/example/utils/AudioFileReader.java | 116 +++
.../res/layout/fragment_voice_effects.xml | 27 +
.../app/src/main/res/values/arrays.xml | 11 +
Android/APIExample/README.md | 3 +-
Android/APIExample/README.zh.md | 2 +-
Android/APIExample/app/build.gradle | 7 +-
.../api/example/common/gles/GLTestUtils.java | 125 +++
.../api/example/common/gles/GLThread.java | 11 +
.../example/common/model/GlobalSettings.java | 12 +-
.../common/widget/VideoReportLayout.java | 6 +-
.../CDNStreaming/AudienceFragment.java | 10 +-
.../advanced/CustomRemoteVideoRender.java | 14 +-
.../advanced/LocalVideoTranscoding.java | 494 ++++++++++++
.../examples/advanced/MediaPlayer.java | 7 +-
.../examples/advanced/MediaRecorder.java | 674 ++++++++++++++++
.../advanced/MultiVideoSourceTracks.java | 204 ++++-
.../examples/advanced/ProcessRawData.java | 17 +-
.../advanced/PushExternalVideoYUV.java | 157 +++-
.../examples/advanced/ScreenSharing.java | 32 +-
.../advanced/SwitchCameraScreenShare.java | 17 +-
.../advanced/VideoProcessExtension.java | 8 +-
.../examples/advanced/VoiceEffects.java | 46 +-
.../advanced/beauty/ByteDanceBeauty.java | 228 ++++--
.../advanced/beauty/FaceUnityBeauty.java | 228 +++---
.../advanced/beauty/SceneTimeBeauty.java | 311 ++++----
.../advanced/beauty/VideoCaptureUtils.java | 29 +
.../customaudio/CustomAudioRender.java | 3 +-
.../customaudio/CustomAudioSource.java | 24 +-
.../advanced/videoRender/YuvFboProgram.java | 91
+++ .../api/example/utils/VideoFileReader.java | 11 +- .../res/layout/fragment_beauty_bytedance.xml | 15 +- .../res/layout/fragment_beauty_faceunity.xml | 23 +- .../res/layout/fragment_beauty_scenetime.xml | 24 +- .../fragment_joinchannel_video_by_token.xml | 2 - .../layout/fragment_localvideotranscoding.xml | 51 ++ .../res/layout/fragment_media_recorder.xml | 152 ++++ .../fragment_multi_video_source_tracks.xml | 28 +- .../layout/fragment_push_externalvideo.xml | 27 +- .../res/layout/fragment_screen_sharing.xml | 28 +- .../res/layout/fragment_video_enhancement.xml | 6 + .../res/layout/fragment_voice_effects.xml | 27 + .../app/src/main/res/navigation/nav_graph.xml | 16 + .../app/src/main/res/values-zh/strings.xml | 9 + .../app/src/main/res/values/arrays.xml | 22 + .../app/src/main/res/values/strings.xml | 9 + .../agora/beauty/base/IBeautyByteDance.java | 4 +- .../agora/beauty/base/IBeautyFaceUnity.java | 4 +- .../agora/beauty/base/IBeautySenseTime.java | 2 +- .../beauty/bytedance/BeautyByteDanceImpl.java | 84 +- Android/APIExample/beauty/faceunity/README.md | 2 +- .../APIExample/beauty/faceunity/README.zh.md | 2 +- .../java/com/faceunity/nama/FURenderer.java | 11 +- .../java/com/faceunity/nama/IFURenderer.java | 4 +- .../beauty/faceunity/BeautyFaceUnityImpl.java | 27 +- .../APIExample/beauty/sense-time/README.md | 2 +- .../APIExample/beauty/sense-time/README.zh.md | 2 +- .../com/sensetime/effects/STRenderer.java | 111 ++- .../sensetime/effects/display/STGLRender.java | 9 +- .../beauty/sensetime/BeautySenseTimeImpl.java | 10 +- README.md | 12 +- README.zh.md | 12 +- cicd/scripts/ios_build.sh | 12 +- .../project.pbxproj | 4 +- .../Common/ExternalAudio/ExternalAudio.mm | 2 +- .../CustomPcmAudioSource.swift | 22 +- .../Base.lproj/VoiceChanger.storyboard | 87 ++- .../Advanced/VoiceChanger/VoiceChanger.swift | 3 + .../APIExample-Audio/Info.plist | 4 +- iOS/APIExample-Audio/ExportOptions.plist | 18 +- iOS/APIExample-Audio/Podfile | 2 +- iOS/APIExample-Audio/iOS_ExportOptions.plist | 27 + .../APIExample.xcodeproj/project.pbxproj | 54 +- .../Common/ExternalAudio/ExternalAudio.mm | 2 +- .../ExternalVideo/AgoraMetalRender.swift | 9 +- .../AgoraPictureInPictureController.h | 2 + .../AgoraPictureInPictureController.m | 10 +- .../ExternalVideo/AgoraSampleBufferRender.m | 64 +- .../APIExample/Common/PickerView.swift | 124 +++ .../Examples/Advanced/ARKit/ARKit.swift | 2 +- .../CreateDataStream/CreateDataStream.swift | 4 +- .../CustomPcmAudioSource.swift | 22 +- .../CustomVideoRender/CustomVideoRender.swift | 4 +- .../CustomVideoSourcePush.swift | 4 +- .../CustomVideoSourcePushMulti.swift | 2 +- .../Advanced/FusionCDN/FusionCDN.swift | 10 +- .../LiveStreaming/LiveStreaming.swift | 6 +- .../MediaChannelRelay/MediaChannelRelay.swift | 4 +- .../Base.lproj/MediaPlayer.storyboard | 28 +- .../Advanced/MutliCamera/MutliCamera.swift | 70 +- .../PictureInPicture/PictureInPicture.swift | 52 +- .../RTMPStreaming/RTMPStreaming.swift | 6 +- .../Advanced/RawVideoData/RawVideoData.swift | 14 +- .../Advanced/RhythmPlayer/RhythmPlayer.swift | 4 +- .../Base.lproj/ScreenShare.storyboard | 61 +- .../Advanced/ScreenShare/ScreenShare.swift | 56 +- .../Advanced/SimpleFilter/SimpleFilter.swift | 4 +- .../Base.lproj/BytedEffect.storyboard | 152 +++- .../ByteBeautify/BytedEffectVC.m | 25 +- .../ByteBeautify/Manager/ByteDanceFilter.h | 7 +- .../ByteBeautify/Manager/ByteDanceFilter.m | 48 +- .../ThirdBeautify/ByteBeautify/Manager/Core.h | 2 +- .../zh-Hans.lproj/BytedEffect.strings | 16 +- .../Base.lproj/FUBeautify.storyboard | 151 
+++- .../ThirdBeautify/FUBeautify/FUBeautifyVC.m | 27 +- .../FUBeautify/Manager/FUManager.h | 6 +- .../FUBeautify/Manager/FUManager.m | 76 +- .../zh-Hans.lproj/FUBeautify.strings | 16 +- .../Base.lproj/SenseBeautify.storyboard | 142 +++- .../Manager/VideoProcessingManager.h | 5 + .../Manager/VideoProcessingManager.m | 86 ++- .../SenseBeautify/SenseBeautifyVC.m | 22 +- .../zh-Hans.lproj/SenseBeautify.strings | 15 +- .../Advanced/VideoChat/VideoChat.swift | 4 +- .../VideoMetadata/VideoMetadata.swift | 4 +- .../Base.lproj/VideoProcess.storyboard | 5 +- .../Advanced/VideoProcess/VideoProcess.swift | 16 +- .../zh-Hans.lproj/VideoProcess.strings | 2 + .../Base.lproj/VoiceChanger.storyboard | 77 +- .../Advanced/VoiceChanger/VoiceChanger.swift | 4 + .../JoinChannelVideoRecorder.storyboard | 102 +++ .../JoinChannelVideoRecorder.swift | 388 ++++++++++ .../JoinChannelVideoRecorder.strings | 21 + .../JoinChannelVideoToken.swift | 4 +- .../JoinChannelVideo/JoinChannelVideo.swift | 4 +- iOS/APIExample/APIExample/Info.plist | 4 +- .../APIExample/Resources/sample.mov | Bin 0 -> 219059 bytes .../APIExample/ViewController.swift | 3 +- .../zh-Hans.lproj/Localizable.strings | 4 + iOS/APIExample/ExportOptions.plist | 20 +- iOS/APIExample/Podfile | 12 +- iOS/APIExample/iOS_ExportOptions.plist | 29 + iOS/APIExample/sense.podspec | 1 + macOS/APIExample.xcodeproj/project.pbxproj | 234 +++++- macOS/APIExample/Common/AgoraExtension.swift | 3 +- macOS/APIExample/Common/Configs.swift | 3 +- .../Common/ExternalAudio/ExternalAudio.mm | 3 +- .../ExternalVideo/AgoraMetalRender.swift | 8 +- macOS/APIExample/Common/Utils/MediaUtils.h | 2 + macOS/APIExample/Common/Utils/MediaUtils.m | 16 + macOS/APIExample/Common/Utils/Util.swift | 11 + .../Base.lproj/CustomAudioSource.storyboard | 25 +- .../CustomAudioSource/CustomAudioSource.swift | 22 +- .../zh-Hans.lproj/CustomAudioSource.strings | 2 + .../CustomVideoSourcePushMulti.storyboard | 146 ++++ .../CustomVideoSourcePushMulti.swift | 428 +++++++++++ .../CustomVideoSourcePushMulti.strings | 24 + .../Base.lproj/LiveStreaming.storyboard | 28 +- .../LiveStreaming/LiveStreaming.swift | 9 +- .../en.lproj/LiveStreaming.storyboard | 304 ++++++++ .../en.lproj/LiveStreaming.strings | 38 + .../zh-Hans.lproj/LiveStreaming.strings | 43 +- .../Base.lproj/LocalCompositeGraph.storyboard | 209 +++++ .../LocalCompositeGraph.swift | 627 +++++++++++++++ .../zh-Hans.lproj/LocalCompositeGraph.strings | 45 ++ .../{ => Base.lproj}/MediaPlayer.storyboard | 18 +- .../Advanced/MediaPlayer/MediaPlayer.swift | 2 +- .../en.lproj/MediaPlayer.storyboard | 182 +++++ .../zh-Hans.lproj/MediaPlayer.strings | 24 + .../QuickSwitchChannel.storyboard | 6 +- .../en.lproj/QuickSwitchChannel.storyboard | 124 +++ .../zh-Hans.lproj/QuickSwitchChannel.strings | 12 + .../RTMPStreaming/RTMPStreaming.swift | 2 +- .../Advanced/RawVideoData/RawVideoData.swift | 15 +- .../Base.lproj/ScreenShare.storyboard | 10 +- .../Advanced/ScreenShare/ScreenShare.swift | 77 +- .../zh-Hans.lproj/ScreenShare.strings | 3 + .../Advanced/SimpleFilter/SimpleFilter.swift | 4 +- .../{ => Base.lproj}/SpatialAudio.storyboard | 32 +- .../en.lproj/SpatialAudio.storyboard | 449 +++++++++++ .../zh-Hans.lproj/SpatialAudio.strings | 60 ++ .../Base.lproj/VideoProcess.storyboard | 4 +- .../Advanced/VideoProcess/VideoProcess.swift | 13 +- .../Advanced/VoiceChanger/VoiceChanger.swift | 4 +- .../JoinChannelVideoRecorder.storyboard | 137 ++++ .../JoinChannelVideoRecorder.swift | 608 +++++++++++++++ .../JoinChannelVideoRecorder.strings | 24 + 
.../JoinChannelVideoToken.swift | 4 +- .../JoinChannelVideo/JoinChannelVideo.swift | 4 +- macOS/APIExample/Resources/sample.mov | Bin 0 -> 219059 bytes macOS/APIExample/ViewController.swift | 3 + .../{ => zh-Hans.lproj}/Localizable.strings | 11 + macOS/ExportOptions.plist | 2 +- macOS/Podfile | 8 +- macOS/README.md | 4 + windows/APIExample/APIExample/AGVideoWnd.cpp | 4 +- windows/APIExample/APIExample/APIExample.rc | 246 ++++-- .../APIExample/APIExample/APIExample.vcxproj | 22 +- .../APIExample/APIExample.vcxproj.filters | 36 + .../APIExample/APIExample/APIExampleDlg.cpp | 87 ++- windows/APIExample/APIExample/APIExampleDlg.h | 6 + .../APIExample/Advanced/Beauty/CDlgBeauty.cpp | 60 +- .../APIExample/Advanced/Beauty/CDlgBeauty.h | 8 + .../BeautyAudio/CAgoraBeautyAudio.cpp | 72 ++ .../Advanced/BeautyAudio/CAgoraBeautyAudio.h | 4 + .../CAgoraCaptureVideoDlg.cpp | 36 +- .../CAgoraCaptureVideoDlg.h | 21 +- .../CLocalVideoTranscodingDlg.cpp | 121 ++- .../CLocalVideoTranscodingDlg.h | 10 +- .../MediaRecorder/CAgoraMediaRecorder.cpp | 404 ++++++++++ .../MediaRecorder/CAgoraMediaRecorder.h | 186 +++++ .../CAgoraMutilVideoSourceDlg.cpp | 12 +- .../MultiVideoSourceTracks.cpp | 718 ++++++++++++++++++ .../MultiVideoSourceTracks.h | 212 ++++++ .../OriginalAudio/CAgoraOriginalAudioDlg.cpp | 6 +- .../OriginalAudio/CAgoraOriginalAudioDlg.h | 6 +- .../OriginalVideo/CAgoraOriginalVideoDlg.cpp | 69 +- .../OriginalVideo/CAgoraOriginalVideoDlg.h | 43 +- .../PushExternalVideoYUV.cpp | 473 ++++++++++++ .../PushExternalVideoYUV.h | 145 ++++ .../RTMPStream/AgoraRtmpStreaming.cpp | 42 +- .../Advanced/RTMPStream/AgoraRtmpStreaming.h | 0 .../ScreenShare/AgoraScreenCapture.cpp | 424 ++++++++--- .../Advanced/ScreenShare/AgoraScreenCapture.h | 148 ++-- .../DirectShow/AGDShowAudioCapture.cpp | 2 +- windows/APIExample/APIExample/Language.h | 11 + windows/APIExample/APIExample/YUVReader.cpp | 59 ++ windows/APIExample/APIExample/YUVReader.h | 40 + windows/APIExample/APIExample/en.ini | 22 +- windows/APIExample/APIExample/resource.h | 34 +- windows/APIExample/APIExample/sample.yuv | 1 + windows/APIExample/APIExample/stdafx.cpp | 24 + windows/APIExample/APIExample/stdafx.h | 3 + windows/APIExample/APIExample/zh-cn.ini | 13 +- windows/APIExample/install.ps1 | 2 +- 231 files changed, 12296 insertions(+), 1839 deletions(-) create mode 100644 Android/APIExample-Audio/app/src/main/java/io/agora/api/example/utils/AudioFileReader.java create mode 100644 Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLTestUtils.java create mode 100644 Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLThread.java create mode 100644 Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/LocalVideoTranscoding.java create mode 100644 Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaRecorder.java create mode 100644 Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/VideoCaptureUtils.java create mode 100644 Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/videoRender/YuvFboProgram.java create mode 100644 Android/APIExample/app/src/main/res/layout/fragment_localvideotranscoding.xml create mode 100644 Android/APIExample/app/src/main/res/layout/fragment_media_recorder.xml create mode 100644 iOS/APIExample-Audio/iOS_ExportOptions.plist create mode 100644 iOS/APIExample/APIExample/Common/PickerView.swift create mode 100644 
iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Recorder)/Base.lproj/JoinChannelVideoRecorder.storyboard create mode 100644 iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Recorder)/JoinChannelVideoRecorder.swift create mode 100644 iOS/APIExample/APIExample/Examples/Basic/JoinChannelVideo(Recorder)/zh-Hans.lproj/JoinChannelVideoRecorder.strings create mode 100644 iOS/APIExample/APIExample/Resources/sample.mov create mode 100644 iOS/APIExample/iOS_ExportOptions.plist create mode 100644 macOS/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/Base.lproj/CustomVideoSourcePushMulti.storyboard create mode 100644 macOS/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift create mode 100644 macOS/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/zh-Hans.lproj/CustomVideoSourcePushMulti.strings create mode 100644 macOS/APIExample/Examples/Advanced/LiveStreaming/en.lproj/LiveStreaming.storyboard create mode 100644 macOS/APIExample/Examples/Advanced/LiveStreaming/en.lproj/LiveStreaming.strings create mode 100644 macOS/APIExample/Examples/Advanced/LocalCompositeGraph/Base.lproj/LocalCompositeGraph.storyboard create mode 100644 macOS/APIExample/Examples/Advanced/LocalCompositeGraph/LocalCompositeGraph.swift create mode 100644 macOS/APIExample/Examples/Advanced/LocalCompositeGraph/zh-Hans.lproj/LocalCompositeGraph.strings rename macOS/APIExample/Examples/Advanced/MediaPlayer/{ => Base.lproj}/MediaPlayer.storyboard (94%) create mode 100644 macOS/APIExample/Examples/Advanced/MediaPlayer/en.lproj/MediaPlayer.storyboard create mode 100644 macOS/APIExample/Examples/Advanced/MediaPlayer/zh-Hans.lproj/MediaPlayer.strings rename macOS/APIExample/Examples/Advanced/QuickSwitchChannel/{ => Base.lproj}/QuickSwitchChannel.storyboard (97%) create mode 100644 macOS/APIExample/Examples/Advanced/QuickSwitchChannel/en.lproj/QuickSwitchChannel.storyboard create mode 100644 macOS/APIExample/Examples/Advanced/QuickSwitchChannel/zh-Hans.lproj/QuickSwitchChannel.strings rename macOS/APIExample/Examples/Advanced/SpatialAudio/{ => Base.lproj}/SpatialAudio.storyboard (98%) create mode 100644 macOS/APIExample/Examples/Advanced/SpatialAudio/en.lproj/SpatialAudio.storyboard create mode 100644 macOS/APIExample/Examples/Advanced/SpatialAudio/zh-Hans.lproj/SpatialAudio.strings create mode 100644 macOS/APIExample/Examples/Basic/JoinChannelVideo(Recorder)/Base.lproj/JoinChannelVideoRecorder.storyboard create mode 100644 macOS/APIExample/Examples/Basic/JoinChannelVideo(Recorder)/JoinChannelVideoRecorder.swift create mode 100644 macOS/APIExample/Examples/Basic/JoinChannelVideo(Recorder)/zh-Hans.lproj/JoinChannelVideoRecorder.strings create mode 100644 macOS/APIExample/Resources/sample.mov rename macOS/APIExample/{ => zh-Hans.lproj}/Localizable.strings (94%) mode change 100644 => 100755 windows/APIExample/APIExample/Advanced/Beauty/CDlgBeauty.cpp create mode 100755 windows/APIExample/APIExample/Advanced/MediaRecorder/CAgoraMediaRecorder.cpp create mode 100755 windows/APIExample/APIExample/Advanced/MediaRecorder/CAgoraMediaRecorder.h create mode 100755 windows/APIExample/APIExample/Advanced/MultiVideoSourceTracks/MultiVideoSourceTracks.cpp create mode 100755 windows/APIExample/APIExample/Advanced/MultiVideoSourceTracks/MultiVideoSourceTracks.h create mode 100755 windows/APIExample/APIExample/Advanced/PushExternalVideoYUV/PushExternalVideoYUV.cpp create mode 100755 windows/APIExample/APIExample/Advanced/PushExternalVideoYUV/PushExternalVideoYUV.h mode change 100644 => 100755 
windows/APIExample/APIExample/Advanced/RTMPStream/AgoraRtmpStreaming.h mode change 100644 => 100755 windows/APIExample/APIExample/DirectShow/AGDShowAudioCapture.cpp create mode 100755 windows/APIExample/APIExample/YUVReader.cpp create mode 100755 windows/APIExample/APIExample/YUVReader.h create mode 100644 windows/APIExample/APIExample/sample.yuv diff --git a/.github/ci/build/build_ios_ipa.sh b/.github/ci/build/build_ios_ipa.sh index 01501f2f0..722f2b866 100755 --- a/.github/ci/build/build_ios_ipa.sh +++ b/.github/ci/build/build_ios_ipa.sh @@ -17,10 +17,8 @@ TARGET_NAME=${PROJECT_PATH##*/} KEYCENTER_PATH=${PROJECT_PATH}"/"${TARGET_NAME}"/Common/KeyCenter.swift" -METHOD_PATH=${PROJECT_PATH}"/ExportOptions.plist" - # 打包环境 -CONFIGURATION=$method +CONFIGURATION="Debug" #工程文件路径 APP_PATH="${PROJECT_PATH}/${TARGET_NAME}.xcworkspace" @@ -65,8 +63,6 @@ echo PBXPROJ_PATH: $PBXPROJ_PATH # Release /usr/libexec/PlistBuddy -c "Set :objects:03D13BF82448758C00B599B3:buildSettings:CURRENT_PROJECT_VERSION ${BUILD_NUMBER}" $PBXPROJ_PATH -#修改打包方式 -/usr/libexec/PlistBuddy -c "Set :method $CONFIGURATION" $METHOD_PATH # 读取APPID环境变量 echo AGORA_APP_ID:$APP_ID @@ -87,33 +83,39 @@ xcodebuild clean -workspace "${APP_PATH}" -configuration "${CONFIGURATION}" -sch CURRENT_TIME=$(date "+%Y-%m-%d %H-%M-%S") # 归档路径 -ARCHIVE_PATH="${PROJECT_PATH}/${TARGET_NAME} ${CURRENT_TIME}/${TARGET_NAME}.xcarchive" +ARCHIVE_PATH="${WORKSPACE}/${TARGET_NAME}_${BUILD_NUMBER}.xcarchive" # 编译环境 -# 导出路径 -EXPORT_PATH="${PROJECT_PATH}/${TARGET_NAME} ${CURRENT_TIME}" - # plist路径 PLIST_PATH="${PROJECT_PATH}/ExportOptions.plist" echo PLIST_PATH: $PLIST_PATH # archive 这边使用的工作区间 也可以使用project -xcodebuild archive -workspace "${APP_PATH}" -scheme "${TARGET_NAME}" -configuration "${CONFIGURATION}" -archivePath "${ARCHIVE_PATH}" -destination 'generic/platform=iOS' +xcodebuild CODE_SIGN_STYLE="Manual" archive -workspace "${APP_PATH}" -scheme "${TARGET_NAME}" clean CODE_SIGNING_REQUIRED=NO CODE_SIGNING_ALLOWED=NO -configuration "${CONFIGURATION}" -archivePath "${ARCHIVE_PATH}" -destination 'generic/platform=iOS' -quiet || exit + +cd ${WORKSPACE} -# 导出ipa -xcodebuild -exportArchive -archivePath "${ARCHIVE_PATH}" -exportPath "${EXPORT_PATH}" -exportOptionsPlist "${PLIST_PATH}" +# 压缩archive +7za a -tzip "${TARGET_NAME}_${BUILD_NUMBER}.xcarchive.zip" "${ARCHIVE_PATH}" + +# 签名 +# sh sign "${TARGET_NAME}_${BUILD_NUMBER}.xcarchive.zip" --type xcarchive --plist "${PLIST_PATH}" +sh export "${TARGET_NAME}_${BUILD_NUMBER}.xcarchive.zip" --plist "${PLIST_PATH}" # 上传IPA -7za a "$WORKSPACE/${TARGET_NAME}_${BUILD_NUMBER}_IPA.zip" -r "${EXPORT_PATH}/${TARGET_NAME}.ipa" +PAYLOAD_PATH="${TARGET_NAME}_${BUILD_NUMBER}_Payload" +mkdir "${PAYLOAD_PATH}" +# mv "${TARGET_NAME}_${BUILD_NUMBER}_iOS.ipa" "${PAYLOAD_PATH}" +mv "${TARGET_NAME}_${BUILD_NUMBER}.ipa" "${PAYLOAD_PATH}" -# 删除IPA文件夹 -rm -rf "${EXPORT_PATH}" +7za a "${TARGET_NAME}_${BUILD_NUMBER}_IPA.zip" -r "${PAYLOAD_PATH}" +python3 artifactory_utils.py --action=upload_file --file="${TARGET_NAME}_${BUILD_NUMBER}_IPA.zip" --project -# rm -rf "${EXPORT_PATH}/${TARGET_NAME}.xcarchive" -# rm -rf "${EXPORT_PATH}/Packaging.log" -# rm -rf "${EXPORT_PATH}/ExportOptions.plist" -# rm -rf "${EXPORT_PATH}/DistributionSummary.plist" +# 删除IPA文件夹 +rm -rf ${TARGET_NAME}_${BUILD_NUMBER}.xcarchive +rm -rf *.zip +rm -rf ${PAYLOAD_PATH} #复原Keycenter文件 python3 /tmp/jenkins/api-examples/.github/ci/build/modify_ios_keycenter.py $KEYCENTER_PATH 1 diff --git a/.github/ci/build/build_mac.sh b/.github/ci/build/build_mac.sh index 
315de4f6b..1ba0323f4 100644 --- a/.github/ci/build/build_mac.sh +++ b/.github/ci/build/build_mac.sh @@ -71,7 +71,7 @@ else echo "failed" exit 1 fi -cp -rf ./macOS/** ./$unzip_name/samples/APIExample +cp -a ./macOS/** ./$unzip_name/samples/APIExample mv ./$unzip_name/samples/APIExample/sdk.podspec ./$unzip_name/ python3 ./.github/ci/build/modify_podfile.py ./$unzip_name/samples/APIExample/Podfile diff --git a/.github/ci/build/build_mac_ipa.sh b/.github/ci/build/build_mac_ipa.sh index 3eb0c737e..21e5b2125 100755 --- a/.github/ci/build/build_mac_ipa.sh +++ b/.github/ci/build/build_mac_ipa.sh @@ -17,7 +17,7 @@ TARGET_NAME=${PROJECT_PATH##*/} KEYCENTER_PATH=${PROJECT_PATH}"/"${TARGET_NAME}"/Common/KeyCenter.swift" # 打包环境 -CONFIGURATION=developer-id +CONFIGURATION=Release #工程文件路径 APP_PATH="${PROJECT_PATH}/${TARGET_NAME}.xcworkspace" @@ -69,12 +69,9 @@ xcodebuild clean -workspace "${APP_PATH}" -configuration "${CONFIGURATION}" -sch CURRENT_TIME=$(date "+%Y-%m-%d %H-%M-%S") # 归档路径 -ARCHIVE_PATH="${PROJECT_PATH}/${TARGET_NAME} ${CURRENT_TIME}/${TARGET_NAME}.xcarchive" +ARCHIVE_PATH="${WORKSPACE}/${TARGET_NAME}_${BUILD_NUMBER}.xcarchive" # 编译环境 -# 导出路径 -EXPORT_PATH="${PROJECT_PATH}/${TARGET_NAME} ${CURRENT_TIME}" - # plist路径 PLIST_PATH="${PROJECT_PATH}/ExportOptions.plist" @@ -83,20 +80,20 @@ echo PLIST_PATH: $PLIST_PATH # archive 这边使用的工作区间 也可以使用project xcodebuild archive -workspace "${APP_PATH}" -scheme "${TARGET_NAME}" -configuration "${CONFIGURATION}" -archivePath "${ARCHIVE_PATH}" -# 导出ipa -xcodebuild -exportArchive -archivePath "${ARCHIVE_PATH}" -exportPath "${EXPORT_PATH}" -exportOptionsPlist "${PLIST_PATH}" +cd ${WORKSPACE} -# 删除archive文件 -rm -rf "${EXPORT_PATH}/${TARGET_NAME}.xcarchive" -rm -rf "${EXPORT_PATH}/Packaging.log" -rm -rf "${EXPORT_PATH}/ExportOptions.plist" -rm -rf "${EXPORT_PATH}/DistributionSummary.plist" +# 压缩archive +7za a -slp "${TARGET_NAME}_${BUILD_NUMBER}.xcarchive.zip" "${ARCHIVE_PATH}" + +# 签名 +sh sign "${WORKSPACE}/${TARGET_NAME}_${BUILD_NUMBER}.xcarchive.zip" --type xcarchive --plist "${PLIST_PATH}" --application macApp # 上传IPA -7za a "$WORKSPACE/${TARGET_NAME}_Mac_${BUILD_NUMBER}_APP.zip" -r "${EXPORT_PATH}" +python3 artifactory_utils.py --action=upload_file --file="${TARGET_NAME}_${BUILD_NUMBER}.app.zip" --project -# 删除IPA文件夹 -rm -rf "${EXPORT_PATH}" +# 删除archive文件 +rm -rf ${TARGET_NAME}_${BUILD_NUMBER}.xcarchive +rm -rf *.zip #复原Keycenter文件 python3 /tmp/jenkins/api-examples/.github/ci/build/modify_ios_keycenter.py $KEYCENTER_PATH 1 diff --git a/Android/APIExample-Audio/app/build.gradle b/Android/APIExample-Audio/app/build.gradle index 334dd33a0..6f958c176 100644 --- a/Android/APIExample-Audio/app/build.gradle +++ b/Android/APIExample-Audio/app/build.gradle @@ -48,7 +48,7 @@ dependencies { implementation fileTree(dir: "${localSdkPath}", include: ['*.jar', '*.aar']) } else{ - def agora_sdk_version = "4.1.1" + def agora_sdk_version = "4.2.0" // case 1: full single lib with voice only implementation "io.agora.rtc:voice-sdk:${agora_sdk_version}" // case 2: partial libs with voice only diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java index d212f82b0..41e6b2c7f 100644 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java @@ -27,6 +27,10 @@ import static 
io.agora.rtc2.Constants.ULTRA_HIGH_QUALITY_VOICE; import static io.agora.rtc2.Constants.VOICE_BEAUTIFIER_OFF; import static io.agora.rtc2.Constants.VOICE_CHANGER_BASS; +import static io.agora.rtc2.Constants.VOICE_CHANGER_CARTOON; +import static io.agora.rtc2.Constants.VOICE_CHANGER_CHILDLIKE; +import static io.agora.rtc2.Constants.VOICE_CHANGER_CHIPMUNK; +import static io.agora.rtc2.Constants.VOICE_CHANGER_DARTH_VADER; import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_BOY; import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_GIRL; import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_HULK; @@ -34,9 +38,16 @@ import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_PIGKING; import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_SISTER; import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_UNCLE; +import static io.agora.rtc2.Constants.VOICE_CHANGER_GIRLISH_MAN; +import static io.agora.rtc2.Constants.VOICE_CHANGER_GROOT; +import static io.agora.rtc2.Constants.VOICE_CHANGER_IRON_LADY; +import static io.agora.rtc2.Constants.VOICE_CHANGER_MONSTER; import static io.agora.rtc2.Constants.VOICE_CHANGER_NEUTRAL; +import static io.agora.rtc2.Constants.VOICE_CHANGER_PHONE_OPERATOR; +import static io.agora.rtc2.Constants.VOICE_CHANGER_SHIN_CHAN; import static io.agora.rtc2.Constants.VOICE_CHANGER_SOLID; import static io.agora.rtc2.Constants.VOICE_CHANGER_SWEET; +import static io.agora.rtc2.Constants.VOICE_CHANGER_TRANSFORMERS; import static io.agora.rtc2.Constants.VOICE_CONVERSION_OFF; import android.content.Context; @@ -97,7 +108,7 @@ public class VoiceEffects extends BaseFragment implements View.OnClickListener, chatBeautifier, timbreTransformation, voiceChanger, styleTransformation, roomAcoustics, pitchCorrection, _pitchModeOption, _pitchValueOption, voiceConversion, customBandFreq, customReverbKey; private ViewGroup _voice3DLayout, _pitchModeLayout, _pitchValueLayout; - private SeekBar _voice3DCircle, customPitch, customBandGain, customReverbValue; + private SeekBar _voice3DCircle, customPitch, customBandGain, customReverbValue, customVoiceFormant; private AudioSeatManager audioSeatManager; @@ -159,10 +170,12 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat customBandGain = view.findViewById(R.id.audio_custom_band_gain); // engine.setLocalVoiceEqualization() customReverbKey = view.findViewById(R.id.audio_custom_reverb_key); customReverbValue = view.findViewById(R.id.audio_custom_reverb_value); //engine.setLocalVoiceReverb() + customVoiceFormant = view.findViewById(R.id.audio_voice_formant_value); //engine.setLocalVoiceFormant() customPitch.setOnSeekBarChangeListener(this); customBandGain.setOnSeekBarChangeListener(this); customReverbValue.setOnSeekBarChangeListener(this); + customVoiceFormant.setOnSeekBarChangeListener(this); customBandFreq.setOnItemSelectedListener(this); customReverbKey.setOnItemSelectedListener(this); @@ -194,6 +207,7 @@ private void resetControlLayoutByJoined() { customBandGain.setEnabled(joined); customReverbKey.setEnabled(joined); customReverbValue.setEnabled(joined); + customVoiceFormant.setEnabled(joined); chatBeautifier.setSelection(0); @@ -206,6 +220,7 @@ private void resetControlLayoutByJoined() { customPitch.setProgress(0); customBandGain.setProgress(0); customReverbValue.setProgress(0); + customVoiceFormant.setProgress(50); } @Override @@ -407,8 +422,8 @@ private void joinChannel(String channelId) { private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() { /** * Error 
code description can be found at: - * en: https://api-ref.agora.io/en/voice-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror - * cn: https://docs.agora.io/cn/voice-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror + * en: https://api-ref.agora.io/en/video-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror + * cn: https://docs.agora.io/cn/video-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror */ @Override public void onError(int err) { @@ -617,6 +632,28 @@ private int getVoiceConversionValue(String label) { return VOICE_CHANGER_SOLID; case "VOICE_CHANGER_BASS": return VOICE_CHANGER_BASS; + case "VOICE_CHANGER_CARTOON": + return VOICE_CHANGER_CARTOON; + case "VOICE_CHANGER_CHILDLIKE": + return VOICE_CHANGER_CHILDLIKE; + case "VOICE_CHANGER_PHONE_OPERATOR": + return VOICE_CHANGER_PHONE_OPERATOR; + case "VOICE_CHANGER_MONSTER": + return VOICE_CHANGER_MONSTER; + case "VOICE_CHANGER_TRANSFORMERS": + return VOICE_CHANGER_TRANSFORMERS; + case "VOICE_CHANGER_GROOT": + return VOICE_CHANGER_GROOT; + case "VOICE_CHANGER_DARTH_VADER": + return VOICE_CHANGER_DARTH_VADER; + case "VOICE_CHANGER_IRON_LADY": + return VOICE_CHANGER_IRON_LADY; + case "VOICE_CHANGER_SHIN_CHAN": + return VOICE_CHANGER_SHIN_CHAN; + case "VOICE_CHANGER_GIRLISH_MAN": + return VOICE_CHANGER_GIRLISH_MAN; + case "VOICE_CHANGER_CHIPMUNK": + return VOICE_CHANGER_CHIPMUNK; case "VOICE_CONVERSION_OFF": default: return VOICE_CONVERSION_OFF; @@ -743,6 +780,9 @@ public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { + if (!fromUser) { + return; + } if(seekBar == _voice3DCircle){ int cicle = (int) (1 + 59 * progress * 1.0f / seekBar.getMax()); // [1,60], 10 default @@ -771,6 +811,10 @@ public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { value = (int) (100 * progress * 1.0f / seekBar.getMax()); } engine.setLocalVoiceReverb(reverbKey, value); + } else if (seekBar == customVoiceFormant) { + // [-1, 1] + double value = (progress - 50) * 1.0f / 100; + engine.setLocalVoiceFormant(value); } } diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java index 75b8bb6a1..e66641d57 100755 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java @@ -236,6 +236,7 @@ public void onClick(View v) { engine.leaveChannel(); pulling = false; join.setText(getString(R.string.join)); + audioSeatManager.downAllSeats(); if(pullingTask != null){ try { pullingTask.join(); @@ -268,7 +269,7 @@ private void joinChannel(String channelId) { * 0: Success. * < 0: Failure. 
* PS: Ensure that you call this method before the joinChannel method.*/ - engine.setExternalAudioSource(true, SAMPLE_RATE, SAMPLE_NUM_OF_CHANNEL, 2, false, true); + // engine.setExternalAudioSource(true, SAMPLE_RATE, SAMPLE_NUM_OF_CHANNEL, 2, false, true); @@ -303,9 +304,8 @@ private void joinChannel(String channelId) { private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() { /** - * Error code description can be found at: - * en: https://api-ref.agora.io/en/voice-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror - * cn: https://docs.agora.io/cn/voice-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror + * en: https://api-ref.agora.io/en/video-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror + * cn: https://docs.agora.io/cn/video-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror */ @Override public void onError(int err) { diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java index b4abaa337..a94f62cfb 100755 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java @@ -5,7 +5,6 @@ import android.content.Context; import android.os.Bundle; import android.os.Handler; -import android.os.Process; import android.util.Log; import android.view.LayoutInflater; import android.view.View; @@ -21,14 +20,12 @@ import com.yanzhenjie.permission.AndPermission; import com.yanzhenjie.permission.runtime.Permission; -import java.io.IOException; -import java.io.InputStream; - import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.widget.AudioSeatManager; +import io.agora.api.example.utils.AudioFileReader; import io.agora.api.example.utils.CommonUtil; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; @@ -37,6 +34,7 @@ import io.agora.rtc2.RtcEngine; import io.agora.rtc2.RtcEngineConfig; import io.agora.rtc2.RtcEngineEx; +import io.agora.rtc2.audio.AudioTrackConfig; /** * This demo demonstrates how to make a one-to-one voice call @@ -57,19 +55,11 @@ public class CustomAudioSource extends BaseFragment implements View.OnClickListe public static RtcEngineEx engine; private Switch mic, pcm; private ChannelMediaOptions option = new ChannelMediaOptions(); - private static final String AUDIO_FILE = "output.raw"; - private static final Integer SAMPLE_RATE = 44100; - private static final Integer SAMPLE_NUM_OF_CHANNEL = 2; - private static final Integer BITS_PER_SAMPLE = 16; - private static final Integer SAMPLES = 441; - private static final Integer BUFFER_SIZE = SAMPLES * BITS_PER_SAMPLE / 8 * SAMPLE_NUM_OF_CHANNEL; - private static final Integer PUSH_INTERVAL = SAMPLES * 1000 / SAMPLE_RATE; - - private InputStream inputStream; - private Thread pushingTask; - private boolean pushing = false; + private int pushTimes = 0; private AudioSeatManager audioSeatManager; + private AudioFileReader audioPushingHelper; + private 
int customAudioTrack = -1; @Override public void onCreate(@Nullable Bundle savedInstanceState) { @@ -87,36 +77,6 @@ private void initMediaOption() { option.enableAudioRecordingOrPlayout = true; } - private void openAudioFile() { - try { - inputStream = this.getResources().getAssets().open(AUDIO_FILE); - } catch (IOException e) { - e.printStackTrace(); - } - } - - private void closeAudioFile() { - try { - inputStream.close(); - } catch (IOException e) { - e.printStackTrace(); - } - } - - private byte[] readBuffer() { - int byteSize = BUFFER_SIZE; - byte[] buffer = new byte[byteSize]; - try { - if (inputStream.read(buffer) < 0) { - inputStream.reset(); - return readBuffer(); - } - } catch (IOException e) { - e.printStackTrace(); - } - return buffer; - } - @Nullable @Override public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { @@ -195,7 +155,13 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { + "}"); /* setting the local access point if the private cloud ip was set, otherwise the config will be invalid.*/ engine.setLocalAccessPoint(((MainApplication) getActivity().getApplication()).getGlobalSettings().getPrivateCloudConfig()); - openAudioFile(); + + audioPushingHelper = new AudioFileReader(requireContext(), (buffer, timestamp) -> { + if(joined && engine != null && customAudioTrack != -1){ + int ret = engine.pushExternalAudioFrame(buffer, timestamp, AudioFileReader.SAMPLE_RATE, AudioFileReader.SAMPLE_NUM_OF_CHANNEL, Constants.BytesPerSample.TWO_BYTES_PER_SAMPLE, customAudioTrack); + Log.i(TAG, "pushExternalAudioFrame times:" + (++pushTimes) + ", ret=" + ret); + } + }); } catch (Exception e) { e.printStackTrace(); getActivity().onBackPressed(); @@ -205,34 +171,32 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { @Override public void onDestroy() { super.onDestroy(); - pushing = false; + if(customAudioTrack != -1){ + engine.destroyCustomAudioTrack(customAudioTrack); + customAudioTrack = -1; + } + if(audioPushingHelper != null){ + audioPushingHelper.stop(); + } /**leaveChannel and Destroy the RtcEngine instance*/ if (engine != null) { engine.leaveChannel(); } handler.post(RtcEngine::destroy); engine = null; - closeAudioFile(); } @Override - public void onCheckedChanged(CompoundButton compoundButton, boolean b) { + public void onCheckedChanged(CompoundButton compoundButton, boolean checked) { if (compoundButton.getId() == R.id.microphone) { - if (b) { - option.publishMicrophoneTrack = true; - } else { - option.publishMicrophoneTrack = false; - } + option.publishMicrophoneTrack = checked; engine.updateChannelMediaOptions(option); } else if (compoundButton.getId() == R.id.localAudio) { - if (b) { - option.publishCustomAudioTrack = true; - } else { - option.publishCustomAudioTrack = false; - } + option.publishCustomAudioTrackId = customAudioTrack; + option.publishCustomAudioTrack = checked; engine.updateChannelMediaOptions(option); - engine.enableCustomAudioLocalPlayback(0, b); + engine.enableCustomAudioLocalPlayback(customAudioTrack, checked); } } @@ -278,19 +242,13 @@ public void onClick(View v) { * 2:If you call the leaveChannel method during CDN live streaming, the SDK * triggers the removeInjectStreamUrl method.*/ engine.leaveChannel(); - pushing = false; join.setText(getString(R.string.join)); mic.setEnabled(false); pcm.setEnabled(false); pcm.setChecked(false); mic.setChecked(true); - if(pushingTask != null){ - try { - pushingTask.join(); - pushingTask = null; - } catch 
(InterruptedException e) { - // do nothing - } + if(audioPushingHelper != null){ + audioPushingHelper.stop(); } audioSeatManager.downAllSeats(); } @@ -317,9 +275,9 @@ private void joinChannel(String channelId) { * 0: Success. * < 0: Failure. * PS: Ensure that you call this method before the joinChannel method.*/ - engine.setExternalAudioSource(true, SAMPLE_RATE, SAMPLE_NUM_OF_CHANNEL, 2, false, true); - - + AudioTrackConfig config = new AudioTrackConfig(); + config.enableLocalPlayback = false; + customAudioTrack = engine.createCustomAudioTrack(Constants.AudioTrackType.AUDIO_TRACK_MIXABLE, config); /**Please configure accessToken in the string_config file. * A temporary token generated in Console. A temporary token is valid for 24 hours. For details, see @@ -349,10 +307,11 @@ private void joinChannel(String channelId) { * The SDK uses this class to report to the app on SDK runtime events. */ private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() { + /** * Error code description can be found at: - * en: https://api-ref.agora.io/en/voice-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror - * cn: https://docs.agora.io/cn/voice-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror + * en: https://api-ref.agora.io/en/video-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror + * cn: https://docs.agora.io/cn/video-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror */ @Override public void onError(int err) { @@ -379,12 +338,14 @@ public void run() { pcm.setEnabled(true); join.setEnabled(true); join.setText(getString(R.string.leave)); - pushing = true; - if(pushingTask == null){ - pushingTask = new Thread(new PushingTask()); - pushingTask.start(); + if(audioPushingHelper != null){ + pushTimes = 0; + audioPushingHelper.start(); } audioSeatManager.upLocalSeat(uid); + if (pcm.isChecked()) { + engine.enableCustomAudioLocalPlayback(0, true); + } } }); } @@ -403,26 +364,4 @@ public void onUserOffline(int uid, int reason) { } }; - class PushingTask implements Runnable { - long number = 0; - - @Override - public void run() { - Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO); - while (pushing) { - Log.i(TAG, "pushExternalAudioFrame times:" + number++); - long before = System.currentTimeMillis(); - engine.pushExternalAudioFrame(readBuffer(), 0); - long now = System.currentTimeMillis(); - long consuming = now - before; - if(consuming < PUSH_INTERVAL){ - try { - Thread.sleep(PUSH_INTERVAL - consuming); - } catch (InterruptedException e) { - Log.e(TAG, "PushingTask Interrupted"); - } - } - } - } - } } diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/utils/AudioFileReader.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/utils/AudioFileReader.java new file mode 100644 index 000000000..387463604 --- /dev/null +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/utils/AudioFileReader.java @@ -0,0 +1,116 @@ +package io.agora.api.example.utils; + +import android.content.Context; +import android.os.Process; + +import java.io.IOException; +import java.io.InputStream; + +public class AudioFileReader { + private static final String AUDIO_FILE = "output.raw"; + public static final int SAMPLE_RATE = 44100; + public static final int SAMPLE_NUM_OF_CHANNEL = 2; + public static final int BITS_PER_SAMPLE = 16; + + 
public static final float BYTE_PER_SAMPLE = 1.0f * BITS_PER_SAMPLE / 8 * SAMPLE_NUM_OF_CHANNEL; + public static final float DURATION_PER_SAMPLE = 1000.0f / SAMPLE_RATE; // ms + public static final float SAMPLE_COUNT_PER_MS = SAMPLE_RATE * 1.0f / 1000; // ms + + private static final int BUFFER_SAMPLE_COUNT = (int) (SAMPLE_COUNT_PER_MS * 10); // 10ms sample count + private static final int BUFFER_BYTE_SIZE = (int) (BUFFER_SAMPLE_COUNT * BYTE_PER_SAMPLE); // byte + private static final long BUFFER_DURATION = (long) (BUFFER_SAMPLE_COUNT * DURATION_PER_SAMPLE); // ms + + private final Context context; + private final OnAudioReadListener audioReadListener; + private volatile boolean pushing = false; + private InnerThread thread; + private InputStream inputStream; + + public AudioFileReader(Context context, OnAudioReadListener listener){ + this.context = context; + this.audioReadListener = listener; + } + + public void start() { + if(thread == null){ + thread = new InnerThread(); + thread.start(); + } + } + + public void stop(){ + pushing = false; + if(thread != null){ + try { + thread.join(); + } catch (InterruptedException e) { + e.printStackTrace(); + } finally { + thread = null; + } + } + } + + public interface OnAudioReadListener { + void onAudioRead(byte[] buffer, long timestamp); + } + + private class InnerThread extends Thread{ + + @Override + public void run() { + super.run(); + try { + inputStream = context.getAssets().open(AUDIO_FILE); + } catch (IOException e) { + e.printStackTrace(); + } + Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO); + pushing = true; + + long start_time = System.currentTimeMillis();; + int sent_audio_frames = 0; + while (pushing) { + if(audioReadListener != null){ + audioReadListener.onAudioRead(readBuffer(), System.currentTimeMillis()); + } + ++ sent_audio_frames; + long next_frame_start_time = sent_audio_frames * BUFFER_DURATION + start_time; + long now = System.currentTimeMillis(); + + if(next_frame_start_time > now){ + long sleep_duration = next_frame_start_time - now; + try { + Thread.sleep(sleep_duration); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + } + + if (inputStream != null) { + try { + inputStream.close(); + } catch (IOException e) { + e.printStackTrace(); + } finally { + inputStream = null; + } + } + } + + private byte[] readBuffer() { + int byteSize = BUFFER_BYTE_SIZE; + byte[] buffer = new byte[byteSize]; + try { + if (inputStream.read(buffer) < 0) { + inputStream.reset(); + return readBuffer(); + } + } catch (IOException e) { + e.printStackTrace(); + } + return buffer; + } + } +} diff --git a/Android/APIExample-Audio/app/src/main/res/layout/fragment_voice_effects.xml b/Android/APIExample-Audio/app/src/main/res/layout/fragment_voice_effects.xml index 9990b1a41..96db9042b 100644 --- a/Android/APIExample-Audio/app/src/main/res/layout/fragment_voice_effects.xml +++ b/Android/APIExample-Audio/app/src/main/res/layout/fragment_voice_effects.xml @@ -439,6 +439,33 @@ + + + + + + + + + diff --git a/Android/APIExample-Audio/app/src/main/res/values/arrays.xml b/Android/APIExample-Audio/app/src/main/res/values/arrays.xml index 8a2ba89f6..615b4fb69 100644 --- a/Android/APIExample-Audio/app/src/main/res/values/arrays.xml +++ b/Android/APIExample-Audio/app/src/main/res/values/arrays.xml @@ -63,6 +63,17 @@ VOICE_CHANGER_SWEET VOICE_CHANGER_SOLID VOICE_CHANGER_BASS + VOICE_CHANGER_CARTOON + VOICE_CHANGER_CHILDLIKE + VOICE_CHANGER_PHONE_OPERATOR + VOICE_CHANGER_MONSTER + VOICE_CHANGER_TRANSFORMERS + VOICE_CHANGER_GROOT + 
VOICE_CHANGER_DARTH_VADER + VOICE_CHANGER_IRON_LADY + VOICE_CHANGER_SHIN_CHAN + VOICE_CHANGER_GIRLISH_MAN + VOICE_CHANGER_CHIPMUNK AUDIO_EQUALIZATION_BAND_31 diff --git a/Android/APIExample/README.md b/Android/APIExample/README.md index aa0d6d7ab..6f2c16d37 100644 --- a/Android/APIExample/README.md +++ b/Android/APIExample/README.md @@ -46,12 +46,11 @@ This project contains third-party beauty integration examples, which are disable ### For Agora Extension Developers -从4.0.0SDK开始,Agora SDK支持插件系统和开放的云市场帮助开发者发布自己的音视频插件,本项目包含了一个SimpleFilter示例,默认是禁用的状态,如果需要开启编译和使用需要完成以下步骤: Since version 4.0.0, Agora SDK provides an Extension Interface Framework. Developers could publish their own video/audio extension to Agora Extension Market. In this project includes a sample SimpleFilter example, by default it is disabled. In order to enable it, you could do as follows: 1. Download [opencv](https://agora-adc-artifacts.s3.cn-north-1.amazonaws.com.cn/androidLibs/opencv4.zip) library, unzip it and copy into Android/APIExample/agora-simple-filter/src/main/jniLibs -2. Download [Agora SDK包](https://download.agora.io/sdk/release/Agora_Native_SDK_for_Android_v4.1.0_FULL.zip), unzip it and copy c++ .so library (keeps arch folder) to Android/APIExample/agora-simple-filter/src/main/agoraLibs +2. Download [Agora SDK包](https://docs.agora.io/cn/video-call-4.x/downloads?platform=Android), unzip it and copy c++ .so library (keeps arch folder) to Android/APIExample/agora-simple-filter/src/main/agoraLibs 3. Modify simpleFilter to true in Android/APIExample/gradle.properties ## Contact Us diff --git a/Android/APIExample/README.zh.md b/Android/APIExample/README.zh.md index 6e1fb2672..922ea03c4 100644 --- a/Android/APIExample/README.zh.md +++ b/Android/APIExample/README.zh.md @@ -51,7 +51,7 @@ 从4.0.0SDK开始,Agora SDK支持插件系统和开放的云市场帮助开发者发布自己的音视频插件,本项目包含了一个SimpleFilter示例,默认是禁用的状态,如果需要开启编译和使用需要完成以下步骤: 1. 下载 [opencv](https://agora-adc-artifacts.s3.cn-north-1.amazonaws.com.cn/androidLibs/opencv4.zip) 解压后复制到 Android/APIExample/agora-simple-filter/src/main/jniLibs -2. 手动下载[Agora SDK包](https://download.agora.io/sdk/release/Agora_Native_SDK_for_Android_v4.1.0_FULL.zip), 解压后将c++动态库(包括架构文件夹)copy到Android/APIExample/agora-simple-filter/src/main/agoraLibs +2. 手动下载[Agora SDK包](https://docs.agora.io/cn/video-call-4.x/downloads?platform=Android), 解压后将c++动态库(包括架构文件夹)copy到Android/APIExample/agora-simple-filter/src/main/agoraLibs 3. 
修改Android/APIExample/gradle.properties配置文件中simpleFilter值为true ## 联系我们 diff --git a/Android/APIExample/app/build.gradle b/Android/APIExample/app/build.gradle index a340a149a..43c2ba5fc 100644 --- a/Android/APIExample/app/build.gradle +++ b/Android/APIExample/app/build.gradle @@ -8,6 +8,7 @@ android { defaultConfig { applicationId "io.agora.api.example" + // ndk.abiFilters 'armeabi-v7a'//, 'arm64-v8a', 'x86', 'x86-64' minSdkVersion 21 targetSdkVersion 32 versionCode 1 @@ -60,7 +61,7 @@ dependencies { implementation fileTree(dir: "${localSdkPath}", include: ['*.jar', '*.aar']) } else{ - def agora_sdk_version = "4.1.1" + def agora_sdk_version = "4.2.0" // case 1: full libs implementation "io.agora.rtc:full-sdk:${agora_sdk_version}" implementation "io.agora.rtc:full-screen-sharing:${agora_sdk_version}" @@ -95,8 +96,8 @@ dependencies { implementation project(path: ':agora-simple-filter') } testImplementation 'junit:junit:4.12' - androidTestImplementation 'androidx.test.ext:junit:1.1.1' - androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0' + androidTestImplementation 'androidx.test.ext:junit:1.1.3' + androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0' implementation 'io.github.luizgrp.sectionedrecyclerviewadapter:sectionedrecyclerviewadapter:1.2.0' implementation 'com.yanzhenjie:permission:2.0.3' diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLTestUtils.java b/Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLTestUtils.java new file mode 100644 index 000000000..f909044d2 --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLTestUtils.java @@ -0,0 +1,125 @@ +package io.agora.api.example.common.gles; + +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.ImageFormat; +import android.graphics.Rect; +import android.graphics.YuvImage; +import android.opengl.GLES11Ext; +import android.opengl.GLES20; +import android.util.Log; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.IntBuffer; + +public class GLTestUtils { + private static final String TAG = "GLUtils"; + + public static Bitmap getTexture2DImage(int textureID, int width, int height) { + try { + int[] oldFboId = new int[1]; + GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, IntBuffer.wrap(oldFboId)); + + int[] framebuffers = new int[1]; + GLES20.glGenFramebuffers(1, framebuffers, 0); + int framebufferId = framebuffers[0]; + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebufferId); + + int[] renderbuffers = new int[1]; + GLES20.glGenRenderbuffers(1, renderbuffers, 0); + int renderId = renderbuffers[0]; + GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderId); + GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height); + + GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureID, 0); + GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, renderId); + if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) { + Log.d(TAG, "Framebuffer error"); + } + + ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4); + rgbaBuf.position(0); + GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf); + + Bitmap bitmap = Bitmap.createBitmap(width, height, 
Bitmap.Config.ARGB_8888); + bitmap.copyPixelsFromBuffer(rgbaBuf); + + GLES20.glDeleteRenderbuffers(1, IntBuffer.wrap(framebuffers)); + GLES20.glDeleteFramebuffers(1, IntBuffer.allocate(framebufferId)); + + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, oldFboId[0]); + + return bitmap; + } catch (Exception e) { + Log.e(TAG, "", e); + } + return null; + } + + public static Bitmap getTextureOESImage(int textureID, int width, int height) { + try { + int[] oldFboId = new int[1]; + GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, IntBuffer.wrap(oldFboId)); + + int[] framebuffers = new int[1]; + GLES20.glGenFramebuffers(1, framebuffers, 0); + int framebufferId = framebuffers[0]; + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebufferId); + + int[] renderbuffers = new int[1]; + GLES20.glGenRenderbuffers(1, renderbuffers, 0); + int renderId = renderbuffers[0]; + GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderId); + GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height); + + GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID, 0); + GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, renderId); + if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) { + Log.d(TAG, "Framebuffer error"); + } + + ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4); + rgbaBuf.position(0); + GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf); + + Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); + bitmap.copyPixelsFromBuffer(rgbaBuf); + + GLES20.glDeleteRenderbuffers(1, IntBuffer.wrap(framebuffers)); + GLES20.glDeleteFramebuffers(1, IntBuffer.allocate(framebufferId)); + + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, oldFboId[0]); + + return bitmap; + } catch (Exception e) { + Log.e(TAG, "", e); + } + return null; + } + + private static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) { + Bitmap bitmap = null; + try { + YuvImage image = new YuvImage(nv21, ImageFormat.NV21, width, height, null); + ByteArrayOutputStream stream = new ByteArrayOutputStream(); + image.compressToJpeg(new Rect(0, 0, width, height), 80, stream); + bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size()); + stream.close(); + } catch (IOException e) { + e.printStackTrace(); + } + return bitmap; + } + + private static Bitmap readBitmap(int width, int height){ + ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4); + rgbaBuf.position(0); + GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf); + + Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); + bitmap.copyPixelsFromBuffer(rgbaBuf); + return bitmap; + } +} diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLThread.java b/Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLThread.java new file mode 100644 index 000000000..daeb29a38 --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLThread.java @@ -0,0 +1,11 @@ +package io.agora.api.example.common.gles; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +public 
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLThread.java b/Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLThread.java new file mode 100644 index 000000000..daeb29a38 --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/common/gles/GLThread.java @@ -0,0 +1,11 @@ +package io.agora.api.example.common.gles; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +public @interface GLThread { +} diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/common/model/GlobalSettings.java b/Android/APIExample/app/src/main/java/io/agora/api/example/common/model/GlobalSettings.java index ff2453e02..4ce81f103 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/common/model/GlobalSettings.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/common/model/GlobalSettings.java @@ -2,7 +2,7 @@ import static io.agora.rtc2.video.VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_15; import static io.agora.rtc2.video.VideoEncoderConfiguration.ORIENTATION_MODE.ORIENTATION_MODE_ADAPTIVE; -import static io.agora.rtc2.video.VideoEncoderConfiguration.VD_640x360; +import static io.agora.rtc2.video.VideoEncoderConfiguration.VD_960x540; import android.text.TextUtils; import android.util.Log; @@ -66,17 +66,15 @@ public LocalAccessPointConfiguration getPrivateCloudConfig() { } public VideoEncoderConfiguration.VideoDimensions getVideoEncodingDimensionObject() { - if (videoEncodingDimension == null) - return VD_640x360; - VideoEncoderConfiguration.VideoDimensions value = VD_640x360; + VideoEncoderConfiguration.VideoDimensions value = VD_960x540; try { - Field tmp = VideoEncoderConfiguration.class.getDeclaredField(videoEncodingDimension); + Field tmp = VideoEncoderConfiguration.class.getDeclaredField(getVideoEncodingDimension()); tmp.setAccessible(true); value = (VideoEncoderConfiguration.VideoDimensions) tmp.get(null); } catch (NoSuchFieldException e) { - Log.e("Field", "Can not find field " + videoEncodingDimension); + Log.e("Field", "Cannot find field " + getVideoEncodingDimension()); } catch (IllegalAccessException e) { - Log.e("Field", "Could not access field " + videoEncodingDimension); + Log.e("Field", "Could not access field " + getVideoEncodingDimension()); } return value; } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/common/widget/VideoReportLayout.java b/Android/APIExample/app/src/main/java/io/agora/api/example/common/widget/VideoReportLayout.java index 89b42c43b..16986f1c7 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/common/widget/VideoReportLayout.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/common/widget/VideoReportLayout.java @@ -48,8 +48,10 @@ public void onViewAttachedToWindow(View v) { @Override public void onViewDetachedFromWindow(View v) { - reportTextView.removeOnAttachStateChangeListener(this); - reportTextView = null; + if (reportTextView != null) { + reportTextView.removeOnAttachStateChangeListener(this); + reportTextView = null; + } } }); reportTextView.setTextColor(Color.parseColor("#eeeeee")); diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CDNStreaming/AudienceFragment.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CDNStreaming/AudienceFragment.java index 84327a0df..e71b715b6 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CDNStreaming/AudienceFragment.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CDNStreaming/AudienceFragment.java @@ -157,8 +157,9 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { } fl_local.addView(surfaceView); // Setup local video to render your local media player view - VideoCanvas videoCanvas = new VideoCanvas(surfaceView, Constants.RENDER_MODE_HIDDEN, Constants.VIDEO_MIRROR_MODE_AUTO, - Constants.VIDEO_SOURCE_MEDIA_PLAYER,
mediaPlayer.getMediaPlayerId(), 0); + VideoCanvas videoCanvas = new VideoCanvas(surfaceView, Constants.RENDER_MODE_HIDDEN, 0); + videoCanvas.sourceType = Constants.VIDEO_SOURCE_MEDIA_PLAYER; + videoCanvas.mediaPlayerId = mediaPlayer.getMediaPlayerId(); engine.setupLocalVideo(videoCanvas); // You have to call startPreview to see the player video engine.startPreview(); @@ -384,8 +385,9 @@ private void toggleVideoLayout(boolean isMultiple) { } fl_local.addView(surfaceView); // Setup local video to render your local media player view - VideoCanvas videoCanvas = new VideoCanvas(surfaceView, Constants.RENDER_MODE_HIDDEN, Constants.VIDEO_MIRROR_MODE_AUTO, - Constants.VIDEO_SOURCE_MEDIA_PLAYER, mediaPlayer.getMediaPlayerId(), 0); + VideoCanvas videoCanvas = new VideoCanvas(surfaceView, Constants.RENDER_MODE_HIDDEN, 0); + videoCanvas.sourceType = Constants.VIDEO_SOURCE_MEDIA_PLAYER; + videoCanvas.mediaPlayerId = mediaPlayer.getMediaPlayerId(); engine.setupLocalVideo(videoCanvas); } engine.startPreview(); diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java index 23c4b89b0..1e2290158 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java @@ -472,22 +472,12 @@ public void onUserOffline(int uid, int reason) { IVideoFrameObserver videoFrameObserver = new IVideoFrameObserver() { @Override - public boolean onCaptureVideoFrame(VideoFrame videoFrame) { + public boolean onCaptureVideoFrame(int sourceType, VideoFrame videoFrame) { return false; } @Override - public boolean onPreEncodeVideoFrame(VideoFrame videoFrame) { - return false; - } - - @Override - public boolean onScreenCaptureVideoFrame(VideoFrame videoFrame) { - return false; - } - - @Override - public boolean onPreEncodeScreenVideoFrame(VideoFrame videoFrame) { + public boolean onPreEncodeVideoFrame(int sourceType, VideoFrame videoFrame) { return false; }
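The hunk above captures the 4.2.0 raw-frame observer change: the dedicated screen-capture callbacks (onScreenCaptureVideoFrame, onPreEncodeScreenVideoFrame) are removed, and onCaptureVideoFrame/onPreEncodeVideoFrame now receive a sourceType. A sketch of branching on it, assuming the int corresponds to Constants.VideoSourceType values and that the boolean return still decides whether the SDK keeps processing the frame; the observer's other required callbacks are omitted:

    @Override
    public boolean onCaptureVideoFrame(int sourceType, VideoFrame videoFrame) {
        if (sourceType == Constants.VideoSourceType.VIDEO_SOURCE_SCREEN_PRIMARY.getValue()) {
            // Screen-share frames now arrive here instead of onScreenCaptureVideoFrame.
            Log.d(TAG, "screen frame " + videoFrame.getRotatedWidth() + "x" + videoFrame.getRotatedHeight());
        }
        return true;
    }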
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/LocalVideoTranscoding.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/LocalVideoTranscoding.java new file mode 100644 index 000000000..e352b6289 --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/LocalVideoTranscoding.java @@ -0,0 +1,494 @@ +package io.agora.api.example.examples.advanced; + +import static io.agora.api.example.common.model.Examples.ADVANCED; +import static io.agora.rtc2.Constants.RENDER_MODE_HIDDEN; +import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE; + +import android.annotation.SuppressLint; +import android.content.Context; +import android.graphics.Color; +import android.os.Bundle; +import android.util.DisplayMetrics; +import android.util.Log; +import android.view.LayoutInflater; +import android.view.SurfaceView; +import android.view.View; +import android.view.ViewGroup; +import android.widget.Button; +import android.widget.CompoundButton; +import android.widget.EditText; +import android.widget.FrameLayout; +import android.widget.Switch; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; + +import java.util.ArrayList; + +import io.agora.api.example.MainApplication; +import io.agora.api.example.R; +import io.agora.api.example.annotation.Example; +import io.agora.api.example.common.BaseFragment; +import io.agora.api.example.common.widget.VideoReportLayout; +import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.TokenUtils; +import io.agora.rtc2.ChannelMediaOptions; +import io.agora.rtc2.Constants; +import io.agora.rtc2.IRtcEngineEventHandler; +import io.agora.rtc2.LocalTranscoderConfiguration; +import io.agora.rtc2.RtcEngine; +import io.agora.rtc2.RtcEngineConfig; +import io.agora.rtc2.ScreenCaptureParameters; +import io.agora.rtc2.video.CameraCapturerConfiguration; +import io.agora.rtc2.video.SegmentationProperty; +import io.agora.rtc2.video.VideoCanvas; +import io.agora.rtc2.video.VideoEncoderConfiguration; +import io.agora.rtc2.video.VirtualBackgroundSource; + +/**This demo demonstrates how to transcode the local camera and screen-share captures into a single published video track*/ +@Example( + index = 19, + group = ADVANCED, + name = R.string.item_localvideotranscoding, + actionId = R.id.action_mainFragment_to_LocalVideoTranscoding, + tipsId = R.string.localvideotranscoding +) +public class LocalVideoTranscoding extends BaseFragment implements View.OnClickListener, CompoundButton.OnCheckedChangeListener +{ + private static final String TAG = LocalVideoTranscoding.class.getSimpleName(); + + private VideoReportLayout videoReportLayout; + private Button join; + private Switch switchTransparentBackground; + private EditText et_channel; + private RtcEngine engine; + private int myUid; + private boolean joined = false; + + + @Nullable + @Override + public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) + { + return inflater.inflate(R.layout.fragment_localvideotranscoding, container, false); + } + + @Override + public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) + { + super.onViewCreated(view, savedInstanceState); + join = view.findViewById(R.id.btn_join); + switchTransparentBackground = view.findViewById(R.id.btn_transparent_background); + et_channel = view.findViewById(R.id.et_channel); + view.findViewById(R.id.btn_join).setOnClickListener(this); + switchTransparentBackground.setOnCheckedChangeListener(this); + videoReportLayout = view.findViewById(R.id.videoReportLayout); + } + + @Override + public void onActivityCreated(@Nullable Bundle savedInstanceState) + { + super.onActivityCreated(savedInstanceState); + // Check if the context is valid + Context context = getContext(); + if (context == null) + { + return; + } + try + { + RtcEngineConfig config = new RtcEngineConfig(); + /** + * The context of Android Activity + */ + config.mContext = context.getApplicationContext(); + /** + * The App ID issued to you by Agora. See How to get the App ID + */ + config.mAppId = getString(R.string.agora_app_id); + /** Sets the channel profile of the Agora RtcEngine. + CHANNEL_PROFILE_COMMUNICATION(0): (Default) The Communication profile. + Use this profile in one-on-one calls or group calls, where all users can talk freely. + CHANNEL_PROFILE_LIVE_BROADCASTING(1): The Live-Broadcast profile. Users in a live-broadcast + channel have a role as either broadcaster or audience. A broadcaster can both send and receive streams; + an audience can only receive streams.*/ + config.mChannelProfile = Constants.CHANNEL_PROFILE_LIVE_BROADCASTING; + /** + * IRtcEngineEventHandler is an abstract class providing default implementation. + * The SDK uses this class to report to the app on SDK runtime events.
+ */ + config.mEventHandler = iRtcEngineEventHandler; + config.mAudioScenario = Constants.AudioScenario.getValue(Constants.AudioScenario.DEFAULT); + config.mAreaCode = ((MainApplication)getActivity().getApplication()).getGlobalSettings().getAreaCode(); + engine = RtcEngine.create(config); + /** + * This parameter is for reporting the usages of APIExample to agora background. + * Generally, it is not necessary for you to set this parameter. + */ + engine.setParameters("{" + + "\"rtc.report_app_scenario\":" + + "{" + + "\"appScenario\":" + 100 + "," + + "\"serviceType\":" + 11 + "," + + "\"appVersion\":\"" + RtcEngine.getSdkVersion() + "\"" + + "}" + + "}"); + /* setting the local access point if the private cloud ip was set, otherwise the config will be invalid.*/ + engine.setLocalAccessPoint(((MainApplication) getActivity().getApplication()).getGlobalSettings().getPrivateCloudConfig()); + } + catch (Exception e) + { + e.printStackTrace(); + getActivity().onBackPressed(); + } + } + + @Override + public void onDestroy() + { + super.onDestroy(); + /**leaveChannel and Destroy the RtcEngine instance*/ + if(engine != null) + { + engine.leaveChannel(); + engine.stopPreview(Constants.VideoSourceType.VIDEO_SOURCE_TRANSCODED); + engine.stopCameraCapture(Constants.VideoSourceType.VIDEO_SOURCE_CAMERA_PRIMARY); + engine.stopScreenCapture(); + } + handler.post(RtcEngine::destroy); + engine = null; + } + + @SuppressLint("WrongConstant") + @Override + public void onClick(View v) + { + if (v.getId() == R.id.btn_join) + { + if (!joined) + { + CommonUtil.hideInputBoard(getActivity(), et_channel); + // call when join button hit + String channelId = et_channel.getText().toString(); + // Check permission + joinChannel(channelId); + } + else + { + joined = false; + /**After joining a channel, the user must call the leaveChannel method to end the + * call before joining another channel. This method returns 0 if the user leaves the + * channel and releases all resources related to the call. This method call is + * asynchronous, and the user has not exited the channel when the method call returns. + * Once the user leaves the channel, the SDK triggers the onLeaveChannel callback. + * A successful leaveChannel method call triggers the following callbacks: + * 1:The local client: onLeaveChannel. + * 2:The remote client: onUserOffline, if the user leaving the channel is in the + * Communication channel, or is a BROADCASTER in the Live Broadcast profile. + * @returns 0: Success. + * < 0: Failure. + * PS: + * 1:If you call the destroy method immediately after calling the leaveChannel + * method, the leaveChannel process interrupts, and the SDK does not trigger + * the onLeaveChannel callback. 
+ * 2:If you call the leaveChannel method during CDN live streaming, the SDK + * triggers the removeInjectStreamUrl method.*/ + engine.leaveChannel(); + engine.stopPreview(Constants.VideoSourceType.VIDEO_SOURCE_TRANSCODED); + engine.stopCameraCapture(Constants.VideoSourceType.VIDEO_SOURCE_CAMERA_PRIMARY); + engine.stopScreenCapture(); + join.setText(getString(R.string.join)); + videoReportLayout.removeAllViews(); + } + } + } + + private void joinChannel(String channelId) + { + // Check if the context is valid + Context context = getContext(); + if (context == null) + { + return; + } + DisplayMetrics metrics = new DisplayMetrics(); + requireActivity().getWindowManager().getDefaultDisplay().getRealMetrics(metrics); + int width = 720; + int height = (int) (width * 1.0f / metrics.widthPixels * metrics.heightPixels); + + /**In the demo, the default is to enter as the anchor.*/ + engine.setClientRole(Constants.CLIENT_ROLE_BROADCASTER); + // Enable video module + engine.enableVideo(); + // Setup video encoding configs + engine.setVideoEncoderConfiguration(new VideoEncoderConfiguration( + new VideoEncoderConfiguration.VideoDimensions(width, height), + VideoEncoderConfiguration.FRAME_RATE.valueOf(((MainApplication)getActivity().getApplication()).getGlobalSettings().getVideoEncodingFrameRate()), + STANDARD_BITRATE, + VideoEncoderConfiguration.ORIENTATION_MODE.valueOf(((MainApplication)getActivity().getApplication()).getGlobalSettings().getVideoEncodingOrientation()) + )); + + // Route audio to the speakerphone by default + engine.setDefaultAudioRoutetoSpeakerphone(true); + + + engine.startCameraCapture(Constants.VideoSourceType.VIDEO_SOURCE_CAMERA_PRIMARY, new CameraCapturerConfiguration(CameraCapturerConfiguration.CAMERA_DIRECTION.CAMERA_FRONT)); + ScreenCaptureParameters screenCaptureParameters = new ScreenCaptureParameters(); + screenCaptureParameters.captureVideo = true; + screenCaptureParameters.videoCaptureParameters.width = width; + screenCaptureParameters.videoCaptureParameters.height = height; + engine.startScreenCapture(screenCaptureParameters); + + + LocalTranscoderConfiguration config = new LocalTranscoderConfiguration(); + config.videoOutputConfiguration = new VideoEncoderConfiguration( + new VideoEncoderConfiguration.VideoDimensions(width, height), + VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_24, + STANDARD_BITRATE, + VideoEncoderConfiguration.ORIENTATION_MODE.ORIENTATION_MODE_ADAPTIVE + ); + config.transcodingVideoStreams = new ArrayList<>(); + + + LocalTranscoderConfiguration.TranscodingVideoStream screenStream = new LocalTranscoderConfiguration.TranscodingVideoStream(); + screenStream.sourceType = Constants.VideoSourceType.VIDEO_SOURCE_SCREEN_PRIMARY; + screenStream.width = width; + screenStream.height = height; + screenStream.zOrder = 1; + config.transcodingVideoStreams.add(screenStream); + + LocalTranscoderConfiguration.TranscodingVideoStream cameraStream = new LocalTranscoderConfiguration.TranscodingVideoStream(); + cameraStream.sourceType = Constants.VideoSourceType.VIDEO_SOURCE_CAMERA_PRIMARY; + cameraStream.width = width / 2; + cameraStream.height = height / 2; + cameraStream.x = 0; + cameraStream.y = height / 2; + cameraStream.zOrder = 2; + cameraStream.mirror = true; + config.transcodingVideoStreams.add(cameraStream); + + engine.startLocalVideoTranscoder(config); + + // Create render view by RtcEngine + SurfaceView surfaceView = new SurfaceView(context); + if(videoReportLayout.getChildCount() > 0) + { + videoReportLayout.removeAllViews(); + } + // Setup local video to render
your local camera preview + VideoCanvas local = new VideoCanvas(surfaceView, RENDER_MODE_HIDDEN, 0); + local.sourceType = Constants.VideoSourceType.VIDEO_SOURCE_TRANSCODED.getValue(); + local.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED; + engine.setupLocalVideo(local); + // Add to the local container + videoReportLayout.addView(surfaceView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); + + engine.startPreview(Constants.VideoSourceType.VIDEO_SOURCE_TRANSCODED); + + ChannelMediaOptions option = new ChannelMediaOptions(); + option.autoSubscribeAudio = true; + option.autoSubscribeVideo = true; + option.publishMicrophoneTrack = true; + option.publishTranscodedVideoTrack = true; + + /**Please configure accessToken in the string_config file. + * A temporary token generated in Console. A temporary token is valid for 24 hours. For details, see + * https://docs.agora.io/en/Agora%20Platform/token?platform=All%20Platforms#get-a-temporary-token + * A token generated at the server. This applies to scenarios with high-security requirements. For details, see + * https://docs.agora.io/en/cloud-recording/token_server_java?platform=Java*/ + TokenUtils.gen(requireContext(), channelId, 0, ret -> { + + /** Allows a user to join a channel. + if you do not specify the uid, we will generate the uid for you*/ + int res = engine.joinChannel(ret, channelId, 0, option); + if (res != 0) + { + // Usually happens with invalid parameters + // Error code description can be found at: + // en: https://docs.agora.io/en/Voice/API%20Reference/java/classio_1_1agora_1_1rtc_1_1_i_rtc_engine_event_handler_1_1_error_code.html + // cn: https://docs.agora.io/cn/Voice/API%20Reference/java/classio_1_1agora_1_1rtc_1_1_i_rtc_engine_event_handler_1_1_error_code.html + showAlert(RtcEngine.getErrorDescription(Math.abs(res))); + return; + } + // Prevent repeated entry + join.setEnabled(false); + }); + + + + + } + + /** + * IRtcEngineEventHandler is an abstract class providing default implementation. + * The SDK uses this class to report to the app on SDK runtime events. + */ + private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() + { + @Override + public void onLocalVideoTranscoderError(LocalTranscoderConfiguration.TranscodingVideoStream stream, int error) { + super.onLocalVideoTranscoderError(stream, error); + Log.i(TAG, "LocalVideoTranscoding -- onLocalVideoTranscoderError stream=" + stream + ", error=" + error); + } + + /** + * Error code description can be found at: + * en: https://api-ref.agora.io/en/video-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror + * cn: https://docs.agora.io/cn/video-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror + */ + @Override + public void onError(int err) { + super.onError(err); + showLongToast("Error code:" + err + ", msg:" + RtcEngine.getErrorDescription(err)); + if (err == Constants.ERR_INVALID_TOKEN || err == Constants.ERR_TOKEN_EXPIRED) { + engine.leaveChannel(); + runOnUIThread(() -> join.setEnabled(true)); + + if (Constants.ERR_INVALID_TOKEN == err) { + showAlert(getString(R.string.token_invalid)); + } if (Constants.ERR_TOKEN_EXPIRED == err) { + showAlert(getString(R.string.token_expired)); + } + } + } + + /**Occurs when a user leaves the channel. 
+ * @param stats With this callback, the application retrieves the channel information, + * such as the call duration and statistics.*/ + @Override + public void onLeaveChannel(RtcStats stats) + { + super.onLeaveChannel(stats); + Log.i(TAG, String.format("local user %d leaveChannel!", myUid)); + showLongToast(String.format("local user %d leaveChannel!", myUid)); + } + + /**Occurs when the local user joins a specified channel. + * The channel name assignment is based on channelName specified in the joinChannel method. + * If the uid is not specified when joinChannel is called, the server automatically assigns a uid. + * @param channel Channel name + * @param uid User ID + * @param elapsed Time elapsed (ms) from the user calling joinChannel until this callback is triggered*/ + @Override + public void onJoinChannelSuccess(String channel, int uid, int elapsed) + { + Log.i(TAG, String.format("onJoinChannelSuccess channel %s uid %d", channel, uid)); + showLongToast(String.format("onJoinChannelSuccess channel %s uid %d", channel, uid)); + myUid = uid; + joined = true; + handler.post(new Runnable() + { + @Override + public void run() + { + join.setEnabled(true); + join.setText(getString(R.string.leave)); + videoReportLayout.setReportUid(uid); + } + }); + } + + /**Since v2.9.0. + * This callback indicates the state change of the remote audio stream. + * PS: This callback does not work properly when the number of users (in the Communication profile) or + * broadcasters (in the Live-broadcast profile) in the channel exceeds 17. + * @param uid ID of the user whose audio state changes. + * @param state State of the remote audio + * REMOTE_AUDIO_STATE_STOPPED(0): The remote audio is in the default state, probably due + * to REMOTE_AUDIO_REASON_LOCAL_MUTED(3), REMOTE_AUDIO_REASON_REMOTE_MUTED(5), + * or REMOTE_AUDIO_REASON_REMOTE_OFFLINE(7). + * REMOTE_AUDIO_STATE_STARTING(1): The first remote audio packet is received. + * REMOTE_AUDIO_STATE_DECODING(2): The remote audio stream is decoded and plays normally, + * probably due to REMOTE_AUDIO_REASON_NETWORK_RECOVERY(2), + * REMOTE_AUDIO_REASON_LOCAL_UNMUTED(4) or REMOTE_AUDIO_REASON_REMOTE_UNMUTED(6). + * REMOTE_AUDIO_STATE_FROZEN(3): The remote audio is frozen, probably due to + * REMOTE_AUDIO_REASON_NETWORK_CONGESTION(1). + * REMOTE_AUDIO_STATE_FAILED(4): The remote audio fails to start, probably due to + * REMOTE_AUDIO_REASON_INTERNAL(0). + * @param reason The reason of the remote audio state change. + * REMOTE_AUDIO_REASON_INTERNAL(0): Internal reasons. + * REMOTE_AUDIO_REASON_NETWORK_CONGESTION(1): Network congestion. + * REMOTE_AUDIO_REASON_NETWORK_RECOVERY(2): Network recovery. + * REMOTE_AUDIO_REASON_LOCAL_MUTED(3): The local user stops receiving the remote audio + * stream or disables the audio module. + * REMOTE_AUDIO_REASON_LOCAL_UNMUTED(4): The local user resumes receiving the remote audio + * stream or enables the audio module. + * REMOTE_AUDIO_REASON_REMOTE_MUTED(5): The remote user stops sending the audio stream or + * disables the audio module. + * REMOTE_AUDIO_REASON_REMOTE_UNMUTED(6): The remote user resumes sending the audio stream + * or enables the audio module. + * REMOTE_AUDIO_REASON_REMOTE_OFFLINE(7): The remote user leaves the channel. 
+ * @param elapsed Time elapsed (ms) from the local user calling the joinChannel method + * until the SDK triggers this callback.*/ + @Override + public void onRemoteAudioStateChanged(int uid, int state, int reason, int elapsed) { + super.onRemoteAudioStateChanged(uid, state, reason, elapsed); + Log.i(TAG, "onRemoteAudioStateChanged->" + uid + ", state->" + state + ", reason->" + reason); + } + + /**Since v2.9.0. + * Occurs when the remote video state changes. + * PS: This callback does not work properly when the number of users (in the Communication + * profile) or broadcasters (in the Live-broadcast profile) in the channel exceeds 17. + * @param uid ID of the remote user whose video state changes. + * @param state State of the remote video: + * REMOTE_VIDEO_STATE_STOPPED(0): The remote video is in the default state, probably due + * to REMOTE_VIDEO_STATE_REASON_LOCAL_MUTED(3), REMOTE_VIDEO_STATE_REASON_REMOTE_MUTED(5), + * or REMOTE_VIDEO_STATE_REASON_REMOTE_OFFLINE(7). + * REMOTE_VIDEO_STATE_STARTING(1): The first remote video packet is received. + * REMOTE_VIDEO_STATE_DECODING(2): The remote video stream is decoded and plays normally, + * probably due to REMOTE_VIDEO_STATE_REASON_NETWORK_RECOVERY (2), + * REMOTE_VIDEO_STATE_REASON_LOCAL_UNMUTED(4), REMOTE_VIDEO_STATE_REASON_REMOTE_UNMUTED(6), + * or REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK_RECOVERY(9). + * REMOTE_VIDEO_STATE_FROZEN(3): The remote video is frozen, probably due to + * REMOTE_VIDEO_STATE_REASON_NETWORK_CONGESTION(1) or REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK(8). + * REMOTE_VIDEO_STATE_FAILED(4): The remote video fails to start, probably due to + * REMOTE_VIDEO_STATE_REASON_INTERNAL(0). + * @param reason The reason of the remote video state change: + * REMOTE_VIDEO_STATE_REASON_INTERNAL(0): Internal reasons. + * REMOTE_VIDEO_STATE_REASON_NETWORK_CONGESTION(1): Network congestion. + * REMOTE_VIDEO_STATE_REASON_NETWORK_RECOVERY(2): Network recovery. + * REMOTE_VIDEO_STATE_REASON_LOCAL_MUTED(3): The local user stops receiving the remote + * video stream or disables the video module. + * REMOTE_VIDEO_STATE_REASON_LOCAL_UNMUTED(4): The local user resumes receiving the remote + * video stream or enables the video module. + * REMOTE_VIDEO_STATE_REASON_REMOTE_MUTED(5): The remote user stops sending the video + * stream or disables the video module. + * REMOTE_VIDEO_STATE_REASON_REMOTE_UNMUTED(6): The remote user resumes sending the video + * stream or enables the video module. + * REMOTE_VIDEO_STATE_REASON_REMOTE_OFFLINE(7): The remote user leaves the channel. + * REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK(8): The remote media stream falls back to the + * audio-only stream due to poor network conditions. + * REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK_RECOVERY(9): The remote media stream switches + * back to the video stream after the network conditions improve. 
+ * @param elapsed Time elapsed (ms) from the local user calling the joinChannel method until + * the SDK triggers this callback.*/ + @Override + public void onRemoteVideoStateChanged(int uid, int state, int reason, int elapsed) + { + super.onRemoteVideoStateChanged(uid, state, reason, elapsed); + Log.i(TAG, "onRemoteVideoStateChanged->" + uid + ", state->" + state + ", reason->" + reason); + } + + @Override + public void onLocalAudioStats(LocalAudioStats stats) { + super.onLocalAudioStats(stats); + videoReportLayout.setLocalAudioStats(stats); + } + + + @Override + public void onLocalVideoStats(Constants.VideoSourceType source, LocalVideoStats stats) { + super.onLocalVideoStats(source, stats); + videoReportLayout.setLocalVideoStats(stats); + } + + }; + + @Override + public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { + if(buttonView == switchTransparentBackground){ + engine.enableVirtualBackground(isChecked, new VirtualBackgroundSource(VirtualBackgroundSource.BACKGROUND_COLOR, Color.TRANSPARENT, "", VirtualBackgroundSource.BLUR_DEGREE_HIGH), new SegmentationProperty()); + } + } +} diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaPlayer.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaPlayer.java index ae2565a40..b813feb6b 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaPlayer.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaPlayer.java @@ -71,7 +71,7 @@ public class MediaPlayer extends BaseFragment implements View.OnClickListener, I private SeekBar progressBar; private long playerDuration = 0; - private static final String SAMPLE_MOVIE_URL = "https://webdemo.agora.io/agora-web-showcase/examples/Agora-Custom-VideoSource-Web/assets/sample.mp4"; + private static final String SAMPLE_MOVIE_URL = "https://agora-adc-artifacts.s3.cn-north-1.amazonaws.com.cn/resources/sample.mp4"; @Nullable @Override @@ -271,8 +271,9 @@ private void joinChannel(String channelId) { } fl_local.addView(surfaceView); // Setup local video to render your local media player view - VideoCanvas videoCanvas = new VideoCanvas(surfaceView, Constants.RENDER_MODE_HIDDEN, Constants.VIDEO_MIRROR_MODE_AUTO, - Constants.VIDEO_SOURCE_MEDIA_PLAYER, mediaPlayer.getMediaPlayerId(), 0); + VideoCanvas videoCanvas = new VideoCanvas(surfaceView, Constants.RENDER_MODE_HIDDEN, 0); + videoCanvas.sourceType = Constants.VIDEO_SOURCE_MEDIA_PLAYER; + videoCanvas.mediaPlayerId = mediaPlayer.getMediaPlayerId(); engine.setupLocalVideo(videoCanvas); // Route audio to the speakerphone by default engine.setDefaultAudioRoutetoSpeakerphone(true);
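The same VideoCanvas change recurs across the Android examples in this patch: the long 4.1-era constructor is replaced by the short constructor plus field assignments. A condensed sketch of the pattern, with variable names taken directly from the diffs:

    // 4.2.0 pattern for rendering a media player track locally
    VideoCanvas videoCanvas = new VideoCanvas(surfaceView, Constants.RENDER_MODE_HIDDEN, 0);
    videoCanvas.sourceType = Constants.VIDEO_SOURCE_MEDIA_PLAYER;
    videoCanvas.mediaPlayerId = mediaPlayer.getMediaPlayerId();
    engine.setupLocalVideo(videoCanvas);
    engine.startPreview(); // the player view stays black until preview is started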
diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaRecorder.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaRecorder.java new file mode 100644 index 000000000..abfbcb247 --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaRecorder.java @@ -0,0 +1,674 @@ +package io.agora.api.example.examples.advanced; + +import static io.agora.api.example.common.model.Examples.ADVANCED; +import static io.agora.rtc2.Constants.RENDER_MODE_HIDDEN; +import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE; + +import android.Manifest; +import android.annotation.SuppressLint; +import android.content.Context; +import android.os.Build; +import android.os.Bundle; +import android.util.Log; +import android.view.LayoutInflater; +import android.view.SurfaceView; +import android.view.View; +import android.view.ViewGroup; +import android.widget.Button; +import android.widget.EditText; +import android.widget.FrameLayout; +import android.widget.Toast; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.appcompat.app.AlertDialog; + +import com.yanzhenjie.permission.AndPermission; +import com.yanzhenjie.permission.runtime.Permission; + +import java.io.File; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; + +import io.agora.api.example.MainApplication; +import io.agora.api.example.R; +import io.agora.api.example.annotation.Example; +import io.agora.api.example.common.BaseFragment; +import io.agora.api.example.common.widget.VideoReportLayout; +import io.agora.api.example.examples.basic.JoinChannelVideo; +import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.TokenUtils; +import io.agora.rtc2.AgoraMediaRecorder; +import io.agora.rtc2.ChannelMediaOptions; +import io.agora.rtc2.Constants; +import io.agora.rtc2.IMediaRecorderCallback; +import io.agora.rtc2.IRtcEngineEventHandler; +import io.agora.rtc2.RecorderInfo; +import io.agora.rtc2.RecorderStreamInfo; +import io.agora.rtc2.RtcEngine; +import io.agora.rtc2.RtcEngineConfig; +import io.agora.rtc2.video.VideoCanvas; +import io.agora.rtc2.video.VideoEncoderConfiguration; + +@Example( + index = 17, + group = ADVANCED, + name = R.string.item_media_recorder, + actionId = R.id.action_mainFragment_to_MediaRecorder, + tipsId = R.string.media_recorder +) +public class MediaRecorder extends BaseFragment implements View.OnClickListener { + private static final String TAG = MediaRecorder.class.getSimpleName(); + + private VideoReportLayout fl_local, fl_remote, fl_remote_2, fl_remote_3; + private Button join, switch_camera; + private EditText et_channel; + private RtcEngine engine; + private int myUid; + private String channelId; + private boolean joined = false; + private final Map<Integer, ViewGroup> remoteViews = new ConcurrentHashMap<>(); + private AgoraMediaRecorder localMediaRecorder; + private final Map<Integer, AgoraMediaRecorder> remoteMediaRecorders = new HashMap<>(); + + + @Nullable + @Override + public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { + return inflater.inflate(R.layout.fragment_media_recorder, container, false); + } + + @Override + public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { + super.onViewCreated(view, savedInstanceState); + join = view.findViewById(R.id.btn_join); + switch_camera = view.findViewById(R.id.btn_switch_camera); + et_channel = view.findViewById(R.id.et_channel); + view.findViewById(R.id.btn_join).setOnClickListener(this); + switch_camera.setOnClickListener(this); + fl_local = view.findViewById(R.id.fl_local); + fl_remote = view.findViewById(R.id.fl_remote); + fl_remote_2 = view.findViewById(R.id.fl_remote2); + fl_remote_3 = view.findViewById(R.id.fl_remote3); + } + + @Override + public void onActivityCreated(@Nullable Bundle savedInstanceState) { + super.onActivityCreated(savedInstanceState); + // Check if the context is valid + Context context = getContext(); + if (context == null) { + return; + } + try { + RtcEngineConfig config = new RtcEngineConfig(); + /** + * The context of Android Activity + */ + config.mContext = context.getApplicationContext(); + /** + *
The App ID issued to you by Agora. See How to get the App ID + */ + config.mAppId = getString(R.string.agora_app_id); + /** Sets the channel profile of the Agora RtcEngine. + CHANNEL_PROFILE_COMMUNICATION(0): (Default) The Communication profile. + Use this profile in one-on-one calls or group calls, where all users can talk freely. + CHANNEL_PROFILE_LIVE_BROADCASTING(1): The Live-Broadcast profile. Users in a live-broadcast + channel have a role as either broadcaster or audience. A broadcaster can both send and receive streams; + an audience can only receive streams.*/ + config.mChannelProfile = Constants.CHANNEL_PROFILE_LIVE_BROADCASTING; + /** + * IRtcEngineEventHandler is an abstract class providing default implementation. + * The SDK uses this class to report to the app on SDK runtime events. + */ + config.mEventHandler = iRtcEngineEventHandler; + config.mAudioScenario = Constants.AudioScenario.getValue(Constants.AudioScenario.DEFAULT); + config.mAreaCode = ((MainApplication) getActivity().getApplication()).getGlobalSettings().getAreaCode(); + engine = RtcEngine.create(config); + /** + * This parameter is for reporting the usages of APIExample to agora background. + * Generally, it is not necessary for you to set this parameter. + */ + engine.setParameters("{" + + "\"rtc.report_app_scenario\":" + + "{" + + "\"appScenario\":" + 100 + "," + + "\"serviceType\":" + 11 + "," + + "\"appVersion\":\"" + RtcEngine.getSdkVersion() + "\"" + + "}" + + "}"); + /* setting the local access point if the private cloud ip was set, otherwise the config will be invalid.*/ + engine.setLocalAccessPoint(((MainApplication) getActivity().getApplication()).getGlobalSettings().getPrivateCloudConfig()); + } catch (Exception e) { + e.printStackTrace(); + getActivity().onBackPressed(); + } + } + + @Override + public void onDestroy() { + super.onDestroy(); + stopAllMediaRecorder(); + /**leaveChannel and Destroy the RtcEngine instance*/ + if (engine != null) { + engine.leaveChannel(); + } + handler.post(RtcEngine::destroy); + engine = null; + } + + @SuppressLint("WrongConstant") + @Override + public void onClick(View v) { + if (v.getId() == R.id.btn_join) { + if (!joined) { + CommonUtil.hideInputBoard(getActivity(), et_channel); + // called when the join button is clicked + String channelId = et_channel.getText().toString(); + // Check permission + List<String> permissionList = new ArrayList<>(); + permissionList.add(Permission.READ_EXTERNAL_STORAGE); + permissionList.add(Permission.WRITE_EXTERNAL_STORAGE); + permissionList.add(Permission.RECORD_AUDIO); + permissionList.add(Permission.CAMERA); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { + permissionList.add(Manifest.permission.BLUETOOTH_CONNECT); + } + + String[] permissionArray = new String[permissionList.size()]; + permissionList.toArray(permissionArray); + + if (AndPermission.hasPermissions(this, permissionArray)) { + joinChannel(channelId); + return; + } + // Request permission + AndPermission.with(this).runtime().permission( + permissionArray + ).onGranted(permissions -> + { + // Permissions Granted + joinChannel(channelId); + }).start(); + } else { + joined = false; + stopAllMediaRecorder(); + /**After joining a channel, the user must call the leaveChannel method to end the + * call before joining another channel. This method returns 0 if the user leaves the + * channel and releases all resources related to the call. This method call is + * asynchronous, and the user has not exited the channel when the method call returns.
+ * Once the user leaves the channel, the SDK triggers the onLeaveChannel callback. + * A successful leaveChannel method call triggers the following callbacks: + * 1:The local client: onLeaveChannel. + * 2:The remote client: onUserOffline, if the user leaving the channel is in the + * Communication channel, or is a BROADCASTER in the Live Broadcast profile. + * @returns 0: Success. + * < 0: Failure. + * PS: + * 1:If you call the destroy method immediately after calling the leaveChannel + * method, the leaveChannel process interrupts, and the SDK does not trigger + * the onLeaveChannel callback. + * 2:If you call the leaveChannel method during CDN live streaming, the SDK + * triggers the removeInjectStreamUrl method.*/ + engine.leaveChannel(); + join.setText(getString(R.string.join)); + for (ViewGroup value : remoteViews.values()) { + value.removeAllViews(); + resetLayoutRecording(value); + } + remoteViews.clear(); + fl_local.removeAllViews(); + resetLayoutRecording(fl_local); + } + } else if (v.getId() == switch_camera.getId()) { + if (engine != null && joined) { + engine.switchCamera(); + } + } + } + + private void joinChannel(String channelId) { + // Check if the context is valid + Context context = getContext(); + if (context == null) { + return; + } + + // Create render view by RtcEngine + SurfaceView surfaceView = new SurfaceView(context); + if (fl_local.getChildCount() > 0) { + fl_local.removeAllViews(); + } + // Add to the local container + fl_local.addView(surfaceView, 0, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); + setupLayoutRecording(fl_local, () -> startLocalMediaRecorder(channelId), this::stopLocalMediaRecorder); + // Setup local video to render your local camera preview + engine.setupLocalVideo(new VideoCanvas(surfaceView, RENDER_MODE_HIDDEN, 0)); + // Route audio to the speakerphone by default + engine.setDefaultAudioRoutetoSpeakerphone(true); + + /**In the demo, the default is to enter as the anchor.*/ + engine.setClientRole(Constants.CLIENT_ROLE_BROADCASTER); + // Enable video module + engine.enableVideo(); + // Setup video encoding configs + engine.setVideoEncoderConfiguration(new VideoEncoderConfiguration( + ((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingDimensionObject(), + VideoEncoderConfiguration.FRAME_RATE.valueOf(((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingFrameRate()), + STANDARD_BITRATE, + VideoEncoderConfiguration.ORIENTATION_MODE.valueOf(((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingOrientation()) + )); + + ChannelMediaOptions option = new ChannelMediaOptions(); + option.autoSubscribeAudio = true; + option.autoSubscribeVideo = true; + option.publishMicrophoneTrack = true; + option.publishCameraTrack = true; + + /**Please configure accessToken in the string_config file. + * A temporary token generated in Console. A temporary token is valid for 24 hours. For details, see + * https://docs.agora.io/en/Agora%20Platform/token?platform=All%20Platforms#get-a-temporary-token + * A token generated at the server. This applies to scenarios with high-security requirements. For details, see + * https://docs.agora.io/en/cloud-recording/token_server_java?platform=Java*/ + TokenUtils.gen(requireContext(), channelId, 0, ret -> { + + /** Allows a user to join a channel.
+ if you do not specify the uid, we will generate the uid for you*/ + int res = engine.joinChannel(ret, channelId, 0, option); + if (res != 0) { + // Usually happens with invalid parameters + // Error code description can be found at: + // en: https://docs.agora.io/en/Voice/API%20Reference/java/classio_1_1agora_1_1rtc_1_1_i_rtc_engine_event_handler_1_1_error_code.html + // cn: https://docs.agora.io/cn/Voice/API%20Reference/java/classio_1_1agora_1_1rtc_1_1_i_rtc_engine_event_handler_1_1_error_code.html + showAlert(RtcEngine.getErrorDescription(Math.abs(res))); + return; + } + // Prevent repeated entry + join.setEnabled(false); + }); + } + + private void stopAllMediaRecorder(){ + stopLocalMediaRecorder(); + // Copy the key set first: stopRemoteMediaRecorder() removes entries from the map, + // which would otherwise throw ConcurrentModificationException mid-iteration. + List<Integer> remoteUidList = new ArrayList<>(remoteMediaRecorders.keySet()); + for (Integer uid : remoteUidList) { + stopRemoteMediaRecorder(uid); + } + } + + private void stopRemoteMediaRecorder(int uid) { + AgoraMediaRecorder mediaRecorder = remoteMediaRecorders.get(uid); + if(mediaRecorder == null){ + return; + } + // Stop the remote recording + int ret = mediaRecorder.stopRecording(); + Toast.makeText(requireContext(), "StopRecording ret=" + ret, Toast.LENGTH_SHORT).show(); + mediaRecorder.setMediaRecorderObserver(null); + engine.destroyMediaRecorder(mediaRecorder); + remoteMediaRecorders.remove(uid); + } + + private void startRemoteMediaRecorder(String channelId, int uid) { + // Start the remote recording + AgoraMediaRecorder mediaRecorder = remoteMediaRecorders.get(uid); + String storagePath = requireContext().getExternalCacheDir().getAbsolutePath() + File.separator + "media_recorder_" + channelId + "_" + uid + ".mp4"; + if (mediaRecorder == null) { + mediaRecorder = engine.createMediaRecorder(new RecorderStreamInfo(channelId, uid)); + // Before starting recording, you must call setMediaRecorderObserver first. Otherwise, recording will fail with code -4.
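+            // The MediaRecorderConfiguration used by startRecording() below takes, in order: storage path,
+            // container format, stream type, max duration (ms), and an info-update interval (ms). This is a
+            // best-effort reading of the 4.2.0 Java signature: CONTAINER_MP4 with STREAM_TYPE_BOTH records
+            // mixed audio and video for at most 120000 ms, and the interval of 0 is assumed to disable
+            // periodic onRecorderInfoUpdated callbacks.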
+ mediaRecorder.setMediaRecorderObserver(new IMediaRecorderCallback() { + @Override + public void onRecorderStateChanged(String channelId, int uid, int state, int error) { + Log.d(TAG, "RemoteMediaRecorder -- onRecorderStateChanged channelId=" + channelId + ", uid=" + uid + ", state=" + state + ", error=" + error); + if (state == AgoraMediaRecorder.RECORDER_STATE_STOP) { + showRecordMediaPathDialog(storagePath); + } + } + + @Override + public void onRecorderInfoUpdated(String channelId, int uid, RecorderInfo info) { + Log.d(TAG, "RemoteMediaRecorder -- onRecorderInfoUpdated channelId=" + channelId + ", uid=" + uid + ", fileName=" + info.fileName + ", durationMs=" + info.durationMs + ", fileSize=" + info.fileSize); + } + }); + remoteMediaRecorders.put(uid, mediaRecorder); + } + int ret = mediaRecorder.startRecording(new AgoraMediaRecorder.MediaRecorderConfiguration( + storagePath, + AgoraMediaRecorder.CONTAINER_MP4, AgoraMediaRecorder.STREAM_TYPE_BOTH, 120000, 0 + )); + Toast.makeText(requireContext(), "StartRecording ret=" + ret, Toast.LENGTH_SHORT).show(); + } + + private void stopLocalMediaRecorder() { + if(localMediaRecorder == null){ + return; + } + // Stop Local Recording + int ret = localMediaRecorder.stopRecording(); + Toast.makeText(requireContext(), "StopRecording ret=" + ret, Toast.LENGTH_SHORT).show(); + localMediaRecorder.setMediaRecorderObserver(null); + engine.destroyMediaRecorder(localMediaRecorder); + localMediaRecorder = null; + } + + private void startLocalMediaRecorder(String channelId) { + // Start Local Recording + String storagePath = requireContext().getExternalCacheDir().getAbsolutePath() + File.separator + "media_recorder_" + channelId + "_local.mp4"; + + if (localMediaRecorder == null) { + localMediaRecorder = engine.createMediaRecorder(new RecorderStreamInfo(channelId, myUid)); + // Before starting recording, you must call setMediaRecorderObserver first. Otherwise, recording will fail with code -4.
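+            // Unlike the remote recorders above, this recorder is keyed by the local uid (myUid), so it
+            // should only be created after onJoinChannelSuccess has assigned myUid. The matching cleanup
+            // path is stopLocalMediaRecorder(), which clears the observer and calls engine.destroyMediaRecorder().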
+ localMediaRecorder.setMediaRecorderObserver(new IMediaRecorderCallback() { + @Override + public void onRecorderStateChanged(String channelId, int uid, int state, int error) { + Log.d(TAG, "LocalMediaRecorder -- onRecorderStateChanged channelId=" + channelId + ", uid=" + uid + ", state=" + state + ", error=" + error); + if (state == AgoraMediaRecorder.RECORDER_STATE_STOP) { + showRecordMediaPathDialog(storagePath); + } + } + + @Override + public void onRecorderInfoUpdated(String channelId, int uid, RecorderInfo info) { + Log.d(TAG, "LocalMediaRecorder -- onRecorderInfoUpdated channelId=" + channelId + ", uid=" + uid + ", fileName=" + info.fileName + ", durationMs=" + info.durationMs + ", fileSize=" + info.fileSize); + } + }); + } + int ret = localMediaRecorder.startRecording(new AgoraMediaRecorder.MediaRecorderConfiguration( + storagePath, + AgoraMediaRecorder.CONTAINER_MP4, AgoraMediaRecorder.STREAM_TYPE_BOTH, 120000, 0 + )); + Toast.makeText(requireContext(), "StartRecording ret=" + ret, Toast.LENGTH_SHORT).show(); + } + + private void setupLayoutRecording(@NonNull ViewGroup reportLayout, @NonNull Runnable onStart, @NonNull Runnable onStop) { + Button btnRecording = ((ViewGroup)reportLayout.getParent()).findViewWithTag(getString(R.string.recording_tag)); + if (btnRecording == null) { + return; + } + btnRecording.setText(R.string.start_recording); + btnRecording.setVisibility(View.VISIBLE); + btnRecording.setOnClickListener(v -> { + if (btnRecording.getText().equals(getString(R.string.start_recording))) { + + btnRecording.setText(R.string.stop_recording); + onStart.run(); + } else { + // Stop Recording + btnRecording.setText(R.string.start_recording); + onStop.run(); + } + }); + } + + private void showRecordMediaPathDialog(String path){ + runOnUIThread(() -> { + new AlertDialog.Builder(requireContext()) + .setTitle("MediaFilePath") + .setMessage(path) + .setPositiveButton(R.string.confirm, (dialog, which) -> dialog.dismiss()) + .show(); + }); + } + + private void resetLayoutRecording(@NonNull ViewGroup reportLayout) { + Button btnRecording = ((ViewGroup)reportLayout.getParent()).findViewWithTag(getString(R.string.recording_tag)); + if (btnRecording == null) { + return; + } + btnRecording.setVisibility(View.GONE); + btnRecording.setText(R.string.start_recording); + } + + /** + * IRtcEngineEventHandler is an abstract class providing default implementation. + * The SDK uses this class to report to the app on SDK runtime events. + */ + private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() { + /** + * Error code description can be found at: + * en: https://api-ref.agora.io/en/video-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror + * cn: https://docs.agora.io/cn/video-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror + */ + @Override + public void onError(int err) { + super.onError(err); + showLongToast("Error code:" + err + ", msg:" + RtcEngine.getErrorDescription(err)); + if (err == Constants.ERR_INVALID_TOKEN || err == Constants.ERR_TOKEN_EXPIRED) { + engine.leaveChannel(); + runOnUIThread(() -> join.setEnabled(true)); + + if (Constants.ERR_INVALID_TOKEN == err) { + showAlert(getString(R.string.token_invalid)); + } + if (Constants.ERR_TOKEN_EXPIRED == err) { + showAlert(getString(R.string.token_expired)); + } + } + } + + /**Occurs when a user leaves the channel. 
+ * @param stats With this callback, the application retrieves the channel information, + * such as the call duration and statistics.*/ + @Override + public void onLeaveChannel(RtcStats stats) { + super.onLeaveChannel(stats); + Log.i(TAG, String.format("local user %d leaveChannel!", myUid)); + showLongToast(String.format("local user %d leaveChannel!", myUid)); + } + + /**Occurs when the local user joins a specified channel. + * The channel name assignment is based on channelName specified in the joinChannel method. + * If the uid is not specified when joinChannel is called, the server automatically assigns a uid. + * @param channel Channel name + * @param uid User ID + * @param elapsed Time elapsed (ms) from the user calling joinChannel until this callback is triggered*/ + @Override + public void onJoinChannelSuccess(String channel, int uid, int elapsed) { + Log.i(TAG, String.format("onJoinChannelSuccess channel %s uid %d", channel, uid)); + showLongToast(String.format("onJoinChannelSuccess channel %s uid %d", channel, uid)); + myUid = uid; + channelId = channel; + joined = true; + handler.post(new Runnable() { + @Override + public void run() { + join.setEnabled(true); + join.setText(getString(R.string.leave)); + fl_local.setReportUid(uid); + } + }); + } + + /**Since v2.9.0. + * This callback indicates the state change of the remote audio stream. + * PS: This callback does not work properly when the number of users (in the Communication profile) or + * broadcasters (in the Live-broadcast profile) in the channel exceeds 17. + * @param uid ID of the user whose audio state changes. + * @param state State of the remote audio + * REMOTE_AUDIO_STATE_STOPPED(0): The remote audio is in the default state, probably due + * to REMOTE_AUDIO_REASON_LOCAL_MUTED(3), REMOTE_AUDIO_REASON_REMOTE_MUTED(5), + * or REMOTE_AUDIO_REASON_REMOTE_OFFLINE(7). + * REMOTE_AUDIO_STATE_STARTING(1): The first remote audio packet is received. + * REMOTE_AUDIO_STATE_DECODING(2): The remote audio stream is decoded and plays normally, + * probably due to REMOTE_AUDIO_REASON_NETWORK_RECOVERY(2), + * REMOTE_AUDIO_REASON_LOCAL_UNMUTED(4) or REMOTE_AUDIO_REASON_REMOTE_UNMUTED(6). + * REMOTE_AUDIO_STATE_FROZEN(3): The remote audio is frozen, probably due to + * REMOTE_AUDIO_REASON_NETWORK_CONGESTION(1). + * REMOTE_AUDIO_STATE_FAILED(4): The remote audio fails to start, probably due to + * REMOTE_AUDIO_REASON_INTERNAL(0). + * @param reason The reason of the remote audio state change. + * REMOTE_AUDIO_REASON_INTERNAL(0): Internal reasons. + * REMOTE_AUDIO_REASON_NETWORK_CONGESTION(1): Network congestion. + * REMOTE_AUDIO_REASON_NETWORK_RECOVERY(2): Network recovery. + * REMOTE_AUDIO_REASON_LOCAL_MUTED(3): The local user stops receiving the remote audio + * stream or disables the audio module. + * REMOTE_AUDIO_REASON_LOCAL_UNMUTED(4): The local user resumes receiving the remote audio + * stream or enables the audio module. + * REMOTE_AUDIO_REASON_REMOTE_MUTED(5): The remote user stops sending the audio stream or + * disables the audio module. + * REMOTE_AUDIO_REASON_REMOTE_UNMUTED(6): The remote user resumes sending the audio stream + * or enables the audio module. + * REMOTE_AUDIO_REASON_REMOTE_OFFLINE(7): The remote user leaves the channel. 
+ * @param elapsed Time elapsed (ms) from the local user calling the joinChannel method + * until the SDK triggers this callback.*/ + @Override + public void onRemoteAudioStateChanged(int uid, int state, int reason, int elapsed) { + super.onRemoteAudioStateChanged(uid, state, reason, elapsed); + Log.i(TAG, "onRemoteAudioStateChanged->" + uid + ", state->" + state + ", reason->" + reason); + } + + /**Since v2.9.0. + * Occurs when the remote video state changes. + * PS: This callback does not work properly when the number of users (in the Communication + * profile) or broadcasters (in the Live-broadcast profile) in the channel exceeds 17. + * @param uid ID of the remote user whose video state changes. + * @param state State of the remote video: + * REMOTE_VIDEO_STATE_STOPPED(0): The remote video is in the default state, probably due + * to REMOTE_VIDEO_STATE_REASON_LOCAL_MUTED(3), REMOTE_VIDEO_STATE_REASON_REMOTE_MUTED(5), + * or REMOTE_VIDEO_STATE_REASON_REMOTE_OFFLINE(7). + * REMOTE_VIDEO_STATE_STARTING(1): The first remote video packet is received. + * REMOTE_VIDEO_STATE_DECODING(2): The remote video stream is decoded and plays normally, + * probably due to REMOTE_VIDEO_STATE_REASON_NETWORK_RECOVERY (2), + * REMOTE_VIDEO_STATE_REASON_LOCAL_UNMUTED(4), REMOTE_VIDEO_STATE_REASON_REMOTE_UNMUTED(6), + * or REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK_RECOVERY(9). + * REMOTE_VIDEO_STATE_FROZEN(3): The remote video is frozen, probably due to + * REMOTE_VIDEO_STATE_REASON_NETWORK_CONGESTION(1) or REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK(8). + * REMOTE_VIDEO_STATE_FAILED(4): The remote video fails to start, probably due to + * REMOTE_VIDEO_STATE_REASON_INTERNAL(0). + * @param reason The reason of the remote video state change: + * REMOTE_VIDEO_STATE_REASON_INTERNAL(0): Internal reasons. + * REMOTE_VIDEO_STATE_REASON_NETWORK_CONGESTION(1): Network congestion. + * REMOTE_VIDEO_STATE_REASON_NETWORK_RECOVERY(2): Network recovery. + * REMOTE_VIDEO_STATE_REASON_LOCAL_MUTED(3): The local user stops receiving the remote + * video stream or disables the video module. + * REMOTE_VIDEO_STATE_REASON_LOCAL_UNMUTED(4): The local user resumes receiving the remote + * video stream or enables the video module. + * REMOTE_VIDEO_STATE_REASON_REMOTE_MUTED(5): The remote user stops sending the video + * stream or disables the video module. + * REMOTE_VIDEO_STATE_REASON_REMOTE_UNMUTED(6): The remote user resumes sending the video + * stream or enables the video module. + * REMOTE_VIDEO_STATE_REASON_REMOTE_OFFLINE(7): The remote user leaves the channel. + * REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK(8): The remote media stream falls back to the + * audio-only stream due to poor network conditions. + * REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK_RECOVERY(9): The remote media stream switches + * back to the video stream after the network conditions improve. + * @param elapsed Time elapsed (ms) from the local user calling the joinChannel method until + * the SDK triggers this callback.*/ + @Override + public void onRemoteVideoStateChanged(int uid, int state, int reason, int elapsed) { + super.onRemoteVideoStateChanged(uid, state, reason, elapsed); + Log.i(TAG, "onRemoteVideoStateChanged->" + uid + ", state->" + state + ", reason->" + reason); + } + + /**Occurs when a remote user (Communication)/host (Live Broadcast) joins the channel. + * @param uid ID of the user whose audio state changes. 
+ * @param elapsed Time delay (ms) from the local user calling joinChannel/setClientRole + * until this callback is triggered.*/ + @Override + public void onUserJoined(int uid, int elapsed) { + super.onUserJoined(uid, elapsed); + Log.i(TAG, "onUserJoined->" + uid); + showLongToast(String.format("user %d joined!", uid)); + /**Check if the context is correct*/ + Context context = getContext(); + if (context == null) { + return; + } + if (remoteViews.containsKey(uid)) { + return; + } else { + handler.post(() -> + { + /**Display remote video stream*/ + SurfaceView surfaceView = null; + // Create render view by RtcEngine + surfaceView = new SurfaceView(context); + surfaceView.setZOrderMediaOverlay(true); + VideoReportLayout view = getAvailableView(); + view.setReportUid(uid); + setupLayoutRecording(view, () -> startRemoteMediaRecorder(channelId, uid), () -> stopRemoteMediaRecorder(uid)); + remoteViews.put(uid, view); + // Add to the remote container + view.addView(surfaceView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); + // Setup remote video to render + engine.setupRemoteVideo(new VideoCanvas(surfaceView, RENDER_MODE_HIDDEN, uid)); + }); + } + } + + /**Occurs when a remote user (Communication)/host (Live Broadcast) leaves the channel. + * @param uid ID of the user whose audio state changes. + * @param reason Reason why the user goes offline: + * USER_OFFLINE_QUIT(0): The user left the current channel. + * USER_OFFLINE_DROPPED(1): The SDK timed out and the user dropped offline because no data + * packet was received within a certain period of time. If a user quits the + * call and the message is not passed to the SDK (due to an unreliable channel), + * the SDK assumes the user dropped offline. + * USER_OFFLINE_BECOME_AUDIENCE(2): (Live broadcast only.) The client role switched from + * the host to the audience.*/ + @Override + public void onUserOffline(int uid, int reason) { + Log.i(TAG, String.format("user %d offline! reason:%d", uid, reason)); + showLongToast(String.format("user %d offline! 
reason:%d", uid, reason)); + handler.post(new Runnable() { + @Override + public void run() { + /**Clear render view + Note: The video will stay at its last frame, to completely remove it you will need to + remove the SurfaceView from its parent*/ + engine.setupRemoteVideo(new VideoCanvas(null, RENDER_MODE_HIDDEN, uid)); + stopRemoteMediaRecorder(uid); + ViewGroup viewGroup = remoteViews.get(uid); + resetLayoutRecording(viewGroup); + viewGroup.removeAllViews(); + remoteViews.remove(uid); + } + }); + } + + @Override + public void onLocalAudioStats(LocalAudioStats stats) { + super.onLocalAudioStats(stats); + fl_local.setLocalAudioStats(stats); + } + + @Override + public void onRemoteAudioStats(RemoteAudioStats stats) { + super.onRemoteAudioStats(stats); + fl_remote.setRemoteAudioStats(stats); + fl_remote_2.setRemoteAudioStats(stats); + fl_remote_3.setRemoteAudioStats(stats); + } + + @Override + public void onLocalVideoStats(Constants.VideoSourceType source, LocalVideoStats stats) { + super.onLocalVideoStats(source, stats); + fl_local.setLocalVideoStats(stats); + } + + @Override + public void onRemoteVideoStats(RemoteVideoStats stats) { + super.onRemoteVideoStats(stats); + fl_remote.setRemoteVideoStats(stats); + fl_remote_2.setRemoteVideoStats(stats); + fl_remote_3.setRemoteVideoStats(stats); + } + }; + + private VideoReportLayout getAvailableView() { + if (fl_remote.getChildCount() == 0) { + return fl_remote; + } else if (fl_remote_2.getChildCount() == 0) { + return fl_remote_2; + } else if (fl_remote_3.getChildCount() == 0) { + return fl_remote_3; + } else { + return fl_remote; + } + } + +} diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MultiVideoSourceTracks.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MultiVideoSourceTracks.java index 05c28b5f9..cadacc747 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MultiVideoSourceTracks.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MultiVideoSourceTracks.java @@ -5,6 +5,7 @@ import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE; import android.content.Context; +import android.graphics.Matrix; import android.os.Bundle; import android.util.Log; import android.view.LayoutInflater; @@ -13,6 +14,8 @@ import android.view.ViewGroup; import android.widget.Button; import android.widget.EditText; +import android.widget.Spinner; +import android.widget.Toast; import androidx.annotation.NonNull; import androidx.annotation.Nullable; @@ -20,18 +23,27 @@ import com.yanzhenjie.permission.AndPermission; import com.yanzhenjie.permission.runtime.Permission; +import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.Random; +import java.util.concurrent.Callable; import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.widget.VideoReportLayout; +import io.agora.api.example.examples.advanced.videoRender.YuvFboProgram; import io.agora.api.example.utils.CommonUtil; import io.agora.api.example.utils.TokenUtils; import io.agora.api.example.utils.VideoFileReader; +import io.agora.base.JavaI420Buffer; +import io.agora.base.NV12Buffer; +import io.agora.base.NV21Buffer; +import io.agora.base.TextureBufferHelper; +import io.agora.base.VideoFrame; +import io.agora.base.internal.video.YuvHelper; import 
io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; import io.agora.rtc2.IRtcEngineEventHandler; @@ -39,6 +51,7 @@ import io.agora.rtc2.RtcEngine; import io.agora.rtc2.RtcEngineConfig; import io.agora.rtc2.RtcEngineEx; +import io.agora.rtc2.gl.EglBaseProvider; import io.agora.rtc2.video.VideoCanvas; import io.agora.rtc2.video.VideoEncoderConfiguration; @@ -59,10 +72,13 @@ public class MultiVideoSourceTracks extends BaseFragment implements View.OnClick private RtcEngineEx engine; private int myUid; private volatile boolean joined = false; + private Spinner sp_push_buffer_type; private final List<Integer> videoTrackIds = new ArrayList<>(); private final List<VideoFileReader> videoFileReaders = new ArrayList<>(); private final List<RtcConnection> connections = new ArrayList<>(); + private YuvFboProgram yuvFboProgram; + private TextureBufferHelper textureBufferHelper; @Nullable @Override @@ -86,6 +102,7 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat }; view.findViewById(R.id.btn_track_create).setOnClickListener(v -> createPushingVideoTrack()); view.findViewById(R.id.btn_track_destroy).setOnClickListener(v -> destroyLastPushingVideoTrack()); + sp_push_buffer_type = view.findViewById(R.id.sp_buffer_type); } @Override @@ -222,8 +239,6 @@ private void joinChannel(String channelId) { option.clientRoleType = Constants.CLIENT_ROLE_AUDIENCE; option.autoSubscribeAudio = true; option.autoSubscribeVideo = true; - option.publishCameraTrack = false; - option.publishMicrophoneTrack = false; int res = engine.joinChannel(accessToken, channelId, 0, option); if (res != 0) { // Usually happens with invalid parameters @@ -239,35 +254,190 @@ private void joinChannel(String channelId) { } private void createPushingVideoTrack() { - if(!joined || videoTrackIds.size() >= 4){ + if (!joined || videoTrackIds.size() >= 4) { return; } + /* + * Get a custom video track id created internally, which can be used for publishing or preview. + * + * @return + * - > 0: the usable video track id. + * - < 0: Failure. + */ int videoTrack = engine.createCustomVideoTrack(); + if (videoTrack < 0) { + Toast.makeText(requireContext(), "createCustomVideoTrack failed!", Toast.LENGTH_LONG).show(); + return; + } String channelId = et_channel.getText().toString(); int uid = new Random().nextInt(1000) + 20000; RtcConnection connection = new RtcConnection(channelId, uid); + /* + Generate a token via the RESTful API, which can be used to join the channel. + */ TokenUtils.gen(requireContext(), channelId, uid, accessToken -> { ChannelMediaOptions option = new ChannelMediaOptions(); option.clientRoleType = Constants.CLIENT_ROLE_BROADCASTER; option.autoSubscribeAudio = true; option.autoSubscribeVideo = true; - option.publishCameraTrack = false; option.publishCustomVideoTrack = true; + /* + Specify the custom video track id to publish in this channel. + */ option.customVideoTrackId = videoTrack; + /* + * Joins a channel. + * + * @return + * - 0: Success. + * - < 0: Failure. + * - -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set + * to an integer, or the value of a member in the `ChannelMediaOptions` structure is invalid. You need + * to pass in a valid parameter and join the channel again. + * - -3: Fails to initialize the `IRtcEngine` object. You need to reinitialize the IRtcEngine object. + * - -7: The IRtcEngine object has not been initialized. You need to initialize the IRtcEngine + * object before calling this method. + * - -8: The internal state of the IRtcEngine object is wrong. 
The typical cause is that you call + * this method to join the channel without calling `stopEchoTest` to stop the test after calling + * `startEchoTest` to start a call loop test. You need to call `stopEchoTest` before calling this method. + * - -17: The request to join the channel is rejected. The typical cause is that the user is in the + * channel. Agora recommends using the `onConnectionStateChanged` callback to get whether the user is + * in the channel. Do not call this method to join the channel unless you receive the + * `CONNECTION_STATE_DISCONNECTED(1)` state. + * - -102: The channel name is invalid. You need to pass in a valid channel name in channelId to + * rejoin the channel. + * - -121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. + */ int res = engine.joinChannelEx(accessToken, connection, option, new IRtcEngineEventHandler() { }); if (res != 0) { + /* + * Destroy a custom video track. + * + * @param video_track_id The video track id created by createCustomVideoTrack + * @return + * - 0: Success. + * - < 0: Failure. + */ engine.destroyCustomVideoTrack(videoTrack); showAlert(RtcEngine.getErrorDescription(Math.abs(res))); } else { - VideoFileReader videoFileReader = new VideoFileReader(requireContext(), videoFrame -> { - if (engine != null && joined) { - engine.pushExternalVideoFrameEx(videoFrame, videoTrack); + /* + * VideoFileReader cyclically reads the raw I420 buffer data of the sample.yuv file in assets. + */ + VideoFileReader videoFileReader = new VideoFileReader(requireContext(), (yuv, width, height) -> { + if (joined && engine != null) { + String selectedItem = (String) sp_push_buffer_type.getSelectedItem(); + + /* + * The code below shows how to create buffers of different types. + */ + VideoFrame.Buffer frameBuffer; + if ("NV21".equals(selectedItem)) { + int srcStrideY = width; + int srcHeightY = height; + int srcSizeY = srcStrideY * srcHeightY; + ByteBuffer srcY = ByteBuffer.allocateDirect(srcSizeY); + srcY.put(yuv, 0, srcSizeY); + + int srcStrideU = width / 2; + int srcHeightU = height / 2; + int srcSizeU = srcStrideU * srcHeightU; + ByteBuffer srcU = ByteBuffer.allocateDirect(srcSizeU); + srcU.put(yuv, srcSizeY, srcSizeU); + + int srcStrideV = width / 2; + int srcHeightV = height / 2; + int srcSizeV = srcStrideV * srcHeightV; + ByteBuffer srcV = ByteBuffer.allocateDirect(srcSizeV); + srcV.put(yuv, srcSizeY + srcSizeU, srcSizeV); + + int desSize = srcSizeY + srcSizeU + srcSizeV; + ByteBuffer des = ByteBuffer.allocateDirect(desSize); + // Passing the V plane before the U plane makes I420ToNV12 emit NV21 (VU-interleaved) data. + YuvHelper.I420ToNV12(srcY, srcStrideY, srcV, srcStrideV, srcU, srcStrideU, des, width, height); + + byte[] nv21 = new byte[desSize]; + des.position(0); + des.get(nv21); + + frameBuffer = new NV21Buffer(nv21, width, height, null); + } else if ("NV12".equals(selectedItem)) { + int srcStrideY = width; + int srcHeightY = height; + int srcSizeY = srcStrideY * srcHeightY; + ByteBuffer srcY = ByteBuffer.allocateDirect(srcSizeY); + srcY.put(yuv, 0, srcSizeY); + + int srcStrideU = width / 2; + int srcHeightU = height / 2; + int srcSizeU = srcStrideU * srcHeightU; + ByteBuffer srcU = ByteBuffer.allocateDirect(srcSizeU); + srcU.put(yuv, srcSizeY, srcSizeU); + + int srcStrideV = width / 2; + int srcHeightV = height / 2; + int srcSizeV = srcStrideV * srcHeightV; + ByteBuffer srcV = ByteBuffer.allocateDirect(srcSizeV); + srcV.put(yuv, srcSizeY + srcSizeU, srcSizeV); + + int desSize = srcSizeY + srcSizeU + srcSizeV; + ByteBuffer des = ByteBuffer.allocateDirect(desSize); + YuvHelper.I420ToNV12(srcY, 
srcStrideY, srcU, srcStrideU, srcV, srcStrideV, des, width, height); + + frameBuffer = new NV12Buffer(width, height, width, height, des, null); + } else if ("Texture2D".equals(selectedItem)) { + if (textureBufferHelper == null) { + textureBufferHelper = TextureBufferHelper.create("PushExternalVideoYUV", EglBaseProvider.instance().getRootEglBase().getEglBaseContext()); + } + if (yuvFboProgram == null) { + textureBufferHelper.invoke((Callable) () -> { + yuvFboProgram = new YuvFboProgram(); + return null; + }); + } + Integer textureId = textureBufferHelper.invoke(() -> yuvFboProgram.drawYuv(yuv, width, height)); + frameBuffer = textureBufferHelper.wrapTextureBuffer(width, height, VideoFrame.TextureBuffer.Type.RGB, textureId, new Matrix()); + } else { + // I420 type default + JavaI420Buffer i420Buffer = JavaI420Buffer.allocate(width, height); + i420Buffer.getDataY().put(yuv, 0, i420Buffer.getDataY().limit()); + i420Buffer.getDataU().put(yuv, i420Buffer.getDataY().limit(), i420Buffer.getDataU().limit()); + i420Buffer.getDataV().put(yuv, i420Buffer.getDataY().limit() + i420Buffer.getDataU().limit(), i420Buffer.getDataV().limit()); + frameBuffer = i420Buffer; + } + + + /* + * Get the monotonic time (ms) from the SDK, which should be used as the + * capture timestamp of the pushed video frame. + */ + long currentMonotonicTimeInMs = engine.getCurrentMonotonicTimeInMs(); + /* + * Create a video frame to push. + */ + VideoFrame videoFrame = new VideoFrame(frameBuffer, 0, currentMonotonicTimeInMs * 1000000); + + /* + * Pushes the external video frame to the SDK. + * + * @param frame The external video frame: ExternalVideoFrame. + * @param videoTrackId The id of the video track. + * @return + * - 0: Success. + * - < 0: Failure. + */ + int ret = engine.pushExternalVideoFrameEx(videoFrame, videoTrack); + if (ret < 0) { + Log.w(TAG, "pushExternalVideoFrameEx error code=" + ret); + } } }); videoFileReader.start(); + + /* + * Cache the video track ids, video file readers and rtc connections so they can be released when the fragment is destroyed. + */ videoTrackIds.add(videoTrack); videoFileReaders.add(videoFileReader); connections.add(connection); @@ -277,22 +447,30 @@ private void createPushingVideoTrack() { private int destroyLastPushingVideoTrack() { int lastIndex = videoTrackIds.size() - 1; - if(lastIndex < 0){ + if (lastIndex < 0) { return lastIndex; } int videoTrack = videoTrackIds.remove(lastIndex); VideoFileReader videoFileReader = videoFileReaders.remove(lastIndex); RtcConnection connection = connections.remove(lastIndex); - videoFileReader.stop(); + /* + * Destroy a custom video track. + * + * @param video_track_id The video track id created by createCustomVideoTrack + * @return + * - 0: Success. + * - < 0: Failure. 
+ */ engine.destroyCustomVideoTrack(videoTrack); + videoFileReader.stop(); engine.leaveChannelEx(connection); return lastIndex; } - private void destroyAllPushingVideoTrack(){ + private void destroyAllPushingVideoTrack() { int index = videoTrackIds.size() - 1; - while (index >= 0){ + while (index >= 0) { index = destroyLastPushingVideoTrack(); } } @@ -443,7 +621,7 @@ public void onUserOffline(int uid, int reason) { public void onRemoteVideoStats(RemoteVideoStats stats) { super.onRemoteVideoStats(stats); VideoReportLayout videoLayoutByUid = getVideoLayoutByUid(stats.uid); - if(videoLayoutByUid != null){ + if (videoLayoutByUid != null) { videoLayoutByUid.setRemoteVideoStats(stats); } } @@ -452,7 +630,7 @@ public void onRemoteVideoStats(RemoteVideoStats stats) { public void onRemoteAudioStats(RemoteAudioStats stats) { super.onRemoteAudioStats(stats); VideoReportLayout videoLayoutByUid = getVideoLayoutByUid(stats.uid); - if(videoLayoutByUid != null){ + if (videoLayoutByUid != null) { videoLayoutByUid.setRemoteAudioStats(stats); } } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessRawData.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessRawData.java index aa8755f0f..8884f1a97 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessRawData.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessRawData.java @@ -239,8 +239,7 @@ private void joinChannel(String channelId) { /**Set up to play remote sound with receiver*/ engine.setDefaultAudioRoutetoSpeakerphone(true); - int ret = engine.registerVideoFrameObserver(iVideoFrameObserver); - // Enable video module should be after calling registerVideoFrameObserver + engine.registerVideoFrameObserver(iVideoFrameObserver); engine.enableVideo(); engine.startPreview(); @@ -274,7 +273,7 @@ private void joinChannel(String channelId) { private final IVideoFrameObserver iVideoFrameObserver = new IVideoFrameObserver() { @Override - public boolean onCaptureVideoFrame(VideoFrame videoFrame) { + public boolean onCaptureVideoFrame(int sourceType, VideoFrame videoFrame) { Log.i(TAG, "OnEncodedVideoImageReceived"+Thread.currentThread().getName()); long startTime = System.currentTimeMillis(); @@ -339,17 +338,7 @@ public boolean onCaptureVideoFrame(VideoFrame videoFrame) { } @Override - public boolean onPreEncodeVideoFrame(VideoFrame videoFrame) { - return false; - } - - @Override - public boolean onScreenCaptureVideoFrame(VideoFrame videoFrame) { - return false; - } - - @Override - public boolean onPreEncodeScreenVideoFrame(VideoFrame videoFrame) { + public boolean onPreEncodeVideoFrame(int sourceType, VideoFrame videoFrame) { return false; } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideoYUV.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideoYUV.java index d8a7d050e..20b76ba04 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideoYUV.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideoYUV.java @@ -6,6 +6,7 @@ import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE; import android.content.Context; +import android.graphics.Matrix; import android.os.Bundle; import android.util.Log; import android.view.LayoutInflater; @@ -16,6 +17,7 @@ import android.widget.Button; import 
android.widget.EditText; import android.widget.FrameLayout; +import android.widget.Spinner; import androidx.annotation.NonNull; import androidx.annotation.Nullable; @@ -23,19 +25,30 @@ import com.yanzhenjie.permission.AndPermission; import com.yanzhenjie.permission.runtime.Permission; +import java.nio.ByteBuffer; +import java.util.concurrent.Callable; + import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; +import io.agora.api.example.examples.advanced.videoRender.YuvFboProgram; import io.agora.api.example.utils.CommonUtil; import io.agora.api.example.utils.TokenUtils; import io.agora.api.example.utils.VideoFileReader; +import io.agora.base.JavaI420Buffer; +import io.agora.base.NV12Buffer; +import io.agora.base.NV21Buffer; +import io.agora.base.TextureBufferHelper; +import io.agora.base.VideoFrame; +import io.agora.base.internal.video.YuvHelper; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; import io.agora.rtc2.IRtcEngineEventHandler; import io.agora.rtc2.RtcEngine; import io.agora.rtc2.RtcEngineConfig; import io.agora.rtc2.RtcEngineEx; +import io.agora.rtc2.gl.EglBaseProvider; import io.agora.rtc2.video.VideoCanvas; import io.agora.rtc2.video.VideoEncoderConfiguration; @@ -53,11 +66,16 @@ public class PushExternalVideoYUV extends BaseFragment implements View.OnClickLi private Button join; private EditText et_channel; private RtcEngineEx engine; + private Spinner sp_push_buffer_type; private int myUid; private volatile boolean joined = false; private VideoFileReader videoFileReader; + private YuvFboProgram yuvFboProgram; + private TextureBufferHelper textureBufferHelper; + + @Nullable @Override public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { @@ -73,6 +91,7 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat view.findViewById(R.id.btn_join).setOnClickListener(this); fl_local = view.findViewById(R.id.fl_local); fl_remote = view.findViewById(R.id.fl_remote); + sp_push_buffer_type = view.findViewById(R.id.sp_buffer_type); } @Override @@ -106,7 +125,7 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { */ config.mEventHandler = iRtcEngineEventHandler; config.mAudioScenario = Constants.AudioScenario.getValue(Constants.AudioScenario.DEFAULT); - config.mAreaCode = ((MainApplication)getActivity().getApplication()).getGlobalSettings().getAreaCode(); + config.mAreaCode = ((MainApplication) getActivity().getApplication()).getGlobalSettings().getAreaCode(); engine = (RtcEngineEx) RtcEngine.create(config); /** * This parameter is for reporting the usages of APIExample to agora background. 
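(For orientation, the whole PushExternalVideoYUV flow reduces to three SDK calls: enable the external video source, build a VideoFrame, and push it. The sketch below is illustrative only and is not part of the patch; it assumes an already-initialized RtcEngineEx named `engine` on which setExternalVideoSource(true, true, Constants.ExternalVideoSourceType.VIDEO_FRAME) was called before joining, plus a packed I420 byte array. The class and method names are hypothetical.)

    import java.nio.ByteBuffer;

    import io.agora.base.JavaI420Buffer;
    import io.agora.base.VideoFrame;
    import io.agora.rtc2.RtcEngineEx;

    public class PushI420Sketch {
        // Copies one packed I420 frame (Y plane, then U, then V) into an SDK
        // buffer and pushes it as an external video frame.
        public static boolean pushFrame(RtcEngineEx engine, byte[] yuv, int width, int height) {
            JavaI420Buffer i420 = JavaI420Buffer.allocate(width, height);
            ByteBuffer y = i420.getDataY();
            ByteBuffer u = i420.getDataU();
            ByteBuffer v = i420.getDataV();
            y.put(yuv, 0, y.limit());
            u.put(yuv, y.limit(), u.limit());
            v.put(yuv, y.limit() + u.limit(), v.limit());
            // The SDK's monotonic clock (ms) is the recommended capture timestamp;
            // VideoFrame takes nanoseconds.
            long captureTimeNs = engine.getCurrentMonotonicTimeInMs() * 1000000L;
            return engine.pushExternalVideoFrame(new VideoFrame(i420, 0, captureTimeNs));
        }
    }
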
@@ -122,6 +141,8 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { + "}"); /* setting the local access point if the private cloud ip was set, otherwise the config will be invalid.*/ engine.setLocalAccessPoint(((MainApplication) getActivity().getApplication()).getGlobalSettings().getPrivateCloudConfig()); + + } catch (Exception e) { e.printStackTrace(); getActivity().onBackPressed(); } @@ -131,9 +152,20 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { @Override public void onDestroy() { - if(videoFileReader != null){ + if (videoFileReader != null) { videoFileReader.stop(); } + if (textureBufferHelper != null) { + textureBufferHelper.invoke(() -> { + if (yuvFboProgram != null) { + yuvFboProgram.release(); + yuvFboProgram = null; + } + return null; + }); + textureBufferHelper.dispose(); + textureBufferHelper = null; + } /**leaveChannel and Destroy the RtcEngine instance*/ if (engine != null) { @@ -187,7 +219,7 @@ public void onClick(View v) { } else { joined = false; join.setText(getString(R.string.join)); - if(videoFileReader != null){ + if (videoFileReader != null) { videoFileReader.stop(); } fl_remote.removeAllViews(); @@ -230,12 +262,13 @@ private void joinChannel(String channelId) { * @param pushMode * VIDEO_FRAME: Use the raw video frame. * ENCODED_VIDEO_FRAME: Use the encoded video frame.*/ - engine.setExternalVideoSource(true, false, Constants.ExternalVideoSourceType.VIDEO_FRAME); + engine.setExternalVideoSource(true, true, Constants.ExternalVideoSourceType.VIDEO_FRAME); TextureView textureView = new TextureView(getContext()); - engine.setupLocalVideo(new VideoCanvas(textureView, - Constants.RENDER_MODE_FIT, Constants.VIDEO_MIRROR_MODE_DISABLED, - Constants.VIDEO_SOURCE_CUSTOM, 0)); + VideoCanvas local = new VideoCanvas(textureView, Constants.RENDER_MODE_FIT, 0); + local.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED; + local.sourceType = Constants.VIDEO_SOURCE_CUSTOM; + engine.setupLocalVideo(local); // Add to the local container fl_local.removeAllViews(); fl_local.addView(textureView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, @@ -254,7 +287,6 @@ private void joinChannel(String channelId) { ChannelMediaOptions option = new ChannelMediaOptions(); option.autoSubscribeAudio = true; option.autoSubscribeVideo = true; - option.publishCameraTrack = false; option.publishCustomVideoTrack = true; int res = engine.joinChannel(accessToken, channelId, 0, option); if (res != 0) { @@ -278,12 +310,6 @@ private void joinChannel(String channelId) { * The SDK uses this class to report to the app on SDK runtime events. */ private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() { - /**Reports a warning during SDK runtime. - * Warning code: https://docs.agora.io/en/Voice/API%20Reference/java/classio_1_1agora_1_1rtc_1_1_i_rtc_engine_event_handler_1_1_warn_code.html*/ - @Override - public void onWarning(int warn) { - Log.w(TAG, String.format("onWarning code %d message %s", warn, RtcEngine.getErrorDescription(warn))); - } /**Occurs when a user leaves the channel. * @param stats With this callback, the application retrieves the channel information, @@ -314,9 +340,106 @@ public void run() { join.setText(getString(R.string.leave)); if (videoFileReader == null) { - videoFileReader = new VideoFileReader(requireContext(), videoFrame -> { - if(joined && engine != null){ - engine.pushExternalVideoFrame(videoFrame); + /* + * VideoFileReader cyclically reads the raw I420 buffer data of the sample.yuv file in assets. 
+ */ + videoFileReader = new VideoFileReader(requireContext(), (yuv, width, height) -> { + if (joined && engine != null) { + String selectedItem = (String) sp_push_buffer_type.getSelectedItem(); + /* + * The code below shows how to create buffers of different types. + */ + VideoFrame.Buffer frameBuffer; + if ("NV21".equals(selectedItem)) { + int srcStrideY = width; + int srcHeightY = height; + int srcSizeY = srcStrideY * srcHeightY; + ByteBuffer srcY = ByteBuffer.allocateDirect(srcSizeY); + srcY.put(yuv, 0, srcSizeY); + + int srcStrideU = width / 2; + int srcHeightU = height / 2; + int srcSizeU = srcStrideU * srcHeightU; + ByteBuffer srcU = ByteBuffer.allocateDirect(srcSizeU); + srcU.put(yuv, srcSizeY, srcSizeU); + + int srcStrideV = width / 2; + int srcHeightV = height / 2; + int srcSizeV = srcStrideV * srcHeightV; + ByteBuffer srcV = ByteBuffer.allocateDirect(srcSizeV); + srcV.put(yuv, srcSizeY + srcSizeU, srcSizeV); + + int desSize = srcSizeY + srcSizeU + srcSizeV; + ByteBuffer des = ByteBuffer.allocateDirect(desSize); + // Passing the V plane before the U plane makes I420ToNV12 emit NV21 (VU-interleaved) data. + YuvHelper.I420ToNV12(srcY, srcStrideY, srcV, srcStrideV, srcU, srcStrideU, des, width, height); + + byte[] nv21 = new byte[desSize]; + des.position(0); + des.get(nv21); + + frameBuffer = new NV21Buffer(nv21, width, height, null); + } else if ("NV12".equals(selectedItem)) { + int srcStrideY = width; + int srcHeightY = height; + int srcSizeY = srcStrideY * srcHeightY; + ByteBuffer srcY = ByteBuffer.allocateDirect(srcSizeY); + srcY.put(yuv, 0, srcSizeY); + + int srcStrideU = width / 2; + int srcHeightU = height / 2; + int srcSizeU = srcStrideU * srcHeightU; + ByteBuffer srcU = ByteBuffer.allocateDirect(srcSizeU); + srcU.put(yuv, srcSizeY, srcSizeU); + + int srcStrideV = width / 2; + int srcHeightV = height / 2; + int srcSizeV = srcStrideV * srcHeightV; + ByteBuffer srcV = ByteBuffer.allocateDirect(srcSizeV); + srcV.put(yuv, srcSizeY + srcSizeU, srcSizeV); + + int desSize = srcSizeY + srcSizeU + srcSizeV; + ByteBuffer des = ByteBuffer.allocateDirect(desSize); + YuvHelper.I420ToNV12(srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, des, width, height); + + frameBuffer = new NV12Buffer(width, height, width, height, des, null); + } else if ("Texture2D".equals(selectedItem)) { + if (textureBufferHelper == null) { + textureBufferHelper = TextureBufferHelper.create("PushExternalVideoYUV", EglBaseProvider.instance().getRootEglBase().getEglBaseContext()); + } + if (yuvFboProgram == null) { + textureBufferHelper.invoke((Callable) () -> { + yuvFboProgram = new YuvFboProgram(); + return null; + }); + } + Integer textureId = textureBufferHelper.invoke(() -> yuvFboProgram.drawYuv(yuv, width, height)); + frameBuffer = textureBufferHelper.wrapTextureBuffer(width, height, VideoFrame.TextureBuffer.Type.RGB, textureId, new Matrix()); + } else { + // I420 type default + JavaI420Buffer i420Buffer = JavaI420Buffer.allocate(width, height); + i420Buffer.getDataY().put(yuv, 0, i420Buffer.getDataY().limit()); + i420Buffer.getDataU().put(yuv, i420Buffer.getDataY().limit(), i420Buffer.getDataU().limit()); + i420Buffer.getDataV().put(yuv, i420Buffer.getDataY().limit() + i420Buffer.getDataU().limit(), i420Buffer.getDataV().limit()); + frameBuffer = i420Buffer; + } + + /* + * Get the monotonic time (ms) from the SDK, which should be used as the + * capture timestamp of the pushed video frame. + */ + long currentMonotonicTimeInMs = engine.getCurrentMonotonicTimeInMs(); + /* + * Create a video frame to push. 
+ */ + VideoFrame videoFrame = new VideoFrame(frameBuffer, 0, currentMonotonicTimeInMs * 1000000); + + /* + * Pushes the external video frame to the SDK. + */ + boolean success = engine.pushExternalVideoFrame(videoFrame); + if (!success) { + Log.w(TAG, "pushExternalVideoFrame error"); + } } }); } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ScreenSharing.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ScreenSharing.java index 1c322d70f..6e102c6f8 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ScreenSharing.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ScreenSharing.java @@ -23,11 +23,13 @@ import android.view.SurfaceView; import android.view.View; import android.view.ViewGroup; +import android.widget.AdapterView; import android.widget.Button; import android.widget.CompoundButton; import android.widget.EditText; import android.widget.FrameLayout; import android.widget.SeekBar; +import android.widget.Spinner; import android.widget.Switch; import androidx.annotation.NonNull; @@ -66,7 +68,7 @@ tipsId = R.string.screensharing ) public class ScreenSharing extends BaseFragment implements View.OnClickListener, - CompoundButton.OnCheckedChangeListener, SeekBar.OnSeekBarChangeListener { + CompoundButton.OnCheckedChangeListener, SeekBar.OnSeekBarChangeListener, AdapterView.OnItemSelectedListener { private static final String TAG = ScreenSharing.class.getSimpleName(); private static final int PROJECTION_REQ_CODE = 1 << 2; private static final int DEFAULT_SHARE_FRAME_RATE = 15; @@ -81,6 +83,7 @@ public class ScreenSharing extends BaseFragment implements View.OnClickListener, private final ScreenCaptureParameters screenCaptureParameters = new ScreenCaptureParameters(); private Intent fgServiceIntent; + private Spinner screenScenarioType; @Nullable @Override @@ -102,7 +105,9 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat screenPreview = view.findViewById(R.id.screen_preview); screenAudio = view.findViewById(R.id.screen_audio); screenAudioVolume = view.findViewById(R.id.screen_audio_volume); + screenScenarioType = view.findViewById(R.id.spinner_screen_scenario_type); + screenScenarioType.setOnItemSelectedListener(this); screenPreview.setOnCheckedChangeListener(this); screenAudio.setOnCheckedChangeListener(this); screenAudioVolume.setOnSeekBarChangeListener(this); @@ -244,20 +249,17 @@ private void startScreenSharePreview() { // Add to the local container fl_local.addView(surfaceView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); // Setup local video to render the screen sharing preview - engine.setupLocalVideo(new VideoCanvas(surfaceView, Constants.RENDER_MODE_FIT, - Constants.VIDEO_MIRROR_MODE_DISABLED, - Constants.VIDEO_SOURCE_SCREEN_PRIMARY, - 0)); + VideoCanvas local = new VideoCanvas(surfaceView, Constants.RENDER_MODE_FIT, 0); + local.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED; + local.sourceType = Constants.VIDEO_SOURCE_SCREEN_PRIMARY; + engine.setupLocalVideo(local); engine.startPreview(Constants.VideoSourceType.VIDEO_SOURCE_SCREEN_PRIMARY); } private void stopScreenSharePreview() { fl_local.removeAllViews(); - engine.setupLocalVideo(new VideoCanvas(null, Constants.RENDER_MODE_FIT, - Constants.VIDEO_MIRROR_MODE_DISABLED, - Constants.VIDEO_SOURCE_SCREEN_PRIMARY, - 0)); + engine.setupLocalVideo(new VideoCanvas(null)); 
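(Aside: the scenario spinner, the frame-rate choice and the audio-volume slider of this case all end up in one ScreenCaptureParameters instance. Below is a minimal sketch of that flow, not part of the patch, assuming an existing RtcEngine named `engine`; the field and method names follow io.agora.rtc2.ScreenCaptureParameters as used elsewhere in this file.)

    ScreenCaptureParameters params = new ScreenCaptureParameters();
    params.captureVideo = true;
    params.videoCaptureParameters.framerate = 15;            // e.g. DEFAULT_SHARE_FRAME_RATE above
    params.captureAudio = true;
    params.audioCaptureParameters.captureSignalVolume = 50;  // range [0, 100]
    engine.startScreenCapture(params);
    // After the user picks a new frame rate, apply it without restarting the capture:
    params.videoCaptureParameters.framerate = 30;
    engine.updateScreenCaptureParameters(params);
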
engine.stopPreview(Constants.VideoSourceType.VIDEO_SOURCE_SCREEN_PRIMARY); } @@ -512,6 +514,18 @@ public void onStopTrackingTouch(SeekBar seekBar) { } + @Override + public void onItemSelected(AdapterView parent, View view, int position, long id) { + if (parent == screenScenarioType) { + engine.setScreenCaptureScenario(Constants.ScreenScenarioType.valueOf(screenScenarioType.getSelectedItem().toString())); + } + } + + @Override + public void onNothingSelected(AdapterView parent) { + + } + public static class MediaProjectFgService extends Service { @Nullable @Override diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SwitchCameraScreenShare.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SwitchCameraScreenShare.java index 2f7805e67..0854c2576 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SwitchCameraScreenShare.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SwitchCameraScreenShare.java @@ -327,10 +327,10 @@ private void addScreenSharePreview() { // Add to the local container fl_screen.addView(surfaceView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); // Setup local video to render your local camera preview - engine.setupLocalVideo(new VideoCanvas(surfaceView, Constants.RENDER_MODE_FIT, - Constants.VIDEO_MIRROR_MODE_DISABLED, - Constants.VIDEO_SOURCE_SCREEN_PRIMARY, - 0)); + VideoCanvas local = new VideoCanvas(surfaceView, Constants.RENDER_MODE_FIT, 0); + local.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED; + local.sourceType = Constants.VIDEO_SOURCE_SCREEN_PRIMARY; + engine.setupLocalVideo(local); engine.startPreview(Constants.VideoSourceType.VIDEO_SOURCE_SCREEN_PRIMARY); } @@ -349,12 +349,9 @@ private void addCameraPreview() { // Add to the local container fl_camera.addView(surfaceView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); // Setup local video to render your local camera preview - engine.setupLocalVideo(new VideoCanvas( - surfaceView, - RENDER_MODE_HIDDEN, - Constants.VIDEO_MIRROR_MODE_AUTO, - Constants.VIDEO_SOURCE_CAMERA_PRIMARY, - 0)); + VideoCanvas local = new VideoCanvas(surfaceView, RENDER_MODE_HIDDEN, 0); + local.sourceType = Constants.VIDEO_SOURCE_CAMERA_PRIMARY; + engine.setupLocalVideo(local); engine.startPreview(Constants.VideoSourceType.VIDEO_SOURCE_CAMERA_PRIMARY); } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoProcessExtension.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoProcessExtension.java index c83325d87..dbdd36f11 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoProcessExtension.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoProcessExtension.java @@ -6,7 +6,6 @@ import android.content.Context; import android.os.Bundle; -import android.os.Environment; import android.util.Log; import android.view.LayoutInflater; import android.view.SurfaceView; @@ -132,7 +131,7 @@ private void resetVirtualBackground() { SegmentationProperty segproperty = new SegmentationProperty(); if (checkedId == R.id.virtual_bg_image) { backgroundSource.backgroundSourceType = VirtualBackgroundSource.BACKGROUND_IMG; - String imagePath = Environment.getExternalStorageDirectory().getPath(); + String imagePath = 
requireContext().getExternalCacheDir().getPath(); String imageName = "agora-logo.png"; FileUtils.copyFilesFromAssets(getContext(), imageName, imagePath); backgroundSource.source = imagePath + FileUtils.SEPARATOR + imageName; @@ -142,6 +141,9 @@ private void resetVirtualBackground() { } else if (checkedId == R.id.virtual_bg_blur) { backgroundSource.backgroundSourceType = VirtualBackgroundSource.BACKGROUND_BLUR; backgroundSource.blurDegree = VirtualBackgroundSource.BLUR_DEGREE_MEDIUM; + } else if (checkedId == R.id.virtual_bg_video) { + backgroundSource.backgroundSourceType = VirtualBackgroundSource.BACKGROUND_VIDEO; + backgroundSource.source = "https://agora-adc-artifacts.s3.cn-north-1.amazonaws.com.cn/resources/sample.mp4"; } engine.enableVirtualBackground(true, backgroundSource, segproperty); }else{ @@ -200,6 +202,8 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) + "}"); /* setting the local access point if the private cloud ip was set, otherwise the config will be invalid.*/ engine.setLocalAccessPoint(((MainApplication) getActivity().getApplication()).getGlobalSettings().getPrivateCloudConfig()); + + engine.enableExtension("agora_video_filters_clear_vision", "clear_vision", true); } catch (Exception e) { diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java index dea59d728..63b0ed474 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java @@ -27,6 +27,10 @@ import static io.agora.rtc2.Constants.ULTRA_HIGH_QUALITY_VOICE; import static io.agora.rtc2.Constants.VOICE_BEAUTIFIER_OFF; import static io.agora.rtc2.Constants.VOICE_CHANGER_BASS; +import static io.agora.rtc2.Constants.VOICE_CHANGER_CARTOON; +import static io.agora.rtc2.Constants.VOICE_CHANGER_CHILDLIKE; +import static io.agora.rtc2.Constants.VOICE_CHANGER_CHIPMUNK; +import static io.agora.rtc2.Constants.VOICE_CHANGER_DARTH_VADER; import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_BOY; import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_GIRL; import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_HULK; @@ -34,9 +38,16 @@ import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_PIGKING; import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_SISTER; import static io.agora.rtc2.Constants.VOICE_CHANGER_EFFECT_UNCLE; +import static io.agora.rtc2.Constants.VOICE_CHANGER_GIRLISH_MAN; +import static io.agora.rtc2.Constants.VOICE_CHANGER_GROOT; +import static io.agora.rtc2.Constants.VOICE_CHANGER_IRON_LADY; +import static io.agora.rtc2.Constants.VOICE_CHANGER_MONSTER; import static io.agora.rtc2.Constants.VOICE_CHANGER_NEUTRAL; +import static io.agora.rtc2.Constants.VOICE_CHANGER_PHONE_OPERATOR; +import static io.agora.rtc2.Constants.VOICE_CHANGER_SHIN_CHAN; import static io.agora.rtc2.Constants.VOICE_CHANGER_SOLID; import static io.agora.rtc2.Constants.VOICE_CHANGER_SWEET; +import static io.agora.rtc2.Constants.VOICE_CHANGER_TRANSFORMERS; import static io.agora.rtc2.Constants.VOICE_CONVERSION_OFF; import android.content.Context; @@ -97,7 +108,7 @@ public class VoiceEffects extends BaseFragment implements View.OnClickListener, chatBeautifier, timbreTransformation, voiceChanger, styleTransformation, roomAcoustics, pitchCorrection, _pitchModeOption, _pitchValueOption, voiceConversion, customBandFreq, 
customReverbKey; private ViewGroup _voice3DLayout, _pitchModeLayout, _pitchValueLayout; - private SeekBar _voice3DCircle, customPitch, customBandGain, customReverbValue; + private SeekBar _voice3DCircle, customPitch, customBandGain, customReverbValue, customVoiceFormant; private AudioSeatManager audioSeatManager; @@ -159,10 +170,12 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat customBandGain = view.findViewById(R.id.audio_custom_band_gain); // engine.setLocalVoiceEqualization() customReverbKey = view.findViewById(R.id.audio_custom_reverb_key); customReverbValue = view.findViewById(R.id.audio_custom_reverb_value); //engine.setLocalVoiceReverb() + customVoiceFormant = view.findViewById(R.id.audio_voice_formant_value); //engine.setLocalVoiceFormant() customPitch.setOnSeekBarChangeListener(this); customBandGain.setOnSeekBarChangeListener(this); customReverbValue.setOnSeekBarChangeListener(this); + customVoiceFormant.setOnSeekBarChangeListener(this); customBandFreq.setOnItemSelectedListener(this); customReverbKey.setOnItemSelectedListener(this); @@ -194,6 +207,7 @@ private void resetControlLayoutByJoined() { customBandGain.setEnabled(joined); customReverbKey.setEnabled(joined); customReverbValue.setEnabled(joined); + customVoiceFormant.setEnabled(joined); chatBeautifier.setSelection(0); @@ -206,6 +220,7 @@ private void resetControlLayoutByJoined() { customPitch.setProgress(0); customBandGain.setProgress(0); customReverbValue.setProgress(0); + customVoiceFormant.setProgress(50); } @Override @@ -617,6 +632,28 @@ private int getVoiceConversionValue(String label) { return VOICE_CHANGER_SOLID; case "VOICE_CHANGER_BASS": return VOICE_CHANGER_BASS; + case "VOICE_CHANGER_CARTOON": + return VOICE_CHANGER_CARTOON; + case "VOICE_CHANGER_CHILDLIKE": + return VOICE_CHANGER_CHILDLIKE; + case "VOICE_CHANGER_PHONE_OPERATOR": + return VOICE_CHANGER_PHONE_OPERATOR; + case "VOICE_CHANGER_MONSTER": + return VOICE_CHANGER_MONSTER; + case "VOICE_CHANGER_TRANSFORMERS": + return VOICE_CHANGER_TRANSFORMERS; + case "VOICE_CHANGER_GROOT": + return VOICE_CHANGER_GROOT; + case "VOICE_CHANGER_DARTH_VADER": + return VOICE_CHANGER_DARTH_VADER; + case "VOICE_CHANGER_IRON_LADY": + return VOICE_CHANGER_IRON_LADY; + case "VOICE_CHANGER_SHIN_CHAN": + return VOICE_CHANGER_SHIN_CHAN; + case "VOICE_CHANGER_GIRLISH_MAN": + return VOICE_CHANGER_GIRLISH_MAN; + case "VOICE_CHANGER_CHIPMUNK": + return VOICE_CHANGER_CHIPMUNK; case "VOICE_CONVERSION_OFF": default: return VOICE_CONVERSION_OFF; @@ -743,6 +780,9 @@ public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { + if (!fromUser) { + return; + } if(seekBar == _voice3DCircle){ int cicle = (int) (1 + 59 * progress * 1.0f / seekBar.getMax()); // [1,60], 10 default @@ -771,6 +811,10 @@ public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { value = (int) (100 * progress * 1.0f / seekBar.getMax()); } engine.setLocalVoiceReverb(reverbKey, value); + } else if (seekBar == customVoiceFormant) { + // [-1, 1] + double value = (progress - 50) * 1.0f / 100; + engine.setLocalVoiceFormant(value); } } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java index 7df8c25d8..00db31803 100644 --- 
a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java @@ -1,15 +1,21 @@ package io.agora.api.example.examples.advanced.beauty; +import android.graphics.Matrix; +import android.opengl.GLES11Ext; +import android.opengl.GLES20; import android.os.Bundle; import android.util.Log; +import android.util.Size; import android.view.LayoutInflater; import android.view.TextureView; import android.view.View; import android.view.ViewGroup; +import android.view.ViewParent; import androidx.annotation.NonNull; import androidx.annotation.Nullable; +import java.nio.ByteBuffer; import java.util.Locale; import java.util.Random; @@ -20,17 +26,20 @@ import io.agora.api.example.utils.TokenUtils; import io.agora.base.TextureBufferHelper; import io.agora.base.VideoFrame; +import io.agora.base.internal.video.YuvHelper; import io.agora.beauty.base.IBeautyByteDance; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; import io.agora.rtc2.IRtcEngineEventHandler; import io.agora.rtc2.RtcEngine; +import io.agora.rtc2.gl.EglBaseProvider; +import io.agora.rtc2.video.ColorEnhanceOptions; import io.agora.rtc2.video.IVideoFrameObserver; import io.agora.rtc2.video.VideoCanvas; public class ByteDanceBeauty extends BaseFragment { private static final String TAG = "SceneTimeBeauty"; - + private static final Matrix IDENTITY_MATRIX = new Matrix(); private IBeautyByteDance iBeautyByteDance; private FragmentBeautyBytedanceBinding mBinding; private RtcEngine rtcEngine; @@ -47,6 +56,8 @@ public class ByteDanceBeauty extends BaseFragment { private volatile boolean isDestroyed = false; private int mFrameRotation; + private ByteBuffer nv21ByteBuffer; + private byte[] nv21ByteArray; @Nullable @Override @@ -67,27 +78,54 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat initVideoView(); initRtcEngine(); joinChannel(); + mBinding.switchVideoEffect.setOnCheckedChangeListener((buttonView, isChecked) -> + { + ColorEnhanceOptions options = new ColorEnhanceOptions(); + options.strengthLevel = (float) 0.5f; + options.skinProtectLevel = (float) 0.5f; + rtcEngine.setColorEnhanceOptions(isChecked, options); + }); } @Override public void onDestroyView() { super.onDestroyView(); - isDestroyed = true; + if (rtcEngine != null) { + rtcEngine.leaveChannel(); + } if (mTextureBufferHelper != null) { mTextureBufferHelper.invoke(() -> { iBeautyByteDance.release(); iBeautyByteDance = null; return null; }); - mTextureBufferHelper.dispose(); + boolean disposeSuccess = false; + while (!disposeSuccess) { + try { + mTextureBufferHelper.dispose(); + disposeSuccess = true; + } catch (Exception e) { + try { + Thread.sleep(50); + } catch (InterruptedException ex) { + // do nothing + } + } + } mTextureBufferHelper = null; } - if (rtcEngine != null) { - rtcEngine.leaveChannel(); - } RtcEngine.destroy(); } + @Override + protected void onBackPressed() { + isDestroyed = true; + mBinding.fullVideoContainer.removeAllViews(); + mBinding.smallVideoContainer.removeAllViews(); + super.onBackPressed(); + + } + private void initVideoView() { mBinding.cbFaceBeautify.setOnCheckedChangeListener((buttonView, isChecked) -> { if (iBeautyByteDance == null) { @@ -195,65 +233,17 @@ public void onRemoteVideoStats(RemoteVideoStats stats) { if (rtcEngine == null) { return; } + rtcEngine.enableExtension("agora_video_filters_clear_vision", "clear_vision", true); mVideoFrameObserver = 
new IVideoFrameObserver() { @Override - public boolean onCaptureVideoFrame(VideoFrame videoFrame) { - if (isDestroyed) { - return false; - } - VideoFrame.Buffer buffer = videoFrame.getBuffer(); - if (!(buffer instanceof VideoFrame.TextureBuffer)) { - return false; - } - - VideoFrame.TextureBuffer texBuffer = (VideoFrame.TextureBuffer) buffer; - - if (mTextureBufferHelper == null) { - doOnBeautyCreatingBegin(); - mTextureBufferHelper = TextureBufferHelper.create("STRender", texBuffer.getEglBaseContext()); - mTextureBufferHelper.invoke(() -> { - iBeautyByteDance = IBeautyByteDance.create(getContext()); - return null; - }); - doOnBeautyCreatingEnd(); - } - - int width = texBuffer.getWidth(); - int height = texBuffer.getHeight(); - - Integer processTexId = mTextureBufferHelper.invoke(() -> iBeautyByteDance.process( - texBuffer.getTextureId(), - width, height, mFrameRotation - )); - - // drag one frame to avoid reframe when switching camera. - if(mFrameRotation != videoFrame.getRotation()){ - mFrameRotation = videoFrame.getRotation(); - return false; - } - - VideoFrame.TextureBuffer processBuffer = mTextureBufferHelper.wrapTextureBuffer( - width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId, - texBuffer.getTransformMatrix()); - - videoFrame.replaceBuffer(processBuffer, mFrameRotation, videoFrame.getTimestampNs()); - return true; + public boolean onCaptureVideoFrame(int sourceType, VideoFrame videoFrame) { + return processBeauty(videoFrame); } @Override - public boolean onPreEncodeVideoFrame(VideoFrame videoFrame) { - return false; - } - - @Override - public boolean onScreenCaptureVideoFrame(VideoFrame videoFrame) { - return false; - } - - @Override - public boolean onPreEncodeScreenVideoFrame(VideoFrame videoFrame) { + public boolean onPreEncodeVideoFrame(int sourceType, VideoFrame videoFrame) { return false; } @@ -301,6 +291,89 @@ public int getObservedFramePosition() { } } + private boolean processBeauty(VideoFrame videoFrame) { + if (isDestroyed) { + return false; + } + VideoFrame.Buffer buffer = videoFrame.getBuffer(); + if (mTextureBufferHelper == null) { + doOnBeautyCreatingBegin(); + mTextureBufferHelper = TextureBufferHelper.create("ByteDanceProcess", EglBaseProvider.instance().getRootEglBase().getEglBaseContext()); + mTextureBufferHelper.invoke(() -> { + iBeautyByteDance = IBeautyByteDance.create(getContext()); + return null; + }); + doOnBeautyCreatingEnd(); + } + + int width = buffer.getWidth(); + int height = buffer.getHeight(); + + + int processTexId = -1; + Matrix transformMatrix = IDENTITY_MATRIX; + int rotation = videoFrame.getRotation(); + boolean skipFrame = false; + if (buffer instanceof VideoFrame.TextureBuffer) { + VideoFrame.TextureBuffer texBuffer = (VideoFrame.TextureBuffer) buffer; + transformMatrix = texBuffer.getTransformMatrix(); + Size captureOriginSize = VideoCaptureUtils.getCaptureOriginSize(texBuffer); + processTexId = mTextureBufferHelper.invoke(() -> iBeautyByteDance.process( + texBuffer.getTextureId(), + texBuffer.getType() == VideoFrame.TextureBuffer.Type.OES ? 
GLES11Ext.GL_TEXTURE_EXTERNAL_OES : GLES20.GL_TEXTURE_2D, + captureOriginSize.getWidth(), captureOriginSize.getHeight(), rotation + )); + if (nv21ByteBuffer != null) { + nv21ByteBuffer.clear(); + nv21ByteBuffer = null; + skipFrame = true; + } + } else { + // Obtain nv21 pixel data + int nv21Size = (int) (width * height * 3.0f / 2.0f + 0.5f); + if (nv21ByteBuffer == null || nv21ByteBuffer.capacity() != nv21Size) { + if (nv21ByteBuffer != null) { + nv21ByteBuffer.clear(); + } + nv21ByteBuffer = ByteBuffer.allocateDirect(nv21Size); + nv21ByteArray = new byte[nv21Size]; + skipFrame = true; + } + + VideoFrame.I420Buffer i420Buffer = buffer.toI420(); + YuvHelper.I420ToNV12(i420Buffer.getDataY(), i420Buffer.getStrideY(), + i420Buffer.getDataV(), i420Buffer.getStrideV(), + i420Buffer.getDataU(), i420Buffer.getStrideU(), + nv21ByteBuffer, width, height); + nv21ByteBuffer.position(0); + nv21ByteBuffer.get(nv21ByteArray); + i420Buffer.release(); + if(mTextureBufferHelper != null){ + processTexId = mTextureBufferHelper.invoke(() -> iBeautyByteDance.process( + nv21ByteArray, + width, height, rotation + )); + } + } + + // drag one frame to avoid reframe when switching camera. + if (mFrameRotation != rotation) { + mFrameRotation = rotation; + skipFrame = true; + } + + if(processTexId < 0 || skipFrame){ + return false; + } + if(mTextureBufferHelper != null){ + VideoFrame.TextureBuffer processBuffer = mTextureBufferHelper.wrapTextureBuffer( + width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId, + transformMatrix); + videoFrame.replaceBuffer(processBuffer, mFrameRotation, videoFrame.getTimestampNs()); + } + return true; + } + private void joinChannel() { int uid = new Random(System.currentTimeMillis()).nextInt(1000) + 10000; ChannelMediaOptions options = new ChannelMediaOptions(); @@ -315,7 +388,9 @@ private void joinChannel() { mLocalVideoLayout = new VideoReportLayout(requireContext()); TextureView videoView = new TextureView(requireContext()); - rtcEngine.setupLocalVideo(new VideoCanvas(videoView, Constants.RENDER_MODE_HIDDEN)); + VideoCanvas local = new VideoCanvas(videoView, Constants.RENDER_MODE_HIDDEN, 0); + local.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED; + rtcEngine.setupLocalVideo(local); mLocalVideoLayout.addView(videoView); rtcEngine.startPreview(); @@ -324,24 +399,47 @@ private void joinChannel() { private void updateVideoLayouts(boolean isLocalFull) { this.isLocalFull = isLocalFull; - mBinding.fullVideoContainer.removeAllViews(); - mBinding.smallVideoContainer.removeAllViews(); if (isLocalFull) { if (mLocalVideoLayout != null) { - mBinding.fullVideoContainer.addView(mLocalVideoLayout); + ViewParent parent = mLocalVideoLayout.getParent(); + if (parent instanceof ViewGroup && parent != mBinding.fullVideoContainer) { + ((ViewGroup) parent).removeView(mLocalVideoLayout); + mBinding.fullVideoContainer.addView(mLocalVideoLayout); + } else if (parent == null) { + mBinding.fullVideoContainer.addView(mLocalVideoLayout); + } } if (mRemoteVideoLayout != null) { mRemoteVideoLayout.getChildAt(0).setOnClickListener(v -> updateVideoLayouts(!ByteDanceBeauty.this.isLocalFull)); - mBinding.smallVideoContainer.addView(mRemoteVideoLayout); + ViewParent parent = mRemoteVideoLayout.getParent(); + if (parent instanceof ViewGroup && parent != mBinding.smallVideoContainer) { + ((ViewGroup) parent).removeView(mRemoteVideoLayout); + mBinding.smallVideoContainer.addView(mRemoteVideoLayout); + } else if(parent == null){ + mBinding.smallVideoContainer.addView(mRemoteVideoLayout); + } } } else { if 
(mLocalVideoLayout != null) { mLocalVideoLayout.getChildAt(0).setOnClickListener(v -> updateVideoLayouts(!ByteDanceBeauty.this.isLocalFull)); - mBinding.smallVideoContainer.addView(mLocalVideoLayout); + ViewParent parent = mLocalVideoLayout.getParent(); + if (parent instanceof ViewGroup && parent != mBinding.smallVideoContainer) { + ((ViewGroup) parent).removeView(mLocalVideoLayout); + mBinding.smallVideoContainer.addView(mLocalVideoLayout); + } else if(parent == null){ + mBinding.smallVideoContainer.addView(mLocalVideoLayout); + } } + if (mRemoteVideoLayout != null) { - mBinding.fullVideoContainer.addView(mRemoteVideoLayout); + ViewParent parent = mRemoteVideoLayout.getParent(); + if (parent instanceof ViewGroup && parent != mBinding.fullVideoContainer) { + ((ViewGroup) parent).removeView(mRemoteVideoLayout); + mBinding.fullVideoContainer.addView(mRemoteVideoLayout); + } else if(parent == null) { + mBinding.fullVideoContainer.addView(mRemoteVideoLayout); + } } } } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeauty.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeauty.java index 6c4681977..8bd2dcd3a 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeauty.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeauty.java @@ -1,11 +1,15 @@ package io.agora.api.example.examples.advanced.beauty; +import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE; + +import android.graphics.Matrix; import android.os.Bundle; import android.util.Log; import android.view.LayoutInflater; import android.view.TextureView; import android.view.View; import android.view.ViewGroup; +import android.view.ViewParent; import androidx.annotation.NonNull; import androidx.annotation.Nullable; @@ -14,6 +18,7 @@ import java.util.Locale; import java.util.Random; +import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.widget.VideoReportLayout; @@ -27,8 +32,11 @@ import io.agora.rtc2.Constants; import io.agora.rtc2.IRtcEngineEventHandler; import io.agora.rtc2.RtcEngine; +import io.agora.rtc2.gl.EglBaseProvider; +import io.agora.rtc2.video.ColorEnhanceOptions; import io.agora.rtc2.video.IVideoFrameObserver; import io.agora.rtc2.video.VideoCanvas; +import io.agora.rtc2.video.VideoEncoderConfiguration; public class FaceUnityBeauty extends BaseFragment { private static final String TAG = "SceneTimeBeauty"; @@ -42,7 +50,6 @@ public class FaceUnityBeauty extends BaseFragment { private boolean isFrontCamera = true; private TextureBufferHelper mTextureBufferHelper; - private boolean isSingleInput = true; private VideoReportLayout mLocalVideoLayout; private VideoReportLayout mRemoteVideoLayout; @@ -72,27 +79,53 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat initVideoView(); initRtcEngine(); joinChannel(); + mBinding.switchVideoEffect.setOnCheckedChangeListener((buttonView, isChecked) -> + { + ColorEnhanceOptions options = new ColorEnhanceOptions(); + options.strengthLevel = (float) 0.5f; + options.skinProtectLevel = (float) 0.5f; + rtcEngine.setColorEnhanceOptions(isChecked, options); + }); } @Override public void onDestroyView() { super.onDestroyView(); isDestroyed = true; + if (rtcEngine != null) { + rtcEngine.leaveChannel(); + } if (mTextureBufferHelper != 
null) { mTextureBufferHelper.invoke(() -> { iBeautyFaceUnity.release(); iBeautyFaceUnity = null; return null; }); - mTextureBufferHelper.dispose(); + boolean disposeSuccess = false; + while (!disposeSuccess) { + try { + mTextureBufferHelper.dispose(); + disposeSuccess = true; + } catch (Exception e) { + try { + Thread.sleep(50); + } catch (InterruptedException ex) { + // do nothing + } + } + } mTextureBufferHelper = null; } - if (rtcEngine != null) { - rtcEngine.leaveChannel(); - } RtcEngine.destroy(); } + @Override + protected void onBackPressed() { + mBinding.fullVideoContainer.removeAllViews(); + mBinding.smallVideoContainer.removeAllViews(); + super.onBackPressed(); + } + private void initVideoView() { mBinding.cbFaceBeautify.setOnCheckedChangeListener((buttonView, isChecked) -> { if (iBeautyFaceUnity == null) { @@ -122,11 +155,6 @@ private void initVideoView() { rtcEngine.switchCamera(); isFrontCamera = !isFrontCamera; }); - mBinding.tvBeautyInput.setText(isSingleInput ? R.string.beauty_input_single : R.string.beauty_input_double); - mBinding.tvBeautyInput.setOnClickListener(v -> { - isSingleInput = !isSingleInput; - mBinding.tvBeautyInput.setText(isSingleInput ? R.string.beauty_input_single : R.string.beauty_input_double); - }); mBinding.smallVideoContainer.setOnClickListener(v -> updateVideoLayouts(!FaceUnityBeauty.this.isLocalFull)); } @@ -205,61 +233,16 @@ public void onRemoteVideoStats(RemoteVideoStats stats) { if (rtcEngine == null) { return; } - + rtcEngine.enableExtension("agora_video_filters_clear_vision", "clear_vision", true); mVideoFrameObserver = new IVideoFrameObserver() { @Override - public boolean onCaptureVideoFrame(VideoFrame videoFrame) { - if (isDestroyed) { - return true; - } - VideoFrame.Buffer buffer = videoFrame.getBuffer(); - if (!(buffer instanceof VideoFrame.TextureBuffer)) { - return true; - } - - VideoFrame.TextureBuffer texBuffer = (VideoFrame.TextureBuffer) buffer; - - if (mTextureBufferHelper == null) { - doOnBeautyCreatingBegin(); - mTextureBufferHelper = TextureBufferHelper.create("STRender", texBuffer.getEglBaseContext()); - mTextureBufferHelper.invoke(() -> { - iBeautyFaceUnity = IBeautyFaceUnity.create(getContext()); - return null; - }); - doOnBeautyCreatingEnd(); - } - - VideoFrame.TextureBuffer processBuffer; - if (isSingleInput) { - processBuffer = processSingleInput(texBuffer); - } else { - processBuffer = processDoubleInput(texBuffer); - } - if(processBuffer == null){ - return true; - } - // drag one frame to avoid reframe when switching camera. 
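(Aside: the "drag one frame" comment in the removed code above, which the new processBeauty implementations keep, really means drop one frame: when the camera is switched, the rotation reported with the frames changes, and rendering the first frame with the stale rotation shows a visible jump. A standalone sketch of that guard, not part of the patch, with names mirroring those used here:)

    private int mFrameRotation;

    // Returns true exactly once when the reported rotation changes so the
    // caller can drop that single frame instead of rendering it mis-rotated.
    private boolean shouldDropOnRotationChange(VideoFrame videoFrame) {
        int rotation = videoFrame.getRotation();
        if (mFrameRotation != rotation) {
            mFrameRotation = rotation;
            return true;
        }
        return false;
    }
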
- if(mFrameRotation != videoFrame.getRotation()){ - mFrameRotation = videoFrame.getRotation(); - return false; - } - videoFrame.replaceBuffer(processBuffer, mFrameRotation, videoFrame.getTimestampNs()); - return true; - } - - @Override - public boolean onPreEncodeVideoFrame(VideoFrame videoFrame) { - return false; + public boolean onCaptureVideoFrame(int sourceType, VideoFrame videoFrame) { + return processBeauty(videoFrame); } @Override - public boolean onScreenCaptureVideoFrame(VideoFrame videoFrame) { - return false; - } - - @Override - public boolean onPreEncodeScreenVideoFrame(VideoFrame videoFrame) { + public boolean onPreEncodeVideoFrame(int sourceType, VideoFrame videoFrame) { return false; } @@ -299,6 +282,13 @@ public int getObservedFramePosition() { } }; rtcEngine.registerVideoFrameObserver(mVideoFrameObserver); + // Setup video encoding configs + rtcEngine.setVideoEncoderConfiguration(new VideoEncoderConfiguration( + ((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingDimensionObject(), + VideoEncoderConfiguration.FRAME_RATE.valueOf(((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingFrameRate()), + STANDARD_BITRATE, + VideoEncoderConfiguration.ORIENTATION_MODE.valueOf(((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingOrientation()) + )); rtcEngine.enableVideo(); rtcEngine.disableAudio(); @@ -307,27 +297,29 @@ public int getObservedFramePosition() { } } - private VideoFrame.TextureBuffer processSingleInput(VideoFrame.TextureBuffer texBuffer) { - - int width = texBuffer.getWidth(); - int height = texBuffer.getHeight(); - - Integer processTexId = mTextureBufferHelper.invoke(() -> iBeautyFaceUnity.process( - texBuffer.getTextureId(), - width, height - )); - - return mTextureBufferHelper.wrapTextureBuffer( - width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId, - texBuffer.getTransformMatrix()); - } + private boolean processBeauty(VideoFrame videoFrame) { + if (isDestroyed) { + return true; + } - private VideoFrame.TextureBuffer processDoubleInput(VideoFrame.TextureBuffer texBuffer) { + if (mTextureBufferHelper == null) { + doOnBeautyCreatingBegin(); + mTextureBufferHelper = TextureBufferHelper.create("STRender", EglBaseProvider.instance().getRootEglBase().getEglBaseContext()); + mTextureBufferHelper.invoke(() -> { + iBeautyFaceUnity = IBeautyFaceUnity.create(getContext()); + return null; + }); + doOnBeautyCreatingEnd(); + } - int textureId = texBuffer.getTextureId(); - int width = texBuffer.getWidth(); - int height = texBuffer.getHeight(); + VideoFrame.Buffer buffer = videoFrame.getBuffer(); + int width = buffer.getWidth(); + int height = buffer.getHeight(); + int processTexId = -1; + Matrix transformMatrix = new Matrix(); + int rotation = videoFrame.getRotation(); + boolean skipFrame = false; int nv21Size = (int) (width * height * 3.0f / 2.0f + 0.5f); if (nv21ByteBuffer == null || nv21ByteBuffer.capacity() != nv21Size) { if (nv21ByteBuffer != null) { @@ -335,10 +327,10 @@ private VideoFrame.TextureBuffer processDoubleInput(VideoFrame.TextureBuffer tex } nv21ByteBuffer = ByteBuffer.allocateDirect(nv21Size); nv21ByteArray = new byte[nv21Size]; + skipFrame = true; } - - VideoFrame.I420Buffer i420Buffer = texBuffer.toI420(); + VideoFrame.I420Buffer i420Buffer = buffer.toI420(); YuvHelper.I420ToNV12(i420Buffer.getDataY(), i420Buffer.getStrideY(), i420Buffer.getDataV(), i420Buffer.getStrideV(), i420Buffer.getDataU(), i420Buffer.getStrideU(), @@ -347,14 +339,32 @@ private 
VideoFrame.TextureBuffer processDoubleInput(VideoFrame.TextureBuffer tex nv21ByteBuffer.get(nv21ByteArray); i420Buffer.release(); - Integer processTexId = mTextureBufferHelper.invoke(() -> iBeautyFaceUnity.process( - nv21ByteArray, - textureId, - width, height - )); + if (mTextureBufferHelper != null) { + processTexId = mTextureBufferHelper.invoke(() -> iBeautyFaceUnity.process( + nv21ByteArray, + width, height, + videoFrame.getSourceType() == VideoFrame.SourceType.kFrontCamera + )); + } + + + // Skip one frame to avoid rendering with a stale rotation when the camera is switched. + if (mFrameRotation != rotation) { + mFrameRotation = rotation; + skipFrame = true; + } + + if (processTexId < 0 || skipFrame) { + return false; + } + + if (mTextureBufferHelper != null) { + VideoFrame.TextureBuffer textureBuffer = mTextureBufferHelper.wrapTextureBuffer( + width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId, transformMatrix); + videoFrame.replaceBuffer(textureBuffer, mFrameRotation, videoFrame.getTimestampNs()); + } - return mTextureBufferHelper.wrapTextureBuffer( - width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId, texBuffer.getTransformMatrix()); + return true; } private void joinChannel() { @@ -371,7 +381,9 @@ private void joinChannel() { mLocalVideoLayout = new VideoReportLayout(requireContext()); TextureView videoView = new TextureView(requireContext()); - rtcEngine.setupLocalVideo(new VideoCanvas(videoView, Constants.RENDER_MODE_HIDDEN)); + VideoCanvas local = new VideoCanvas(videoView, Constants.RENDER_MODE_HIDDEN, 0); + local.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED; + rtcEngine.setupLocalVideo(local); mLocalVideoLayout.addView(videoView); rtcEngine.startPreview(); @@ -380,24 +392,47 @@ private void joinChannel() { private void updateVideoLayouts(boolean isLocalFull) { this.isLocalFull = isLocalFull; - mBinding.fullVideoContainer.removeAllViews(); - mBinding.smallVideoContainer.removeAllViews(); if (isLocalFull) { if (mLocalVideoLayout != null) { - mBinding.fullVideoContainer.addView(mLocalVideoLayout); + ViewParent parent = mLocalVideoLayout.getParent(); + if (parent instanceof ViewGroup && parent != mBinding.fullVideoContainer) { + ((ViewGroup) parent).removeView(mLocalVideoLayout); + mBinding.fullVideoContainer.addView(mLocalVideoLayout); + } else if (parent == null) { + mBinding.fullVideoContainer.addView(mLocalVideoLayout); + } } if (mRemoteVideoLayout != null) { mRemoteVideoLayout.getChildAt(0).setOnClickListener(v -> updateVideoLayouts(!FaceUnityBeauty.this.isLocalFull)); - mBinding.smallVideoContainer.addView(mRemoteVideoLayout); + ViewParent parent = mRemoteVideoLayout.getParent(); + if (parent instanceof ViewGroup && parent != mBinding.smallVideoContainer) { + ((ViewGroup) parent).removeView(mRemoteVideoLayout); + mBinding.smallVideoContainer.addView(mRemoteVideoLayout); + } else if (parent == null) { + mBinding.smallVideoContainer.addView(mRemoteVideoLayout); + } } } else { if (mLocalVideoLayout != null) { mLocalVideoLayout.getChildAt(0).setOnClickListener(v -> updateVideoLayouts(!FaceUnityBeauty.this.isLocalFull)); - mBinding.smallVideoContainer.addView(mLocalVideoLayout); + ViewParent parent = mLocalVideoLayout.getParent(); + if (parent instanceof ViewGroup && parent != mBinding.smallVideoContainer) { + ((ViewGroup) parent).removeView(mLocalVideoLayout); + mBinding.smallVideoContainer.addView(mLocalVideoLayout); + } else if (parent == null) { + mBinding.smallVideoContainer.addView(mLocalVideoLayout); + } } + if (mRemoteVideoLayout != null) { -
mBinding.fullVideoContainer.addView(mRemoteVideoLayout); + ViewParent parent = mRemoteVideoLayout.getParent(); + if (parent instanceof ViewGroup && parent != mBinding.fullVideoContainer) { + ((ViewGroup) parent).removeView(mRemoteVideoLayout); + mBinding.fullVideoContainer.addView(mRemoteVideoLayout); + } else if (parent == null) { + mBinding.fullVideoContainer.addView(mRemoteVideoLayout); + } } } } @@ -416,11 +451,4 @@ private void doOnBeautyCreatingEnd() { }); } - private void doOnBeautyReleasingBegin() { - Log.d(TAG, "doOnBeautyReleasingBegin..."); - } - - private void doOnBeautyReleasingEnd() { - Log.d(TAG, "doOnBeautyReleasingEnd."); - } } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SceneTimeBeauty.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SceneTimeBeauty.java index f4426987a..cee00eeb6 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SceneTimeBeauty.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SceneTimeBeauty.java @@ -1,5 +1,7 @@ package io.agora.api.example.examples.advanced.beauty; +import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE; + import android.graphics.Matrix; import android.opengl.GLES11Ext; import android.opengl.GLES20; @@ -9,6 +11,7 @@ import android.view.TextureView; import android.view.View; import android.view.ViewGroup; +import android.view.ViewParent; import androidx.annotation.NonNull; import androidx.annotation.Nullable; @@ -17,6 +20,7 @@ import java.util.Locale; import java.util.Random; +import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.widget.VideoReportLayout; @@ -24,14 +28,18 @@ import io.agora.api.example.utils.TokenUtils; import io.agora.base.TextureBufferHelper; import io.agora.base.VideoFrame; +import io.agora.base.internal.video.RendererCommon; import io.agora.base.internal.video.YuvHelper; import io.agora.beauty.base.IBeautySenseTime; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; import io.agora.rtc2.IRtcEngineEventHandler; import io.agora.rtc2.RtcEngine; +import io.agora.rtc2.gl.EglBaseProvider; +import io.agora.rtc2.video.ColorEnhanceOptions; import io.agora.rtc2.video.IVideoFrameObserver; import io.agora.rtc2.video.VideoCanvas; +import io.agora.rtc2.video.VideoEncoderConfiguration; public class SceneTimeBeauty extends BaseFragment { private static final String TAG = "SceneTimeBeauty"; @@ -40,13 +48,8 @@ public class SceneTimeBeauty extends BaseFragment { private FragmentBeautyScenetimeBinding mBinding; private RtcEngine rtcEngine; private String channelId; - private ByteBuffer nv21ByteBuffer; - private byte[] nv21ByteArray; - private boolean isFrontCamera = true; - private TextureBufferHelper mDoubleTextureBufferHelper; - private TextureBufferHelper mSingleTextureBufferHelper; - private boolean isSingleInput = true; + private boolean isFrontCamera = true; private VideoReportLayout mLocalVideoLayout; private VideoReportLayout mRemoteVideoLayout; @@ -54,8 +57,11 @@ public class SceneTimeBeauty extends BaseFragment { private IVideoFrameObserver mVideoFrameObserver; private IRtcEngineEventHandler mRtcEngineEventHandler; + // Parameters required by the beauty processing + private TextureBufferHelper mTextureBufferHelper; + private ByteBuffer nv21ByteBuffer; + private byte[] nv21ByteArray; private volatile boolean
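Both beauty cases now share a single TextureBufferHelper created lazily on the SDK's root EGL context, replacing the removed single/double-input helper pair. A minimal sketch of that lifecycle, using the field names of this file (ensureTextureBufferHelper is a hypothetical extraction shown for illustration; the create/dispose calls themselves appear in the hunks below):

    private TextureBufferHelper ensureTextureBufferHelper() {
        if (mTextureBufferHelper == null) {
            // One render thread bound to the SDK's root EGL context, so the
            // wrapped output textures can be consumed by the capture pipeline.
            mTextureBufferHelper = TextureBufferHelper.create("STRender",
                    EglBaseProvider.instance().getRootEglBase().getEglBaseContext());
            mTextureBufferHelper.invoke(() -> {
                // The effect engine must be created on the same GL thread it runs on.
                iBeautySenseTime = IBeautySenseTime.create(getContext());
                return null;
            });
        }
        return mTextureBufferHelper;
    }

    private void unInitBeauty() {
        if (mTextureBufferHelper != null) {
            // Release the effect engine on its own GL thread before disposing the helper.
            mTextureBufferHelper.invoke(() -> {
                iBeautySenseTime.release();
                iBeautySenseTime = null;
                return null;
            });
            mTextureBufferHelper.dispose();
            mTextureBufferHelper = null;
        }
    }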
isDestroyed = false; - private int mFrameRotation; @Nullable @Override @@ -67,45 +73,43 @@ public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup c @Override public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); + isDestroyed = false; if (!IBeautySenseTime.hasIntegrated()) { mBinding.tvIntegrateTip.setVisibility(View.VISIBLE); return; } - channelId = getArguments().getString(getString(R.string.key_channel_name)); initVideoView(); initRtcEngine(); joinChannel(); + mBinding.switchVideoEffect.setOnCheckedChangeListener((buttonView, isChecked) -> + { + ColorEnhanceOptions options = new ColorEnhanceOptions(); + options.strengthLevel = (float) 0.5f; + options.skinProtectLevel = (float) 0.5f; + rtcEngine.setColorEnhanceOptions(isChecked, options); + }); } + @Override public void onDestroyView() { super.onDestroyView(); - isDestroyed = true; - if (mSingleTextureBufferHelper != null) { - mSingleTextureBufferHelper.invoke(() -> { - iBeautySenseTime.release(); - iBeautySenseTime = null; - return null; - }); - mSingleTextureBufferHelper.dispose(); - mSingleTextureBufferHelper = null; - } - if (mDoubleTextureBufferHelper != null) { - mDoubleTextureBufferHelper.invoke(() -> { - iBeautySenseTime.release(); - iBeautySenseTime = null; - return null; - }); - mDoubleTextureBufferHelper.dispose(); - mDoubleTextureBufferHelper = null; - } if (rtcEngine != null) { rtcEngine.leaveChannel(); } + unInitBeauty(); RtcEngine.destroy(); } + @Override + protected void onBackPressed() { + isDestroyed = true; + mBinding.fullVideoContainer.removeAllViews(); + mBinding.smallVideoContainer.removeAllViews(); + super.onBackPressed(); + } + private void initVideoView() { mBinding.cbFaceBeautify.setOnCheckedChangeListener((buttonView, isChecked) -> { if (iBeautySenseTime == null) { @@ -135,11 +139,6 @@ private void initVideoView() { rtcEngine.switchCamera(); isFrontCamera = !isFrontCamera; }); - mBinding.tvBeautyInput.setText(isSingleInput ? R.string.beauty_input_single : R.string.beauty_input_double); - mBinding.tvBeautyInput.setOnClickListener(v -> { - isSingleInput = !isSingleInput; - mBinding.tvBeautyInput.setText(isSingleInput ? 
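The switchVideoEffect listener just above drives the SDK color enhancement. Standalone, the call sequence looks like the sketch below; 0.5 matches the values used in this case, and the clear_vision extension enabled later in this file is what makes the options take effect.

    // Enable the clear-vision extension first (see the enableExtension call in this case).
    rtcEngine.enableExtension("agora_video_filters_clear_vision", "clear_vision", true);

    ColorEnhanceOptions options = new ColorEnhanceOptions();
    options.strengthLevel = 0.5f;     // strength of the color enhancement, 0.0 - 1.0
    options.skinProtectLevel = 0.5f;  // how much skin tones are protected, 0.0 - 1.0
    rtcEngine.setColorEnhanceOptions(true, options);  // pass false to disable again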
R.string.beauty_input_single : R.string.beauty_input_double); - }); mBinding.smallVideoContainer.setOnClickListener(v -> updateVideoLayouts(!SceneTimeBeauty.this.isLocalFull)); } @@ -177,7 +176,7 @@ public void onUserJoined(int uid, int elapsed) { public void onUserOffline(int uid, int reason) { super.onUserOffline(uid, reason); runOnUIThread(() -> { - if(mRemoteVideoLayout != null && mRemoteVideoLayout.getReportUid() == uid){ + if (mRemoteVideoLayout != null && mRemoteVideoLayout.getReportUid() == uid) { mRemoteVideoLayout.removeAllViews(); mRemoteVideoLayout = null; updateVideoLayouts(isLocalFull); @@ -219,35 +218,17 @@ public void onRemoteVideoStats(RemoteVideoStats stats) { return; } + rtcEngine.enableExtension("agora_video_filters_clear_vision", "clear_vision", true); - mVideoFrameObserver = new IVideoFrameObserver() { - @Override - public boolean onCaptureVideoFrame(VideoFrame videoFrame) { - if (isDestroyed) { - return true; - } - if (isSingleInput) { - return processSingleInput(videoFrame); - } else { - if (!processDoubleInput(videoFrame)) { - return processSingleInput(videoFrame); - } - return true; - } - } - - @Override - public boolean onPreEncodeVideoFrame(VideoFrame videoFrame) { - return false; - } + mVideoFrameObserver = new IVideoFrameObserver() { @Override - public boolean onScreenCaptureVideoFrame(VideoFrame videoFrame) { - return false; + public boolean onCaptureVideoFrame(int sourceType, VideoFrame videoFrame) { + return processBeauty(videoFrame); } @Override - public boolean onPreEncodeScreenVideoFrame(VideoFrame videoFrame) { + public boolean onPreEncodeVideoFrame(int sourceType, VideoFrame videoFrame) { return false; } @@ -278,7 +259,7 @@ public boolean getRotationApplied() { @Override public boolean getMirrorApplied() { - return false; + return true; } @Override @@ -287,6 +268,13 @@ public int getObservedFramePosition() { } }; rtcEngine.registerVideoFrameObserver(mVideoFrameObserver); + // Setup video encoding configs + rtcEngine.setVideoEncoderConfiguration(new VideoEncoderConfiguration( + ((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingDimensionObject(), + VideoEncoderConfiguration.FRAME_RATE.valueOf(((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingFrameRate()), + STANDARD_BITRATE, + VideoEncoderConfiguration.ORIENTATION_MODE.valueOf(((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingOrientation()) + )); rtcEngine.enableVideo(); rtcEngine.disableAudio(); @@ -295,32 +283,29 @@ public int getObservedFramePosition() { } } - private boolean processSingleInput(VideoFrame videoFrame) { - VideoFrame.Buffer buffer = videoFrame.getBuffer(); - - if (mDoubleTextureBufferHelper != null) { - doOnBeautyReleasingBegin(); - mDoubleTextureBufferHelper.invoke(() -> { + private void unInitBeauty() { + if (mTextureBufferHelper != null) { + mTextureBufferHelper.invoke(() -> { iBeautySenseTime.release(); + iBeautySenseTime = null; return null; }); - mDoubleTextureBufferHelper.dispose(); - mDoubleTextureBufferHelper = null; - doOnBeautyReleasingEnd(); + mTextureBufferHelper.dispose(); + mTextureBufferHelper = null; } - if (mSingleTextureBufferHelper == null) { - doOnBeautyCreatingBegin(); - mSingleTextureBufferHelper = TextureBufferHelper.create("STRender", null); - mSingleTextureBufferHelper.invoke(() -> { - iBeautySenseTime = IBeautySenseTime.create(getContext()); - return null; - }); - doOnBeautyCreatingEnd(); + } + + private boolean processBeauty(VideoFrame videoFrame) { 
+ if (isDestroyed) { + return false; } + VideoFrame.Buffer buffer = videoFrame.getBuffer(); int width = buffer.getWidth(); int height = buffer.getHeight(); + + // Obtain nv21 pixel data int nv21Size = (int) (width * height * 3.0f / 2.0f + 0.5f); if (nv21ByteBuffer == null || nv21ByteBuffer.capacity() != nv21Size) { if (nv21ByteBuffer != null) { @@ -328,9 +313,8 @@ private boolean processSingleInput(VideoFrame videoFrame) { } nv21ByteBuffer = ByteBuffer.allocateDirect(nv21Size); nv21ByteArray = new byte[nv21Size]; + return false; } - - VideoFrame.I420Buffer i420Buffer = buffer.toI420(); YuvHelper.I420ToNV12(i420Buffer.getDataY(), i420Buffer.getStrideY(), i420Buffer.getDataV(), i420Buffer.getStrideV(), @@ -340,94 +324,52 @@ private boolean processSingleInput(VideoFrame videoFrame) { nv21ByteBuffer.get(nv21ByteArray); i420Buffer.release(); - Integer processTexId = mSingleTextureBufferHelper.invoke(() -> iBeautySenseTime.process( - nv21ByteArray, - width, height, mFrameRotation - )); - - // drag one frame to avoid reframe when switching camera. - if(mFrameRotation != videoFrame.getRotation()){ - mFrameRotation = videoFrame.getRotation(); - return false; - } - - VideoFrame.TextureBuffer processBuffer = mSingleTextureBufferHelper.wrapTextureBuffer( - width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId, - buffer instanceof VideoFrame.TextureBuffer ? ((VideoFrame.TextureBuffer) buffer).getTransformMatrix(): new Matrix()); - videoFrame.replaceBuffer(processBuffer, mFrameRotation, videoFrame.getTimestampNs()); - buffer.release(); - - return true; - } - - private boolean processDoubleInput(VideoFrame videoFrame) { - VideoFrame.Buffer buffer = videoFrame.getBuffer(); - if (!(buffer instanceof VideoFrame.TextureBuffer)) { - return false; - } - VideoFrame.TextureBuffer texBuffer = (VideoFrame.TextureBuffer) buffer; - - if (mSingleTextureBufferHelper != null) { - doOnBeautyReleasingBegin(); - mSingleTextureBufferHelper.invoke(() -> { - iBeautySenseTime.release(); - return null; - }); - mSingleTextureBufferHelper.dispose(); - mSingleTextureBufferHelper = null; - doOnBeautyReleasingEnd(); - } - - if (mDoubleTextureBufferHelper == null) { - doOnBeautyCreatingBegin(); - mDoubleTextureBufferHelper = TextureBufferHelper.create("STRender", texBuffer.getEglBaseContext()); - mDoubleTextureBufferHelper.invoke(() -> { + if (mTextureBufferHelper == null) { + Log.d(TAG, "doOnBeautyCreatingBegin..."); + mTextureBufferHelper = TextureBufferHelper.create("STRender", EglBaseProvider.instance().getRootEglBase().getEglBaseContext()); + mTextureBufferHelper.invoke(() -> { iBeautySenseTime = IBeautySenseTime.create(getContext()); return null; }); - doOnBeautyCreatingEnd(); + Log.d(TAG, "doOnBeautyCreatingEnd."); + runOnUIThread(() -> { + mBinding.cbFilter.setChecked(false); + mBinding.cbFaceBeautify.setChecked(false); + mBinding.cbSticker.setChecked(false); + mBinding.cbMakeup.setChecked(false); + }); } - int textureId = texBuffer.getTextureId(); - int textureFormat = texBuffer.getType() == VideoFrame.TextureBuffer.Type.OES ? GLES11Ext.GL_TEXTURE_EXTERNAL_OES : GLES20.GL_TEXTURE_2D; - int width = buffer.getWidth(); - int height = buffer.getHeight(); + int processTexId = -1; + if (buffer instanceof VideoFrame.TextureBuffer) { + VideoFrame.TextureBuffer texBuffer = (VideoFrame.TextureBuffer) buffer; + int textureFormat = texBuffer.getType() == VideoFrame.TextureBuffer.Type.OES ? 
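Both the SenseTime and FaceUnity paths produce NV21 for the effect engines in the same way. A sketch of just that conversion, assuming the nv21ByteBuffer/nv21ByteArray fields declared above (toNv21 is a hypothetical extraction of the logic inside processBeauty):

    private byte[] toNv21(VideoFrame.Buffer buffer) {
        int width = buffer.getWidth();
        int height = buffer.getHeight();
        // I420/NV21 use 1.5 bytes per pixel.
        int nv21Size = (int) (width * height * 3.0f / 2.0f + 0.5f);
        if (nv21ByteBuffer == null || nv21ByteBuffer.capacity() != nv21Size) {
            nv21ByteBuffer = ByteBuffer.allocateDirect(nv21Size);
            nv21ByteArray = new byte[nv21Size];
            // Size changed: skip this frame and process from the next one,
            // mirroring the skipFrame handling above.
            return null;
        }
        VideoFrame.I420Buffer i420Buffer = buffer.toI420();
        // NV21 is the Y plane followed by interleaved VU; swapping the U and V
        // arguments of I420ToNV12 produces exactly that layout.
        YuvHelper.I420ToNV12(i420Buffer.getDataY(), i420Buffer.getStrideY(),
                i420Buffer.getDataV(), i420Buffer.getStrideV(),
                i420Buffer.getDataU(), i420Buffer.getStrideU(),
                nv21ByteBuffer, width, height);
        nv21ByteBuffer.position(0);
        nv21ByteBuffer.get(nv21ByteArray);
        i420Buffer.release();
        return nv21ByteArray;
    }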
GLES11Ext.GL_TEXTURE_EXTERNAL_OES : GLES20.GL_TEXTURE_2D; + float[] transformMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(texBuffer.getTransformMatrix()); + + if(mTextureBufferHelper != null){ + processTexId = mTextureBufferHelper.invoke(() -> iBeautySenseTime.process( + nv21ByteArray, + texBuffer.getTextureId(), textureFormat, + width, height, videoFrame.getRotation(), transformMatrix + )); + } - int nv21Size = (int) (width * height * 3.0f / 2.0f + 0.5f); - if (nv21ByteBuffer == null || nv21ByteBuffer.capacity() != nv21Size) { - if (nv21ByteBuffer != null) { - nv21ByteBuffer.clear(); + } else { + if(mTextureBufferHelper != null){ + processTexId = mTextureBufferHelper.invoke(() ->iBeautySenseTime.process( + nv21ByteArray, + width, height, videoFrame.getRotation() + )); } - nv21ByteBuffer = ByteBuffer.allocateDirect(nv21Size); - nv21ByteArray = new byte[nv21Size]; } - - - VideoFrame.I420Buffer i420Buffer = buffer.toI420(); - YuvHelper.I420ToNV12(i420Buffer.getDataY(), i420Buffer.getStrideY(), - i420Buffer.getDataV(), i420Buffer.getStrideV(), - i420Buffer.getDataU(), i420Buffer.getStrideU(), - nv21ByteBuffer, width, height); - nv21ByteBuffer.position(0); - nv21ByteBuffer.get(nv21ByteArray); - i420Buffer.release(); - - Integer processTexId = mDoubleTextureBufferHelper.invoke(() -> iBeautySenseTime.process( - nv21ByteArray, - textureId, textureFormat, - width, height, mFrameRotation - )); - - // drag one frame to avoid reframe when switching camera. - if(mFrameRotation != videoFrame.getRotation()){ - mFrameRotation = videoFrame.getRotation(); + if (processTexId < 0) { return false; } - - VideoFrame.TextureBuffer processBuffer = mDoubleTextureBufferHelper.wrapTextureBuffer( - width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId, texBuffer.getTransformMatrix()); - videoFrame.replaceBuffer(processBuffer, mFrameRotation, videoFrame.getTimestampNs()); - buffer.release(); + if(mTextureBufferHelper != null){ + VideoFrame.TextureBuffer processBuffer = mTextureBufferHelper.wrapTextureBuffer( + width, height, VideoFrame.TextureBuffer.Type.RGB, processTexId, new Matrix()); + videoFrame.replaceBuffer(processBuffer, videoFrame.getRotation(), videoFrame.getTimestampNs()); + } return true; } @@ -446,56 +388,59 @@ private void joinChannel() { mLocalVideoLayout = new VideoReportLayout(requireContext()); TextureView videoView = new TextureView(requireContext()); - rtcEngine.setupLocalVideo(new VideoCanvas(videoView, Constants.RENDER_MODE_HIDDEN)); + VideoCanvas local = new VideoCanvas(videoView, Constants.RENDER_MODE_HIDDEN, 0); + local.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED; + rtcEngine.setupLocalVideo(local); mLocalVideoLayout.addView(videoView); - rtcEngine.startPreview(); updateVideoLayouts(isLocalFull); } private void updateVideoLayouts(boolean isLocalFull) { this.isLocalFull = isLocalFull; - mBinding.fullVideoContainer.removeAllViews(); - mBinding.smallVideoContainer.removeAllViews(); if (isLocalFull) { if (mLocalVideoLayout != null) { - mBinding.fullVideoContainer.addView(mLocalVideoLayout); + ViewParent parent = mLocalVideoLayout.getParent(); + if (parent instanceof ViewGroup && parent != mBinding.fullVideoContainer) { + ((ViewGroup) parent).removeView(mLocalVideoLayout); + mBinding.fullVideoContainer.addView(mLocalVideoLayout); + } else if (parent == null) { + mBinding.fullVideoContainer.addView(mLocalVideoLayout); + } } if (mRemoteVideoLayout != null) { mRemoteVideoLayout.getChildAt(0).setOnClickListener(v -> 
updateVideoLayouts(!SceneTimeBeauty.this.isLocalFull)); - mBinding.smallVideoContainer.addView(mRemoteVideoLayout); + ViewParent parent = mRemoteVideoLayout.getParent(); + if (parent instanceof ViewGroup && parent != mBinding.smallVideoContainer) { + ((ViewGroup) parent).removeView(mRemoteVideoLayout); + mBinding.smallVideoContainer.addView(mRemoteVideoLayout); + } else if(parent == null){ + mBinding.smallVideoContainer.addView(mRemoteVideoLayout); + } } } else { if (mLocalVideoLayout != null) { mLocalVideoLayout.getChildAt(0).setOnClickListener(v -> updateVideoLayouts(!SceneTimeBeauty.this.isLocalFull)); - mBinding.smallVideoContainer.addView(mLocalVideoLayout); + ViewParent parent = mLocalVideoLayout.getParent(); + if (parent instanceof ViewGroup && parent != mBinding.smallVideoContainer) { + ((ViewGroup) parent).removeView(mLocalVideoLayout); + mBinding.smallVideoContainer.addView(mLocalVideoLayout); + } else if(parent == null){ + mBinding.smallVideoContainer.addView(mLocalVideoLayout); + } } + if (mRemoteVideoLayout != null) { - mBinding.fullVideoContainer.addView(mRemoteVideoLayout); + ViewParent parent = mRemoteVideoLayout.getParent(); + if (parent instanceof ViewGroup && parent != mBinding.fullVideoContainer) { + ((ViewGroup) parent).removeView(mRemoteVideoLayout); + mBinding.fullVideoContainer.addView(mRemoteVideoLayout); + } else if(parent == null) { + mBinding.fullVideoContainer.addView(mRemoteVideoLayout); + } } } } - private void doOnBeautyCreatingBegin() { - Log.d(TAG, "doOnBeautyCreatingBegin..."); - } - - private void doOnBeautyCreatingEnd() { - Log.d(TAG, "doOnBeautyCreatingEnd."); - runOnUIThread(() -> { - mBinding.cbFilter.setChecked(false); - mBinding.cbFaceBeautify.setChecked(false); - mBinding.cbSticker.setChecked(false); - mBinding.cbMakeup.setChecked(false); - }); - } - - private void doOnBeautyReleasingBegin() { - Log.d(TAG, "doOnBeautyReleasingBegin..."); - } - - private void doOnBeautyReleasingEnd() { - Log.d(TAG, "doOnBeautyReleasingEnd."); - } } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/VideoCaptureUtils.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/VideoCaptureUtils.java new file mode 100644 index 000000000..1245a411a --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/VideoCaptureUtils.java @@ -0,0 +1,29 @@ +package io.agora.api.example.examples.advanced.beauty; + +import android.graphics.Matrix; +import android.util.Size; + +import io.agora.base.VideoFrame; +import io.agora.base.internal.video.RendererCommon; + +public class VideoCaptureUtils { + + public static Size getCaptureOriginSize(VideoFrame.TextureBuffer texBuffer){ + int width = texBuffer.getWidth(); + int height = texBuffer.getHeight(); + Matrix texMatrix = texBuffer.getTransformMatrix(); + + // Work back from the transform matrix to the texture's real width and height + Matrix renderMatrix = new Matrix(); + renderMatrix.preTranslate(0.5F, 0.5F); + renderMatrix.preScale(1.0F, -1.0F); + renderMatrix.preTranslate(-0.5F, -0.5F); + Matrix finalMatrix = new Matrix(texMatrix); + finalMatrix.preConcat(renderMatrix); + float[] finalGlMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(finalMatrix); + int texWidth = (int) (width * 1.0f / finalGlMatrix[0] + 0.5f); + int texHeight = (int) (height * 1.0f / finalGlMatrix[5] + 0.5f); + return new Size(texWidth, texHeight); + } + +} diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java
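VideoCaptureUtils above inverts the capture crop: elements [0] and [5] of the converted GL matrix hold the x/y scale applied to the texture, so dividing the buffer size by them recovers the sensor-true size. A usage sketch (hypothetical call site, assuming a texture-backed frame and the imports used above):

    VideoFrame.Buffer buffer = videoFrame.getBuffer();
    if (buffer instanceof VideoFrame.TextureBuffer) {
        // Size before the transform matrix's crop/scale was applied.
        Size originSize = VideoCaptureUtils.getCaptureOriginSize((VideoFrame.TextureBuffer) buffer);
        Log.d(TAG, "capture origin size: " + originSize.getWidth() + "x" + originSize.getHeight());
    }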
b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java index 92c2afb78..e66641d57 100755 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java @@ -236,6 +236,7 @@ public void onClick(View v) { engine.leaveChannel(); pulling = false; join.setText(getString(R.string.join)); + audioSeatManager.downAllSeats(); if(pullingTask != null){ try { pullingTask.join(); @@ -268,7 +269,7 @@ private void joinChannel(String channelId) { * 0: Success. * < 0: Failure. * PS: Ensure that you call this method before the joinChannel method.*/ - engine.setExternalAudioSource(true, SAMPLE_RATE, SAMPLE_NUM_OF_CHANNEL, 2, false, true); + // engine.setExternalAudioSource(true, SAMPLE_RATE, SAMPLE_NUM_OF_CHANNEL, 2, false, true); diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java index a4403f77a..a94f62cfb 100755 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java @@ -34,6 +34,7 @@ import io.agora.rtc2.RtcEngine; import io.agora.rtc2.RtcEngineConfig; import io.agora.rtc2.RtcEngineEx; +import io.agora.rtc2.audio.AudioTrackConfig; /** * This demo demonstrates how to make a one-to-one voice call @@ -54,10 +55,11 @@ public class CustomAudioSource extends BaseFragment implements View.OnClickListe public static RtcEngineEx engine; private Switch mic, pcm; private ChannelMediaOptions option = new ChannelMediaOptions(); - private volatile int pushTimes = 0; + private int pushTimes = 0; private AudioSeatManager audioSeatManager; private AudioFileReader audioPushingHelper; + private int customAudioTrack = -1; @Override public void onCreate(@Nullable Bundle savedInstanceState) { @@ -155,9 +157,9 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { engine.setLocalAccessPoint(((MainApplication) getActivity().getApplication()).getGlobalSettings().getPrivateCloudConfig()); audioPushingHelper = new AudioFileReader(requireContext(), (buffer, timestamp) -> { - if(joined && engine != null){ - Log.i(TAG, "pushExternalAudioFrame times:" + pushTimes++); - engine.pushExternalAudioFrame(buffer, 0); + if(joined && engine != null && customAudioTrack != -1){ + int ret = engine.pushExternalAudioFrame(buffer, timestamp, AudioFileReader.SAMPLE_RATE, AudioFileReader.SAMPLE_NUM_OF_CHANNEL, Constants.BytesPerSample.TWO_BYTES_PER_SAMPLE, customAudioTrack); + Log.i(TAG, "pushExternalAudioFrame times:" + (++pushTimes) + ", ret=" + ret); } }); } catch (Exception e) { @@ -169,6 +171,10 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { @Override public void onDestroy() { super.onDestroy(); + if(customAudioTrack != -1){ + engine.destroyCustomAudioTrack(customAudioTrack); + customAudioTrack = -1; + } if(audioPushingHelper != null){ audioPushingHelper.stop(); } @@ -187,9 +193,10 @@ public void onCheckedChanged(CompoundButton compoundButton, boolean checked) { option.publishMicrophoneTrack = checked; engine.updateChannelMediaOptions(option); } else if (compoundButton.getId() == R.id.localAudio) { + 
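The CustomAudioSource hunks here and just below replace setExternalAudioSource with the 4.2.0 custom-audio-track APIs. Pulled into call order, the lifecycle is sketched below, using only calls that appear in this patch (pushPcm is a hypothetical wrapper around the AudioFileReader callback):

    // 1. Create a mixable custom track before joining (see the createCustomAudioTrack hunk below).
    AudioTrackConfig config = new AudioTrackConfig();
    config.enableLocalPlayback = false;
    int trackId = engine.createCustomAudioTrack(Constants.AudioTrackType.AUDIO_TRACK_MIXABLE, config);

    // 2. Publish the track by id instead of the removed setExternalAudioSource().
    ChannelMediaOptions option = new ChannelMediaOptions();
    option.publishCustomAudioTrackId = trackId;
    option.publishCustomAudioTrack = true;
    engine.updateChannelMediaOptions(option);

    // 3. Feed PCM; the trackId routes the data to the right custom track.
    void pushPcm(byte[] buffer, long timestamp, int trackId) {
        engine.pushExternalAudioFrame(buffer, timestamp,
                AudioFileReader.SAMPLE_RATE, AudioFileReader.SAMPLE_NUM_OF_CHANNEL,
                Constants.BytesPerSample.TWO_BYTES_PER_SAMPLE, trackId);
    }

    // 4. On teardown, release the track.
    engine.destroyCustomAudioTrack(trackId);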
option.publishCustomAudioTrackId = customAudioTrack; option.publishCustomAudioTrack = checked; engine.updateChannelMediaOptions(option); - engine.enableCustomAudioLocalPlayback(0, checked); + engine.enableCustomAudioLocalPlayback(customAudioTrack, checked); } } @@ -268,10 +275,9 @@ private void joinChannel(String channelId) { * 0: Success. * < 0: Failure. * PS: Ensure that you call this method before the joinChannel method.*/ - engine.setExternalAudioSource(true, - AudioFileReader.SAMPLE_RATE, AudioFileReader.SAMPLE_NUM_OF_CHANNEL, AudioFileReader.SAMPLE_NUM_OF_CHANNEL, - false, true); - + AudioTrackConfig config = new AudioTrackConfig(); + config.enableLocalPlayback = false; + customAudioTrack = engine.createCustomAudioTrack(Constants.AudioTrackType.AUDIO_TRACK_MIXABLE, config); /**Please configure accessToken in the string_config file. * A temporary token generated in Console. A temporary token is valid for 24 hours. For details, see diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/videoRender/YuvFboProgram.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/videoRender/YuvFboProgram.java new file mode 100644 index 000000000..9c84356c7 --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/videoRender/YuvFboProgram.java @@ -0,0 +1,91 @@ +package io.agora.api.example.examples.advanced.videoRender; + +import android.graphics.Matrix; +import android.opengl.GLES20; + +import io.agora.base.JavaI420Buffer; +import io.agora.base.internal.video.GlRectDrawer; +import io.agora.base.internal.video.GlUtil; +import io.agora.base.internal.video.RendererCommon; + +public class YuvFboProgram { + + private int[] mFboTextureId; + private final YuvUploader yuvUploader; + private final GlRectDrawer glRectDrawer; + + private int mWidth, mHeight; + private volatile boolean isRelease; + + // GL Thread + public YuvFboProgram() { + yuvUploader = new YuvUploader(); + glRectDrawer = new GlRectDrawer(); + } + + // GL Thread + public void release() { + isRelease = true; + if (mFboTextureId != null) { + GLES20.glDeleteFramebuffers(1, mFboTextureId, 0); + GLES20.glDeleteTextures(1, mFboTextureId, 1); + yuvUploader.release(); + glRectDrawer.release(); + mFboTextureId = null; + } + } + + // GL Thread + public Integer drawYuv(byte[] yuv, int width, int height) { + if (isRelease) { + return -1; + } + if (mFboTextureId == null) { + mFboTextureId = new int[2]; + GLES20.glGenFramebuffers(1, mFboTextureId, 0); + int fboId = mFboTextureId[0]; + + int texture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D); + mFboTextureId[1] = texture; + + mWidth = width; + mHeight = height; + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture); + GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, + GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); + + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId); + GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, + GLES20.GL_COLOR_ATTACHMENT0, + GLES20.GL_TEXTURE_2D, texture, 0); + } else if (mWidth != width || mHeight != height) { + GLES20.glDeleteFramebuffers(1, mFboTextureId, 0); + GLES20.glDeleteTextures(1, mFboTextureId, 1); + mFboTextureId = null; + return drawYuv(yuv, width, height); + } else { + int fboId = mFboTextureId[0]; + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId); + } + GLES20.glViewport(0, 0, mWidth, mHeight); + + JavaI420Buffer i420Buffer = JavaI420Buffer.allocate(width, height); + i420Buffer.getDataY().put(yuv, 0, 
i420Buffer.getDataY().limit()); + i420Buffer.getDataU().put(yuv, i420Buffer.getDataY().limit(), i420Buffer.getDataU().limit()); + i420Buffer.getDataV().put(yuv, i420Buffer.getDataY().limit() + i420Buffer.getDataU().limit(), i420Buffer.getDataV().limit()); + + yuvUploader.uploadFromBuffer(i420Buffer); + Matrix matrix = new Matrix(); + matrix.preTranslate(0.5f, 0.5f); + matrix.preScale(1f, -1f);// I420-frames are upside down + matrix.preTranslate(-0.5f, -0.5f); + glRectDrawer.drawYuv(yuvUploader.getYuvTextures(), RendererCommon.convertMatrixFromAndroidGraphicsMatrix(matrix), width, height, 0, 0, width, height); + + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); + GLES20.glFlush(); + + return mFboTextureId[1]; + } + + +} diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/utils/VideoFileReader.java b/Android/APIExample/app/src/main/java/io/agora/api/example/utils/VideoFileReader.java index 8c6e21701..812d65086 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/utils/VideoFileReader.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/utils/VideoFileReader.java @@ -5,9 +5,6 @@ import java.io.IOException; import java.io.InputStream; -import io.agora.base.JavaI420Buffer; -import io.agora.base.VideoFrame; - public class VideoFileReader { private final String RAW_VIDEO_PATH = "sample.yuv"; private final int RAW_VIDEO_WIDTH = 320; @@ -51,7 +48,7 @@ public final void stop(){ public interface OnVideoReadListener { - void onVideoRead(VideoFrame videoFrame); + void onVideoRead(byte[] buffer, int width, int height); } private class InnerThread extends Thread { @@ -77,12 +74,8 @@ public void run() { } catch (IOException e) { e.printStackTrace(); } - JavaI420Buffer i420Buffer = JavaI420Buffer.allocate(RAW_VIDEO_WIDTH, RAW_VIDEO_HEIGHT); - i420Buffer.getDataY().put(buffer, 0, i420Buffer.getDataY().limit()); - i420Buffer.getDataU().put(buffer, i420Buffer.getDataY().limit(), i420Buffer.getDataU().limit()); - i420Buffer.getDataV().put(buffer, i420Buffer.getDataY().limit() + i420Buffer.getDataU().limit(), i420Buffer.getDataV().limit()); if(videoReadListener != null){ - videoReadListener.onVideoRead(new VideoFrame(i420Buffer, 0, System.nanoTime())); + videoReadListener.onVideoRead(buffer, RAW_VIDEO_WIDTH, RAW_VIDEO_HEIGHT); } long consume = System.nanoTime() - start; diff --git a/Android/APIExample/app/src/main/res/layout/fragment_beauty_bytedance.xml b/Android/APIExample/app/src/main/res/layout/fragment_beauty_bytedance.xml index cebcfa44e..14cf7188c 100644 --- a/Android/APIExample/app/src/main/res/layout/fragment_beauty_bytedance.xml +++ b/Android/APIExample/app/src/main/res/layout/fragment_beauty_bytedance.xml @@ -39,14 +39,23 @@ + + - + app:layout_constraintEnd_toEndOf="parent" + android:layout_margin="12dp" + android:text="@string/colorful_enhance"/> - - + app:layout_constraintEnd_toEndOf="parent" + android:layout_margin="12dp" + android:text="@string/colorful_enhance"/> @@ -87,7 +86,6 @@ android:layout_width="match_parent" android:layout_height="wrap_content" android:layout_weight="1" - android:digits="@string/chanel_support_char" android:hint="@string/token" android:singleLine="true" app:layout_constraintBottom_toTopOf="@id/ll_join" /> diff --git a/Android/APIExample/app/src/main/res/layout/fragment_localvideotranscoding.xml b/Android/APIExample/app/src/main/res/layout/fragment_localvideotranscoding.xml new file mode 100644 index 000000000..d1d8524f4 --- /dev/null +++ 
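YuvFboProgram and the reworked VideoFileReader (which now hands back raw I420 bytes instead of a prebuilt VideoFrame) are the two halves of the YUV push path. A sketch of wiring them together, assuming textureBufferHelper, yuvFboProgram and rtcEngine fields as used elsewhere in this patch; both the program's construction and drawYuv must run on the helper's GL thread, and pushExternalVideoFrame is the custom-source push call these examples target:

    VideoFileReader.OnVideoReadListener listener = (byte[] yuv, int width, int height) -> {
        textureBufferHelper.invoke(() -> {
            // Upload the I420 bytes and draw them into the FBO-backed RGBA texture.
            Integer texId = yuvFboProgram.drawYuv(yuv, width, height);
            if (texId < 0) {
                return null; // program already released
            }
            // Wrap the texture and hand it to the SDK with the current timestamp.
            VideoFrame.TextureBuffer texBuffer = textureBufferHelper.wrapTextureBuffer(
                    width, height, VideoFrame.TextureBuffer.Type.RGB, texId, new Matrix());
            rtcEngine.pushExternalVideoFrame(new VideoFrame(texBuffer, 0, System.nanoTime()));
            return null;
        });
    };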
b/Android/APIExample/app/src/main/res/layout/fragment_localvideotranscoding.xml @@ -0,0 +1,51 @@ + + + + + + + + + + + + + + + + + + diff --git a/Android/APIExample/app/src/main/res/layout/fragment_media_recorder.xml b/Android/APIExample/app/src/main/res/layout/fragment_media_recorder.xml new file mode 100644 index 000000000..6e139b89e --- /dev/null +++ b/Android/APIExample/app/src/main/res/layout/fragment_media_recorder.xml @@ -0,0 +1,152 @@ + + + + + + + + + + + + + + + + + + + + + - - - - - - + + + + + + + + + + + diff --git a/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/VoiceChanger/VoiceChanger.swift b/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/VoiceChanger/VoiceChanger.swift index 454452961..332d43e77 100644 --- a/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/VoiceChanger/VoiceChanger.swift +++ b/iOS/APIExample-Audio/APIExample-Audio/Examples/Advanced/VoiceChanger/VoiceChanger.swift @@ -297,6 +297,9 @@ class VoiceChangerMain: BaseViewController { LogUtils.log(message: "onLocalVoicePitch \(Double(sender.value))", level: .info) agoraKit.setLocalVoicePitch(Double(sender.value)) } + @IBAction func onVoiceFormantChange(_ sender: UISlider) { + agoraKit.setLocalVoiceFormant(Double(sender.value)) + } @IBAction func onLocalVoiceEqualizaitonFreq(_ sender:UIButton) { let alert = UIAlertController(title: "Set Band Frequency".localized, message: nil, preferredStyle: UIDevice.current.userInterfaceIdiom == .pad ? UIAlertController.Style.alert : UIAlertController.Style.actionSheet) diff --git a/iOS/APIExample-Audio/APIExample-Audio/Info.plist b/iOS/APIExample-Audio/APIExample-Audio/Info.plist index 3c3d2fc3a..7ad85ab0a 100644 --- a/iOS/APIExample-Audio/APIExample-Audio/Info.plist +++ b/iOS/APIExample-Audio/APIExample-Audio/Info.plist @@ -2,8 +2,6 @@ - UIFileSharingEnabled - BGTaskSchedulerPermittedIdentifiers com.yourCompanyName.appName @@ -35,6 +33,8 @@ audio processing + UIFileSharingEnabled + UILaunchStoryboardName LaunchScreen UIMainStoryboardFile diff --git a/iOS/APIExample-Audio/ExportOptions.plist b/iOS/APIExample-Audio/ExportOptions.plist index cbd9a2593..1727fd11e 100644 --- a/iOS/APIExample-Audio/ExportOptions.plist +++ b/iOS/APIExample-Audio/ExportOptions.plist @@ -2,25 +2,25 @@ - provisioningProfiles - - io.agora.api.examples.Agora-ScreenShare-Extension - App - io.agora.api.examples - App - compileBitcode - + destination export method development + provisioningProfiles + + io.agora.api.examples.audio + AgoraLab2020 + + signingCertificate + Apple Development signingStyle manual stripSwiftSymbols teamID - GM72UGLGZW + JDPG69R49Z thinning <none> diff --git a/iOS/APIExample-Audio/Podfile b/iOS/APIExample-Audio/Podfile index a105225a4..d8a06674f 100644 --- a/iOS/APIExample-Audio/Podfile +++ b/iOS/APIExample-Audio/Podfile @@ -7,7 +7,7 @@ target 'APIExample-Audio' do pod 'Floaty', '~> 4.2.0' pod 'AGEVideoLayout', '~> 1.0.2' - pod 'AgoraAudio_iOS', '4.1.1' + pod 'AgoraAudio_iOS', '4.2.0' # pod 'sdk', :path => 'sdk.podspec' end diff --git a/iOS/APIExample-Audio/iOS_ExportOptions.plist b/iOS/APIExample-Audio/iOS_ExportOptions.plist new file mode 100644 index 000000000..68c3cc00e --- /dev/null +++ b/iOS/APIExample-Audio/iOS_ExportOptions.plist @@ -0,0 +1,27 @@ + + + + + compileBitcode + + destination + export + method + development + provisioningProfiles + + io.agora.api.examples.audio + apiexamples_all + + signingCertificate + Apple Development + signingStyle + manual + stripSwiftSymbols + + teamID + YS397FG5PA + thinning + <none> + + diff --git 
a/iOS/APIExample/APIExample.xcodeproj/project.pbxproj b/iOS/APIExample/APIExample.xcodeproj/project.pbxproj index e9e1fa16d..519d9dd84 100644 --- a/iOS/APIExample/APIExample.xcodeproj/project.pbxproj +++ b/iOS/APIExample/APIExample.xcodeproj/project.pbxproj @@ -83,7 +83,7 @@ 576CA80C25AA0FA90091520B /* AgoraPcmSourcePush.swift in Sources */ = {isa = PBXBuildFile; fileRef = 576CA80B25AA0FA90091520B /* AgoraPcmSourcePush.swift */; }; 576EA54225AC3310000B3D79 /* CustomPcmAudioSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = 576EA54125AC3310000B3D79 /* CustomPcmAudioSource.swift */; }; 576EA54825AC3523000B3D79 /* CustomPcmAudioSource.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 576EA54A25AC3523000B3D79 /* CustomPcmAudioSource.storyboard */; }; - 576EA59025AEDD3C000B3D79 /* (null) in Sources */ = {isa = PBXBuildFile; }; + 576EA59025AEDD3C000B3D79 /* BuildFile in Sources */ = {isa = PBXBuildFile; }; 57FE7C4B26B2D103002D9043 /* CircularBuffer.c in Sources */ = {isa = PBXBuildFile; fileRef = 57FE7C4726B2D103002D9043 /* CircularBuffer.c */; }; 670936FD282DFE1600BC3954 /* ContentInspect.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 670936FF282DFE1600BC3954 /* ContentInspect.storyboard */; }; 6709B23B2806B0EA000BCC58 /* RawAudioData.swift in Sources */ = {isa = PBXBuildFile; fileRef = 6709B23A2806B0EA000BCC58 /* RawAudioData.swift */; }; @@ -97,7 +97,7 @@ 67B8C7B628057D1500195106 /* RawVideoData.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 67B8C7B828057D1500195106 /* RawVideoData.storyboard */; }; 67CB2F0C27EB318200CB19D2 /* SpatialAudio.swift in Sources */ = {isa = PBXBuildFile; fileRef = 67CB2F0A27EB318100CB19D2 /* SpatialAudio.swift */; }; 67CB2F0D27EB318200CB19D2 /* SpatialAudio.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 67CB2F0B27EB318200CB19D2 /* SpatialAudio.storyboard */; }; - 8407E0942472320800AC5DE8 /* (null) in Sources */ = {isa = PBXBuildFile; }; + 8407E0942472320800AC5DE8 /* BuildFile in Sources */ = {isa = PBXBuildFile; }; 8B10BE1126AFFFA6002E1373 /* SimpleFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = 8B10BE0F26AFFFA6002E1373 /* SimpleFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; 8B1422C226B50AB500FFF91B /* AudioProcessor.mm in Sources */ = {isa = PBXBuildFile; fileRef = 8B1422BC26B50AB400FFF91B /* AudioProcessor.mm */; }; 8B1422C326B50AB500FFF91B /* ExtensionAudioFilter.hpp in Headers */ = {isa = PBXBuildFile; fileRef = 8B1422BD26B50AB400FFF91B /* ExtensionAudioFilter.hpp */; }; @@ -152,6 +152,9 @@ E728B85928B86B0700674A4A /* CustomVideoSourcePushMulti.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E728B85528B86B0700674A4A /* CustomVideoSourcePushMulti.storyboard */; }; E728B85A28B86B0700674A4A /* CustomVideoSourcePushMulti.swift in Sources */ = {isa = PBXBuildFile; fileRef = E728B85728B86B0700674A4A /* CustomVideoSourcePushMulti.swift */; }; E728B85C28B8971200674A4A /* sample.yuv in Resources */ = {isa = PBXBuildFile; fileRef = E728B85B28B8971200674A4A /* sample.yuv */; }; + E74788AC29C7FB6900CD7415 /* JoinChannelVideoRecorder.strings in Resources */ = {isa = PBXBuildFile; fileRef = E74788A729C7FB6800CD7415 /* JoinChannelVideoRecorder.strings */; }; + E74788AD29C7FB6900CD7415 /* JoinChannelVideoRecorder.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E74788A929C7FB6800CD7415 /* JoinChannelVideoRecorder.storyboard */; }; + E74788AE29C7FB6900CD7415 /* JoinChannelVideoRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = E74788AB29C7FB6800CD7415 /* 
JoinChannelVideoRecorder.swift */; }; E74877B328A23B2F00CA2F58 /* SimpleFilter.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8B10BE0D26AFFFA6002E1373 /* SimpleFilter.framework */; }; E74877B728A23B8B00CA2F58 /* NetworkManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = E74877B628A23B8B00CA2F58 /* NetworkManager.swift */; }; E74877BA28A23C1400CA2F58 /* JSONObject.swift in Sources */ = {isa = PBXBuildFile; fileRef = E74877B928A23C1400CA2F58 /* JSONObject.swift */; }; @@ -203,6 +206,8 @@ E7A49D652909111400F06DD4 /* BEHttpRequestProvider.mm in Sources */ = {isa = PBXBuildFile; fileRef = E7A49D642909111400F06DD4 /* BEHttpRequestProvider.mm */; }; E7A49D682909113200F06DD4 /* BERender.mm in Sources */ = {isa = PBXBuildFile; fileRef = E7A49D672909113200F06DD4 /* BERender.mm */; }; E7A49D6B2909115200F06DD4 /* BEEffectResourceHelper.m in Sources */ = {isa = PBXBuildFile; fileRef = E7A49D6A2909115100F06DD4 /* BEEffectResourceHelper.m */; }; + E7AD0DE129C85FFB00C9A4B0 /* sample.mov in Resources */ = {isa = PBXBuildFile; fileRef = E7AD0DE029C85FFB00C9A4B0 /* sample.mov */; }; + E7AD0DE329C95EB500C9A4B0 /* PickerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = E7AD0DE229C95EB500C9A4B0 /* PickerView.swift */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ @@ -446,6 +451,9 @@ E728B85628B86B0700674A4A /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/CustomVideoSourcePushMulti.storyboard; sourceTree = ""; }; E728B85728B86B0700674A4A /* CustomVideoSourcePushMulti.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CustomVideoSourcePushMulti.swift; sourceTree = ""; }; E728B85B28B8971200674A4A /* sample.yuv */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = sample.yuv; sourceTree = ""; }; + E74788A829C7FB6800CD7415 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/JoinChannelVideoRecorder.strings"; sourceTree = ""; }; + E74788AA29C7FB6800CD7415 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/JoinChannelVideoRecorder.storyboard; sourceTree = ""; }; + E74788AB29C7FB6800CD7415 /* JoinChannelVideoRecorder.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = JoinChannelVideoRecorder.swift; sourceTree = ""; }; E74877B628A23B8B00CA2F58 /* NetworkManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NetworkManager.swift; sourceTree = ""; }; E74877B928A23C1400CA2F58 /* JSONObject.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = JSONObject.swift; sourceTree = ""; }; E74877C928A2611C00CA2F58 /* ToastView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ToastView.swift; sourceTree = ""; }; @@ -486,7 +494,6 @@ E7A49D1F2907DD8F00F06DD4 /* EffectsAttribute.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = EffectsAttribute.h; sourceTree = ""; }; E7A49D212907DD9A00F06DD4 /* EffectsCommonObject.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = EffectsCommonObject.h; sourceTree = ""; }; E7A49D222907DD9A00F06DD4 /* EffectsCommonObject.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = 
EffectsCommonObject.m; sourceTree = ""; }; - E7A49D252907DDBF00F06DD4 /* EffectMacro.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = EffectMacro.h; path = "../../../../../../../../../Agora-With-SenseTime/Agora-Video-With-SenseTime-iOS/SenseMe/EFRender/EffectMacro.h"; sourceTree = ""; }; E7A49D262907DDFE00F06DD4 /* EffectsDetector.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = EffectsDetector.m; sourceTree = ""; }; E7A49D272907DDFF00F06DD4 /* EffectsDetector.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = EffectsDetector.h; sourceTree = ""; }; E7A49D292907DEE600F06DD4 /* EFMotionManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = EFMotionManager.m; sourceTree = ""; }; @@ -528,6 +535,8 @@ E7A49D692909115100F06DD4 /* BEEffectResourceHelper.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = BEEffectResourceHelper.h; sourceTree = ""; }; E7A49D6A2909115100F06DD4 /* BEEffectResourceHelper.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = BEEffectResourceHelper.m; sourceTree = ""; }; E7A49D6E290A744400F06DD4 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/ThirdBeautify.strings"; sourceTree = ""; }; + E7AD0DE029C85FFB00C9A4B0 /* sample.mov */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = sample.mov; sourceTree = ""; }; + E7AD0DE229C95EB500C9A4B0 /* PickerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PickerView.swift; sourceTree = ""; }; EAD308B056B63304DA681699 /* Pods-Agora-ScreenShare-Extension(Socket).release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Agora-ScreenShare-Extension(Socket).release.xcconfig"; path = "Target Support Files/Pods-Agora-ScreenShare-Extension(Socket)/Pods-Agora-ScreenShare-Extension(Socket).release.xcconfig"; sourceTree = ""; }; EB8CDD3F04870C6A31287732 /* Pods_audioFilter.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_audioFilter.framework; sourceTree = BUILT_PRODUCTS_DIR; }; FAAC2AEE355D103B9E8527B5 /* Pods-Agora-ScreenShare-Extension.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Agora-ScreenShare-Extension.debug.xcconfig"; path = "Target Support Files/Pods-Agora-ScreenShare-Extension/Pods-Agora-ScreenShare-Extension.debug.xcconfig"; sourceTree = ""; }; @@ -796,6 +805,7 @@ children = ( 8B349FE22681E2CE007247F2 /* agora-logo.png */, 576CA80925A9CC3A0091520B /* output.raw */, + E7AD0DE029C85FFB00C9A4B0 /* sample.mov */, 03414B5425546DEC00AB114D /* frames0.yuv */, E728B85B28B8971200674A4A /* sample.yuv */, 03BEED0C251CAB9C005E78F4 /* audioeffect.mp3 */, @@ -865,6 +875,7 @@ 03F8733124C8696600EDB1A3 /* EntryViewController.swift */, 0339BE63251DCA3B007D4FDD /* GlobalSettings.swift */, E721600E28D3314B006431BD /* AlertManager.swift */, + E7AD0DE229C95EB500C9A4B0 /* PickerView.swift */, ); path = Common; sourceTree = ""; @@ -1039,6 +1050,7 @@ A75A56D324A0603000D0089E /* Basic */ = { isa = PBXGroup; children = ( + E74788A629C7FB6800CD7415 /* JoinChannelVideo(Recorder) */, E77D54C128F55E9100D51C1E /* JoinChannelVideo(Token) */, 0385768025224A88003C369A /* JoinChannelVideo */, 
0371D8AC250B4A2C00C0DD61 /* JoinChannelAudio */, @@ -1147,6 +1159,16 @@ path = APIExample/Examples/Advanced/CustomVideoSourcePushMulti; sourceTree = SOURCE_ROOT; }; + E74788A629C7FB6800CD7415 /* JoinChannelVideo(Recorder) */ = { + isa = PBXGroup; + children = ( + E74788A729C7FB6800CD7415 /* JoinChannelVideoRecorder.strings */, + E74788A929C7FB6800CD7415 /* JoinChannelVideoRecorder.storyboard */, + E74788AB29C7FB6800CD7415 /* JoinChannelVideoRecorder.swift */, + ); + path = "JoinChannelVideo(Recorder)"; + sourceTree = ""; + }; E74877B528A23B8B00CA2F58 /* NetworkManager */ = { isa = PBXGroup; children = ( @@ -1237,7 +1259,6 @@ isa = PBXGroup; children = ( E7A49D352907EB6000F06DD4 /* SENSEME.lic */, - E7A49D252907DDBF00F06DD4 /* EffectMacro.h */, E7A49D192907DD7800F06DD4 /* Effects.h */, E7A49D182907DD7800F06DD4 /* Effects.m */, E7A49D2A2907DEE600F06DD4 /* EFMotionManager.h */, @@ -1466,6 +1487,7 @@ 033A9F8E252D8FF300BC26E1 /* JoinMultiChannel.storyboard in Resources */, 03BEED0B251C4446005E78F4 /* audiomixing.mp3 in Resources */, 8BC751D6273E502700552265 /* LiveStreaming.storyboard in Resources */, + E74788AD29C7FB6900CD7415 /* JoinChannelVideoRecorder.storyboard in Resources */, 03B12DAC251127DC00E55818 /* VideoViewMetal.xib in Resources */, E77D54C828F55E9100D51C1E /* JoinChannelVideoToken.storyboard in Resources */, E7A49D0A29067F8300F06DD4 /* SenseBeautify.storyboard in Resources */, @@ -1488,10 +1510,12 @@ 8BE7ABC3279E065000DFBCEF /* FusionCDN.storyboard in Resources */, 0339D6D224E91B80008739CD /* QuickSwitchChannelVCItem.xib in Resources */, E7163F8A29651D8900EBBD55 /* AR.scnassets in Resources */, + E7AD0DE129C85FFB00C9A4B0 /* sample.mov in Resources */, E728B84928B5FFCB00674A4A /* PictureInPicture.storyboard in Resources */, 03BEED0D251CAB9C005E78F4 /* audioeffect.mp3 in Resources */, A7CA48C424553CF700507435 /* Popover.storyboard in Resources */, E7A49D42290907E200F06DD4 /* BytedEffect.storyboard in Resources */, + E74788AC29C7FB6900CD7415 /* JoinChannelVideoRecorder.strings in Resources */, E7A49CFC29029E0000F06DD4 /* FUBeautify.strings in Resources */, 03D13BDC2448758B00B599B3 /* LaunchScreen.storyboard in Resources */, E728B85C28B8971200674A4A /* sample.yuv in Resources */, @@ -1674,18 +1698,19 @@ 8B333DA9267B4BC3002A3785 /* SettingsCells.swift in Sources */, E7A49D4829090F8000F06DD4 /* BEFrameProcessor.mm in Sources */, 033A9EFC252D61E200BC26E1 /* CustomVideoRender.swift in Sources */, - 576EA59025AEDD3C000B3D79 /* (null) in Sources */, + 576EA59025AEDD3C000B3D79 /* BuildFile in Sources */, 033A9F09252D61FC00BC26E1 /* RTMPStreaming.swift in Sources */, 6709B23B2806B0EA000BCC58 /* RawAudioData.swift in Sources */, 033A9EEA252D5F5E00BC26E1 /* JoinMultiChannel.swift in Sources */, 0339BE64251DCA3B007D4FDD /* GlobalSettings.swift in Sources */, E728B85A28B86B0700674A4A /* CustomVideoSourcePushMulti.swift in Sources */, E728B84C28B6015800674A4A /* AgoraPictureInPictureController.m in Sources */, - 8407E0942472320800AC5DE8 /* (null) in Sources */, + 8407E0942472320800AC5DE8 /* BuildFile in Sources */, 8B5E5B50274CB68E0040E97D /* RhythmPlayer.swift in Sources */, E7A49D342907E74A00F06DD4 /* BundleUtil.m in Sources */, 036C42B524D2A3C600A59000 /* AgoraMetalRender.swift in Sources */, E72055EA28F943520030E6D1 /* Util.swift in Sources */, + E74788AE29C7FB6900CD7415 /* JoinChannelVideoRecorder.swift in Sources */, E7A49D41290907E200F06DD4 /* BytedEffectVC.m in Sources */, E7A49D142907DC2800F06DD4 /* EffectsProcess.m in Sources */, 03DF1D9324CFC29700DF7151 /* ExternalAudio.mm in Sources 
*/, @@ -1725,6 +1750,7 @@ 03DF1D9024CFC29700DF7151 /* AudioWriteToFile.m in Sources */, 0339BE6D251DEAFC007D4FDD /* PrecallTest.swift in Sources */, 8BC751DA273E57C900552265 /* VideoProcess.swift in Sources */, + E7AD0DE329C95EB500C9A4B0 /* PickerView.swift in Sources */, E728B84828B5FFCB00674A4A /* PictureInPicture.swift in Sources */, 5744CE0925BA99FF0099AB66 /* VideoChat.swift in Sources */, 034C625E2524A06800296ECF /* VoiceChanger.swift in Sources */, @@ -2133,6 +2159,22 @@ name = CustomVideoSourcePushMulti.storyboard; sourceTree = ""; }; + E74788A729C7FB6800CD7415 /* JoinChannelVideoRecorder.strings */ = { + isa = PBXVariantGroup; + children = ( + E74788A829C7FB6800CD7415 /* zh-Hans */, + ); + name = JoinChannelVideoRecorder.strings; + sourceTree = ""; + }; + E74788A929C7FB6800CD7415 /* JoinChannelVideoRecorder.storyboard */ = { + isa = PBXVariantGroup; + children = ( + E74788AA29C7FB6800CD7415 /* Base */, + ); + name = JoinChannelVideoRecorder.storyboard; + sourceTree = ""; + }; E77D54C228F55E9100D51C1E /* JoinChannelVideoToken.strings */ = { isa = PBXVariantGroup; children = ( diff --git a/iOS/APIExample/APIExample/Common/ExternalAudio/ExternalAudio.mm b/iOS/APIExample/APIExample/Common/ExternalAudio/ExternalAudio.mm index cefa82035..dc628b7fb 100644 --- a/iOS/APIExample/APIExample/Common/ExternalAudio/ExternalAudio.mm +++ b/iOS/APIExample/APIExample/Common/ExternalAudio/ExternalAudio.mm @@ -303,7 +303,7 @@ - (void)audioController:(AudioController *)controller didCaptureData:(unsigned c } else { // [self.agoraKit pushExternalAudioFrameNSData:[NSData dataWithBytes:data length:bytesLength] sourceId:1 timestamp:0]; - [self.agoraKit pushExternalAudioFrameRawData: data samples: 441 * 10 sourceId:1 timestamp:0]; + [self.agoraKit pushExternalAudioFrameRawData: data samples: 441 * 10 trackId:1 timestamp:0]; } } diff --git a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraMetalRender.swift b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraMetalRender.swift index c5b53057f..b08bb2ac2 100644 --- a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraMetalRender.swift +++ b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraMetalRender.swift @@ -106,8 +106,8 @@ func getAgoraRotation(rotation: Int32) -> AgoraVideoRotation? 
{ } extension AgoraMetalRender: AgoraVideoFrameDelegate { - func onCapture(_ videoFrame: AgoraOutputVideoFrame) -> Bool { - return true + func onCapture(_ videoFrame: AgoraOutputVideoFrame, sourceType: AgoraVideoSourceType) -> Bool { + true } func onRenderVideoFrame(_ videoFrame: AgoraOutputVideoFrame, uid: UInt, channelId: String) -> Bool { @@ -155,9 +155,8 @@ extension AgoraMetalRender: AgoraVideoFrameDelegate { return .readOnly } - - func onPreEncode(_ videoFrame: AgoraOutputVideoFrame) -> Bool { - return true + func onPreEncode(_ videoFrame: AgoraOutputVideoFrame, sourceType: AgoraVideoSourceType) -> Bool { + true } func getVideoFormatPreference() -> AgoraVideoFormat { diff --git a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.h b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.h index e5b577f29..c8ca14124 100644 --- a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.h +++ b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.h @@ -27,6 +27,8 @@ NS_ASSUME_NONNULL_BEGIN @property (nonatomic, strong, readonly) AgoraSampleBufferRender *displayView; - (instancetype)initWithDisplayView:(AgoraSampleBufferRender *)displayView; + +- (void)releasePIP; @end diff --git a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.m b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.m index 4c71e493e..0958ba80c 100644 --- a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.m +++ b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraPictureInPictureController.m @@ -10,10 +10,6 @@ @interface AgoraPictureInPictureController () -@property (nonatomic, strong) AVPictureInPictureController *pipController; - -@property (nonatomic, strong) AgoraSampleBufferRender *displayView; - @end @implementation AgoraPictureInPictureController @@ -34,6 +30,12 @@ - (instancetype)initWithDisplayView:(AgoraSampleBufferRender *)displayView { return nil; } +- (void)releasePIP { + _pipController.delegate = nil; + _pipController = nil; + [_displayView reset]; + _displayView = nil; +} #pragma mark - diff --git a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraSampleBufferRender.m b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraSampleBufferRender.m index a651521f2..a3b0b9b60 100644 --- a/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraSampleBufferRender.m +++ b/iOS/APIExample/APIExample/Common/ExternalVideo/AgoraSampleBufferRender.m @@ -33,6 +33,13 @@ - (instancetype)init { return self; } +- (instancetype)initWithFrame:(CGRect)frame { + if (self = [super initWithFrame:frame]) { + [self.layer addSublayer:self.displayLayer]; + } + return self; +} + - (void)awakeFromNib { [super awakeFromNib]; [self.layer addSublayer:self.displayLayer]; @@ -65,7 +72,7 @@ - (void)layoutDisplayLayer { } CGRect renderRect = CGRectMake(0.5 * (viewWidth - videoSize.width), 0.5 * (viewHeight - videoSize.height), videoSize.width, videoSize.height); - + if (!CGRectEqualToRect(renderRect, self.displayLayer.frame)) { self.displayLayer.frame = renderRect; } @@ -75,6 +82,19 @@ - (void)reset { [self.displayLayer flushAndRemoveImage]; } +- (OSType)getFormatType: (NSInteger)type { + switch (type) { + case 1: + return kCVPixelFormatType_420YpCbCr8Planar; + + case 2: + return kCVPixelFormatType_32BGRA; + + default: + return kCVPixelFormatType_32BGRA; + } +} + - (void)renderVideoData:(AgoraOutputVideoFrame *_Nonnull)videoData { if (!videoData) { return; @@ 
-86,6 +106,7 @@ - (void)renderVideoData:(AgoraOutputVideoFrame *_Nonnull)videoData { [self layoutDisplayLayer]; }); + size_t width = videoData.width; size_t height = videoData.height; size_t yStride = videoData.yStride; @@ -99,17 +120,23 @@ - (void)renderVideoData:(AgoraOutputVideoFrame *_Nonnull)videoData { @autoreleasepool { CVPixelBufferRef pixelBuffer = NULL; NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}}; - CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_420YpCbCr8Planar, (__bridge CFDictionaryRef)(pixelAttributes), &pixelBuffer); - + OSType type = [self getFormatType:videoData.type]; + CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault, + width, + height, + type, + (__bridge CFDictionaryRef)(pixelAttributes), + &pixelBuffer); + if (result != kCVReturnSuccess) { NSLog(@"Unable to create cvpixelbuffer %d", result); } - + CVPixelBufferLockBaseAddress(pixelBuffer, 0); void *yPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0); int pixelBufferYBytes = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0); - + if (yStride == pixelBufferYBytes) { memcpy(yPlane, yBuffer, yStride*height); }else { @@ -117,7 +144,7 @@ - (void)renderVideoData:(AgoraOutputVideoFrame *_Nonnull)videoData { memcpy(yPlane + pixelBufferYBytes * i, yBuffer + yStride * i, MIN(yStride, pixelBufferYBytes)); } } - + void *uPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1); int pixelBufferUBytes = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1); if (uStride == pixelBufferUBytes) { @@ -127,7 +154,7 @@ - (void)renderVideoData:(AgoraOutputVideoFrame *_Nonnull)videoData { memcpy(uPlane + pixelBufferUBytes * i, uBuffer + uStride * i, MIN(uStride, pixelBufferUBytes)); } } - + void *vPlane = (void *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 2); int pixelBufferVBytes = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 2); if (vStride == pixelBufferVBytes) { @@ -137,9 +164,9 @@ - (void)renderVideoData:(AgoraOutputVideoFrame *_Nonnull)videoData { memcpy(vPlane + pixelBufferVBytes * i, vBuffer + vStride * i, MIN(vStride, pixelBufferVBytes)); } } - + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); - + CMVideoFormatDescriptionRef videoInfo; CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo); @@ -150,7 +177,7 @@ - (void)renderVideoData:(AgoraOutputVideoFrame *_Nonnull)videoData { CMSampleBufferRef sampleBuffer; CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer, videoInfo, &timingInfo, &sampleBuffer); - + [self.displayLayer enqueueSampleBuffer:sampleBuffer]; if (self.displayLayer.status == AVQueuedSampleBufferRenderingStatusFailed) { [self.displayLayer flush]; @@ -176,9 +203,11 @@ - (void)renderVideoPixelBuffer:(AgoraOutputVideoFrame *_Nonnull)videoData { @autoreleasepool { CVPixelBufferRef pixelBuffer = videoData.pixelBuffer; - + CMVideoFormatDescriptionRef videoInfo; - CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo); + CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, + pixelBuffer, + &videoInfo); CMSampleTimingInfo timingInfo; timingInfo.duration = kCMTimeZero; @@ -186,9 +215,16 @@ - (void)renderVideoPixelBuffer:(AgoraOutputVideoFrame *_Nonnull)videoData { timingInfo.presentationTimeStamp = CMTimeMake(CACurrentMediaTime()*1000, 1000); CMSampleBufferRef sampleBuffer; - CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer, videoInfo, &timingInfo, &sampleBuffer); - + 
CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, + pixelBuffer, + videoInfo, + &timingInfo, + &sampleBuffer); + [self.displayLayer enqueueSampleBuffer:sampleBuffer]; + if (self.displayLayer.status == AVQueuedSampleBufferRenderingStatusFailed) { + [self.displayLayer flush]; + } CMSampleBufferInvalidate(sampleBuffer); CFRelease(sampleBuffer); } diff --git a/iOS/APIExample/APIExample/Common/PickerView.swift b/iOS/APIExample/APIExample/Common/PickerView.swift new file mode 100644 index 000000000..b79ad961a --- /dev/null +++ b/iOS/APIExample/APIExample/Common/PickerView.swift @@ -0,0 +1,124 @@ +// +// PickerView.swift +// APIExample +// +// Created by zhaoyongqiang on 2023/3/21. +// Copyright © 2023 Agora Corp. All rights reserved. +// + +import UIKit + +class PickerView: UIView { + private lazy var cancelButton: UIButton = { + let button = UIButton() + button.setTitle("Cancel".localized, for: .normal) + button.backgroundColor = .blue + button.cornerRadius = 5 + button.setTitleColor(.white, for: .normal) + button.titleLabel?.font = .systemFont(ofSize: 14) + button.addTarget(self, action: #selector(onTapCancelButton), for: .touchUpInside) + return button + }() + private lazy var sureButton: UIButton = { + let button = UIButton() + button.setTitle("Sure".localized, for: .normal) + button.backgroundColor = .blue + button.cornerRadius = 5 + button.setTitleColor(.white, for: .normal) + button.titleLabel?.font = .systemFont(ofSize: 14) + button.addTarget(self, action: #selector(onTapSureButton), for: .touchUpInside) + return button + }() + private lazy var titleLabel: UILabel = { + let label = UILabel() + label.text = "" + label.textColor = .black + label.font = UIFont.boldSystemFont(ofSize: 16) + return label + }() + private lazy var pickerView: UIPickerView = { + let pickerView = UIPickerView() + pickerView.dataSource = self + pickerView.delegate = self + return pickerView + }() + private var selectedValue: String? + + // MARK: Public + var pickerViewSelectedValueClosure: ((String) -> Void)? + var dataArray: [String]? 
+ var rowHeight: CGFloat = 40 + + override init(frame: CGRect) { + super.init(frame: frame) + setupUI() + } + + required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + private func setupUI() { + backgroundColor = .white + addSubview(cancelButton) + addSubview(titleLabel) + addSubview(sureButton) + addSubview(pickerView) + cancelButton.translatesAutoresizingMaskIntoConstraints = false + titleLabel.translatesAutoresizingMaskIntoConstraints = false + sureButton.translatesAutoresizingMaskIntoConstraints = false + pickerView.translatesAutoresizingMaskIntoConstraints = false + + widthAnchor.constraint(equalToConstant: UIScreen.main.bounds.width).isActive = true + + cancelButton.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 10).isActive = true + cancelButton.topAnchor.constraint(equalTo: topAnchor, constant: 5).isActive = true + cancelButton.widthAnchor.constraint(equalToConstant: 50).isActive = true + cancelButton.heightAnchor.constraint(equalToConstant: 30).isActive = true + + titleLabel.centerXAnchor.constraint(equalTo: centerXAnchor).isActive = true + titleLabel.centerYAnchor.constraint(equalTo: cancelButton.centerYAnchor).isActive = true + + sureButton.centerYAnchor.constraint(equalTo: cancelButton.centerYAnchor).isActive = true + sureButton.trailingAnchor.constraint(equalTo: trailingAnchor, constant: -10).isActive = true + sureButton.widthAnchor.constraint(equalToConstant: 50).isActive = true + sureButton.heightAnchor.constraint(equalToConstant: 30).isActive = true + + pickerView.leadingAnchor.constraint(equalTo: leadingAnchor).isActive = true + pickerView.bottomAnchor.constraint(equalTo: bottomAnchor).isActive = true + pickerView.trailingAnchor.constraint(equalTo: trailingAnchor).isActive = true + pickerView.topAnchor.constraint(equalTo: cancelButton.bottomAnchor, constant: 5).isActive = true + pickerView.heightAnchor.constraint(equalToConstant: 160).isActive = true + } + + @objc + private func onTapCancelButton() { + AlertManager.hiddenView() + } + @objc + private func onTapSureButton() { + pickerViewSelectedValueClosure?(selectedValue ?? "") + AlertManager.hiddenView() + } +} + +extension PickerView: UIPickerViewDelegate, UIPickerViewDataSource { + func pickerView(_ pickerView: UIPickerView, rowHeightForComponent component: Int) -> CGFloat { + rowHeight + } + + func pickerView(_ pickerView: UIPickerView, didSelectRow row: Int, inComponent component: Int) { + selectedValue = dataArray?[row] + } + + func pickerView(_ pickerView: UIPickerView, titleForRow row: Int, forComponent component: Int) -> String? { + dataArray?[row] ?? "" + } + + func numberOfComponents(in pickerView: UIPickerView) -> Int { + 1 + } + func pickerView(_ pickerView: UIPickerView, numberOfRowsInComponent component: Int) -> Int { + dataArray?.count ?? 
0 + } +} diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ARKit/ARKit.swift b/iOS/APIExample/APIExample/Examples/Advanced/ARKit/ARKit.swift index dd8212bf5..778814d5a 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ARKit/ARKit.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/ARKit/ARKit.swift @@ -103,7 +103,7 @@ class ARKitMain: BaseViewController { // make myself a broadcaster agoraKit.setChannelProfile(.liveBroadcasting) - agoraKit.setClientRole(.broadcaster) + agoraKit.setClientRole(GlobalSettings.shared.getUserRole()) // set AR video source as custom video source renderer = ARVideoRenderer() diff --git a/iOS/APIExample/APIExample/Examples/Advanced/CreateDataStream/CreateDataStream.swift b/iOS/APIExample/APIExample/Examples/Advanced/CreateDataStream/CreateDataStream.swift index 97b978286..da2681c5b 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/CreateDataStream/CreateDataStream.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/CreateDataStream/CreateDataStream.swift @@ -108,8 +108,8 @@ class CreateDataStreamMain: BaseViewController { // when joining channel. The channel name and uid used to calculate // the token has to match the ones used for channel join let option = AgoraRtcChannelMediaOptions() - option.publishCameraTrack = true - option.publishMicrophoneTrack = true + option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster + option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster option.clientRoleType = GlobalSettings.shared.getUserRole() NetworkManager.shared.generateToken(channelName: channelName, success: { token in diff --git a/iOS/APIExample/APIExample/Examples/Advanced/CustomPcmAudioSource/CustomPcmAudioSource.swift b/iOS/APIExample/APIExample/Examples/Advanced/CustomPcmAudioSource/CustomPcmAudioSource.swift index c030bbf27..c7a67743e 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/CustomPcmAudioSource/CustomPcmAudioSource.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/CustomPcmAudioSource/CustomPcmAudioSource.swift @@ -42,6 +42,7 @@ class CustomPcmAudioSourceMain: BaseViewController { var audioViews: [UInt:VideoView] = [:] @IBOutlet weak var playAudioView: UIView! @IBOutlet weak var pushPcmSwitch: UISwitch! + private var trackId: Int32 = 0 // indicate if current instance has joined channel var isJoined: Bool = false { @@ -82,7 +83,10 @@ class CustomPcmAudioSourceMain: BaseViewController { // setup external audio source pcmSourcePush = AgoraPcmSourcePush(delegate: self, filePath: filepath, sampleRate: Int(sampleRate), channelsPerFrame: Int(channel), bitPerSample: bitPerSample, samples: samples) - agoraKit.setExternalAudioSource(true, sampleRate: Int(sampleRate), channels: Int(channel), sourceNumber: 2, localPlayback: true, publish: true) + + let trackConfig = AgoraAudioTrackConfig() + trackConfig.enableLocalPlayback = true + trackId = agoraKit.createCustomAudioTrack(.mixable, config: trackConfig) agoraKit.enableCustomAudioLocalPlayback(1, enabled: true) // start joining channel // 1. 
Users can only see each other after they join the @@ -92,8 +96,9 @@ // the token has to match the ones used for channel join let option = AgoraRtcChannelMediaOptions() option.publishCameraTrack = false - option.publishMicrophoneTrack = true - option.publishCustomAudioTrack = true + option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster + option.publishCustomAudioTrack = GlobalSettings.shared.getUserRole() == .broadcaster + option.publishCustomAudioTrackId = Int(trackId) option.clientRoleType = GlobalSettings.shared.getUserRole() NetworkManager.shared.generateToken(channelName: channelName, success: { token in let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option) @@ -112,6 +117,7 @@ // leave channel when exiting the view pcmSourcePush?.stop() if isJoined { + agoraKit.destroyCustomAudioTrack(Int(trackId)) agoraKit.disableAudio() pcmSourcePush?.stop() agoraKit.leaveChannel { (stats) -> Void in @@ -128,12 +134,20 @@ } else { pcmSourcePush?.stop() } + let mediaOption = AgoraRtcChannelMediaOptions() + mediaOption.publishCustomAudioTrack = sender.isOn + agoraKit.updateChannel(with: mediaOption) } } extension CustomPcmAudioSourceMain: AgoraPcmSourcePushDelegate { func onAudioFrame(data: UnsafeMutablePointer<UInt8>) { - agoraKit.pushExternalAudioFrameRawData(data, samples: samples, sourceId: 0, timestamp: 0) + agoraKit.pushExternalAudioFrameRawData(data, + samples: samples, + sampleRate: Int(sampleRate), + channels: Int(channel), + trackId: Int(trackId), + timestamp: 0) } } diff --git a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoRender/CustomVideoRender.swift index 9b9ccd108..e68f475a4 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoRender/CustomVideoRender.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoRender/CustomVideoRender.swift @@ -104,8 +104,8 @@ class CustomVideoRenderMain: BaseViewController { // when joining channel. The channel name and uid used to calculate // the token has to match the ones used for channel join let option = AgoraRtcChannelMediaOptions() - option.publishCameraTrack = true - option.publishMicrophoneTrack = true + option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster + option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster option.clientRoleType = GlobalSettings.shared.getUserRole() NetworkManager.shared.generateToken(channelName: channelName, success: { token in diff --git a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePush/CustomVideoSourcePush.swift index 3b207fad8..05f3f23eb 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePush/CustomVideoSourcePush.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePush/CustomVideoSourcePush.swift @@ -117,8 +117,8 @@ class CustomVideoSourcePushMain: BaseViewController { // when joining channel. 
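A minimal sketch of the 4.2.0 custom-audio-track flow this case now uses, assuming an AgoraRtcEngineKit instance named engine and placeholder PCM inputs (pcmData, samples, sampleRate, channel); the calls mirror the hunks above:

let trackConfig = AgoraAudioTrackConfig()
trackConfig.enableLocalPlayback = true
// a .mixable track is mixed with the other audio tracks the client publishes
let trackId = engine.createCustomAudioTrack(.mixable, config: trackConfig)
let options = AgoraRtcChannelMediaOptions()
options.publishCustomAudioTrack = true
options.publishCustomAudioTrackId = Int(trackId)
// join the channel with these options, then feed PCM frames:
engine.pushExternalAudioFrameRawData(pcmData,
                                     samples: samples,
                                     sampleRate: Int(sampleRate),
                                     channels: Int(channel),
                                     trackId: Int(trackId),
                                     timestamp: 0)
// destroy the track when leaving the channel
engine.destroyCustomAudioTrack(Int(trackId))
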
The channel name and uid used to calculate // the token has to match the ones used for channel join let option = AgoraRtcChannelMediaOptions() - option.publishCustomAudioTrack = false - option.publishCustomVideoTrack = true + option.publishCustomAudioTrack = GlobalSettings.shared.getUserRole() == .broadcaster + option.publishCustomVideoTrack = GlobalSettings.shared.getUserRole() == .broadcaster option.clientRoleType = GlobalSettings.shared.getUserRole() NetworkManager.shared.generateToken(channelName: channelName, success: { token in let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option) diff --git a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift b/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift index e51860c29..0f721663f 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift @@ -185,7 +185,7 @@ class CustomVideoSourcePushMultiMain: BaseViewController { } }) let connection = AgoraRtcConnection() - connection.localUid = 0 + connection.localUid = 999 connection.channelId = channelName agoraKit.leaveChannelEx(connection) { state in LogUtils.log(message: "warning: \(state.description)", level: .info) diff --git a/iOS/APIExample/APIExample/Examples/Advanced/FusionCDN/FusionCDN.swift b/iOS/APIExample/APIExample/Examples/Advanced/FusionCDN/FusionCDN.swift index d627d1f50..9cef9ac56 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/FusionCDN/FusionCDN.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/FusionCDN/FusionCDN.swift @@ -124,7 +124,7 @@ class FusionCDNHost: BaseViewController { Util.configPrivatization(agoraKit: agoraKit) agoraKit.setLogFile(LogUtils.sdkLogPath()) // make myself a broadcaster - agoraKit.setClientRole(.broadcaster) + agoraKit.setClientRole(GlobalSettings.shared.getUserRole()) // enable video module and set up video encoding configs agoraKit.enableVideo() @@ -197,8 +197,8 @@ class FusionCDNHost: BaseViewController { agoraKit.setDirectCdnStreamingVideoConfiguration(videoConfig) agoraKit.setDirectCdnStreamingAudioConfiguration(.default) let options = AgoraDirectCdnStreamingMediaOptions() - options.publishCameraTrack = true - options.publishMicrophoneTrack = true + options.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster + options.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster let ret = agoraKit.startDirectCdnStreaming(self, publishUrl: streamingUrl, mediaOptions: options) if ret == 0 { streamingButton.setTitle("Streaming", for: .normal) @@ -214,8 +214,8 @@ class FusionCDNHost: BaseViewController { private func switchToRtcStreaming() { guard let channelName = configs["channelName"] as? 
String else {return} let options = AgoraRtcChannelMediaOptions() - options.publishCameraTrack = true - options.publishMicrophoneTrack = true + options.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster + options.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster options.clientRoleType = .broadcaster NetworkManager.shared.generateToken(channelName: channelName, success: { token in let result = self.agoraKit.joinChannel(byToken: token, diff --git a/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift b/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift index 6fefe4933..f1b668294 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift @@ -168,9 +168,9 @@ class LiveStreamingMain: BaseViewController { // when joining channel. The channel name and uid used to calculate // the token has to match the ones used for channel join let option = AgoraRtcChannelMediaOptions() - option.publishCameraTrack = true - option.publishMicrophoneTrack = true - option.clientRoleType = GlobalSettings.shared.getUserRole() + option.publishCameraTrack = role == .broadcaster + option.publishMicrophoneTrack = role == .broadcaster + option.clientRoleType = role NetworkManager.shared.generateToken(channelName: channelName, success: { token in let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option) if result != 0 { diff --git a/iOS/APIExample/APIExample/Examples/Advanced/MediaChannelRelay/MediaChannelRelay.swift b/iOS/APIExample/APIExample/Examples/Advanced/MediaChannelRelay/MediaChannelRelay.swift index a7b81bdb4..a8c356b74 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/MediaChannelRelay/MediaChannelRelay.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/MediaChannelRelay/MediaChannelRelay.swift @@ -115,8 +115,8 @@ class MediaChannelRelayMain: BaseViewController { // when joining channel. 
The channel name and uid used to calculate // the token has to match the ones used for channel join let option = AgoraRtcChannelMediaOptions() - option.publishCameraTrack = true - option.publishMicrophoneTrack = true + option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster + option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster option.clientRoleType = GlobalSettings.shared.getUserRole() NetworkManager.shared.generateToken(channelName: channelName, success: { token in let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option) diff --git a/iOS/APIExample/APIExample/Examples/Advanced/MediaPlayer/Base.lproj/MediaPlayer.storyboard index c091d35d0..f6f71c1c1 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/MediaPlayer/Base.lproj/MediaPlayer.storyboard +++ b/iOS/APIExample/APIExample/Examples/Advanced/MediaPlayer/Base.lproj/MediaPlayer.storyboard [storyboard XML hunks unrecoverable: markup stripped] diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ScreenShare/ScreenShare.swift index 7ed59d5e7..f8afc9fa8 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ScreenShare/ScreenShare.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/ScreenShare/ScreenShare.swift @@ -40,7 +40,14 @@ class ScreenShareMain: BaseViewController { @IBOutlet weak var container: AGEVideoContainer! @IBOutlet weak var broadcasterPickerContainer: UIView! + @IBOutlet weak var fpsButton: UIButton! + var agoraKit: AgoraRtcEngineKit! + private lazy var pickerView: PickerView = { + let pickerView = PickerView() + pickerView.dataArray = fpsDataSources.map({ "\($0)" }) + return pickerView + }() private lazy var screenParams: AgoraScreenCaptureParameters2 = { let params = AgoraScreenCaptureParameters2() params.captureVideo = true @@ -50,7 +57,7 @@ class ScreenShareMain: BaseViewController { params.audioParams = audioParams let videoParams = AgoraScreenVideoParameters() videoParams.dimensions = screenShareVideoDimension() - videoParams.frameRate = .fps30 + videoParams.frameRate = .fps15 videoParams.bitrate = AgoraVideoBitrateStandard params.videoParams = videoParams return params @@ -65,6 +72,7 @@ class ScreenShareMain: BaseViewController { }() private var systemBroadcastPicker: RPSystemBroadcastPickerView? + private var fpsDataSources: [Int] = [15, 30, 60] // indicate if current instance has joined channel var isJoined: Bool = false @@ -178,6 +186,14 @@ class ScreenShareMain: BaseViewController { @IBAction func captureSignalVolumeSlider(_ sender: UISlider) { screenParams.audioParams.captureSignalVolume = Int(sender.value * 100) } + @IBAction func clickFpsButton(_ sender: UIButton) { + pickerView.pickerViewSelectedValueClosure = { [weak self] value in + guard let self = self else { return } + self.fpsButton.setTitle("\(value)fps", for: .normal) + self.screenParams.videoParams.frameRate = AgoraVideoFrameRate(rawValue: Int(value) ?? 15) ?? 
.fps15 + } + AlertManager.show(view: pickerView, alertPostion: .bottom) + } func isScreenShareUid(uid: UInt) -> Bool { return uid >= SCREEN_SHARE_UID_MIN && uid <= SCREEN_SHARE_UID_MAX @@ -322,3 +338,41 @@ extension ScreenShareMain: AgoraRtcEngineDelegate { remoteVideo.statsInfo?.updateAudioStats(stats) } } + + +extension ScreenShareMain: UIPickerViewDataSource, UIPickerViewDelegate { + func pickerView(_ pickerView: UIPickerView, rowHeightForComponent component: Int) -> CGFloat { + return 60.0 + } + + func pickerView(_ pickerView: UIPickerView, didSelectRow row: Int, inComponent component: Int) { + print("Selected row \(row)") + } + + func pickerView(_ pickerView: UIPickerView, titleForRow row: Int, forComponent component: Int) -> String? { + "\(fpsDataSources[row])fps" + } + +// // TODO: a specific row can be rendered with a custom style +// func pickerView(_ pickerView: UIPickerView, viewForRow row: Int, forComponent component: Int, reusing view: UIView?) -> UIView { +// // create a container view +// let specificView = UIView.init() +// specificView.frame = CGRect.init(x: 10, y: 5, width: 100, height: 60) +// specificView.backgroundColor = UIColor.magenta +// /** +// create a title label +// */ +// let specificLable = UILabel.init(frame: CGRect.init(x: 5, y: 0, width: 90, height: 60)) +// specificLable.text = (SourceData[row] as! String) +// specificLable.textColor = UIColor.white +// specificView.addSubview(specificLable) +// return specificView +// } + + func numberOfComponents(in pickerView: UIPickerView) -> Int { + return 1 + } + func pickerView(_ pickerView: UIPickerView, numberOfRowsInComponent component: Int) -> Int { + fpsDataSources.count + } +}
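The fps picker above rewrites screenParams before the next capture update. A minimal sketch of the same configuration flow, assuming an AgoraRtcEngineKit instance named agoraKit and illustrative dimension/fps values (startScreenCapture/updateScreenCapture as in the iOS 4.x API):

let params = AgoraScreenCaptureParameters2()
params.captureVideo = true
params.captureAudio = true
let videoParams = AgoraScreenVideoParameters()
videoParams.dimensions = CGSize(width: 720, height: 1280) // illustrative size
videoParams.frameRate = .fps15
videoParams.bitrate = AgoraVideoBitrateStandard
params.videoParams = videoParams
agoraKit.startScreenCapture(params)
// apply a newly picked frame rate without restarting the capture
params.videoParams.frameRate = .fps30
agoraKit.updateScreenCapture(params)
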
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/SimpleFilter/SimpleFilter.swift index 4e748b26c..273e3c118 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/SimpleFilter/SimpleFilter.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/SimpleFilter/SimpleFilter.swift @@ -118,8 +118,8 @@ class SimpleFilterMain: BaseViewController { // when joining channel. The channel name and uid used to calculate // the token has to match the ones used for channel join let option = AgoraRtcChannelMediaOptions() - option.publishCameraTrack = true - option.publishMicrophoneTrack = true + option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster + option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster option.clientRoleType = GlobalSettings.shared.getUserRole() NetworkManager.shared.generateToken(channelName: channelName, success: { token in let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option) diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Base.lproj/BytedEffect.storyboard index df5c4fad8..03cb0cb14 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Base.lproj/BytedEffect.storyboard +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Base.lproj/BytedEffect.storyboard [storyboard XML hunks unrecoverable: markup stripped] diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/BytedEffectVC.m index f17d1c9e4..10bf38b0f 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/BytedEffectVC.m +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/BytedEffectVC.m @@ -15,7 +15,7 @@ @interface BytedEffectVC () @property (weak, nonatomic) IBOutlet UILabel *tipsLabel; -@property (weak, nonatomic) IBOutlet UIStackView *container; +@property (weak, nonatomic) IBOutlet UIView *container; @property (weak, nonatomic) IBOutlet UIView *localVideo; @property (weak, nonatomic) IBOutlet UIView *remoteVideo; @@ -44,6 +44,9 @@ - (void) initSDK { self.rtcEngineKit = [AgoraRtcEngineKit sharedEngineWithAppId:KeyCenter.AppId delegate:self]; + AgoraVideoEncoderConfiguration *encoderConfig = [[AgoraVideoEncoderConfiguration alloc] initWithSize:CGSizeMake(375, 667) frameRate:(AgoraVideoFrameRateFps15) bitrate:15 orientationMode:(AgoraVideoOutputOrientationModeFixedPortrait) mirrorMode:(AgoraVideoMirrorModeEnabled)]; + [self.rtcEngineKit setVideoEncoderConfiguration:encoderConfig]; + // setup videoFrameDelegate [self.rtcEngineKit setVideoFrameDelegate:self]; @@ -81,6 +84,26 @@ - (void) initSDK { }]; } +- (IBAction)onTapSwitchCameraButton:(id)sender { + [self.rtcEngineKit switchCamera]; +} +- (IBAction)onTapBeautyButton:(UIButton *)sender { + [sender setSelected:!sender.isSelected]; + [self.videoFilter setBuauty:sender.isSelected]; +} +- (IBAction)onTapMakeupButton:(UIButton *)sender { + [sender setSelected:!sender.isSelected]; + [self.videoFilter setMakeup:sender.isSelected]; +} +- (IBAction)onTapStickerButton:(UIButton *)sender { + [sender setSelected:!sender.isSelected]; + [self.videoFilter setSticker:sender.isSelected]; +} +- (IBAction)onTapFilterButton:(UIButton *)sender { + [sender setSelected:!sender.isSelected]; + [self.videoFilter setFilter:sender.isSelected]; +} + #pragma mark - VideoFrameDelegate - (BOOL)onCaptureVideoFrame:(AgoraOutputVideoFrame *)videoFrame { CVPixelBufferRef pixelBuffer = videoFrame.pixelBuffer; diff --git 
a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.h index aff951034..51b548b2a 100755 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.h +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.h @@ -21,6 +21,11 @@ @property (nonatomic, assign) BOOL enabled; - + (ByteDanceFilter *)shareManager; + +- (void)setBuauty: (BOOL)isSelected; +- (void)setMakeup: (BOOL)isSelected; +- (void)setSticker: (BOOL)isSelected; +- (void)setFilter: (BOOL)isSelected; + @end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.m index 08c3bcfe7..328190ef3 100755 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.m +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.m @@ -39,19 +39,53 @@ - (instancetype)init [_processor setEffectOn:YES]; // [_processor setFilterPath:@"Filter_32_Po10"]; - [_processor setStickerPath:@"test_sticker"]; +// [_processor setStickerPath:@"test_sticker"]; [_processor updateComposerNodes:@[@"/beauty_IOS_lite"]]; - [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"smooth" intensity:0.8]; - [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"whiten" intensity:0.9]; - [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"sharp" intensity:0.96]; - [_processor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Overall" intensity:0.95]; - [_processor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Eye" intensity:0.95]; - [_processor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_MovNose" intensity:0.0]; } return self; } +- (void)setBuauty: (BOOL)isSelected { +#if __has_include("bef_effect_ai_api.h") + if (isSelected) { + [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"whiten" intensity:0.6]; + [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"smooth" intensity:0.6]; + } else { + [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"whiten" intensity:0]; + [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"smooth" intensity:0]; + } +#endif +} +- (void)setMakeup: (BOOL)isSelected { +#if __has_include("bef_effect_ai_api.h") + if (isSelected) { + [_processor updateComposerNodeIntensity:@"/style_makeup/tianmei" key:@"Makeup_ALL" intensity:0.6]; + } else { + [_processor updateComposerNodeIntensity:@"/style_makeup/tianmei" key:@"Makeup_ALL" intensity:0]; + } +#endif +} +- (void)setSticker: (BOOL)isSelected { +#if __has_include("bef_effect_ai_api.h") + if (isSelected) { + [_processor setStickerPath:@"wochaotian"]; + } else { + [_processor setStickerPath:@""]; + } +#endif +} +- (void)setFilter: (BOOL)isSelected { +#if __has_include("bef_effect_ai_api.h") + if (isSelected) { + [_processor setFilterPath:@"Filter_02_14"]; + [_processor setFilterIntensity:0.4]; + } else { + [_processor setFilterIntensity:0]; + } +#endif +} + #pragma mark - VideoFilterDelegate /// process your video frame here diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/Core.h 
b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/Core.h index 906dcb32b..069aafd07 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/Core.h +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/Core.h @@ -1,6 +1,6 @@ #import "macro.h" -#define LICENSE_NAME ((const char *)"labcv_test_20220210_20230210_com.bytedance.labcv.demo_4.2.1.licbag") +#define LICENSE_NAME ((const char *)"agora_test_20220805_20230208_io.agora.entfull_4.2.3.licbag") #define ONLINE_LICENSE_KEY ((const char *)"jiaoyang_test") #define ONLINE_LICENSE_SECRET ((const char *)"04273924-9a77-11eb-94da-0c42a1b32a30") diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/zh-Hans.lproj/BytedEffect.strings b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/zh-Hans.lproj/BytedEffect.strings index 46475970f..fae52c988 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/zh-Hans.lproj/BytedEffect.strings +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/zh-Hans.lproj/BytedEffect.strings @@ -23,8 +23,14 @@ "j6s-Pm-fSS.text" = "您还没有开启字节美颜功能, 请按如下步骤操作: 1: 在ByteBeautify->Manager->Core.h中填写ONLINE_LICENSE_KEY和ONLINE_LICENSE_SECRET 2: 打开Podfile中 pod 'bytedEffect' 注释 -3: 在iOS->APIExample->ByteEffectLib目录下添加BytedEffectSDK文件夹 -4: 在iOS->APIExample->ByteEffectLib目录下添加Resource文件夹 -5: 在iOS->APIExample->ByteEffectLib目录下添加libeffect-sdk.a库 -6: 执行pod install -7: 重新运行项目查看效果"; +3: 在iOS->APIExample目录下创建ByteEffectLib文件夹 +4: 在iOS->APIExample->ByteEffectLib目录下添加BytedEffectSDK文件夹 +5: 在iOS->APIExample->ByteEffectLib目录下添加Resource文件夹 +6: 在iOS->APIExample->ByteEffectLib目录下添加libeffect-sdk.a库 +7: 执行pod install +8: 重新运行项目查看效果"; + +"8ag-bw-I0V.normalTitle" = "美颜"; +"dow-FW-rpo.normalTitle" = "美妆"; +"dSm-Zl-ccL.normalTitle" = "贴纸"; +"qKk-jv-oyk.normalTitle" = "滤镜"; diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Base.lproj/FUBeautify.storyboard index f8dff7522..c942c1207 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Base.lproj/FUBeautify.storyboard +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Base.lproj/FUBeautify.storyboard [storyboard XML hunks unrecoverable: markup stripped]
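Each of these beauty cases guards its vendor-SDK calls behind __has_include so the project still builds when the optional library folder is missing; that is also why the localized tips above walk through creating the library directory before pod install. A Swift sketch of the equivalent guard, assuming a module named FURenderKit (any optionally linked SDK works the same way):

#if canImport(FURenderKit)
import FURenderKit
// real SDK calls can live here
func beautySDKAvailable() -> Bool { true }
#else
// fall back gracefully; the tips label explains the setup steps
func beautySDKAvailable() -> Bool { false }
#endif
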
setSticker:@"fashi"]; - // set up local video to render your local camera preview AgoraRtcVideoCanvas *videoCanvas = [AgoraRtcVideoCanvas new]; videoCanvas.uid = 0; @@ -80,6 +77,26 @@ - (void) initSDK { }]; }]; } +- (IBAction)onTapCameraSwitch:(id)sender { + [self.rtcEngineKit switchCamera]; +} + +- (IBAction)onTapBeautyButton:(UIButton *)sender { + [sender setSelected:!sender.isSelected]; + [self.videoFilter setBuauty:sender.isSelected]; +} +- (IBAction)onTapMakeupButton:(UIButton *)sender { + [sender setSelected:!sender.isSelected]; + [self.videoFilter setMakeup:sender.isSelected]; +} +- (IBAction)onTapStickerButton:(UIButton *)sender { + [sender setSelected:!sender.isSelected]; + [self.videoFilter setSticker:sender.isSelected]; +} +- (IBAction)onTapFilterButton:(UIButton *)sender { + [sender setSelected:!sender.isSelected]; + [self.videoFilter setFilter:sender.isSelected]; +} #pragma mark - VideoFrameDelegate - (BOOL)onCaptureVideoFrame:(AgoraOutputVideoFrame *)videoFrame { @@ -96,7 +113,7 @@ - (AgoraVideoFrameProcessMode)getVideoFrameProcessMode{ } - (BOOL)getMirrorApplied{ - return NO; + return YES; } - (BOOL)getRotationApplied { diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.h index f458e23c5..0f6908032 100755 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.h +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.h @@ -39,7 +39,9 @@ /// 更新美颜磨皮效果(根据人脸检测置信度设置不同磨皮效果) - (void)updateBeautyBlurEffect; -/// 设置贴纸 -- (void)setSticker: (NSString *)stickerName; +- (void)setBuauty: (BOOL)isSelected; +- (void)setMakeup: (BOOL)isSelected; +- (void)setSticker: (BOOL)isSelected; +- (void)setFilter: (BOOL)isSelected; @end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.m index 782c62d25..827abb55f 100755 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.m +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Manager/FUManager.m @@ -66,18 +66,6 @@ - (instancetype)init NSString *bodyAIPath = [bundle pathForResource:@"model/ai_human_processor" ofType:@"bundle"];//[[NSBundle mainBundle] pathForResource:@"ai_human_processor" ofType:@"bundle"]; [FUAIKit loadAIModeWithAIType:FUAITYPE_HUMAN_PROCESSOR dataPath:bodyAIPath]; - // 加载默认美颜效果 - NSString *beautyPath = [bundle pathForResource:@"graphics/face_beautification" ofType:@"bundle"];//[[NSBundle mainBundle] pathForResource:@"face_beautification" ofType:@"bundle"]; - FUBeauty *beauty = [[FUBeauty alloc] initWithPath:beautyPath name:@"FUBeauty"]; - // 默认均匀磨皮 - beauty.heavyBlur = 0; - beauty.blurType = 3; - // 默认自定义脸型 - beauty.faceShape = 4; - beauty.colorLevel = 0.8; - beauty.redLevel = 0.8; - [FURenderKit shareRenderKit].beauty = beauty; - CFAbsoluteTime endTime = (CFAbsoluteTimeGetCurrent() - startTime); NSString *path = [bundle pathForResource:@"graphics/tongue" ofType:@"bundle"];//[[NSBundle mainBundle] pathForResource:@"tongue" ofType:@"bundle"]; [FUAIKit loadTongueMode:path]; @@ -108,13 +96,75 @@ - (void)destoryItems { #endif } +- (void)setBuauty: (BOOL)isSelected { +#if __has_include() + if (isSelected) { + NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"]; + NSString *beautyPath = [bundle 
pathForResource:@"graphics/face_beautification" ofType:@"bundle"]; + FUBeauty *beauty = [[FUBeauty alloc] initWithPath:beautyPath name:@"FUBeauty"]; + // even skin smoothing by default + beauty.heavyBlur = 0; + beauty.blurType = 3; + [FURenderKit shareRenderKit].beauty = beauty; + } else { + [FURenderKit shareRenderKit].beauty = nil; + } +#endif +} +- (void)setMakeup: (BOOL)isSelected { +#if __has_include(<FURenderKit/FURenderKit.h>) + if (isSelected) { + NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"]; + NSString *beautyPath = [bundle pathForResource:@"graphics/face_makeup" ofType:@"bundle"]; + FUMakeup *makeup = [[FUMakeup alloc] initWithPath:beautyPath name:@"face_makeup"]; + makeup.isMakeupOn = YES; + [FURenderKit setLogLevel:FU_LOG_LEVEL_DEBUG]; + + [FURenderKit shareRenderKit].makeup = makeup; + [FURenderKit shareRenderKit].makeup.enable = isSelected; + + NSString *makeupPath = [bundle pathForResource:@"美妆/ziyun" ofType:@"bundle"]; + FUItem *makeupItem = [[FUItem alloc] initWithPath:makeupPath name:@"ziyun"]; + [makeup updateMakeupPackage:makeupItem needCleanSubItem:NO]; + makeup.intensity = 0.9; + } else { + [FURenderKit shareRenderKit].makeup.enable = NO; + [FURenderKit shareRenderKit].makeup = nil; + } +#endif +} +- (void)setSticker: (BOOL)isSelected { +#if __has_include(<FURenderKit/FURenderKit.h>) + if (isSelected) { + [self setStickerPath:@"DaisyPig"]; + } else { + [[FURenderKit shareRenderKit].stickerContainer removeAllSticks]; + } +#endif +} +- (void)setFilter: (BOOL)isSelected { +#if __has_include(<FURenderKit/FURenderKit.h>) + if (isSelected) { + NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"]; + NSString *beautyPath = [bundle pathForResource:@"graphics/face_beautification" ofType:@"bundle"]; + FUBeauty *beauty = [[FUBeauty alloc] initWithPath:beautyPath name:@"FUBeauty"]; + beauty.filterName = FUFilterMiTao1; + beauty.filterLevel = 0.8; + [FURenderKit shareRenderKit].beauty = beauty; + } else { + [FURenderKit shareRenderKit].beauty = nil; + } +#endif +} + + - (void)onCameraChange { #if __has_include(<FURenderKit/FURenderKit.h>) [FUAIKit resetTrackedResult]; #endif } -- (void)setSticker: (NSString *)stickerName { +- (void)setStickerPath: (NSString *)stickerName { NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"]; NSString *path = [bundle pathForResource:[NSString stringWithFormat:@"贴纸/%@", stickerName] ofType:@"bundle"]; if (!path) { diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/zh-Hans.lproj/FUBeautify.strings b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/zh-Hans.lproj/FUBeautify.strings index f07cd2e05..242588805 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/zh-Hans.lproj/FUBeautify.strings +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/zh-Hans.lproj/FUBeautify.strings @@ -23,8 +23,14 @@ "j6s-Pm-fSS.text" = "您还没有开启相芯美颜功能, 请按如下步骤操作: 1: 在FUBeautify->Manager->authpack中替换license 2: 打开Podfile中 pod 'fuLib' 注释 -3: 在iOS->APIExample->FULib目录下添加FURenderKit.framework -4: 在iOS->APIExample->FULib目录下添加Resources资源文件夹 -5: 在iOS->APIExample->FULib目录下添加Resource->贴纸文件夹 -6: 执行pod install -7: 重新运行项目查看效果"; +3: 在iOS->APIExample目录下创建FULib文件夹 +4: 在iOS->APIExample->FULib目录下添加FURenderKit.framework +5: 在iOS->APIExample->FULib目录下添加Resources资源文件夹 +6: 在iOS->APIExample->FULib目录下添加Resource->贴纸文件夹 +7: 执行pod install +8: 重新运行项目查看效果"; + +"QZu-iN-Fi6.normalTitle" = "美颜"; +"KHn-B1-epr.normalTitle" = "美妆"; +"aoR-43-iFs.normalTitle" = "贴纸"; +"UYi-3l-nYz.normalTitle" = "滤镜";
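A sketch of driving FUManager's new BOOL-based switches from Swift; the bridged names below are assumptions derived from the Obj-C declarations above, and the comments describe what each branch loads:

let manager = FUManager.shareManager()
manager.setBuauty(true)   // face_beautification bundle: smoothing, blur type 3
manager.setMakeup(true)   // face_makeup bundle with the "ziyun" package at 0.9 intensity
manager.setSticker(true)  // loads the "DaisyPig" sticker bundle
manager.setFilter(false)  // clears the FUFilterMiTao1 color filter
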
diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Base.lproj/SenseBeautify.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Base.lproj/SenseBeautify.storyboard index 943b2bf40..61348e6c8 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Base.lproj/SenseBeautify.storyboard +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Base.lproj/SenseBeautify.storyboard [storyboard XML hunks unrecoverable: markup stripped] diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.h index ab898f7a9..c757c71b9 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.h +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.h @@ -13,6 +13,11 @@ NS_ASSUME_NONNULL_BEGIN @interface VideoProcessingManager : NSObject +- (void)setBuauty: (BOOL)isSelected; +- (void)setMakeup: (BOOL)isSelected; +- (void)setSticker: (BOOL)isSelected; +- (void)setFilter: (BOOL)isSelected; + - (CVPixelBufferRef)videoProcessHandler:(CVPixelBufferRef)pixelBuffer; @end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.m index 337949054..26ee21b79 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.m +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/Manager/VideoProcessingManager.m @@ -26,6 +26,7 @@ @interface VideoProcessingManager () @property (nonatomic) dispatch_queue_t renderQueue; /// sticker id @property (nonatomic, assign) int stickerId; +@property (nonatomic, assign) int filterId; @end @@ -43,18 +44,78 @@ - (instancetype)init { [self.effectsProcess setModelPath:[bundle pathForResource:@"model" ofType:@"bundle"]]; [EAGLContext setCurrentContext:self.glContext]; self.effectsProcess.detectConfig = ST_MOBILE_FACE_DETECT; - [self.effectsProcess setBeautyParam:EFFECT_BEAUTY_PARAM_ENABLE_WHITEN_SKIN_MASK andVal:0.7]; - [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_SHRINK_FACE value:0.8]; - [self.effectsProcess setEffectType:EFFECT_BEAUTY_BASE_WHITTEN value:0.6]; - [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_ENLARGE_EYE value:1.0]; - [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_ROUND_EYE value:1.0]; - [self.effectsProcess setEffectType:EFFECT_BEAUTY_PLASTIC_OPEN_CANTHUS value:0.7]; + #endif }); } return self; } +- (void)setBuauty: (BOOL)isSelected { +#if __has_include("st_mobile_common.h") + if (isSelected) { + [self.effectsProcess setBeautyParam:EFFECT_BEAUTY_PARAM_ENABLE_WHITEN_SKIN_MASK andVal:0.7]; + [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_SHRINK_FACE value:0.8]; + [self.effectsProcess setEffectType:EFFECT_BEAUTY_BASE_WHITTEN value:0.6]; + [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_ENLARGE_EYE value:1.0]; + [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_ROUND_EYE value:1.0]; + [self.effectsProcess 
setEffectType:EFFECT_BEAUTY_PLASTIC_OPEN_CANTHUS value:0.7]; + } else { + [self.effectsProcess setBeautyParam:EFFECT_BEAUTY_PARAM_ENABLE_WHITEN_SKIN_MASK andVal:0]; + [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_SHRINK_FACE value:0]; + [self.effectsProcess setEffectType:EFFECT_BEAUTY_BASE_WHITTEN value:0]; + [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_ENLARGE_EYE value:0]; + [self.effectsProcess setEffectType:EFFECT_BEAUTY_RESHAPE_ROUND_EYE value:0]; + [self.effectsProcess setEffectType:EFFECT_BEAUTY_PLASTIC_OPEN_CANTHUS value:0]; + } +#endif +} +- (void)setMakeup: (BOOL)isSelected { +#if __has_include("st_mobile_common.h") + if (isSelected) { + NSBundle *bundle = [BundleUtil bundleWithBundleName:@"SenseLib" podName:@"senseLib"]; + NSString *path = [bundle pathForResource:@"qise.zip" ofType:nil]; + __weak VideoProcessingManager *weakself = self; + [self.effectsProcess addStickerWithPath:path callBack:^(st_result_t state, int sticker, uint64_t action) { + [weakself.effectsProcess setPackageId:sticker groupType:EFFECT_BEAUTY_GROUP_MAKEUP strength:0.5]; + weakself.stickerId = sticker; + }]; + } else { + [self.effectsProcess removeSticker:self.stickerId]; + self.stickerId = 0; + } +#endif +} +- (void)setSticker: (BOOL)isSelected { +#if __has_include("st_mobile_common.h") + if (isSelected) { + NSBundle *bundle = [BundleUtil bundleWithBundleName:@"SenseLib" podName:@"senseLib"]; + NSString *path = [bundle pathForResource:@"lianxingface.zip" ofType:nil]; + [self.effectsProcess setStickerWithPath:path callBack:^(st_result_t state, int stickerId, uint64_t action) { + + }]; + } else { + [self.effectsProcess cleareStickers]; + } +#endif +} +- (void)setFilter: (BOOL)isSelected { +#if __has_include("st_mobile_common.h") + if (isSelected) { + NSBundle *bundle = [BundleUtil bundleWithBundleName:@"SenseLib" podName:@"senseLib"]; + NSString *path = [bundle pathForResource:@"qise.zip" ofType:nil]; + __weak VideoProcessingManager *weakself = self; + [self.effectsProcess addStickerWithPath:path callBack:^(st_result_t state, int sticker, uint64_t action) { + [weakself.effectsProcess setPackageId:sticker groupType:EFFECT_BEAUTY_GROUP_FILTER strength:0.5]; + weakself.filterId = sticker; + }]; + } else { + [self.effectsProcess removeSticker:self.filterId]; + self.filterId = 0; + } +#endif +} + - (CVPixelBufferRef)videoProcessHandler:(CVPixelBufferRef)pixelBuffer { if (!pixelBuffer) return pixelBuffer; @@ -69,6 +130,19 @@ - (CVPixelBufferRef)videoProcessHandler:(CVPixelBufferRef)pixelBuffer { CVPixelBufferLockBaseAddress(pixelBuffer, 0); int width = (int)CVPixelBufferGetWidth(pixelBuffer); int heigh = (int)CVPixelBufferGetHeight(pixelBuffer); + if (_outTexture) { + int _cacheW = (int)CVPixelBufferGetWidth(_outputPixelBuffer); + int _cacheH = (int)CVPixelBufferGetHeight(_outputPixelBuffer); + if (_cacheH != heigh || _cacheW != width) { + GLuint testTexture = 0; +#if __has_include("st_mobile_common.h") + [self.effectsProcess deleteTexture:&testTexture pixelBuffer:&_outputPixelBuffer cvTexture:&_outputCVTexture]; +#endif + _outTexture = 0; + _outputPixelBuffer = NULL; + _outputCVTexture = NULL; + } + } if(!_outTexture){ #if __has_include("st_mobile_common.h") [self.effectsProcess createGLObjectWith:width diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/SenseBeautifyVC.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/SenseBeautifyVC.m index ad7531c5c..9ad4b0794 100644 --- 
a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/SenseBeautifyVC.m +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/SenseBeautifyVC.m @@ -16,7 +16,7 @@ @interface SenseBeautifyVC () @property (weak, nonatomic) IBOutlet UILabel *tipsLabel; -@property (weak, nonatomic) IBOutlet UIStackView *container; +@property (weak, nonatomic) IBOutlet UIView *container; @property (weak, nonatomic) IBOutlet UIView *localVideo; @property (weak, nonatomic) IBOutlet UIView *remoteVideo; @@ -97,6 +97,26 @@ - (void) initSDK { }]; } +- (IBAction)onTapSwitchCameraButton:(id)sender { + [self.rtcEngineKit switchCamera]; +} +- (IBAction)onTapBeautyButton:(UIButton *)sender { + [sender setSelected:!sender.isSelected]; + [self.videoProcessing setBuauty:sender.isSelected]; +} +- (IBAction)onTapMakeupButton:(UIButton *)sender { + [sender setSelected:!sender.isSelected]; + [self.videoProcessing setMakeup:sender.isSelected]; +} +- (IBAction)onTapStickerButton:(UIButton *)sender { + [sender setSelected:!sender.isSelected]; + [self.videoProcessing setSticker:sender.isSelected]; +} +- (IBAction)onTapFilterButton:(UIButton *)sender { + [sender setSelected:!sender.isSelected]; + [self.videoProcessing setFilter:sender.isSelected]; +} + #pragma mark - VideoFrameDelegate - (BOOL)onCaptureVideoFrame:(AgoraOutputVideoFrame *)videoFrame { CVPixelBufferRef pixelBuffer = [self.videoProcessing videoProcessHandler:videoFrame.pixelBuffer]; diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/zh-Hans.lproj/SenseBeautify.strings b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/zh-Hans.lproj/SenseBeautify.strings index 446492001..298b5f9f6 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/zh-Hans.lproj/SenseBeautify.strings +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/SenseBeautify/zh-Hans.lproj/SenseBeautify.strings @@ -24,7 +24,14 @@ 1: 在SenseBeautify->Manager->替换SENSEME.lic 2: 替换license绑定的Bundle identifier 3: 打开Podfile中 pod 'senseLib' 注释 -4: 在iOS->APIExample->SenseLib目录下添加remoteSourcesLib文件夹 -5: 在iOS->APIExample->SenseLib目录下添加st_mobile_sdk文件夹 -6: 执行pod install -7: 重新运行项目查看效果"; +4: 在iOS->APIExample目录下创建SenseLib文件夹 +5: 在iOS->APIExample->SenseLib目录下添加remoteSourcesLib文件夹 +6: 在iOS->APIExample->SenseLib目录下添加st_mobile_sdk文件夹 +7: 执行pod install +8: 重新运行项目查看效果"; + +"CrL-Yf-Cev.normalTitle" = "美颜"; +"3hp-ZM-MMW.normalTitle" = "美妆"; +"UdR-D4-uNu.normalTitle" = "贴纸"; +"K3f-4k-VQ1.normalTitle" = "滤镜"; + diff --git a/iOS/APIExample/APIExample/Examples/Advanced/VideoChat/VideoChat.swift b/iOS/APIExample/APIExample/Examples/Advanced/VideoChat/VideoChat.swift index a9fd84226..cfb388817 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/VideoChat/VideoChat.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/VideoChat/VideoChat.swift @@ -160,8 +160,8 @@ class VideoChatMain: BaseViewController { // when joining channel. 
The channel name and uid used to calculate // the token has to match the ones used for channel join let option = AgoraRtcChannelMediaOptions() - option.publishCameraTrack = true - option.publishMicrophoneTrack = true + option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster + option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster option.clientRoleType = GlobalSettings.shared.getUserRole() NetworkManager.shared.generateToken(channelName: channelName, success: { token in let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option) diff --git a/iOS/APIExample/APIExample/Examples/Advanced/VideoMetadata/VideoMetadata.swift index 6f482b2cb..d20445674 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/VideoMetadata/VideoMetadata.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/VideoMetadata/VideoMetadata.swift @@ -114,8 +114,8 @@ class VideoMetadataMain: BaseViewController { // when joining channel. The channel name and uid used to calculate // the token has to match the ones used for channel join let option = AgoraRtcChannelMediaOptions() - option.publishCameraTrack = true - option.publishMicrophoneTrack = true + option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster + option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster option.clientRoleType = GlobalSettings.shared.getUserRole() NetworkManager.shared.generateToken(channelName: channelName, success: { token in let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option) diff --git a/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/Base.lproj/VideoProcess.storyboard index 6c1c93ae6..3c367780c 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/Base.lproj/VideoProcess.storyboard +++ b/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/Base.lproj/VideoProcess.storyboard [storyboard XML hunks unrecoverable: markup stripped] diff --git a/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/VideoProcess.swift index d039ee95c..3813d9b74 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/VideoProcess.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/VideoProcess.swift @@ -78,6 +78,8 @@ class VideoProcessMain : BaseViewController config.logConfig = logConfig agoraKit = AgoraRtcEngineKit.sharedEngine(with: config, delegate: self) + // enable filters + agoraKit.enableExtension(withVendor: "agora_video_filters_clear_vision", extension: "clear_vision", enabled: true, sourceType: .primaryCamera) // Configuring Privatization Parameters Util.configPrivatization(agoraKit: agoraKit) // make myself a broadcaster
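With the clear_vision extension enabled above, the matching clear-vision option APIs can be toggled at runtime. A sketch under the assumption of the 4.x option types, using default option values (tune them per device):

let lowlight = AgoraLowlightEnhanceOptions()
agoraKit.setLowlightEnhanceOptions(true, options: lowlight)  // low-light enhancement
let denoiser = AgoraVideoDenoiserOptions()
agoraKit.setVideoDenoiserOptions(true, options: denoiser)    // video denoising
let color = AgoraColorEnhanceOptions()
agoraKit.setColorEnhanceOptions(true, options: color)        // color enhancement
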
@@ -112,8 +114,8 @@ class VideoProcessMain : BaseViewController // when joining channel. The channel name and uid used to calculate // the token has to match the ones used for channel join let option = AgoraRtcChannelMediaOptions() - option.publishCameraTrack = true - option.publishMicrophoneTrack = true + option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster + option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster option.clientRoleType = GlobalSettings.shared.getUserRole() NetworkManager.shared.generateToken(channelName: channelName, success: { token in let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option) @@ -249,11 +251,18 @@ class VideoProcessMain : BaseViewController break case 2: source.backgroundSourceType = .blur - source.blurDegree = .high; + source.blurDegree = .high break + + case 3: + let videoPath = Bundle.main.path(forResource: "sample", ofType: "mov") + source.backgroundSourceType = .video + source.source = videoPath + default: break } + source.backgroundSourceType = virtualBgSwitch.isOn ? source.backgroundSourceType : .none let result = agoraKit.enableVirtualBackground(virtualBgSwitch.isOn, backData: source, segData: AgoraSegmentationProperty()) print("result == \(result)") } @@ -304,6 +313,7 @@ extension VideoProcessMain: AgoraRtcEngineDelegate { // the view to be bound videoCanvas.view = remoteVideo.videoView videoCanvas.renderMode = .hidden + videoCanvas.enableAlphaMask = true agoraKit.setupRemoteVideo(videoCanvas) } diff --git a/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/zh-Hans.lproj/VideoProcess.strings b/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/zh-Hans.lproj/VideoProcess.strings index 196ec2fc8..33cc66c5e 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/zh-Hans.lproj/VideoProcess.strings +++ b/iOS/APIExample/APIExample/Examples/Advanced/VideoProcess/zh-Hans.lproj/VideoProcess.strings @@ -32,6 +32,8 @@ /* Class = "UISegmentedControl"; Qhf-Ob-NYA.segmentTitles[2] = "Blur"; ObjectID = "Qhf-Ob-NYA"; */ "Qhf-Ob-NYA.segmentTitles[2]" = "毛玻璃"; +"Qhf-Ob-NYA.segmentTitles[3]" = "视频"; + /* Class = "UILabel"; text = "Low light Enhancement"; ObjectID = "RiO-Eg-x0D"; */ "RiO-Eg-x0D.text" = "暗光增强"; diff --git a/iOS/APIExample/APIExample/Examples/Advanced/VoiceChanger/Base.lproj/VoiceChanger.storyboard index 8f241f996..b50768cc2 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/VoiceChanger/Base.lproj/VoiceChanger.storyboard +++ b/iOS/APIExample/APIExample/Examples/Advanced/VoiceChanger/Base.lproj/VoiceChanger.storyboard [storyboard XML hunks unrecoverable: markup stripped]
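The enableAlphaMask flag above is the receiving half of the new alpha pipeline: a sender publishes video that carries an alpha channel (for example the LocalVideoTranscoding case), and each receiver opts in per render canvas. A minimal receiver-side sketch, where remoteUid and remoteView are placeholders:

let canvas = AgoraRtcVideoCanvas()
canvas.uid = remoteUid          // assumed remote user id
canvas.view = remoteView        // assumed target UIView
canvas.renderMode = .hidden
canvas.enableAlphaMask = true   // honor the alpha mask carried in the stream
agoraKit.setupRemoteVideo(canvas)
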