Enable Audio method issue in Android Agora RTC SDK

Posted: 2019-06-27 05:43:42

I am using interactive video broadcasting in my app, and I am attaching the class in which I do the live streaming.

When I go back from the live-stream screen to the previous screen, I have an audio problem: I can still hear the host's voice. Previously I called the leave-channel method and destroyed the RTC engine object, but after implementing that, going back from the streaming class closed the screen for all users of the app because of the leave-channel call, so I removed it from my onDestroy method.

Now I use the disableAudio method to turn the audio off, but when I open the live-streaming class again the audio is not enabled; the enableAudio method does not work. I have also tried the muteLocalAudioStream method and the onUserMuteAudio callback of the RTC event handler.

I am also getting this error --

"LiveStreamingActivity 泄露了 IntentReceiver io.agora.rtc.internal.AudioRoutingController$HeadsetBroadcastReceiver@101a7a7 最初是在这里注册的。你错过了一个电话 取消注册接收器()? android.app.IntentReceiverLeaked:活动 com.allin.activities.home.homeActivities.LiveStreamingActivity 有 泄露的 IntentReceiver io.agora.rtc.internal.AudioRoutingController$HeadsetBroadcastReceiver@101a7a7 最初是在这里注册的。你错过了一个电话 取消注册接收器()?”

The receiver is registered inside the SDK, and the exception is thrown inside the SDK, which is a jar file that I cannot edit.

Please help me resolve this issue, because I have to publish the app on the Play Store.
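For reference, this is roughly the lifecycle behaviour I am trying to get: keep the engine and channel alive, stop hearing the host while the streaming screen is closed, and hear it again when it is reopened. A minimal sketch of that idea (an assumption on my side, not working code from my app), using muteAllRemoteAudioStreams instead of disableAudio/enableAudio and the same mRtcEngine field shown below:

    // Sketch only: silence remote audio for this user when leaving the screen,
    // without leaving the channel or destroying the engine.
    override fun onDestroy() {
        mRtcEngine?.muteAllRemoteAudioStreams(true)  // affects playback on this device only
        super.onDestroy()
    }

    override fun onResume() {
        super.onResume()
        mRtcEngine?.muteAllRemoteAudioStreams(false) // hear the host again
    }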

// First I tried this, but it automatically stops the streaming on the other users' devices.

    override fun onDestroy() {
        /* if (mRtcEngine != null) {
               leaveChannel()
               RtcEngine.destroy(mRtcEngine)
               mRtcEngine = null
           } */

        // Second, I tried disabling the audio so that the user will not hear the host's voice.
        if (mRtcEngine != null) {
            mRtcEngine!!.disableAudio()
        }
        super.onDestroy()
    }

// Then, when I come back to the live-streaming activity from the previous screen, everything is re-initialized, but no audio can be heard.

    override fun onResume() {
        super.onResume()
        Log.e("resume", "resume")
        if (mRtcEngine != null) {
            mRtcEngine!!.enableAudio()
            // mRtcEngine!!.resumeAudio()
        }
    }

The code I am using:

//agora rtc engine and handler initialization-----------------

private var mRtcEngine: RtcEngine? = null

private var mRtcEventHandler = object : IRtcEngineEventHandler() {

    @SuppressLint("LongLogTag")
    override fun onFirstRemoteVideoDecoded(uid: Int, width: Int, height: Int, elapsed: Int) {
    }

    override fun onUserOffline(uid: Int, reason: Int) {
        runOnUiThread {
            val a = reason // if login = 0 the user is offline
            try {
                if (mUid == uid) {
                    if (surfaceView?.parent != null)
                        (surfaceView?.parent as ViewGroup).removeAllViews()

                    if (mRtcEngine != null) {
                        leaveChannel()
                        RtcEngine.destroy(mRtcEngine)
                        mRtcEngine = null
                    }

                    setResult(IntentConstants.REQUEST_CODE_LIVE_STREAMING)
                    finish()
                }
            } catch (e: Exception) {
                e.printStackTrace()
            }
        }
    }

    override fun onUserMuteVideo(uid: Int, muted: Boolean) {
        runOnUiThread {
            // onRemoteUserVideoMuted(uid, muted);
            Log.e("video", "muted")
        }
    }

    override fun onAudioQuality(uid: Int, quality: Int, delay: Short, lost: Short) {
        super.onAudioQuality(uid, quality, delay, lost)
        Log.e("", "")
    }

    override fun onUserJoined(uid: Int, elapsed: Int) {
        // super.onUserJoined(uid, elapsed)
        mUid = uid
        runOnUiThread {
            try {
                setupRemoteVideo(mUid!!)
            } catch (e: Exception) {
                e.printStackTrace()
            }
        }
        Log.e("differnt_uid----", mUid.toString())
    }
}


private fun initAgoraEngineAndJoinChannel() {
    if (mRtcEngine == null) {
        initializeAgoraEngine()
        setupVideoProfile()
    }
}

// initializing rtc engine class
@Throws(Exception::class)
private fun initializeAgoraEngine() {
    try {
        val s = RtcEngine.getSdkVersion()
        mRtcEngine = RtcEngine.create(baseContext, AgoraConstants.APPLICATION_ID, mRtcEventHandler)
    } catch (e: Exception) {
        // Log.e(LOG_TAG, Log.getStackTraceString(e));
        throw RuntimeException("NEED TO check rtc sdk init fatal error\n" + Log.getStackTraceString(e))
    }
}



@Throws(Exception::class)
private fun setupVideoProfile() {
    // mRtcEngine?.muteAllRemoteAudiostreams(true)
    // mLogger.log("channelName  account = " + channelName + ",uid = " + 0);
    mRtcEngine?.enableVideo()
    // mRtcEngine.clearVideoCompositingLayout();
    mRtcEngine?.enableLocalVideo(false)
    mRtcEngine?.setEnableSpeakerphone(false)
    mRtcEngine?.muteLocalAudioStream(true)
    joinChannel()

    mRtcEngine?.setVideoProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING, true)
    mRtcEngine?.setChannelProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING)
    mRtcEngine?.setClientRole(Constants.CLIENT_ROLE_AUDIENCE, "")
    val speaker = mRtcEngine?.isSpeakerphoneEnabled
    val camerafocus = mRtcEngine?.isCameraAutoFocusFaceModeSupported

    Log.e("", "")
}


@Throws(Exception::class)
private fun setupRemoteVideo(uid: Int) {
    val container = findViewById<FrameLayout>(R.id.fl_video_container)

    if (container.childCount >= 1) {
        return
    }

    surfaceView = RtcEngine.CreateRendererView(baseContext)
    container.addView(surfaceView)
    mRtcEngine?.setupRemoteVideo(VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid))
    mRtcEngine?.setRemoteVideoStreamType(uid, 1)
    mRtcEngine?.setCameraAutoFocusFaceModeEnabled(false)
    mRtcEngine?.muteRemoteAudioStream(uid, false)
    mRtcEngine?.adjustPlaybackSignalVolume(0)
    // mRtcEngine.setVideoProfile(Constants.VIDEO_PROFILE_180P, false); // Earlier than 2.3.0
    surfaceView?.tag = uid // for mark purpose

    val audioManager: AudioManager =
        this@LiveStreamingActivity.getSystemService(Context.AUDIO_SERVICE) as AudioManager
    // audioManager.mode = AudioManager.MODE_IN_CALL
    val isConnected: Boolean = audioManager.isWiredHeadsetOn
    if (isConnected) {
        /* audioManager.isSpeakerphoneOn = false
           audioManager.isWiredHeadsetOn = true */
        mRtcEngine?.setEnableSpeakerphone(false)
        mRtcEngine?.setDefaultAudioRoutetoSpeakerphone(false)
        mRtcEngine?.setSpeakerphoneVolume(0)
        mRtcEngine?.enableInEarMonitoring(true)
        // Sets the in-ear monitoring volume to 50% of original volume.
        mRtcEngine?.setInEarMonitoringVolume(200)
        mRtcEngine?.adjustPlaybackSignalVolume(200)
    } else {
        /* audioManager.isSpeakerphoneOn = true
           audioManager.isWiredHeadsetOn = false */
        mRtcEngine?.setEnableSpeakerphone(true)
        mRtcEngine?.setDefaultAudioRoutetoSpeakerphone(true)
        mRtcEngine?.setSpeakerphoneVolume(50)
        mRtcEngine?.adjustPlaybackSignalVolume(50)
        mRtcEngine?.enableInEarMonitoring(false)
        // Sets the in-ear monitoring volume to 50% of original volume.
        mRtcEngine?.setInEarMonitoringVolume(0)
    }

    Log.e("", "")
}



@Throws(Exception::class)
private fun joinChannel() {
    mRtcEngine?.joinChannel(
        null,
        AgoraConstants.CHANNEL_NAME,
        "Extra Optional Data",
        0
    ) // if you do not specify the uid, we will generate the uid for you
}

@Throws(Exception::class)
private fun leaveChannel() {
    mRtcEngine!!.leaveChannel()
}

Answer 1:

I think you first want to move setupRemoteVideo into the onFirstRemoteVideoDecoded callback instead of the onUserJoined callback. Also, in the onDestroy callback you should call RtcEngine.destroy() rather than RtcEngine.destroy(mRtcEngine).
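A rough sketch of those two changes, reusing the fields and helpers from the question (mRtcEngine, mUid, setupRemoteVideo) and the commented-out onDestroy block with the destroy call corrected; treat it as an outline rather than a drop-in fix:

    // Inside the IRtcEngineEventHandler: set up the remote view once the first
    // remote video frame has been decoded, instead of in onUserJoined.
    override fun onFirstRemoteVideoDecoded(uid: Int, width: Int, height: Int, elapsed: Int) {
        mUid = uid
        runOnUiThread {
            try {
                setupRemoteVideo(uid)
            } catch (e: Exception) {
                e.printStackTrace()
            }
        }
    }

    // Inside the Activity: release the engine when the screen is finished.
    // RtcEngine.destroy() is static and takes no arguments.
    override fun onDestroy() {
        mRtcEngine?.leaveChannel()
        RtcEngine.destroy()
        mRtcEngine = null
        super.onDestroy()
    }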

