Android Accessibility 服务实时音频处理

Posted

技术标签:

【中文标题】Android Accessibility 服务实时音频处理【英文标题】:Android Accessibility service real time audio processing 【发布时间】:2020-04-13 08:49:51 【问题描述】:

谁能给我一个 Android Accessibility 服务实时处理通话音频的示例代码。我需要处理通话音频，但不知道如何实现这一点。请分享您对此的看法。

请在下面找到清单:

<!-- App manifest for an accessibility-service-based call-audio recorder.
     NOTE(review): several of the flags below are security-relevant
     (usesCleartextTraffic, allowBackup, requestLegacyExternalStorage) —
     confirm they are intentional before shipping. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="mycalltest">

    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
    <!-- Storage permissions: used together with requestLegacyExternalStorage
         below to write recordings to shared storage on Android 10. -->
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
    <!-- Required for MediaRecorder with a microphone/voice audio source. -->
    <uses-permission android:name="android.permission.RECORD_AUDIO" />
    <!-- Required on API 28+ because the service calls startForeground(). -->
    <uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
    <uses-permission android:name="android.permission.READ_CONTACTS" />
    <uses-permission android:name="android.permission.WRITE_CONTACTS" />
    <uses-permission android:name="android.permission.BLUETOOTH" />
    <uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
    <uses-permission android:name="android.permission.READ_PHONE_STATE" />
    <!-- Needed for AudioManager.setMode() in the service. -->
    <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
    <uses-permission android:name="android.permission.READ_CALL_LOG" />
    <uses-permission android:name="android.permission.MANAGE_OWN_CALLS" />
    <uses-permission android:name="android.permission.WAKE_LOCK" />

    <application
        android:allowBackup="true"
        android:icon="@mipmap/ic_launcher"
        android:label="@string/app_name"
        android:requestLegacyExternalStorage="true"
        android:roundIcon="@mipmap/ic_launcher_round"
        android:supportsRtl="true"
        android:theme="@style/AppTheme"
        android:usesCleartextTraffic="true">

        <!-- The accessibility service. BIND_ACCESSIBILITY_SERVICE ensures only
             the system can bind to it; the meta-data points at the service's
             configuration XML (event types, flags, etc.). -->
        <service
            android:name=".MyAccessibilityService"
            android:label="@string/accessibility_service_label"
            android:permission="android.permission.BIND_ACCESSIBILITY_SERVICE">
            <intent-filter>
                <action android:name="android.accessibilityservice.AccessibilityService" />
            </intent-filter>

            <meta-data
                android:name="android.accessibilityservice"
                android:resource="@xml/accessibility_service_config" />
        </service>


        <!-- Launcher activity; also the target of the service's
             foreground-notification PendingIntent. -->
        <activity android:name=".MainActivity">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />

                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>

</manifest>

请在下面找到可访问性 xml:

<!-- Accessibility service configuration (referenced from the manifest's
     meta-data). Subscribes to window content/state change events with
     generic feedback; canRetrieveWindowContent + flagRetrieveInteractiveWindows
     let the service inspect on-screen window content. The 100 ms
     notificationTimeout throttles how often events are delivered. -->
<accessibility-service xmlns:android="http://schemas.android.com/apk/res/android"
    android:description="@string/accessibility_service_description"
    android:accessibilityEventTypes="typeWindowContentChanged|typeWindowStateChanged"
    android:accessibilityFeedbackType="feedbackGeneric"
    android:notificationTimeout="100"
    android:accessibilityFlags="flagReportViewIds|flagRetrieveInteractiveWindows"
    android:canRetrieveWindowContent="true"
    />

PFB 服务:

import android.accessibilityservice.AccessibilityService;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;

import android.media.MediaRecorder;
import android.os.Build;

import android.util.Log;
import android.view.accessibility.AccessibilityEvent;

import androidx.annotation.RequiresApi;
import androidx.core.app.NotificationCompat;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;


public class MyAccessibilityService extends AccessibilityService 
    private static final String TAG="MyAccessibilityService";

    private Context context;
    public static final String CHANNEL_ID = "MyAccessibilityService";

    MediaRecorder mRecorder;
    private boolean isStarted;
    byte buffer[] = new byte[8916];

    private MediaSaver mediaSaver;
    @RequiresApi(api = Build.VERSION_CODES.M)
    @Override
    public void onCreate() 
        super.onCreate();

        Log.d(TAG,"MyAccessibilityService Salesken Started ...");
        context=this;

        startForegroundService();
    




    private void startForegroundService() 
        createNotificationChannel();
        Intent notificationIntent = new Intent(this, MainActivity.class);
        PendingIntent pendingIntent = PendingIntent.getActivity(this,
                0, notificationIntent, 0);
        Notification notification = new NotificationCompat.Builder(this, CHANNEL_ID)
                .setContentTitle("recording Service")
                .setContentText("Start")
                .setSmallIcon(R.drawable.ic_launcher_background)
                .setContentIntent(pendingIntent)
                .build();
        startForeground(1, notification);

    
    private void createNotificationChannel() 
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) 
            NotificationChannel serviceChannel = new NotificationChannel(
                    CHANNEL_ID,
                    "Recording Service Channel",
                    NotificationManager.IMPORTANCE_DEFAULT
            );
            NotificationManager manager = getSystemService(NotificationManager.class);
            manager.createNotificationChannel(serviceChannel);
        
    
    @Override
    public int onStartCommand(Intent intent, int flags, int startId) 
        if (intent != null) 
            String action = intent.getAction();

            switch (action) 
                case SaleskenIntent.START_RECORDING:
                    Log.d(TAG,"Start Recording");

                    //startRecorder();
                    String contact = intent.getStringExtra("contact");
                    startRecording(contact);

                    break;
                case SaleskenIntent.STOP_RECORDING:

                    Log.d(TAG,"Stop Recording");

                    stopRecording();
                    break;
            
        
        return super.onStartCommand(intent, flags, startId);
    





    @Override
    public void onAccessibilityEvent(AccessibilityEvent event) 

    

    @Override
    public void onInterrupt() 

    




    @Override
    public void onDestroy() 
        super.onDestroy();

    

        public void startRecording(String contact) 
            try 

                String timestamp = new SimpleDateFormat("dd-MM-yyyy-hh-mm-ss", Locale.US).format(new Date());
                String fileName =timestamp+".3gp";
                mediaSaver = new MediaSaver(context).setParentDirectoryName("Accessibility").
                        setFileNameKeepOriginalExtension(fileName).
                        setExternal(MediaSaver.isExternalStorageReadable());
                //String selectedPath = Environment.getExternalStorageDirectory() + "/Testing";
                //String selectedPath = Environment.getExternalStorageDirectory().getAbsolutePath() +"/Android/data/" + packageName + "/system_sound";



                mRecorder = new MediaRecorder();
                mRecorder.reset();

                //android.permission.MODIFY_AUDIO_SETTINGS
                AudioManager mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE); //turn on speaker
                if (mAudioManager != null) 
                    mAudioManager.setMode(AudioManager.MODE_IN_COMMUNICATION); //MODE_IN_COMMUNICATION | MODE_IN_CALL
                    // mAudioManager.setSpeakerphoneOn(true);
                    // mAudioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, mAudioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL), 0); // increase Volume
                    hasWiredHeadset(mAudioManager);
                

                //android.permission.RECORD_AUDIO
                String manufacturer = Build.MANUFACTURER;
                Log.d(TAG, manufacturer);
           /* if (manufacturer.toLowerCase().contains("samsung")) 
                mRecorder.setAudiosource(MediaRecorder.AudioSource.VOICE_COMMUNICATION);
             else 
                mRecorder.setAudioSource(MediaRecorder.AudioSource.VOICE_CALL);
            */
            /*
            VOICE_CALL is the actual call data being sent in a call, up and down (so your side and their side). VOICE_COMMUNICATION is just the microphone, but with codecs and echo cancellation turned on for good voice quality.
            */
                mRecorder.setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION); //MIC | VOICE_COMMUNICATION (Android 10 release) | VOICE_RECOGNITION | (VOICE_CALL = VOICE_UPLINK + VOICE_DOWNLINK)
                mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); //THREE_GPP | MPEG_4
                mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB); //AMR_NB | AAC
                mRecorder.setOutputFile(mediaSaver.pathFile().getAbsolutePath());
                mRecorder.prepare();
                mRecorder.start();
                isStarted = true;
             catch (IOException e) 
                e.printStackTrace();
            
        

        public void stopRecording() 
            if (isStarted && mRecorder != null) 
                mRecorder.stop();
                mRecorder.reset(); // You can reuse the object by going back to setAudioSource() step
                mRecorder.release();
                mRecorder = null;
                isStarted = false;
            
        

        // To detect the connected other device like headphone, wifi headphone, usb headphone etc
        private boolean hasWiredHeadset(AudioManager mAudioManager) 
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) 
                return mAudioManager.isWiredHeadsetOn();
             else 
                final AudioDeviceInfo[] devices = mAudioManager.getDevices(AudioManager.GET_DEVICES_ALL);
                for (AudioDeviceInfo device : devices) 
                    final int type = device.getType();
                    if (type == AudioDeviceInfo.TYPE_WIRED_HEADSET) 
                        Log.d(TAG, "hasWiredHeadset: found wired headset");
                        return true;
                     else if (type == AudioDeviceInfo.TYPE_USB_DEVICE) 
                        Log.d(TAG, "hasWiredHeadset: found USB audio device");
                        return true;
                     else if (type == AudioDeviceInfo.TYPE_TELEPHONY) 
                        Log.d(TAG, "hasWiredHeadset: found audio signals over the telephony network");
                        return true;
                    
                
                return false;
            
        




    

【问题讨论】:

在任何 Android 9 手机上都没有通话录音功能。任何应用都无法正确记录通话。 我想知道在通话中实现实时音频处理还有什么其他方法。 我不知道如何录音。我已经尝试了很多次，但没有成功。但是这个名为 Call Recorder - Cube ACR 的应用程序是怎么做到的呢？ stackoverflow.com/questions/58230181/… 【参考方案1】:

您需要更改音源输入。

【讨论】:

你能告诉我我该怎么做吗?

以上是关于Android Accessibility 服务实时音频处理的主要内容,如果未能解决你的问题,请参考以下文章

Android Accessibility无障碍服务安全性浅析

Android Accessibility 服务实时音频处理

抢红包软件背后的 Accessibility 服务及启动原理

Accessibility Service详解

Accessibility Service详解

android 5的accessibility 怎么做