Android Accessibility service real-time audio processing

Submitted by 倾然丶 夕夏残阳落幕 on 2020-04-24 07:54:25

Question


Can someone please provide sample code for real-time audio processing in an Android accessibility service? I need to process the call audio, but I don't know how to achieve this. Please share your thoughts.

Please find the manifest below:

<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="mycalltest">

    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
    <uses-permission android:name="android.permission.RECORD_AUDIO" />
    <uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
    <uses-permission android:name="android.permission.READ_CONTACTS" />
    <uses-permission android:name="android.permission.WRITE_CONTACTS" />
    <uses-permission android:name="android.permission.BLUETOOTH" />
    <uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
    <uses-permission android:name="android.permission.READ_PHONE_STATE" />
    <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
    <uses-permission android:name="android.permission.READ_CALL_LOG" />
    <uses-permission android:name="android.permission.MANAGE_OWN_CALLS" />
    <uses-permission android:name="android.permission.WAKE_LOCK" />

    <application
        android:allowBackup="true"
        android:icon="@mipmap/ic_launcher"
        android:label="@string/app_name"
        android:requestLegacyExternalStorage="true"
        android:roundIcon="@mipmap/ic_launcher_round"
        android:supportsRtl="true"
        android:theme="@style/AppTheme"
        android:usesCleartextTraffic="true">

        <service
            android:name=".MyAccessibilityService"
            android:label="@string/accessibility_service_label"
            android:permission="android.permission.BIND_ACCESSIBILITY_SERVICE">
            <intent-filter>
                <action android:name="android.accessibilityservice.AccessibilityService" />
            </intent-filter>

            <meta-data
                android:name="android.accessibilityservice"
                android:resource="@xml/accessibility_service_config" />
        </service>


        <activity android:name=".MainActivity">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />

                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>

</manifest>
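
Note that on API 23+ the dangerous permissions declared above (RECORD_AUDIO, READ_PHONE_STATE, storage, contacts) must also be granted at runtime; declaring them in the manifest alone is not enough. A minimal sketch of such a request inside MainActivity (the request code and the exact permission list here are illustrative):

import android.Manifest;
import android.content.pm.PackageManager;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;

// Inside MainActivity; REQ_RECORD is a hypothetical request code.
private static final int REQ_RECORD = 42;

private void ensureRecordPermission() {
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this,
                new String[]{Manifest.permission.RECORD_AUDIO,
                             Manifest.permission.READ_PHONE_STATE},
                REQ_RECORD);
    }
}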

Please find the accessibility service XML below:

<accessibility-service xmlns:android="http://schemas.android.com/apk/res/android"
    android:description="@string/accessibility_service_description"
    android:accessibilityEventTypes="typeWindowContentChanged|typeWindowStateChanged"
    android:accessibilityFeedbackType="feedbackGeneric"
    android:notificationTimeout="100"
    android:accessibilityFlags="flagReportViewIds|flagRetrieveInteractiveWindows"
    android:canRetrieveWindowContent="true"
    />
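
An accessibility service cannot be enabled programmatically; the user has to turn it on under Settings → Accessibility. A common convenience is to deep-link the user to that screen, for example (a sketch, called from an activity context):

import android.content.Intent;
import android.provider.Settings;

// Opens the system Accessibility settings screen so the user can
// enable MyAccessibilityService manually.
Intent intent = new Intent(Settings.ACTION_ACCESSIBILITY_SETTINGS);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(intent);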

Please find the service below:

import android.accessibilityservice.AccessibilityService;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;

import android.media.MediaRecorder;
import android.os.Build;

import android.util.Log;
import android.view.accessibility.AccessibilityEvent;

import androidx.annotation.RequiresApi;
import androidx.core.app.NotificationCompat;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;


public class MyAccessibilityService extends AccessibilityService {
    private static final String TAG="MyAccessibilityService";

    private Context context;
    public static final String CHANNEL_ID = "MyAccessibilityService";

    private MediaRecorder mRecorder;
    private boolean isStarted;
    private byte[] buffer = new byte[8916]; // declared but never used in this service

    private MediaSaver mediaSaver; // the app's own file-path helper class
    @RequiresApi(api = Build.VERSION_CODES.M)
    @Override
    public void onCreate() {
        super.onCreate();

        Log.d(TAG,"MyAccessibilityService Salesken Started ...");
        context=this;

        startForegroundService();
    }




    private void startForegroundService() {
        createNotificationChannel();
        Intent notificationIntent = new Intent(this, MainActivity.class);
        // FLAG_IMMUTABLE (available since API 23) is mandatory when targeting API 31+.
        PendingIntent pendingIntent = PendingIntent.getActivity(this,
                0, notificationIntent, PendingIntent.FLAG_IMMUTABLE);
        Notification notification = new NotificationCompat.Builder(this, CHANNEL_ID)
                .setContentTitle("recording Service")
                .setContentText("Start")
                .setSmallIcon(R.drawable.ic_launcher_background)
                .setContentIntent(pendingIntent)
                .build();
        startForeground(1, notification);

    }
    private void createNotificationChannel() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            NotificationChannel serviceChannel = new NotificationChannel(
                    CHANNEL_ID,
                    "Recording Service Channel",
                    NotificationManager.IMPORTANCE_DEFAULT
            );
            NotificationManager manager = getSystemService(NotificationManager.class);
            manager.createNotificationChannel(serviceChannel);
        }
    }
    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        if (intent != null) {
            // SaleskenIntent holds this app's own action-string constants;
            // guard against a null action to avoid an NPE in the switch.
            String action = intent.getAction();
            if (action != null) {
                switch (action) {
                    case SaleskenIntent.START_RECORDING:
                        Log.d(TAG, "Start Recording");
                        String contact = intent.getStringExtra("contact");
                        startRecording(contact);
                        break;
                    case SaleskenIntent.STOP_RECORDING:
                        Log.d(TAG, "Stop Recording");
                        stopRecording();
                        break;
                }
            }
        }
        return super.onStartCommand(intent, flags, startId);
    }





    @Override
    public void onAccessibilityEvent(AccessibilityEvent event) {
        // Required override; this service does not act on accessibility events.
    }

    @Override
    public void onInterrupt() {
        // Required override; nothing to interrupt here.
    }




    @Override
    public void onDestroy() {
        stopRecording(); // release the MediaRecorder if the service is killed mid-recording
        super.onDestroy();
    }

    public void startRecording(String contact) {
        try {
            // Note: the contact parameter is currently unused; the output
            // file name is derived from a timestamp only.
            String timestamp = new SimpleDateFormat("dd-MM-yyyy-hh-mm-ss", Locale.US).format(new Date());
            String fileName = timestamp + ".3gp";
            // MediaSaver is this app's own helper for resolving an output
            // file under external storage.
            mediaSaver = new MediaSaver(context).setParentDirectoryName("Accessibility")
                    .setFileNameKeepOriginalExtension(fileName)
                    .setExternal(MediaSaver.isExternalStorageReadable());

            mRecorder = new MediaRecorder();
            mRecorder.reset();

            // Requires android.permission.MODIFY_AUDIO_SETTINGS.
            AudioManager mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
            if (mAudioManager != null) {
                mAudioManager.setMode(AudioManager.MODE_IN_COMMUNICATION); // MODE_IN_COMMUNICATION | MODE_IN_CALL
                // mAudioManager.setSpeakerphoneOn(true);
                // mAudioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, mAudioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL), 0); // increase volume
                hasWiredHeadset(mAudioManager);
            }

            // Requires android.permission.RECORD_AUDIO.
            String manufacturer = Build.MANUFACTURER;
            Log.d(TAG, manufacturer);
            /* Some manufacturers need a different source:
            if (manufacturer.toLowerCase().contains("samsung")) {
                mRecorder.setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION);
            } else {
                mRecorder.setAudioSource(MediaRecorder.AudioSource.VOICE_CALL);
            }
            VOICE_CALL is the actual call data being sent in a call, both your
            side and theirs. VOICE_COMMUNICATION is just the microphone, but
            with codecs and echo cancellation tuned for good voice quality. */
            mRecorder.setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION); // MIC | VOICE_COMMUNICATION (Android 10 release) | VOICE_RECOGNITION | (VOICE_CALL = VOICE_UPLINK + VOICE_DOWNLINK)
            mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); // THREE_GPP | MPEG_4
            mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB); // AMR_NB | AAC
            mRecorder.setOutputFile(mediaSaver.pathFile().getAbsolutePath());
            mRecorder.prepare();
            mRecorder.start();
            isStarted = true;
        } catch (IOException | IllegalStateException e) {
            Log.e(TAG, "startRecording failed", e);
        }
    }

    public void stopRecording() {
        if (isStarted && mRecorder != null) {
            mRecorder.stop();
            mRecorder.reset(); // the object can be reused by going back to the setAudioSource() step
            mRecorder.release();
            mRecorder = null;
            isStarted = false;
        }
    }

    // Detects a connected audio device such as a wired headset, a USB audio
    // device, or audio routed over the telephony network.
    private boolean hasWiredHeadset(AudioManager mAudioManager) {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
            return mAudioManager.isWiredHeadsetOn();
        } else {
            final AudioDeviceInfo[] devices = mAudioManager.getDevices(AudioManager.GET_DEVICES_ALL);
            for (AudioDeviceInfo device : devices) {
                final int type = device.getType();
                if (type == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
                    Log.d(TAG, "hasWiredHeadset: found wired headset");
                    return true;
                } else if (type == AudioDeviceInfo.TYPE_USB_DEVICE) {
                    Log.d(TAG, "hasWiredHeadset: found USB audio device");
                    return true;
                } else if (type == AudioDeviceInfo.TYPE_TELEPHONY) {
                    Log.d(TAG, "hasWiredHeadset: found audio routed over the telephony network");
                    return true;
                }
            }
            return false;
        }
    }
}
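
As posted, the service records to a .3gp file with MediaRecorder rather than processing audio in real time. For actual real-time processing, AudioRecord exposes raw PCM frames as they arrive. Below is a minimal sketch under two assumptions: RECORD_AUDIO has been granted, and the app only gets the microphone side of the call, since the VOICE_CALL source (uplink + downlink) requires the privileged CAPTURE_AUDIO_OUTPUT permission that third-party apps cannot hold. The class name and sample rate are illustrative:

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

public class PcmCaptureThread extends Thread {
    private volatile boolean running = true;

    @Override
    public void run() {
        final int sampleRate = 16000; // illustrative; 8000 or 44100 are also common
        final int minBuf = AudioRecord.getMinBufferSize(sampleRate,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        AudioRecord recorder = new AudioRecord(
                MediaRecorder.AudioSource.VOICE_COMMUNICATION,
                sampleRate, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, minBuf * 2);
        if (recorder.getState() != AudioRecord.STATE_INITIALIZED) {
            recorder.release(); // device refused this configuration
            return;
        }
        short[] pcm = new short[minBuf];
        recorder.startRecording();
        while (running) {
            int read = recorder.read(pcm, 0, pcm.length); // blocking read
            if (read > 0) {
                // Process `read` samples here: level metering, voice activity
                // detection, streaming to a speech API, etc.
            }
        }
        recorder.stop();
        recorder.release();
    }

    public void shutdown() { running = false; }
}

Such a thread could be started from startRecording() in place of the MediaRecorder pipeline and stopped from stopRecording() via shutdown().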

Source: https://stackoverflow.com/questions/61184352/android-accessibility-service-real-time-audio-processing
