WebRTC cannot record screen

我的梦境 提交于 2021-01-28 04:58:15

问题


I'm trying to make screen sharing app using WebRTC. I have code that can get and share video stream from camera. I need to modify it to instead get video via MediaProjection API. Based on this post I have modified my code to use org.webrtc.ScreenCapturerAndroid, but there is no video output shown. There is only black screen. If I use camera, everything works fine (I can see camera output on screen). Could someone please check my code and maybe point me in right direction? I have been stuck on this for three days already.

Here is my code:

public class MainActivity extends AppCompatActivity {

    private static final String TAG = "VIDEO_CAPTURE";

    private static final int CAPTURE_PERMISSION_REQUEST_CODE = 1;
    private static final String VIDEO_TRACK_ID = "video_stream";

    PeerConnectionFactory peerConnectionFactory;

    SurfaceViewRenderer localVideoView;
    ProxyVideoSink localSink;

    VideoSource videoSource;
    VideoTrack localVideoTrack;

    EglBase rootEglBase;

    boolean camera = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        rootEglBase = EglBase.create();
        localVideoView = findViewById(R.id.local_gl_surface_view);

        localVideoView.init(rootEglBase.getEglBaseContext(), null);

        startScreenCapture();
    }

    @TargetApi(21)
    private void startScreenCapture() {
        MediaProjectionManager mMediaProjectionManager = (MediaProjectionManager) getApplication().getSystemService(Context.MEDIA_PROJECTION_SERVICE);
        startActivityForResult(mMediaProjectionManager.createScreenCaptureIntent(), CAPTURE_PERMISSION_REQUEST_CODE);
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode != CAPTURE_PERMISSION_REQUEST_CODE) { return; }

        start(data);
    }

    private void start(Intent permissionData) {

        //Initialize PeerConnectionFactory globals.
        PeerConnectionFactory.InitializationOptions initializationOptions =
                PeerConnectionFactory.InitializationOptions.builder(this)
                        .setEnableVideoHwAcceleration(true)
                        .createInitializationOptions();
        PeerConnectionFactory.initialize(initializationOptions);

        //Create a new PeerConnectionFactory instance - using Hardware encoder and decoder.
        PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
        DefaultVideoEncoderFactory defaultVideoEncoderFactory = new DefaultVideoEncoderFactory(
                rootEglBase.getEglBaseContext(), true,true);
        DefaultVideoDecoderFactory defaultVideoDecoderFactory = new DefaultVideoDecoderFactory(rootEglBase.getEglBaseContext());

        peerConnectionFactory = PeerConnectionFactory.builder()
                .setOptions(options)
                .setVideoDecoderFactory(defaultVideoDecoderFactory)
                .setVideoEncoderFactory(defaultVideoEncoderFactory)
                .createPeerConnectionFactory();;

        VideoCapturer videoCapturerAndroid;
        if (camera) {
            videoCapturerAndroid = createCameraCapturer(new Camera1Enumerator(false));
        } else {
            videoCapturerAndroid = new ScreenCapturerAndroid(permissionData, new MediaProjection.Callback() {
                @Override
                public void onStop() {
                    super.onStop();
                    Log.e(TAG, "user has revoked permissions");
                }
            });
        }

        videoSource = peerConnectionFactory.createVideoSource(videoCapturerAndroid);

        DisplayMetrics metrics = new DisplayMetrics();
        MainActivity.this.getWindowManager().getDefaultDisplay().getRealMetrics(metrics);
        videoCapturerAndroid.startCapture(metrics.widthPixels, metrics.heightPixels, 30);

        localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
        localVideoTrack.setEnabled(true);

        //localVideoTrack.addRenderer(new VideoRenderer(localRenderer));
        localSink = new ProxyVideoSink().setTarget(localVideoView);
        localVideoTrack.addSink(localSink);
    }

    //find first camera, this works without problem
    private VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
        final String[] deviceNames = enumerator.getDeviceNames();

        // First, try to find front facing camera
        Logging.d(TAG, "Looking for front facing cameras.");
        for (String deviceName : deviceNames) {
            if (enumerator.isFrontFacing(deviceName)) {
                Logging.d(TAG, "Creating front facing camera capturer.");
                VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);

                if (videoCapturer != null) {
                    return videoCapturer;
                }
            }
        }

        // Front facing camera not found, try something else
        Logging.d(TAG, "Looking for other cameras.");
        for (String deviceName : deviceNames) {
            if (!enumerator.isFrontFacing(deviceName)) {
                Logging.d(TAG, "Creating other camera capturer.");
                VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);

                if (videoCapturer != null) {
                    return videoCapturer;
                }
            }
        }

        return null;
    }
}

ProxyVideoSink

public class ProxyVideoSink implements VideoSink {

    private VideoSink target;

    synchronized ProxyVideoSink setTarget(VideoSink target) { this.target = target; return this; }

    @Override
    public void onFrame(VideoFrame videoFrame) {

        if (target == null) {
            Log.w("VideoSink", "Dropping frame in proxy because target is null.");
            return;
        }

        target.onFrame(videoFrame);
    }
}

In logcat I can see, that some frames are rendered, but nothing is shown (black screen).

06-18 17:42:44.750 11357-11388/com.archona.webrtcscreencapturetest I/org.webrtc.Logging: EglRenderer: local_gl_surface_viewDuration: 4000 ms. Frames received: 117. Dropped: 0. Rendered: 117. Render fps: 29.2. Average render time: 4754 μs. Average swapBuffer time: 2913 μs.
06-18 17:42:48.752 11357-11388/com.archona.webrtcscreencapturetest I/org.webrtc.Logging: EglRenderer: local_gl_surface_viewDuration: 4001 ms. Frames received: 118. Dropped: 0. Rendered: 118. Render fps: 29.5. Average render time: 5015 μs. Average swapBuffer time: 3090 μs.

I'm using latest version of WebRTC library: implementation 'org.webrtc:google-webrtc:1.0.23546'. My device has API level 24 (Android 7.0), but I have tested this code on 3 different devices with different API levels, so I don't suspect device specific problem. I have tried building another app that uses MediaProjection API (without WebRTC) and I can see correct output inside SurfaceView. I have tried downgrading webrtc library, but nothing seems to work.

Thanks for any help.


回答1:


I was faced same issue using WebRTC library org.webrtc:google-webrtc:1.0.22672. I am using android 7.0 device. Video call is working fine. Issue is with screen sharing. Screen sharing showing black screen always.

Then I added following:

peerConnectionFactory.setVideoHwAccelerationOptions(rootEglBase.getEglBaseContext(), rootEglBase.getEglBaseContext());

Now it is working perfectly.



来源:https://stackoverflow.com/questions/50914031/webrtc-cannot-record-screen

易学教程内所有资源均来自网络或用户发布的内容,如有违反法律规定的内容欢迎反馈
该文章没有解决你所遇到的问题?点击提问,说说你的问题,让更多的人一起探讨吧!