Not able to display live-streamed data on Android

[亡魂溺海] submitted on 2020-02-02 13:00:59

Question


I am trying to display raw H264 data from a camera device in my Android app. I can receive the data and show it in a TextView, but I am not able to render it on a TextureView. I am a beginner on Android and no expert at decoding raw data, so any suggestions would be appreciated. Please find the code below:

Code for receiving the data:

    public class myVideoReceiver extends Thread {
        public boolean bKeepRunning2 = true;
        public String lastMessage2 = "";
        private DatagramSocket socket2; // kept as a field so release() can close it

        public void run() {
            String message2;
            byte[] lmessage2 = new byte[MAX_UDP_DATAGRAM_LEN2];
            DatagramPacket packet2 = new DatagramPacket(lmessage2, lmessage2.length);

            try {
                socket2 = new DatagramSocket(UDP_SERVER_PORT2);

                while (bKeepRunning2) {
                    socket2.receive(packet2);
                    message2 = new String(lmessage2, 0, packet2.getLength());
                    lastMessage2 = message2;
                    runOnUiThread(updateTextMessage2);
                }
            } catch (Throwable e) {
                e.printStackTrace();
            }

            if (socket2 != null) {
                socket2.close();
            }
        }

        public void kill() {
            bKeepRunning2 = false;
        }

        // Added release function: stops the receive loop and closes the socket
        public void release() {
            kill();
            if (socket2 != null) {
                socket2.close();
            }
        }

        public String getLastMessage() {
            return lastMessage2;
        }
    }

    public Runnable updateTextMessage2 = new Runnable() {
        public void run() {
            if (myVideoReceiver == null) return;
            VIDEO_RESPONSE.setText(myVideoReceiver.getLastMessage());
        }
    };
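Since H264 is a binary bit stream, a receiver that keeps each UDP payload as raw bytes avoids the byte-to-String-to-byte round trip used above, which can corrupt the data. The following is only a minimal sketch: RawFrameReceiver, MAX_UDP_DATAGRAM_LEN, UDP_PORT and the queue size are illustrative names and values, not part of the code above.

    import java.net.DatagramPacket;
    import java.net.DatagramSocket;
    import java.util.Arrays;
    import java.util.concurrent.ArrayBlockingQueue;
    import java.util.concurrent.BlockingQueue;

    // Sketch only: keeps each UDP payload as a byte[] instead of a String
    public class RawFrameReceiver extends Thread {
        private static final int MAX_UDP_DATAGRAM_LEN = 65507; // assumed maximum payload
        private static final int UDP_PORT = 50005;             // assumed port, replace with yours

        private volatile boolean keepRunning = true;
        private final BlockingQueue<byte[]> frames = new ArrayBlockingQueue<>(64);

        @Override
        public void run() {
            byte[] buffer = new byte[MAX_UDP_DATAGRAM_LEN];
            DatagramPacket packet = new DatagramPacket(buffer, buffer.length);
            DatagramSocket socket = null;
            try {
                socket = new DatagramSocket(UDP_PORT);
                while (keepRunning) {
                    socket.receive(packet);
                    // Copy only the bytes actually received, without any String conversion
                    byte[] frame = Arrays.copyOf(packet.getData(), packet.getLength());
                    frames.offer(frame); // drops the datagram if the queue is full
                }
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                if (socket != null) {
                    socket.close();
                }
            }
        }

        // Blocks until the next datagram payload is available
        public byte[] nextFrame() throws InterruptedException {
            return frames.take();
        }

        public void kill() {
            keepRunning = false;
        }
    }

A consumer thread could then call nextFrame() and hand the returned bytes straight to the decoder.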

Code to display the raw data on the TextureView:

public class MainActivity extends AppCompatActivity implements TextureView.SurfaceTextureListener{

private TextureView m_surface;// View that contains the Surface Texture

private myVideoReceiver provider;// Object that connects to our server and gets H264 frames

private MediaCodec m_codec;// Media decoder

private DecodeFramesTask m_frameTask;// AsyncTask that takes H264 frames and uses the decoder to update the Surface Texture

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    // Get a reference to the TextureView in the UI
    m_surface = (TextureView)findViewById(R.id.textureView);

    // Add this class as a call back so we can catch the events from the Surface Texture
    m_surface.setSurfaceTextureListener(this);
}

@Override
// Invoked when a TextureView's SurfaceTexture is ready for use
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        // When the surface is ready, create the H264 provider object that will supply frames from the server
        provider = new myVideoReceiver();

        // Create the format settings for the MediaCodec
        MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080);// MIMETYPE: a two-part identifier for file formats and format contents
        // Set the codec-specific data (SPS/PPS)
        format.setByteBuffer("csd-0", ByteBuffer.wrap(provider.lastMessage2.getBytes()));
        // Set the buffer size
        format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 100000);

        try {
            // Get an instance of MediaCodec and give it its Mime type
            m_codec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
            // Configure the Codec
            m_codec.configure(format, new Surface(m_surface.getSurfaceTexture()), null, 0);
            // Start the codec
            m_codec.start();
            // Create the AsyncTask to get the frames and decode them using the Codec
            m_frameTask = new DecodeFramesTask();
            m_frameTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
        }catch(Exception e){
            e.printStackTrace();
        }
}

@Override
// Invoked when the SurfaceTexture's buffers size changed
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}

@Override
// Invoked when the specified SurfaceTexture is about to be destroyed
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
    return false;
}

@Override
// Invoked when the specified SurfaceTexture is updated through updateTexImage()
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}

private class DecodeFramesTask extends AsyncTask<String, String, String> {

    @Override
    protected String doInBackground(String... data) {
        while(!isCancelled()) {
            // Get the next frame
            //byte[] frame = provider.nextFrame();
            //New code
            byte[] frame = provider.lastMessage2.getBytes();
            Log.e("Frame", "Value in frame data : "+frame);
            Log.e("Frame length","Frame length"+frame.length);
            // For getting the 'frame.length' 
            for(int i = 0; i < 10 && i < frame.length; i++) {
                Log.e("Framelength","Frame length"+frame.length);
            }

            // Now we need to give it to the Codec to decode into the surface

            // Get the input buffer from the decoder
            int inputIndex = m_codec.dequeueInputBuffer(-1);// Pass in -1 here as in this example we don't have a playback time reference
            Log.e("InputIndex","Value in Input index : "+inputIndex);
            // If the buffer index is valid, use the buffer with that index
            if (inputIndex >= 0) {
                ByteBuffer buffer;
                if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                    buffer = m_codec.getInputBuffer(inputIndex);
                } else {
                    // Fall back to the pre-Lollipop API so buffer is never null
                    buffer = m_codec.getInputBuffers()[inputIndex];
                    buffer.clear();
                }
                Log.e("MycustomData","Value in Buffer :"+buffer);
                Log.e("If InputIndex","if Input index : "+inputIndex);
                buffer.put(frame);
                // Tell the decoder to process the frame
                m_codec.queueInputBuffer(inputIndex, 0, frame.length, 0, 0);
            }

            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int outputIndex = m_codec.dequeueOutputBuffer(info, 0);
            Log.e("MycustomData","value in outputIndex: "+outputIndex);
            if (outputIndex >= 0) {
                Log.e("Inside if outputindex","value : "+outputIndex);
                m_codec.releaseOutputBuffer(outputIndex, true);
            }

            // wait for the next frame to be ready, our server makes a frame every 250ms
            try{Thread.sleep(250);}catch (Exception e){e.printStackTrace();}
        }
        return "";
    }

    @Override
    protected void onPostExecute(String result) {
        try {
            m_codec.stop();
            m_codec.release();
        }catch(Exception e){
            e.printStackTrace();
        }
        provider.release();
    }

}

@Override
public void onStop(){
    super.onStop();
    m_frameTask.cancel(true);
    provider.release();
}
}
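For reference, for MIMETYPE_VIDEO_AVC the decoder expects the SPS NAL unit in csd-0 and the PPS NAL unit in csd-1, rather than an arbitrary packet converted through a String. A minimal sketch of extracting them from an Annex-B buffer follows; it assumes 4-byte start codes, and CsdHelper, setCsdFromAnnexB and findStartCodes are illustrative names, not existing code.

    import android.media.MediaFormat;
    import java.nio.ByteBuffer;
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Sketch only: splits an Annex-B buffer into NAL units and fills csd-0/csd-1
    public class CsdHelper {

        // Returns the position of every 4-byte Annex-B start code (0x00 0x00 0x00 0x01)
        private static List<Integer> findStartCodes(byte[] data) {
            List<Integer> positions = new ArrayList<>();
            for (int i = 0; i + 4 <= data.length; i++) {
                if (data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 0 && data[i + 3] == 1) {
                    positions.add(i);
                }
            }
            return positions;
        }

        // Copies the SPS (NAL type 7) into csd-0 and the PPS (NAL type 8) into csd-1,
        // keeping the start code in front of each NAL unit
        public static void setCsdFromAnnexB(MediaFormat format, byte[] data) {
            List<Integer> starts = findStartCodes(data);
            for (int n = 0; n < starts.size(); n++) {
                int begin = starts.get(n);
                int end = (n + 1 < starts.size()) ? starts.get(n + 1) : data.length;
                if (begin + 4 >= end) continue;           // empty NAL unit, skip it
                int nalType = data[begin + 4] & 0x1F;     // low 5 bits of the NAL header byte
                byte[] nal = Arrays.copyOfRange(data, begin, end);
                if (nalType == 7) {
                    format.setByteBuffer("csd-0", ByteBuffer.wrap(nal));
                } else if (nalType == 8) {
                    format.setByteBuffer("csd-1", ByteBuffer.wrap(nal));
                }
            }
        }
    }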

Thanks in advance.

Please find the error log below:

E/Frame: Value in frame data : [B@aa987a7
E/InputIndex: Value in Input index : 0
E/MycustomData: Value in Buffer :java.nio.DirectByteBuffer[pos=0 lim=4194304 cap=4194304]
E/If InputIndex: if Input index : 0
E/MycustomData: value in outputIndex: -1
I/ViewRootImpl: jank_removeInvalidNode all the node in jank list is out of time
W/InputMethodManager: startInputReason = 1
W/libEGL: EGLNativeWindowType 0x7c452aa010 disconnect failed
D/ViewRootImpl[Page_01]: surface should not be released
E/Frame: Value in frame data : [B@5dcf743
E/InputIndex: Value in Input index : 1
E/MycustomData: Value in Buffer :java.nio.DirectByteBuffer[pos=0 lim=4194304 cap=4194304]
E/If InputIndex: if Input index : 1
E/MycustomData: value in outputIndex: -1
E/Frame: Value in frame data : [B@33de1c0
E/InputIndex: Value in Input index : 2
E/MycustomData: Value in Buffer :java.nio.DirectByteBuffer[pos=0 lim=4194304 cap=4194304]
E/If InputIndex: if Input index : 2
E/MycustomData: value in outputIndex: -1
E/Frame: Value in frame data : [B@6a1cef9
E/InputIndex: Value in Input index : 3
E/MycustomData: Value in Buffer :java.nio.DirectByteBuffer[pos=0 lim=4194304 cap=4194304]
E/If InputIndex: if Input index : 3
E/MycustomData: value in outputIndex: -1
E/Frame: Value in frame data : [B@d8b2e3e
E/InputIndex: Value in Input index : 4
E/MycustomData: Value in Buffer :java.nio.DirectByteBuffer[pos=0 lim=4194304 cap=4194304]
E/If InputIndex: if Input index : 4
E/MycustomData: value in outputIndex: -1
E/Frame: Value in frame data : [B@e4dc49f
E/InputIndex: Value in Input index : 0
E/MycustomData: Value in Buffer :java.nio.DirectByteBuffer[pos=0 lim=4194304 cap=4194304]
E/If InputIndex: if Input index : 0
E/MycustomData: value in outputIndex: -1
E/Frame: Value in frame data : [B@aff59ec
E/InputIndex: Value in Input index : 1
E/MycustomData: Value in Buffer :java.nio.DirectByteBuffer[pos=0 lim=4194304 cap=4194304]
E/If InputIndex: if Input index : 1
E/MycustomData: value in outputIndex: -1
E/Frame: Value in frame data : [B@4a761b5
E/InputIndex: Value in Input index : 2
E/MycustomData: Value in Buffer :java.nio.DirectByteBuffer[pos=0 lim=4194304 cap=4194304]
E/If InputIndex: if Input index : 2
E/MycustomData: value in outputIndex: -1
E/Frame: Value in frame data : [B@207f04a
E/InputIndex: Value in Input index : 3
E/MycustomData: Value in Buffer :java.nio.DirectByteBuffer[pos=0 lim=4194304 cap=4194304]
E/If InputIndex: if Input index : 3
E/MycustomData: value in outputIndex: -1
E/Frame: Value in frame data : [B@bd40bbb
E/InputIndex: Value in Input index : 4
E/MycustomData: Value in Buffer :java.nio.DirectByteBuffer[pos=0 lim=4194304 cap=4194304]
E/If InputIndex: if Input index : 4
E/MycustomData: value in outputIndex: -1
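The repeated value in outputIndex: -1 corresponds to MediaCodec.INFO_TRY_AGAIN_LATER, i.e. the decoder has not produced any output yet. A minimal sketch of distinguishing the documented return values of dequeueOutputBuffer, reusing m_codec from the code above:

    // Sketch of interpreting dequeueOutputBuffer(); the INFO_* constants are part of
    // the public MediaCodec API, everything else reuses names from the code above.
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int outputIndex = m_codec.dequeueOutputBuffer(info, 10000); // wait up to 10 ms

    if (outputIndex >= 0) {
        // A decoded frame is ready: render it to the Surface passed to configure()
        m_codec.releaseOutputBuffer(outputIndex, true);
    } else if (outputIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
        // -1: no decoded output available yet (what the log shows on every iteration)
    } else if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // -2: the output format is now known, typically after SPS/PPS were consumed
        Log.d("Decoder", "New output format: " + m_codec.getOutputFormat());
    } else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        // -3: only relevant on old API levels that still use getOutputBuffers()
    }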

Screenshot:

Updated screenshot

Source: https://stackoverflow.com/questions/59582209/not-able-to-display-live-streamed-data-on-android
