Question
I have written code that displays and records from a v4l2src at the same time. My pipeline looks like:
              /-- [queue] ! [videosink]
v4l2src ! tee
              \-- [queue] ! [filesink]
Currently I am able to display and record together, and also dynamically start and stop the record branch at will (using a Ctrl+C SIGINT handler to toggle start/stop). I used @thiagoss' advice in this answer, and parts of this article.
Question:
The only problem I am facing is sending EOS to the filesink branch upon unlinking. Which element do I pass to gst_element_send_event(-->?<--, gst_event_new_eos());? I can't send it to the entire pipeline because 1. the branch is now unlinked and 2. even if I could, it would also close the videosink.
What I've tried: if I remove mp4mux and just save the H.264-encoded stream, I can view the resulting video with gst-playbin, meaning the branch creation and unlinking are happening correctly.
The following is my code.
#include <string.h>
#include <gst/gst.h>
#include <signal.h>
#include <unistd.h>
#include <stdlib.h>
#include <stdio.h>
// v4l2src ! tee name=t t. ! x264enc ! mp4mux ! filesink location=/home/rish/Desktop/okay.264 t. ! videoconvert ! autovideosink
static GMainLoop *loop;
static GstElement *pipeline, *src, *tee, *encoder, *muxer, *filesink, *videoconvert, *videosink, *queue_record, *queue_display;
static GstBus *bus;
static GstPad *teepad;
static gboolean recording = FALSE;
static gint counter = 0;
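// Bus callback: log errors and warnings; on EOS, tear the whole pipeline down and exit.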
static gboolean
message_cb (GstBus * bus, GstMessage * message, gpointer user_data)
{
switch (GST_MESSAGE_TYPE (message)) {
case GST_MESSAGE_ERROR:{
GError *err = NULL;
gchar *name, *debug = NULL;
name = gst_object_get_path_string (message->src);
gst_message_parse_error (message, &err, &debug);
g_printerr ("ERROR: from element %s: %s\n", name, err->message);
if (debug != NULL)
g_printerr ("Additional debug info:\n%s\n", debug);
g_error_free (err);
g_free (debug);
g_free (name);
g_main_loop_quit (loop);
break;
}
case GST_MESSAGE_WARNING:{
GError *err = NULL;
gchar *name, *debug = NULL;
name = gst_object_get_path_string (message->src);
gst_message_parse_warning (message, &err, &debug);
g_printerr ("ERROR: from element %s: %s\n", name, err->message);
if (debug != NULL)
g_printerr ("Additional debug info:\n%s\n", debug);
g_error_free (err);
g_free (debug);
g_free (name);
break;
}
case GST_MESSAGE_EOS:{
g_print ("Got EOS\n");
g_main_loop_quit (loop);
gst_element_set_state (pipeline, GST_STATE_NULL);
g_main_loop_unref (loop);
gst_object_unref (pipeline);
exit(0);
break;
}
default:
break;
}
return TRUE;
}
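// Idle-probe callback on the tee's request pad: unlink the recording branch,
// send EOS to the filesink, then stop, remove and unref the branch elements.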
static GstPadProbeReturn unlink_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data) {
g_print("Unlinking...");
GstPad *sinkpad;
sinkpad = gst_element_get_static_pad (queue_record, "sink");
gst_pad_unlink (teepad, sinkpad);
gst_object_unref (sinkpad);
gst_element_send_event(filesink, gst_event_new_eos());
sleep(1);
gst_bin_remove(GST_BIN (pipeline), queue_record);
gst_bin_remove(GST_BIN (pipeline), encoder);
// gst_bin_remove(GST_BIN (pipeline), muxer);
gst_bin_remove(GST_BIN (pipeline), filesink);
gst_element_set_state(queue_record, GST_STATE_NULL);
gst_element_set_state(encoder, GST_STATE_NULL);
// gst_element_set_state(muxer, GST_STATE_NULL);
gst_element_set_state(filesink, GST_STATE_NULL);
gst_object_unref(queue_record);
gst_object_unref(encoder);
// gst_object_unref(muxer);
gst_object_unref(filesink);
gst_element_release_request_pad (tee, teepad);
gst_object_unref (teepad);
g_print("Unlinked\n");
return GST_PAD_PROBE_REMOVE;
}
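// Stop recording: add an idle probe on the tee pad so unlink_cb runs only
// when no buffer is in flight on that pad.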
void stopRecording() {
g_print("stopRecording\n");
gst_pad_add_probe(teepad, GST_PAD_PROBE_TYPE_IDLE, unlink_cb, NULL, (GDestroyNotify) g_free);
recording = FALSE;
}
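// Start recording: request a new src pad from the tee, build a
// queue ! x264enc ! filesink branch, add it to the pipeline, sync its state
// with the parent and link it to the tee.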
void startRecording() {
g_print("startRecording\n");
GstPad *sinkpad;
GstPadTemplate *templ;
templ = gst_element_class_get_pad_template(GST_ELEMENT_GET_CLASS(tee), "src_%u");
teepad = gst_element_request_pad(tee, templ, NULL, NULL);
queue_record = gst_element_factory_make("queue", "queue_record");
encoder = gst_element_factory_make("x264enc", NULL);
// muxer = gst_element_factory_make("mp4mux", NULL);
filesink = gst_element_factory_make("filesink", NULL);
char *file_name = (char*) malloc(100*sizeof(char));
sprintf(file_name, "/home/rish/Desktop/rec%d.mp4", counter++);
g_print("%s\n", file_name);
g_object_set(filesink, "location", file_name, NULL);
g_object_set(encoder, "tune", 4, NULL);
free(file_name);
gst_bin_add_many(GST_BIN(pipeline), gst_object_ref(queue_record), gst_object_ref(encoder), gst_object_ref(filesink), NULL);
gst_element_link_many(queue_record, encoder, filesink, NULL);
gst_element_sync_state_with_parent(queue_record);
gst_element_sync_state_with_parent(encoder);
// gst_element_sync_state_with_parent(muxer);
gst_element_sync_state_with_parent(filesink);
sinkpad = gst_element_get_static_pad(queue_record, "sink");
gst_pad_link(teepad, sinkpad);
gst_object_unref(sinkpad);
recording = TRUE;
}
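// Ctrl+C toggles recording on and off.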
void sigintHandler(int unused) {
g_print("You ctrl-c!\n");
if (recording)
stopRecording();
else
startRecording();
}
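// Build the always-on display branch (v4l2src ! tee ! queue ! videoconvert
// ! autovideosink), start the first recording and run the main loop.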
int main(int argc, char *argv[])
{
signal(SIGINT, sigintHandler);
gst_init (&argc, &argv);
pipeline = gst_pipeline_new(NULL);
src = gst_element_factory_make("v4l2src", NULL);
tee = gst_element_factory_make("tee", "tee");
queue_display = gst_element_factory_make("queue", "queue_display");
videoconvert = gst_element_factory_make("videoconvert", NULL);
videosink = gst_element_factory_make("autovideosink", NULL);
if (!pipeline || !src || !tee || !videoconvert || !videosink || !queue_display) {
g_error("Failed to create elements");
return -1;
}
gst_bin_add_many(GST_BIN(pipeline), src, tee, queue_display, videoconvert, videosink, NULL);
if (!gst_element_link_many(src, tee, NULL)
|| !gst_element_link_many(tee, queue_display, videoconvert, videosink, NULL)) {
g_error("Failed to link elements");
return -2;
}
startRecording();
loop = g_main_loop_new(NULL, FALSE);
bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));
gst_bus_add_signal_watch(bus);
g_signal_connect(G_OBJECT(bus), "message", G_CALLBACK(message_cb), NULL);
gst_object_unref(GST_OBJECT(bus));
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_print("Starting loop\n");
g_main_loop_run(loop);
return 0;
}
Answer 1:
You should send it to the encoders, so that they can properly finalize their work; they will forward it to the muxer, which also needs to wrap up the file by writing the parts of the headers that it can only write at the end.
Also, remove that sleep. Instead, wait until the filesink has posted an EOS message, so you know everything has been processed and it is safe to remove the elements. You might need to enable message-forward on the pipeline, otherwise it will hold the EOS messages back until all sinks have posted EOS (which never happens in your case because of the videosink).
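For illustration, a minimal sketch of that flow using the variable names from the question (pipeline, tee, teepad, queue_record, encoder, filesink), assuming mp4mux is restored in the branch. It is meant to show where each piece goes, not to be a drop-in patch:

// Sketch 1: in unlink_cb, stop tearing the branch down immediately.
// Just unlink and send EOS to the encoder; x264enc flushes its queued
// frames and forwards EOS to mp4mux, which finalizes the file and
// passes EOS on to the filesink.
static GstPadProbeReturn
unlink_cb (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
  GstPad *sinkpad = gst_element_get_static_pad (queue_record, "sink");
  gst_pad_unlink (teepad, sinkpad);
  gst_object_unref (sinkpad);
  gst_element_send_event (encoder, gst_event_new_eos ());
  return GST_PAD_PROBE_REMOVE;
}

// Sketch 2: once in main(), make the pipeline forward messages it would
// otherwise hold back (per-sink EOS in particular):
//   g_object_set (pipeline, "message-forward", TRUE, NULL);

// Sketch 3: helper for message_cb. With message-forward enabled, the
// filesink's EOS arrives on the bus wrapped in a "GstBinForwarded"
// element message. Once this returns TRUE it is safe to set the branch
// elements to GST_STATE_NULL, gst_bin_remove() them and release the
// tee request pad.
static gboolean
is_filesink_eos (GstMessage *message)
{
  const GstStructure *s;
  GstMessage *forwarded = NULL;
  gboolean res = FALSE;

  if (GST_MESSAGE_TYPE (message) != GST_MESSAGE_ELEMENT)
    return FALSE;
  s = gst_message_get_structure (message);
  if (s == NULL || !gst_structure_has_name (s, "GstBinForwarded"))
    return FALSE;
  gst_structure_get (s, "message", GST_TYPE_MESSAGE, &forwarded, NULL);
  if (forwarded) {
    res = GST_MESSAGE_TYPE (forwarded) == GST_MESSAGE_EOS &&
        GST_MESSAGE_SRC (forwarded) == GST_OBJECT (filesink);
    gst_message_unref (forwarded);
  }
  return res;
}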
Answer 2:
I've got the filesink and encoders in a separate bin with an overridden handle_message (see: How to override handle_message in GstBin?) that ignores EOS and transforms it into a custom message.
The initial EOS is sent to the ghost pad of that bin. That pad is connected to the encoder (the bin contains the encoder, muxer and filesink).
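A rough sketch of such a bin subclass (the RecordBin name and the "record-bin-eos" message name are illustrative assumptions, not from the original answer):

#include <gst/gst.h>

typedef struct _RecordBin { GstBin parent; } RecordBin;
typedef struct _RecordBinClass { GstBinClass parent_class; } RecordBinClass;

G_DEFINE_TYPE (RecordBin, record_bin, GST_TYPE_BIN)

// Swallow the EOS message coming from the filesink inside this bin and
// post a custom application message instead, so the rest of the pipeline
// keeps running while the application knows the file has been finalized.
static void
record_bin_handle_message (GstBin *bin, GstMessage *message)
{
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_EOS) {
    gst_element_post_message (GST_ELEMENT (bin),
        gst_message_new_application (GST_OBJECT (bin),
            gst_structure_new_empty ("record-bin-eos")));
    gst_message_unref (message);
    return;
  }
  // Every other message takes the normal GstBin path.
  GST_BIN_CLASS (record_bin_parent_class)->handle_message (bin, message);
}

static void
record_bin_class_init (RecordBinClass *klass)
{
  GST_BIN_CLASS (klass)->handle_message = record_bin_handle_message;
}

static void
record_bin_init (RecordBin *self)
{
}

// Usage sketch, assuming record_bin is an instance of RecordBin holding
// encoder ! muxer ! filesink behind a ghost "sink" pad that targets the
// encoder's sink pad: after unlinking the tee, send EOS into the ghost
// pad and wait on the bus for the "record-bin-eos" application message
// before removing the bin from the pipeline.
//   GstPad *ghost = gst_element_get_static_pad (record_bin, "sink");
//   gst_pad_send_event (ghost, gst_event_new_eos ());
//   gst_object_unref (ghost);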
Source: https://stackoverflow.com/questions/42908694/sending-eos-to-filesink-while-removing-branch-from-tee