Here’s my code to display or record two videos in one scene. (It still has to be upgraded with glmosaic; for now only one video can be displayed on the two screens.)
Here’s the command I run and the debug output it produces:

GST_DEBUG=GST_BUS:5 ./Gstreamer-3DRendering '/home/pierre/Dropbox/demo 05_05_10/VideoMontage/big_buck_bunny.ogg' FALSE 'test.ogg' TRUE TRUE

It gives:
0:00:00.420938212 14582 0x9c1f008 DEBUG GST_BUS gstbus.c:307:gst_bus_post:<bus1> [msg 0x9e41590] posting on bus, type state-changed, GstMessageState, old-state=(GstState)GST_STATE_READY, new-state=(GstState)GST_STATE_PAUSED, pending-state=(GstState)GST_STATE_VOID_PENDING; from source <gldownload>
0:00:00.420960142 14582 0x9c1f008 DEBUG GST_BUS gstbus.c:337:gst_bus_post:<bus1> [msg 0x9e41590] pushing on async queue
0:00:00.420972294 14582 0x9c1f008 DEBUG GST_BUS gstbus.c:342:gst_bus_post:<bus1> [msg 0x9e41590] pushed on async queue
0:00:00.420984307 14582 0x9c1f008 DEBUG GST_BUS gstbus.c:333:gst_bus_post:<bus0> [msg 0x9e41590] dropped
0:00:00.420995621 14582 0x9c1f008 DEBUG GST_BUS gstbus.c:333:gst_bus_post:<bus4> [msg 0x9e41590] dropped
Failed to start up pipeline!
0:00:00.421086136 14582 0x9c1f008 ERROR GST_BUS gstbus.c:838:gst_bus_add_watch_full_unlocked:<bus1> Tried to add new watch while one was already there
(Gstreamer-3DRendering:14582): GStreamer-CRITICAL **: Could not add signal watch to bus bus1
0:00:00.421118123 14582 0x9c1f008 DEBUG GST_BUS gstbus.c:1056:gst_bus_poll: running mainloop 0xa19d898
0:00:00.421134745 14582 0x9c1f008 DEBUG GST_BUS gstbus.c:258:gst_bus_set_main_context:<bus1> setting main context to 0x9de7280, GLib default context: 0x9de7280
0:00:00.421154231 14582 0x9c1f008 LOG GST_BUS gstbus.c:494:gst_bus_timed_pop_filtered:<bus1> have 34 messages
0:00:00.421248028 14582 0x9c1f008 DEBUG GST_BUS gstbus.c:498:gst_bus_timed_pop_filtered:<bus1> got message 0x9d50320, state-changed, type mask is 4294967295
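The bus_call handler that the code below installs on the pipeline bus is not included in this snippet; a minimal sketch, assuming the usual error/EOS handling (the real handler in my program may do more), would be:

static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
//minimal sketch assuming standard error/EOS handling
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg))
{
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR:
{
GError *err = NULL;
gchar *debug = NULL;
gst_message_parse_error (msg, &err, &debug);
g_print ("ERROR: %s\n", err->message);
g_error_free (err);
g_free (debug);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}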
#include <gst/gst.h>
#include <string>

using std::string;

//NOTE: pipeline, video, audio, videopad, audiopad and the display flag are globals
//defined elsewhere in the full program, as are the callbacks bus_call (sketched above),
//cb_newpad_video, cb_newpad_audio (sketched after the code), reshapeCallback and
//drawCallback_glfilterapp.
gint main (gint argc, gchar *argv[])
{
GstStateChangeReturn ret;
GstElement *videosrc, *decodebin, *ffvideoscale, *glupload, *glfilterreflectedscreen, *glfilterapp, *gldownload, *ffvideoscale2, *theoraenc, *queue, *queue2, *aconverter, *asink, *vorbisenc, *oggmux, *filesink, *glimagesink;
GMainLoop *loop;
GstBus *bus;
// gstreamer initialization
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
// create the pipeline
pipeline = gst_pipeline_new ("pipeline");
//listen to the pipeline bus to get error messages
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
//release our reference to the bus now that the watch is installed
gst_object_unref (bus);
//elements creation
//configure the videosource
videosrc = gst_element_factory_make ("filesrc", "source");
//configure a default source if the user does not specify one
//configure the decodebin
decodebin = gst_element_factory_make ("decodebin", "decoder");
//connect callbacks that will link the dynamically created audio and video pads
//(a sketch of these callbacks is given after the code)
g_signal_connect (decodebin, "new-decoded-pad", G_CALLBACK (cb_newpad_video), NULL);
g_signal_connect (decodebin, "new-decoded-pad", G_CALLBACK (cb_newpad_audio), NULL);
//create the scalers and queues; the RGB conversion is necessary (it is required for glimagesink)
ffvideoscale = gst_element_factory_make("ffvideoscale", "videoconverter");
ffvideoscale2 = gst_element_factory_make("ffvideoscale", "videoconverter1");
queue = gst_element_factory_make("queue", "queue");
queue2 = gst_element_factory_make("queue", "queue1");
GstCaps *incaps = gst_caps_new_simple ("video/x-raw-rgb",
"width", G_TYPE_INT, 800,
"height", G_TYPE_INT, 600,
NULL);
GstCaps *outcaps = gst_caps_new_simple ("video/x-raw-rgb",
NULL);
//create and configure the GL elements
glupload = gst_element_factory_make ("glupload", "glupload");
gldownload = gst_element_factory_make ("gldownload", "gldownload");
glfilterapp = gst_element_factory_make ("glfilterapp", "glfilterapp");
//specify which functions will be used as the reshape and draw callbacks
g_object_set(G_OBJECT(glfilterapp), "client-reshape-callback", reshapeCallback, NULL);
g_object_set(G_OBJECT(glfilterapp), "client-draw-callback", drawCallback_glfilterapp, NULL);
glfilterreflectedscreen = gst_element_factory_make ("glfilterreflectedscreen", "glfilterreflectedscreen");
//set its options
g_object_set(G_OBJECT(glfilterreflectedscreen), "fovy", 70.0, NULL);
aconverter = gst_element_factory_make ("audioconvert", "aconv");
asink = gst_element_factory_make ("autoaudiosink", "sink");
theoraenc = gst_element_factory_make("theoraenc", "theoraenc");
g_object_set(G_OBJECT(theoraenc), "sharpness", 2, NULL);
g_object_set(G_OBJECT(theoraenc), "quality", 63, NULL);
g_object_set(G_OBJECT(theoraenc), "keyframe-auto", FALSE, NULL);
vorbisenc = gst_element_factory_make("vorbisenc", "vorbisenc");
oggmux = gst_element_factory_make("oggmux", "oggmux");
filesink = gst_element_factory_make("filesink", "filesink");
glimagesink = gst_element_factory_make("glimagesink", "glimagesink");
//check that all the elements have been created
if (!videosrc || !decodebin || !ffvideoscale || !glupload || !glfilterapp || !glfilterreflectedscreen || !gldownload || !ffvideoscale2 || !theoraenc || !queue || !queue2 || !aconverter || !glimagesink || !asink || !vorbisenc || !oggmux || !filesink)
{
//if any one of them failed, the pipeline cannot work; display an error message
g_print ("one element could not be found\n");
return -1;
}
//configure the program’s options; the expected arguments are:
//1 -> first video source (a second video source is planned but inactive for now)
//2 -> display mode: TRUE to display on screen, FALSE to record to a file
//3 -> output file (used when recording)
//4 -> graphics mode of glfilterreflectedscreen
//5 -> separated-screen mode of glfilterreflectedscreen
string arg2 = string(argv[2]);
string arg3 = string(argv[3]);
string arg4 = string(argv[4]);
string arg5 = string(argv[5]);
g_object_set(G_OBJECT(videosrc), "location", argv[1], NULL);
if (arg2 == "TRUE")
display = true;
else if (arg2 == "FALSE")
display = false;
if (!display)
{
g_object_set(G_OBJECT(filesink), "location", argv[3], NULL);
}
else
{
if (arg4 == "FALSE")
g_object_set(G_OBJECT(glfilterreflectedscreen), "active_graphic_mode", FALSE, NULL);
else if (arg4 == "TRUE")
g_object_set(G_OBJECT(glfilterreflectedscreen), "active_graphic_mode", TRUE, NULL);
if (arg5 == "FALSE")
{
g_object_set(G_OBJECT(glfilterreflectedscreen), "separated_screen", FALSE, NULL);
g_object_set(G_OBJECT(glfilterreflectedscreen), "show_floor", TRUE, NULL);
}
if (arg5 == "TRUE")
{
g_object_set(G_OBJECT(glfilterreflectedscreen), "show_floor", FALSE, NULL);
g_object_set(G_OBJECT(glfilterreflectedscreen), "separated_screen", TRUE, NULL);
}
}
if (display)
{
//add the source and decoder to the main pipeline
gst_bin_add_many (GST_BIN (pipeline), videosrc, decodebin, NULL);
//link them
gst_element_link (videosrc, decodebin);
//create the video output (in a second bin)
video = gst_bin_new ("videobin");
//get the sink pad of the first video element; it will be exposed as the bin's ghost pad
videopad = gst_element_get_static_pad (ffvideoscale, "sink");
//add the elements to the video bin
gst_bin_add_many (GST_BIN (video), ffvideoscale, glupload, glfilterreflectedscreen, glimagesink, NULL);
//add the video bin to the main pipeline
gst_bin_add (GST_BIN (pipeline), video);
//force the input to be converted to the RGB format by linking through the caps filter
gboolean link = gst_element_link_filtered (ffvideoscale, glupload, incaps);
//free the memory allocated to the caps
gst_caps_unref(incaps);
if (!link)
{
g_print("Failed to link with the caps filter\n");
return -1;
}
gst_element_link(glupload, glfilterreflectedscreen);
gst_element_link(glfilterreflectedscreen, glimagesink);
gst_element_add_pad (video, gst_ghost_pad_new ("sink", videopad));
gst_object_unref (videopad);
//same approach for the audio output (still to be tested with files that have no sound)
audio = gst_bin_new ("audiobin");
audiopad = gst_element_get_static_pad (aconverter, "sink");
gst_bin_add_many (GST_BIN (audio), aconverter, asink, NULL);
gst_element_link (aconverter, asink);
gst_element_add_pad (audio, gst_ghost_pad_new ("sink", audiopad));
gst_object_unref (audiopad);
gst_bin_add (GST_BIN (pipeline), audio);
}
else
{
//add the source, decoder, muxer and filesink to the main pipeline
gst_bin_add_many (GST_BIN (pipeline), videosrc, decodebin, oggmux, filesink, NULL);
//link the source to the decoder
gst_element_link (videosrc, decodebin);
//create the video output (in a second bin)
video = gst_bin_new ("videobin");
//get the sink pad of the first video element; it will be exposed as the bin's ghost pad
videopad = gst_element_get_static_pad (ffvideoscale, "sink");
//add the elements to the video bin
gst_bin_add_many (GST_BIN (video), ffvideoscale, glupload, glfilterapp, gldownload, ffvideoscale2, theoraenc, queue, NULL);
//force the input to be converted to the RGB format by linking through the caps filter
gboolean link = gst_element_link_filtered (ffvideoscale, glupload, incaps);
//free the memory allocated to the caps
gst_caps_unref(incaps);
if (!link)
{
g_print("Failed to link with the caps filter\n");
return -1;
}
gst_element_link(glupload, glfilterapp);
gst_element_link(glfilterapp, gldownload);
link = gst_element_link_filtered(gldownload, ffvideoscale2, outcaps);
if (!link)
{
g_print("Failed to link with the second caps filter\n");
return -1;
}
gst_element_link(ffvideoscale2, theoraenc);
gst_element_link(theoraenc, queue);
gst_element_add_pad (video, gst_ghost_pad_new ("sink", videopad));
gst_object_unref (videopad);
//add video bin to main pipeline
gst_bin_add (GST_BIN (pipeline), video);
//same approach for the audio output (still to be tested with files that have no sound)
audio = gst_bin_new ("audiobin");
audiopad = gst_element_get_static_pad (aconverter, "sink");
gst_bin_add_many (GST_BIN (audio), aconverter, vorbisenc, queue2, NULL);
gst_element_link (aconverter, vorbisenc);
gst_element_link (vorbisenc, queue2);
gst_element_add_pad (audio, gst_ghost_pad_new ("sink", audiopad));
gst_object_unref (audiopad);
gst_bin_add (GST_BIN (pipeline), audio);
gst_element_link(queue, oggmux);
gst_element_link(queue2, oggmux);
gst_element_link (oggmux, filesink);
}
//start playback: set the pipeline state to PLAYING
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
//display error message if the state cannot be changed
if (ret == GST_STATE_CHANGE_FAILURE)
{
g_print ("Failed to start up pipeline!\n");
// check if there is an error message with details on the bus
GstMessage* msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
if (msg)
{
GError *err = NULL;
gst_message_parse_error (msg, &err, NULL);
g_print ("ERROR: %s\n", err->message);
g_error_free (err);
gst_message_unref (msg);
}
return -1;
}
// run the main loop during playback
g_main_loop_run (loop);
// clean up once playback has finished
gst_element_set_state (pipeline, GST_STATE_NULL);
//free all allocated memory
gst_object_unref (pipeline);
return 0;
}
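The decodebin callbacks cb_newpad_video and cb_newpad_audio connected in main() are also not shown here. Their job is to link each dynamically created decodebin pad to the ghost "sink" pad of the matching bin; a minimal sketch along the lines of the standard GStreamer 0.10 decodebin examples, assuming the global video and audio bins used in main() (the real callbacks may differ), is:

static void cb_newpad_video (GstElement *decodebin, GstPad *pad, gboolean last, gpointer data)
{
//minimal sketch assuming the global "video" bin created in main()
GstCaps *caps;
GstStructure *str;
GstPad *sinkpad;
//only link the video branch once
sinkpad = gst_element_get_static_pad (video, "sink");
if (GST_PAD_IS_LINKED (sinkpad))
{
gst_object_unref (sinkpad);
return;
}
//only accept video pads here
caps = gst_pad_get_caps (pad);
str = gst_caps_get_structure (caps, 0);
if (!g_strrstr (gst_structure_get_name (str), "video"))
{
gst_caps_unref (caps);
gst_object_unref (sinkpad);
return;
}
gst_caps_unref (caps);
//link the decodebin pad to the video bin
gst_pad_link (pad, sinkpad);
gst_object_unref (sinkpad);
}
//cb_newpad_audio is identical except that it checks for "audio" caps
//and links to the ghost "sink" pad of the audio bin instead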