Other Parts Discussed in Thread: AM5728
Hello,

I am playing MP4 files (audio and video) with a GStreamer solution on an AM5728 board. The pipeline I use is as follows:
gst-launch-1.0 -v filesrc location=test.mp4 ! qtdemux name=demux demux.audio_0 ! queue ! aacparse ! faad ! alsasink device=hw:0,0 demux.video_0 ! queue ! mpeg4videoparse ! ducatimpeg4dec ! waylandsink
I would like to convert this command line into source code, but I find that it plays neither audio nor video. Here is the relevant part of my source:
#include <gst/gst.h>
#include <stdio.h>
#include <string.h>

/* Called when the demuxer creates a new source pad */
static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
    GstPad *sinkpad;
    gchar *name;
    GstElement *vqueue = (GstElement *) data;   /* user data passed at connect time */
    GstElement *aqueue = (GstElement *) data;

    /* Link this pad with the queue sink pad */
    name = gst_pad_get_name (pad);
    if (strncmp (name, "video_0", 7) == 0) {
        g_print ("Dynamic pad created, linking demuxer/vsink\n");
        sinkpad = gst_element_get_static_pad (vqueue, "sink");
        gst_pad_link (pad, sinkpad);
        gst_object_unref (sinkpad);
    }
    else if (strncmp (name, "audio_0", 7) == 0) {
        g_print ("Dynamic pad created, linking demuxer/asink\n");
        sinkpad = gst_element_get_static_pad (aqueue, "sink");
        gst_pad_link (pad, sinkpad);
        gst_object_unref (sinkpad);
    }
    printf ("%s - New pad '%s'\n", __FUNCTION__, name);
    g_free (name);
}
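/* bus_call is not shown in the excerpt above; it is the usual bus watch that
 * quits the main loop on EOS or error. A typical version (not my exact code)
 * looks roughly like this: */
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
    GMainLoop *loop = (GMainLoop *) data;

    switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_EOS:
            g_print ("End of stream\n");
            g_main_loop_quit (loop);
            break;
        case GST_MESSAGE_ERROR: {
            gchar *debug = NULL;
            GError *error = NULL;

            gst_message_parse_error (msg, &error, &debug);
            g_printerr ("Error: %s\n", error->message);
            g_error_free (error);
            g_free (debug);
            g_main_loop_quit (loop);
            break;
        }
        default:
            break;
    }
    return TRUE;
}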
int main (int argc, char *argv[])
{
    GMainLoop *loop;
    GstBus *bus;
    GstElement *pipeline;
    GstElement *filesrc, *qtdemux, *queue0, *aacparse, *faad, *alsasink;
    GstElement *queue1, *mpeg4videoparse, *ducatimpeg4dec, *waylandsink;

    /* Initialisation */
    gst_init (&argc, &argv);
    loop = g_main_loop_new (NULL, FALSE);

    /* Create gstreamer elements */
    pipeline = gst_pipeline_new ("pipeline");
    filesrc = gst_element_factory_make ("filesrc", "filesource");
    g_return_val_if_fail (filesrc, -1);
    g_object_set (G_OBJECT (filesrc), "location", "test.mp4", NULL);
    qtdemux = gst_element_factory_make ("qtdemux", "qtdemux0");
    g_return_val_if_fail (qtdemux, -1);
    queue0 = gst_element_factory_make ("queue", "queue0");
    g_return_val_if_fail (queue0, -1);
    aacparse = gst_element_factory_make ("aacparse", "aacparser0");
    g_return_val_if_fail (aacparse, -1);
    faad = gst_element_factory_make ("faad", "faad0");
    g_return_val_if_fail (faad, -1);
    alsasink = gst_element_factory_make ("alsasink", "alsasink0");
    g_return_val_if_fail (alsasink, -1);
    queue1 = gst_element_factory_make ("queue", "queue1");
    g_return_val_if_fail (queue1, -1);
    mpeg4videoparse = gst_element_factory_make ("mpeg4videoparse", "mpeg4videoparse0");
    g_return_val_if_fail (mpeg4videoparse, -1);
    ducatimpeg4dec = gst_element_factory_make ("ducatimpeg4dec", "ducatimpeg4dec0");
    g_return_val_if_fail (ducatimpeg4dec, -1);
    waylandsink = gst_element_factory_make ("waylandsink", "waylandsink0");
    g_return_val_if_fail (waylandsink, -1);

    /* Add a message handler */
    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_watch (bus, bus_call, loop);
    gst_object_unref (bus);

    /* Add elements to the pipeline before linking them */
    gst_bin_add (GST_BIN (pipeline), filesrc);
    gst_bin_add (GST_BIN (pipeline), qtdemux);
    gst_bin_add (GST_BIN (pipeline), queue0);
    gst_bin_add (GST_BIN (pipeline), aacparse);
    gst_bin_add (GST_BIN (pipeline), faad);
    gst_bin_add (GST_BIN (pipeline), alsasink);
    gst_bin_add (GST_BIN (pipeline), queue1);
    gst_bin_add (GST_BIN (pipeline), mpeg4videoparse);
    gst_bin_add (GST_BIN (pipeline), ducatimpeg4dec);
    gst_bin_add (GST_BIN (pipeline), waylandsink);

    /* Link the pipeline */
    gst_element_link_pads (filesrc, "src", qtdemux, "sink");
    gst_element_link_pads (qtdemux, "src", queue0, "sink");
    /* listen for newly created pads */
    g_signal_connect (G_OBJECT (qtdemux), "pad-added", G_CALLBACK (on_pad_added), queue0);
    gst_element_link_pads (queue0, "src", aacparse, "sink");
    gst_element_link_pads (aacparse, "src", faad, "sink");
    gst_element_link_pads (faad, "src", alsasink, "sink");
    gst_element_link_pads (alsasink, "src", queue1, "sink");
    g_signal_connect (G_OBJECT (alsasink), "pad-added", G_CALLBACK (on_pad_added), queue1);
    gst_element_link_pads (queue1, "src", mpeg4videoparse, "sink");
    gst_element_link_pads (mpeg4videoparse, "src", ducatimpeg4dec, "sink");
    gst_element_link_pads (ducatimpeg4dec, "src", waylandsink, "sink");

    /* Set the pipeline to "playing" state */
    g_print ("Playing: %s\n", "test.mp4");
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* Iterate */
    g_print ("Running...\n");
    g_main_loop_run (loop);

    /* Out of the main loop, clean up nicely */
    g_print ("Returned, stopping playback\n");
    gst_element_set_state (pipeline, GST_STATE_NULL);
    g_print ("Deleting pipeline\n");
    gst_object_unref (GST_OBJECT (pipeline));
    return 0;
}
How should I set up synchronized audio and video playback, and where did I go wrong?
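For comparison, my current guess is that qtdemux only creates its audio_0/video_0 source pads at runtime, so both queues would have to reach a single pad-added handler (the PadData struct and the name on_demux_pad_added below are just my own placeholders), and the static links from the qtdemux and alsasink source pads would not exist. Is something like this rough sketch closer to what is needed?

/* Both queues handed to one pad-added handler */
typedef struct {
    GstElement *aqueue;   /* audio branch queue (queue0) */
    GstElement *vqueue;   /* video branch queue (queue1) */
} PadData;

static void on_demux_pad_added (GstElement *demux, GstPad *pad, gpointer data)
{
    PadData *queues = (PadData *) data;
    gchar *name = gst_pad_get_name (pad);
    GstElement *target = NULL;
    GstPad *sinkpad;

    /* qtdemux names its pads audio_0, video_0, ... */
    if (g_str_has_prefix (name, "audio"))
        target = queues->aqueue;
    else if (g_str_has_prefix (name, "video"))
        target = queues->vqueue;

    if (target != NULL) {
        sinkpad = gst_element_get_static_pad (target, "sink");
        if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
            g_printerr ("Failed to link demuxer pad %s\n", name);
        gst_object_unref (sinkpad);
    }
    g_free (name);
}

/* ... and in main(), only pads that always exist are linked up front; the
 * demuxer pads are linked from the callback, so the qtdemux->queue0 and
 * alsasink->queue1 links and the alsasink pad-added connection go away: */
    gst_element_link_pads (filesrc, "src", qtdemux, "sink");
    gst_element_link_pads (queue0, "src", aacparse, "sink");
    gst_element_link_pads (aacparse, "src", faad, "sink");
    gst_element_link_pads (faad, "src", alsasink, "sink");
    gst_element_link_pads (queue1, "src", mpeg4videoparse, "sink");
    gst_element_link_pads (mpeg4videoparse, "src", ducatimpeg4dec, "sink");
    gst_element_link_pads (ducatimpeg4dec, "src", waylandsink, "sink");

    /* the gst-launch line also sets device=hw:0,0, which my code never did */
    g_object_set (G_OBJECT (alsasink), "device", "hw:0,0", NULL);

    static PadData queues;   /* must stay valid while the callback can fire */
    queues.aqueue = queue0;
    queues.vqueue = queue1;
    g_signal_connect (qtdemux, "pad-added",
                      G_CALLBACK (on_demux_pad_added), &queues);

My understanding is that once both branches run in the same pipeline, the sinks follow the shared pipeline clock, so audio/video synchronization should not need any extra code. Is that correct?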