AM5728: MP4 AV solution

Other Parts Discussed in Thread: AM5728

hello:

I am looking for a GStreamer solution to play MP4 audio/video files on the AM5728 board.

The pipeline is as follows:

gst-launch-1.0 -v filesrc location=test.mp4 ! qtdemux name=demux demux.audio_0 ! queue ! aacparse ! faad ! alsasink device=hw:0,0 demux.video_0 ! queue ! mpeg4videoparse ! ducatimpeg4dec ! waylandsink

I would like to convert this command into source code, but I cannot get it to play audio and video. Here is my source:

static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
GstPad *sinkpad;
gchar *name;

GstElement *vqueue = (GstElement *) data;
GstElement *aqueue = (GstElement *) data;
/* Link this pad with the queue sink pad */
name = gst_pad_get_name( pad );
if (strncmp(name, "video_0", NULL)==0){
g_print ("Dynamic pad created, linking demuxer/vsink\n");
sinkpad = gst_element_get_static_pad (vqueue, "sink");
gst_pad_link (pad, sinkpad);
gst_object_unref (sinkpad);
}
else if(strncmp(name, "audio_0", NULL)==0){
g_print ("Dynamic pad created, linking demuxer/asink\n");
sinkpad = gst_element_get_static_pad (aqueue, "sink");
gst_pad_link (pad, sinkpad);
gst_object_unref (sinkpad);

}

printf( "%s - New pad '%s'\n", __FUNCTION__, name);

}


int main (int argc, char *argv[])
{
GMainLoop *loop;
GstBus *bus;
GstMessage *msg;
GstCaps *caps;

GstElement *pipeline;
GstElement *filesrc, *qtdemux, *queue0, *aacparse, *faad, *alsasink;
GstElement *queue1, *mpeg4videoparse, *ducatimpeg4dec, *waylandsink;


/* Initialisation */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);

/* Create gstreamer elements*/

pipeline = gst_pipeline_new("pipeline");

filesrc = gst_element_factory_make("filesrc", "filesource");
g_return_val_if_fail (filesrc, -1);
g_object_set (G_OBJECT (filesrc), "location", "test.mp4" , NULL);

qtdemux = gst_element_factory_make("qtdemux", "qtdemux0");
g_return_val_if_fail (qtdemux, -1);

queue0 = gst_element_factory_make("queue","queue0");
g_return_val_if_fail (queue0, -1);

aacparse = gst_element_factory_make("aacparse","aacparser0");
g_return_val_if_fail (aacparse, -1);

faad = gst_element_factory_make("faad","faad0");
g_return_val_if_fail (faad, -1);

alsasink = gst_element_factory_make("alsasink","alsasink0");
g_return_val_if_fail (alsasink, -1);

queue1 = gst_element_factory_make("queue","queue1");
g_return_val_if_fail (queue1, -1);

mpeg4videoparse = gst_element_factory_make("mpeg4videoparse", "mpeg4videoparse0");
g_return_val_if_fail (mpeg4videoparse, -1);

ducatimpeg4dec = gst_element_factory_make("ducatimpeg4dec","ducatimpeg4dec0");
g_return_val_if_fail (ducatimpeg4dec, -1);

waylandsink = gst_element_factory_make("waylandsink", "waylandsink0");
g_return_val_if_fail (waylandsink, -1);

/* Add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);

/* Add elements to pipeline before linking them.*/
gst_bin_add (GST_BIN (pipeline), filesrc);
gst_bin_add (GST_BIN (pipeline), qtdemux);
gst_bin_add (GST_BIN (pipeline), queue0);
gst_bin_add (GST_BIN (pipeline), aacparse);
gst_bin_add (GST_BIN (pipeline), faad);
gst_bin_add (GST_BIN (pipeline), alsasink);
gst_bin_add (GST_BIN (pipeline), queue1);
gst_bin_add (GST_BIN (pipeline), mpeg4videoparse);
gst_bin_add (GST_BIN (pipeline), ducatimpeg4dec);
gst_bin_add (GST_BIN (pipeline), waylandsink);

/* Link the pipeline */
gst_element_link_pads (filesrc, "src", qtdemux, "sink");
gst_element_link_pads (qtdemux, "src", queue0, "sink");
/* listen for newly created pads */
g_signal_connect (G_OBJECT (qtdemux), "pad-added", G_CALLBACK (on_pad_added), queue0);
gst_element_link_pads (queue0, "src", aacparse, "sink");
gst_element_link_pads (aacparse, "src", faad, "sink");
gst_element_link_pads (faad, "src", alsasink, "sink");
gst_element_link_pads (alsasink, "src", queue1, "sink");
g_signal_connect (G_OBJECT (alsasink), "pad-added", G_CALLBACK (on_pad_added), queue1);
gst_element_link_pads (queue1, "src", mpeg4videoparse, "sink");
gst_element_link_pads (mpeg4videoparse, "src", ducatimpeg4dec, "sink");
gst_element_link_pads (ducatimpeg4dec, "src", waylandsink, "sink");

/* Set the pipeline to "playing" state */
g_print ("Playing: %s\n", "test.mp4");
gst_element_set_state (pipeline, GST_STATE_PLAYING);

/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);

/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);

g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));

return 0;
}

How do I set up synchronized audio and video playback? Where did I go wrong?

  • Hello,

    I will check the code.
    What error are you observing when you run your application?
    Please add a debug log of the execution.
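
    For example, a log can be captured with something like this (the binary name is just a placeholder; the debug level is up to you):

    GST_DEBUG=2 ./your_app 2> gst_debug.log

    or, equivalently, by passing --gst-debug=2 on the command line.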

    BR
    Margarita
  • Hello,

    I do not see the device=hw:0,0 property being set in your code with the g_object_set function.
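
    For example, right after creating the alsasink element, something like:

    g_object_set (G_OBJECT (alsasink), "device", "hw:0,0", NULL);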


    BR
    Margarita
  • hello:
    I added "device" attribute in my code. Still no video and audio
    g_object_set(G_OBJECT(alsasink), "device", "hw:0,0", NULL);
  • hello:
    I tried adding the --gst-debug=2 parameter when executing; the highlighted lines below contain the error messages:
    root@am57xx-evm:~# ./mp4_to_display --gst-debug=2
    Playing: test.mp4
    [ 1137.918150] omap-iommu 55082000.mmu: 55082000.mmu: version 2.1
    0:00:00.162300991 1148 0x17b460 WARN basesrc gstbasesrc.c:3489:gst_base_src_start_complete:<filesource> pad not activated yet
    0:00:00.163063410 1148 0x176a30 WARN qtdemux qtdemux.c:2651:qtdemux_parse_trex:<demux> failed to find fragment defaults for stream 1
    0:00:00.163411029 1148 0x176a30 WARN qtdemux qtdemux.c:2651:qtdemux_parse_trex:<demux> failed to find fragment defaults for stream 2
    0:00:00.163672597 1148 0x176a30 WARN basesrc gstbasesrc.c:2396:gst_base_src_update_length:<filesource> processing at or past EOS
    Dynamic pad created, linking demuxer/vsink
    0:00:00.164458114 1148 0x176a30 WARN alsa conf.c:4729:parse_args: alsalib error: Parameter DEV must be an integer
    0:00:00.164523506 1148 0x176a30 WARN alsa conf.c:4834:snd_config_expand: alsalib error: Parse arguments error: Invalid argument
    0:00:00.164565637 1148 0x176a30 WARN alsa pcm.c:2450:snd_pcm_open_noupdate: alsalib error: Unknown PCM hw:0,0:{AES0 0x02 AES1 0x82 AES2 0x00 AES3 0x02}
    on_pad_added - New pad 'video_0'
    Dynamic pad created, linking demuxer/vsink
    on_pad_added - New pad 'audio_0'
    0:00:00.172315239 1148 0x1769b0 WARN alsa pcm_hw.c:1250:snd_pcm_hw_get_chmap: alsalib error: Cannot read Channel Map ctl: No such file or directory


  • Hello,

    Please try this code:

    #include <gst/gst.h>
    #include <gst/audio/audio.h>
    #include <string.h>
    #include <features.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <errno.h>
    #include <sys/types.h>
    #include <sys/time.h>
    #include <unistd.h>
    #include <ctype.h>

    static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
    {
      GMainLoop *loop = (GMainLoop *) data;

      switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_EOS:
          g_print ("End of stream\n");
          g_main_loop_quit (loop);
          break;
        case GST_MESSAGE_ERROR: {
          gchar *debug;
          GError *error;

          gst_message_parse_error (msg, &error, &debug);
          g_free (debug);
          g_printerr ("Error: %s\n", error->message);
          g_error_free (error);
          g_main_loop_quit (loop);
          break;
        }
        default:
          break;
      }
      return TRUE;
    }

    /* qtdemux creates its source pads at runtime; link each new pad to the queue passed as user data */
    static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
    {
      GstPad *sinkpad;
      gchar *name;
      GstElement *vqueue = (GstElement *) data;
      GstElement *aqueue = (GstElement *) data;

      /* Link this pad with the queue sink pad */
      name = gst_pad_get_name (pad);
      if (strncmp (name, "video", 5) == 0) {
        g_print ("Dynamic pad created, linking demuxer/vsink\n");
        sinkpad = gst_element_get_static_pad (vqueue, "sink");
        gst_pad_link (pad, sinkpad);
        gst_object_unref (sinkpad);
      }
      else if (strncmp (name, "audio", 5) == 0) {
        g_print ("Dynamic pad created, linking demuxer/asink\n");
        sinkpad = gst_element_get_static_pad (aqueue, "sink");
        gst_pad_link (pad, sinkpad);
        gst_object_unref (sinkpad);
      }

      printf ("%s - New pad '%s'\n", __FUNCTION__, name);
      g_free (name);
    }

    int main (int argc, char *argv[])
    {
      GMainLoop *loop;
      GstBus *bus;

      GstElement *pipeline;
      GstElement *filesrc, *qtdemux, *aqueue, *aacparse, *faad, *alsasink;
      GstElement *vqueue, *mpeg4videoparse, *ducatimpeg4dec, *waylandsink;

      /* Initialisation */
      gst_init (&argc, &argv);
      loop = g_main_loop_new (NULL, FALSE);

      /* Create the GStreamer elements */
      pipeline = gst_pipeline_new ("pipeline");

      filesrc = gst_element_factory_make ("filesrc", "filesource");
      g_return_val_if_fail (filesrc, -1);
      g_object_set (G_OBJECT (filesrc), "location", "test.mp4", NULL);

      qtdemux = gst_element_factory_make ("qtdemux", "qtdemux0");
      g_return_val_if_fail (qtdemux, -1);

      aqueue = gst_element_factory_make ("queue", "queue0");
      g_return_val_if_fail (aqueue, -1);

      aacparse = gst_element_factory_make ("aacparse", "aacparser0");
      g_return_val_if_fail (aacparse, -1);

      faad = gst_element_factory_make ("avdec_aac", "aac");
      g_return_val_if_fail (faad, -1);

      alsasink = gst_element_factory_make ("alsasink", "alsasink0");
      //g_object_set (G_OBJECT (alsasink), "device", "hw:0,0", NULL);
      //alsasink = gst_element_factory_make ("fakesink", "alsasink0");
      g_return_val_if_fail (alsasink, -1);

      vqueue = gst_element_factory_make ("queue", "queue1");
      g_return_val_if_fail (vqueue, -1);

      mpeg4videoparse = gst_element_factory_make ("mpeg4videoparse", "mpeg4videoparse0");
      g_return_val_if_fail (mpeg4videoparse, -1);

      ducatimpeg4dec = gst_element_factory_make ("ducatimpeg4dec", "ducatimpeg4dec0");
      g_return_val_if_fail (ducatimpeg4dec, -1);

      waylandsink = gst_element_factory_make ("waylandsink", "waylandsink0");
      g_return_val_if_fail (waylandsink, -1);

      /* Add a message handler */
      bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
      gst_bus_add_watch (bus, bus_call, loop);
      gst_object_unref (bus);

      /* Add elements to the pipeline before linking them */
      gst_bin_add (GST_BIN (pipeline), filesrc);
      gst_bin_add (GST_BIN (pipeline), qtdemux);
      gst_bin_add (GST_BIN (pipeline), aqueue);
      gst_bin_add (GST_BIN (pipeline), aacparse);
      gst_bin_add (GST_BIN (pipeline), faad);
      gst_bin_add (GST_BIN (pipeline), alsasink);
      gst_bin_add (GST_BIN (pipeline), vqueue);
      gst_bin_add (GST_BIN (pipeline), mpeg4videoparse);
      gst_bin_add (GST_BIN (pipeline), ducatimpeg4dec);
      gst_bin_add (GST_BIN (pipeline), waylandsink);

      /* Link the pipeline */
      gst_element_link_pads (filesrc, "src", qtdemux, "sink");
      gst_element_link_pads (qtdemux, "src", aqueue, "sink");
      /* listen for newly created pads */
      g_signal_connect (G_OBJECT (qtdemux), "pad-added", G_CALLBACK (on_pad_added), aqueue);
      gst_element_link_pads (aqueue, "src", aacparse, "sink");
      gst_element_link_pads (aacparse, "src", faad, "sink");
      gst_element_link_pads (faad, "src", alsasink, "sink");
      gst_element_link_pads (alsasink, "src", vqueue, "sink");
      g_signal_connect (G_OBJECT (qtdemux), "pad-added", G_CALLBACK (on_pad_added), vqueue);
      gst_element_link_pads (vqueue, "src", mpeg4videoparse, "sink");
      gst_element_link_pads (mpeg4videoparse, "src", ducatimpeg4dec, "sink");
      gst_element_link_pads (ducatimpeg4dec, "src", waylandsink, "sink");

      /* Set the pipeline to "playing" state */
      g_print ("Playing: %s\n", "test.mp4");
      gst_element_set_state (pipeline, GST_STATE_PLAYING);

      /* Iterate */
      g_print ("Running...\n");
      g_main_loop_run (loop);

      /* Out of the main loop, clean up nicely */
      g_print ("Returned, stopping playback\n");
      gst_element_set_state (pipeline, GST_STATE_NULL);

      g_print ("Deleting pipeline\n");
      gst_object_unref (GST_OBJECT (pipeline));

      return 0;
    }

    //gcc demux_av.c -o demux_av `pkg-config --cflags --libs gstreamer-1.0`

    The mistake was in this line g_signal_connect (G_OBJECT (alsasink), "pad-added", G_CALLBACK (on_pad_added), vqueue);

    It should be g_signal_connect (G_OBJECT (qtdemux), "pad-added", G_CALLBACK (on_pad_added), vqueue);
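
    In other words, qtdemux is the element that creates the audio/video pads at runtime, so both queues have to be connected from its "pad-added" signal. A possible cleaner variant (a sketch, assuming the queues keep the names "queue0" for audio and "queue1" for video, and the pipeline is passed as user data) is a single handler that picks the queue from the pad name:

    static void on_demux_pad_added (GstElement *demux, GstPad *pad, gpointer data)
    {
      GstBin *bin = GST_BIN (data);             /* the pipeline, passed as user data */
      gchar *name = gst_pad_get_name (pad);
      /* audio_%u pads go to queue0 (audio branch), everything else to queue1 (video branch) */
      GstElement *queue = g_str_has_prefix (name, "audio") ?
          gst_bin_get_by_name (bin, "queue0") : gst_bin_get_by_name (bin, "queue1");
      GstPad *sinkpad = gst_element_get_static_pad (queue, "sink");

      if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
        g_printerr ("Failed to link demuxer pad %s\n", name);

      gst_object_unref (sinkpad);
      gst_object_unref (queue);                 /* gst_bin_get_by_name() returns a reference */
      g_free (name);
    }

    It would then be connected only once: g_signal_connect (qtdemux, "pad-added", G_CALLBACK (on_demux_pad_added), pipeline);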

    BR
    Margarita

  • hello;

    Thank you very much. Using filesrc, I can now play the MP4 file.

    Now I would like to replace filesrc with appsrc; the code is as follows:

    static void cb_need_data (GstElement *appsrc,
    guint unused_size,
    gpointer user_data)
    {
    static GstClockTime timestamp = 0;
    GstBuffer *buffer;
    guint size;
    GstFlowReturn ret;

    size = 1024*1024;

    buffer = gst_buffer_new_allocate (NULL, size, NULL);

    fp = fopen("./test.mp4", "rb");

    if(fp == 0)
    {
    printf("openfile erro\n");
    g_main_loop_quit (loop);
    }

    GstMapInfo map;
    if (gst_buffer_map(buffer, &map, GST_MAP_WRITE))
    {
    static int totsize = 0;
    int ret = fread(map.data,1,map.size,fp);

    if (ret <= 0)
    {
    printf("READ ERRO,ret:%d ,totsize:%d byte\n",ret,totsize);
    fclose(fp);
    fp = 0;
    totsize = 0;
    //goto loop_playback; //loop play
    g_main_loop_quit (loop);
    }
    totsize+=ret;
    gst_buffer_unmap(buffer, &map);
    }

    GST_BUFFER_PTS (buffer) = timestamp;
    GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 2);
    timestamp += GST_BUFFER_DURATION (buffer);
    g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
    gst_buffer_unref (buffer);

    if (ret != GST_FLOW_OK) {
    printf("#exit ret:%d\n", ret);
    /* something wrong, stop pushing */
    g_main_loop_quit (loop);
    }
    }

    I changed g_object_set (G_OBJECT (filesrc), "location", "test.mp4", NULL) to g_signal_connect (filesrc, "need-data", G_CALLBACK (cb_need_data), NULL).

    Compiling and running it produces the following error:

    root@am57xx-evm:~# ./mp4
    Playing: test.mp4
    [ 5750.011927] omap-iommu 55082000.mmu: 55082000.mmu: version 2.1
    Running...
    Error: Internal data flow error.
    Returned, stopping playback

    (mp4_to_display:1299): GStreamer-CRITICAL **: gst_mini_object_unref: assertion 'mini_object->refcount > 0' failed
    Deleting pipeline

    I added a queue before qtdemux, but the result is still the same:

    appsrc--> queue --> qtdemux-->..........

  • Hello,

    I will check the source code and get back to you when I have something.
    Meanwhile, I would recommend that you check this tutorial:
    gstreamer.freedesktop.org/.../short-cutting-the-pipeline.html
    Keep in mind that there is a mistake in it: the line gst_buffer_unref (sample); must be gst_sample_unref (sample);
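
    The corrected fragment would then look roughly like this (a sketch, assuming an appsink element pointer named sink):

    GstSample *sample;
    g_signal_emit_by_name (sink, "pull-sample", &sample);
    if (sample) {
      GstBuffer *buffer = gst_sample_get_buffer (sample);  /* the buffer stays owned by the sample */
      /* ... process the buffer here ... */
      gst_sample_unref (sample);  /* not gst_buffer_unref (sample) */
    }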

    BR
    Margarita
  • Hello,

    In your code I do not see:
    g_signal_connect (data.app_source, "enough-data", G_CALLBACK (stop_feed), &data);
    after:
    g_signal_connect (data.app_source, "need-data", G_CALLBACK (start_feed), &data);

    BR
    Margarita
  • hello:
    I wrote a pipeline to play the local file test.h264, as follows:
    appsrc -> queue -> h264parse -> ducatih264dec -> waylandsink
    It compiles, executes, and plays correctly.

    #include <stdio.h>
    #include <stdlib.h>
    #include <gst/gst.h>

    static GMainLoop *loop;
    static FILE *fp = 0;

    static gboolean bus_callback(GstBus *bus, GstMessage *message, gpointer *ptr)
    {
    switch (GST_MESSAGE_TYPE(message)) {

    case GST_MESSAGE_ERROR: {
    gchar *debug;
    GError *err;
    gst_message_parse_error(message, &err, &debug);
    g_print("Error %s\n", err->message);
    g_error_free(err);
    g_free(debug);
    }
    break;

    case GST_MESSAGE_WARNING: {
    gchar *debug;
    GError *err;
    const gchar *name;
    gst_message_parse_warning(message, &err, &debug);
    g_print("Warning %s\nDebug %s\n", err->message, debug);

    name = GST_MESSAGE_SRC_NAME(message);

    g_print("Name of src %s\n", name ? name : "nil");
    g_error_free(err);
    g_free(debug);
    }
    break;

    case GST_MESSAGE_QOS:
    g_main_loop_quit(loop);
    break;

    case GST_MESSAGE_STATE_CHANGED:
    break;

    default:
    g_print("got message %s\n", \
    gst_message_type_get_name(GST_MESSAGE_TYPE(message)));
    break;
    }

    return TRUE;
    }

    static void cb_need_data (GstElement *appsrc,
    guint unused_size,
    gpointer user_data)
    {
    static GstClockTime timestamp = 0;
    GstBuffer *buffer;
    guint size;
    GstFlowReturn ret;

    size = 1024*1024;

    buffer = gst_buffer_new_allocate (NULL, size, NULL);

    fp = fopen("./test.h264", "rb");

    if(fp == 0)
    {
    printf("openfile erro\n");
    g_main_loop_quit (loop);
    }

    GstMapInfo map;
    if (gst_buffer_map(buffer, &map, GST_MAP_WRITE))
    {
    static int totsize = 0;
    int ret = fread(map.data,1,map.size,fp);

    if (ret <= 0)
    {
    printf("READ ERRO,ret:%d ,totsize:%d byte\n",ret,totsize);
    fclose(fp);
    fp = 0;
    totsize = 0;
    //goto loop_playback; //loop play
    g_main_loop_quit (loop);
    }
    totsize+=ret;
    gst_buffer_unmap(buffer, &map);
    }

    GST_BUFFER_PTS (buffer) = timestamp;
    GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 2);
    timestamp += GST_BUFFER_DURATION (buffer);
    g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
    gst_buffer_unref (buffer);

    if (ret != GST_FLOW_OK) {
    printf("#exit ret:%d\n", ret);
    /* something wrong, stop pushing */
    g_main_loop_quit (loop);
    }
    }

    gint main (gint argc, gchar *argv[])
    {
    GstElement *pipeline, *appsrc, *queue, *h264parse, *ducatih264dec, *waylandsink;
    //

    /* init GStreamer */
    gst_init (&argc, &argv);

    loop = g_main_loop_new (NULL, FALSE);

    /* setup pipeline */
    pipeline = gst_pipeline_new ("pipeline");

    appsrc = gst_element_factory_make ("appsrc", "source-file");

    queue = gst_element_factory_make ("queue", "queue0");

    h264parse = gst_element_factory_make ("h264parse", "h264parse0");

    ducatih264dec = gst_element_factory_make ("ducatih264dec", "ducatih264dec0");

    waylandsink = gst_element_factory_make ("waylandsink", "waylandsink0");

    GstBus *bus;
    bus = gst_pipeline_get_bus((GstPipeline*)pipeline);
    gst_bus_add_watch(bus, (GstBusFunc)bus_callback, 0);

    /* setup */
    g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), NULL);
    gst_bin_add_many(GST_BIN(pipeline), appsrc, queue, h264parse, ducatih264dec, waylandsink, NULL);
    gst_element_link_many (appsrc, queue, h264parse, ducatih264dec, waylandsink, NULL);


    /* play */
    gst_element_set_state (pipeline, GST_STATE_PLAYING);
    g_main_loop_run (loop);

    /* clean up */
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (GST_OBJECT (pipeline));
    g_main_loop_unref (loop);

    return 0;
    }


    I do not understand why I cannot play MP4 files the same way.
  • Hello,


    Please check these links for the difference between an elementary stream and a container format:
    Containers:
    en.wikipedia.org/.../Container_format_(digital)
    en.wikipedia.org/.../QuickTime_File_Format

    Elementary stream:
    en.wikipedia.org/.../Video_compression_format
    en.wikipedia.org/.../H264

    I would recommend that you check the tutorial link for caps and how they are set.

    BR
    Margarita
  • Hello:

    Have you verified this problem?

    I read the links you gave, but I still do not know how to solve the problem I am encountering.

    I suspect that appsrc is incompatible with qtdemux.

  • Hello,

    Both appsrc and qtdemux are standard GStreamer elements, not TI elements.
    Keep in mind that test.mp4 is a container, so pushing it into qtdemux the same way you push an elementary stream will not work. As I said, try to set the caps of your appsrc element.
    The use case that is working feeds an elementary video stream.
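
    For example, if appsrc is meant to feed the whole MP4 container into qtdemux, a sketch of the configuration (not verified on the EVM) would be:

    GstCaps *caps = gst_caps_new_empty_simple ("video/quicktime");  /* MP4/QuickTime container caps */
    g_object_set (G_OBJECT (appsrc),
        "caps", caps,
        "format", GST_FORMAT_BYTES,  /* raw container bytes, not timed frames */
        NULL);
    gst_caps_unref (caps);

    In that case you would not set PTS/duration on the pushed buffers; the demuxer recovers the timing from the container. Also keep in mind that if the moov atom is at the end of the file, qtdemux needs to seek, which means configuring appsrc for random access (its stream-type property) and handling its "seek-data" signal.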

    Please check this link:
    gstreamer-devel.narkive.com/.../problem-with-appsink-buffer-in-gstreamer-1-0


    BR
    Margarita