hello, everyone:
I am using the AM5728 EVM and I want to use the IVAHD encoder. I can use it with one camera, but I do not know how to use it with two cameras. Can anyone help me?
I have spent a lot of time on this; please help. Thanks.
The AM5728 EVM board has only one camera interface. How are you going to feed the second camera input? How are you trying to create the use case: through a gstreamer pipeline, or by working at the V4L2 and DCE interface layer?
Thanks. I am using the gstreamer approach to push the NV12 data. Right now I use two threads to create two pipelines, but only one works. In each thread I read a different file and write the H.264 data to a separate output file. If I use only one thread I get correct data, but if I use two threads, only one of them produces correct data. Thanks.
/////// this is my buscall.h ////////////
#ifndef BUSCALL_H
#define BUSCALL_H
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data);
gboolean bus_callTwo(GstBus *bus, GstMessage *msg, gpointer data);
unsigned int MyGetTickCount();
#endif // BUSCALL_H
///// this is my buscall.cpp ///////////
#include "buscall.h"
#include <errno.h>
#include <stdio.h>
#include <sys/time.h>
gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg))
{
case GST_MESSAGE_EOS:
fprintf(stderr, "End of stream\n");
g_main_loop_quit(loop);
break;
case GST_MESSAGE_ERROR:
{
gchar *debug;
GError *error;
gst_message_parse_error(msg, &error, &debug);
g_free(debug);
g_printerr("Error: %s\n", error->message);
g_error_free(error);
g_main_loop_quit(loop);
break;
}
default:
break;
}
return TRUE;
}
/* identical to bus_call; a separate bus watch callback for the second pipeline */
gboolean bus_callTwo(GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg))
{
case GST_MESSAGE_EOS:
fprintf(stderr, "End of stream\n");
g_main_loop_quit(loop);
break;
case GST_MESSAGE_ERROR:
{
gchar *debug;
GError *error;
gst_message_parse_error(msg, &error, &debug);
g_free(debug);
g_printerr("Error: %s\n", error->message);
g_error_free(error);
g_main_loop_quit(loop);
break;
}
default:
break;
}
return TRUE;
}
//////////////////////// this is my encode.h /////////////////
#ifndef ENCODE_H
#define ENCODE_H
#include <stdio.h>
#include <string.h>
#include <malloc.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
int init_encodeTwo();
int startEncodeTwo(char *nv12buf,int NV12Len,char *encodebuf, int* encodeLen);
void PushBufferEncodeTwo(char *nv12buf,int len);
char* PullH264EncodeTwo(int* outlen);
int init_encode();
int startEncode(char *nv12buf,int NV12Len,char *encodebuf, int* encodeLen);
void PushBufferEncode(char *nv12buf,int len);
char* PullH264Encode(int* outlen);
#endif
///////// this is my encode.cpp //////////////////////
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <sys/time.h>
#include <pthread.h>
#include <unistd.h>
#include "buscall.h"
#if 1
static volatile bool bGstreamerEncodeThreadStarted = false;
static volatile bool bGstreamerEncodeThreadFinished = false;
static GstElement *appsrc;
static GstElement *appsink;
void* GstreamerEncodeThread(void* pThreadParam)
{
GMainLoop *loop;
GstElement *pipeline, *encode;
GstBus *bus;
guint bus_watch_id;
gboolean bLinkOk;
pipeline = gst_pipeline_new("mypipeline");
g_assert(pipeline);
appsrc = gst_element_factory_make("appsrc", "mysource");
g_assert(appsrc);
encode = gst_element_factory_make("ducatih264enc", "myencode");
g_assert(encode);
appsink = gst_element_factory_make("appsink", "mysink");
g_assert(appsink);
if (!pipeline || !appsrc || !encode || !appsink)
{
fprintf(stderr, "Could not gst_element_factory_make, terminating\n");
bGstreamerEncodeThreadStarted = bGstreamerEncodeThreadFinished = true;
return (void*)0xDEAD;
}
GstCaps *capsappsrc2Jpegenc; // caps between appsrc and the encoder
/* the filter between appsrc and the encoder should describe only the raw
   NV12 input; the encoder negotiates its H.264 output caps itself, so a
   second "video/x-h264" structure does not belong in this filter */
capsappsrc2Jpegenc = gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, "NV12",
"width", G_TYPE_INT, 352,
"height", G_TYPE_INT, 288,
"framerate", GST_TYPE_FRACTION, 15, 1,
NULL);
loop = g_main_loop_new(NULL, FALSE);
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
gst_object_unref(bus);
gst_bin_add_many(GST_BIN(pipeline), appsrc ,encode, appsink, NULL);
bLinkOk = gst_element_link_filtered(appsrc, encode, capsappsrc2Jpegenc);
if(!bLinkOk){
g_warning("Failed to link src encode \n");
return (void*)0xDEAD;
}
gst_element_link(encode, appsink);
g_object_set (G_OBJECT (appsrc),
"stream-type", GST_APP_STREAM_TYPE_STREAM,
"format", GST_FORMAT_BYTES,
NULL);
g_object_set(G_OBJECT(encode),
"rate-preset",1,NULL);
fprintf(stderr, "Setting pipeline to GST_STATE_PLAYING\n");
gst_element_set_state(pipeline, GST_STATE_PLAYING);
bGstreamerEncodeThreadStarted = true;
g_main_loop_run(loop);
fprintf(stderr, "g_main_loop_run returned, stopping playback\n");
// Stop pipeline to be released
gst_element_set_state(pipeline, GST_STATE_NULL);
fprintf(stderr, "Deleting pipeline\n");
gst_object_unref(GST_OBJECT(pipeline)); /* also releases the elements the bin owns */
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
/* the elements were owned by the pipeline, so they must not be unreffed
   again here; only our caps reference is left, and caps are not a
   GstObject, so they need gst_caps_unref */
gst_caps_unref(capsappsrc2Jpegenc);
bGstreamerEncodeThreadFinished = true;
return NULL;
}
bool StartGstreamer()
{
pthread_t GtkThreadId; /* use pthread_t rather than unsigned long */
pthread_attr_t GtkAttr;
int result = pthread_attr_init(&GtkAttr);
if (result != 0)
{
fprintf(stderr, "pthread_attr_init returned error %d\n", result);
return false;
}
void* pParam = NULL;
result = pthread_create(&GtkThreadId, &GtkAttr,
GstreamerEncodeThread, pParam);
if (result != 0)
{
fprintf(stderr, "pthread_create returned error %d\n", result);
return false;
}
return true;
}
void PushBufferEncode(char *nv12buf,int len)
{
GstFlowReturn ret;
GstBuffer *buffer;
buffer = gst_buffer_new_allocate(NULL, len, NULL);
GstMapInfo info;
gst_buffer_map(buffer, &info, GST_MAP_WRITE);
unsigned char* buf = info.data;
memmove(buf, nv12buf, len);
gst_buffer_unmap(buffer, &info);
ret = gst_app_src_push_buffer(GST_APP_SRC(appsrc), buffer);
if(ret == GST_FLOW_OK){
printf("push ok\n");
}
}
char* PullH264Encode(int* outlen)
{
GstSample* sample;
sample = gst_app_sink_pull_sample(GST_APP_SINK(appsink));
if(sample == NULL)
{
fprintf(stderr, "gst_app_sink_pull_sample returned null\n");
return NULL;
}
// printf("get data\n");
GstBuffer* buffer = gst_sample_get_buffer (sample);
GstMapInfo map;
gst_buffer_map (buffer, &map, GST_MAP_READ);
char* pRet = new char[map.size];
memmove(pRet, map.data, map.size);
gst_buffer_unmap (buffer, &map);
gst_sample_unref (sample);
*outlen = map.size;
return pRet;
}
int init_encode()
{
StartGstreamer();
while(bGstreamerEncodeThreadStarted == false)
usleep(10000);
if(bGstreamerEncodeThreadFinished == true)
{
fprintf(stderr,
"Gstreamer thread could not start, terminating\n");
return -1;
}
return 0;
}
int startEncode(char *nv12buf,int NV12Len,char *encodebuf, int* encodeLen)
{
// pthread_mutex_lock(&mutex);
/* scratch buffer: a full 352x288 NV12 frame is 352*288*3/2 bytes, so this is large enough */
char aa[352*288*3];
memset(aa, 0, sizeof(aa));
int alen = NV12Len;
memcpy(aa,nv12buf,alen);
PushBufferEncode(aa, alen);
int len1=0;
char * buf = PullH264Encode(&len1);
*encodeLen = len1;
// printf("*encodelen:%d\n",*encodeLen);
if(*encodeLen !=0)
memcpy(encodebuf,buf,len1);
delete[] buf;
return 0;
}
#endif
//////// the second pipeline ////////
#if 1
static volatile bool bGstreamerEncodeThreadStartedTwo = false;
static volatile bool bGstreamerEncodeThreadFinishedTwo = false;
static GstElement *appsrcTwo;
static GstElement *appsinkTwo;
void* GstreamerEncodeThreadTwo(void* pThreadParam)
{
GMainLoop *loop;
GstElement *pipeline, *encode;
GstBus *bus;
guint bus_watch_id;
gboolean bLinkOk;
pipeline = gst_pipeline_new("mypipelineTwo");
g_assert(pipeline);
appsrcTwo = gst_element_factory_make("appsrc", "mysourceTwo");
g_assert(appsrcTwo);
encode = gst_element_factory_make("ducatih264enc", "myencodeTwo");
g_assert(encode);
appsinkTwo = gst_element_factory_make("appsink", "mysinkTwo");
g_assert(appsinkTwo);
if (!pipeline || !appsrcTwo || !encode || !appsinkTwo)
{
fprintf(stderr, "Could not gst_element_factory_make, terminating\n");
bGstreamerEncodeThreadStartedTwo = bGstreamerEncodeThreadFinishedTwo = true;
return (void*)0xDEAD;
}
GstCaps *capsappsrc2Jpegenc; // caps between appsrc and the encoder
/* as in the first pipeline: only the raw NV12 input format belongs in this filter */
capsappsrc2Jpegenc = gst_caps_new_simple("video/x-raw",
"format", G_TYPE_STRING, "NV12",
"width", G_TYPE_INT, 352,
"height", G_TYPE_INT, 288,
"framerate", GST_TYPE_FRACTION, 15, 1,
NULL);
loop = g_main_loop_new(NULL, FALSE);
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
bus_watch_id = gst_bus_add_watch(bus, bus_callTwo, loop);
gst_object_unref(bus);
gst_bin_add_many(GST_BIN(pipeline), appsrcTwo ,encode, appsinkTwo, NULL);
bLinkOk = gst_element_link_filtered(appsrcTwo, encode, capsappsrc2Jpegenc);
if(!bLinkOk){
g_warning("Failed to link src encode \n");
return (void*)0xDEAD;
}
gst_element_link(encode, appsinkTwo);
g_object_set (G_OBJECT (appsrcTwo),
"stream-type", GST_APP_STREAM_TYPE_STREAM,
"format", GST_FORMAT_BYTES,
NULL);
g_object_set(G_OBJECT(encode),
"rate-preset",1,NULL);
fprintf(stderr, "Setting pipeline to GST_STATE_PLAYING\n");
gst_element_set_state(pipeline, GST_STATE_PLAYING);
bGstreamerEncodeThreadStartedTwo = true;
g_main_loop_run(loop);
fprintf(stderr, "g_main_loop_run returned, stopping playback\n");
// Stop pipeline to be released
gst_element_set_state(pipeline, GST_STATE_NULL);
fprintf(stderr, "Deleting pipeline\n");
gst_object_unref(GST_OBJECT(pipeline)); /* also releases the elements the bin owns */
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
/* the elements are owned by the pipeline; only the caps reference remains ours */
gst_caps_unref(capsappsrc2Jpegenc);
bGstreamerEncodeThreadFinishedTwo = true;
return NULL;
}
bool StartGstreamerTwo()
{
pthread_t GtkThreadId; /* use pthread_t rather than unsigned long */
pthread_attr_t GtkAttr;
int result = pthread_attr_init(&GtkAttr);
if (result != 0)
{
fprintf(stderr, "pthread_attr_init returned error %d\n", result);
return false;
}
void* pParam = NULL;
result = pthread_create(&GtkThreadId, &GtkAttr,
GstreamerEncodeThreadTwo, pParam);
if (result != 0)
{
fprintf(stderr, "pthread_create returned error %d\n", result);
return false;
}
return true;
}
void PushBufferEncodeTwo(char *nv12buf,int len)
{
GstFlowReturn ret;
GstBuffer *buffer;
buffer = gst_buffer_new_allocate(NULL, len, NULL);
GstMapInfo info;
gst_buffer_map(buffer, &info, GST_MAP_WRITE);
unsigned char* buf = info.data;
memmove(buf, nv12buf, len);
gst_buffer_unmap(buffer, &info);
ret = gst_app_src_push_buffer(GST_APP_SRC(appsrcTwo), buffer);
if(ret == GST_FLOW_OK){
printf("push ok\n");
}
}
char* PullH264EncodeTwo(int* outlen)
{
GstSample* sample;
sample = gst_app_sink_pull_sample(GST_APP_SINK(appsinkTwo));
if(sample == NULL)
{
fprintf(stderr, "gst_app_sink_pull_sample returned null\n");
return NULL;
}
// printf("get data\n");
GstBuffer* buffer = gst_sample_get_buffer (sample);
GstMapInfo map;
gst_buffer_map (buffer, &map, GST_MAP_READ);
char* pRet = new char[map.size];
memmove(pRet, map.data, map.size);
gst_buffer_unmap (buffer, &map);
gst_sample_unref (sample);
*outlen = map.size;
return pRet;
}
int init_encodeTwo()
{
StartGstreamerTwo();
while(bGstreamerEncodeThreadStartedTwo == false)
usleep(10000);
if(bGstreamerEncodeThreadFinishedTwo == true)
{
fprintf(stderr,
"Gstreamer thread could not start, terminating\n");
return -1;
}
return 0;
}
int startEncodeTwo(char *nv12buf,int NV12Len,char *encodebuf, int* encodeLen)
{
/* scratch buffer: a full 352x288 NV12 frame is 352*288*3/2 bytes, so this is large enough */
char aa[352*288*3];
memset(aa, 0, sizeof(aa));
int alen = NV12Len;
memcpy(aa,nv12buf,alen);
PushBufferEncodeTwo(aa, alen);
int len1=0;
char * buf = PullH264EncodeTwo(&len1);
*encodeLen = len1;
printf("*encodelen:%d\n",*encodeLen);
if(*encodeLen !=0)
memcpy(encodebuf,buf,len1);
delete[] buf;
return 0;
}
#endif
//////// this is my main.cpp ///////////
#include <stdio.h>
#include "buscall.h"
#include "encode.h"
#include <unistd.h>
#include <sys/time.h>
#include <pthread.h>
FILE *fpSrc1=NULL,* fpSrc2=NULL,*fpDst1=NULL,*fpDst2=NULL;
/////////////// thread One //////////////
void * threadOne(void*){
fpSrc1 = fopen("/mnt/1.yuv","rb");
fpDst1 = fopen("/mnt/1.h264","wb+");
if( fpSrc1 == NULL || fpDst1 == NULL){
printf("open error\n");
return NULL; /* thread functions return void*, so return NULL on failure */
}
printf("threadOne\n");
/* a full 352x288 NV12 frame is 352*288*3/2 bytes (luma plane plus
   interleaved chroma); reading only 352*288 would drop the chroma */
char encodebufNV121[352*288*3/2] = {'\0'};
int encodeLenNV121 = 352*288*3/2;
int encodeLen2641=0;
char encodebuf2641[352*288] = {'\0'};
int ret;
printf("NV12 frame size: %d\n", encodeLenNV121);
init_encode();
while(1)
{
if( feof(fpSrc1) !=0)
{
fflush(fpDst1);
fclose(fpSrc1);
fclose(fpDst1);
printf("close src1\n");
break;
}
memset(encodebufNV121, 0, encodeLenNV121);
memset(encodebuf2641, 0, sizeof(encodebuf2641)); /* clear only the output buffer's own size */
// printf("read\n");
ret =fread(encodebufNV121, 1, encodeLenNV121,fpSrc1);
printf("read :%d \n",ret);
startEncode(encodebufNV121,ret,encodebuf2641,&encodeLen2641);
usleep(1000);
printf("encodeLen264:%d\n",encodeLen2641);
ret =fwrite(encodebuf2641,1,encodeLen2641,fpDst1);
fflush(fpDst1);
printf("write:%d\n",ret);
}
return NULL;
}
void * threadTwo(void*){
fpSrc2 = fopen("/mnt/2.yuv","rb"); /* open in binary mode, matching thread one */
fpDst2 = fopen("/mnt/2.h264","wb+");
if( fpSrc2 == NULL || fpDst2 == NULL){
printf("open error\n");
return NULL;
}
/* same NV12 frame size as thread one: 352*288*3/2 bytes */
char encodebufNV122[352*288*3/2] = {'\0'};
int encodeLenNV122 = 352*288*3/2;
int encodeLen2642=0;
char encodebuf2642[352*288] = {'\0'};
int ret=0;
init_encodeTwo();
while(1)
{
if( feof(fpSrc2) !=0)
{
fflush(fpDst2);
fclose(fpSrc2);
fclose(fpDst2);
printf("close src2\n");
break;
}
memset(encodebufNV122, 0, encodeLenNV122);
memset(encodebuf2642, 0, sizeof(encodebuf2642)); /* clear only the output buffer's own size */
// printf("read\n");
ret =fread(encodebufNV122, 1, encodeLenNV122,fpSrc2);
printf("read2 :%d ",ret);
startEncodeTwo(encodebufNV122,ret,encodebuf2642,&encodeLen2642);
usleep(1000);
printf("encodeLen2642:%d\n",encodeLen2642);
ret =fwrite(encodebuf2642,1,encodeLen2642,fpDst2);
fflush(fpDst2);
printf("write2:%d\n",ret);
}
return NULL;
}
int main(int argc, char *argv[])
{
pthread_t npthId1;
pthread_t npthId2;
gst_init(NULL,NULL);
int ret = -1;
ret =pthread_create(&npthId1, NULL,threadOne,NULL);
if(ret != 0){
printf("create thread one failed\n");
return -1;
}
ret = pthread_create(&npthId2, NULL,threadTwo,NULL);
if(ret != 0){
printf("create thread two failed\n");
return -1;
}
/* wait for both encode threads to finish instead of spinning forever */
pthread_join(npthId1, NULL);
pthread_join(npthId2, NULL);
return 0;
}
Hello,
First pipeline:
abayyy said:
GstElement *pipeline, *encode;
GstBus *bus;
guint bus_watch_id;
gboolean bLinkOk;
pipeline = gst_pipeline_new("mypipeline");
g_assert(pipeline);
appsrc = gst_element_factory_make("appsrc", "mysource");
g_assert(appsrc);
encode = gst_element_factory_make("ducatih264enc", "myencode");
g_assert(encode);
appsink = gst_element_factory_make("appsink", "mysink");
g_assert(appsink);
Second pipeline:
abayyy said:
GstElement *pipeline, *encode;
GstBus *bus;
guint bus_watch_id;
gboolean bLinkOk;
pipeline = gst_pipeline_new("mypipelineTwo");
g_assert(pipeline);
appsrcTwo = gst_element_factory_make("appsrc", "mysourceTwo");
g_assert(appsrcTwo);
encode = gst_element_factory_make("ducatih264enc", "myencodeTwo");
g_assert(encode);
appsinkTwo = gst_element_factory_make("appsink", "mysinkTwo");
g_assert(appsinkTwo);
if (!pipeline || !appsrcTwo || !encode || !appsinkTwo).............
bus_watch_id = gst_bus_add_watch(bus, bus_callTwo, loop);
gst_object_unref(bus);
gst_bin_add_many(GST_BIN(pipeline), appsrcTwo ,encode, appsinkTwo, NULL);
bLinkOk = gst_element_link_filtered(appsrcTwo, encode, capsappsrc2Jpegenc);............
The "encode" variable for the second pipeline matches the one in the first pipeline.
Could you change the "encode" name to something else, like encodeTwo, in your pipeline (and do the same for the gst element names)?
The elements should have unique names.
Unfortunately, there is no example of running multiple pipelines in one application in the gstreamer tutorials.
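For illustration, a minimal sketch of building the same appsrc ! ducatih264enc ! appsink chain twice with unique names (this helper and its naming scheme are only placeholders, not code from the post above):

#include <stdio.h>
#include <gst/gst.h>

/* hypothetical helper: builds one appsrc -> ducatih264enc -> appsink chain,
   using idx to give the pipeline and every element a unique name */
static GstElement *build_encode_pipeline(int idx, GstElement **src, GstElement **sink)
{
    char name[32];
    snprintf(name, sizeof(name), "mypipeline%d", idx);
    GstElement *pipeline = gst_pipeline_new(name);
    snprintf(name, sizeof(name), "mysource%d", idx);
    *src = gst_element_factory_make("appsrc", name);
    snprintf(name, sizeof(name), "myencode%d", idx);
    GstElement *enc = gst_element_factory_make("ducatih264enc", name);
    snprintf(name, sizeof(name), "mysink%d", idx);
    *sink = gst_element_factory_make("appsink", name);
    if (!pipeline || !*src || !enc || !*sink)
        return NULL;
    gst_bin_add_many(GST_BIN(pipeline), *src, enc, *sink, NULL);
    if (!gst_element_link_many(*src, enc, *sink, NULL)) {
        gst_object_unref(pipeline);
        return NULL;
    }
    return pipeline;
}

Calling build_encode_pipeline(1, ...) and build_encode_pipeline(2, ...) then guarantees the two pipelines never share an element name or an element handle.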
BR
Margarita
hello
I modified the code, but sorry, it does not work either.
(encodeClass:1170): GLib-GObject-WARNING **: invalid unclassed pointer in cast to 'GstAppSrc'
** (encodeClass:1170): CRITICAL **: gst_app_src_push_buffer_full: assertion 'GST_IS_APP_SRC (appsrc)' failed
(encodeClass:1170): GLib-GObject-WARNING **: invalid uninstantiatable type '(null)' in cast to 'GstAppSink'
** (encodeClass:1170): CRITICAL **: gst_app_sink_pull_sample: assertion 'GST_IS_APP_SINK (appsink)' failed
These are the error messages.
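These assertions indicate that the pointers being cast are not valid GstAppSrc/GstAppSink instances at the moment they are used, for example because a handle is still NULL, was overwritten by the other pipeline, or has already been released. A minimal sketch of one way to avoid that, keeping each pipeline's handles in a context passed to its thread instead of in shared static globals (the EncoderContext type and all names below are illustrative, not taken from the code above):

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>

/* hypothetical per-pipeline state: one instance per encoder thread */
typedef struct {
    GstElement *appsrc;
    GstElement *appsink;
    volatile gboolean started;
} EncoderContext;

/* each thread receives its own context through the pthread parameter */
void *EncodeThread(void *param)
{
    EncoderContext *ctx = (EncoderContext *)param;
    /* ... build the pipeline here, storing the elements in ctx->appsrc and
       ctx->appsink instead of file-scope statics ... */
    ctx->started = TRUE;
    return NULL;
}

/* push helper: verify the handle really is an appsrc before using it */
GstFlowReturn push_frame(EncoderContext *ctx, const char *data, int len)
{
    g_return_val_if_fail(GST_IS_APP_SRC(ctx->appsrc), GST_FLOW_ERROR);
    GstBuffer *buf = gst_buffer_new_allocate(NULL, len, NULL);
    gst_buffer_fill(buf, 0, data, len);
    return gst_app_src_push_buffer(GST_APP_SRC(ctx->appsrc), buf);
}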
Hello,
I am sorry for the delay.
I created two gstreamer applications. I tested them on a PC.
The first application plays this pipeline:
gst-launch-1.0 videotestsrc ! autovideosink videotestsrc pattern=1 ! autovideosink
Both pipelines are played in one thread.
#include <gst/gst.h>
#include <glib.h>

static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

int main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstElement *pipeline1, *source, *sink;
  GstElement *pipeline;
  GstElement *pipeline2, *source2, *sink2;
  GstBus *bus;
  guint bus_watch_id;

  /* Initialisation */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);
  pipeline = gst_pipeline_new ("unioun");

  /* Create gstreamer elements for first pipeline */
  pipeline1 = gst_pipeline_new ("videotest1");
  source = gst_element_factory_make ("videotestsrc", "file-source1");
  g_object_set (G_OBJECT (source), "num-buffers", 1000, NULL);
  g_object_set (G_OBJECT (source), "pattern", 0, NULL);
  sink = gst_element_factory_make ("autovideosink", "video-output");
  if (!pipeline1 || !source || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  pipeline2 = gst_pipeline_new ("videotest2");
  source2 = gst_element_factory_make ("videotestsrc", "file-source2");
  g_object_set (G_OBJECT (source2), "num-buffers", 1000, NULL);
  g_object_set (G_OBJECT (source2), "pattern", 1, NULL);
  sink2 = gst_element_factory_make ("autovideosink", "video-output2");
  if (!pipeline2 || !source2 || !sink2) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* we add all elements into the pipeline */
  gst_bin_add_many (GST_BIN (pipeline1), source, sink, NULL);
  gst_bin_add_many (GST_BIN (pipeline2), source2, sink2, NULL);
  gst_bin_add_many (GST_BIN (pipeline), pipeline1, pipeline2, NULL);

  /* we link the elements together */
  gst_element_link (source, sink);
  gst_element_link (source2, sink2);
  gst_element_link (pipeline1, pipeline2);

  /* Set the pipeline to "playing" state */
  g_print ("Now playing: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Iterate */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);

  return 0;
}
The first videotestsrc element is linked to the sink in pipeline1. The second videotestsrc is linked to the second sink in pipeline2. This line:
gst_bin_add_many (GST_BIN (pipeline), pipeline1, pipeline2, NULL);
links both pipelines into one.
In this case you could use the pipelines that Manisha shared this way:
gst-launch-1.0 -e videotestsrc num-buffers=100 ! queue ! 'video/x-raw, format=(string)NV12, width=(int)1920, height=(int)1080, framerate=(fraction)30/1' ! queue ! ducatih264enc ! queue ! h264parse ! mpegtsmux ! filesink location=file1.ts videotestsrc num-buffers=100 ! queue ! 'video/x-raw, format=(string)NV12, width=(int)1920, height=(int)1080, framerate=(fraction)30/1' ! queue ! ducatih264enc ! queue ! h264parse ! mpegtsmux ! filesink location=file2.ts
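If you build this from code instead of running gst-launch, one option (only a sketch; it assumes the ducatih264enc element is available on the target, and uses a condensed version of the description above) is to hand the whole two-branch description to gst_parse_launch, which returns a single pipeline containing both branches:

#include <gst/gst.h>

int main(int argc, char *argv[])
{
    gst_init(&argc, &argv);
    GError *err = NULL;
    /* two independent branches inside one pipeline description */
    GstElement *pipeline = gst_parse_launch(
        "videotestsrc num-buffers=100 ! video/x-raw,format=NV12,width=1920,height=1080,framerate=30/1 "
        "! ducatih264enc ! h264parse ! mpegtsmux ! filesink location=file1.ts "
        "videotestsrc num-buffers=100 ! video/x-raw,format=NV12,width=1920,height=1080,framerate=30/1 "
        "! ducatih264enc ! h264parse ! mpegtsmux ! filesink location=file2.ts",
        &err);
    if (!pipeline) {
        g_printerr("parse error: %s\n", err ? err->message : "unknown");
        return -1;
    }
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    /* wait for EOS or an error on the shared bus */
    GstBus *bus = gst_element_get_bus(pipeline);
    GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
        (GstMessageType)(GST_MESSAGE_EOS | GST_MESSAGE_ERROR));
    if (msg)
        gst_message_unref(msg);
    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}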
The second application executes two different pipelines (pipeline1 and pipeline2) in the same application:
gst-launch-1.0 videotestsrc ! autovideosink videotestsrc pattern=10 ! autovideosink
gst-launch-1.0 videotestsrc ! autovideosink videotestsrc pattern=18 ! autovideosink
#include <gst/gst.h>
#include <glib.h>

static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

int main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstElement *pipeline1, *source, *sink;
  // GstElement *pipeline;
  GstElement *pipeline2, *source2, *sink2;
  GstBus *bus;
  guint bus_watch_id;

  /* Initialisation */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* Create gstreamer elements for first pipeline */
  pipeline1 = gst_pipeline_new ("videotest1");
  source = gst_element_factory_make ("videotestsrc", "file-source1");
  g_object_set (G_OBJECT (source), "num-buffers", 1000, NULL);
  g_object_set (G_OBJECT (source), "pattern", 10, NULL);
  sink = gst_element_factory_make ("autovideosink", "video-output");
  if (!pipeline1 || !source || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  pipeline2 = gst_pipeline_new ("videotest2");
  source2 = gst_element_factory_make ("videotestsrc", "file-source2");
  g_object_set (G_OBJECT (source2), "num-buffers", 1000, NULL);
  g_object_set (G_OBJECT (source2), "pattern", 18, NULL);
  sink2 = gst_element_factory_make ("autovideosink", "video-output2");
  if (!pipeline2 || !source2 || !sink2) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline1));
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline2));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* we add all elements into the pipeline */
  gst_bin_add_many (GST_BIN (pipeline1), source, sink, NULL);
  gst_bin_add_many (GST_BIN (pipeline2), source2, sink2, NULL);
  //gst_bin_add_many (GST_BIN (pipeline), pipeline1, pipeline2, NULL);

  /* we link the elements together */
  gst_element_link (source, sink);
  gst_element_link (source2, sink2);
  //gst_element_link (pipeline1, pipeline2);

  /* Set the pipeline to "playing" state */
  g_print ("Now playing: %s\n", argv[1]);
  gst_element_set_state (pipeline1, GST_STATE_PLAYING);
  gst_element_set_state (pipeline2, GST_STATE_PLAYING);

  /* Iterate */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline1, GST_STATE_NULL);
  gst_element_set_state (pipeline2, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline1));
  gst_object_unref (GST_OBJECT (pipeline2));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);

  return 0;
}
As you can see in the application, the states are changed separately for both pipelines.
In this case you could try to implement Manisha's pipelines:
gst-launch-1.0 -e videotestsrc num-buffers=100 ! queue ! 'video/x-raw, format=(string)NV12, width=(int)1920, height=(int)1080, framerate=(fraction)30/1' ! queue ! ducatih264enc ! queue ! h264parse ! mpegtsmux ! filesink location=file1.ts &
gst-launch-1.0 -e videotestsrc num-buffers=100 ! queue ! 'video/x-raw, format=(string)NV12, width=(int)1920, height=(int)1080, framerate=(fraction)30/1' ! queue ! ducatih264enc ! queue ! h264parse ! mpegtsmux ! filesink location=file2.ts &
As I said, I tested this on a PC, but you can use the code as a reference.
I need to make some changes to test them on the EVM.
Hope this helps.
BR
Margarita