Tool/software: Linux
Hello everyone,
I'm stuck with a memory leak. My board is the AM5728 EVM: http://www.ti.com.cn/tool/cn/tmdsevm572x?keyMatch=am5728%20evm&tisearch=Search-CN-Everything
My SDK is Processor SDK Linux AM57x 03.03.00.04: http://software-dl.ti.com/processor-sdk-linux/esd/AM57X/03_03_00_04/index_FDS.html
My host PC runs Ubuntu 14.04.
When I use GStreamer with ducatih264enc to encode the camera data, unfortunately a memory leak occurs.
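The pipeline I want to end up with is roughly equivalent to the following launch line (the device node, caps and output file name are just taken from my setup, so treat it as a sketch):

gst-launch-1.0 v4l2src device=/dev/video1 ! 'video/x-raw,format=NV12,width=352,height=288,framerate=30/1' ! ducatih264enc ! filesink location=/mnt/camera.h264

In the test program below I replace v4l2src with my own V4L2 capture code that feeds an appsrc.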
/*
 * Build command:
 * arm-linux-gnueabihf-gcc app-camera.c -o app `pkg-config --cflags --libs gstreamer-1.0 gstreamer-base-1.0 gstreamer-app-1.0`
 */
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <stdio.h>
#include <stdint.h>
#include <errno.h>
#include <fcntl.h>
#include <stdlib.h>
#include <string.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#define waylandsink
#define v4l2src
typedef struct _App App;
struct _App
{
GstElement *pipeline;
GstElement *appsrc;
GstElement *encode;
GstElement *sink;
GstBus *bus;
GstMessage *msg;
GMainLoop *loop;
guint bus_watch_id;
};
App s_app;
int ret, idx, fd;
#define NBUF 3
#define FRAME_SIZE 152064 //352*288*1.5
int width = 352, height = 288;
void *buffer_addr[NBUF];
int size[NBUF];
/* Forward declaration: xioctl() is defined after this callback but used inside it */
int xioctl(int fd, int request, void *arg);
/* "need-data" callback: dequeue one V4L2 buffer, copy it into a GstBuffer
 * and push it into appsrc. */
static void
feed_data (GstElement * appsrc, guint size, App * app)
{
printf("feed-data....\n");
struct v4l2_buffer buf;
/* Dequeue one buffer */
memset(&buf, 0, sizeof(buf));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if(-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) {
perror("Dequeue Buffer");
return;
}
idx = buf.index;
GstBuffer *buffer;
GstFlowReturn ret;
buffer = gst_buffer_new_allocate(NULL,FRAME_SIZE,NULL);
GstMapInfo info;
gst_buffer_map(buffer,&info,GST_MAP_WRITE);
unsigned char * buff = info.data;
memcpy(buff,buffer_addr[idx],FRAME_SIZE);
gst_buffer_unmap(buffer,&info);
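/* Note: the "push-buffer" action signal does NOT take ownership of the buffer
 * (it takes its own reference), so the buffer still has to be unreffed after
 * the emit; gst_app_src_push_buffer() would take ownership instead. */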
g_signal_emit_by_name(app->appsrc,"push-buffer",buffer,&ret);
printf("ret:%d\n",ret);
if (ret != GST_FLOW_OK) {
/* Some error: drop our reference before bailing out so the buffer is not leaked */
printf("push error...\n");
gst_buffer_unref(buffer);
return;
}
gst_buffer_unref(buffer);
memset(&buf, 0, sizeof(buf));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = idx;
if(-1 == xioctl(fd, VIDIOC_QBUF, &buf)) {
perror("Queue Buffer");
return;
}
}
int xioctl(int fd, int request, void *arg)
{
int r;
do r = ioctl (fd, request, arg);
while (-1 == r && EINTR == errno);
return r;
}
int init_device(int fd) {
unsigned int i;
struct v4l2_capability caps;
struct v4l2_format fmt;
struct v4l2_requestbuffers req;
struct v4l2_buffer buf;
/* Check for capture device */
memset(&caps, 0, sizeof(caps));
if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &caps)) {
perror("Setting Pixel Format");
return 1;
}
printf("Driver: %s\ncaps: %8x", caps.driver, caps.capabilities);
if (!(caps.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
printf("Not a capture device\n");
return 1;
}
/* Set capture format to UYVY */
memset(&fmt, 0, sizeof(fmt));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = width;
fmt.fmt.pix.height = height;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV12;
// fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
fmt.fmt.pix.field = V4L2_FIELD_NONE;
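/* Assumption on my side: ducatih264enc wants NV12 input, which is why NV12 is
 * requested above. Also note that VIDIOC_S_FMT may adjust width/height/format
 * to what the driver actually supports, so the negotiated values are printed
 * below. */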
if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt)) {
perror("Setting Pixel Format");
return 1;
}
printf("Selected Camera Mode:\n" " Width: %d\n" " Height: %d\n" " Field: %d",
fmt.fmt.pix.width, fmt.fmt.pix.height, fmt.fmt.pix.field);
printf(" PixFmt = %c%c%c%c\n",
fmt.fmt.pix.pixelformat & 0xFF, (fmt.fmt.pix.pixelformat >> 8) & 0xFF,
(fmt.fmt.pix.pixelformat >> 16) & 0xFF, (fmt.fmt.pix.pixelformat >> 24) &0xFF);
/* Currently driver supports only mmap buffers
* Request memory mapped buffers */
memset(&req, 0, sizeof(req));
req.count = NBUF;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (-1 == xioctl(fd, VIDIOC_REQBUFS, &req)) {
perror("Requesting Buffer");
return 1;
}
printf("Total buffer num %d\n", req.count);
for (i = 0; i < req.count; i++) {
memset(&buf, 0, sizeof(buf));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (-1 == xioctl(fd, VIDIOC_QUERYBUF, &buf)) {
perror("Querying Buffer");
return 1;
}
/* Memory map all the buffers and save the addresses */
buffer_addr[i] = mmap(NULL, buf.length, PROT_READ | PROT_WRITE,
MAP_SHARED, fd, buf.m.offset);
//buffer_addr[i]=(void*)malloc(FRAME_SIZE);
size[i] = buf.length;
printf("Address %p, size %d, image size: %d \n", buffer_addr[i], buf.length, buf.bytesused);
/* Queue the buffer for capture */
memset(&buf, 0, sizeof(buf));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (-1 == xioctl(fd, VIDIOC_QBUF, &buf)) {
perror("Queue Buffer");
return 1;
}
printf("12345\r\n");
}
if (-1 == xioctl(fd, VIDIOC_STREAMON, &buf.type)) {
perror("Start Capture");
return 1;
}
return 0;
}
void release_device(int fd)
{
int i;
int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
xioctl(fd, VIDIOC_STREAMOFF, &type);
/* Unmap the capture buffers before closing the device */
for (i = 0; i < NBUF; i++)
munmap(buffer_addr[i], size[i]);
close(fd);
}
gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg))
{
case GST_MESSAGE_EOS:
fprintf(stderr, "End of stream\n");
g_main_loop_quit(loop);
break;
case GST_MESSAGE_ERROR:
{
gchar *debug;
GError *error;
gst_message_parse_error(msg, &error, &debug);
g_free(debug);
g_printerr("Error: %s\n", error->message);
g_error_free(error);
g_main_loop_quit(loop);
break;
}
default:
break;
}
return TRUE;
}
int main(int argc, char **argv)
{
App *app = &s_app;
printf("==========\n");
char devnode[100] = "/dev/video1";
fd = open(devnode, O_RDWR);
if (fd == -1) {
perror("Opening video device");
return 1;
}
ret = init_device(fd);
if (0 != ret) {
printf("Exiting");
return ret;
}
gst_init(NULL,NULL);
app->pipeline = gst_pipeline_new("encodepipeline");
g_assert(app->pipeline);
app->appsrc = gst_element_factory_make("appsrc","srcElement");
g_assert(app->appsrc);
app->encode = gst_element_factory_make("ducatih264enc","encElement");
g_assert(app->encode);
app->sink = gst_element_factory_make("filesink","sinkElement");
g_assert(app->sink);
printf("element creat success\n");
GstCaps *capsappsrc2H264enc;
capsappsrc2H264enc = gst_caps_new_simple("video/x-raw",
"format",G_TYPE_STRING, "NV12",
"width", G_TYPE_INT, 352,
"height",G_TYPE_INT, 288,
"framerate", GST_TYPE_FRACTION, 30, 1,
NULL);
gst_app_src_set_caps(GST_APP_SRC(app->appsrc), capsappsrc2H264enc);
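/* These caps describe what feed_data() actually pushes; they have to match the
 * V4L2 format negotiated in init_device() (NV12, 352x288), otherwise the
 * downstream element gets mis-described data. */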
g_object_set(app->sink,"location","/mnt/camera.yuv",NULL);
app->loop = g_main_loop_new(NULL,FALSE);
app->bus = gst_pipeline_get_bus(GST_PIPELINE(app->pipeline));
app->bus_watch_id = gst_bus_add_watch(app->bus,bus_call,app->loop);
gst_object_unref(app->bus);
gboolean bLinkOk;
//appsrc -> filesink
gst_bin_add_many(GST_BIN(app->pipeline), app->appsrc,app->sink,NULL);
bLinkOk= gst_element_link(app->appsrc,app->sink);
if(!bLinkOk){
g_warning("Failed to link many 1\n");
return -5;
}
//appsrc -> ducatih264enc -> filesink
// gst_bin_add_many(GST_BIN(app->pipeline), app->appsrc, app->encode, app->sink, NULL);
// bLinkOk = gst_element_link_many(app->appsrc, app->encode, app->sink, NULL);
// if(!bLinkOk){
// g_warning("Failed to link appsrc -> ducatih264enc -> filesink\n");
// return -5;
// }
g_signal_connect(app->appsrc, "need-data", G_CALLBACK(feed_data), app);
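/* For a live camera it may also be worth setting appsrc properties such as
 * "is-live" and "format", and handling the "enough-data" signal; this test
 * program does not do that. */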
gst_element_set_state (app->pipeline, GST_STATE_PLAYING);
printf("run....1\n");
g_main_loop_run(app->loop);
printf("run....2\n");
/* The bus watch already handled ERROR/EOS and quit the loop, so there is no
 * need to pop messages from the bus again here (our bus reference was already
 * dropped right after gst_bus_add_watch above). */
g_source_remove(app->bus_watch_id);
gst_element_set_state (app->pipeline, GST_STATE_NULL);
/* appsrc and filesink are owned by the pipeline, so unreffing the pipeline
 * releases them as well */
gst_object_unref (app->pipeline);
g_main_loop_unref (app->loop);
release_device(fd);
printf("close...\n");
return 0;
}