
TI DM355 USB speed

Other Parts Discussed in Thread: TVP5146

Hi,

The USB driver uses a DMA channel on the DaVinci DM355.

capture.c already uses memory mapping for its frame buffers, so why not use memory mapping in the USB driver as well to improve the USB speed?

I do not know what to modify to improve the USB speed. Please give me some advice. The snippet below shows the memory mapping in capture.c, and a rough sketch of the idea I have in mind follows it.

/* Map the allocated buffers to user space */
    for (numCapBufs = 0; numCapBufs < req.count; numCapBufs++) {
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = numCapBufs;

        if (ioctl(fd, VIDIOC_QUERYBUF, &buf) == -1) {
            ERR("Failed VIDIOC_QUERYBUF on %s (%s)\n", V4L2_DEVICE,
                                                       strerror(errno));
            return FAILURE;
        }
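
Here is a rough sketch of what I mean, written by me and not taken from the demo: once VIDIOC_DQBUF returns a filled buffer, the application could pass the mmap'd capture buffer straight to write() on a file that sits on the USB storage mount, so no extra memcpy() into a private user buffer is needed. The mount point /mnt/usb and the file name are only assumptions for the example.

/*
 * Hypothetical helper, not part of the TI demo: write one captured frame
 * (already mmap'd into user space) directly to a file on USB storage.
 */
#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>

static int writeFrameToUsb(const void *mappedBuf, size_t length)
{
    int     outFd;
    ssize_t written;
    size_t  total = 0;

    /* "/mnt/usb/frame.raw" is an assumed path on the USB storage mount */
    outFd = open("/mnt/usb/frame.raw", O_WRONLY | O_CREAT | O_TRUNC, 0644);
    if (outFd == -1) {
        fprintf(stderr, "Cannot open USB target file (%s)\n", strerror(errno));
        return -1;
    }

    /* write() may return short counts, so loop until the whole frame is out */
    while (total < length) {
        written = write(outFd, (const char *) mappedBuf + total,
                        length - total);
        if (written == -1) {
            fprintf(stderr, "write failed (%s)\n", strerror(errno));
            close(outFd);
            return -1;
        }
        total += (size_t) written;
    }

    close(outFd);
    return 0;
}

I do not know whether this really helps, because it depends on where the bottleneck is (the USB controller, its DMA setup, or the file system overhead), and that is what I would like to understand.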


For reference, here is the full capture.c source file from the DaVinci DM355 demo:

/*
 * capture.c
 *
 * ============================================================================
 * Copyright (c) Texas Instruments Inc 2007
 *
 * Use of this software is controlled by the terms and conditions found in the
 * license agreement under which this software has been supplied or provided.
 * ============================================================================
 */

/* Standard Linux headers */
#include <stdio.h>
#include <fcntl.h>
#include <errno.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <asm/types.h>
#include <linux/videodev2.h>

/* Davinci specific kernel headers */
#include <media/davinci/davinci_vpfe.h>
#include <media/davinci/tvp5146.h>

/* Demo headers */
#include <rendezvous.h>
#include <fifoutil.h>
#include <rszcopy.h>
#include <smooth.h>
#include <pause.h>
#include "encode.h"
#include "capture.h"
#include "video.h"
#include "display.h"

/* Number of retries finding a video source before failing */
#define NUM_IOCTL_RETRIES        100

/* Triple buffering for the capture driver */
#define NUM_CAPTURE_BUFS         3

/* The smooth module needs extra input rows for proper processing */
#define EXTRA_TOP_ROWS           2
#define EXTRA_BOTTOM_ROWS        4
#define EXTRA_ROWS               (EXTRA_TOP_ROWS + EXTRA_BOTTOM_ROWS)

/* Describes a capture frame buffer */
typedef struct CaptureBuffer {
    void         *start;
    unsigned long offset;
    size_t        length;
} CaptureBuffer;

/* Local function prototypes */
static int initCaptureDevice(CaptureBuffer **capBufsPtr, int *numCapBufsPtr,
                             int svideoInput, int captureWidth,
                             int captureHeight, int topOffset);
static void cleanupCaptureDevice(int fd, CaptureBuffer *capBufs,
                                 int numCapBufs, int topOffset);

/******************************************************************************
 * initCaptureDevice
 ******************************************************************************/
static int initCaptureDevice(CaptureBuffer **capBufsPtr, int *numCapBufsPtr,
                             int svideoInput, int captureWidth,
                             int captureHeight, int topOffset)
{
    int                         failCount = 0;
    struct v4l2_requestbuffers  req;
    struct v4l2_capability      cap;
    struct v4l2_cropcap         cropCap;
    struct v4l2_crop            crop;
    struct v4l2_format          fmt;
    struct v4l2_buffer          buf;
    enum v4l2_buf_type          type;
    v4l2_std_id                 std;
    int                         input;
    int                         fd;
    CaptureBuffer              *capBufs;
    int                         numCapBufs;
    int                         ret;

    DBG("captureWidth = %d, captureHeight = %d\n", captureWidth, captureHeight);

    /* Open video capture device */
    fd = open(V4L2_DEVICE, O_RDWR | O_NONBLOCK, 0);

    if (fd == -1) {
        ERR("Cannot open %s (%s)\n", V4L2_DEVICE, strerror(errno));
        return FAILURE;
    }

    /* Select the video input */
    if (svideoInput == TRUE) {
        input = TVP5146_AMUX_SVIDEO;
    }
    else {
        input = TVP5146_AMUX_COMPOSITE;
    }

    if (ioctl(fd, VIDIOC_S_INPUT, &input) == -1) {
        ERR("Failed to set video input to %d\n", input);
        return FAILURE;
    }

    DBG("Set the capture input to id %d\n", input);

    /* Query for capture device capabilities */
    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == -1) {
        if (errno == EINVAL) {
            ERR("%s is no V4L2 device\n", V4L2_DEVICE);
            return FAILURE;
        }
        ERR("Failed VIDIOC_QUERYCAP on %s (%s)\n", V4L2_DEVICE,
            strerror(errno));
        return FAILURE;
    }

    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        ERR("%s is no video capture device\n", V4L2_DEVICE);
        return FAILURE;
    }

    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
        ERR("%s does not support streaming i/o\n", V4L2_DEVICE);
        return FAILURE;
    }

    cropCap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_CROPCAP, &cropCap) == -1) {
        ERR("VIDIOC_CROPCAP failed %d, %s\n", errno, strerror(errno));
        return FAILURE;
    }

    /* Auto detect PAL or NTSC using the capture driver as sanity check */
    std = VPFE_STD_AUTO;
    if(ioctl(fd, VIDIOC_S_STD, &std) == -1) {
        ERR("VIDIOC_S_STD (auto) failed on %s (%s)\n", V4L2_DEVICE,
                                                       strerror(errno));
        return FAILURE;
    }

    DBG("Checking video standard\n");

    /* It might take a few tries to detect the signal */
    do {
        ret = ioctl(fd, VIDIOC_QUERYSTD, &std);

        if (ret == -1 && errno == EAGAIN) {
            usleep(1);
            failCount++;
        }
    } while (ret == -1 && errno == EAGAIN && failCount < NUM_IOCTL_RETRIES);

    if (ret == -1) {
        printf("VIDIOC_QUERYSTD failed on %s. Video input connected?\n",
                V4L2_DEVICE);
        return FAILURE;
    }

    switch (std) {
        case V4L2_STD_NTSC:
            DBG("NTSC camera detected\n");
            if (gblGetYFactor() == PALSTD) {
                ERR("NTSC camera connected but PAL selected.\n");
                return FAILURE;
            }
            break;
        case V4L2_STD_PAL:
            DBG("PAL camera detected\n");
            if (gblGetYFactor() == NTSCSTD) {
                ERR("PAL camera connected but NTSC selected.\n");
                return FAILURE;
            }
            break;
        default:
            ERR("Camera (%s) using unsupported video standard\n", V4L2_DEVICE);
            return FAILURE;
    }

    /* Use either NTSC or PAL depending on display kernel parameter */
    std = gblGetYFactor() == NTSCSTD ? V4L2_STD_NTSC : V4L2_STD_PAL;
    if(ioctl(fd, VIDIOC_S_STD, &std) == -1) {
        ERR("VIDIOC_S_STD failed on %s (%s)\n", V4L2_DEVICE,
                                                strerror(errno));
        return FAILURE;
    }

    /* Set the video capture image format */
    CLEAR(fmt);
    fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width       = D1_WIDTH;
    fmt.fmt.pix.height      = D1_HEIGHT;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
    fmt.fmt.pix.field       = V4L2_FIELD_INTERLACED;

    /* Set the video capture format */
    if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1) {
        ERR("VIDIOC_S_FMT failed on %s (%s)\n", V4L2_DEVICE,
                                                strerror(errno));
        return FAILURE;
    }

    crop.type     = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    crop.c.left   = D1_WIDTH / 2 - captureWidth / 2;
    crop.c.top    = D1_HEIGHT / 2 - captureHeight / 2;
    crop.c.width  = captureWidth;
    /*
     * Note that we are optionally capturing 2 extra rows at the top and 4 at
     * the bottom to have more valid image data to work with while removing
     * interlacing artifacts.
     */
    crop.c.height = topOffset ? captureHeight + EXTRA_ROWS : captureHeight;

    DBG("Setting capture cropping (%dx%d)\n", crop.c.width, crop.c.height);

    /* Crop the image depending on requested image size */
    if (ioctl(fd, VIDIOC_S_CROP, &crop) == -1) {
        ERR("VIDIOC_S_CROP failed %d, %s\n", errno, strerror(errno));
        return FAILURE;
    }

    printf("Capturing %dx%d video (cropped to %dx%d)\n",
           fmt.fmt.pix.width, fmt.fmt.pix.height, crop.c.width, crop.c.height);

    CLEAR(req);
    req.count  = NUM_CAPTURE_BUFS;
    req.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;

    /* Allocate buffers in the capture device driver */
    if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1) {
        ERR("VIDIOC_REQBUFS failed on %s (%s)\n", V4L2_DEVICE,
                                                  strerror(errno));
        return FAILURE;
    }

    DBG("%d capture buffers were successfully allocated.\n", req.count);

    if (req.count < NUM_CAPTURE_BUFS) {
        ERR("Insufficient buffer memory on %s\n", V4L2_DEVICE);
        return FAILURE;
    }

    capBufs = calloc(req.count, sizeof(*capBufs));

    if (!capBufs) {
        ERR("Failed to allocate memory for capture buffer structs.\n");
        return FAILURE;
    }

    /* Map the allocated buffers to user space */
    for (numCapBufs = 0; numCapBufs < req.count; numCapBufs++) {
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = numCapBufs;

        if (ioctl(fd, VIDIOC_QUERYBUF, &buf) == -1) {
            ERR("Failed VIDIOC_QUERYBUF on %s (%s)\n", V4L2_DEVICE,
                                                       strerror(errno));
            return FAILURE;
        }

        capBufs[numCapBufs].length = buf.length;
        capBufs[numCapBufs].offset = buf.m.offset + topOffset;
        capBufs[numCapBufs].start  = mmap(NULL,
                                          buf.length,
                                          PROT_READ | PROT_WRITE,
                                          MAP_SHARED,
                                          fd, buf.m.offset);

        if (capBufs[numCapBufs].start == MAP_FAILED) {
            ERR("Failed to mmap buffer on %s (%s)\n", V4L2_DEVICE,
                                                      strerror(errno));
            return FAILURE;
        }

        /* Skip past the extra top rows captured for the smoother */
        capBufs[numCapBufs].start += topOffset;

        DBG("Capture driver buffer %d at physical address %#lx mapped to "
            "virtual address %p\n", numCapBufs,
            capBufs[numCapBufs].offset, capBufs[numCapBufs].start);

        if (ioctl(fd, VIDIOC_QBUF, &buf) == -1) {
            ERR("VIDIOC_QBUF failed on %s (%s)\n", V4L2_DEVICE,
                                                   strerror(errno));
            return FAILURE;
        }
    }

    /* Start the video streaming */
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (ioctl(fd, VIDIOC_STREAMON, &type) == -1) {
        ERR("VIDIOC_STREAMON failed on %s (%s)\n", V4L2_DEVICE,
                                                   strerror(errno));
        return FAILURE;
    }

    *capBufsPtr = capBufs;
    *numCapBufsPtr = numCapBufs;

    return fd;
}

/******************************************************************************
 * cleanupCaptureDevice
 ******************************************************************************/
static void cleanupCaptureDevice(int fd, CaptureBuffer *capBufs,
                                 int numCapBufs, int topOffset)
{
    enum v4l2_buf_type type;
    unsigned int       i;

    /* Shut off the video capture */
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_STREAMOFF, &type) == -1) {
        ERR("VIDIOC_STREAMOFF failed (%s)\n", strerror(errno));
    }

    if (close(fd) == -1) {
        ERR("Failed to close capture device (%s)\n", strerror(errno));
    }

    for (i = 0; i < numCapBufs; i++) {
        if (munmap(capBufs[i].start - topOffset, capBufs[i].length) == -1) {
            ERR("Failed to unmap capture buffer %d\n", i);
        }
    }

    free(capBufs);
}

/******************************************************************************
 * captureThrFxn
 ******************************************************************************/
void *captureThrFxn(void *arg)
{
    CaptureBufferElement cFlush            = { CAPTURE_FLUSH };
    DisplayBufferElement dFlush            = { DISPLAY_FLUSH };
    CaptureEnv          *envp              = (CaptureEnv *) arg;
    Rszcopy_Handle       hRszcopy          = RSZCOPY_FAILURE;
    Smooth_Handle        hSmooth           = SMOOTH_FAILURE;
    void                *status            = THREAD_SUCCESS;
    int                  captureFd         = FAILURE;
    int                  topOffset         = 0;
    struct v4l2_buffer   v4l2buf;
    int                  numCapBufs;
    CaptureBuffer       *capBufs;
    DisplayBufferElement de;
    CaptureBufferElement ce;

    /* Include more rows than necessary for the smoother if applicable */
    if (envp->captureOp == CAPOP_SMOOTH &&
        envp->imageHeight + EXTRA_ROWS <= D1_HEIGHT) {

        topOffset = envp->imageWidth * EXTRA_TOP_ROWS * SCREEN_BPP / 8;
    }

    /* Initialize the video capture device */
    captureFd = initCaptureDevice(&capBufs, &numCapBufs, envp->svideoInput,
                                  envp->imageWidth, envp->imageHeight,
                                  topOffset);

    if (captureFd == FAILURE) {
        cleanup(THREAD_FAILURE);
    }

    DBG("Video capture initialized and started\n");

    if (envp->captureOp == CAPOP_COPY) {
        /* If no interlacing artifact removal, just copy the frame */
        hRszcopy = Rszcopy_create(RSZCOPY_DEFAULTRSZRATE);

        if (hRszcopy == RSZCOPY_FAILURE) {
            ERR("Failed to create resize job\n");
            cleanup(THREAD_FAILURE);
        }

        if (Rszcopy_config(hRszcopy, envp->imageWidth, envp->imageHeight,
                        envp->imageWidth * SCREEN_BPP / 8,
                        envp->imageWidth * SCREEN_BPP / 8) == RSZCOPY_FAILURE) {
            ERR("Failed to configure resize job\n");
            cleanup(THREAD_FAILURE);
        }
    }
    else {
        /* Use the Smooth module to remove interlacing artifacts */
        hSmooth = Smooth_create(SMOOTH_DEFAULTRSZRATE);

        if (hSmooth == SMOOTH_FAILURE) {
            ERR("Failed to create smoother\n");
            cleanup(THREAD_FAILURE);
        }

        if (Smooth_config(hSmooth, envp->imageWidth,
                          envp->imageHeight) == SMOOTH_FAILURE) {
            ERR("Failed to configure smooth job\n");
            cleanup(THREAD_FAILURE);
        }
    }

    /* Signal that initialization is done and wait for other threads */
    Rendezvous_meet(envp->hRendezvousInit);

    DBG("Entering capture main loop.\n");
    while (!gblGetQuit()) {
        /* Pause processing? */
        Pause_test(envp->hPause);

        CLEAR(v4l2buf);
        v4l2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        v4l2buf.memory = V4L2_MEMORY_MMAP;

        /* Get a frame buffer with captured data */
        if (ioctl(captureFd, VIDIOC_DQBUF, &v4l2buf) == -1) {
            if (errno == EAGAIN) {
                continue;
            }

            ERR("VIDIOC_DQBUF failed (%s)\n", strerror(errno));
            breakLoop(THREAD_FAILURE);
        }

        de.virtBuf     = capBufs[v4l2buf.index].start;
        de.physBuf     = capBufs[v4l2buf.index].offset;
        de.width       = envp->imageWidth;
        de.height      = envp->imageHeight;

        /* Display raw frame while encoding it */
        if (FifoUtil_put(envp->hDisplayInFifo, &de) == FIFOUTIL_FAILURE) {
            ERR("Failed to put buffer in output fifo\n");
            breakLoop(THREAD_FAILURE);
        }

        /* Receive buffers to fill in from the video thread */
        if (FifoUtil_get(&envp->inFifo, &ce) == FIFOUTIL_FAILURE) {       
            breakLoop(THREAD_FAILURE);
        }

        /* Is the video thread flushing the pipe? */
        if (ce.id == CAPTURE_FLUSH) {
            breakLoop(THREAD_SUCCESS);
        }
 
        /* Process input frame by copying or deinterlacing */
        if (envp->captureOp == CAPOP_COPY) {
            if (Rszcopy_execute(hRszcopy, capBufs[v4l2buf.index].offset,
                                ce.physBuf) == RSZCOPY_FAILURE) {
                ERR("Failed to resize frame\n");
            }
        }
        else {
            if (Smooth_execute(hSmooth, capBufs[v4l2buf.index].offset,
                               ce.physBuf) == SMOOTH_FAILURE) {
                ERR("Failed to smooth frame\n");
                breakLoop(THREAD_FAILURE);
            }
        }

        /* Send frames to video thread for encoding */
        if (FifoUtil_put(&envp->outFifo, &ce) == FIFOUTIL_FAILURE) {
            ERR("Failed to put buffer in input fifo\n");
            breakLoop(THREAD_FAILURE);
        }

        /* Receive a buffer with a displayed frame from the display thread */
        if (FifoUtil_get(envp->hDisplayOutFifo, &de) == FIFOUTIL_FAILURE) {       
            breakLoop(THREAD_FAILURE);
        }

        /* Is the display thread flushing the pipe? */
        if (de.id == DISPLAY_FLUSH) {
            breakLoop(THREAD_SUCCESS);
        }

        /* Issue captured frame buffer back to device driver */
        if (ioctl(captureFd, VIDIOC_QBUF, &v4l2buf) == -1) {
            ERR("VIDIOC_QBUF failed (%s)\n", strerror(errno));
            breakLoop(THREAD_FAILURE);
        }
    }

cleanup:
    /* Make sure the other threads aren't waiting for init to complete */
    Rendezvous_force(envp->hRendezvousInit);

    /* Make sure the other threads aren't stuck pausing */
    Pause_off(envp->hPause);

    /* Make sure the video thread isn't stuck in FifoUtil_get() */
    FifoUtil_put(&envp->outFifo, &cFlush);

    /* Make sure the display thread isn't stuck in FifoUtil_get() */
    FifoUtil_put(envp->hDisplayInFifo, &dFlush);

    /* Meet up with other threads before cleaning up */
    Rendezvous_meet(envp->hRendezvousCleanup);

    /* Clean up the capture thread */
    if (hRszcopy != RSZCOPY_FAILURE) {
        Rszcopy_delete(hRszcopy);
    }

    if (hSmooth != SMOOTH_FAILURE) {
        Smooth_delete(hSmooth);
    }

    if (captureFd != FAILURE) {
        cleanupCaptureDevice(captureFd, capBufs, numCapBufs, topOffset);
    }

    return status;
}