GPU memory malloc method

Hi all,

When we read the source code of gralloc.cpp for gralloc.default.so (located in /system/lib/hw), we found that the gralloc_alloc function is defined as follows. As the switch statement below shows, the supported formats are limited to RGB, RGBA, and RGBX for gralloc.default.so. However, for the output port buffer of the OMX decoder, the format is HAL_PIXEL_FORMAT_TI_NV12, which equals 256.

So is there another method to allocate GPU memory for the TI format? Thanks.

 

static int gralloc_alloc(alloc_device_t* dev,
        int w, int h, int format, int usage,
        buffer_handle_t* pHandle, int* pStride)
{
    if (!pHandle || !pStride)
        return -EINVAL;
    size_t size, stride;

    int align = 4;
    int bpp = 0;
    switch (format) {
        case HAL_PIXEL_FORMAT_RGBA_8888:
        case HAL_PIXEL_FORMAT_RGBX_8888:
        case HAL_PIXEL_FORMAT_BGRA_8888:
            bpp = 4;
            break;
        case HAL_PIXEL_FORMAT_RGB_888:
            bpp = 3;
            break;
        case HAL_PIXEL_FORMAT_RGB_565:
        case HAL_PIXEL_FORMAT_RGBA_5551:
        case HAL_PIXEL_FORMAT_RGBA_4444:
            bpp = 2;
            break;
        default:
            return -EINVAL;
    }
    size_t bpr = (w*bpp + (align-1)) & ~(align-1);
    size = bpr * h;
    stride = bpr / bpp;

    int err;
    if (usage & GRALLOC_USAGE_HW_FB) {
        err = gralloc_alloc_framebuffer(dev, size, usage, pHandle);
    } else {
        err = gralloc_alloc_buffer(dev, size, usage, pHandle);
    }

    if (err < 0) {
        return err;
    }

    *pStride = stride;
    return 0;
}
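For what it is worth, the kind of size/stride computation we imagine would be needed for this format is sketched below. This is only our own guess, assuming HAL_PIXEL_FORMAT_TI_NV12 is a standard semi-planar YUV 4:2:0 layout (a full-size Y plane followed by a half-height interleaved UV plane, 12 bits per pixel on average) and reusing the same 4-byte row alignment as the default gralloc above. The helper name nv12_buffer_size is made up for illustration, not a real gralloc entry point.

#include <stddef.h>

// Sketch only: buffer size and stride for an assumed NV12 layout.
static int nv12_buffer_size(int w, int h, size_t* size, size_t* stride)
{
    const int align = 4;
    size_t y_stride = (w + (align - 1)) & ~(align - 1); // Y plane is 1 byte per pixel
    size_t y_size   = y_stride * h;                     // full-size Y plane
    size_t uv_size  = y_stride * (h / 2);               // half-height interleaved UV plane
    *size   = y_size + uv_size;                         // total 12 bpp on average
    *stride = y_stride;                                 // stride in pixels == bytes for Y
    return 0;
}

Even if the size can be computed like this, the question remains whether gralloc_alloc_buffer can simply be called with that size, or whether a different allocation path is required for this format.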

 

regards,

guangx