TDA4VM: Two MIPI CSI-2 ports connected to 5 YUV and 3 RAW12 cameras; images cannot be captured correctly

Part Number: TDA4VM


Two MIPI CSI-2 ports are connected to 5 YUV cameras and 3 RAW12 cameras, and we cannot capture images correctly. Our capture code is below:
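The code below splits the channels across the two CSI-RX instances roughly as follows (VC numbers taken from the chVcNum assignments; this is a summary of the code, not a verified configuration):

/* tivxTask_capture  (graph)     -> instId CAPT1_INST_ID (0), NUM_CHANNELS   = 1 UYVY channel  on VC0,       4 data lanes */
/* tivxTask_capture1 (raw_graph) -> instId CAPT2_INST_ID (1), NUM_2_CHANNELS = 3 RAW12 channels on VC1..VC3, 4 data lanes */
/* Note: both capture nodes are placed on TIVX_TARGET_CAPTURE1 via vxSetNodeTarget().                                     */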

#include <string.h>
#include <iostream>        
#include <unistd.h>
#include <map>
#include <TI/j7.h>
#include <TI/tivx_config.h>
#include <TI/tivx_event.h>
#include <TI/tivx_task.h>
#include "math.h"
#include <limits.h>
#include <sys/time.h>
#include <time.h>
extern "C" {
#include "remote_service/include/app_remote_service.h"
#include "ipc/include/app_ipc.h"
#include "iss/include/app_iss.h"

#include "sensors/include/app_sensors.h"
}
#include "camera_inst.h"
#include "camera_tool.h"
#include "camera.h"
using namespace std;
namespace baidu {
namespace idg {
namespace gaia {

#define RAW_DATA 1
#define YUV_DATA 1

#define TIVX_TARGET_DEFAULT_STACK_SIZE      (256U * 1024U)
#define TIVX_TARGET_DEFAULT_TASK_PRIORITY1   (8u)

#define ENABLED_NUM_CHANNELS  (4U)
#define NUM_BUF             (4)
#define MAX_ABS_FILENAME    (1024u)
#define NUM_CHANNELS_PER_MIPI (4U)
#define NUM_INST              (1U)
#define IMAGE_WIDTH            (1920)
#define IMAGE_HEIGHT           (1080)
#define IMAGE_FORMAT        (VX_DF_IMAGE_UYVY)
#define APP_MAX_FILE_PATH  (255)
#define CAPTURE_NODE_ERROR  (1U)
static char gs_sensor_name[128] = SENSOR_OV1F_HIK_MAX96717F; 
static int gs_channel_mask = 0xFF;
static  std::map<std::string, Camera*> gs_camera_name_map;
static const vx_char user_data_object_name[] = "tivx_capture_params_t";
static CameraConfig _config[8];


Camera* get_camera_instance()
{
    static Camera_inst cam;
    return &cam;
}
CameraParam& Camera_inst::get_cameraparam(void)
{
    return _param;
}
int32_t Camera_inst::set_cameraparam(CameraParam& para)
{
    _param.sync_call_back = para.sync_call_back;
    _param.call_back_period_us = para.call_back_period_us;
    return 0;
}
int32_t copy_image(vx_object_array out_capture_frames,
                            uint32_t buf_id, void *user_ptr)
{
    uint32_t width, height;
    vx_imagepatch_addressing_t image_addr;
    vx_rectangle_t rect;
    vx_map_id map_id;
    void *data_ptr = NULL;
    uint32_t num_bytes_per_pixel = 2;
    vx_df_image format;
    vx_image img;
    vx_bool is_invalid = 1;
    if (user_ptr == nullptr) {
        return -1;
    }
    // if (not_lock[buf_id] == 1) {
    //     //printf("camera %d not lock\n", buf_id);
    //     return -2;
    // }
    //printf("get item buf_id=%d\n", buf_id);
    img = (vx_image)vxGetObjectArrayItem(out_capture_frames, buf_id);
    if (img == nullptr) {
       // printf("Get image failed buf_id:%d\n", buf_id);
        return -3;
    }
    vxQueryReference((vx_reference)img, TIVX_REFERENCE_INVALID, &is_invalid, sizeof(is_invalid));
    if (is_invalid != 0) {
        //printf("[%s]buf_id:%d, is_invalid\n", __FUNCTION__, buf_id);
        return -4;
    }
    vxQueryImage(img, VX_IMAGE_WIDTH, &width, sizeof(vx_uint32));
    vxQueryImage(img, VX_IMAGE_HEIGHT, &height, sizeof(vx_uint32));
    vxQueryImage(img, VX_IMAGE_FORMAT, &format, sizeof(format));
    rect.start_x = 0;
    rect.start_y = 0;
    rect.end_x = width;
    rect.end_y = height;
    vxMapImagePatch(img,
                    &rect,
                    0,
                    &map_id,
                    &image_addr,
                    &data_ptr,
                    VX_READ_ONLY,
                    VX_MEMORY_TYPE_HOST,
                    VX_NOGAP_X
                   );
    if (!data_ptr)
    {
        printf("Map Image Patch failed,data_ptr is NULL \n");
        return -2;
    }
    memcpy(user_ptr, data_ptr, width * height * num_bytes_per_pixel);
    vxUnmapImagePatch(img, map_id);
    vxReleaseImage(&img);
    return 0;
}

int32_t copy_raw_image(vx_object_array out_capture_frames,
                            uint32_t buf_id, void *user_ptr)
{
    uint32_t width, height;
    vx_imagepatch_addressing_t image_addr;
    vx_rectangle_t rect;
    vx_map_id map_id;
    void *data_ptr = NULL;
    uint32_t num_bytes_per_pixel = 2;
    vx_df_image format;
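    /* NOTE: RAW_IMAGE is not defined in this file (only RAW_DATA/YUV_DATA are), so unless it comes
     * from a header or a build flag, the #ifndef branch below (the vx_image path) is the one that
     * gets compiled, even though tivxTask_capture1 passes in an object array built from a
     * tivx_raw_image exemplar. */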
#ifndef RAW_IMAGE
    vx_image img = NULL;
#else
    tivx_raw_image img;
#endif

    vx_bool is_invalid = 1;
    if (user_ptr == nullptr) {
        return -1;
    }
    // if (not_lock[buf_id] == 1) {
    //     //printf("camera %d not lock\n", buf_id);
    //     return -2;
    // }
    //printf("get item buf_id=%d\n", buf_id);
#ifndef RAW_IMAGE
    img = (vx_image)vxGetObjectArrayItem(out_capture_frames, buf_id);
    if (img == nullptr) {
       // printf("Get image failed buf_id:%d\n", buf_id);
        return -3;
    }
    vxQueryReference((vx_reference)img, TIVX_REFERENCE_INVALID, &is_invalid, sizeof(is_invalid));
    if (is_invalid != 0) {
        //printf("[%s]buf_id:%d, is_invalid\n", __FUNCTION__, buf_id);
        return -4;
    }
    vxQueryImage(img, VX_IMAGE_WIDTH, &width, sizeof(vx_uint32));
    vxQueryImage(img, VX_IMAGE_HEIGHT, &height, sizeof(vx_uint32));
    vxQueryImage(img, VX_IMAGE_FORMAT, &format, sizeof(format));
    rect.start_x = 0;
    rect.start_y = 0;
    rect.end_x = width;
    rect.end_y = height;
    vxMapImagePatch(img,
                    &rect,
                    0,
                    &map_id,
                    &image_addr,
                    &data_ptr,
                    VX_READ_ONLY,
                    VX_MEMORY_TYPE_HOST,
                    VX_NOGAP_X
                   );
    if (!data_ptr)
    {
        printf("Map Image Patch failed,data_ptr is NULL \n");
        return -2;
    }
    memcpy(user_ptr, data_ptr, width * height * num_bytes_per_pixel);
    vxUnmapImagePatch(img, map_id);
    vxReleaseImage(&img);
#else
    img = (tivx_raw_image)vxGetObjectArrayItem(out_capture_frames, buf_id);

    vxQueryReference((vx_reference)img, TIVX_REFERENCE_INVALID, &is_invalid, sizeof(is_invalid));
    if (is_invalid != 0) {
        //printf("[%s]buf_id:%d, is_invalid\n", __FUNCTION__, buf_id);
        return -4;
    }

    tivxQueryRawImage(img, TIVX_RAW_IMAGE_WIDTH, &width, sizeof(vx_uint32));
    tivxQueryRawImage(img, TIVX_RAW_IMAGE_HEIGHT, &height, sizeof(vx_uint32));
    tivxQueryRawImage(img, TIVX_RAW_IMAGE_FORMAT, &format, sizeof(format));

//    printf("width %d height%d\n",width,height);
    
    
    rect.start_x = 0;
    rect.start_y = 0;
    rect.end_x = width;
    rect.end_y = height;

    tivxMapRawImagePatch(img,
        &rect,
        0,
        &map_id,
        &image_addr,
        &data_ptr,
        VX_READ_ONLY,
        VX_MEMORY_TYPE_HOST,
        TIVX_RAW_IMAGE_PIXEL_BUFFER
        );

        if (!data_ptr)
        {
            printf("Map Image Patch failed,data_ptr is NULL \n");
            return -2;
        }
        memcpy(user_ptr, data_ptr, width * height * num_bytes_per_pixel);
        tivxUnmapRawImagePatch(img, map_id);
        tivxReleaseRawImage(&img);

#endif
    return 0;
}
/*
 * Utility API used to add a graph parameter from a given node and node parameter index
 */
void add_graph_parameter_by_node_index(vx_graph raw_graph, vx_node node, vx_uint32 node_parameter_index)
{
    vx_parameter parameter = vxGetParameterByIndex(node, node_parameter_index);
    vxAddParameterToGraph(raw_graph, parameter);
    vxReleaseParameter(&parameter);
}
int32_t Camera_inst::power_up()
{
    appPowerOnImageSensor(gs_sensor_name, 0, gs_channel_mask);
    appInitImageSensor(gs_sensor_name, 0, gs_channel_mask);
    appStartImageSensor(gs_sensor_name, gs_channel_mask);
    return 0;
}
int32_t Camera_inst::power_down()
{
    appStopImageSensor(gs_sensor_name, gs_channel_mask);
    appDeInitImageSensor(gs_sensor_name);
    return 0;
}
void _init_config(CameraConfig config[])
{
    config[0].width = 1280;
    config[0].height = 960;
    config[0].name = "v0";
    config[0].format = FMT_YUV422_UYVY;
    config[1].width = 1920;
    config[1].height = 1080;
    config[1].name = "v1";
    config[1].format = FMT_YUV422_UYVY;
    config[2].width = 1920;
    config[2].height = 1080;
    config[2].name = "v2";
    config[2].format = FMT_YUV422_UYVY;
    config[3].width = 1920;
    config[3].height = 1080;
    config[3].name = "v3";
    config[3].format = FMT_YUV422_UYVY;
    // config[4].width = 1920;
    // config[4].height = 1080;
    // config[4].name = "v4";
    // config[4].format = FMT_YUV422_UYVY;
    // config[5].width = 1280;
    // config[5].height = 960;
    // config[5].name = "v5";
    // config[5].format = FMT_YUV422_UYVY;
    // config[6].width = 1280;
    // config[6].height = 960;
    // config[6].name = "v6";
    // config[6].format = FMT_YUV422_UYVY;
    // config[7].width = 1280;
    // config[7].height = 960;
    // config[7].name = "v7";
    // config[7].format = FMT_YUV422_UYVY;
}

#define MAX_NUM_BUF             (8u)
#define NUM_CHANNELS        (1U)
#define NUM_2_CHANNELS        (3U)
#define CAPT1_INST_ID       (0U)
#define CAPT2_INST_ID       (1U)

#define CAPT1_INST_SENSOR_MASK       ((1 << NUM_CHANNELS) - 1U)
#define CAPT2_INST_SENSOR_MASK       (((1 << NUM_CHANNELS) - 1U) << 4U)

#define CAPTURE1_ENABLE        (1U)
#define CAPTURE2_ENABLE        (1U)

static uint32_t num_buf = 4;

static tivx_event  eventHandle_RxFinished;


static tivx_event  eventHandle_RxFinished1;
static tivx_event  eventHandle_SensorCfgDone;
uint32_t sensorCfgDone = 0U;

static vx_context context;
static tivx_raw_image raw_img_exemplar;
static vx_image img_exemplar;

vx_graph graph = nullptr;
vx_graph raw_graph = nullptr;
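/*
 * Capture task for the YUV path: builds a capture graph on CSI-RX instance CAPT1_INST_ID (0) with
 * NUM_CHANNELS (1) UYVY channel(s), pipelines num_buf object arrays, and dequeues/requeues frames
 * in a loop, copying each frame out with copy_image(). Posts eventHandle_SensorCfgDone after the
 * first dequeue so that tivxTask_capture1 can start, and eventHandle_RxFinished when done.
 */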
static void VX_CALLBACK tivxTask_capture(void *app_var)
{
    vx_node csirx_node = 0;
    int num_buf = 4;
    vx_object_array capture_frames[MAX_NUM_BUF];
    vx_user_data_object capture_config;
    tivx_capture_params_t local_capture_config;
    uint32_t buf_id, loop_id, loopCnt;
    vx_graph_parameter_queue_params_t csirx_graph_parameters_queue_params_list[1];
    printf("tivxTask_capture \n");
    vx_graph csirx_graph = (vx_graph)app_var;


    /* allocate Input and Output refs, multiple refs created to allow pipelining of csirx_graph */
    for(buf_id=0; buf_id<num_buf; buf_id++)
    {
        capture_frames[buf_id] = vxCreateObjectArray(context, (vx_reference)img_exemplar, NUM_CHANNELS);
    }

    /* CSIRX Config initialization */
    tivx_capture_params_init(&local_capture_config);
    local_capture_config.numInst                          = 1U;
    local_capture_config.numCh                            = NUM_CHANNELS;
    local_capture_config.instId[0U]                       = CAPT1_INST_ID;
    local_capture_config.instCfg[0U].enableCsiv2p0Support = (uint32_t)vx_true_e;
    local_capture_config.instCfg[0U].numDataLanes         = 4U;
    for (loopCnt = 0U ;
        loopCnt < local_capture_config.instCfg[0U].numDataLanes ;
        loopCnt++)
    {
        local_capture_config.instCfg[0U].dataLanesMap[loopCnt] = (loopCnt + 1u);
    }
    for (loopCnt = 0U; loopCnt < NUM_CHANNELS; loopCnt++)
    {
        local_capture_config.chVcNum[loopCnt]   = loopCnt;
        local_capture_config.chInstMap[loopCnt] = CAPT1_INST_ID;
    }

    capture_config = vxCreateUserDataObject(context, "tivx_capture_params_t", sizeof(tivx_capture_params_t), &local_capture_config);


    csirx_node = tivxCaptureNode(csirx_graph, capture_config, capture_frames[0]);

    /* capture output @ node parameter index 1 becomes csirx_graph parameter 0 */
    add_graph_parameter_by_node_index(csirx_graph, csirx_node, 1);

    /* set csirx_graph schedule config such that csirx_graph parameter @ index 0 is enqueueable */
    csirx_graph_parameters_queue_params_list[0].graph_parameter_index = 0;
    csirx_graph_parameters_queue_params_list[0].refs_list_size = num_buf;
    csirx_graph_parameters_queue_params_list[0].refs_list = (vx_reference*)&capture_frames[0];

    /* Schedule mode auto is used, so we don't need to call vxScheduleGraph.
     * The graph gets scheduled automatically as refs are enqueued to it.
     */
    vxSetGraphScheduleConfig(csirx_graph,
            VX_GRAPH_SCHEDULE_MODE_QUEUE_AUTO,
            1,
            csirx_graph_parameters_queue_params_list
            );

    vxSetNodeTarget(csirx_node, VX_TARGET_STRING, TIVX_TARGET_CAPTURE1);

    vxVerifyGraph(csirx_graph);

    /* initializing sensor */
    //appStartImageSensor(sensor_name, CAPT1_INST_SENSOR_MASK);/*Mask for 2 cameras*/

    /* enqueue capture buffers for pipeup but don't trigger graph executions */
    for(buf_id=0; buf_id<num_buf-1; buf_id++)
    {
        tivxGraphParameterEnqueueReadyRef(csirx_graph, 0, (vx_reference*)&capture_frames[buf_id], 1, TIVX_GRAPH_PARAMETER_ENQUEUE_FLAG_PIPEUP);
    }

    /* after pipeup, now enqueue a buffer to trigger graph scheduling */
    vxGraphParameterEnqueueReadyRef(csirx_graph, 0, (vx_reference*)&capture_frames[buf_id], 1);

    /* wait for csirx_graph instances to complete, compare output and recycle data buffers, schedule again */
     for(loop_id=0; loop_id<1000; loop_id++)
     {
        uint32_t num_refs;
        vx_object_array captured_frames = NULL;

        /* Get captured frame reference, waits until a reference is available */
       vxGraphParameterDequeueDoneRef(csirx_graph, 0, (vx_reference*)&captured_frames, 1, &num_refs);

#if (CAPTURE2_ENABLE == 1U)
        printf("%s %d sensorCfgDone %d\n", __func__, __LINE__, sensorCfgDone);
        if (sensorCfgDone == 0U)
        {
            tivxEventPost(eventHandle_SensorCfgDone);
            printf("%s %d\n", __func__, __LINE__);
            sensorCfgDone = 1U;
        }
#endif
        if(captured_frames == NULL){
            printf("captured_frames NULL\n");
            continue;
        }
        CameraData data;
        uint8_t *yuv_ptr = (uint8_t*)malloc(IMAGE_WIDTH * IMAGE_HEIGHT * 2); 
        if (copy_image(captured_frames, 0, yuv_ptr) == 0) {
            printf("---------------------YUV OK\n");
        }else{
            printf("---------------------YUV no\n");
        }

        free(yuv_ptr);

         vxGraphParameterEnqueueReadyRef(csirx_graph, 0, (vx_reference*)&captured_frames, 1);

     }
    /* ensure all csirx_graph processing is complete */
    //vxWaitGraph(csirx_graph);

    //appStopImageSensor(sensor_name, CAPT1_INST_SENSOR_MASK)); /*Mask for 2 cameras*/

    // vxReleaseNode(&csirx_node);
    // for(buf_id=0; buf_id<num_buf; buf_id++)
    // {
    //     VX_CALL(vxReleaseObjectArray(&capture_frames[buf_id]));
    // }
    // VX_CALL(vxReleaseUserDataObject(&capture_config));

    /*Signal the completion of csirx graph processing*/
    printf("eventHandle_RxFinished post1\n");
    tivxEventPost(eventHandle_RxFinished);
    printf("eventHandle_RxFinished post2\n");
}


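/*
 * Capture task for the RAW12 path: waits for eventHandle_SensorCfgDone (posted by tivxTask_capture),
 * then builds a capture graph on CSI-RX instance CAPT2_INST_ID (1) with NUM_2_CHANNELS (3) raw
 * channels mapped to virtual channels 1..3, pipelines the buffers, and dequeues/requeues frames in
 * a loop, copying each channel out with copy_raw_image(). Posts eventHandle_RxFinished1 when done.
 */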
static void VX_CALLBACK tivxTask_capture1(void *app_var)
{
    vx_node csirx_node = 0;
    vx_object_array capture_frames[MAX_NUM_BUF];
    vx_user_data_object capture_config;
    tivx_capture_params_t local_capture_config;
    uint32_t buf_id, loop_id, loopCnt;
    vx_graph_parameter_queue_params_t csirx_graph_parameters_queue_params_list[1];
    printf("tivxTask_capture1 \n");

    vx_graph csirx_graph = (vx_graph)app_var;
    printf("%s %d\n",__func__,__LINE__);
    // ASSERT(num_buf > 0);
    if(CAPTURE1_ENABLE == 1U){
        tivxEventWait(eventHandle_SensorCfgDone, TIVX_EVENT_TIMEOUT_WAIT_FOREVER);
    }
       printf("%s %d\n",__func__,__LINE__);
    /* allocate Input and Output refs, multiple refs created to allow pipelining of csirx_graph */
    for(buf_id=0; buf_id<num_buf; buf_id++)
    {
        capture_frames[buf_id] = vxCreateObjectArray(context, (vx_reference)raw_img_exemplar, NUM_2_CHANNELS);
    }
   printf("%s %d\n",__func__,__LINE__);
    /* CSIRX Config initialization */
    tivx_capture_params_init(&local_capture_config);
    local_capture_config.timeout                          = 33;
    local_capture_config.timeoutInitial                   = 500;
    local_capture_config.numInst                          = 1U;
    local_capture_config.numCh                            = NUM_2_CHANNELS;
    local_capture_config.instId[0U]                       = CAPT2_INST_ID;
    local_capture_config.instCfg[0U].enableCsiv2p0Support = (uint32_t)vx_true_e;
    local_capture_config.instCfg[0U].numDataLanes         = 4U;
    int chIdx = 0U;
    for (loopCnt = 0U ;
        loopCnt < local_capture_config.instCfg[0U].numDataLanes ;
        loopCnt++)
    {
        local_capture_config.instCfg[0U].dataLanesMap[loopCnt] = (loopCnt + 1u);
    }
    for (loopCnt = 1U; loopCnt < 4; loopCnt++)
    {
        local_capture_config.chVcNum[chIdx]   = loopCnt;
        local_capture_config.chInstMap[chIdx] = CAPT2_INST_ID;
          chIdx++;
    }

    capture_config = vxCreateUserDataObject(context, "tivx_capture_params_t", sizeof(tivx_capture_params_t), &local_capture_config);
   printf("%s %d\n",__func__,__LINE__);
    csirx_node = tivxCaptureNode(csirx_graph, capture_config, capture_frames[0]);
   printf("%s %d\n",__func__,__LINE__);
    vxSetNodeTarget(csirx_node, VX_TARGET_STRING, TIVX_TARGET_CAPTURE1);
   printf("%s %d\n",__func__,__LINE__);
    /* capture output @ node parameter index 1 becomes csirx_graph parameter 0 */
    add_graph_parameter_by_node_index(csirx_graph, csirx_node, 1);
   printf("%s %d\n",__func__,__LINE__);
    /* set csirx_graph schedule config such that csirx_graph parameter @ index 0 is enqueueable */
    csirx_graph_parameters_queue_params_list[0].graph_parameter_index = 0;
    csirx_graph_parameters_queue_params_list[0].refs_list_size = num_buf;
    csirx_graph_parameters_queue_params_list[0].refs_list = (vx_reference*)&capture_frames[0];

    /* Schedule mode auto is used, so we don't need to call vxScheduleGraph.
     * The graph gets scheduled automatically as refs are enqueued to it.
     */
    vxSetGraphScheduleConfig(csirx_graph,
            VX_GRAPH_SCHEDULE_MODE_QUEUE_AUTO,
            1,
            csirx_graph_parameters_queue_params_list
            );
   printf("%s %d\n",__func__,__LINE__);
    vx_status status = vxVerifyGraph(csirx_graph);
    if (status == (vx_status)VX_SUCCESS)
    {
        printf("camera:verify raw_graph success\n");
    } else {
        printf("camera:verify raw_graph failed\n");
    }
    /* initializing sensor */
    //appStartImageSensor(sensor_name1, CAPT2_INST_SENSOR_MASK);/*Mask for 2 cameras*/
    /* enqueue capture buffers for pipeup but don't trigger graph executions */
    for(buf_id=0; buf_id<num_buf-1; buf_id++)
    {
        tivxGraphParameterEnqueueReadyRef(csirx_graph, 0, (vx_reference*)&capture_frames[buf_id], 1, TIVX_GRAPH_PARAMETER_ENQUEUE_FLAG_PIPEUP);
    }

    /* after pipeup, enqueue the last buffer once to trigger graph scheduling */
    vxGraphParameterEnqueueReadyRef(csirx_graph, 0, (vx_reference*)&capture_frames[buf_id], 1);

    /* wait for csirx_graph instances to complete, compare output and recycle data buffers, schedule again */
     for(loop_id=0; loop_id<1000; loop_id++)
     {
        uint32_t num_refs;
        vx_object_array captured_frames = NULL;

        /* Get captured frame reference, waits until a reference is available */
        vxGraphParameterDequeueDoneRef(csirx_graph, 0, (vx_reference*)&captured_frames, 1, &num_refs);

        for (int i = 0; i < 3; i++) {
            int frame_size = 1920 * 1080;
            CameraData data;
            uint8_t *yuv_ptr = (uint8_t*)malloc(IMAGE_WIDTH * IMAGE_HEIGHT * 2);
            if (copy_raw_image(captured_frames, i, yuv_ptr) == 0) {
                data.chan_id = i;
                data.img_ptr = yuv_ptr;
                data.framesize = frame_size * 2;
                // data.camera_name = _config[i].name;
                // data.frame_cnt = 0;//TODO
                // data.height = _config[i].height;
                // data.width = _config[i].width;
                // data.format = _config[i].format;
                clock_gettime(CLOCK_REALTIME, &(data.capture_ts));
                memcpy(&(data.trigger_ts), &(data.capture_ts), sizeof(data.capture_ts));
                //output->push_back(data);
                printf("%d RAW OK \n", i);
            }
            free(yuv_ptr);
        }

        vxGraphParameterEnqueueReadyRef(csirx_graph, 0, (vx_reference*)&captured_frames, 1);
     }
    // /* ensure all csirx_graph processing is complete */
    // vxWaitGraph(csirx_graph);

    // VX_CALL(appStopImageSensor(sensor_name1, CAPT2_INST_SENSOR_MASK));

    // VX_CALL(vxReleaseNode(&csirx_node));
    // for(buf_id=0; buf_id<num_buf; buf_id++)
    // {
    //     VX_CALL(vxReleaseObjectArray(&capture_frames[buf_id]));
    // }
    // VX_CALL(vxReleaseUserDataObject(&capture_config));

    /* Signal the completion of csirx graph processing */
    printf("eventHandle_RxFinished post3\n");
    tivxEventPost(eventHandle_RxFinished1);
    printf("eventHandle_RxFinished post4\n");
}
tivx_task taskHandle_capture;
tivx_task taskHandle_capture1;
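/*
 * init(): creates the OpenVX context, loads the HWA kernels, creates the YUV graph ("graph") and the
 * raw graph ("raw_graph"), queries the sensor and creates the raw and UYVY image exemplars, then
 * spawns tivxTask_capture and tivxTask_capture1 and blocks until both tasks post their completion
 * events.
 */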
int32_t Camera_inst::init()
{
    std::cout << "init camera start" << std::endl;

    IssSensor_CreateParams sensorParams;
    uint32_t channel_mask = 0xff;
    vx_status status;
    //   _init_config(_config);

    tivx_task_create_params_t taskParams_capture;
    context = vxCreateContext();
    tivxHwaLoadKernels(context);
    //CT_RegisterForGarbageCollection(context, ct_teardown_hwa_kernels, CT_GC_OBJECT);
    tivx_clr_debug_zone(VX_ZONE_INFO);
    
    graph = vxCreateGraph(context);
    status = vxGetStatus((vx_reference)graph);
    if (VX_SUCCESS == status)
    {
        printf("camera:graph create success\n");
        vxSetReferenceName((vx_reference)graph, "CAMERA_CAPTURE_GRAPH");
    } else {
        printf("camera:graph create fail\n");
    }

    raw_graph = vxCreateGraph(context);
    status = vxGetStatus((vx_reference)raw_graph);
    if (VX_SUCCESS == status)
    {
        printf("camera:raw_graph create success\n");
        vxSetReferenceName((vx_reference)raw_graph, "CAMERA_CAPTURE_GRAPH");
    } else {
        printf("camera:raw_graph create fail\n");
    }
    
    memset(&sensorParams, 0, sizeof(sensorParams));
    appQueryImageSensor(gs_sensor_name, &sensorParams);
    appInitImageSensor(sensorParams.name, 0, channel_mask);
    sensorParams.sensorInfo.raw_params.width  = 1920;
    sensorParams.sensorInfo.raw_params.height = 1080;
    sensorParams.sensorInfo.raw_params.format[0] = {TIVX_RAW_IMAGE_16_BIT, 11};
    printf("raw size width %d height %d\n", sensorParams.sensorInfo.raw_params.width, sensorParams.sensorInfo.raw_params.height);
    printf("sensorParams.sensorInfo.raw_params.format[0]: pixel_container %d msb %d\n",
           sensorParams.sensorInfo.raw_params.format[0].pixel_container,
           sensorParams.sensorInfo.raw_params.format[0].msb);
    sensorParams.sensorInfo.raw_params.num_exposures = 1;
    sensorParams.sensorInfo.raw_params.line_interleaved = vx_false_e;
    sensorParams.sensorInfo.raw_params.meta_height_before = 0;//1280;
    sensorParams.sensorInfo.raw_params.meta_height_after = 4;//1280;

    raw_img_exemplar = tivxCreateRawImage(context, &(sensorParams.sensorInfo.raw_params));
    status = vxGetStatus((vx_reference)raw_img_exemplar);
    if (status == (vx_status)VX_SUCCESS)
    {
        printf("camera:raw_img_exemplar create success\n");
    } else {
        printf("camera:raw_img_exemplar create failed\n");
        vxReleaseGraph(&raw_graph);
        tivxHwaUnLoadKernels(context);
        vxReleaseContext(&context);
        return -1;
    }
    
    img_exemplar = vxCreateImage(context, IMAGE_WIDTH, IMAGE_HEIGHT, IMAGE_FORMAT);
    status = vxGetStatus((vx_reference)img_exemplar);
    if (status == (vx_status)VX_SUCCESS)
    {
        printf("camera:img_exemplar create success\n");
    } else {
        printf("camera:img_exemplar create failed\n");
        vxReleaseGraph(&raw_graph);
        tivxHwaUnLoadKernels(context);
        vxReleaseContext(&context);
        return -1;
    }

#if (CAPTURE1_ENABLE == 1U)
        tivxEventCreate(&eventHandle_RxFinished);
        // Setting up task params for capture_task
        tivxTaskSetDefaultCreateParams(&taskParams_capture);
        taskParams_capture.task_main = &tivxTask_capture;
        taskParams_capture.app_var = graph;
        taskParams_capture.stack_ptr = NULL;
        taskParams_capture.stack_size = TIVX_TARGET_DEFAULT_STACK_SIZE;
        taskParams_capture.core_affinity = TIVX_TASK_AFFINITY_ANY;
        taskParams_capture.priority = TIVX_TARGET_DEFAULT_TASK_PRIORITY1;
        tivxTaskCreate(&taskHandle_capture, &taskParams_capture);
#endif
#if (CAPTURE2_ENABLE == 1U)
        tivxEventCreate(&eventHandle_RxFinished1);
        tivxEventCreate(&eventHandle_SensorCfgDone);
        // Setting up task params for capture_task 1
        tivxTaskSetDefaultCreateParams(&taskParams_capture);
        taskParams_capture.task_main = &tivxTask_capture1;
        taskParams_capture.app_var = raw_graph;
        taskParams_capture.stack_ptr = NULL;
        taskParams_capture.stack_size = TIVX_TARGET_DEFAULT_STACK_SIZE;
        taskParams_capture.core_affinity = TIVX_TASK_AFFINITY_ANY;
        taskParams_capture.priority = TIVX_TARGET_DEFAULT_TASK_PRIORITY1;
        tivxTaskCreate(&taskHandle_capture1, &taskParams_capture);
#endif

    //Wait for both Graph Processing to complete
#if (CAPTURE1_ENABLE == 1U)
        tivxEventWait(eventHandle_RxFinished, TIVX_EVENT_TIMEOUT_WAIT_FOREVER);
#endif
#if (CAPTURE2_ENABLE == 1U)
        tivxEventWait(eventHandle_RxFinished1, TIVX_EVENT_TIMEOUT_WAIT_FOREVER);
#endif

    std::cout << "init camera success" << std::endl;
    return 0;
}

int32_t Camera_inst::uninit()
{
    uint32_t buf_id = 0;
    uint32_t num_buf = NUM_BUF;
    uint32_t num_refs;
    vx_object_array out_capture_frames;
    printf("uinit\n");
    /* ensure all raw_graph processing is complete */
    appStopImageSensor(gs_sensor_name, gs_channel_mask);
#if (CAPTURE1_ENABLE == 1U)
//        appDeInitImageSensor(sensor_name);
#endif

#if (CAPTURE2_ENABLE == 1U)
//        appDeInitImageSensor(sensor_name1);
#endif

#if (CAPTURE1_ENABLE == 1U)
        vxReleaseImage(&img_exemplar);
#endif

#if (CAPTURE2_ENABLE == 1U)
        tivxReleaseRawImage(&raw_img_exemplar);
#endif

#if (CAPTURE1_ENABLE == 1U)
        vxReleaseGraph(&graph);
#endif
#if (CAPTURE2_ENABLE == 1U)
        vxReleaseGraph(&raw_graph);
#endif
#if (CAPTURE1_ENABLE == 1U)
        tivxEventDelete(&eventHandle_RxFinished);
        tivxTaskDelete(&taskHandle_capture);
#endif
#if (CAPTURE2_ENABLE == 1U)
        tivxEventDelete(&eventHandle_RxFinished1);
        tivxEventDelete(&eventHandle_SensorCfgDone);
        tivxTaskDelete(&taskHandle_capture1);
#endif
        tivxHwaUnLoadKernels(context);

        tivx_clr_debug_zone(VX_ZONE_INFO);
    std::cout << "uninit camera success" << std::endl;
    return 0;
}
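/*
 * check_camera_reg(): reads a set of (de)serializer status registers over I2C via
 * appWriteReadRegImageSensor() and, for each register where bit 3 (0x08) is not set,
 * marks the corresponding channel as not locked in not_lock[].
 */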
void Camera_inst::check_camera_reg()
{
    int32_t ret = 0;
    RemoteI2cParams param;
    int i = 0;
    param.nRegAddrWidth = 16;
    param.nI2cId = 3;
    param.nSlaveAddr = 0x29;
    param.nRegAddr = 0x1A;
    ret = appWriteReadRegImageSensor(gs_sensor_name, 0x1, IMAGE_SENSOR_CTRLCMD_READ_SENSOR_REG, &param);
    //printf("read reg slave=0x%x,reg=0x%x,val=0x%x\n", param.nSlaveAddr,param.nRegAddr, param.nRegValue);
    if((param.nRegValue & 0x08) == 0) {
        not_lock[0] = 1; 
    }
    param.nRegAddrWidth = 16;
    param.nI2cId = 3;
    param.nSlaveAddr = 0x29;
    param.nRegAddr = 0x0A;
    ret = appWriteReadRegImageSensor(gs_sensor_name, 0x1, IMAGE_SENSOR_CTRLCMD_READ_SENSOR_REG, &param);
    //printf("read reg slave=0x%x,reg=0x%x,val=0x%x\n", param.nSlaveAddr,param.nRegAddr, param.nRegValue);
    if((param.nRegValue & 0x08) == 0) {
        not_lock[1] = 1; 
    }
    param.nRegAddrWidth = 16;
    param.nI2cId = 3;
    param.nSlaveAddr = 0x29;
    param.nRegAddr = 0x0B;
    ret = appWriteReadRegImageSensor(gs_sensor_name, 0x1, IMAGE_SENSOR_CTRLCMD_READ_SENSOR_REG, &param);
    //printf("read reg slave=0x%x,reg=0x%x,val=0x%x\n", param.nSlaveAddr,param.nRegAddr, param.nRegValue);
    if((param.nRegValue & 0x08) == 0) {
        not_lock[2] = 1; 
    }
    param.nRegAddrWidth = 16;
    param.nI2cId = 3;
    param.nSlaveAddr = 0x29;
    param.nRegAddr = 0x0C;
    ret = appWriteReadRegImageSensor(gs_sensor_name, 0x1, IMAGE_SENSOR_CTRLCMD_READ_SENSOR_REG, &param);
    //printf("read reg slave=0x%x,reg=0x%x,val=0x%x\n", param.nSlaveAddr,param.nRegAddr, param.nRegValue);
    if((param.nRegValue & 0x08) == 0) {
        not_lock[3] = 1; 
    }
    param.nRegAddrWidth = 16;
    param.nI2cId = 3;
    param.nSlaveAddr = 0x28;
    param.nRegAddr = 0x13;
    ret = appWriteReadRegImageSensor(gs_sensor_name, 0x1, IMAGE_SENSOR_CTRLCMD_READ_SENSOR_REG, &param);
    //printf("read reg slave=0x%x,reg=0x%x,val=0x%x\n", param.nSlaveAddr,param.nRegAddr, param.nRegValue);
    if((param.nRegValue & 0x08) == 0) {
        not_lock[4] = 1; 
    }
}
int32_t Camera_inst::capture_sync(std::vector<CameraData> *output)
{
    uint32_t num_refs;
    uint32_t i = 0;
    uint32_t frame_size = 0;
    vx_size out_size = 0;
    int32_t ret = 0;
    char sensor_name[128] = SENSOR_OV1F_HIK_MAX96717F; 
   //    RemoteI2cParams param;
    output->clear();
    vx_object_array out_capture_frames = NULL;
    vx_object_array out_raw_capture_frames = NULL;
    //memset(not_lock, 0, sizeof(not_lock) * 5);
    /* Get output reference, waits until a reference is available */
    #if 0
    /* Error frame params */
    vx_event_t event;
    vxWaitEvent(context, &event, vx_true_e);
    /* Verifying that the capture node error event is not hit */
    if (event.app_value == CAPTURE_NODE_ERROR) {
        printf("[%s] capture node error, app_value=%d\n", __FUNCTION__, event.app_value);
    }
    #endif
// #if YUV_DATA
//     vxGraphParameterDequeueDoneRef(graph, 0, (vx_reference*)&out_capture_frames, 1, &num_refs);
//     for (i = 0; i < 1; i++) {
//         frame_size = _config[i].height * _config[i].width;
//         CameraData data;
//         uint8_t *yuv_ptr = (uint8_t*)malloc(IMAGE_WIDTH * IMAGE_HEIGHT * 2); 
//         if (copy_image(out_capture_frames, i, yuv_ptr) == 0) {
//             data.chan_id = i;
//             data.img_ptr = yuv_ptr;
//             data.framesize = frame_size * 2;
//             data.camera_name = _config[i].name;
//             data.frame_cnt = 0;//TODO
//             data.height = _config[i].height;
//             data.width = _config[i].width;
//             data.format = _config[i].format;
//             clock_gettime(CLOCK_REALTIME, &(data.capture_ts));
//             memcpy(&(data.trigger_ts), &(data.capture_ts), sizeof(data.capture_ts));
//             output->push_back(data);
//         } else {
//             free(yuv_ptr);
//         }
//     }
//     vxGraphParameterEnqueueReadyRef(graph, 0, (vx_reference*)&out_capture_frames, 1);
//   #endif
  //  check_camera_reg();
#if RAW_DATA
    // vxGraphParameterDequeueDoneRef(raw_graph, 0, (vx_reference*)&out_raw_capture_frames, 1, &num_refs);    
    // for (i = 1; i < 4; i++) {
    //     frame_size = _config[i].height * _config[i].width;
    //     CameraData data;
    //     uint8_t *yuv_ptr = (uint8_t*)malloc(IMAGE_WIDTH * IMAGE_HEIGHT * 2); 
    //     if (copy_raw_image(out_capture_frames, i, yuv_ptr) == 0) {
    //         data.chan_id = i;
    //         data.img_ptr = yuv_ptr;
    //         data.framesize = frame_size *2;
    //         data.camera_name = _config[i].name;
    //         data.frame_cnt = 0;//TODO
    //         data.height = _config[i].height;
    //         data.width = _config[i].width;
    //         data.format = _config[i].format;
    //         clock_gettime(CLOCK_REALTIME, &(data.capture_ts));
    //         memcpy(&(data.trigger_ts), &(data.capture_ts), sizeof(data.capture_ts));
    //         output->push_back(data);
    //     } else {
    //         free(yuv_ptr);
    //     }
    // }
    // vxGraphParameterEnqueueReadyRef(raw_graph, 0, (vx_reference*)&out_raw_capture_frames, 1);
#endif
    return 0;
}
void Camera_inst::release(uint32_t chan_id, uint8_t* img_ptr)
{
    if (img_ptr != nullptr) {
        free(img_ptr);
    }
    //TODO
    return;
}
}  // namespace gaia
}  // namespace idg
}  // namespace baidu