TDA4VM: How to feed local raw images to VISS and bypass CSIRX

Part Number: TDA4VM

Hi TI Team,

I am using the app_multi_cam demo (or the single-cam app, if that is simpler) and want to modify it so that the data flow is: raw image data from local storage >> VISS node >> LDC node >> Display node >> eDP monitor.

We need to create a graph in which we can feed raw image data (raw frames from local storage) directly into the VISS node. For this requirement we want to skip the capture node / camera / MIPI CSI-2 interface entirely.

Please provide suggestions for a quick start. Thanks in advance.

Best Regards,

Gautam

  • Hi Gautam,

    That's possible. Depending on the application you are using, you could store the reference image in the /opt/vision_apps/test_data/psdkra/AppName/SensorName folder; the application will then read the image from this folder if the sensor is not connected (a small path-check sketch follows this reply).

    Regards,

    Brijesh
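
    For reference, here is a minimal sketch (not part of the SDK) of checking for that reference image before enabling test mode, assuming the path convention above; the helper name and the "input.raw" file name are placeholders:

    #include <stdio.h>

    /* Hypothetical helper: returns 1 if a reference RAW file is present under
     * the /opt/vision_apps/test_data/psdkra/<AppName>/<SensorName> folder
     * mentioned above. */
    static int reference_raw_available(const char *app_name, const char *sensor_name)
    {
        char path[512];
        FILE *fp;

        snprintf(path, sizeof(path),
                 "/opt/vision_apps/test_data/psdkra/%s/%s/input.raw",
                 app_name, sensor_name);

        fp = fopen(path, "rb");
        if (fp == NULL)
        {
            printf("Reference RAW image not found at %s\n", path);
            return 0;
        }
        fclose(fp);
        return 1;
    }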

  • Hi Brijesh,

    Thanks for your response. I was able to insert a single raw image frame into the capture node using the multi-cam demo app with test mode = 1.

    But the problem is sending multiple frames to the capture node so the pipeline behaves like video processing, i.e. 30 fps. I need to update the multi-cam demo app so that it takes a series of raw frames and a 10-second video can be seen on the display monitor.

    In the updated multi-cam demo app I am calling read_error_image_raw() and app_send_error_frame() in a loop and getting the error VX_PRINT(VX_ZONE_ERROR, "Reference frame already provided\n");

    With app_send_error_frame() we send the control command (TIVX_CAPTURE_REGISTER_ERROR_FRAME) to the capture node, which registers only a single error frame.

    Please provide suggestions to make this work. Thanks in advance. Updated code is attached below:

    #define APP_BUFFER_Q_DEPTH   (4)
    #define APP_PIPELINE_DEPTH   (7)
    
    typedef struct {
    
        SensorObj     sensorObj;
        CaptureObj    captureObj;
        VISSObj       vissObj;
        AEWBObj       aewbObj;
        LDCObj        ldcObj;
        ImgMosaicObj  imgMosaicObj;
        DisplayObj    displayObj;
    
        vx_char output_file_path[APP_MAX_FILE_PATH];
    
        /* OpenVX references */
        vx_context context;
        vx_graph   graph;
    
        vx_int32 en_out_img_write;
        vx_int32 test_mode;
    
        vx_uint32 is_interactive;
    
        vx_uint32 num_frames_to_run;
    
        vx_uint32 num_frames_to_write;
        vx_uint32 num_frames_to_skip;
    
        tivx_task task;
        vx_uint32 stop_task;
        vx_uint32 stop_task_done;
    
        app_perf_point_t total_perf;
        app_perf_point_t fileio_perf;
        app_perf_point_t draw_perf;
    
        int32_t pipeline;
    
        int32_t enqueueCnt;
        int32_t dequeueCnt;
    
        int32_t write_file;
    
        vx_uint32 enable_configure_hwa_freq;
        vx_uint32 hwa_freq_config;
    
    } AppObj;
    
    AppObj gAppObj;
    
    static vx_status app_init(AppObj *obj);
    static void app_deinit(AppObj *obj);
    static vx_status app_create_graph(AppObj *obj);
    static vx_status app_verify_graph(AppObj *obj);
    static vx_status app_run_graph(AppObj *obj);
    static vx_status app_run_graph_interactive(AppObj *obj);
    static void app_delete_graph(AppObj *obj);
    static void app_default_param_set(AppObj *obj);
    static void app_update_param_set(AppObj *obj);
    static void app_pipeline_params_defaults(AppObj *obj);
    static void add_graph_parameter_by_node_index(vx_graph graph, vx_node node, vx_uint32 node_parameter_index);
    
    static char menu[] = {
        "\n"
        "\n ========================="
        "\n Demo : Camera Demo"
        "\n ========================="
        "\n"
        "\n s: Save CSIx, VISS and LDC outputs"
        "\n"
        "\n p: Print performance statistics"
        "\n"
        "\n x: Exit"
        "\n"
        "\n Enter Choice: "
    };
    
    
    static void app_run_task(void *app_var)
    {
        AppObj *obj = (AppObj *)app_var;
        vx_status status = VX_SUCCESS;
        while((!obj->stop_task) && (status == VX_SUCCESS))
        {
            status = app_run_graph(obj);
        }
        obj->stop_task_done = 1;
    }
    
    static int32_t app_run_task_create(AppObj *obj)
    {
        tivx_task_create_params_t params;
        vx_status status;
    
        tivxTaskSetDefaultCreateParams(&params);
        params.task_main = app_run_task;
        params.app_var = obj;
    
        obj->stop_task_done = 0;
        obj->stop_task = 0;
    
        status = tivxTaskCreate(&obj->task, &params);
    
        return status;
    }
    
    static void app_run_task_delete(AppObj *obj)
    {
        while(obj->stop_task_done==0)
        {
            tivxTaskWaitMsecs(100);
        }
    
        tivxTaskDelete(&obj->task);
    }
    
    static vx_status app_run_graph_interactive(AppObj *obj)
    {
        vx_status status;
        uint32_t done = 0;
    
        char ch;
        FILE *fp;
        app_perf_point_t *perf_arr[1];
    
        status = app_run_task_create(obj);
        if(status == VX_FAILURE)
        {
            printf("app_tidl: ERROR: Unable to create task\n");
        }
        else
        {
            appPerfStatsResetAll();
            while(!done)
            {
                printf(menu);
                ch = getchar();
                printf("\n");
    
                switch(ch)
                {
                    case 'p':
                        appPerfStatsPrintAll();
                        status = tivx_utils_graph_perf_print(obj->graph);
                        appPerfPointPrint(&obj->fileio_perf);
                        appPerfPointPrint(&obj->total_perf);
                        printf("\n");
                        appPerfPointPrintFPS(&obj->total_perf);
                        appPerfPointReset(&obj->total_perf);
                        printf("\n");
    
                        vx_reference refs[1];
                        refs[0] = (vx_reference)obj->captureObj.raw_image_arr[0];
                        if (status == VX_SUCCESS)
                        {
                            status = tivxNodeSendCommand(obj->captureObj.node, 0u,
                                        TIVX_CAPTURE_PRINT_STATISTICS,
                                        refs, 1u);
                        }
                        break;
                    case 'e':
                        perf_arr[0] = &obj->total_perf;
                        fp = appPerfStatsExportOpenFile(".", "basic_demos_app_multi_cam");
                        if (NULL != fp)
                        {
                            appPerfStatsExportAll(fp, perf_arr, 1);
                            if (status == VX_SUCCESS)
                            {
                                status = tivx_utils_graph_perf_export(fp, obj->graph);
                            }
                            appPerfStatsExportCloseFile(fp);
                            appPerfStatsResetAll();
                        }
                        else
                        {
                            printf("fp is null\n");
                        }
                        break;
                    case 's':
                        obj->write_file = 1;
                        break;
                    case 'x':
                        obj->stop_task = 1;
                        done = 1;
                        break;
                }
            }
            app_run_task_delete(obj);
        }
        return status;
    }
    
    vx_int32 processGraph(vx_int32 argc, vx_char* argv[])
    {
        vx_status status = VX_SUCCESS;
    
        AppObj *obj = &gAppObj;
    
        /*Optional parameter setting*/
        app_default_param_set(obj);
    
        obj->test_mode = 1;
        obj->captureObj.test_mode = 1;
        obj->is_interactive = 0;
        obj->enable_configure_hwa_freq = 0;
        obj->hwa_freq_config = 0;
        // obj->sensorObj.is_interactive = 0;
        // select the sensor index (hard-coded here instead of reading from the cmd line)
        obj->sensorObj.sensor_index = 1;
    
        // manual injection of sensor raw params - "AR0233-UB953_MARS"
        obj->sensorObj.sensorParams.sensorInfo.raw_params.width = 1920;
        obj->sensorObj.sensorParams.sensorInfo.raw_params.height = 1280;
        obj->sensorObj.image_width = obj->sensorObj.sensorParams.sensorInfo.raw_params.width;
        obj->sensorObj.image_height = obj->sensorObj.sensorParams.sensorInfo.raw_params.height;
    
        obj->sensorObj.sensorParams.sensorInfo.raw_params.num_exposures = 1;
        obj->sensorObj.sensorParams.sensorInfo.raw_params.line_interleaved = vx_false_e;
        obj->sensorObj.sensorParams.sensorInfo.raw_params.format[0].pixel_container = TIVX_RAW_IMAGE_16_BIT;
        obj->sensorObj.sensor_out_format = 0;
        obj->sensorObj.sensorParams.sensorInfo.raw_params.format[0].msb = 11;
        obj->sensorObj.sensorParams.sensorInfo.raw_params.meta_height_before = 0;
        obj->sensorObj.sensorParams.sensorInfo.raw_params.meta_height_after = 0;
    
        obj->sensorObj.sensorParams.sensorInfo.features = 0x00000178;
        obj->sensorObj.sensorParams.sensorInfo.aewbMode = ALGORITHMS_ISS_AEWB_MODE_AEWB;
    
        obj->sensorObj.sensorParams.sensorInfo.fps = 30;
        obj->sensorObj.sensorParams.sensorInfo.numDataLanes = 4;
        obj->sensorObj.sensorParams.sensorInfo.dataLanesMap[0] = 1;
        obj->sensorObj.sensorParams.sensorInfo.dataLanesMap[1] = 2;
        obj->sensorObj.sensorParams.sensorInfo.dataLanesMap[2] = 3;
        obj->sensorObj.sensorParams.sensorInfo.dataLanesMap[3] = 4;
    
        obj->sensorObj.sensorParams.sensorInfo.dataLanesPolarity[0] = 0;
        obj->sensorObj.sensorParams.sensorInfo.dataLanesPolarity[1] = 0;
        obj->sensorObj.sensorParams.sensorInfo.dataLanesPolarity[2] = 0;
        obj->sensorObj.sensorParams.sensorInfo.dataLanesPolarity[3] = 0;
        // CSIRX_LANE_BAND_SPEED_1350_TO_1500_MBPS = 0x12U
        obj->sensorObj.sensorParams.sensorInfo.csi_laneBandSpeed = 0x12U;
        obj->sensorObj.sensorParams.num_channels = 1;
        obj->sensorObj.sensorParams.dccId = 233;
    
        /*
        Check for supported sensor features.
        It is up to the application to decide which features should be enabled.
        This demo app enables WDR, DCC and 2A if the sensor supports it.
        */
        APP_PRINTF("WDR mode is supported \n");
        obj->sensorObj.sensor_features_enabled |= ISS_SENSOR_FEATURE_COMB_COMP_WDR_MODE;
        obj->sensorObj.sensor_wdr_enabled = 1;
    
        APP_PRINTF("Exposure control is supported \n");
        obj->sensorObj.sensor_features_enabled |= ISS_SENSOR_FEATURE_MANUAL_EXPOSURE;
        obj->sensorObj.sensor_exp_control_enabled = 1;
    
        APP_PRINTF("Gain control is supported \n");
        obj->sensorObj.sensor_features_enabled |= ISS_SENSOR_FEATURE_MANUAL_GAIN;
        obj->sensorObj.sensor_gain_control_enabled = 1;
    
        obj->sensorObj.sensor_features_enabled |= ISS_SENSOR_FEATURE_DCC_SUPPORTED;
        obj->sensorObj.sensor_dcc_enabled = 1;
        APP_PRINTF("Sensor DCC is enabled \n");
        APP_PRINTF("Sensor width = %d\n", obj->sensorObj.sensorParams.sensorInfo.raw_params.width);
        APP_PRINTF("Sensor height = %d\n", obj->sensorObj.sensorParams.sensorInfo.raw_params.height);
        APP_PRINTF("Sensor DCC ID = %d\n", obj->sensorObj.sensorParams.dccId);
        APP_PRINTF("Sensor Supported Features = 0x%08X\n", obj->sensorObj.sensor_features_supported);
        APP_PRINTF("Sensor Enabled Features = 0x%08X\n", obj->sensorObj.sensor_features_enabled);
    
        /* Update of parameters after config file read */
        app_update_param_set(obj);
    
        if (status == VX_SUCCESS)
        {
            status = app_init(obj);
        }
        if(status == VX_SUCCESS)
        {
            APP_PRINTF("App Init Done! \n");
    
            status = app_create_graph(obj);
    
            if(status == VX_SUCCESS)
            {
                APP_PRINTF("App Create Graph Done! \n");
    
                status = app_verify_graph(obj);
    
                if(status == VX_SUCCESS)
                {
                    APP_PRINTF("App Verify Graph Done! \n");
    
                    if (status == VX_SUCCESS)
                    {
                        APP_PRINTF("App Send Error Frame Done! \n");
                        if(obj->is_interactive)
                        {
                            status = app_run_graph_interactive(obj);
                        }
                        else
                        {
                            status = app_run_graph(obj);
                        }
                    }
                }
            }
    
            APP_PRINTF("App Run Graph Done! \n");
        }
    
        app_delete_graph(obj);
    
        APP_PRINTF("App Delete Graph Done! \n");
    
        app_deinit(obj);
    
        APP_PRINTF("App De-init Done! \n");
    
        if ((vx_false_e == test_result) || (status != VX_SUCCESS))
        {
            printf("\n\nTEST FAILED\n\n");
            print_new_checksum_structs();
            status = (status == VX_SUCCESS) ? VX_FAILURE : status;
        }
        else
        {
            printf("\n\nTEST PASSED\n\n");
        }
    
        return status;
    }
    
    static vx_status app_init(AppObj *obj)
    {
        vx_status status = VX_SUCCESS;
    
        if (1U == obj->enable_configure_hwa_freq)
        {
            APP_PRINTF("Configuring VPAC frequency!\n");
            if (0U == obj->hwa_freq_config)
            {
                APP_PRINTF("Configuring VPAC frequency to 650 MHz and DMPAC to 520 MHz!\n");
                status = appVhwaConfigureFreq(APP_HWA_CONFIGURE_FREQ_VPAC_650_DMPAC_520);
                APP_PRINTF("Configuring VPAC frequency done!\n");
            }
            else if (1U == obj->hwa_freq_config)
            {
                APP_PRINTF("Configuring VPAC frequency to 720 MHz and DMPAC to 480 MHz!\n");
                status = appVhwaConfigureFreq(APP_HWA_CONFIGURE_FREQ_VPAC_720_DMPAC_480);
                APP_PRINTF("Configuring VPAC frequency done!\n");
            }
            else
            {
                APP_PRINTF("Invalid option for VPAC frequency configuration!\n");
            }
        }
    
        if (status == VX_SUCCESS)
        {
            /* Create OpenVx Context */
            obj->context = vxCreateContext();
            status = vxGetStatus((vx_reference)obj->context);
            APP_PRINTF("Creating context done!\n");
        }
    
        if (status == VX_SUCCESS)
        {
            tivxHwaLoadKernels(obj->context);
            tivxImagingLoadKernels(obj->context);
            tivxFileIOLoadKernels(obj->context);
            APP_PRINTF("Kernel loading done!\n");
        }
    
        /* Initialize modules */
        if (status == VX_SUCCESS)
        {
            APP_PRINTF("Sensor init done!\n");
            status = app_init_capture(obj->context, &obj->captureObj, &obj->sensorObj, "capture_obj", APP_BUFFER_Q_DEPTH);
        }
    
        if(status == VX_SUCCESS)
        {
            status = app_init_viss(obj->context, &obj->vissObj, &obj->sensorObj, "viss_obj");
            APP_PRINTF("VISS init done!\n");
        }
    
        if(status == VX_SUCCESS)
        {
            status = app_init_aewb(obj->context, &obj->aewbObj, &obj->sensorObj, "aewb_obj");
            APP_PRINTF("AEWB init done!\n");
        }
    
        if(status == VX_SUCCESS)
        {
            status = app_init_ldc(obj->context, &obj->ldcObj, &obj->sensorObj, "ldc_obj");
            APP_PRINTF("LDC init done!\n");
        }
    
    
        if(status == VX_SUCCESS)
        {
            status = app_init_img_mosaic(obj->context, &obj->imgMosaicObj, "img_mosaic_obj", APP_BUFFER_Q_DEPTH);
            APP_PRINTF("Img Mosaic init done!\n");
        }
    
        if (status == VX_SUCCESS)
        {
            status = app_init_display(obj->context, &obj->displayObj, "display_obj");
            APP_PRINTF("Display init done!\n");
        }
    
        appPerfPointSetName(&obj->total_perf , "TOTAL");
        appPerfPointSetName(&obj->fileio_perf, "FILEIO");
        return status;
    }
    
    static void app_deinit(AppObj *obj)
    {
        app_deinit_sensor(&obj->sensorObj);
        APP_PRINTF("Sensor deinit done!\n");
    
        app_deinit_capture(&obj->captureObj, APP_BUFFER_Q_DEPTH);
        APP_PRINTF("Capture deinit done!\n");
    
        app_deinit_viss(&obj->vissObj);
        APP_PRINTF("VISS deinit done!\n");
    
        app_deinit_aewb(&obj->aewbObj);
        APP_PRINTF("AEWB deinit done!\n");
    
        app_deinit_ldc(&obj->ldcObj);
        APP_PRINTF("LDC deinit done!\n");
    
        app_deinit_img_mosaic(&obj->imgMosaicObj, APP_BUFFER_Q_DEPTH);
        APP_PRINTF("Img Mosaic deinit done!\n");
    
        app_deinit_display(&obj->displayObj);
        APP_PRINTF("Display deinit done!\n");
    
        tivxHwaUnLoadKernels(obj->context);
        tivxImagingUnLoadKernels(obj->context);
        tivxFileIOUnLoadKernels(obj->context);
        APP_PRINTF("Kernels unload done!\n");
    
        vxReleaseContext(&obj->context);
        APP_PRINTF("Release context done!\n");
    }
    
    static void app_delete_graph(AppObj *obj)
    {
        app_delete_capture(&obj->captureObj);
        APP_PRINTF("Capture delete done!\n");
    
        app_delete_viss(&obj->vissObj);
        APP_PRINTF("VISS delete done!\n");
    
        app_delete_aewb(&obj->aewbObj);
        APP_PRINTF("AEWB delete done!\n");
    
        app_delete_ldc(&obj->ldcObj);
        APP_PRINTF("LDC delete done!\n");
    
        app_delete_img_mosaic(&obj->imgMosaicObj);
        APP_PRINTF("Img Mosaic delete done!\n");
    
        app_delete_display(&obj->displayObj);
        APP_PRINTF("Display delete done!\n");
    
        vxReleaseGraph(&obj->graph);
        APP_PRINTF("Graph delete done!\n");
    }
    
    static vx_status app_create_graph(AppObj *obj)
    {
        vx_status status = VX_SUCCESS;
        vx_graph_parameter_queue_params_t graph_parameters_queue_params_list[2];
        vx_int32 graph_parameter_index;
    
        obj->graph = vxCreateGraph(obj->context);
        status = vxGetStatus((vx_reference)obj->graph);
        if (status == VX_SUCCESS)
        {
            status = vxSetReferenceName((vx_reference)obj->graph, "app_multi_cam_graph");
            APP_PRINTF("Graph create done!\n");
        }
    
        if(status == VX_SUCCESS)
        {
            status = app_create_graph_capture(obj->graph, &obj->captureObj);
            APP_PRINTF("Capture graph done!\n");
        }
    
        if (status == VX_SUCCESS)
        {
            status = app_create_graph_viss(obj->graph, &obj->vissObj, obj->captureObj.raw_image_arr[0]);
            APP_PRINTF("VISS graph done!\n");
        }
    
        if (status == VX_SUCCESS)
        {
            status = app_create_graph_aewb(obj->graph, &obj->aewbObj, obj->vissObj.h3a_stats_arr);
    
            APP_PRINTF("AEWB graph done!\n");
        }
    
        vx_int32 idx = 0;
        vx_object_array ldc_in_arr;
    
        ldc_in_arr = obj->vissObj.output_arr;
    
        status = app_create_graph_ldc(obj->graph, &obj->ldcObj, ldc_in_arr);
        APP_PRINTF("LDC graph done!\n");
    
        obj->imgMosaicObj.input_arr[idx++] = obj->ldcObj.output_arr;
    
        vx_image display_in_image;
    
        obj->imgMosaicObj.num_inputs = idx;
    
        if (status == VX_SUCCESS)
        {
            status = app_create_graph_img_mosaic(obj->graph, &obj->imgMosaicObj, NULL);
            APP_PRINTF("Img Mosaic graph done!\n");
        }
    
        if(status == VX_SUCCESS)
        {
            display_in_image = obj->imgMosaicObj.output_image[0];
            status = app_create_graph_display(obj->graph, &obj->displayObj, display_in_image);
            APP_PRINTF("Display graph done!\n");
        }
    
        if(status == VX_SUCCESS)
        {
            graph_parameter_index = 0;
            add_graph_parameter_by_node_index(obj->graph, obj->captureObj.node, 1);
            obj->captureObj.graph_parameter_index = graph_parameter_index;
            graph_parameters_queue_params_list[graph_parameter_index].graph_parameter_index = graph_parameter_index;
            graph_parameters_queue_params_list[graph_parameter_index].refs_list_size = APP_BUFFER_Q_DEPTH;
            graph_parameters_queue_params_list[graph_parameter_index].refs_list = (vx_reference*)&obj->captureObj.raw_image_arr[0];
            graph_parameter_index++;
    
            if((obj->en_out_img_write == 1) || (obj->test_mode == 1))
            {
                add_graph_parameter_by_node_index(obj->graph, obj->imgMosaicObj.node, 1);
                obj->imgMosaicObj.graph_parameter_index = graph_parameter_index;
                graph_parameters_queue_params_list[graph_parameter_index].graph_parameter_index = graph_parameter_index;
                graph_parameters_queue_params_list[graph_parameter_index].refs_list_size = APP_BUFFER_Q_DEPTH;
                graph_parameters_queue_params_list[graph_parameter_index].refs_list = (vx_reference*)&obj->imgMosaicObj.output_image[0];
                graph_parameter_index++;
            }
    
            status = vxSetGraphScheduleConfig(obj->graph,
                    VX_GRAPH_SCHEDULE_MODE_QUEUE_AUTO,
                    graph_parameter_index,
                    graph_parameters_queue_params_list);
    
            if (status == VX_SUCCESS)
            {
                status = tivxSetGraphPipelineDepth(obj->graph, APP_PIPELINE_DEPTH);
            }
            if(status == VX_SUCCESS)
            {
                status = tivxSetNodeParameterNumBufByIndex(obj->vissObj.node, 6, APP_BUFFER_Q_DEPTH);
    
                if (status == VX_SUCCESS)
                {
                    status = tivxSetNodeParameterNumBufByIndex(obj->vissObj.node, 9, APP_BUFFER_Q_DEPTH);
                }
                if (status == VX_SUCCESS)
                {
                    status = tivxSetNodeParameterNumBufByIndex(obj->aewbObj.node, 4, APP_BUFFER_Q_DEPTH);
                }
            }
            if(status == VX_SUCCESS)
            {
                status = tivxSetNodeParameterNumBufByIndex(obj->ldcObj.node, 7, APP_BUFFER_Q_DEPTH);
            }
            if(status == VX_SUCCESS)
            {
                if(!((obj->en_out_img_write == 1) || (obj->test_mode == 1)))
                {
                    status = tivxSetNodeParameterNumBufByIndex(obj->imgMosaicObj.node, 1, APP_BUFFER_Q_DEPTH);
                    APP_PRINTF("Pipeline params setup done!\n");
                }
            }
        }
    
        return status;
    }
    
    static vx_status app_verify_graph(AppObj *obj)
    {
        vx_status status = VX_SUCCESS;
    
        status = vxVerifyGraph(obj->graph);
    
        if(status == VX_SUCCESS)
        {
            APP_PRINTF("Graph verify done!\n");
        }
    
        #if 1
        if(VX_SUCCESS == status)
        {
          status = tivxExportGraphToDot(obj->graph,".", "graph_exports");
        }
        #endif
    
        if (((obj->captureObj.enable_error_detection) || (obj->test_mode)) && (status == VX_SUCCESS))
        {
            status = app_send_error_frame(&obj->captureObj);
        }
    
        /* wait a while for prints to flush */
        tivxTaskWaitMsecs(100);
    
        return status;
    }
    
    static vx_status app_run_graph_for_one_frame_pipeline(AppObj *obj, vx_int32 frame_id)
    {
        vx_status status = VX_SUCCESS;
    
        APP_PRINTF("app_run_graph_for_one_pipeline: frame %d beginning\n", frame_id);
        appPerfPointBegin(&obj->total_perf);
    
        ImgMosaicObj *imgMosaicObj = &obj->imgMosaicObj;
        CaptureObj *captureObj = &obj->captureObj;
    
        /* checksum_actual is the checksum determined by the realtime test
            checksum_expected is the checksum that is expected to be the pipeline output */
        uint32_t checksum_actual = 0;
    
        /* This is the number of frames required for the pipeline AWB and AE algorithms to stabilize
            (note that 15 is only required for the 6-8 camera use cases - others converge quicker) */
        uint8_t stability_frame = 15;
    
        if(obj->pipeline < 0)
        {
            /* Enqueue outputs */
            if ((obj->en_out_img_write == 1) || (obj->test_mode == 1))
            {
                status = vxGraphParameterEnqueueReadyRef(obj->graph, imgMosaicObj->graph_parameter_index, (vx_reference*)&imgMosaicObj->output_image[obj->enqueueCnt], 1);
            }
    
            /* Enqueue inputs; during pipe-up they don't execute */
            if (status == VX_SUCCESS)
            {
                status = vxGraphParameterEnqueueReadyRef(obj->graph, captureObj->graph_parameter_index, (vx_reference*)&obj->captureObj.raw_image_arr[obj->enqueueCnt], 1);
            }
            obj->enqueueCnt++;
            obj->enqueueCnt   = (obj->enqueueCnt  >= APP_BUFFER_Q_DEPTH)? 0 : obj->enqueueCnt;
            obj->pipeline++;
        }
    
        if((obj->pipeline == 0) && (status == VX_SUCCESS))
        {
            if((obj->en_out_img_write == 1) || (obj->test_mode == 1))
            {
                status = vxGraphParameterEnqueueReadyRef(obj->graph, imgMosaicObj->graph_parameter_index, (vx_reference*)&imgMosaicObj->output_image[obj->enqueueCnt], 1);
            }
    
            /* Execute 1st frame */
            if(status == VX_SUCCESS)
            {
                status = vxGraphParameterEnqueueReadyRef(obj->graph, captureObj->graph_parameter_index, (vx_reference*)&obj->captureObj.raw_image_arr[obj->enqueueCnt], 1);
            }
            obj->enqueueCnt++;
            obj->enqueueCnt   = (obj->enqueueCnt  >= APP_BUFFER_Q_DEPTH)? 0 : obj->enqueueCnt;
            obj->pipeline++;
        }
    
        if((obj->pipeline > 0) && (status == VX_SUCCESS))
        {
            vx_object_array capture_input_arr;
            vx_image mosaic_output_image;
            uint32_t num_refs;
    
            /* Dequeue input */
            status = vxGraphParameterDequeueDoneRef(obj->graph, captureObj->graph_parameter_index, (vx_reference*)&capture_input_arr, 1, &num_refs);
            if((obj->en_out_img_write == 1) || (obj->test_mode == 1))
            {
                /* Dequeue output */
                if (status == VX_SUCCESS)
                {
                    status = vxGraphParameterDequeueDoneRef(obj->graph, imgMosaicObj->graph_parameter_index, (vx_reference*)&mosaic_output_image, 1, &num_refs);
                }
                if ((status == VX_SUCCESS) && (obj->test_mode == 1) && (frame_id > TEST_BUFFER))
                {
                    /* calculate the checksum of the mosaic output */
    
                    if ((app_test_check_image(mosaic_output_image, checksums_expected[obj->sensorObj.sensor_index][obj->sensorObj.num_cameras_enabled-1],
                                            &checksum_actual) != vx_true_e) && (frame_id > stability_frame))
                    {
                        test_result = vx_false_e;
                        /* in case test fails and needs to change */
                        populate_gatherer(obj->sensorObj.sensor_index, obj->sensorObj.num_cameras_enabled-1, checksum_actual);
                    }
                }
    
                /* Enqueue output */
                if (status == VX_SUCCESS)
                {
                    status = vxGraphParameterEnqueueReadyRef(obj->graph, imgMosaicObj->graph_parameter_index, (vx_reference*)&mosaic_output_image, 1);
                }
            }
    
            /* Enqueue input - start execution */
            if (status == VX_SUCCESS)
            {
                status = vxGraphParameterEnqueueReadyRef(obj->graph, captureObj->graph_parameter_index, (vx_reference*)&capture_input_arr, 1);
            }
            obj->enqueueCnt++;
            obj->dequeueCnt++;
    
            obj->enqueueCnt = (obj->enqueueCnt >= APP_BUFFER_Q_DEPTH)? 0 : obj->enqueueCnt;
            obj->dequeueCnt = (obj->dequeueCnt >= APP_BUFFER_Q_DEPTH)? 0 : obj->dequeueCnt;
        }
    
        appPerfPointEnd(&obj->total_perf);
        return status;
    }
    
    vx_status test_raw_10sec(AppObj *obj)
    {
        vx_status status = VX_SUCCESS;
    
    
    // The code below was tried first and then disabled. Please check the next approach after the #if 0 block.
        #if 0
        vx_map_id map_id;
        vx_rectangle_t rect;
        vx_imagepatch_addressing_t image_addr;
        void *data_ptr;
    
        /*2 Bytes per pixel*/
        vx_uint32 num_bytes_per_pixel = 2; 
        vx_uint32 num_bytes_read_from_file = 0, i;
    
        vx_reference ref[1];
        tivx_raw_image raw_image;
    
        FILE * fp;
        char raw_image_fname[] = "/ti_fs/next_gen_me/test_data/single_cam/AR0233_001/input2.raw";
    
        vx_uint32 imgaddr_width, imgaddr_height, imgaddr_stride;
    
        // tivxQueryRawImage((tivx_raw_image)frame, TIVX_RAW_IMAGE_WIDTH, &img_width, sizeof(vx_uint32));
        // tivxQueryRawImage((tivx_raw_image)frame, TIVX_RAW_IMAGE_HEIGHT, &img_height, sizeof(vx_uint32));
    
        /* Provision for injecting RAW frame into the sensor */
        /* rect defines the outer bounds of the raw_image frame
            which will be defined */
        rect.start_x = 0;
        rect.start_y = 0;
        rect.end_x = obj->sensorObj.sensorParams.sensorInfo.raw_params.width;
        rect.end_y = obj->sensorObj.sensorParams.sensorInfo.raw_params.height;
    
        /* Nothing is being populated here - just an empty frame */
        raw_image = tivxCreateRawImage(obj->context, &(obj->sensorObj.sensorParams.sensorInfo.raw_params));
    
        status = tivxMapRawImagePatch(raw_image,
                                      &rect,
                                      0,
                                      &map_id,
                                      &image_addr,
                                      &data_ptr,
                                      VX_READ_ONLY,
                                      VX_MEMORY_TYPE_HOST,
                                      TIVX_RAW_IMAGE_ALLOC_BUFFER);
    
        ref[0] = (vx_reference)raw_image;
    
        if ((vx_status)VX_SUCCESS == status)
        { 
            if (!data_ptr)
            {
                APP_PRINTF("data_ptr is NULL \n");
                return VX_FAILURE;
            }
    
            APP_PRINTF("Reading test RAW image %s \n", raw_image_fname);
            fp = fopen(raw_image_fname, "rb");
            if (!fp)
            {
                APP_PRINTF("Error: Unable to open file %s\n", raw_image_fname);
                return VX_FAILURE;
            }
    
            imgaddr_width = image_addr.dim_x;
            imgaddr_height = image_addr.dim_y;
            imgaddr_stride = image_addr.stride_y;
    
            for (i = 0; i < imgaddr_height; i++)
            {
                num_bytes_read_from_file += fread(data_ptr, 1, imgaddr_width * num_bytes_per_pixel, fp);
                data_ptr = (vx_uint8 *)data_ptr + imgaddr_stride;
            }
    
            status = tivxUnmapRawImagePatch(raw_image, map_id);
            fclose(fp);
            APP_PRINTF("%d bytes read from %s\n", num_bytes_read_from_file, raw_image_fname);
    
            if (num_bytes_read_from_file <= 0)
            {
                APP_PRINTF("[CAPTURE_MODULE] Bytes read by error frame for RAW image is < 0! \n");
                tivxReleaseRawImage(&raw_image);
                raw_image = NULL; /* Is this required after releasing the reference? */
                return VX_FAILURE;
            }
        }
        else
        {
            VX_PRINT(VX_ZONE_ERROR, "Could not allocate capture frame\n");
            return VX_FAILURE;
        }
    
        if ((vx_status)VX_SUCCESS == status)
        {
            // Send control command to the capture node to register the new raw image frame
            status = tivxNodeSendCommand(obj->captureObj.node,
                                         0,
                                         TIVX_CAPTURE_REGISTER_ERROR_FRAME,
                                         ref,
                                         1u);
        }
        else
        {
            VX_PRINT(VX_ZONE_ERROR, "Reference could not be allocated\n");
            return VX_FAILURE;
        }
        #endif
    
    // Tried a second approach - this also gives the same error
        // Release the previously registered raw image object
        if(NULL != obj->captureObj.error_frame_raw_image)
        {
            tivxReleaseRawImage(&obj->captureObj.error_frame_raw_image);
            APP_PRINTF("released: error_frame_raw_image.\n");
        }
    
        if (status == VX_SUCCESS)
        {
            // status = create_error_detection_frame(obj->context, obj->captureObj, obj->sensorObj);
    
            char test_data_paths[2][255] =
                {
                    "/ti_fs/next_gen_me/test_data/single_cam/IMX390_001/input2.raw",
                    "/ti_fs/next_gen_me/test_data/single_cam/AR0233_001/input2.raw"};
    
            vx_int32 bytes_read = 0;
            obj->captureObj.error_frame_raw_image = read_error_image_raw(obj->context, &(obj->sensorObj.sensorParams.sensorInfo),
                                                                     test_data_paths[obj->sensorObj.sensor_index],
                                                                     &bytes_read);
    
            APP_PRINTF("%d bytes were read by read_error_image_raw() from path %s\n", bytes_read, test_data_paths[obj->sensorObj.sensor_index]);
            status = vxGetStatus((vx_reference)obj->captureObj.error_frame_raw_image);
    
            if (status == VX_SUCCESS)
            {
                APP_PRINTF("%d bytes were read by read_error_image_raw()\n", bytes_read);
                if (bytes_read <= 0)
                {
                    APP_PRINTF("[CAPTURE_MODULE] Bytes read by error frame for RAW image is < 0! \n");
                    tivxReleaseRawImage(&obj->captureObj.error_frame_raw_image);
                    obj->captureObj.error_frame_raw_image = NULL; /* Is this required after releasing the reference? */
                }
            }
            else
            {
                APP_PRINTF("[CAPTURE_MODULE] Unable to create error frame RAW image!\n");
            }
        }
    
        if (status == VX_SUCCESS)
        {
            status = app_send_error_frame(&obj->captureObj);
        }
    
        return status;
    }
    
    static vx_status app_run_graph(AppObj *obj)
    {
        vx_status status = VX_SUCCESS;
    
        SensorObj *sensorObj = &obj->sensorObj;
        vx_int32 frame_id;
        uint64_t cur_time;
    
        app_pipeline_params_defaults(obj);
        APP_PRINTF("app_pipeline_params_defaults returned\n");
    
        if(NULL == sensorObj->sensor_name)
        {
            printf("sensor name is NULL \n");
            return VX_FAILURE;
        }
    
        if (obj->test_mode == 1) {
            // The buffer allows AWB/AE algos to converge before checksums are calculated
            // obj->num_frames_to_run = TEST_BUFFER + 30;
            obj->num_frames_to_run = 10000;
            APP_PRINTF("Num of frames to run: %d\n", obj->num_frames_to_run);
        }
    
        for(frame_id = 0; frame_id < obj->num_frames_to_run; frame_id++)
        {
            if(obj->write_file == 1)
            {
                if((obj->captureObj.en_out_capture_write == 1) && (status == VX_SUCCESS))
                {
                    status = app_send_cmd_capture_write_node(&obj->captureObj, frame_id, obj->num_frames_to_write, obj->num_frames_to_skip);
                }
                if((obj->vissObj.en_out_viss_write == 1) && (status == VX_SUCCESS))
                {
                    status = app_send_cmd_viss_write_node(&obj->vissObj, frame_id, obj->num_frames_to_write, obj->num_frames_to_skip);
                }
                if((obj->ldcObj.en_out_ldc_write == 1) && (status == VX_SUCCESS))
                {
                    status = app_send_cmd_ldc_write_node(&obj->ldcObj, frame_id, obj->num_frames_to_write, obj->num_frames_to_skip);
                }
                obj->write_file = 0;
            }
    
            if (status == VX_SUCCESS)
            {
                status = app_run_graph_for_one_frame_pipeline(obj, frame_id);
                test_raw_10sec(obj);
    
                #ifdef APP_DEBUG
                if (frame_id == 0)
                {
                    cur_time = appLogGetTimeInUsec();
                }
                
                if((frame_id%30) == 0)
                {
                    uint64_t time_elapsed = appLogGetTimeInUsec() - cur_time;
                    APP_PRINTF("Every 30 Frames: %d took %6d.%06u time\n",
                               frame_id,
                               (uint32_t)(time_elapsed / 1000000U),
                               (uint32_t)(time_elapsed % 1000000U));
                    // appLogPrintf("frame_id: %d\n", frame_id);
    
                    // store it for next 30frames time check
                    cur_time = appLogGetTimeInUsec();
                }
                #endif
            }
    
            /* user asked to stop processing */
            if(obj->stop_task)
              break;
        }
    
        if (status == VX_SUCCESS)
        {
            status = vxWaitGraph(obj->graph);
        }
        obj->stop_task = 1;
    
        return status;
    }
    
    static void set_display_defaults(DisplayObj *displayObj)
    {
        displayObj->display_option = 1;
    }
    
    static void app_pipeline_params_defaults(AppObj *obj)
    {
        obj->pipeline       = -APP_BUFFER_Q_DEPTH + 1;
        obj->enqueueCnt     = 0;
        obj->dequeueCnt     = 0;
    }
    
    static void set_sensor_defaults(SensorObj *sensorObj)
    {
        strcpy(sensorObj->sensor_name, SENSOR_ONSEMI_AR0233_UB953_MARS);
    
        sensorObj->num_sensors_found = 0;
        sensorObj->sensor_features_enabled = 0;
        sensorObj->sensor_dcc_enabled = 0;
        sensorObj->sensor_wdr_enabled = 0;
        sensorObj->sensor_exp_control_enabled = 0;
        sensorObj->sensor_gain_control_enabled = 0;
    }
    
    static void app_default_param_set(AppObj *obj)
    {
        set_sensor_defaults(&obj->sensorObj);
    
        set_display_defaults(&obj->displayObj);
    
        app_pipeline_params_defaults(obj);
    
        obj->is_interactive = 1;
        obj->test_mode = 0;
        obj->write_file = 0;
    
        // obj->sensorObj.enable_ldc = 0;
        obj->sensorObj.num_cameras_enabled = 1;
        obj->sensorObj.ch_mask = 0x1;
        obj->sensorObj.usecase_option = APP_SENSOR_FEATURE_CFG_UC0;
    }
    
    static void set_img_mosaic_params(ImgMosaicObj *imgMosaicObj, vx_uint32 in_width, vx_uint32 in_height, vx_int32 numCh)
    {
        vx_int32 idx, ch;
        vx_int32 grid_size = 1;
    
        imgMosaicObj->out_width    = DISPLAY_WIDTH;
        imgMosaicObj->out_height   = DISPLAY_HEIGHT;
        imgMosaicObj->num_inputs   = 1;
    
        idx = 0;
    
        tivxImgMosaicParamsSetDefaults(&imgMosaicObj->params);
    
        for(ch = 0; ch < numCh; ch++)
        {
            vx_int32 winX = ch%grid_size;
            vx_int32 winY = ch/grid_size;
    
            imgMosaicObj->params.windows[idx].startX  = (winX * (in_width/grid_size));
            imgMosaicObj->params.windows[idx].startY  = (winY * (in_height/grid_size));
            imgMosaicObj->params.windows[idx].width   = in_width/grid_size;
            imgMosaicObj->params.windows[idx].height  = in_height/grid_size;
            imgMosaicObj->params.windows[idx].input_select   = 0;
            imgMosaicObj->params.windows[idx].channel_select = ch;
            idx++;
        }
    
        imgMosaicObj->params.num_windows  = idx;
    
        /* Number of times to clear the output buffer before it gets reused */
        imgMosaicObj->params.clear_count  = APP_BUFFER_Q_DEPTH;
    }
    
    static void app_update_param_set(AppObj *obj)
    {
    
        vx_uint16 resized_width, resized_height;
        appIssGetResizeParams(obj->sensorObj.image_width, obj->sensorObj.image_height, DISPLAY_WIDTH, DISPLAY_HEIGHT, &resized_width, &resized_height);
    
        set_img_mosaic_params(&obj->imgMosaicObj, resized_width, resized_height, obj->sensorObj.num_cameras_enabled);
    
    }
    
    /*
     * Utility API used to add a graph parameter from a node, node parameter index
     */
    static void add_graph_parameter_by_node_index(vx_graph graph, vx_node node, vx_uint32 node_parameter_index)
    {
        vx_parameter parameter = vxGetParameterByIndex(node, node_parameter_index);
    
        vxAddParameterToGraph(graph, parameter);
        vxReleaseParameter(&parameter);
    }
    

  • Hi Gautam,

    If you want to send multiple test images, this application would require a lot of changes. You would have to remove the CSIRX node completely, use the VISS input as a graph parameter, copy the test images into the VISS input, and then trigger the graph execution. It would be slower, but it is possible (a minimal sketch of this flow is included after this reply).

    Regards,

    Brijesh
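
    As a rough sketch of that flow (assuming the TIOVX raw-image and pipelining APIs already used in this thread, and an AppObj laid out like the updated code further below with viss_raw_image_arr and viss_graph_parameter_index; the helper name, the VX_WRITE_ONLY usage, and the 2-bytes-per-pixel assumption are illustrative, not the final implementation):

    static vx_status load_raw_frame_and_enqueue(AppObj *obj, vx_uint32 buf_idx, FILE *fp)
    {
        vx_status status;
        tivx_raw_image raw_image;
        vx_rectangle_t rect;
        vx_map_id map_id;
        vx_imagepatch_addressing_t image_addr;
        void *data_ptr = NULL;
        vx_uint32 row, bytes_copied = 0;

        /* Element 0 of the object array that backs the VISS input graph parameter */
        raw_image = (tivx_raw_image)vxGetObjectArrayItem(obj->viss_raw_image_arr[buf_idx], 0);

        rect.start_x = 0;
        rect.start_y = 0;
        rect.end_x   = obj->sensorObj.sensorParams.sensorInfo.raw_params.width;
        rect.end_y   = obj->sensorObj.sensorParams.sensorInfo.raw_params.height;

        status = tivxMapRawImagePatch(raw_image, &rect, 0, &map_id, &image_addr,
                                      &data_ptr, VX_WRITE_ONLY, VX_MEMORY_TYPE_HOST,
                                      TIVX_RAW_IMAGE_ALLOC_BUFFER);

        if ((VX_SUCCESS == status) && (NULL != data_ptr))
        {
            /* Assuming a 16-bit pixel container (2 bytes/pixel); copy row by row
             * so the destination stride is honored */
            for (row = 0; row < image_addr.dim_y; row++)
            {
                bytes_copied += fread((vx_uint8 *)data_ptr + (row * image_addr.stride_y),
                                      1, image_addr.dim_x * 2u, fp);
            }
            APP_PRINTF("%u bytes copied into VISS input\n", bytes_copied);
            status = tivxUnmapRawImagePatch(raw_image, map_id);
        }

        tivxReleaseRawImage(&raw_image);

        /* Enqueue the filled buffer; the graph then runs VISS -> LDC -> Mosaic -> Display on it */
        if (VX_SUCCESS == status)
        {
            status = vxGraphParameterEnqueueReadyRef(obj->graph,
                         obj->viss_graph_parameter_index,
                         (vx_reference *)&obj->viss_raw_image_arr[buf_idx], 1);
        }

        return status;
    }

    After execution, vxGraphParameterDequeueDoneRef() would be used, as in the existing pipeline code, to get the buffer back before refilling it with the next frame from the file.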

  • Hi Brijesh,

    Please suggest possible solutions for the error I am getting:

    Updated code:

    /*
     *
     * Copyright (c) 2020 Texas Instruments Incorporated
     *
     * All rights reserved not granted herein.
     *
     * Limited License.
     *
     * Texas Instruments Incorporated grants a world-wide, royalty-free, non-exclusive
     * license under copyrights and patents it now or hereafter owns or controls to make,
     * have made, use, import, offer to sell and sell ("Utilize") this software subject to the
     * terms herein.  With respect to the foregoing patent license, such license is granted
     * solely to the extent that any such patent is necessary to Utilize the software alone.
     * The patent license shall not apply to any combinations which include this software,
     * other than combinations with devices manufactured by or for TI ("TI Devices").
     * No hardware patent is licensed hereunder.
     *
     * Redistributions must preserve existing copyright notices and reproduce this license
     * (including the above copyright notice and the disclaimer and (if applicable) source
     * code license limitations below) in the documentation and/or other materials provided
     * with the distribution
     *
     * Redistribution and use in binary form, without modification, are permitted provided
     * that the following conditions are met:
     *
     * *       No reverse engineering, decompilation, or disassembly of this software is
     * permitted with respect to any software provided in binary form.
     *
     * *       any redistribution and use are licensed by TI for use only with TI Devices.
     *
     * *       Nothing shall obligate TI to provide you with source code for the software
     * licensed and provided to you in object code.
     *
     * If software source code is provided to you, modification and redistribution of the
     * source code are permitted provided that the following conditions are met:
     *
     * *       any redistribution and use of the source code, including any resulting derivative
     * works, are licensed by TI for use only with TI Devices.
     *
     * *       any redistribution and use of any object code compiled from the source code
     * and any resulting derivative works, are licensed by TI for use only with TI Devices.
     *
     * Neither the name of Texas Instruments Incorporated nor the names of its suppliers
     *
     * may be used to endorse or promote products derived from this software without
     * specific prior written permission.
     *
     * DISCLAIMER.
     *
     * THIS SOFTWARE IS PROVIDED BY TI AND TI'S LICENSORS "AS IS" AND ANY EXPRESS
     * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
     * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
     * IN NO EVENT SHALL TI AND TI'S LICENSORS BE LIABLE FOR ANY DIRECT, INDIRECT,
     * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
     * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
     * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
     * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
     * OF THE POSSIBILITY OF SUCH DAMAGE.
     *
     */
    
    #include <app_utils/draw2d/include/draw2d.h>
    #include <app_utils/perf_stats/include/app_perf_stats.h>
    #include <app_utils/console_io/include/app_get.h>
    #include <app_utils/grpx/include/app_grpx.h>
    #include <app_utils/hwa/include/app_hwa_api.h>
    #include <VX/vx_khr_pipelining.h>
    
    #include "vxgraph.h"
    #include "app_sensor_module.h"
    #include "app_capture_module.h"
    #include "app_viss_module.h"
    #include "app_aewb_module.h"
    #include "app_ldc_module.h"
    #include "app_img_mosaic_module.h"
    #include "app_display_module.h"
    #include "app_test.h"
    
    #define APP_BUFFER_Q_DEPTH   (4)
    #define APP_PIPELINE_DEPTH   (7)
    
    typedef struct {
    
        SensorObj     sensorObj;
    
        VISSObj       vissObj;
        /*! Raw image read from the filesystem and fed to VISS */
        tivx_raw_image viss_input_raw_image;
        vx_uint8 viss_graph_parameter_index;
        /*! VISS node object array output */
        vx_object_array viss_raw_image_arr[APP_MODULES_MAX_BUFQ_DEPTH];
    
        AEWBObj       aewbObj;
        LDCObj        ldcObj;
        ImgMosaicObj  imgMosaicObj;
        DisplayObj    displayObj;
    
        vx_char output_file_path[APP_MAX_FILE_PATH];
    
        /* OpenVX references */
        vx_context context;
        vx_graph   graph;
    
        vx_int32 en_out_img_write;
    
        vx_uint32 num_frames_to_run;
    
        vx_uint32 num_frames_to_write;
        vx_uint32 num_frames_to_skip;
    
        tivx_task task;
        vx_uint32 stop_task;
        vx_uint32 stop_task_done;
    
        int32_t pipeline;
    
        int32_t enqueueCnt;
        int32_t dequeueCnt;
    
    } AppObj;
    
    AppObj gAppObj;
    
    static void app_deinit(AppObj *obj);
    static vx_status app_create_graph(AppObj *obj);
    static vx_status app_verify_graph(AppObj *obj);
    static void app_delete_graph(AppObj *obj);
    static void app_default_param_set(AppObj *obj);
    static void app_update_param_set(AppObj *obj);
    static void app_pipeline_params_defaults(AppObj *obj);
    static void add_graph_parameter_by_node_index(vx_graph graph, vx_node node, vx_uint32 node_parameter_index);
    static vx_status app_run_graph_for_one_frame_pipeline(AppObj *obj, vx_int32 frame_id);
    static vx_status read_fs_raw_image(AppObj *obj);
    static vx_status send_raw_frame_to_viss(AppObj *obj);
    
    
    vx_int32 vxGraphInit()
    {
        vx_status status = VX_SUCCESS;
    
        AppObj *obj = &gAppObj;
    
        /*Optional parameter setting*/
        app_default_param_set(obj);
    
        /* Update of parameters after config file read */
        app_update_param_set(obj);
    
        if (status == VX_SUCCESS)
        {
            /* Create OpenVx Context */
            obj->context = vxCreateContext();
            status = vxGetStatus((vx_reference)obj->context);
            APP_PRINTF("Creating context done!\n");
        }
    
        if (status == VX_SUCCESS)
        {
            tivxHwaLoadKernels(obj->context);
            tivxImagingLoadKernels(obj->context);
            tivxFileIOLoadKernels(obj->context);
            APP_PRINTF("Kernel loading done!\n");
        }
    
        if(status == VX_SUCCESS)
        {
            status = app_init_viss(obj->context, &obj->vissObj, &obj->sensorObj, "viss_obj");
            APP_PRINTF("VISS init done!\n");
        }
    
        if(status == VX_SUCCESS)
        {
            status = app_init_aewb(obj->context, &obj->aewbObj, &obj->sensorObj, "aewb_obj");
            APP_PRINTF("AEWB init done!\n");
        }
    
        if(status == VX_SUCCESS)
        {
            status = app_init_ldc(obj->context, &obj->ldcObj, &obj->sensorObj, "ldc_obj");
            APP_PRINTF("LDC init done!\n");
        }
    
        if(status == VX_SUCCESS)
        {
            status = app_init_img_mosaic(obj->context, &obj->imgMosaicObj, "img_mosaic_obj", APP_BUFFER_Q_DEPTH);
            APP_PRINTF("Img Mosaic init done!\n");
        }
    
        if (status == VX_SUCCESS)
        {
            status = app_init_display(obj->context, &obj->displayObj, "display_obj");
            APP_PRINTF("Display init done!\n");
        }
    
        if(status == VX_SUCCESS)
        {
            status = app_create_graph(obj);
        }
    
        if (status == VX_SUCCESS)
        {
            APP_PRINTF("App Graph Create Done! \n");
            status = app_verify_graph(obj);
        }
    
        if (status == VX_SUCCESS)
        {
            APP_PRINTF("App Graph Verify Done! \n");
        }
    
        return status;
    }
    
    int vxGraphRun(void)
    {
        AppObj *obj = &gAppObj;
    
        vx_status status = VX_SUCCESS;
        vx_int32 frame_id;
        uint64_t cur_time;
    
        app_pipeline_params_defaults(obj);
    
        /* /ti_fs/next_gen_me/test_data/single_cam/AR0233_001/input2.raw contains 500 frames */
        obj->num_frames_to_run = 500;
        APP_PRINTF("Num of frames to run: %d\n", obj->num_frames_to_run);
    
        for(frame_id = 0; frame_id < obj->num_frames_to_run; frame_id++)
        {
            if (status == VX_SUCCESS)
            {
                status = app_run_graph_for_one_frame_pipeline(obj, frame_id);
                //read_fs_raw_image(obj);
    
                #ifdef APP_DEBUG
                if (frame_id == 0)
                {
                    cur_time = appLogGetTimeInUsec();
                }
                
                if((frame_id%30) == 0)
                {
                    uint64_t time_elapsed = appLogGetTimeInUsec() - cur_time;
                    APP_PRINTF("Every 30 Frames: %d took %6d.%06u time\n",
                               frame_id,
                               (uint32_t)(time_elapsed / 1000000U),
                               (uint32_t)(time_elapsed % 1000000U));
    
                    // store it for next 30frames time check
                    cur_time = appLogGetTimeInUsec();
                }
                #endif
            }
    
            /* user asked to stop processing */
            if(obj->stop_task)
              break;
        }
    
        if (status == VX_SUCCESS)
        {
            status = vxWaitGraph(obj->graph);
        }
        obj->stop_task = 1;
    
        return status;
    }
    
    int vxGraphDeInit()
    {
        AppObj *obj = &gAppObj;
    
        vx_status status = VX_SUCCESS;
    
        app_delete_graph(obj);
        APP_PRINTF("App Delete Graph Done! \n");
    
        app_deinit(obj);
        APP_PRINTF("App De-init Done! \n");
    
        return status;
    }
    
    static void app_deinit(AppObj *obj)
    {
        app_deinit_viss(&obj->vissObj);
        APP_PRINTF("VISS deinit done!\n");
    
        app_deinit_aewb(&obj->aewbObj);
        APP_PRINTF("AEWB deinit done!\n");
    
        app_deinit_ldc(&obj->ldcObj);
        APP_PRINTF("LDC deinit done!\n");
    
        app_deinit_img_mosaic(&obj->imgMosaicObj, APP_BUFFER_Q_DEPTH);
        APP_PRINTF("Img Mosaic deinit done!\n");
    
        app_deinit_display(&obj->displayObj);
        APP_PRINTF("Display deinit done!\n");
    
        tivxHwaUnLoadKernels(obj->context);
        tivxImagingUnLoadKernels(obj->context);
        tivxFileIOUnLoadKernels(obj->context);
        APP_PRINTF("Kernels unload done!\n");
    
        vxReleaseContext(&obj->context);
        APP_PRINTF("Release context done!\n");
    }
    
    static void app_delete_graph(AppObj *obj)
    {
        app_delete_viss(&obj->vissObj);
        APP_PRINTF("VISS delete done!\n");
    
        app_delete_aewb(&obj->aewbObj);
        APP_PRINTF("AEWB delete done!\n");
    
        app_delete_ldc(&obj->ldcObj);
        APP_PRINTF("LDC delete done!\n");
    
        app_delete_img_mosaic(&obj->imgMosaicObj);
        APP_PRINTF("Img Mosaic delete done!\n");
    
        app_delete_display(&obj->displayObj);
        APP_PRINTF("Display delete done!\n");
    
        vxReleaseGraph(&obj->graph);
        APP_PRINTF("Graph delete done!\n");
    }
    
    static vx_status app_create_graph(AppObj *obj)
    {
        vx_status status = VX_SUCCESS;
        vx_graph_parameter_queue_params_t graph_parameters_queue_params_list[2];
        vx_int32 graph_parameter_index;
    
        obj->graph = vxCreateGraph(obj->context);
        status = vxGetStatus((vx_reference)obj->graph);
        if (status == VX_SUCCESS)
        {
            status = vxSetReferenceName((vx_reference)obj->graph, "app_main_raw_multi_frame");
        }
    
        /* read raw image from filesystem & store reference into obj->viss_input_raw_image */
        read_fs_raw_image(obj);
    
        if (status == VX_SUCCESS)
        {
            for (size_t i = 0; i < APP_BUFFER_Q_DEPTH; i++)
            {
                obj->viss_raw_image_arr[i] = vxCreateObjectArray(obj->context,
                                                                       (vx_reference)obj->viss_input_raw_image,
                                                                       obj->sensorObj.num_cameras_enabled);
            }
    
            status = app_create_graph_viss(obj->graph, &obj->vissObj, obj->viss_raw_image_arr[0]);
            APP_PRINTF("VISS graph done!\n");
        }
    
        if (status == VX_SUCCESS)
        {
            status = app_create_graph_aewb(obj->graph, &obj->aewbObj, obj->vissObj.h3a_stats_arr);
            APP_PRINTF("AEWB graph done!\n");
        }
    
        if (status == VX_SUCCESS)
        {
            vx_object_array ldc_in_arr;
            ldc_in_arr = obj->vissObj.output_arr;
            status = app_create_graph_ldc(obj->graph, &obj->ldcObj, ldc_in_arr);
            APP_PRINTF("LDC graph done!\n");
        }
    
        if (status == VX_SUCCESS)
        {
            obj->imgMosaicObj.input_arr[0] = obj->ldcObj.output_arr;
            obj->imgMosaicObj.num_inputs = 1;
            status = app_create_graph_img_mosaic(obj->graph, &obj->imgMosaicObj, NULL);
            APP_PRINTF("Img Mosaic graph done!\n");
        }
    
        if(status == VX_SUCCESS)
        {
            vx_image display_in_image;
            display_in_image = obj->imgMosaicObj.output_image[0];
            status = app_create_graph_display(obj->graph, &obj->displayObj, display_in_image);
            APP_PRINTF("Display graph done!\n");
        }
    
        if(status == VX_SUCCESS)
        {
            graph_parameter_index = 0;
            /* input @ VISS node index 0, becomes graph parameter 0 */
            add_graph_parameter_by_node_index(obj->graph, obj->vissObj.node, 0);
    
            /* output @ mosaic node index 1, becomes graph parameter 1 */
            add_graph_parameter_by_node_index(obj->graph, obj->imgMosaicObj.node, 1);
    
            obj->viss_graph_parameter_index = graph_parameter_index;
            graph_parameters_queue_params_list[graph_parameter_index].graph_parameter_index = graph_parameter_index;
            graph_parameters_queue_params_list[graph_parameter_index].refs_list_size = APP_BUFFER_Q_DEPTH;
            graph_parameters_queue_params_list[graph_parameter_index].refs_list = (vx_reference*)&obj->viss_raw_image_arr[0];
            graph_parameter_index++;
    
            obj->imgMosaicObj.graph_parameter_index = graph_parameter_index;
            graph_parameters_queue_params_list[graph_parameter_index].graph_parameter_index = graph_parameter_index;
            graph_parameters_queue_params_list[graph_parameter_index].refs_list_size = APP_BUFFER_Q_DEPTH;
            graph_parameters_queue_params_list[graph_parameter_index].refs_list = (vx_reference *)&obj->imgMosaicObj.output_image[0];
            graph_parameter_index++;
    
            status = vxSetGraphScheduleConfig(obj->graph,
                    VX_GRAPH_SCHEDULE_MODE_QUEUE_AUTO,
                    graph_parameter_index,
                    graph_parameters_queue_params_list);
    
            if (status == VX_SUCCESS)
            {
                status = tivxSetGraphPipelineDepth(obj->graph, APP_PIPELINE_DEPTH);
            }
            if(status == VX_SUCCESS)
            {
                status = tivxSetNodeParameterNumBufByIndex(obj->vissObj.node, 6, APP_BUFFER_Q_DEPTH);
            }
            if (status == VX_SUCCESS)
            {
                status = tivxSetNodeParameterNumBufByIndex(obj->vissObj.node, 9, APP_BUFFER_Q_DEPTH);
            }
            if (status == VX_SUCCESS)
            {
                status = tivxSetNodeParameterNumBufByIndex(obj->aewbObj.node, 4, APP_BUFFER_Q_DEPTH);
            }
            if (status == VX_SUCCESS)
            {
                status = tivxSetNodeParameterNumBufByIndex(obj->ldcObj.node, 7, APP_BUFFER_Q_DEPTH);
                APP_PRINTF("Pipeline params setup done!\n");
            }
        }
        return status;
    }
    
    static vx_status app_verify_graph(AppObj *obj)
    {
        vx_status status = VX_SUCCESS;
    
        status = vxVerifyGraph(obj->graph);
    
        #if 0
        if(VX_SUCCESS == status)
        {
          status = tivxExportGraphToDot(obj->graph,".", "graph_exports");
        }
        #endif
    
        if (status == VX_SUCCESS)
        {
             status = send_raw_frame_to_viss(obj);
        }
    
        /* wait a while for prints to flush */
        tivxTaskWaitMsecs(100);
    
        return status;
    }
    
    static vx_status app_run_graph_for_one_frame_pipeline(AppObj *obj, vx_int32 frame_id)
    {
        vx_status status = VX_SUCCESS;
    
        APP_PRINTF("app_run_graph_for_one_pipeline: frame %d beginning\n", frame_id);
    
        ImgMosaicObj *imgMosaicObj = &obj->imgMosaicObj;
    
        if(obj->pipeline < 0)
        {
            /* Enqueue outputs */
            status = vxGraphParameterEnqueueReadyRef(obj->graph, imgMosaicObj->graph_parameter_index, (vx_reference *)&imgMosaicObj->output_image[obj->enqueueCnt], 1);
    
            /* Enqueue inputs during pipe-up; these frames don't execute yet */
            if (status == VX_SUCCESS)
            {
                status = vxGraphParameterEnqueueReadyRef(obj->graph, obj->viss_graph_parameter_index, (vx_reference*)&obj->viss_raw_image_arr[obj->enqueueCnt], 1);
            }
            obj->enqueueCnt++;
            obj->enqueueCnt   = (obj->enqueueCnt  >= APP_BUFFER_Q_DEPTH)? 0 : obj->enqueueCnt;
            obj->pipeline++;
        }
    
        if((obj->pipeline == 0) && (status == VX_SUCCESS))
        {
            status = vxGraphParameterEnqueueReadyRef(obj->graph, imgMosaicObj->graph_parameter_index, (vx_reference *)&imgMosaicObj->output_image[obj->enqueueCnt], 1);
    
            /* Execute 1st frame */
            if(status == VX_SUCCESS)
            {
                status = vxGraphParameterEnqueueReadyRef(obj->graph, obj->viss_graph_parameter_index, (vx_reference*)&obj->viss_raw_image_arr[obj->enqueueCnt], 1);
            }
            obj->enqueueCnt++;
            obj->enqueueCnt   = (obj->enqueueCnt  >= APP_BUFFER_Q_DEPTH)? 0 : obj->enqueueCnt;
            obj->pipeline++;
        }
    
        if((obj->pipeline > 0) && (status == VX_SUCCESS))
        {
            vx_object_array viss_input_arr;
            vx_image mosaic_output_image;
            uint32_t num_refs;
    
            /* Dequeue input */
            status = vxGraphParameterDequeueDoneRef(obj->graph, obj->viss_graph_parameter_index, (vx_reference*)&viss_input_arr, 1, &num_refs);
    
            /* Dequeue output */
            if (status == VX_SUCCESS)
            {
                status = vxGraphParameterDequeueDoneRef(obj->graph, imgMosaicObj->graph_parameter_index, (vx_reference *)&mosaic_output_image, 1, &num_refs);
            }
    
            /* Enqueue output */
            if (status == VX_SUCCESS)
            {
                status = vxGraphParameterEnqueueReadyRef(obj->graph, imgMosaicObj->graph_parameter_index, (vx_reference *)&mosaic_output_image, 1);
            }
    
            /* Enqueue input - start execution */
            if (status == VX_SUCCESS)
            {
                status = vxGraphParameterEnqueueReadyRef(obj->graph, obj->viss_graph_parameter_index, (vx_reference*)&viss_input_arr, 1);
            }
    
            obj->enqueueCnt++;
            obj->dequeueCnt++;
    
            obj->enqueueCnt = (obj->enqueueCnt >= APP_BUFFER_Q_DEPTH)? 0 : obj->enqueueCnt;
            obj->dequeueCnt = (obj->dequeueCnt >= APP_BUFFER_Q_DEPTH)? 0 : obj->dequeueCnt;
        }
    
        return status;
    }
    
    static vx_status read_fs_raw_image(AppObj *obj)
    {
        vx_status status = VX_SUCCESS;
        char raw_image_path[] = "/ti_fs/next_gen_me/test_data/single_cam/AR0233_001/input2.raw";
    
        /* Release the previously created raw image object, if any */
        if(NULL != obj->viss_input_raw_image)
        {
            tivxReleaseRawImage(&obj->viss_input_raw_image);
            APP_PRINTF("released: viss_input_raw_image.\n");
        }
    
        vx_int32 bytes_read = 0;
        obj->viss_input_raw_image = read_error_image_raw(obj->context,
                                                                     &(obj->sensorObj.sensorParams.sensorInfo),
                                                                     raw_image_path,
                                                                     &bytes_read);
    
        APP_PRINTF("%d bytes were read by read_error_image_raw() from path %s\n",
                   bytes_read,
                   raw_image_path);
        status = vxGetStatus((vx_reference)obj->viss_input_raw_image);
    
        if (status == VX_SUCCESS)
        {
            if (bytes_read <= 0)
            {
                APP_PRINTF("read_fs_raw_image: no bytes were read for the RAW image!\n");
                tivxReleaseRawImage(&obj->viss_input_raw_image);
                obj->viss_input_raw_image = NULL;
                status = VX_FAILURE;
            }
        }
        else
        {
            APP_PRINTF("read_fs_raw_image: unable to create RAW image from file!\n");
        }
    
        return status;
    }
    
    static vx_status send_raw_frame_to_viss(AppObj *obj)
    {
        vx_status status = VX_SUCCESS;
        vx_reference ref[1];
    
        // status = tivxCaptureValidateAllocFrame(obj->vissObj.node,
        //                                        (vx_reference)obj->viss_input_raw_image);
    
        ref[0] = (vx_reference)obj->viss_input_raw_image;
        vx_enum ref_type;
        vx_reference frame = (vx_reference)obj->viss_input_raw_image;
        tivxCheckStatus(&status, vxQueryReference(frame, (vx_enum)VX_REFERENCE_TYPE, &ref_type, sizeof(ref_type)));
    
        if ((vx_status)VX_SUCCESS == status)
        {
            vx_map_id map_id;
            vx_rectangle_t rect;
            vx_imagepatch_addressing_t image_addr;
            void *data_ptr;
            vx_uint32 img_width;
            vx_uint32 img_height;
    
            tivxQueryRawImage((tivx_raw_image)frame, TIVX_RAW_IMAGE_WIDTH, &img_width, sizeof(vx_uint32));
            tivxQueryRawImage((tivx_raw_image)frame, TIVX_RAW_IMAGE_HEIGHT, &img_height, sizeof(vx_uint32));
    
            rect.start_x = 0;
            rect.start_y = 0;
            rect.end_x = img_width;
            rect.end_y = img_height;
    
            /* Map and unmap once with TIVX_RAW_IMAGE_ALLOC_BUFFER so that the raw image
             * buffer is actually allocated before its reference is handed to the node
             * (same pattern the capture module uses when registering its error frame) */
            status = tivxMapRawImagePatch((tivx_raw_image)frame,
                                          &rect,
                                          0,
                                          &map_id,
                                          &image_addr,
                                          &data_ptr,
                                          VX_READ_ONLY,
                                          VX_MEMORY_TYPE_HOST,
                                          TIVX_RAW_IMAGE_ALLOC_BUFFER);
    
            if ((vx_status)VX_SUCCESS == status)
            {
                status = tivxUnmapRawImagePatch((tivx_raw_image)frame, map_id);
    
                if ((vx_status)VX_SUCCESS == status)
                {
                    // control command to send raw frame details to VISS raw image descriptor
                    status = tivxNodeSendCommand(obj->vissObj.node, 0,
                                                 TIVX_VPAC_VISS_CMD_REGISTER_RAW_FRAME, ref, 1u);
                }
                else
                {
                    APP_PRINTF("Error: tivxUnmapRawImagePatch failed..\n");
                }
            }
            else
            {
                APP_PRINTF("Error: tivxMapRawImagePatch failed..\n");
            }
        }
        else
        {
            APP_PRINTF("Error: tivxCaptureValidateAllocFrame failed to validate raw frame\n");
        }
    
        return status;
    }
    
    static void set_display_defaults(DisplayObj *displayObj)
    {
        displayObj->display_option = 1;
    }
    
    static void app_pipeline_params_defaults(AppObj *obj)
    {
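        /* Start the pipeline counter negative so that the first (APP_BUFFER_Q_DEPTH - 1)
         * iterations only enqueue buffers (pipe-up) before steady-state enqueue/dequeue begins */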
        obj->pipeline       = -APP_BUFFER_Q_DEPTH + 1;
        obj->enqueueCnt     = 0;
        obj->dequeueCnt     = 0;
    }
    
    static void set_sensor_defaults(SensorObj *sensorObj)
    {
        strcpy(sensorObj->sensor_name, SENSOR_ONSEMI_AR0233_UB953_MARS);
    
        sensorObj->num_sensors_found = 0;
    
        // sensor index is 1 for "AR0233-UB953_MARS"
        sensorObj->sensor_index = 1;
        sensorObj->sensorParams.sensorInfo.raw_params.width = 1920;
        sensorObj->sensorParams.sensorInfo.raw_params.height = 1280;
        sensorObj->image_width = sensorObj->sensorParams.sensorInfo.raw_params.width;
        sensorObj->image_height = sensorObj->sensorParams.sensorInfo.raw_params.height;
    
        sensorObj->sensorParams.sensorInfo.raw_params.num_exposures = 1;
        sensorObj->sensorParams.sensorInfo.raw_params.line_interleaved = vx_false_e;
        sensorObj->sensorParams.sensorInfo.raw_params.format[0].pixel_container = TIVX_RAW_IMAGE_16_BIT;
        sensorObj->sensor_out_format = 0;
        sensorObj->sensorParams.sensorInfo.raw_params.format[0].msb = 11;
        sensorObj->sensorParams.sensorInfo.raw_params.meta_height_before = 0;
        sensorObj->sensorParams.sensorInfo.raw_params.meta_height_after = 0;
    
        sensorObj->sensorParams.sensorInfo.features = 0x00000178;
        sensorObj->sensorParams.sensorInfo.aewbMode = ALGORITHMS_ISS_AEWB_MODE_AEWB;
    
        sensorObj->sensorParams.sensorInfo.fps = 30;
        sensorObj->sensorParams.sensorInfo.numDataLanes = 4;
        sensorObj->sensorParams.sensorInfo.dataLanesMap[0] = 1;
        sensorObj->sensorParams.sensorInfo.dataLanesMap[1] = 2;
        sensorObj->sensorParams.sensorInfo.dataLanesMap[2] = 3;
        sensorObj->sensorParams.sensorInfo.dataLanesMap[3] = 4;
    
        sensorObj->sensorParams.sensorInfo.dataLanesPolarity[0] = 0;
        sensorObj->sensorParams.sensorInfo.dataLanesPolarity[1] = 0;
        sensorObj->sensorParams.sensorInfo.dataLanesPolarity[2] = 0;
        sensorObj->sensorParams.sensorInfo.dataLanesPolarity[3] = 0;
        // CSIRX_LANE_BAND_SPEED_1350_TO_1500_MBPS = 0x12U
        sensorObj->sensorParams.sensorInfo.csi_laneBandSpeed = 0x12U;
        sensorObj->sensorParams.num_channels = 1;
        sensorObj->sensorParams.dccId = 233;
    
        /*
        Check for supported sensor features.
        It is up to the application to decide which features should be enabled.
        This demo app enables WDR, DCC and 2A if the sensor supports them.
        */
        sensorObj->sensor_features_enabled = 0;
        APP_PRINTF("WDR mode is supported \n");
        sensorObj->sensor_features_enabled |= ISS_SENSOR_FEATURE_COMB_COMP_WDR_MODE;
        sensorObj->sensor_wdr_enabled = 1;
    
        APP_PRINTF("Expsoure control is supported \n");
        sensorObj->sensor_features_enabled |= ISS_SENSOR_FEATURE_MANUAL_EXPOSURE;
        sensorObj->sensor_exp_control_enabled = 1;
    
        APP_PRINTF("Gain control is supported \n");
        sensorObj->sensor_features_enabled |= ISS_SENSOR_FEATURE_MANUAL_GAIN;
        sensorObj->sensor_gain_control_enabled = 1;
    
        sensorObj->sensor_features_enabled |= ISS_SENSOR_FEATURE_DCC_SUPPORTED;
        sensorObj->sensor_dcc_enabled = 1;
        APP_PRINTF("Sensor DCC is enabled \n");
        APP_PRINTF("Sensor width = %d\n", sensorObj->sensorParams.sensorInfo.raw_params.width);
        APP_PRINTF("Sensor height = %d\n", sensorObj->sensorParams.sensorInfo.raw_params.height);
        APP_PRINTF("Sensor DCC ID = %d\n", sensorObj->sensorParams.dccId);
        APP_PRINTF("Sensor Supported Features = 0x%08X\n", sensorObj->sensor_features_supported);
        APP_PRINTF("Sensor Enabled Features = 0x%08X\n", sensorObj->sensor_features_enabled);
    }
    
    static void app_default_param_set(AppObj *obj)
    {
        set_sensor_defaults(&obj->sensorObj);
    
        set_display_defaults(&obj->displayObj);
    
        app_pipeline_params_defaults(obj);
    
        obj->viss_input_raw_image = NULL;
    
        obj->sensorObj.num_cameras_enabled = 1;
        obj->sensorObj.ch_mask = 0x1;
        obj->sensorObj.usecase_option = APP_SENSOR_FEATURE_CFG_UC0;
    }
    
    static void set_img_mosaic_params(ImgMosaicObj *imgMosaicObj, vx_uint32 in_width, vx_uint32 in_height, vx_int32 numCh)
    {
        vx_int32 idx, ch;
        vx_int32 grid_size = 1;
    
        imgMosaicObj->out_width    = DISPLAY_WIDTH;
        imgMosaicObj->out_height   = DISPLAY_HEIGHT;
        imgMosaicObj->num_inputs   = 1;
    
        idx = 0;
    
        tivxImgMosaicParamsSetDefaults(&imgMosaicObj->params);
    
        for(ch = 0; ch < numCh; ch++)
        {
            vx_int32 winX = ch%grid_size;
            vx_int32 winY = ch/grid_size;
    
            imgMosaicObj->params.windows[idx].startX  = (winX * (in_width/grid_size));
            imgMosaicObj->params.windows[idx].startY  = (winY * (in_height/grid_size));
            imgMosaicObj->params.windows[idx].width   = in_width/grid_size;
            imgMosaicObj->params.windows[idx].height  = in_height/grid_size;
            imgMosaicObj->params.windows[idx].input_select   = 0;
            imgMosaicObj->params.windows[idx].channel_select = ch;
            idx++;
        }
    
        imgMosaicObj->params.num_windows  = idx;
    
        /* Number of time to clear the output buffer before it gets reused */
        imgMosaicObj->params.clear_count  = APP_BUFFER_Q_DEPTH;
    }
    
    static void app_update_param_set(AppObj *obj)
    {
    
        vx_uint16 resized_width, resized_height;
        appIssGetResizeParams(obj->sensorObj.image_width, obj->sensorObj.image_height, DISPLAY_WIDTH, DISPLAY_HEIGHT, &resized_width, &resized_height);
    
        set_img_mosaic_params(&obj->imgMosaicObj, resized_width, resized_height, obj->sensorObj.num_cameras_enabled);
    
    }
    
    /*
     * Utility API used to add a graph parameter from a node, node parameter index
     */
    static void add_graph_parameter_by_node_index(vx_graph graph, vx_node node, vx_uint32 node_parameter_index)
    {
        vx_parameter parameter = vxGetParameterByIndex(node, node_parameter_index);
    
        vxAddParameterToGraph(graph, parameter);
        vxReleaseParameter(&parameter);
    }
    

  • Hi Gautam,

    OK. If you want to send multiple images to the VISS, it would require several changes to the application. The idea would then be to remove the CSIRX node, declare the input of the VISS node as a graph parameter, and provide frames to the VISS node by doing enqueue and dequeue operations on the graph.

    This is not supported in the current example.
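
    Since the VISS input object array is already a graph parameter in your code, no per-frame control command is needed; each graph execution simply consumes whatever data is in the raw image you enqueue. For illustration only, below is an untested sketch of the per-frame refill step. The helper name load_raw_file_into_image(), the input_%04d.raw file naming and the single-exposure, 16-bit pixel layout are my assumptions, not part of the SDK.

    /* Hypothetical helper (not in the SDK): copy one raw file from the filesystem
     * into an existing tivx_raw_image. Assumes a single exposure and a 16-bit
     * pixel container, i.e. width * 2 bytes per line in the file. */
    static vx_status load_raw_file_into_image(tivx_raw_image raw_image, const char *file_name)
    {
        vx_status status;
        vx_uint32 width, height, line;
        vx_rectangle_t rect;
        vx_map_id map_id;
        vx_imagepatch_addressing_t image_addr;
        void *data_ptr = NULL;
        FILE *fp;

        tivxQueryRawImage(raw_image, TIVX_RAW_IMAGE_WIDTH,  &width,  sizeof(width));
        tivxQueryRawImage(raw_image, TIVX_RAW_IMAGE_HEIGHT, &height, sizeof(height));

        rect.start_x = 0;
        rect.start_y = 0;
        rect.end_x   = width;
        rect.end_y   = height;

        status = tivxMapRawImagePatch(raw_image, &rect, 0, &map_id, &image_addr,
                                      &data_ptr, VX_WRITE_ONLY, VX_MEMORY_TYPE_HOST,
                                      TIVX_RAW_IMAGE_PIXEL_BUFFER);
        if ((vx_status)VX_SUCCESS == status)
        {
            fp = fopen(file_name, "rb");
            if (NULL != fp)
            {
                /* Copy line by line so that the buffer stride is respected */
                for (line = 0; (line < height) && (status == VX_SUCCESS); line++)
                {
                    size_t rd = fread((vx_uint8 *)data_ptr + ((vx_size)line * (vx_size)image_addr.stride_y),
                                      1u, (size_t)width * 2u, fp);
                    if (rd != ((size_t)width * 2u))
                    {
                        status = (vx_status)VX_FAILURE;
                    }
                }
                fclose(fp);
            }
            else
            {
                status = (vx_status)VX_FAILURE;
            }
            tivxUnmapRawImagePatch(raw_image, map_id);
        }
        return status;
    }

    /* Then, in the steady-state branch of app_run_graph_for_one_frame_pipeline(),
     * refill the dequeued VISS input with the next frame of the sequence before
     * enqueueing it again (single camera, so only channel 0 is updated): */
    char next_file[APP_MAX_FILE_PATH];
    tivx_raw_image next_raw;

    snprintf(next_file, APP_MAX_FILE_PATH,
             "/ti_fs/next_gen_me/test_data/single_cam/AR0233_001/input_%04d.raw", frame_id);

    next_raw = (tivx_raw_image)vxGetObjectArrayItem(viss_input_arr, 0);
    status   = load_raw_file_into_image(next_raw, next_file);
    tivxReleaseRawImage(&next_raw);

    if (status == VX_SUCCESS)
    {
        status = vxGraphParameterEnqueueReadyRef(obj->graph, obj->viss_graph_parameter_index,
                                                 (vx_reference *)&viss_input_arr, 1);
    }

    Note that reading from the filesystem on every frame will most likely not sustain 30fps, so in practice you may want to pre-load the whole sequence into a pool of raw image buffers (for example a larger viss_raw_image_arr) before starting the graph.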

    Regards,

    Brijesh