/**
 * \brief Create the OpenVX graph for the single-camera demo.
 *
 * Pipeline (stages created depending on sensor type and options):
 *     capture -> [VISS -> AEWB] -> [LDC] -> [MSC scaler] -> display
 *
 * - Queries the sensor and enables the features it supports (WDR, DCC,
 *   manual exposure/gain). A YUV (UYVY) sensor bypasses VISS and AEWB.
 * - LDC is inserted when obj->ldc_enable is set; the MSC scaler is inserted
 *   only when the stream must be downsized to fit the display.
 * - Registers graph parameters for pipelining (capture output, plus the
 *   display input in test mode), configures auto-queue scheduling and
 *   verifies the graph.
 *
 * \param obj  Application context; created graph/node/image handles are
 *             stored into it.
 * \return VX_SUCCESS on success, a VX error code otherwise.
 */
vx_status app_create_graph(AppObj *obj)
{
    vx_status status = VX_SUCCESS;
    int32_t sensor_init_status = -1;

    obj->configuration = NULL;
    obj->raw           = NULL;
    obj->y12           = NULL;
    obj->uv12_c1       = NULL;
    obj->y8_r8_c2      = NULL;
    obj->uv8_g8_c3     = NULL;
    obj->s8_b8_c4      = NULL;
    obj->histogram     = NULL;
    obj->h3a_aew_af    = NULL;
    obj->display_image = NULL;

    unsigned int image_width  = obj->width_in;
    unsigned int image_height = obj->height_in;

    tivx_raw_image raw_image = 0;
    vx_user_data_object capture_config;
    vx_uint8 num_capture_frames = 1;
    tivx_capture_params_t local_capture_config;
    uint32_t buf_id;
    const vx_char capture_user_data_object_name[] = "tivx_capture_params_t";
    uint32_t sensor_features_enabled = 0;
    uint32_t sensor_features_supported = 0;
    uint32_t sensor_wdr_enabled = 0;
    uint32_t sensor_exp_control_enabled = 0;
    uint32_t sensor_gain_control_enabled = 0;
    vx_bool yuv_cam_input = vx_false_e;
    vx_image viss_out_image = NULL;
    vx_image ldc_in_image = NULL;
    vx_image capt_yuv_image = NULL;
    /* One bit per camera; enable only the selected camera. */
    uint8_t channel_mask = (1 << obj->selectedCam);

    /* In test mode the display input is also made a graph parameter so
     * frames can be dequeued and checked. */
    vx_uint32 params_list_depth = 1;
    if (obj->test_mode == 1)
    {
        params_list_depth = 2;
    }
    vx_graph_parameter_queue_params_t graph_parameters_queue_params_list[params_list_depth];

    status = appSetSensorCSIInstance(obj->sensor_csi_instance, SINGLE_APP);
    if (VX_SUCCESS != status)
    {
        printf("appSetSensorCSIInstance returned %d\n", status);
        return status;
    }

    printf("Querying %s \n", obj->sensor_name);
    memset(&sensorParams, 0, sizeof(sensorParams));
    status = appQueryImageSensor(obj->sensor_name, &sensorParams);
    if (VX_SUCCESS != status)
    {
        printf("appQueryImageSensor returned %d\n", status);
        return status;
    }

    if (sensorParams.sensorInfo.raw_params.format[0].pixel_container == VX_DF_IMAGE_UYVY)
    {
        yuv_cam_input = vx_true_e;
        printf("YUV Input selected. VISS and AEWB nodes will be bypassed. \n");
    }

    /*
     * Check for supported sensor features.
     * It is up to the application to decide which features should be enabled.
     * This demo app enables WDR, DCC and 2A if the sensor supports it.
     */
    sensor_features_supported = sensorParams.sensorInfo.features;

    if (vx_false_e == yuv_cam_input)
    {
        if (ISS_SENSOR_FEATURE_COMB_COMP_WDR_MODE ==
            (sensor_features_supported & ISS_SENSOR_FEATURE_COMB_COMP_WDR_MODE))
        {
            APP_PRINTF("WDR mode is supported \n");
            sensor_features_enabled |= ISS_SENSOR_FEATURE_COMB_COMP_WDR_MODE;
            sensor_wdr_enabled = 1;
            obj->sensor_wdr_mode = 1;
        }
        else
        {
            APP_PRINTF("WDR mode is not supported. Defaulting to linear \n");
            sensor_features_enabled |= ISS_SENSOR_FEATURE_LINEAR_MODE;
            sensor_wdr_enabled = 0;
            obj->sensor_wdr_mode = 0;
        }

        if (ISS_SENSOR_FEATURE_MANUAL_EXPOSURE ==
            (sensor_features_supported & ISS_SENSOR_FEATURE_MANUAL_EXPOSURE))
        {
            APP_PRINTF("Exposure control is supported \n");
            sensor_features_enabled |= ISS_SENSOR_FEATURE_MANUAL_EXPOSURE;
            sensor_exp_control_enabled = 1;
        }

        if (ISS_SENSOR_FEATURE_MANUAL_GAIN ==
            (sensor_features_supported & ISS_SENSOR_FEATURE_MANUAL_GAIN))
        {
            APP_PRINTF("Gain control is supported \n");
            sensor_features_enabled |= ISS_SENSOR_FEATURE_MANUAL_GAIN;
            sensor_gain_control_enabled = 1;
        }

        if (ISS_SENSOR_FEATURE_CFG_UC1 ==
            (sensor_features_supported & ISS_SENSOR_FEATURE_CFG_UC1))
        {
            APP_PRINTF("CMS Usecase is supported \n");
            sensor_features_enabled |= ISS_SENSOR_FEATURE_CFG_UC1;
        }

        /* Map the sensor's advertised 2A capability onto the app's AE/AWB
         * algorithm configuration. */
        switch (sensorParams.sensorInfo.aewbMode)
        {
            case ALGORITHMS_ISS_AEWB_MODE_NONE:
                obj->aewb_cfg.ae_mode = ALGORITHMS_ISS_AE_DISABLED;
                obj->aewb_cfg.awb_mode = ALGORITHMS_ISS_AWB_DISABLED;
                break;
            case ALGORITHMS_ISS_AEWB_MODE_AWB:
                obj->aewb_cfg.ae_mode = ALGORITHMS_ISS_AE_DISABLED;
                obj->aewb_cfg.awb_mode = ALGORITHMS_ISS_AWB_AUTO;
                break;
            case ALGORITHMS_ISS_AEWB_MODE_AE:
                obj->aewb_cfg.ae_mode = ALGORITHMS_ISS_AE_AUTO;
                obj->aewb_cfg.awb_mode = ALGORITHMS_ISS_AWB_DISABLED;
                break;
            case ALGORITHMS_ISS_AEWB_MODE_AEWB:
                obj->aewb_cfg.ae_mode = ALGORITHMS_ISS_AE_AUTO;
                obj->aewb_cfg.awb_mode = ALGORITHMS_ISS_AWB_AUTO;
                break;
            default:
                /* Unknown mode: leave aewb_cfg as previously configured. */
                break;
        }

        /* If auto-exposure is off but the sensor accepts manual exposure or
         * gain programming, run AE in manual mode. */
        if (obj->aewb_cfg.ae_mode == ALGORITHMS_ISS_AE_DISABLED)
        {
            if (sensor_exp_control_enabled || sensor_gain_control_enabled)
            {
                obj->aewb_cfg.ae_mode = ALGORITHMS_ISS_AE_MANUAL;
            }
        }
        APP_PRINTF("obj->aewb_cfg.ae_mode = %d\n", obj->aewb_cfg.ae_mode);
        APP_PRINTF("obj->aewb_cfg.awb_mode = %d\n", obj->aewb_cfg.awb_mode);
    }

    if (ISS_SENSOR_FEATURE_DCC_SUPPORTED ==
        (sensor_features_supported & ISS_SENSOR_FEATURE_DCC_SUPPORTED))
    {
        sensor_features_enabled |= ISS_SENSOR_FEATURE_DCC_SUPPORTED;
        APP_PRINTF("Sensor DCC is enabled \n");
    }
    else
    {
        APP_PRINTF("Sensor DCC is NOT enabled \n");
    }

    APP_PRINTF("Sensor width = %d\n", sensorParams.sensorInfo.raw_params.width);
    APP_PRINTF("Sensor height = %d\n", sensorParams.sensorInfo.raw_params.height);
    APP_PRINTF("Sensor DCC ID = %d\n", sensorParams.dccId);
    APP_PRINTF("Sensor Supported Features = 0x%x\n", sensor_features_supported);
    APP_PRINTF("Sensor Enabled Features = 0x%x\n", sensor_features_enabled);

    sensor_init_status = appInitImageSensor(obj->sensor_name,
                                            sensor_features_enabled,
                                            channel_mask); /* Mask = 1 for camera #0 */
    if (0 != sensor_init_status)
    {
        /* Not returning failure because application may be waiting for
         * error/test frame */
        printf("Error initializing sensor %s \n", obj->sensor_name);
    }

    image_width  = sensorParams.sensorInfo.raw_params.width;
    image_height = sensorParams.sensorInfo.raw_params.height;
    obj->cam_dcc_id = sensorParams.dccId;
    obj->width_in  = image_width;
    obj->height_in = image_height;

    /*
     * Assuming same dataformat for all exposures.
     * This may not be true for staggered HDR. Will be handled later.
     */
    obj->graph = vxCreateGraph(obj->context);
    if (status == VX_SUCCESS)
    {
        status = vxGetStatus((vx_reference)obj->graph);
    }
    APP_ASSERT(vx_true_e == tivxIsTargetEnabled(TIVX_TARGET_VPAC_VISS1));

    APP_PRINTF("Initializing params for capture node \n");
    /* Setting to num buf of capture node */
    obj->num_cap_buf = NUM_BUFS;

    if (vx_false_e == yuv_cam_input)
    {
        raw_image = tivxCreateRawImage(obj->context, &sensorParams.sensorInfo.raw_params);

        /* allocate Input and Output refs, multiple refs created to allow
         * pipelining of graph */
        for (buf_id = 0; buf_id < obj->num_cap_buf; buf_id++)
        {
            if (status == VX_SUCCESS)
            {
                obj->cap_frames[buf_id] = vxCreateObjectArray(obj->context,
                                                              (vx_reference)raw_image,
                                                              num_capture_frames);
                status = vxGetStatus((vx_reference)obj->cap_frames[buf_id]);
            }
        }
    }
    else
    {
        capt_yuv_image = vxCreateImage(obj->context,
                                       sensorParams.sensorInfo.raw_params.width,
                                       sensorParams.sensorInfo.raw_params.height,
                                       VX_DF_IMAGE_UYVY);

        /* allocate Input and Output refs, multiple refs created to allow
         * pipelining of graph */
        for (buf_id = 0; buf_id < obj->num_cap_buf; buf_id++)
        {
            if (status == VX_SUCCESS)
            {
                obj->cap_frames[buf_id] = vxCreateObjectArray(obj->context,
                                                              (vx_reference)capt_yuv_image,
                                                              num_capture_frames);
                status = vxGetStatus((vx_reference)obj->cap_frames[buf_id]);
            }
        }
    }

    /* Config initialization */
    tivx_capture_params_init(&local_capture_config);
    local_capture_config.timeout        = 33;
    local_capture_config.timeoutInitial = 500;
    local_capture_config.numInst        = 1U; /* Configure a single CSI instance */
    local_capture_config.numCh          = 1U; /* Single cam: only 1 channel enabled */
    {
        vx_uint8 ch, id, lane, q;
        for (id = 0; id < local_capture_config.numInst; id++)
        {
            local_capture_config.instId[id] = obj->sensor_csi_instance;
            local_capture_config.instCfg[id].enableCsiv2p0Support = (uint32_t)vx_true_e;
            local_capture_config.instCfg[id].numDataLanes  = sensorParams.sensorInfo.numDataLanes;
            local_capture_config.instCfg[id].laneBandSpeed = sensorParams.sensorInfo.csi_laneBandSpeed;
            for (lane = 0; lane < local_capture_config.instCfg[id].numDataLanes; lane++)
            {
                /* Lane map is 1-based. */
                local_capture_config.instCfg[id].dataLanesMap[lane] = lane + 1;
            }
            for (q = 0; q < NUM_CAPT_CHANNELS; q++)
            {
                ch = (NUM_CAPT_CHANNELS - 1) * id + q;
                local_capture_config.chVcNum[ch]   = q;
                local_capture_config.chInstMap[ch] = obj->sensor_csi_instance;
            }
        }
    }
    /* Route the single enabled channel to the selected CSI instance and the
     * virtual channel of the selected camera. */
    local_capture_config.chInstMap[0] = obj->sensor_csi_instance;
    local_capture_config.chVcNum[0]   = obj->selectedCam % NUM_CAPT_CHANNELS;

    printf("set sensor csi instance [%d] in single sensor app config!!\n",
           obj->sensor_csi_instance);

    capture_config = vxCreateUserDataObject(obj->context,
                                            capture_user_data_object_name,
                                            sizeof(tivx_capture_params_t),
                                            &local_capture_config);
    APP_PRINTF("capture_config = 0x%p \n", capture_config);

    APP_PRINTF("Creating capture node \n");
    obj->capture_node = tivxCaptureNode(obj->graph, capture_config, obj->cap_frames[0]);
    APP_PRINTF("obj->capture_node = 0x%p \n", obj->capture_node);
    if (status == VX_SUCCESS)
    {
        /* Node holds its own reference to the config object. */
        status = vxReleaseUserDataObject(&capture_config);
    }
    if (status == VX_SUCCESS)
    {
        status = vxSetNodeTarget(obj->capture_node, VX_TARGET_STRING, TIVX_TARGET_CAPTURE2);
    }

    if (vx_false_e == yuv_cam_input)
    {
        obj->raw = (tivx_raw_image)vxGetObjectArrayItem(obj->cap_frames[0], 0);
        if (status == VX_SUCCESS)
        {
            /* Exemplar no longer needed once the object arrays exist. */
            status = tivxReleaseRawImage(&raw_image);
        }
#ifdef _APP_DEBUG_
        obj->fs_test_raw_image = tivxCreateRawImage(obj->context,
                                                    &(sensorParams.sensorInfo.raw_params));
        if (NULL != obj->fs_test_raw_image)
        {
            if (status == VX_SUCCESS)
            {
                status = read_test_image_raw(NULL, obj->fs_test_raw_image, obj->test_mode);
            }
            else
            {
                status = tivxReleaseRawImage(&obj->fs_test_raw_image);
                obj->fs_test_raw_image = NULL;
            }
        }
#endif /* _APP_DEBUG_ */

        status = app_create_viss(obj, sensor_wdr_enabled);
        if (VX_SUCCESS == status)
        {
            vxSetNodeTarget(obj->node_viss, VX_TARGET_STRING, TIVX_TARGET_VPAC_VISS1);
            tivxSetNodeParameterNumBufByIndex(obj->node_viss, 6u, obj->num_cap_buf);
        }
        else
        {
            printf("app_create_viss failed \n");
            return VX_FAILURE;
        }

        status = app_create_aewb(obj, sensor_wdr_enabled);
        if (VX_SUCCESS != status)
        {
            printf("app_create_aewb failed \n");
            return VX_FAILURE;
        }

        viss_out_image = obj->y8_r8_c2;
        ldc_in_image = viss_out_image;
    }
    else
    {
        obj->capt_yuv_image = (vx_image)vxGetObjectArrayItem(obj->cap_frames[0], 0);
        ldc_in_image = obj->capt_yuv_image;
        vxReleaseImage(&capt_yuv_image);
    }

    if (obj->ldc_enable)
    {
        printf("Enabling LDC \n");
        status = app_create_ldc(obj, ldc_in_image);
        if (status == VX_SUCCESS)
        {
            status = vxSetNodeTarget(obj->node_ldc, VX_TARGET_STRING, TIVX_TARGET_VPAC_LDC1);
        }
        else
        {
            printf("app_create_ldc returned error \n");
            return status;
        }
        if (status == VX_SUCCESS)
        {
            status = tivxSetNodeParameterNumBufByIndex(obj->node_ldc, 7u, obj->num_cap_buf);
        }

        /* Check if resizing is needed for display */
        if ((obj->table_width >= obj->display_params.outWidth) &&
            (obj->table_height >= obj->display_params.outHeight))
        {
            vx_uint16 scaler_out_w, scaler_out_h;
            obj->scaler_enable = vx_true_e;
            appIssGetResizeParams(obj->table_width, obj->table_height,
                                  obj->display_params.outWidth,
                                  obj->display_params.outHeight,
                                  &scaler_out_w, &scaler_out_h);
            obj->scaler_out_img = vxCreateImage(obj->context, scaler_out_w,
                                                scaler_out_h, VX_DF_IMAGE_NV12);
            obj->scalerNode = tivxVpacMscScaleNode(obj->graph, obj->ldc_out,
                                                   obj->scaler_out_img,
                                                   NULL, NULL, NULL, NULL);
            if (status == VX_SUCCESS)
            {
                status = tivxSetNodeParameterNumBufByIndex(obj->scalerNode, 1u, obj->num_cap_buf);
            }
            if (status == VX_SUCCESS)
            {
                /* Fix: the non-LDC path assigns the MSC target explicitly;
                 * do the same here so the scaler node is not left on a
                 * default target. */
                status = vxSetNodeTarget(obj->scalerNode, VX_TARGET_STRING, TIVX_TARGET_VPAC_MSC1);
            }
            obj->display_params.outHeight = scaler_out_h;
            obj->display_params.outWidth  = scaler_out_w;
            obj->display_image = obj->scaler_out_img;
        }
        else /* No resize needed */
        {
            obj->scaler_enable = vx_false_e;
            obj->display_image = obj->ldc_out;
            /* MSC can only downsize. If ldc resolution is lower, display
             * resolution must be set accordingly */
            obj->display_params.outWidth  = obj->table_width;
            obj->display_params.outHeight = obj->table_height;
        }
    }
    else /* ldc_enable */
    {
        if (NULL != obj->capt_yuv_image)
        {
            /* MSC does not support YUV422 input */
            obj->scaler_enable = vx_false_e;
        }
        else
        {
            if ((image_width >= obj->display_params.outWidth) &&
                (image_height >= obj->display_params.outHeight))
            {
                obj->scaler_enable = vx_true_e;
            }
            else
            {
                obj->scaler_enable = vx_false_e;
                /* MSC can only downsize. If viss resolution is lower, display
                 * resolution must be set accordingly */
                obj->display_params.outWidth  = image_width;
                obj->display_params.outHeight = image_height;
            }
        }

        if (vx_true_e == obj->scaler_enable)
        {
            vx_uint16 scaler_out_w, scaler_out_h;
            appIssGetResizeParams(image_width, image_height,
                                  obj->display_params.outWidth,
                                  obj->display_params.outHeight,
                                  &scaler_out_w, &scaler_out_h);
            obj->scaler_out_img = vxCreateImage(obj->context, scaler_out_w,
                                                scaler_out_h, VX_DF_IMAGE_NV12);
            obj->scalerNode = tivxVpacMscScaleNode(obj->graph, ldc_in_image,
                                                   obj->scaler_out_img,
                                                   NULL, NULL, NULL, NULL);
            if (status == VX_SUCCESS)
            {
                status = tivxSetNodeParameterNumBufByIndex(obj->scalerNode, 1u, obj->num_cap_buf);
            }
            if (status == VX_SUCCESS)
            {
                status = vxSetNodeTarget(obj->scalerNode, VX_TARGET_STRING, TIVX_TARGET_VPAC_MSC1);
            }
            obj->display_params.outHeight = scaler_out_h;
            obj->display_params.outWidth  = scaler_out_w;
            obj->display_image = obj->scaler_out_img;
        }
        else
        {
            obj->display_image = ldc_in_image;
        }
    }

    if (NULL == obj->display_image)
    {
        printf("Error : Display input is uninitialized \n");
        return VX_FAILURE;
    }
    else
    {
        /* Center the output on a 1920x1080 display. */
        obj->display_params.posX = (1920U - obj->display_params.outWidth) / 2;
        obj->display_params.posY = (1080U - obj->display_params.outHeight) / 2;
        obj->display_param_obj = vxCreateUserDataObject(obj->context,
                                                        "tivx_display_params_t",
                                                        sizeof(tivx_display_params_t),
                                                        &obj->display_params);
        obj->displayNode = tivxDisplayNode(obj->graph, obj->display_param_obj,
                                           obj->display_image);
    }
    if (status == VX_SUCCESS)
    {
        status = vxSetNodeTarget(obj->displayNode, VX_TARGET_STRING, TIVX_TARGET_DISPLAY1);
        APP_PRINTF("Display Set Target done\n");
    }

    int graph_parameter_num = 0;

    /* input @ node index 1, becomes graph parameter 0 */
    add_graph_parameter_by_node_index(obj->graph, obj->capture_node, 1);

    /* set graph schedule config such that graph parameter @ index 0 is
     * enqueuable */
    graph_parameters_queue_params_list[graph_parameter_num].graph_parameter_index = graph_parameter_num;
    graph_parameters_queue_params_list[graph_parameter_num].refs_list_size = obj->num_cap_buf;
    graph_parameters_queue_params_list[graph_parameter_num].refs_list = (vx_reference *)&(obj->cap_frames[0]);
    graph_parameter_num++;

    if (obj->test_mode == 1)
    {
        add_graph_parameter_by_node_index(obj->graph, obj->displayNode, 0);

        /* set graph schedule config such that graph parameter @ index 0 is
         * enqueuable */
        graph_parameters_queue_params_list[graph_parameter_num].graph_parameter_index = graph_parameter_num;
        graph_parameters_queue_params_list[graph_parameter_num].refs_list_size = 1;
        graph_parameters_queue_params_list[graph_parameter_num].refs_list = (vx_reference *)&(obj->display_image);
        graph_parameter_num++;
    }

    if (status == VX_SUCCESS)
    {
        status = tivxSetGraphPipelineDepth(obj->graph, obj->num_cap_buf);
    }

    /* Schedule mode auto is used, here we dont need to call vxScheduleGraph
     * Graph gets scheduled automatically as refs are enqueued to it */
    if (status == VX_SUCCESS)
    {
        status = vxSetGraphScheduleConfig(obj->graph,
                                          VX_GRAPH_SCHEDULE_MODE_QUEUE_AUTO,
                                          params_list_depth,
                                          graph_parameters_queue_params_list);
    }
    APP_PRINTF("vxSetGraphScheduleConfig done\n");

    if (status == VX_SUCCESS)
    {
        status = vxVerifyGraph(obj->graph);
    }

    if (vx_true_e == obj->scaler_enable)
    {
        tivx_vpac_msc_coefficients_t sc_coeffs;
        vx_reference refs[1];

        printf("Scaler is enabled\n");
        tivx_vpac_msc_coefficients_params_init(&sc_coeffs, VX_INTERPOLATION_BILINEAR);

        obj->sc_coeff_obj = vxCreateUserDataObject(obj->context,
                                                   "tivx_vpac_msc_coefficients_t",
                                                   sizeof(tivx_vpac_msc_coefficients_t),
                                                   NULL);
        if (status == VX_SUCCESS)
        {
            status = vxCopyUserDataObject(obj->sc_coeff_obj, 0,
                                          sizeof(tivx_vpac_msc_coefficients_t),
                                          &sc_coeffs, VX_WRITE_ONLY,
                                          VX_MEMORY_TYPE_HOST);
        }
        refs[0] = (vx_reference)obj->sc_coeff_obj;
        if (status == VX_SUCCESS)
        {
            status = tivxNodeSendCommand(obj->scalerNode, 0u,
                                         TIVX_VPAC_MSC_CMD_SET_COEFF, refs, 1u);
        }
    }
    else
    {
        printf("Scaler is disabled\n");
    }

    if (status == VX_SUCCESS)
    {
        status = tivxExportGraphToDot(obj->graph, ".", "single_cam_graph");
    }

#ifdef _APP_DEBUG_
    if (vx_false_e == yuv_cam_input)
    {
        if ((NULL != obj->fs_test_raw_image) && (NULL != obj->capture_node) &&
            (status == VX_SUCCESS))
        {
            status = app_send_test_frame(obj->capture_node, obj->fs_test_raw_image);
        }
    }
#endif /* _APP_DEBUG_ */

    APP_PRINTF("app_create_graph exiting\n");
    return status;
}