Part Number: TDA4VM
Hi Brijesh,
We want to convert the YUV422 UYVY of capture into RGB565. How should we implement this conversion node? Can you provide us with a reference patch?
Looking forward for your reply, thank you
Regards,
Barry
This thread has been locked.
If you have a related question, please click the "Ask a related question" button in the top right corner. The newly created question will be automatically linked to this question.
Part Number: TDA4VM
Hi Brijesh,
We want to convert the YUV422 UYVY of capture into RGB565. How should we implement this conversion node? Can you provide us with a reference patch?
Looking forward for your reply, thank you
Regards,
Barry
Hi TI expert,
Are there any supported nodes for converting yuv422 to rgb565? Please help us confirm.
How do we need to implement this function?
Looking forward to reply, thank you
Regards,
Barry
Hi Barry,
Yes, you can use DSS M2M node for this purpose. DSS does support input as YUV422 interleaved and output RGB565.
There isn't any example in the SDK for this exact usecase, but node is supported, so we just need to provide input and output image with the correct format.
Regards,
Brijesh
Hi Brijesh,
thank you for your reply,
We are doing:
PDK-RTOS:processor-sdk-rtos-j721e-evm-08_06_00_12
PDK-LINUX:processor-sdk-linux-08_06_00
After I used m2m node to set the data type to TIVX_DF_IMAGE_RGB565 in run_app_multi_cam.sh, the error was reported as follows:
apps/basic_demos/app_multi_cam/main.c
root@j7-evm:/opt/vision_apps# ./run_app_multi_cam.sh
log:
initializing sensor GW_AR0233_UYVY
Sensor init done!
[FMT-CONV-MODULE]: Num Channels 1 WxH 1920x1536
FMT Conversion init done!
Display init done!
App Init Done!
Graph create done!
Capture graph done!
Fmt Conversion graph done!
Display graph done!
39.219062 s: VX_ZONE_ERROR:[tivxSetNodeParameterNumBufByIndex:2319] Invalid parameters
App Create Graph Done!
39.219262 s: VX_ZONE_ERROR:[tivxAddKernelDisplayM2MValidate:179] 'output' should be an image of type:
VX_DF_IMAGE_RGB or VX_DF_IMAGE_RGBX or VX_DF_IMAGE_UYVY or VX_DF_IMAGE_NV12
39.219273 s: VX_ZONE_ERROR:[ownGraphNodeKernelValidate:531] node kernel validate failed for kernel c1
39.219279 s: VX_ZONE_ERROR:[vxVerifyGraph:1941] Node kernel Validate failed
39.219285 s: VX_ZONE_ERROR:[vxVerifyGraph:2109] Graph verify failed
patch:
diff --git a/PDK0806_RTOS/vision_apps/apps/basic_demos/app_multi_cam/main.c b/PDK0806_RTOS/vision_apps/apps/basic_demos/app_multi_cam/main.c
index b4c8b13501..5d11f5bb09 100755
--- a/PDK0806_RTOS/vision_apps/apps/basic_demos/app_multi_cam/main.c
+++ b/PDK0806_RTOS/vision_apps/apps/basic_demos/app_multi_cam/main.c
@@ -76,6 +76,7 @@
#include "app_ldc_module.h"
#include "app_img_mosaic_module.h"
#include "app_display_module.h"
+#include "app_fmt_cnv_module.h"
#include "app_test.h"
#define CAPTURE_BUFFER_Q_DEPTH (4)
@@ -95,6 +96,7 @@ typedef struct {
LDCObj ldcObj1;
ImgMosaicObj imgMosaicObj;
DisplayObj displayObj;
+ FmtConvObj fmtConvObj;
vx_char output_file_path[APP_MAX_FILE_PATH];
@@ -155,7 +157,7 @@ static void app_pipeline_params_defaults(AppObj *obj);
static void add_graph_parameter_by_node_index(vx_graph graph, vx_node node, vx_uint32 node_parameter_index);
static vx_int32 calc_grid_size(vx_uint32 ch);
static void set_img_mosaic_params(ImgMosaicObj *imgMosaicObj, vx_uint32 in_width, vx_uint32 in_height, vx_int32 numCh, ObjArrSplitObj *objArrSplitObj, int32_t enable_split_graph);
-static void app_draw_graphics(Draw2D_Handle *handle, Draw2D_BufInfo *draw2dBufInfo, uint32_t update_type);
+// static void app_draw_graphics(Draw2D_Handle *handle, Draw2D_BufInfo *draw2dBufInfo, uint32_t update_type);
static void app_show_usage(vx_int32 argc, vx_char* argv[])
{
@@ -654,6 +656,7 @@ vx_int32 app_multi_cam_main(vx_int32 argc, vx_char* argv[])
/*Config parameter reading*/
app_parse_cmd_line_args(obj, argc, argv);
+ obj->sensorObj.ch_mask = 0x30; // enable channels 4 and 5 (bits 4-5 of the 0~7 channel mask)
/* Querry sensor parameters */
status = app_querry_sensor(&obj->sensorObj);
if(1 == obj->sensorObj.sensor_out_format)
@@ -740,7 +743,7 @@ vx_int32 app_multi_cam_main(vx_int32 argc, vx_char* argv[])
static vx_status app_init(AppObj *obj)
{
vx_status status = VX_SUCCESS;
- app_grpx_init_prms_t grpx_prms;
+ // app_grpx_init_prms_t grpx_prms;
if (1U == obj->enable_configure_hwa_freq)
{
@@ -791,6 +794,14 @@ static vx_status app_init(AppObj *obj)
status = app_init_capture(obj->context, &obj->captureObj, &obj->sensorObj, "capture_obj", CAPTURE_BUFFER_Q_DEPTH);
}
+ if (status == VX_SUCCESS)
+ {
+ obj->fmtConvObj.color_format = TIVX_DF_IMAGE_RGB565;
+ status = app_init_fmt_conv(obj->context,
+ &obj->fmtConvObj, &obj->sensorObj, "fmt_conv_obj");
+ APP_PRINTF("FMT Conversion init done!\n");
+ }
+
if( (1 == obj->enable_split_graph) && (status == VX_SUCCESS) )
{
obj->objArrSplitObj.input_arr = obj->captureObj.raw_image_arr[0];
@@ -844,9 +855,9 @@ static vx_status app_init(AppObj *obj)
APP_PRINTF("Display init done!\n");
}
- appGrpxInitParamsInit(&grpx_prms, obj->context);
- grpx_prms.draw_callback = app_draw_graphics;
- appGrpxInit(&grpx_prms);
+ // appGrpxInitParamsInit(&grpx_prms, obj->context);
+ // grpx_prms.draw_callback = app_draw_graphics;
+ // appGrpxInit(&grpx_prms);
appPerfPointSetName(&obj->total_perf , "TOTAL");
appPerfPointSetName(&obj->fileio_perf, "FILEIO");
@@ -861,6 +872,9 @@ static void app_deinit(AppObj *obj)
app_deinit_capture(&obj->captureObj, CAPTURE_BUFFER_Q_DEPTH);
APP_PRINTF("Capture deinit done!\n");
+ app_deinit_fmt_conv(&obj->fmtConvObj);
+ APP_PRINTF("FMT Conversion deinit done!\n");
+
if(1 == obj->enable_split_graph)
{
app_deinit_obj_arr_split(&obj->objArrSplitObj);
@@ -910,7 +924,7 @@ static void app_deinit(AppObj *obj)
app_deinit_display(&obj->displayObj);
APP_PRINTF("Display deinit done!\n");
- appGrpxDeInit();
+ // appGrpxDeInit();
tivxHwaUnLoadKernels(obj->context);
tivxImagingUnLoadKernels(obj->context);
@@ -926,6 +940,9 @@ static void app_delete_graph(AppObj *obj)
app_delete_capture(&obj->captureObj);
APP_PRINTF("Capture delete done!\n");
+ app_delete_fmt_conv(&obj->fmtConvObj);
+ APP_PRINTF("Fmt Conversion delete done!\n");
+
if(1 == obj->enable_split_graph)
{
app_delete_obj_arr_split(&obj->objArrSplitObj);
@@ -988,6 +1005,13 @@ static vx_status app_create_graph(AppObj *obj)
APP_PRINTF("Capture graph done!\n");
}
+ if(status == VX_SUCCESS)
+ {
+ status = app_create_graph_fmt_conv(obj->graph,
+ &obj->fmtConvObj, obj->captureObj.raw_image_arr[0]);
+ APP_PRINTF("Fmt Conversion graph done!\n");
+ }
+
if( (1 == obj->enable_split_graph) && (status == VX_SUCCESS) )
{
status = app_create_graph_obj_arr_split(obj->graph, &obj->objArrSplitObj);
@@ -1128,7 +1152,8 @@ static vx_status app_create_graph(AppObj *obj)
}
else
{
- display_in_image = (vx_image)vxGetObjectArrayItem(obj->captureObj.raw_image_arr[0], 0);
+ // display_in_image = (vx_image)vxGetObjectArrayItem(obj->captureObj.raw_image_arr[0], 0);
+ display_in_image = (vx_image)vxGetObjectArrayItem(obj->fmtConvObj.arr, 0);
}
if(status == VX_SUCCESS)
@@ -1145,6 +1170,7 @@ static vx_status app_create_graph(AppObj *obj)
graph_parameters_queue_params_list[graph_parameter_index].graph_parameter_index = graph_parameter_index;
graph_parameters_queue_params_list[graph_parameter_index].refs_list_size = CAPTURE_BUFFER_Q_DEPTH;
graph_parameters_queue_params_list[graph_parameter_index].refs_list = (vx_reference*)&obj->captureObj.raw_image_arr[0];
+ // graph_parameters_queue_params_list[graph_parameter_index].refs_list = (vx_reference*)&obj->fmtConvObj.arr;
graph_parameter_index++;
if((obj->en_out_img_write == 1) || (obj->test_mode == 1))
@@ -1215,6 +1241,14 @@ static vx_status app_create_graph(AppObj *obj)
APP_PRINTF("Pipeline params setup done!\n");
}
}
+
+ if (status == VX_SUCCESS)
+ {
+ status = tivxSetNodeParameterNumBufByIndex(
+ obj->fmtConvObj.node, 1, APP_BUFFER_Q_DEPTH);
+ status = tivxSetNodeParameterNumBufByIndex(
+ obj->fmtConvObj.node, 2, APP_BUFFER_Q_DEPTH);
+ }
}
return status;
@@ -1392,6 +1426,7 @@ static vx_status app_run_graph(AppObj *obj)
APP_PRINTF("appStartImageSensor returned with status: %d\n", status);
}
+ tivxTaskWaitMsecs(10000);
if(0 == obj->enable_viss)
{
obj->vissObj.en_out_viss_write = 0;
@@ -1497,7 +1532,7 @@ static void app_default_param_set(AppObj *obj)
obj->sensorObj.enable_ldc = 0;
obj->sensorObj.num_cameras_enabled = 1;
- obj->sensorObj.ch_mask = 0x1;
+ obj->sensorObj.ch_mask = 0x30;
obj->sensorObj.usecase_option = APP_SENSOR_FEATURE_CFG_UC0;
}
@@ -1616,17 +1651,17 @@ static void add_graph_parameter_by_node_index(vx_graph graph, vx_node node, vx_u
vxReleaseParameter(&parameter);
}
-static void app_draw_graphics(Draw2D_Handle *handle, Draw2D_BufInfo *draw2dBufInfo, uint32_t update_type)
-{
- appGrpxDrawDefault(handle, draw2dBufInfo, update_type);
+// static void app_draw_graphics(Draw2D_Handle *handle, Draw2D_BufInfo *draw2dBufInfo, uint32_t update_type)
+// {
+// appGrpxDrawDefault(handle, draw2dBufInfo, update_type);
- if(update_type == 0)
- {
- Draw2D_FontPrm sHeading;
+// if(update_type == 0)
+// {
+// Draw2D_FontPrm sHeading;
- sHeading.fontIdx = 4;
- Draw2D_drawString(handle, 700, 5, "Multi Cam Demo", &sHeading);
- }
+// sHeading.fontIdx = 4;
+// Draw2D_drawString(handle, 700, 5, "Multi Cam Demo", &sHeading);
+// }
- return;
-}
+// return;
+// }
Did we miss something?
Looking forward for your reply, thank you
Regards,
Barry
When I add TIVX_DF_IMAGE_RGB565,graph started working.
kernels_j7/hwa/host/vx_display_m2m_host.c:
Regards,
Barry
Hi Barry,
We will have to check if it is really supported in OpenVX node, even if it is not, it should be easy to add in the node, as driver already supports it.
Rgds,
Brijesh
Hi Barry,
I think you have raised similar question on the below ticket, and Nikhil is looking into it, so i will close this ticket. Lets continue the conversation on below ticket.
(+) TDA4VM: CSI1 app_multi_cam - Processors forum - Processors - TI E2E support forums
Regards,
Brijesh
Hi Brijesh,
Another thread never gets updated.
Please tell me how we should modify M2M to convert YUV422 to RGB565?
Looking forward to your reply.
Regards,
Barry
Hi Barry,
Is it possible to get the output image, not jpg, to understand the output?
I see display m2m node does support this format, so it should work.
switch (obj_desc_img->format)
{
case (vx_df_image)TIVX_DF_IMAGE_RGB565:
format->dataFormat = FVID2_DF_BGR16_565;
format->pitch[FVID2_RGB_ADDR_IDX] = (uint32_t)obj_desc_img->imagepatch_addr[0].stride_y;
break;
Regards,
Brijesh
Hi Brijesh,
thank you for your reply.
Our goal is to display it normally on the LCD, but the display we see now is abnormal;
Because the screen is RGB565, we now want to convert the captured YUV422 into RGB565 format for output before DSI output.
When we set the output to RGB565, the resulting display changes, but is still abnormal, such as 2804.tie2e1. zip
Our addition and call to M2M is as shown in the code:
We are on SDK8.6
PDK-RTOS:processor-sdk-rtos-j721e-evm-08_06_00_12
PDK-LINUX:processor-sdk-linux-08_06_00
Add to:
modules/src/app_fmt_cnv_module.c
modules/include/app_fmt_cnv_module.h
1.modules/src/app_fmt_cnv_module.c:
#include "app_fmt_cnv_module.h"
/**
 * Initialize the format-conversion (DSS M2M) module.
 *
 * Creates an exemplar output image in the requested color format
 * (fmtObj->color_format must be set by the caller before this call,
 * e.g. TIVX_DF_IMAGE_RGB565), an object array of such images sized by
 * the number of enabled cameras, and the tivx_display_m2m_params_t
 * user-data object consumed by tivxDisplayM2MNode.
 *
 * NOTE(review): output dimensions are taken from the sensor's
 * raw_params — assumes the capture output has the same WxH; confirm
 * for non-raw (YUV) sensor configurations.
 *
 * @param context   OpenVX context.
 * @param fmtObj    Module state; color_format must already be set.
 * @param sensorObj Queried sensor object (provides channel count and WxH).
 * @param objName   Name recorded into fmtObj->objName for debugging.
 * @return VX_SUCCESS on success, an OpenVX error status otherwise.
 */
vx_status app_init_fmt_conv(
    vx_context context, FmtConvObj *fmtObj, SensorObj *sensorObj, char *objName)
{
    vx_status status = VX_SUCCESS;
    vx_image out_img;

    /* Defensive defaults so deinit/delete are safe even if this init
     * fails part-way through. */
    fmtObj->node    = NULL;
    fmtObj->arr     = NULL;
    fmtObj->m2m_obj = NULL;

    fmtObj->num_ch = sensorObj->num_cameras_enabled;

    /* Record the module name (was previously ignored). */
    snprintf(fmtObj->objName, APP_MODULES_MAX_OBJ_NAME_SIZE, "%s", objName);

    printf("[FMT-CONV-MODULE]: Num Channels %d WxH %dx%d\n", fmtObj->num_ch,
        sensorObj->sensorParams.sensorInfo.raw_params.width,
        sensorObj->sensorParams.sensorInfo.raw_params.height);

    /* Exemplar image used to create the output object array. */
    out_img = vxCreateImage(context,
        sensorObj->sensorParams.sensorInfo.raw_params.width,
        sensorObj->sensorParams.sensorInfo.raw_params.height,
        fmtObj->color_format);
    status = vxGetStatus((vx_reference)out_img);
    if(status == VX_SUCCESS)
    {
        fmtObj->arr = vxCreateObjectArray(context,
            (vx_reference)out_img, fmtObj->num_ch);
        /* The array holds its own references; the exemplar is no longer needed. */
        vxReleaseImage(&out_img);
        status = vxGetStatus((vx_reference)fmtObj->arr);
        if(status != VX_SUCCESS)
        {
            printf("[FMT-CONV-MODULE] Unable to create output array! \n");
        }
        else
        {
            tivx_display_m2m_params_init(&fmtObj->m2m_params);
            fmtObj->m2m_params.instId = 0u;
            /* Only one pipeline is supported */
            fmtObj->m2m_params.numPipe = 1u;
            fmtObj->m2m_params.pipeId[0U] = 3u;
            fmtObj->m2m_params.overlayId = 3u;
            fmtObj->m2m_obj = vxCreateUserDataObject(
                context, "tivx_display_m2m_params_t",
                sizeof(tivx_display_m2m_params_t), &fmtObj->m2m_params);
            status = vxGetStatus((vx_reference)fmtObj->m2m_obj);
            if(status != VX_SUCCESS)
            {
                printf("[FMT-CONV-MODULE] Unable to create m2m object! \n");
            }
        }
    }
    else
    {
        printf("[FMT-CONV-MODULE] Unable to create output image template! \n");
    }
    return status;
}
/**
 * Release the resources created by app_init_fmt_conv().
 *
 * NULL-guarded so it is safe to call even when init failed part-way
 * (the original released the handles unconditionally, which passes an
 * uninitialized/NULL reference to the release functions on the
 * failure path).
 */
void app_deinit_fmt_conv(FmtConvObj *obj)
{
    if(obj->m2m_obj != NULL)
    {
        vxReleaseUserDataObject(&obj->m2m_obj);
    }
    if(obj->arr != NULL)
    {
        vxReleaseObjectArray(&obj->arr);
    }
}
/* Release the format-conversion node created in
 * app_create_graph_fmt_conv(), if one exists. */
void app_delete_fmt_conv(FmtConvObj *obj)
{
    if(NULL != obj->node)
    {
        vxReleaseNode(&obj->node);
    }
}
/**
 * Add the DSS M2M format-conversion node to the graph.
 *
 * Takes element 0 of the capture output array as node input and
 * element 0 of the module's output array as node output, then
 * replicates the node over all channels.
 *
 * Fixes vs. original: `output` was uninitialized when obj->arr was
 * NULL and was still passed to tivxDisplayM2MNode() and
 * vxReleaseImage() — undefined behavior; releases are now guarded and
 * a missing input/output is reported as a failure.
 *
 * @param graph         Graph to add the node to.
 * @param obj           Module state initialized by app_init_fmt_conv().
 * @param input_img_arr Capture output object array (one image per channel).
 * @return VX_SUCCESS on success, an OpenVX error status otherwise.
 */
vx_status app_create_graph_fmt_conv(vx_graph graph, FmtConvObj *obj, vx_object_array input_img_arr)
{
    vx_status status = VX_SUCCESS;
    vx_image input  = NULL;
    vx_image output = NULL;
    /* Parameter 0 (m2m config) is shared; parameters 1 (input image)
     * and 2 (output image) are replicated per channel. */
    vx_bool replicate[] = {vx_false_e, vx_true_e, vx_true_e};

    input = (vx_image)vxGetObjectArrayItem(input_img_arr, 0);
    if(obj->arr != NULL)
    {
        output = (vx_image)vxGetObjectArrayItem(obj->arr, 0);
    }

    if((input != NULL) && (output != NULL))
    {
        obj->node = tivxDisplayM2MNode(graph, obj->m2m_obj, input, output);
        status = vxGetStatus((vx_reference)obj->node);
        if(status == VX_SUCCESS)
        {
            vxSetNodeTarget(obj->node, VX_TARGET_STRING, TIVX_TARGET_DISPLAY_M2M1);
            vxSetReferenceName((vx_reference)obj->node, "FmtConvNode");
            vxReplicateNode(graph, obj->node, replicate, 3);
        }
        else
        {
            printf("[FMT-CONV-MODULE] Unable to create display M2M node! \n");
        }
    }
    else
    {
        status = VX_FAILURE;
        printf("[FMT-CONV-MODULE] Missing input/output image for M2M node! \n");
    }

    if(input != NULL)
    {
        vxReleaseImage(&input);
    }
    if(output != NULL)
    {
        vxReleaseImage(&output);
    }
    return status;
}
2.modules/include/app_fmt_cnv_module.h
/* Include guard renamed: identifiers starting with an underscore
 * followed by an uppercase letter are reserved for the implementation. */
#ifndef APP_FMT_CONV_MODULE_H
#define APP_FMT_CONV_MODULE_H

#include "app_modules.h"
#include "app_sensor_module.h"

/**
 * State of the DSS M2M format-conversion module (e.g. YUV422 UYVY
 * capture converted to RGB565 for the display path).
 */
typedef struct
{
    vx_node node;                     /* Replicated display M2M node. */
    vx_object_array arr;              /* Output images, one per channel. */
    tivx_display_m2m_params_t m2m_params; /* DSS WB pipeline configuration. */
    vx_user_data_object m2m_obj;      /* m2m_params wrapped for the node. */
    vx_char objName[APP_MODULES_MAX_OBJ_NAME_SIZE]; /* Debug name. */
    vx_int32 num_ch;                  /* Number of enabled camera channels. */
    vx_int32 color_format;            /* Output VX df image format; set before init. */
} FmtConvObj;

/* Create output array and M2M config (caller sets fmtObj->color_format first). */
vx_status app_init_fmt_conv(vx_context context, FmtConvObj *fmtObj, SensorObj *sensorObj, char *objName);
/* Release objects created by app_init_fmt_conv(). */
void app_deinit_fmt_conv(FmtConvObj *obj);
/* Release the graph node created by app_create_graph_fmt_conv(). */
void app_delete_fmt_conv(FmtConvObj *obj);
/* Add and replicate the display M2M node on the graph. */
vx_status app_create_graph_fmt_conv(vx_graph graph, FmtConvObj *fmtObj, vx_object_array input_img_arr);

#endif /* APP_FMT_CONV_MODULE_H */
In addition,
we added M2M in
3.apps/basic_demos/app_multi_cam/main.c
--- a/PDK0806_RTOS/vision_apps/apps/basic_demos/app_multi_cam/main.c
+++ b/PDK0806_RTOS/vision_apps/apps/basic_demos/app_multi_cam/main.c
@@ -76,6 +76,7 @@
#include "app_ldc_module.h"
#include "app_img_mosaic_module.h"
#include "app_display_module.h"
+#include "app_fmt_cnv_module.h"
#include "app_test.h"
#define CAPTURE_BUFFER_Q_DEPTH (4)
@@ -95,6 +96,7 @@ typedef struct {
LDCObj ldcObj1;
ImgMosaicObj imgMosaicObj;
DisplayObj displayObj;
+ FmtConvObj fmtConvObj;
vx_char output_file_path[APP_MAX_FILE_PATH];
@@ -155,7 +157,7 @@ static void app_pipeline_params_defaults(AppObj *obj);
static void add_graph_parameter_by_node_index(vx_graph graph, vx_node node, vx_uint32 node_parameter_index);
static vx_int32 calc_grid_size(vx_uint32 ch);
static void set_img_mosaic_params(ImgMosaicObj *imgMosaicObj, vx_uint32 in_width, vx_uint32 in_height, vx_int32 numCh, ObjArrSplitObj *objArrSplitObj, int32_t enable_split_graph);
-static void app_draw_graphics(Draw2D_Handle *handle, Draw2D_BufInfo *draw2dBufInfo, uint32_t update_type);
+// static void app_draw_graphics(Draw2D_Handle *handle, Draw2D_BufInfo *draw2dBufInfo, uint32_t update_type);
static void app_show_usage(vx_int32 argc, vx_char* argv[])
{
@@ -654,6 +656,7 @@ vx_int32 app_multi_cam_main(vx_int32 argc, vx_char* argv[])
/*Config parameter reading*/
app_parse_cmd_line_args(obj, argc, argv);
+ obj->sensorObj.ch_mask = 0x30; //
/* Querry sensor parameters */
status = app_querry_sensor(&obj->sensorObj);
if(1 == obj->sensorObj.sensor_out_format)
@@ -740,7 +743,7 @@ vx_int32 app_multi_cam_main(vx_int32 argc, vx_char* argv[])
static vx_status app_init(AppObj *obj)
{
vx_status status = VX_SUCCESS;
if (1U == obj->enable_configure_hwa_freq)
{
@@ -791,6 +794,14 @@ static vx_status app_init(AppObj *obj)
status = app_init_capture(obj->context, &obj->captureObj, &obj->sensorObj, "capture_obj", CAPTURE_BUFFER_Q_DEPTH);
}
+ if (status == VX_SUCCESS)
+ {
+ obj->fmtConvObj.color_format = TIVX_DF_IMAGE_RGB565;
+ status = app_init_fmt_conv(obj->context,
+ &obj->fmtConvObj, &obj->sensorObj, "fmt_conv_obj");
+ APP_PRINTF("FMT Conversion init done!\n");
+ }
+
if( (1 == obj->enable_split_graph) && (status == VX_SUCCESS) )
{
obj->objArrSplitObj.input_arr = obj->captureObj.raw_image_arr[0];
@@ -844,9 +855,9 @@ static vx_status app_init(AppObj *obj)
APP_PRINTF("Display init done!\n");
}
appPerfPointSetName(&obj->total_perf , "TOTAL");
appPerfPointSetName(&obj->fileio_perf, "FILEIO");
@@ -861,6 +872,9 @@ static void app_deinit(AppObj *obj)
app_deinit_capture(&obj->captureObj, CAPTURE_BUFFER_Q_DEPTH);
APP_PRINTF("Capture deinit done!\n");
+ app_deinit_fmt_conv(&obj->fmtConvObj);
+ APP_PRINTF("FMT Conversion deinit done!\n");
+
if(1 == obj->enable_split_graph)
{
app_deinit_obj_arr_split(&obj->objArrSplitObj);
@@ -910,7 +924,7 @@ static void app_deinit(AppObj *obj)
app_deinit_display(&obj->displayObj);
APP_PRINTF("Display deinit done!\n");
tivxHwaUnLoadKernels(obj->context);
tivxImagingUnLoadKernels(obj->context);
@@ -926,6 +940,9 @@ static void app_delete_graph(AppObj *obj)
app_delete_capture(&obj->captureObj);
APP_PRINTF("Capture delete done!\n");
+ app_delete_fmt_conv(&obj->fmtConvObj);
+ APP_PRINTF("Fmt Conversion delete done!\n");
+
if(1 == obj->enable_split_graph)
{
app_delete_obj_arr_split(&obj->objArrSplitObj);
@@ -988,6 +1005,13 @@ static vx_status app_create_graph(AppObj *obj)
APP_PRINTF("Capture graph done!\n");
}
+ if(status == VX_SUCCESS)
+ {
+ status = app_create_graph_fmt_conv(obj->graph,
+ &obj->fmtConvObj, obj->captureObj.raw_image_arr[0]);
+ APP_PRINTF("Fmt Conversion graph done!\n");
+ }
+
if( (1 == obj->enable_split_graph) && (status == VX_SUCCESS) )
{
status = app_create_graph_obj_arr_split(obj->graph, &obj->objArrSplitObj);
@@ -1128,7 +1152,8 @@ static vx_status app_create_graph(AppObj *obj)
}
else
{
- display_in_image = (vx_image)vxGetObjectArrayItem(obj->captureObj.raw_image_arr[0], 0);
+ // display_in_image = (vx_image)vxGetObjectArrayItem(obj->captureObj.raw_image_arr[0], 0);
+ display_in_image = (vx_image)vxGetObjectArrayItem(obj->fmtConvObj.arr, 0);
}
if(status == VX_SUCCESS)
@@ -1145,6 +1170,7 @@ static vx_status app_create_graph(AppObj *obj)
graph_parameters_queue_params_list[graph_parameter_index].graph_parameter_index = graph_parameter_index;
graph_parameters_queue_params_list[graph_parameter_index].refs_list_size = CAPTURE_BUFFER_Q_DEPTH;
graph_parameters_queue_params_list[graph_parameter_index].refs_list = (vx_reference*)&obj->captureObj.raw_image_arr[0];
+ // graph_parameters_queue_params_list[graph_parameter_index].refs_list = (vx_reference*)&obj->fmtConvObj.arr;
graph_parameter_index++;
if((obj->en_out_img_write == 1) || (obj->test_mode == 1))
@@ -1215,6 +1241,14 @@ static vx_status app_create_graph(AppObj *obj)
APP_PRINTF("Pipeline params setup done!\n");
}
}
+
+ if (status == VX_SUCCESS)
+ {
+ // status = tivxSetNodeParameterNumBufByIndex(
+ // obj->fmtConvObj.node, 1, APP_BUFFER_Q_DEPTH);
+ status = tivxSetNodeParameterNumBufByIndex(
+ obj->fmtConvObj.node, 2, APP_BUFFER_Q_DEPTH);
+ }
}
return status;
@@ -1392,6 +1426,7 @@ static vx_status app_run_graph(AppObj *obj)
APP_PRINTF("appStartImageSensor returned with status: %d\n", status);
}
+ tivxTaskWaitMsecs(10000);
if(0 == obj->enable_viss)
{
obj->vissObj.en_out_viss_write = 0;
@@ -1408,8 +1443,11 @@ static vx_status app_run_graph(AppObj *obj)
for(frame_id = 0; frame_id < obj->num_frames_to_run; frame_id++)
{
+ printf("write_file:%d,en_out_capture_write:%d,line:%d \n", obj->write_file,obj->captureObj.en_out_capture_write,__LINE__);
+
if(obj->write_file == 1)
{
+ //obj->captureObj.en_out_capture_write = 1;
if((obj->captureObj.en_out_capture_write == 1) && (status == VX_SUCCESS))
{
status = app_send_cmd_capture_write_node(&obj->captureObj, frame_id, obj->num_frames_to_write, obj->num_frames_to_skip);
@@ -1497,7 +1535,7 @@ static void app_default_param_set(AppObj *obj)
obj->sensorObj.enable_ldc = 0;
obj->sensorObj.num_cameras_enabled = 1;
- obj->sensorObj.ch_mask = 0x1;
+ obj->sensorObj.ch_mask = 0x30;
obj->sensorObj.usecase_option = APP_SENSOR_FEATURE_CFG_UC0;
}
@@ -1616,17 +1654,17 @@ static void add_graph_parameter_by_node_index(vx_graph graph, vx_node node, vx_u
vxReleaseParameter(&parameter);
}
So can you tell me what's wrong? Thanks
Regards,
Barry
Hi Barry,
The above patch looks fine to me.
After running the graph, can you please share the value at the register offset 0x04AF0020? This will tell us if the WB pipeline is configured correctly.
Regards,
brijesh
Hi,
Let us close this thread and continue in the below thread to avoid duplication.
(+) TDA4VM: CSI1 app_multi_cam - Processors forum - Processors - TI E2E support forums
Regards,
Nikhil