Tool/software:
Hi,
SDK: 09.01.00
EVM: AM62A SK EVM
We have integrated the image sensor with the AM62A EVM and everything is working fine (we are able to get the stream and to change the resolution and bit format). After that, the customer wanted us to integrate changes so that the embedded metadata sent by the sensor can also be received. We made the following changes to integrate the embedded metadata:
static void sensor_init_formats(struct v4l2_subdev_state *state)
{
        int i;
        struct v4l2_mbus_framefmt *format;

        /* Stream 0 carries the image, stream 1 carries the embedded data. */
        for (i = 0; i < 2; ++i) {
                format = v4l2_subdev_state_get_stream_format(state, 0, i);
                format->width = 1920;
                if (i == 0) {
                        format->height = 1200;
                        format->code = MEDIA_BUS_FMT_SRGGB10_1X10;
                } else {
                        format->height = 5;
                        format->code = MEDIA_BUS_FMT_SGRBG10_1X10;
                }
                format->field = V4L2_FIELD_NONE;
                format->colorspace = V4L2_COLORSPACE_DEFAULT;
        }
}
static int _sensor_set_routing(struct v4l2_subdev *sd,
                               struct v4l2_subdev_state *state)
{
        int ret;
        struct v4l2_subdev_route routes[] = {
                {
                        .source_pad = 0,
                        .source_stream = 0,
                        .flags = V4L2_SUBDEV_ROUTE_FL_ACTIVE,
                },
                {
                        .source_pad = 0,
                        .source_stream = 1,
                        .flags = V4L2_SUBDEV_ROUTE_FL_ACTIVE,
                },
        };
        struct v4l2_subdev_krouting routing = {
                .num_routes = ARRAY_SIZE(routes),
                .routes = routes,
        };
        struct i2c_client *client = v4l2_get_subdevdata(sd);

        dev_info(&client->dev, "--------- In %s --------\n", __func__);

        ret = v4l2_subdev_set_routing(sd, state, &routing);
        if (ret < 0)
                return ret;

        sensor_init_formats(state);

        dev_info(&client->dev, "--------- Out %s --------\n", __func__);

        return ret;
}
static int sensor_get_frame_desc(struct v4l2_subdev *sd, unsigned int pad,
                                 struct v4l2_mbus_frame_desc *fd)
{
        u32 bpp;
        int ret = 0;
        unsigned int i;
        struct v4l2_subdev_state *state;
        struct v4l2_mbus_framefmt *fmt;
        struct i2c_client *client = v4l2_get_subdevdata(sd);

        dev_info(&client->dev, "--------- In %s --------\n", __func__);

        if (pad != 0)
                return -EINVAL;

        state = v4l2_subdev_lock_and_get_active_state(sd);

        memset(fd, 0, sizeof(*fd));
        fd->type = V4L2_MBUS_FRAME_DESC_TYPE_CSI2;

        /* One entry per stream; both use virtual channel 0, distinguished by data type. */
        bpp = 16;
        for (i = 0; i < 2; ++i) {
                fmt = v4l2_subdev_state_get_stream_format(state, 0, i);
                if (!fmt) {
                        ret = -EPIPE;
                        v4l2_subdev_unlock_state(state);
                        return ret;
                }
                fd->entry[fd->num_entries].stream = i;
                fd->entry[fd->num_entries].flags = V4L2_MBUS_FRAME_DESC_FL_LEN_MAX;
                fd->entry[fd->num_entries].pixelcode = fmt->code;
                fd->entry[fd->num_entries].bus.csi2.vc = 0;
                fd->entry[fd->num_entries].length = fmt->width * fmt->height * bpp / 8;
                if (i == 0) {
                        fd->entry[fd->num_entries].bus.csi2.dt = 0x2b; /* RAW10 */
                        dev_info(&client->dev, "width - %d, height - %d\n", fmt->width, fmt->height);
                } else {
                        fd->entry[fd->num_entries].bus.csi2.dt = 0x12; /* embedded 8-bit non-image data */
                        dev_info(&client->dev, "width - %d, height - %d\n", fmt->width, fmt->height);
                }
                fd->num_entries++;
        }

        v4l2_subdev_unlock_state(state);

        dev_info(&client->dev, "--------- Out %s --------\n", __func__);

        return ret;
}
static int sensor_set_routing(struct v4l2_subdev *sd,
                              struct v4l2_subdev_state *state,
                              enum v4l2_subdev_format_whence which,
                              struct v4l2_subdev_krouting *routing)
{
        int ret;
        struct i2c_client *client = v4l2_get_subdevdata(sd);

        if (routing->num_routes == 0 || routing->num_routes > 2)
                return -EINVAL;

        dev_info(&client->dev, "--------- In %s --------\n", __func__);
        dev_info(&client->dev, "--------- Number of routes = %d --------\n", routing->num_routes);

        v4l2_subdev_lock_state(state);
        ret = _sensor_set_routing(sd, state);
        v4l2_subdev_unlock_state(state);

        dev_info(&client->dev, "--------- Out %s --------\n", __func__);

        return ret;
}
static int sensor_init_cfg(struct v4l2_subdev *sd,
                           struct v4l2_subdev_state *state)
{
        int ret;
        struct i2c_client *client = v4l2_get_subdevdata(sd);

        dev_info(&client->dev, "--------- In %s --------\n", __func__);

        ret = _sensor_set_routing(sd, state);

        dev_info(&client->dev, "--------- Out %s --------\n", __func__);
        dev_info(&client->dev, "--------- ret %d --------\n", ret);

        return ret;
}
static const struct v4l2_subdev_pad_ops sensor_subdev_pad_ops = {
        .init_cfg = sensor_init_cfg,
        .enum_mbus_code = sensor_enum_mbus_code,
        .set_fmt = sensor_set_fmt,
        .get_fmt = sensor_get_fmt,
        .set_routing = sensor_set_routing,
        .get_frame_desc = sensor_get_frame_desc,
};
After creating the two separate streams as shown in the code above, we were able to receive image data on one stream and embedded data on the other by assigning a different CSI-2 data type to each stream. So there is no issue receiving the embedded metadata and the image data with this approach.
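(For context, the enum_mbus_code, set_fmt and get_fmt handlers are not included above. Purely as an illustrative sketch, not our exact code, a stream-aware enum_mbus_code under this routing would report a different bus code per stream roughly like this, assuming the streams-enabled API where v4l2_subdev_mbus_code_enum carries a stream field:)
/*
 * Illustrative sketch only (not taken verbatim from our driver): with the
 * two-stream routing above, each stream advertises exactly one bus code.
 */
static int sensor_enum_mbus_code(struct v4l2_subdev *sd,
                                 struct v4l2_subdev_state *state,
                                 struct v4l2_subdev_mbus_code_enum *code)
{
        if (code->pad != 0 || code->index > 0)
                return -EINVAL;

        code->code = (code->stream == 0) ? MEDIA_BUS_FMT_SRGGB10_1X10
                                         : MEDIA_BUS_FMT_SGRBG10_1X10;

        return 0;
}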
Our sensor supports 1920x1200 (10-bit, 8-bit) and 960x600 (10-bit, 8-bit) modes. With the embedded-metadata changes in the driver, I am no longer able to switch to the other resolution and receive image data at that resolution. Using the media-ctl command I can set the resolution, but when I then try to stream at that resolution with a gst-launch command, I get a streaming error.
Is it possible that the image stream format is being fixed by the two-stream configuration done in the driver above, so that other resolutions are not allowed to be set?
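To illustrate what I am asking, here is a rough sketch (not our actual set_fmt; it only reuses the same v4l2_subdev_state_get_stream_format helper as above, and the mode clamping values are our assumption) of how I would expect a stream-aware set_fmt to propagate a user-requested resolution to the image stream while keeping the embedded-data stream fixed:
/*
 * Hedged sketch, not our real handler: only stream 0 (the image stream)
 * accepts a new size; the embedded-data stream keeps its fixed 1920x5 format.
 */
static int sensor_set_fmt(struct v4l2_subdev *sd,
                          struct v4l2_subdev_state *state,
                          struct v4l2_subdev_format *fmt)
{
        struct v4l2_mbus_framefmt *format;

        /* The embedded-data stream is read-only: just report its format. */
        if (fmt->stream != 0)
                return v4l2_subdev_get_fmt(sd, state, fmt);

        /* Clamp the request to the two sensor modes (assumed values). */
        if (fmt->format.width <= 960) {
                fmt->format.width = 960;
                fmt->format.height = 600;
        } else {
                fmt->format.width = 1920;
                fmt->format.height = 1200;
        }
        fmt->format.field = V4L2_FIELD_NONE;

        format = v4l2_subdev_state_get_stream_format(state, fmt->pad,
                                                     fmt->stream);
        if (!format)
                return -EINVAL;

        *format = fmt->format;

        if (fmt->which == V4L2_SUBDEV_FORMAT_ACTIVE) {
                /* Reprogram the sensor mode registers here. */
        }

        return 0;
}
In this sketch the metadata stream keeps its fixed 1920x5 size and only stream 0 follows the requested mode; is that the expected way to handle resolution changes with the two-stream routing, or does the configuration above fix the format?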
Regards,
Jay