Tool/software:
The code is as follows:
#include "video_output_v4l2_encode.h"
#include "video_bottom.h"
#include "video_output_interface.h"
#include "tivx/tivxcamera.h"
#include "video_image_to_file.h"
#include "video_output_main.h"
#include "tivx/tivxcamera.h"
#define DEBUG_MEMORY
#define APP_PIPELINE_DEPTH (7)
//30ns * 14 = 420ns (NOTE: macro below is named ICON_TIMER_400ns but the product is 420ns — confirm the intended period)
#define ICON_TIMER_400ns (14)
#define ASSERT_ERROR_CODE(result) { \
if(result) \
{ \
return false; \
} \
}
#define ASSERT_ERROR_BOOL(result) { \
if(!result) \
{ \
return false; \
} \
}
//extern std::mutex mutex_;
//extern std::condition_variable condition_;
extern bool predicate_;
extern bool fdex_Init_ret_;
int32_t fdex_Init();
extern AppObj gAppObj111;
extern std::atomic<bool> gEdrRunFlag ;
#define DEBUG_DMABUF
#define TIVX_UTILS_MAX(a, b) ((a) > (b) ? (a) : (b))
#define VX_IPC_MAX_VX_PLANES \
(TIVX_UTILS_MAX(TIVX_UTILS_MAX(TIVX_IMAGE_MAX_PLANES*\
TIVX_PYRAMID_MAX_LEVEL_OBJECTS,\
TIVX_CONTEXT_MAX_TENSOR_DIMS),\
TIVX_RAW_IMAGE_MAX_EXPOSURES))
/* For every plane of the image behind `ref`, record the DMA-buf fd, line
 * pitch, plane size and buffer offset into the caller-supplied arrays.
 * `count` is the capacity of those arrays.
 * Returns the number of planes on success, -1 on failure. */
int getImageDmaFd(vx_reference ref, vx_int32 *fd, vx_uint32 *pitch, vx_uint64 *size, vx_uint32 *offset, vx_uint32 count)
{
    vx_image image = (vx_image)ref;
    vx_uint32 width = 0;
    vx_uint32 height = 0;
    vx_size num_planes = 0;
    vx_df_image format;
    vxQueryImage(image, VX_IMAGE_WIDTH, &width, sizeof(vx_uint32));
    vxQueryImage(image, VX_IMAGE_HEIGHT, &height, sizeof(vx_uint32));
    vxQueryImage(image, VX_IMAGE_PLANES, &num_planes, sizeof(vx_size));
    vxQueryImage(image, VX_IMAGE_FORMAT, &format, sizeof(vx_df_image));
    if (num_planes > count) {
        APP_LOG.LogError() << "Count < number of planes\n";
        return -1;
    }
    /* Full-image rectangle; plane sub-sampling is handled by the patch
     * addressing (step_x/step_y) returned by vxMapImagePatch(). */
    vx_rectangle_t rect;
    rect.start_x = 0;
    rect.start_y = 0;
    rect.end_x = width;
    rect.end_y = height;
    for (vx_uint32 plane = 0; plane < num_planes; plane++)
    {
        vx_imagepatch_addressing_t addr;
        vx_map_id map_id;
        void *ptr = NULL;
        vx_status rc = vxMapImagePatch(image,
                                       &rect,
                                       plane,
                                       &map_id,
                                       &addr,
                                       &ptr,
                                       VX_WRITE_ONLY,
                                       VX_MEMORY_TYPE_HOST,
                                       VX_NOGAP_X);
        if (VX_SUCCESS != rc) {
            APP_LOG.LogError() << "Map Image failed\n";
            return -1;
        }
        pitch[plane] = addr.stride_y;
        /* Plane byte size derived from the mapped patch addressing. */
        size[plane] = (addr.dim_y / addr.step_y) * ((addr.dim_x * addr.stride_x) / addr.step_x);
        /* Map only to learn the backing buffer; fd/offset come from the
         * TI app-mem helper, then the patch is released immediately. */
        fd[plane] = appMemGetDmaBufFd(ptr, &offset[plane]);
        vxUnmapImagePatch(image, map_id);
    }
    return num_planes;
}
namespace video {
namespace v4l2 {
// Constructor: zero-initializes the shared encode context and fills in the
// fixed 1080p NV12 -> H.264 encode configuration; also opens the encoder
// device, creates the frame-arrival semaphore and the time-sync consumer.
// NOTE: context_ is a reference to the caller's `context`, so the memset
// below clears the caller's object too before the fields are re-populated.
VideoOutputV4l2Encode::VideoOutputV4l2Encode(v4l2_encode_context& context)
    : log_{CreateLogger("Video", "V4l2", ara::log::LogLevel::kVerbose)}
    , context_(context)
{
    memset(&context_, 0, sizeof(v4l2_encode_context));
    context_.fd = V4L2FindDevice(); // /dev/videoX node backed by the "vxe-enc" driver
    context_.width = 1920;
    context_.height = 1080; // songsz
    context_.n_frames = 0;
    context_.bitrate = 10000000; // 10 Mbit/s
    context_.gop_size = 30;
    context_.i_period = 30;
    context_.frameival_num = 1;  // frame interval = 1/30 s (30 fps)
    context_.frameival_den = 30;
#ifdef DEBUG_DMABUF
    context_.out_memory = V4L2_MEMORY_DMABUF;
#else
    context_.out_memory = V4L2_MEMORY_MMAP; //TODO.
#endif
    context_.cap_memory = V4L2_MEMORY_MMAP;
    context_.pfd.fd = context.fd; // `context` aliases context_ (reference member)
    context_.pfd.events = POLLIN;
    context_.n_outbuffers = MAX_OUTBUFS;
    context_.n_capbuffers = MAX_CAPBUFS;
    // strlen() excludes the NUL; the memset above keeps the arrays terminated.
    memcpy(context_.format_name, "NV12", strlen("NV12"));
    memcpy(context_.codec_name, "H264", strlen("H264"));
    // Scaler output pool: NV12, height padded to 1088 (codec alignment —
    // TODO confirm, the V4L2 side uses 1080).
    enc_pool_.width = 1920;
    enc_pool_.height = 1088; // songsz
    enc_pool_.format = VX_DF_IMAGE_NV12;
    enc_pool_.num_planes = 2;
    enc_pool_.num_channels = 1;
    enc_pool_.bufq_depth = 1;
    enc_pool_.plane_sizes[0] = enc_pool_.width*enc_pool_.height;   // Y plane
    enc_pool_.plane_sizes[1] = enc_pool_.width*enc_pool_.height/2; // UV plane
    sem_init(&signal_videooutput_, 0, 0);
    /* Time-synchronization initialization */
    INIT_NEUSAR_TS_NIC_NAME
}
// Destructor: releases the frame semaphore, unsubscribes the V4L2 events
// and frees both the OUTPUT and CAPTURE buffer queues.
VideoOutputV4l2Encode::~VideoOutputV4l2Encode()
{
    sem_destroy(&signal_videooutput_);
    V4L2UnsubscribeEvents();
    V4L2DeallocBufs(&context_.n_outbuffers, context_.outbuffers, context_.out_format);
    V4L2DeallocBufs(&context_.n_capbuffers, context_.capbuffers, context_.cap_format);
    // NOTE(review): error level used for a normal teardown trace.
    log_.LogError() << "VideoOutputV4l2Encode::~VideoOutputV4l2Encode";
}
// Populate the VPAC MSC filter coefficient tables with a 2-tap bilinear
// kernel (sum of taps = 256, i.e. unity gain in Q8).
// - single_phase[0..1]: identity filter (all weight on the centre tap).
// - multi_phase[0]/[2]: 32 phases with fractional weight i/64 (i = 0..31).
// - multi_phase[1]/[3]: the upper 32 phases, weight (i+32)/64.
// Values are byte-for-byte identical to the previous unrolled tables.
void VideoOutputV4l2Encode::ScaleSetCoeff(tivx_vpac_msc_coefficients_t *coeff)
{
    for (uint32_t table = 0; table < 2; table++)
    {
        uint32_t k = 0;
        coeff->single_phase[table][k++] = 0;
        coeff->single_phase[table][k++] = 0;
        coeff->single_phase[table][k++] = 256;
        coeff->single_phase[table][k++] = 0;
        coeff->single_phase[table][k++] = 0;
    }
    for (uint32_t table = 0; table < 4; table++)
    {
        // Odd tables cover the upper half of the phase range.
        const uint32_t phase_bias = (table & 1u) ? 32u : 0u;
        uint32_t k = 0;
        for (uint32_t phase = 0; phase < 32; phase++)
        {
            const uint32_t weight = (phase + phase_bias) << 2;
            coeff->multi_phase[table][k++] = 0;
            coeff->multi_phase[table][k++] = 0;
            coeff->multi_phase[table][k++] = 256 - weight;
            coeff->multi_phase[table][k++] = weight;
            coeff->multi_phase[table][k++] = 0;
        }
    }
}
// Build and verify the OpenVX MSC scaler graph that feeds the encoder:
// coefficient object -> object-array output pool -> MSC node -> graph
// parameters (index 0 = output pool, index 1 = input image) -> pipelined
// schedule. Returns false (via ASSERT_ERROR_CODE) on any vx failure.
bool VideoOutputV4l2Encode::V4l2CreateScaler()
{
    usleep(1000);
    int sensorObj_num_cameras_enabled = 1;
    vx_status status = VX_SUCCESS;
    scalerObj_.num_outputs = 1;
    scalerObj_.num_ch = sensorObj_num_cameras_enabled;
    // Upload the bilinear filter coefficients into a user data object.
    tivx_vpac_msc_coefficients_t coeffs;
    ScaleSetCoeff(&coeffs);
    scalerObj_.coeff_obj = vxCreateUserDataObject(gAppObj111.context, "tivx_vpac_msc_coefficients_t", sizeof(tivx_vpac_msc_coefficients_t), NULL);
    ASSERT_ERROR_CODE(vxGetStatus((vx_reference)scalerObj_.coeff_obj));
    vxSetReferenceName((vx_reference)scalerObj_.coeff_obj, "scaler_node_coeff_obj");
    ASSERT_ERROR_CODE(vxCopyUserDataObject(scalerObj_.coeff_obj, 0, sizeof(tivx_vpac_msc_coefficients_t), &coeffs, VX_WRITE_ONLY, VX_MEMORY_TYPE_HOST));
    for (vx_int32 idx = 0; idx < APP_MODULES_MAX_SCALER_OUTPUTS; idx++)
    {
        scalerObj_.output[idx].arr = NULL;
        scalerObj_.file_prefix[idx] = NULL;
        scalerObj_.write_node[idx] = NULL;
        scalerObj_.write_cmd[idx] = NULL;
    }
    scalerObj_.file_path = NULL;
    // Exemplar image used to create the output object arrays; released below.
    vx_image intermediate_img = vxCreateImage(gAppObj111.context, enc_pool_.width, enc_pool_.height, VX_DF_IMAGE_NV12);
    ASSERT_ERROR_CODE(vxGetStatus((vx_reference)intermediate_img));
    for (uint32_t count = 0; count < enc_pool_.bufq_depth; count++)
    {
        enc_pool_.arr[count] = vxCreateObjectArray(gAppObj111.context, (vx_reference)intermediate_img, sensorObj_num_cameras_enabled);
        status = (vxGetStatus((vx_reference)enc_pool_.arr[count]));
        if(status != VX_SUCCESS)
        {
            log_.LogError() << "VideoOutputV4l2Encode::V4l2CreateScaler() vxCreateObjectArray Error. status = " << status;
        }
        else
        {
            log_.LogInfo() << "VideoOutputV4l2Encode::V4l2CreateScaler() vxCreateObjectArray Success";
        }
        vx_char name[VX_MAX_REFERENCE_NAME];
        snprintf(name, VX_MAX_REFERENCE_NAME, "enc_pool.arr_%d", count);
        vxSetReferenceName((vx_reference)enc_pool_.arr[count], name);
    }
    vxReleaseImage(&intermediate_img);
    scaler_graph_ = vxCreateGraph(gAppObj111.context);
    ASSERT_ERROR_CODE(vxGetStatus((vx_reference)scaler_graph_));
    ASSERT_ERROR_CODE(vxSetReferenceName((vx_reference)scaler_graph_, "scaler_graph"));
    scalerObj_.output[0].width = enc_pool_.width;
    scalerObj_.output[0].height = enc_pool_.height;
    scalerObj_.output[0].arr = enc_pool_.arr[0];
    vx_image input = (vx_image)fw_refs[0];
    // FIX: dropped the unused locals output2..output5 (only one MSC output
    // is wired up).
    vx_image output1 = (vx_image)vxGetObjectArrayItem((vx_object_array)scalerObj_.output[0].arr, 0);
    scalerObj_.node = tivxVpacMscScaleNode(scaler_graph_, input, output1, NULL, NULL, NULL, NULL);
    ASSERT_ERROR_CODE(vxGetStatus((vx_reference)scalerObj_.node));
    vxSetNodeTarget(scalerObj_.node, VX_TARGET_STRING, TIVX_TARGET_VPAC_MSC1);
    vxSetReferenceName((vx_reference)scalerObj_.node, "scaler_node");
    vx_bool replicate[] = { vx_true_e, vx_true_e, vx_false_e, vx_false_e, vx_false_e, vx_false_e};
    vxReplicateNode(scaler_graph_, scalerObj_.node, replicate, 6);
    // ASSERT_ERROR_CODE(app_create_graph_scaler_write_output(scaler_graph_, &(scalerObj_), 0));// ???
    //vxReleaseImage(&input);
    vxReleaseImage(&output1);
    // Expose node parameter 1 (output) and parameter 0 (input) as graph
    // parameters so they can be enqueued/dequeued for pipelining.
    vx_int32 scal_graph_parameter_index = 0;
    vx_parameter parameter = vxGetParameterByIndex(scalerObj_.node, 1);
    vxAddParameterToGraph(scaler_graph_, parameter);
    // FIX: "¶meter" was HTML-entity mojibake ("&para;meter") for "&parameter".
    vxReleaseParameter(&parameter);
    parameter = vxGetParameterByIndex(scalerObj_.node, 0);
    vxAddParameterToGraph(scaler_graph_, parameter);
    vxReleaseParameter(&parameter);
    enc_pool_.graph_parameter_index = scal_graph_parameter_index;
    vx_graph_parameter_queue_params_t scal_graph_parameters_queue_params_list[3];
    scal_graph_parameters_queue_params_list[scal_graph_parameter_index].graph_parameter_index = scal_graph_parameter_index;
    scal_graph_parameters_queue_params_list[scal_graph_parameter_index].refs_list_size = 1;
    scal_graph_parameters_queue_params_list[scal_graph_parameter_index].refs_list = (vx_reference *)&enc_pool_.arr[0];
    scal_graph_parameter_index++;
    videoOutput_scaler_input_index_ = scal_graph_parameter_index;
    scal_graph_parameters_queue_params_list[videoOutput_scaler_input_index_].graph_parameter_index = videoOutput_scaler_input_index_;
    scal_graph_parameters_queue_params_list[videoOutput_scaler_input_index_].refs_list_size = 7;
    scal_graph_parameters_queue_params_list[videoOutput_scaler_input_index_].refs_list = (vx_reference *)&fw_refs[0];
    scal_graph_parameter_index++;
    ASSERT_ERROR_CODE(vxSetGraphScheduleConfig(scaler_graph_, VX_GRAPH_SCHEDULE_MODE_QUEUE_AUTO, scal_graph_parameter_index, scal_graph_parameters_queue_params_list));
    ASSERT_ERROR_CODE(tivxSetGraphPipelineDepth(scaler_graph_, APP_PIPELINE_DEPTH));
    ASSERT_ERROR_CODE(vxVerifyGraph(scaler_graph_));
    // Push the coefficient table into the (verified) MSC node.
    vx_reference refs = (vx_reference)scalerObj_.coeff_obj;
    ASSERT_ERROR_CODE(tivxNodeSendCommand(scalerObj_.node, 0u, TIVX_VPAC_MSC_CMD_SET_COEFF, &refs, 1u));
    log_.LogInfo() << "VideoOutputV4l2Encode::V4l2CreateScaler() Success!!!";
    return true;
}
// Advance the left/right turn-signal icon tick counters; each counter
// saturates one step past ICON_TIMER_400ns so the comparisons in
// OnVehicleInfoData() (strictly greater-than) can fire.
void VideoOutputV4l2Encode::addVideoBottomTime()
{
    if (iLeftFlag_ <= ICON_TIMER_400ns)
    {
        ++iLeftFlag_;
    }
    if (iRightFlag_ <= ICON_TIMER_400ns)
    {
        ++iRightFlag_;
    }
}
// Latch vehicle signals into the overlay state: toggles the left/right
// turn-signal icon flags (debounced by the ICON_TIMER_400ns tick counters
// advanced in addVideoBottomTime()) and snapshots brake, accelerator and
// speed values for the video-bottom banner.
void VideoOutputV4l2Encode::OnVehicleInfoData(VMB_AVehicleInfo& vehicle_info)
{
    log_.LogInfo() << "VideoOutputMain::OnVehicleInfoData begin Left = " << vehicle_info.BCM1_1.TurningSt_Left
                   << " ||| Right = " << vehicle_info.BCM1_1.TurningSt_right
                   << " ||| Brake = " << vehicle_info.HCU_2_3.HCU_BrakePedalSt
                   << " ||| AccelerationPedal = " << vehicle_info.HCU_2_2.HCU_AccelerationPedalPosition
                   << " ||| IndicationSpeed_IC = " << vehicle_info.IC_1.IndicationSpeed_IC
                   << " ||| iLeftFlag_ = " << iLeftFlag_
                   << " ||| iRightFlag_ = " << iRightFlag_
                   ;
    // Icon currently on and its minimum on-time elapsed -> turn it off.
    if(bLeftFlag_ && iLeftFlag_ > ICON_TIMER_400ns)
    {
        bLeftFlag_ = false;
        iLeftFlag_ = 0;
    }
    // Icon off, signal active and hold-off elapsed -> turn it on.
    // (Alternating on/off while the signal stays active produces the blink.)
    else if(bLeftFlag_ == false && vehicle_info.BCM1_1.TurningSt_Left == true && iLeftFlag_ > ICON_TIMER_400ns)
    {
        bLeftFlag_ = true;
        iLeftFlag_ = 0;
    }
    if(bRightFlag_ && iRightFlag_ > ICON_TIMER_400ns)
    {
        bRightFlag_ = false;
        iRightFlag_ = 0;
    }
    else if(bRightFlag_ == false && vehicle_info.BCM1_1.TurningSt_right == true && iRightFlag_ > ICON_TIMER_400ns)
    {
        bRightFlag_ = true;
        iRightFlag_ = 0;
    }
    bBreakPedal_ = vehicle_info.HCU_2_3.HCU_BrakePedalSt == 1 ? true : false;
    // Pedal position in raw counts; 0.05/0.004 = 12.5 counts — presumably a
    // 5% threshold at 0.4%/count scaling, 250 the raw maximum. TODO confirm.
    bAccelerationPedal_ = vehicle_info.HCU_2_2.HCU_AccelerationPedalPosition <= 250 && vehicle_info.HCU_2_2.HCU_AccelerationPedalPosition > (0.05 / 0.004);
    iSpeed_ = vehicle_info.IC_1.IndicationSpeed_IC;
    log_.LogInfo() << "VideoOutputMain::OnVehicleInfoData end bLeftFlag_ = " << bLeftFlag_
                   << " ||| bRightFlag_ = " << bRightFlag_
                   << " ||| bBreakPedal_ = " << bBreakPedal_
                   << " ||| bAccelerationPedal_ = " << bAccelerationPedal_
                   << " ||| iSpeed_ = " << iSpeed_
                   << " ||| iLeftFlag_ = " << iLeftFlag_
                   << " ||| iRightFlag_ = " << iRightFlag_
                   ;
}
// Frame-arrival callback. Drops frames until fdex and the scaler graph are
// initialized and the encoder is allowed to run; otherwise records the
// incoming frame index and wakes the encode thread (at most one pending
// wakeup, so the semaphore never accumulates).
void VideoOutputV4l2Encode::OnImageData(VMB_FW_ImageGrouph_Front &fwImageGrouph_Front)
{
    if(!fdex_Init_ret_ )
    {
        // Retry fdex initialization once every 50 dropped frames.
        static int initCount = 0;
        ++initCount;
        if((initCount % 50) == 0)
        {
            if (fdex_Init() != FDEX_OK) {
                log_.LogError() << "VideoOutputMain::OnImageData fdex_Init() Error";
                return ;
            }
        }
        log_.LogError() << "VideoOutputMain::OnImageData wait fdex_Init_ret_ initCount = " << initCount;
        return;
    }
    if(!videoOutput_scaler_init_done_)
    {
        log_.LogInfo() << "VideoOutputMain::OnImageData wait videoOutput_scaler_init_done_";
        return;
    }
    if(!isEncoderWorkFlag())
    {
        //log_.LogInfo() << "VideoOutputMain::OnImageData wait isEncoderWorkFlag()";
        return;
    }
    nFrameArrIndex_ = fwImageGrouph_Front.imageInfo_fw.nFrameArrIndex;
    int sem_value = 0;
    sem_getvalue(&signal_videooutput_, &sem_value);
    if(sem_value == 0)
    {
        sem_post(&signal_videooutput_);
    }
}
// Set/clear the mode-stop flag; isEncoderWorkFlag() pauses encoding while set.
void VideoOutputV4l2Encode::OnModeStopVideo(bool bOnModeStop)
{
    bOnModeStop_ = bOnModeStop;
}
// Set/clear the error-stop flag; isEncoderWorkFlag() pauses encoding while set.
void VideoOutputV4l2Encode::OnErrStopVideo(bool bOnErrStop)
{
    bOnErrStop_ = bOnErrStop;
}
// Store the L1 time offset (presumably microseconds — see the comment in
// getVideoBottomTime(); TODO confirm) used when composing the wall-clock time.
void VideoOutputV4l2Encode::OnVideoBottomTime_L1(int64_t iTimeL1)
{
    iTime_L1_ = iTimeL1;
}
// Compose the timestamp shown in the video-bottom banner from the synced
// AP (PTP) clock plus the L1 offset. Returns seconds, or 0 while time sync
// is unavailable or the PTP time is invalid.
uint64_t VideoOutputV4l2Encode::getVideoBottomTime()
{
    /* Time synchronization not yet established */
    if(!TS_Consumer_RPort.GetSynchronizationStatus())
    {
        APP_LOG.LogError() << "[TimeSyncNode] Waiting for TimeSync...";
        return 0;
    }
    else /* time synchronization established */
    {
        /* Fetch the current time from the AP */
        int64_t nPTPTime = TS_Consumer_RPort.GetCurrentTime().time_since_epoch().count();
        /* PTP time invalid: the provider signals failure with -1.
         * (The original comment wrongly labelled this the "normal" case.) */
        if(nPTPTime == -1)
        {
            APP_LOG.LogError() << " Error [TimeSyncNode] AP timestamp[" << nPTPTime << "],is larger than Compile time[]!";
            return 0;
        }
        else
        {
            // Vehicle time = AP interface time + L1 offset; requires the EDR
            // AP TS configuration plus the L1 VMB configuration.
            // The GTC value above should be in nanoseconds, so divide by 1000.
            // The L1 offset should be in microseconds.
            // Result: (ns/1000 + us) / 1e6 -> seconds.
            uint64_t t1l1 = (nPTPTime / 1000 + iTime_L1_) / 1000 / 1000 ;
            APP_LOG.LogDebug() << " Success [TimeSyncNode] AP timesnPTPTimetamp T1 : [" << nPTPTime << "],iTime_L1_:" << iTime_L1_ << " t1l1 = " << t1l1;
            return t1l1;
        }
    }
}
// Encoding is permitted only when neither an error stop nor a mode stop is
// pending and at least one data source is active. Short-circuit order
// matches the original if/else-if chain: isImageDataCallBack() is only
// queried when predicate_ is false.
bool VideoOutputV4l2Encode::isEncoderWorkFlag()
{
    if (bOnErrStop_ || bOnModeStop_)
    {
        return false;
    }
    return predicate_ || VideoOutputInterface::getInstance().isImageDataCallBack();
}
//videooutput scaler node: dequeue operation on the input queue
void VideoOutputV4l2Encode::VideoOutput_input_Dequeue()
{
vx_status status = VX_SUCCESS;
vx_object_array enc_output_arr;
uint32_t num_refs;
status = vxGraphParameterDequeueDoneRef(scaler_graph_, videoOutput_scaler_input_index_, (vx_reference *)&enc_output_arr, 1, &num_refs);
if(status != VX_SUCCESS){
log_.LogError() << "VideoOutput_input_Dequeue Error.";
}
}
//videooutput scaler node: enqueue operation on the input queue
void VideoOutputV4l2Encode::VideoOutput_input_Enqueue()
{
vx_status status = VX_SUCCESS;
vx_image image = (vx_image)fw_refs[nFrameArrIndex_];
status = vxGraphParameterEnqueueReadyRef(scaler_graph_, videoOutput_scaler_input_index_, (vx_reference *)&image, 1);
if(status != VX_SUCCESS)
{
log_.LogError() << "VideoOutput_input_Enqueue Error. status = " << status;
}
else{
//log_.LogInfo() << "VideoOutput_input_Enqueue Success.111";
}
//先缓冲3帧,3帧后每入一帧会自动出一帧
if(videoOutput_scaler_input_current_ < 3)
videoOutput_scaler_input_current_ ++;
else{
VideoOutput_input_Dequeue();
}
}
// Drive one step of the pipelined scaler graph.
// While frame_id <= 0 (pipeline priming) it only enqueues an output-pool
// buffer and returns false. Once primed, it dequeues a completed output,
// maps both NV12 planes of channel 0 for the encoder and returns true;
// the caller owns map_id1/map_id2 and must unmap after encoding.
// NOTE: ASSERT_ERROR_CODE returns false mid-function; if it fires after
// the first vxMapImagePatch the mapping would leak — the maps here are
// performed after all asserted queries, so the current order is safe.
bool VideoOutputV4l2Encode::RunGraphForOneFramePipeline(vx_int32& frame_id, vx_image& input, vx_map_id& map_id1, vx_map_id& map_id2, void** data_ptr1, void** data_ptr2, vx_uint32& rect_end_x, vx_uint32& rect_end_y)
{
    vx_rectangle_t rect;
    vx_imagepatch_addressing_t image_addr;
    if (frame_id <= 0)
    {
        // Priming phase: feed the next output-pool buffer, round-robin over
        // the (depth bufq_depth) pool.
        vxGraphParameterEnqueueReadyRef(scaler_graph_, enc_pool_.graph_parameter_index, (vx_reference *)&enc_pool_.arr[scaler_pipline_], 1);
        scaler_pipline_++;
        scaler_pipline_ = (scaler_pipline_ >= enc_pool_.bufq_depth) ? 0 : scaler_pipline_;
        return false;
    }
    else
    {
        vx_object_array enc_output_arr;
        uint32_t num_refs;
        vxGraphParameterDequeueDoneRef(scaler_graph_, enc_pool_.graph_parameter_index, (vx_reference *)&enc_output_arr, 1, &num_refs);
        input = (vx_image)vxGetObjectArrayItem(enc_output_arr, 0); // take channel 0 of the dequeued array
        VideoOutputInterface::getInstance().OnImageData(input);
        ASSERT_ERROR_CODE(vxGetStatus((vx_reference)input));
        ASSERT_ERROR_CODE(vxQueryImage(input, VX_IMAGE_WIDTH, &rect_end_x, sizeof(vx_uint32)));
        ASSERT_ERROR_CODE(vxQueryImage(input, VX_IMAGE_HEIGHT, &rect_end_y, sizeof(vx_uint32)));
        // Map plane 0 (Y) over the full image...
        rect.start_x = 0;
        rect.start_y = 0;
        rect.end_x = rect_end_x;
        rect.end_y = rect_end_y;
        vxMapImagePatch(input, &rect, 0, &map_id1, &image_addr, data_ptr1, VX_WRITE_ONLY, VX_MEMORY_TYPE_HOST, VX_NOGAP_X);
        // ...and plane 1 (UV) over the half-height chroma region.
        rect.start_x = 0;
        rect.start_y = 0;
        rect.end_x = rect_end_x;
        rect.end_y = rect_end_y / 2;
        vxMapImagePatch(input, &rect, 1, &map_id2, &image_addr, data_ptr2, VX_WRITE_ONLY, VX_MEMORY_TYPE_HOST, VX_NOGAP_X);
#ifdef DEBUG_MEMORY
        // Debug path: export the handles of the next image in image_array_
        // (round-robin index buf_idx_scale) and import them into `input`,
        // re-pointing the dequeued image at that backing memory.
        vx_status status = VX_SUCCESS;
        buf_idx_scale ++;
        if(buf_idx_scale == image_queue_size)
        {
            buf_idx_scale = 0;
        }
        log_.LogError() << "DEBUG MEMORY tivxReference begin buf_idx_scale = " << buf_idx_scale;
        void *addr = nullptr;
        uint32_t size[64] = {0};
        uint32_t max_entries = VX_IPC_MAX_VX_PLANES;
        uint32_t num_entries = 0;
        status = tivxReferenceExportHandle((vx_reference)image_array_[buf_idx_scale], &addr, &size[0], max_entries, &num_entries);
        if(status != VX_SUCCESS)
        {
            log_.LogError() << "DEBUG MEMORY tivxReferenceExportHandle Error status = " << status;
        }
        else
        {
            log_.LogError() << "DEBUG MEMORY tivxReferenceExportHandle Success ";
        }
        log_.LogError() << "1DEBUG MEMORY tivxReference addr = " << addr << " size[0] = " << size[0] << " max_entries = " << max_entries << " num_entries = " << num_entries << " size[1] = " << size[1] << " size[2] = " << size[2] << " size[3] = " << size[3];
        status = tivxReferenceImportHandle((vx_reference)input, (const void **)&addr, &size[0], num_entries);
        if(status != VX_SUCCESS)
        {
            log_.LogError() << "DEBUG MEMORY tivxReferenceImportHandle Error status = " << status;
        }
        else
        {
            log_.LogError() << "DEBUG MEMORY tivxReferenceImportHandle Success " ;
        }
        log_.LogInfo() << "DEBUG MEMORY tivxReference end ";
#endif
        return true;
    }
}
// Encode thread main loop: one-time creation of the scaler graph and V4L2
// encoder configuration (retried via the static locals; NOTE(review): the
// statics mean a failed first call is only re-attempted within this same
// invocation, not on a later call), then for each frame signalled by
// OnImageData(): enqueue input, run the pipelined graph, hand the mapped
// NV12 planes to the V4L2 encoder, recycle the output buffer and unmap.
void VideoOutputV4l2Encode::V4l2EncodeHandler()
{
    vx_map_id map_id1;
    vx_map_id map_id2;
    void *data_ptr1;
    void *data_ptr2;
    vx_image input;
    vx_int32 frame_id = -3; // negative start: 3 priming iterations fill the pipeline
    vx_uint32 rect_end_x = 0, rect_end_y = 0;
    // function-local statics: initializers run only on the first call
    static bool scaler = V4l2CreateScaler();
    static bool config = V4L2EncodeConfig();
    // Up to two blocking retries (3 s apart) if the scaler graph failed.
    if(scaler == false)
    {
        log_.LogError() << "VideoOutputV4l2Encode::V4l2EncodeHandler() error !!! scaler == false first error ";
        std::this_thread::sleep_for(std::chrono::seconds(3));
        scaler = V4l2CreateScaler();
    }
    if(scaler == false)
    {
        log_.LogError() << "VideoOutputV4l2Encode::V4l2EncodeHandler() error !!! scaler == false second error ";
        std::this_thread::sleep_for(std::chrono::seconds(3));
        scaler = V4l2CreateScaler();
    }
    if(scaler == false)
    {
        log_.LogError() << "VideoOutputV4l2Encode::V4l2EncodeHandler() error !!! scaler == false three error";
    }
    else if(config == false)
    {
        log_.LogError() << "VideoOutputV4l2Encode::V4l2EncodeHandler() error !!! config == false";
    }
    else
    {
        // Unblocks OnImageData(): frames may now be forwarded.
        videoOutput_scaler_init_done_ = true;
    }
    // Start streaming once (static guard survives re-entry).
    static bool result = false;
    if(!result)
    {
        result = V4L2EncodeStart();
        //log_.LogError() << "V4L2EncodeStart Failed " ;
        //continue;
    }
    initV4l2Buf();
    while (gEdrRunFlag)
    {
        // Wait up to 300 ms for OnImageData() to signal a frame.
        if (V4L2WaitSemTimeout(&signal_videooutput_, 300) != 0)
        {
            log_.LogError() << "V4L2WaitSemTimeout";
            continue;
        }
        if(!gEdrRunFlag)
        {
            continue;
        }
        VideoOutput_input_Enqueue();
        // false while priming (frame_id <= 0): just advance and retry.
        if(!RunGraphForOneFramePipeline(frame_id, input, map_id1, map_id2, &data_ptr1, &data_ptr2, rect_end_x, rect_end_y))
        {
            frame_id++;
            continue;
        }
        if(predicate_)
        {
            log_.LogError() << "V4l2EncodeHandler 111 work begin -----" ;
            //static int32_t i_init = 0;
            //if( i_init < context_.n_outbuffers )
            // {
            // bool result = V4L2EncodeFrm1(i_init,(int8_t *)data_ptr1, (int8_t *)data_ptr2, rect_end_x, rect_end_y,input);
            // log_.LogError() << "V4L2EncodeFrm1 result = " << result ;
            // i_init++;
            // }
            // else
            {
                // Feed the mapped Y/UV planes to the V4L2 encoder.
                V4L2EncodeFrm2((int8_t *)data_ptr1, (int8_t *)data_ptr2, rect_end_x, rect_end_y,input);
            }
        }
        // Recycle the output-pool buffer for the next graph iteration.
        vxGraphParameterEnqueueReadyRef(scaler_graph_, enc_pool_.graph_parameter_index, (vx_reference *)&enc_pool_.arr[scaler_pipline_], 1);
        scaler_pipline_++;
        scaler_pipline_ = (scaler_pipline_ >= enc_pool_.bufq_depth) ? 0 : scaler_pipline_;
        vxUnmapImagePatch(input, map_id1);
        vxUnmapImagePatch(input, map_id2);
        frame_id++;
    }
}
// Wait on `sem` for at most `timeout` milliseconds.
// Converts the relative timeout into the absolute CLOCK_REALTIME deadline
// required by sem_timedwait() and returns its result (0 on success,
// -1/errno==ETIMEDOUT on timeout).
int32_t VideoOutputV4l2Encode::V4L2WaitSemTimeout(sem_t *sem, uint32_t timeout)
{
    struct timespec deadline;
    clock_gettime(CLOCK_REALTIME, &deadline);
    // Split ms into whole seconds + nanoseconds, then normalize the
    // nanosecond field so 0 <= tv_nsec < 1e9.
    const uint64_t total_nsec = (uint64_t)(timeout % 1000u) * 1000u * 1000u + (uint64_t)deadline.tv_nsec;
    deadline.tv_sec += (long)(timeout / 1000u) + (long)(total_nsec / (1000u * 1000u * 1000u));
    deadline.tv_nsec = (long)(total_nsec % (1000u * 1000u * 1000u));
    return sem_timedwait(sem, &deadline);
}
// Scan /dev/video* for the node whose V4L2 driver reports "vxe-enc"
// (the TI hardware encoder). Returns an open O_RDWR|O_NONBLOCK fd on
// success, -1 if no matching device is found. The device path is copied
// into context_.dev_name.
int32_t VideoOutputV4l2Encode::V4L2FindDevice()
{
    std::string dev_path("/dev/");
    std::string dev_name("videox"); // only the first 5 chars ("video") are compared
    std::string driver_name("vxe-enc");
    struct v4l2_capability cap = {0};
    std::string name;
    int32_t fd = -1;
    struct dirent *dir;
    DIR *d = opendir(dev_path.c_str());
    if (!d)
    {
        log_.LogError() << "Failed to open device path: " << dev_path << strerror(errno);
        return -1;
    }
    while ((dir = readdir(d)) != NULL)
    {
        if (dev_name.compare(0, 5, dir->d_name, 0, 5) == 0)
        {
            name = dev_path + dir->d_name;
            // Open the video device node.
            fd = open(name.c_str(), O_RDWR | O_NONBLOCK, 0);
            if (fd < 0)
            {
                log_.LogError() << "Failed to open device " << name << strerror(errno);
                continue;
            }
            memset(&cap, 0, sizeof(cap));
            if (ioctl(fd, VIDIOC_QUERYCAP, &cap))
            {
                log_.LogError() << "VIDIOC_QUERYCAP failed on device " << name << strerror(errno);
                close(fd);
                continue;
            }
            if (driver_name == (char*)cap.driver)
            {
                log_.LogError() << "No device specified, using " << name;
                break; // keep this fd open — it is the return value
            }
            close(fd);
            fd = -1;
        }
    }
    closedir(d); // FIX: the directory stream was never closed (fd leak)
    if (fd < 0)
    {
        log_.LogError() << "Failed to find device in " << dev_path;
    }
    // NOTE(review): std::string::copy does not NUL-terminate; this relies on
    // dev_name having been zeroed (the constructor memsets the context) and
    // on `name` fitting in the field — confirm dev_name's size.
    name.copy((char*)context_.dev_name, name.length(), 0);
    return fd;
}
// One-shot encoder configuration: derive the OUTPUT (raw NV12) and CAPTURE
// (H.264 bitstream) formats, probe the device, subscribe events, program
// controls and frame rate, negotiate formats and allocate both buffer
// queues. Returns false as soon as any required step fails
// (ASSERT_ERROR_BOOL returns on a false result).
bool VideoOutputV4l2Encode::V4L2EncodeConfig()
{
    V4L2CalculateFormats();
    context_.out_format.memory = context_.out_memory;
    context_.cap_format.memory = context_.cap_memory;
    ASSERT_ERROR_BOOL(V4L2QueryDevice());
    ASSERT_ERROR_BOOL(V4L2SubscribeEvents());
    V4L2QueryCtrls();        // diagnostics only — failures are logged, not fatal
    V4L2VideoSetExtCtrls();  // gop/bitrate/i-period controls
    ASSERT_ERROR_BOOL(VideoSetParameters());
    V4L2GetFormat();
    ASSERT_ERROR_BOOL(V4L2TrySetFormat());
    V4L2GetFormat();         // re-read to log what the driver actually accepted
    ASSERT_ERROR_BOOL(V4L2AllocBufs(&context_.n_outbuffers, context_.outbuffers, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, context_.out_format));
    ASSERT_ERROR_BOOL(V4L2AllocBufs(&context_.n_capbuffers, context_.capbuffers, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, context_.cap_format));
    return true;
}
// Resolve the named OUTPUT (raw) and CAPTURE (codec) formats from the
// static format tables and size them for the configured resolution.
void VideoOutputV4l2Encode::V4L2CalculateFormats()
{
    V4L2FindFormat(context_.format_name, &context_.out_format, out_formats, (sizeof(out_formats) / sizeof(out_formats[0])));
    V4L2FindFormat(context_.codec_name, &context_.cap_format, cap_formats, (sizeof(cap_formats) / sizeof(cap_formats[0])));
    context_.out_format.width = context_.width;
    context_.out_format.height = context_.height;
    // Line stride rounded up to the hardware alignment; plane size scaled by
    // the format's size_num/size_den ratio (3/2 for NV12 — TODO confirm).
    context_.out_format.stride = ALIGN(context_.width * context_.out_format.bytes_pp, HW_ALIGN);
    context_.out_format.size = ((context_.out_format.stride * context_.height) * context_.out_format.size_num) / context_.out_format.size_den;
    // songsz context_.out_format.size = 1920 * 1080 * 3 / 2;
    context_.cap_format.width = context_.width;
    context_.cap_format.height = context_.height;
    // Bitstream buffer sized as width*height bytes.
    context_.cap_format.stride = context_.width;
    context_.cap_format.size = context_.cap_format.stride * context_.height;
}
// Look up `name` in the static format table and copy the matching entry
// into *format; falls back to the table's first entry when no name matches.
void VideoOutputV4l2Encode::V4L2FindFormat(uint8_t name[], struct tienc_format *format, const struct tienc_format formats[], uint32_t formats_size)
{
    uint32_t idx = 0;
    while (idx < formats_size && strcmp((char*)name, (char*)formats[idx].name) != 0)
    {
        idx++;
    }
    if (idx < formats_size)
    {
        log_.LogInfo() << "Found format " << name;
        *format = formats[idx];
    }
    else
    {
        log_.LogError() << "No format found. Defaulting to " << formats[0].name;
        *format = formats[0];
    }
}
// Probe and log the encoder device's capabilities, supported pixel formats
// (both CAPTURE and OUTPUT queues) and frame sizes. Returns false only if
// VIDIOC_QUERYCAP itself fails; enumeration failures are just logged.
bool VideoOutputV4l2Encode::V4L2QueryDevice()
{
    struct v4l2_capability cap = {0};
    struct v4l2_fmtdesc desc = {0};
    struct v4l2_frmsizeenum frmsize = {0};
    log_.LogInfo() << __func__ << " Enter";
    if (ioctl(context_.fd, VIDIOC_QUERYCAP, &cap))
    {
        log_.LogError() << "VIDIOC_QUERYCAP failed";
        return false;
    }
    log_.LogInfo() << "Querycaps: fd = " << context_.fd << " driver = " << cap.driver << " card = " << cap.card << " bus_info = " << cap.bus_info;
    log_.LogInfo() << "Querycaps: device_caps = " << "0x" << std::hex << cap.device_caps << " capabilities = " << cap.capabilities;
    for (uint32_t type : {V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE})
    {
        desc.index = 0;
        desc.type = type;
        while (!ioctl(context_.fd, VIDIOC_ENUM_FMT, &desc))
        {
            // Decode the fourcc byte by byte for the log.
            log_.LogInfo() << "desc.index = " << desc.index << " pixelformat = "
                           << (desc.pixelformat & 0xff)
                           << ((desc.pixelformat >> 8) & 0xff)
                           << ((desc.pixelformat >> 16) & 0xff)
                           << ((desc.pixelformat >> 24) & 0xff) << " desciption = " << desc.description;
            desc.index++;
        }
    }
    frmsize.index = 0;
    log_.LogInfo() << "Calling VIDIOC_ENUM_FRAMESIZES";
    // Enumerate the resolutions the driver supports for the given format.
    // NOTE(review): frmsize.pixel_format is left 0 here — some drivers
    // reject that with EINVAL; confirm against the vxe-enc driver.
    if (ioctl(context_.fd, VIDIOC_ENUM_FRAMESIZES, &frmsize))
    {
        log_.LogError() << "VIDIOC_ENUM_FRAMESIZES failed :" << errno << " " << strerror(errno);
    }
    else if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE)
    {
        log_.LogInfo() << "VIDIOC_ENUM_FRAMESIZES got DISCRETE";
        log_.LogInfo() << "frmsizes = " << frmsize.index << " width = " << frmsize.discrete.width << " height = " << frmsize.discrete.height;
        frmsize.index++;
        while (ioctl(context_.fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0)
        {
            log_.LogInfo() << "frmsizes = " << frmsize.index << " width = " << frmsize.discrete.width << " height = " << frmsize.discrete.height;
            frmsize.index++;
        }
    }
    else
    {
        log_.LogInfo() << "VIDIOC_ENUM_FRAMESIZES got " << ((frmsize.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) ? "CONTINUOUS" : "STEPWISE");
        log_.LogInfo() << "frmsizes min_width = " << frmsize.stepwise.min_width << " max_width = " << frmsize.stepwise.max_width << " step_width = " << frmsize.stepwise.step_width;
        log_.LogInfo() << "frmsizes min_height = " << frmsize.stepwise.min_height << " max_height = " << frmsize.stepwise.max_height << " step_height = " << frmsize.stepwise.step_height;
    }
    return true;
}
// Subscribe to the encoder's V4L2 events. Returns false on ioctl failure.
bool VideoOutputV4l2Encode::V4L2SubscribeEvents()
{
    struct v4l2_event_subscription sub;
    // FIX: the struct was passed to the ioctl with id/flags uninitialized
    // (stack garbage), which a strict driver can reject with EINVAL.
    memset(&sub, 0, sizeof(sub));
    // NOTE(review): both entries are V4L2_EVENT_EOS, so EOS is subscribed
    // twice — one was presumably meant to be V4L2_EVENT_SOURCE_CHANGE.
    // Left as-is to preserve behavior; confirm the intended event set.
    for (uint32_t type : {V4L2_EVENT_EOS, V4L2_EVENT_EOS})
    {
        sub.type = type;
        log_.LogInfo() << "Calling V4L2 IOCTL VIDIOC_SUBSCRIBE_EVENT";
        if (ioctl(context_.fd, VIDIOC_SUBSCRIBE_EVENT, &sub))
        {
            log_.LogError() << "Failed to subscribe to events: err: " << errno << " " << strerror(errno);
            return false;
        }
    }
    return true;
}
// Diagnostic dump of the device's V4L2 controls: first all classic controls,
// then all extended controls under the three NEXT_* enumeration modes, and
// finally the specific controls this encoder programs (gop size, bitrate,
// H.264 I-period) via both the classic and extended query ioctls.
// Purely informational — all failures are logged and ignored.
void VideoOutputV4l2Encode::V4L2QueryCtrls()
{
    struct v4l2_queryctrl ctrl;
    struct v4l2_query_ext_ctrl ext_ctrl;
    log_.LogInfo() << "Enter " << __func__;
    log_.LogInfo() << " Enumerating all non-compound";
    memset(&ctrl, 0, sizeof(ctrl));
    ctrl.id = 0;
    ctrl.id |= V4L2_CTRL_FLAG_NEXT_CTRL; // "give me the next control after id"
    while (!ioctl(context_.fd, VIDIOC_QUERYCTRL, &ctrl))
    {
        log_.LogInfo() << " Got ctrl.id = " << "0x" << std::hex << ctrl.id << " type = " << ctrl.type;
        log_.LogInfo() << " name = " << ctrl.name << " minimum = " << ctrl.minimum << " maximum = " << ctrl.maximum;
        log_.LogInfo() << " step = " << ctrl.step << " default_value = " << ctrl.default_value;
        log_.LogInfo() << " flags = " << "0x" << std::hex << ctrl.flags;
        ctrl.id |= V4L2_CTRL_FLAG_NEXT_CTRL;
    }
    log_.LogInfo() << " Enumerating all non-compound ended errno = " << errno << " " << strerror(errno);
    log_.LogInfo() << " Enumerating all non-compound ext";
    for (uint32_t id : {(int32_t)V4L2_CTRL_FLAG_NEXT_CTRL, (int32_t)V4L2_CTRL_FLAG_NEXT_COMPOUND, (int32_t)(V4L2_CTRL_FLAG_NEXT_CTRL | V4L2_CTRL_FLAG_NEXT_COMPOUND)})
    {
        memset(&ext_ctrl, 0, sizeof(ext_ctrl));
        ext_ctrl.id = 0;
        ext_ctrl.id |= id;
        while (!ioctl(context_.fd, VIDIOC_QUERY_EXT_CTRL, &ext_ctrl))
        {
            log_.LogInfo() << " Got ext_ctrl.id = " << "0x" << std::hex << ext_ctrl.id << " type = " << ext_ctrl.type;
            log_.LogInfo() << " name = " << ext_ctrl.name << " minimum = " << ext_ctrl.minimum << " maximum = " << ext_ctrl.maximum;
            log_.LogInfo() << " step = " << ext_ctrl.step << " default_value = " << ext_ctrl.default_value;
            log_.LogInfo() << " telem_size = " << ext_ctrl.elem_size << " elems = " << ext_ctrl.elems << " nr_of_dims = " << ext_ctrl.nr_of_dims;
            ext_ctrl.id |= id;
        }
        log_.LogInfo() << " Enumerating all compound ext ended errno = " << strerror(errno);
        log_.LogInfo() << " Enumerating all controls ext";
    }
    for (uint32_t id : {V4L2_CID_MPEG_VIDEO_GOP_SIZE, V4L2_CID_MPEG_VIDEO_BITRATE, V4L2_CID_MPEG_VIDEO_H264_I_PERIOD})
    {
        memset(&ctrl, 0, sizeof(ctrl));
        ctrl.id = id;
        if (ioctl(context_.fd, VIDIOC_QUERYCTRL, &ctrl))
        {
            log_.LogError() << "error querying V4L2_CID_MPEG_VIDEO_H264_I_PERIOD errno: " << errno << " " << strerror(errno);
        }
        else
        {
            log_.LogInfo() << " Got ctrl.id = " << "0x" << std::hex << ctrl.id << "type = " << ctrl.type;
            log_.LogInfo() << " name = " << ctrl.name << "minimum = " << ctrl.minimum << "maximum = " << ctrl.maximum;
            log_.LogInfo() << " step = " << ctrl.step << " default_value = " << ctrl.default_value;
        }
    }
    for (uint32_t id : {V4L2_CID_MPEG_VIDEO_GOP_SIZE, V4L2_CID_MPEG_VIDEO_BITRATE, V4L2_CID_MPEG_VIDEO_H264_I_PERIOD})
    {
        memset(&ext_ctrl, 0, sizeof(ext_ctrl));
        ext_ctrl.id = id;
        if (ioctl(context_.fd, VIDIOC_QUERY_EXT_CTRL, &ext_ctrl))
        {
            log_.LogError() << "error querying ext V4L2_CID_MPEG_VIDEO_BITRATE errno: " << errno << " " << strerror(errno);
        }
        else
        {
            log_.LogInfo() << " Got ext_ctrl.id = " << "0x" << std::hex << ext_ctrl.id << " type = " << ext_ctrl.type;
            log_.LogInfo() << " name = " << ext_ctrl.name << " minimum = " << ext_ctrl.minimum << " maximum = " << ext_ctrl.maximum;
            log_.LogInfo() << " step = " << ext_ctrl.step << " default_value = " << ext_ctrl.default_value;
            log_.LogInfo() << " telem_size = " << ext_ctrl.elem_size << " elems = " << ext_ctrl.elems << " nr_of_dims = " << ext_ctrl.nr_of_dims;
            // FIX: was logging ctrl.flags (the stale classic-query struct)
            // instead of the extended control actually queried here.
            log_.LogInfo() << " flags = " << "0x" << std::hex << ext_ctrl.flags;
        }
    }
}
// Program the encoder's gop size, bitrate and H.264 I-period via the
// extended-controls API: read current values, try/set the configured
// values, then read them back and log what the driver accepted.
// Failures only reduce the number of controls logged at the end.
void VideoOutputV4l2Encode::V4L2VideoSetExtCtrls()
{
    struct v4l2_ext_controls ctrls;
    struct v4l2_ext_control controls[3];
    memset(&ctrls, 0, sizeof(ctrls));
    memset(controls, 0, sizeof(controls));
    ctrls.which = V4L2_CTRL_WHICH_CUR_VAL;
    ctrls.count = 3;
    ctrls.controls = controls;
    controls[0].id = V4L2_CID_MPEG_VIDEO_GOP_SIZE;
    controls[1].id = V4L2_CID_MPEG_VIDEO_BITRATE;
    controls[2].id = V4L2_CID_MPEG_VIDEO_H264_I_PERIOD;
    // count of controls known-good, trimmed on TRY/S failure via error_idx.
    uint32_t count = ctrls.count;
    if (ioctl(context_.fd, VIDIOC_G_EXT_CTRLS, &ctrls))
    {
        log_.LogError() << "Getting VIDIOC_G_EXT_CTRLS Fail: " << strerror(errno);
    }
    controls[0].value = context_.gop_size;
    controls[1].value = context_.bitrate;
    controls[2].value = context_.i_period;
    if (ioctl(context_.fd, VIDIOC_TRY_EXT_CTRLS, &ctrls))
    {
        // error_idx = index of the first control the driver rejected.
        log_.LogInfo() << "error getting VIDIOC_TRY_EXT_CTRLS error_idx = " << ctrls.error_idx << strerror(errno);
        count = ctrls.error_idx;
    }
    if (ioctl(context_.fd, VIDIOC_S_EXT_CTRLS, &ctrls))
    {
        // error_idx == count means validation failed before any control was set.
        log_.LogError() << "error getting VIDIOC_S_EXT_CTRLS error_idx=" << ctrls.error_idx << strerror(errno);
        count = (ctrls.error_idx == ctrls.count) ? 0 : ctrls.error_idx;
    }
    // Zero the values and read back what the driver actually stored.
    controls[0].value = 0;
    controls[1].value = 0;
    controls[2].value = 0;
    if (ioctl(context_.fd, VIDIOC_G_EXT_CTRLS, &ctrls))
    {
        log_.LogError() << "Getting VIDIOC_G_EXT_CTRLS Fail: " << strerror(errno);
    }
    // NOTE(review): int32_t i compared against uint32_t count (signed/unsigned).
    for (int32_t i = 0; i < count; i++)
    {
        log_.LogInfo() << " controls[" << i << "]" << "id = " << "0x" << std::hex << controls[i].id << " got size = " << controls[i].size << " value = " << controls[i].value;
    }
}
// Program the encoder frame interval (frame rate) on the OUTPUT queue via
// VIDIOC_S_PARM, logging the driver's values before and after. Returns
// false only when VIDIOC_S_PARM fails; G_PARM failures are logged only.
bool VideoOutputV4l2Encode::VideoSetParameters()
{
    struct v4l2_streamparm parm = {0};
    parm.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    parm.parm.output.capability = V4L2_CAP_TIMEPERFRAME;
    // References into the ioctl struct so the logs always show live values.
    uint32_t& denominator = parm.parm.output.timeperframe.denominator;
    uint32_t& numerator = parm.parm.output.timeperframe.numerator;
    // FIX: the log lines computed denominator / numerator unguarded; the
    // numerator is 0 when the driver leaves it unset or a G_PARM fails
    // (it is explicitly zeroed before the final G_PARM below), which was
    // a division by zero. Report 0 fps in that case instead.
    const auto fps = [](uint32_t den, uint32_t num) -> uint32_t {
        return (num != 0u) ? (den / num) : 0u;
    };
    if (ioctl(context_.fd, VIDIOC_G_PARM, &parm))
    {
        log_.LogError() << "VIDIOC_G_PARM Failed: " << strerror(errno);
    }
    log_.LogInfo() << "VIDIOC_G_PARM got back framerate " << denominator << " " << numerator << "=" << fps(denominator, numerator);
    parm.parm.output.timeperframe.numerator = context_.frameival_num;
    parm.parm.output.timeperframe.denominator = context_.frameival_den;
    if (ioctl(context_.fd, VIDIOC_S_PARM, &parm))
    {
        log_.LogError() << "VIDIOC_S_PARM Failed: " << strerror(errno);
        return false;
    }
    log_.LogInfo() << "VIDIOC_S_PARM got back framerate " << denominator << " " << numerator << "=" << fps(denominator, numerator);
    // Zero the fields and re-query to see what the driver actually applied.
    parm.parm.output.timeperframe.numerator = 0;
    parm.parm.output.timeperframe.denominator = 0;
    if (ioctl(context_.fd, VIDIOC_G_PARM, &parm))
    {
        log_.LogError() << "VIDIOC_G_PARM Failed: " << strerror(errno);
    }
    log_.LogInfo() << "VIDIOC_G_PARM got back framerate " << denominator << " " << numerator << "=" << fps(denominator, numerator);
    return true;
}
void VideoOutputV4l2Encode::V4L2GetFormat()
{
    // Query and dump the currently negotiated format of both the OUTPUT and
    // CAPTURE queues. Purely informational; returns early on the first
    // VIDIOC_G_FMT failure.
    for (uint32_t type : {context_.out_format.type, context_.cap_format.type})
    {
        struct v4l2_format fmt = {0};
        log_.LogInfo() << __func__ << " Enter";
        fmt.type = type;
        if (ioctl(context_.fd, VIDIOC_G_FMT, &fmt))
        {
            log_.LogError() << "VIDIOC Get Format Fail: " << strerror(errno);
            return;
        }
        // FIX: this is an informational message, not an error — was LogError.
        log_.LogInfo() << __func__ << " printing returned v4l2_format";
        V4L2PrintFormat(&fmt);
    }
}
void VideoOutputV4l2Encode::V4L2PrintFormat(struct v4l2_format *fmt)
{
    // Dump the multi-planar pixel-format fields of `fmt` to the info log.
    const struct v4l2_pix_format_mplane &mp = fmt->fmt.pix_mp;
    log_.LogInfo() << "type = " << fmt->type;
    log_.LogInfo() << "width = " << mp.width << "height = " << mp.height;
    log_.LogInfo() << "pixelformat = " << mp.pixelformat;
    log_.LogInfo() << "field = " << mp.field << "colorspace = " << mp.colorspace;
    // One line pair per plane: buffer size and line stride.
    for (int32_t plane = 0; plane < mp.num_planes; ++plane)
    {
        log_.LogInfo() << "plane_fmt[" << plane << "].sizeimage = " << mp.plane_fmt[plane].sizeimage;
        log_.LogInfo() << "plane_fmt[" << plane << "].bytesperline = " << mp.plane_fmt[plane].bytesperline;
    }
    log_.LogInfo() << "num_planes = " << mp.num_planes << "flags = " << mp.flags;
}
// Negotiate the encoder formats.
// Phase 1: run VIDIOC_TRY_FMT then VIDIOC_S_FMT on the CAPTURE (encoded
// bitstream) queue. Phase 2: force a hard-coded 1920x1088 NV12 format on the
// OUTPUT (raw frame) queue.
// NOTE(review): the OUTPUT queue was removed from the loop (see the
// commented-out "songsz" line) and is instead configured by the hard-coded
// block below; 1088 is used instead of 1080 because the driver rejects a
// 1080-line format — see the note at the end of this file. TODO confirm
// whether the hard-coded block should eventually honor context_.out_format.
bool VideoOutputV4l2Encode::V4L2TrySetFormat()
{
struct v4l2_format fmt = {0};
log_.LogError() << "V4L2TrySetFormat";
// TRY_FMT runs first so that a later S_FMT failure is distinguishable in the
// logs; TRY_FMT errors are deliberately ignored.
for (uint32_t cmd : {VIDIOC_TRY_FMT, VIDIOC_S_FMT})
{
// songsz for (auto format : {context_.out_format, context_.cap_format})
for (auto format : {context_.cap_format})
{
memset(&fmt, 0, sizeof(v4l2_format));
fmt.type = format.type;
fmt.fmt.pix_mp.width = format.width;
fmt.fmt.pix_mp.height = format.height;
fmt.fmt.pix_mp.pixelformat = format.fourcc;
fmt.fmt.pix_mp.field = V4L2_FIELD_NONE;
fmt.fmt.pix_mp.colorspace = V4L2_COLORSPACE_DEFAULT;
fmt.fmt.pix_mp.plane_fmt[0].sizeimage = format.size;
fmt.fmt.pix_mp.plane_fmt[0].bytesperline = format.stride;
fmt.fmt.pix_mp.num_planes = format.n_planes;
log_.LogError() << "fmt.type " << fmt.type;
log_.LogError() << "VIDIOC_S_FMT sizeimage: " << fmt.fmt.pix_mp.plane_fmt[0].sizeimage;
if (ioctl(context_.fd, cmd, &fmt))
{
// Only an S_FMT failure is fatal; TRY_FMT is advisory.
if (cmd == VIDIOC_S_FMT)
{
log_.LogError() << "VIDIOC_S_FMT Fail: " << strerror(errno);
return false;
}
}
else
{
// On success the driver may have adjusted the format; dump what came back.
log_.LogInfo() << "Printing returned v4l2_format";
V4L2PrintFormat(&fmt);
}
}
}
// songsz
// Hard-coded OUTPUT-queue format: 1920x1088 NV12, single plane,
// sizeimage = width * height * 3/2 (NV12 = luma + half-size chroma).
memset(&fmt, 0, sizeof(v4l2_format));
fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
fmt.fmt.pix_mp.width = 1920;
fmt.fmt.pix_mp.height = 1088;
fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12;
fmt.fmt.pix_mp.plane_fmt[0].sizeimage =
1920 * 1088 * 3/2;
fmt.fmt.pix_mp.num_planes = 1;
fmt.fmt.pix_mp.plane_fmt[0].bytesperline = context_.out_format.stride;
log_.LogError() << "VIDIOC_S_FMT start " << fmt.fmt.pix_mp.plane_fmt[0].sizeimage;
if (ioctl(context_.fd, VIDIOC_S_FMT, &fmt))
{
// NOTE(review): failure here is logged but tolerated (return commented out);
// confirm this is intentional for the debug build.
log_.LogError() << "VIDIOC_S_FMT Fail: " << strerror(errno);
//return false;
}
log_.LogError() << "VIDIOC_S_FMT end " << fmt.fmt.pix_mp.plane_fmt[0].sizeimage;
V4L2GetFormat();
log_.LogError() << "V4L2TrySetFormat done";
return true;
}
void VideoOutputV4l2Encode::V4L2UnsubscribeEvents()
{
    // Drop every V4L2 event subscription on the encoder fd.
    struct v4l2_event_subscription sub;
    // BUG FIX: zero the whole struct before use — only `type` was set, so
    // id/flags/reserved were handed to the kernel uninitialized (the V4L2
    // API requires the reserved fields to be zeroed).
    memset(&sub, 0, sizeof(sub));
    sub.type = V4L2_EVENT_ALL;
    log_.LogInfo() << "Calling V4L2 IOCTL VIDIOC_UNSUBSCRIBE_EVENT";
    if (ioctl(context_.fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub))
    {
        log_.LogError() << "Failed to unsubscribe from events: err:" << strerror(errno);
    }
    return;
}
bool VideoOutputV4l2Encode::V4L2AllocBufs(uint32_t *n_buffers, struct tienc_buffer buffers[], enum v4l2_buf_type type, struct tienc_format format)
{
    // Request `*n_buffers` driver buffers on the given queue, write back the
    // count the driver actually granted, and (for MMAP memory) map each one.
    log_.LogInfo() << __func__ << " Enter";
    const std::string type_str = ((type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) ? ("CAPTURE") : ("OUTPUT"));
    struct v4l2_requestbuffers reqbuf = {0};
    reqbuf.count = *n_buffers;
    reqbuf.type = type;
    reqbuf.memory = format.memory;
    log_.LogInfo() << "VIDIOC_REQBUF type_str " << type_str << " reqbuf.count = " << reqbuf.count << " reqbuf.type = " << reqbuf.type << " reqbuf.memory = " << reqbuf.memory;
    // Ask the kernel to allocate the buffers.
    if (ioctl(context_.fd, VIDIOC_REQBUFS, &reqbuf))
    {
        log_.LogError() << "VIDIOC_REQBUF Error" << type_str << "Fail: " << strerror(errno);
        return false;
    }
    log_.LogInfo() << "VIDIOC_REQBUF Success type_str is " << type_str ;
    // The driver may grant fewer (or more) buffers than requested.
    *n_buffers = reqbuf.count;
    for (uint32_t idx = 0; idx < *n_buffers; ++idx)
    {
        // Only MMAP buffers need a local mapping; DMABUF fds arrive per-frame.
        if (format.memory != V4L2_MEMORY_MMAP)
        {
            continue;
        }
        if (!V4L2CreateMmapBuffer(&buffers[idx], type, idx, format))
        {
            return false;
        }
    }
    return true;
}
void VideoOutputV4l2Encode::V4L2DeallocBufs(uint32_t *n_buffers, struct tienc_buffer buffers[], struct tienc_format format)
{
    // Unmap every mapped buffer. Entries are NULLed after munmap so a second
    // call (or a partially-mapped set) is handled safely.
    // `format` is currently unused; kept for signature symmetry with
    // V4L2AllocBufs.
    (void)format;
    log_.LogInfo() << __func__ << " Enter";
    // BUG FIX: loop index is now unsigned to match *n_buffers (was a
    // signed/unsigned comparison); the never-used `ret` local was dropped.
    for (uint32_t i = 0; i < *n_buffers; i++)
    {
        if (NULL == buffers[i].mapped)
        {
            log_.LogError() << "buffer " << i << " not mapped, skip munmap";
            continue;
        }
        log_.LogInfo() << "munmap buffer " << i << "mapped = " << "0x" << std::hex << buffers[i].mapped << "length = " << buffers[i].length;
        if (munmap(buffers[i].mapped, buffers[i].length))
        {
            log_.LogError() << "munmap failed for buffer" << i << strerror(errno);
        }
        buffers[i].mapped = NULL;
    }
}
int VideoOutputV4l2Encode::get_plane_min_length(int encoder_fd, int plane_index) {
    // Query the OUTPUT-queue format from the driver and return its minimum
    // sizeimage for `plane_index`, or -1 on failure.
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    if (ioctl(encoder_fd, VIDIOC_G_FMT, &fmt) < 0) {
        perror("Failed to get format");
        return -1;
    }
    log_.LogError() << "Encoder Format";
    log_.LogError() << "Width: "<< fmt.fmt.pix_mp.width;
    log_.LogError() << "Height: "<< fmt.fmt.pix_mp.height;
    // Decode the fourcc one byte at a time (little-endian character order).
    log_.LogError() << "Pixel Format: " << \
    (fmt.fmt.pix_mp.pixelformat & 0xFF) << " " << \
    ((fmt.fmt.pix_mp.pixelformat >> 8) & 0xFF) << " " << \
    ((fmt.fmt.pix_mp.pixelformat >> 16) & 0xFF) << " " << \
    ((fmt.fmt.pix_mp.pixelformat >> 24) & 0xFF);
    log_.LogError() << "Field:" << fmt.fmt.pix_mp.field;
    // BUG FIX: the original used the comma operator here
    // (`<< "Colorspace: ", fmt...`), so the colorspace value was never logged.
    log_.LogError() << "Colorspace: " << fmt.fmt.pix_mp.colorspace;
    for (int i = 0; i < fmt.fmt.pix_mp.num_planes; ++i) {
        log_.LogError() << "Plane " << i << " sizeimage" << fmt.fmt.pix_mp.plane_fmt[i].sizeimage;
        log_.LogError() << " Plane " << i << " bytesperline " << fmt.fmt.pix_mp.plane_fmt[i].bytesperline;
    }
    // Guard against an out-of-range plane index before indexing plane_fmt.
    if ((plane_index < 0) || (plane_index >= fmt.fmt.pix_mp.num_planes)) {
        log_.LogError() << "Invalid plane index " << plane_index;
        return -1;
    }
    return fmt.fmt.pix_mp.plane_fmt[plane_index].sizeimage;
}
// Queue one buffer (VIDIOC_QBUF) on the queue described by `format`.
// For the OUTPUT queue bytesused is set to the full frame size; the plane
// payload is attached either via the MMAP offset or via a dma-buf fd,
// depending on format.memory. Returns false on an invalid memory type or a
// failed QBUF.
// NOTE(review): buf.length is hard-coded to 1 plane (see commented-out
// n_planes line) and the retry loop is disabled — both look like deliberate
// debug state; confirm before cleaning up.
bool VideoOutputV4l2Encode::V4L2QueueBuffer(uint32_t index, struct tienc_buffer buffer, struct tienc_format format)
{
struct v4l2_buffer buf;
struct v4l2_plane buf_planes[MAX_PLANES];
memset(&buf, 0, sizeof(buf));
memset(&buf_planes, 0, sizeof(buf_planes));
// Only the OUTPUT (raw frame) queue carries a payload size; CAPTURE buffers
// are filled by the driver.
if (format.type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
{
buf_planes[0].bytesused = format.size;
}
if (format.memory == V4L2_MEMORY_MMAP)
{
// MMAP: identify the buffer by its mmap offset.
buf_planes[0].m.mem_offset = buffer.offset;
log_.LogError() << "dbg_dmabuf::V4L2QueueBuffer V4L2_MEMORY_MMAP";
}
else if (format.memory == V4L2_MEMORY_DMABUF)
{
// songsz
// DMABUF: hand the dma-buf fd and its total length to the driver.
buf_planes[0].m.fd = buffer.dbuf_fd;
// buf_planes[0].length = 0;
buf_planes[0].length = buffer.length;
// buf_planes[0].length = 3133440;
log_.LogError() << "dbg_dmabuf::V4L2QueueBuffer V4L2_MEMORY_DMABUF";
}
else
{
log_.LogError() << "Invalid memory type selected";
return false;
}
buf.index = index;
buf.type = format.type;
buf.memory = format.memory;
buf.m.planes = buf_planes;
//buf.length = format.n_planes;
buf.length = 1;
// errno is cleared so the debug log below reports only this call's result.
errno = 0;
log_.LogError() << "dbg_dmabuf:: VIDIOC_QBUF buf_planes[0].length = " << buf_planes[0].length << " format.n_planes = " << format.n_planes << " errorno = " << strerror(errno);
// Return the drained buffer to the driver's queue.
get_plane_min_length(context_.fd, 0);
int iiRet = ioctl(context_.fd, VIDIOC_QBUF, &buf);
log_.LogError() << "dbg_dmabuf:: VIDIOC_QBUF iiRet = " << iiRet << " ||| buf.index = " << buf.index << " errorno = " << strerror(errno);
// static int i = 0;
// while(iiRet == -1)
// {
// errno = 0;
// iiRet = ioctl(context_.fd, VIDIOC_QBUF, &buf);
// //i++;
// log_.LogError() << "dbg_dmabuf:: 5 VIDIOC_QBUF buffer.length = " << buffer.length << " format.n_planes = " << format.n_planes << " errorno = " << strerror(errno);
// }
if (iiRet)
{
log_.LogError() << "QBUF buffer " << index << "Failed: " << strerror(errno);
return false;
}
return true;
}
bool VideoOutputV4l2Encode::V4L2EncodeFrm1(int8_t i,int8_t *data1, int8_t *data2, uint32_t rect_end_x, uint32_t rect_end_y,vx_image &input)
{
    // Queue the first frame(s) into OUTPUT buffer index `i`.
    // data1/data2 are the luma/chroma planes (used only in the memcpy build);
    // in the DEBUG_DMABUF build the dma-buf fd is taken from `input` instead.
    uint32_t pitch[4];
    int fd[4];
    long unsigned int size[4];
    unsigned int offset[4];
    struct v4l2_buffer buf;
    context_.outbuffers[context_.encode_times].length = rect_end_x * rect_end_y;
    log_.LogError() << "dbg_dmabuf:: V4L2EncodeFrm1 begin ------------------------------------start i = " << i;
    memset(&buf, 0, sizeof(buf));
    buf.index = i;
    unsigned int size_1 = rect_end_x * rect_end_y;      // luma plane bytes
    unsigned int size_2 = rect_end_x * rect_end_y / 2;  // chroma plane bytes (NV12)
#ifndef DEBUG_DMABUF
    // Copy path: stage both planes into the mmap'ed output buffer.
    memcpy(context_.outbuffers[buf.index].mapped, data1, size_1);
    memcpy((void *)((uint8_t *)context_.outbuffers[buf.index].mapped + size_1), data2, size_2);
#else
    (void)data1;
    (void)data2;
    (void)size_1;
    (void)size_2;
    // Zero-copy path: extract the OpenVX image's dma-buf fd and total size.
    // BUG FIX: the getImageDmaFd result was previously ignored.
    if (getImageDmaFd((vx_reference)input, fd, pitch, size, offset, 2) < 0)
    {
        log_.LogError() << "dbg_dmabuf:: V4L2EncodeFrm1 getImageDmaFd failed";
        return false;
    }
    context_.outbuffers[buf.index].dbuf_fd = fd[0];
    context_.outbuffers[buf.index].length = size[0] + size[1];
    log_.LogError() << "dbg_dmabuf:: V4L2EncodeFrm1 fd1 = " << fd[0] << ",,,size[0] + size[1] = " << size[0] + size[1] ;
#endif
    // BUG FIX: propagate the queue result instead of returning true
    // unconditionally (the original computed `ret` and discarded it).
    bool ret = V4L2QueueBuffer(buf.index, context_.outbuffers[buf.index], context_.out_format);
    log_.LogError() << "dbg_dmabuf:: V4L2EncodeFrm1 end ------------------------------------start i = " << i;
    return ret;
}
bool VideoOutputV4l2Encode::V4L2EncodeFrm2(int8_t *data1, int8_t *data2, uint32_t rect_end_x, uint32_t rect_end_y,vx_image &input)
{
    // Steady-state encode step: dequeue a finished OUTPUT buffer from the
    // encoder (VIDIOC_DQBUF) and immediately re-queue it with the next
    // frame's data.
    uint32_t pitch[4];
    int fd[4];
    long unsigned int size[4];
    unsigned int offset[4];
    struct v4l2_buffer buf;
    struct v4l2_plane buf_planes[MAX_PLANES];
    context_.outbuffers[context_.encode_times].length = rect_end_x * rect_end_y;
    log_.LogError() << "V4L2EncodeFrm2 start context_.encode_times = " << context_.encode_times << " |||context_.n_outbuffers = " << context_.n_outbuffers;
    memset(&buf, 0, sizeof(buf));
    memset(&buf_planes, 0, sizeof(buf_planes));
    buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    buf.memory = context_.out_format.memory;
    buf.m.planes = buf_planes;
    buf.length = context_.out_format.n_planes;
    int iiRet = ioctl(context_.fd, VIDIOC_DQBUF, &buf);
    log_.LogError() << "dbg_dmabuf:: VIDIOC_DQBUF iiRet = " << iiRet << " ||| buf.index = " << buf.index << " errorno = " << strerror(errno);
    // BUG FIX: on DQBUF failure buf.index still holds the memset value (0),
    // so the original re-queued the wrong buffer. Bail out instead.
    if (iiRet < 0)
    {
        return false;
    }
    unsigned int size_1 = rect_end_x * rect_end_y;      // luma plane bytes
    unsigned int size_2 = rect_end_x * rect_end_y / 2;  // chroma plane bytes (NV12)
#ifndef DEBUG_DMABUF
    // Copy path: stage both planes into the mmap'ed output buffer.
    memcpy(context_.outbuffers[buf.index].mapped, data1, size_1);
    memcpy((void *)((uint8_t *)context_.outbuffers[buf.index].mapped + size_1), data2, size_2);
#else
    (void)data1;
    (void)data2;
    (void)size_1;
    (void)size_2;
    // Zero-copy path: extract the OpenVX image's dma-buf fd and total size.
    // BUG FIX: the getImageDmaFd result was previously ignored.
    if (getImageDmaFd((vx_reference)input, fd, pitch, size, offset, 2) < 0)
    {
        log_.LogError() << "dbg_dmabuf:: V4L2EncodeFrm2 getImageDmaFd failed";
        return false;
    }
    context_.outbuffers[buf.index].dbuf_fd = fd[0];
    context_.outbuffers[buf.index].length = size[0] + size[1];
    log_.LogError() << "dbg_dmabuf:: V4L2EncodeFrm2 fd1 = " << fd[0] << ",,,size[0] + size[1] = " << size[0] + size[1] ;
#endif
    // BUG FIX: propagate the queue result instead of returning true blindly.
    return V4L2QueueBuffer(buf.index, context_.outbuffers[buf.index], context_.out_format);
}
bool VideoOutputV4l2Encode::V4L2EncodeStart()
{
    // Prime the CAPTURE queue with every buffer, then start streaming on
    // both sides of the encoder (OUTPUT first, then CAPTURE).
    log_.LogError() << "OUTPUT V4L2EncodeStart context_.n_capbuffers = " << context_.n_capbuffers;
    for (int32_t idx = 0; idx < context_.n_capbuffers; ++idx)
    {
        const bool queued = V4L2QueueBuffer(idx, context_.capbuffers[idx], context_.cap_format);
        // Queue failures are logged but tolerated; streaming may still start.
        if (!queued)
        {
            log_.LogError() << "V4L2QueueBuffer Failed " ;
        }
    }
    // Start streaming on the OUTPUT (raw frame) queue.
    int32_t stream_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    if (ioctl(context_.fd, VIDIOC_STREAMON, &stream_type))
    {
        log_.LogError() << "dbg_dmabuf:: OUTPUT VIDIOC_STREAMON Failed: " << strerror(errno);
        return false;
    }
    log_.LogError() << "OUTPUT VIDIOC_STREAMON 2";
    // Start streaming on the CAPTURE (encoded bitstream) queue.
    stream_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    if (ioctl(context_.fd, VIDIOC_STREAMON, &stream_type))
    {
        log_.LogError() << "dbg_dmabuf::CAPTURE VIDIOC_STREAMON Failed " << strerror(errno);
        return false;
    }
    log_.LogError() << "dbg_dmabuf:: CAPTURE VIDIOC_STREAMON succeeded 11111111";
    return true;
}
bool VideoOutputV4l2Encode::V4L2CreateMmapBuffer(struct tienc_buffer *b, enum v4l2_buf_type type, uint32_t index, struct tienc_format format)
{
    // Query one driver-allocated buffer (VIDIOC_QUERYBUF) and mmap its first
    // plane into this process. On success fills b->mapped/offset/length.
    struct v4l2_buffer buf;
    struct v4l2_plane buf_planes[MAX_PLANES];
    memset(&buf, 0, sizeof(buf));
    // BUG FIX: the plane array handed to the kernel was uninitialized; V4L2
    // requires the reserved fields of v4l2_plane to be zeroed.
    memset(&buf_planes, 0, sizeof(buf_planes));
    buf.type = type;
    buf.index = index;
    buf.m.planes = buf_planes;
    buf.length = 1;
    std::string type_str = (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) ? ("CAPTURE") : ("OUTPUT");
    // Ask the driver for this buffer's mmap offset and length.
    if (ioctl(context_.fd, VIDIOC_QUERYBUF, &buf))
    {
        log_.LogError() << "VIDIOC_QUERYBUF" << type_str << index << "Failed: " << strerror(errno);
        return false;
    }
    log_.LogInfo() << "QUERYBUF" << type_str << ": buffer" << index << "length (planes) = " << buf.length << "length =" << buf.m.planes[0].length << "offset =" << buf.m.planes[0].data_offset;
    if ((type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) && (buf.m.planes[0].length < format.size))
    {
        // FIX: this path returns false, so log it as an error (was LogInfo).
        log_.LogError() << "Buffer length " << buf.m.planes[0].length << "less than required " << format.size;
        return false;
    }
    b->mapped = mmap(NULL, buf.m.planes[0].length, PROT_READ | PROT_WRITE, MAP_SHARED, context_.fd, buf.m.planes[0].m.mem_offset);
    b->offset = buf.m.planes[0].m.mem_offset;
    b->length = buf.m.planes[0].length;
    log_.LogInfo() << "After mmap, buffers[" << index << "].mapped = " << b->mapped;
    if (MAP_FAILED == b->mapped)
    {
        // BUG FIX: normalize to NULL so V4L2DeallocBufs' NULL check skips this
        // entry instead of calling munmap(MAP_FAILED, ...).
        b->mapped = NULL;
        return false;
    }
    return true;
}
void VideoOutputV4l2Encode::initV4l2Buf()
{
for(int i = 0;i<image_queue_size;i++)
{
image_array_[i] = vxCreateImage(gAppObj111.context, enc_pool_.width, enc_pool_.height, VX_DF_IMAGE_NV12);
image_map_id_array_[i] = -1;
}
for(int i = 0;i<image_queue_size;i++)
{
vx_rectangle_t rect;
vx_imagepatch_addressing_t image_addr;
uint8_t * data_ptr1 = nullptr;
rect.start_x = 0;
rect.start_y = 0;
rect.end_x = context_.width;
rect.end_y = context_.height;
vx_status status = VX_SUCCESS;
if(image_map_id_array_[i] != -1)
{
vx_status status = VX_SUCCESS;
status = vxUnmapImagePatch(image_array_[i], image_map_id_array_[i]);
if(status != VX_SUCCESS)
{
log_.LogError() << "DEBUG MEMORY vxUnmapImagePatch Error status = " << status;
}
else
{
log_.LogInfo() << "DEBUG MEMORY vxUnmapImagePatch Success image_map_id_array_[i] = " << image_map_id_array_[i];
}
}
status = vxMapImagePatch(image_array_[i], &rect, 0, &image_map_id_array_[i], &image_addr, (void **)&data_ptr1, VX_WRITE_ONLY, VX_MEMORY_TYPE_HOST, VX_NOGAP_X);
}
}
}
}
If we set the image width & height to 1920*1080, the application throws the error shown below,
so for now we set it to 1920*1088.
The error log is: