sdk-hwV1.3/external/fast-user-adapter/rt_media/demo/demo_gdc.c
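/*
 * demo_gdc.c: capture from a VIPP channel with GDC (geometric distortion
 * correction) enabled, encode the frames, and save the resulting H.264
 * bitstream to rolling files under /tmp.
 */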

#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/types.h>
#include <pthread.h>
#include <unistd.h>
#include <sys/time.h>
#include <semaphore.h>
#include <signal.h>
#define LOG_TAG "demo_video"
#include "AW_VideoInput_API.h"
#include "aw_util.h"
#define OUT_PUT_FILE_PREFIX "/tmp/stream0_gdc"
static int max_bitstream_count = SAVE_BITSTREAM_COUNT;
FILE *out_file_0 = NULL;
static int stream_count_0 = 0;
static sem_t finish_sem;
static int video_finish_flag = 0;
demo_video_param mparam;
static uint64_t pre_pts = 0;
static int save_file_cnt = 0;
static int need_save_new_file = 0;
static int pre_pts_in_seconds = 0;
static int cb_stream_cnt_in_seconds = 0;
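/*
 * Bitstream callback: invoked for every encoded frame. Writes SPS/PPS (when
 * requested) and the frame payload to the current output file, rolls over to
 * a new file every 450 frames at the next keyframe, and logs the callback
 * frame rate once per PTS second.
 */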
void video_stream_cb(const AWVideoInput_StreamInfo* stream_info)
{
    int keyframe = stream_info->keyframe_flag;

    if (stream_info->size0 == 0 || stream_info->data0 == NULL) {
        aw_logd("stream data error: data = %p, len = %d", stream_info->data0, stream_info->size0);
        return;
    }

    if (stream_count_0 < max_bitstream_count) {
        uint64_t cur_time = get_cur_time_us();
        uint64_t pts = stream_info->pts;
        uint64_t pts_in_seconds = pts / 1000 / 1000;
        char file_name[128] = {0};
        char new_file_name[128] = {0};

        if (stream_count_0 % 15 == 0) {
            aw_logd("*data = %p, len = %d, cnt = %d, kf = %d, pts = %llu us, %llu s; diff = %llu ms, cur_time = %llu us, diff = %llu ms\n",
                    stream_info->data0, stream_info->size0, stream_count_0, keyframe,
                    pts, pts/1000000, (pts - pre_pts)/1000, cur_time, (cur_time - pts)/1000);
        }

        /* report the callback frame rate once per pts second */
        if (pts_in_seconds != pre_pts_in_seconds) {
            aw_logd("get video stream, fps = %d", cb_stream_cnt_in_seconds);
            pre_pts_in_seconds = pts_in_seconds;
            cb_stream_cnt_in_seconds = 0;
        }
        cb_stream_cnt_in_seconds++;
        pre_pts = pts;

        /* start a new output file every 450 frames, switching at the next keyframe */
        if (stream_count_0 % 450 == 0)
            need_save_new_file = 1;

        if ((need_save_new_file == 1 && keyframe == 1) || out_file_0 == NULL) {
            save_file_cnt++;
            snprintf(file_name, sizeof(file_name), "%s_%d.h264", mparam.OutputFilePath, save_file_cnt);
            aw_logd("save new file, cnt = %d, file_name = %s", stream_count_0, file_name);
            need_save_new_file = 0;
            if (out_file_0)
                fclose(out_file_0);
            out_file_0 = fopen(file_name, "wb");
        }

        if (out_file_0) {
            if (stream_info->b_insert_sps_pps && stream_info->sps_pps_size && stream_info->sps_pps_buf)
                fwrite(stream_info->sps_pps_buf, 1, stream_info->sps_pps_size, out_file_0);
            fwrite(stream_info->data0, 1, stream_info->size0, out_file_0);
            if (stream_info->size1)
                fwrite(stream_info->data1, 1, stream_info->size1, out_file_0);
            if (stream_info->size2)
                fwrite(stream_info->data2, 1, stream_info->size2, out_file_0);
        }

        if (stream_count_0 == 0) {
            uint64_t time = get_cur_time_us();
            /* led_fwrite("/sys/class/gpio_sw/PF6/data", "0"); */
            aw_logw("route 0 save first stream buffer done, time: %llu.%llums ", time/1000, time%1000);
            snprintf(new_file_name, sizeof(new_file_name), "%s_%d_%llu.%llums.h264",
                     mparam.OutputFilePath, save_file_cnt, time/1000, time%1000);
            rename(file_name, new_file_name);
        }
    } else {
        aw_logd("already reached max_bitstream_count %d", max_bitstream_count);
        return;
    }

    stream_count_0++;
    if (stream_count_0 >= max_bitstream_count && !video_finish_flag) {
        video_finish_flag = 1;
        sem_post(&finish_sem);
    }
}
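/*
 * Fill the GDC parameters with a sample LDC (lens distortion correction)
 * configuration: wall mount, fisheye distortion model, and calibration data
 * for a 3264x2448 sensor.
 */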
static int initGdcFunc(RTsGdcParam *pGdcParam)
{
    pGdcParam->bGDC_en = 1;
    pGdcParam->eWarpMode = RT_Gdc_Warp_LDC;
    pGdcParam->eMountMode = RT_Gdc_Mount_Wall;
    pGdcParam->bMirror = 0;

    /* calibration of the source lens/sensor */
    pGdcParam->calib_widht = 3264;
    pGdcParam->calib_height = 2448;
    pGdcParam->fx = 2417.19;
    pGdcParam->fy = 2408.43;
    pGdcParam->cx = 1631.50;
    pGdcParam->cy = 1223.50;
    pGdcParam->fx_scale = 2161.82;
    pGdcParam->fy_scale = 2153.99;
    pGdcParam->cx_scale = 1631.50;
    pGdcParam->cy_scale = 1223.50;

    /* lens distortion model and coefficients */
    pGdcParam->eLensDistModel = RT_Gdc_DistModel_FishEye;
    pGdcParam->distCoef_wide_ra[0] = -0.3849;
    pGdcParam->distCoef_wide_ra[1] = 0.1567;
    pGdcParam->distCoef_wide_ra[2] = -0.0030;
    pGdcParam->distCoef_wide_ta[0] = -0.00005;
    pGdcParam->distCoef_wide_ta[1] = 0.0016;
    pGdcParam->distCoef_fish_k[0] = -0.0024;
    pGdcParam->distCoef_fish_k[1] = 0.141;
    pGdcParam->distCoef_fish_k[2] = -0.3;
    pGdcParam->distCoef_fish_k[3] = 0.2328;

    /* view adjustment */
    pGdcParam->centerOffsetX = 0;
    pGdcParam->centerOffsetY = 0;
    pGdcParam->rotateAngle = 0;          //[0,360]
    pGdcParam->radialDistortCoef = 0;    //[-255,255]
    pGdcParam->trapezoidDistortCoef = 0; //[-255,255]
    pGdcParam->fanDistortCoef = 0;       //[-255,255]
    pGdcParam->pan = 0;                  //pano360:[0,360]; others:[-90,90]
    pGdcParam->tilt = 0;                 //[-90,90]
    pGdcParam->zoomH = 100;              //[0,100]
    pGdcParam->zoomV = 100;              //[0,100]
    pGdcParam->scale = 100;              //[0,100]
    pGdcParam->innerRadius = 0;          //[0,width/2]
    pGdcParam->roll = 0;                 //[-90,90]
    pGdcParam->pitch = 0;                //[-90,90]
    pGdcParam->yaw = 0;                  //[-90,90]

    /* perspective projection (identity matrix = no extra warp) */
    pGdcParam->perspFunc = RT_Gdc_Persp_Only;
    pGdcParam->perspectiveProjMat[0] = 1.0;
    pGdcParam->perspectiveProjMat[1] = 0.0;
    pGdcParam->perspectiveProjMat[2] = 0.0;
    pGdcParam->perspectiveProjMat[3] = 0.0;
    pGdcParam->perspectiveProjMat[4] = 1.0;
    pGdcParam->perspectiveProjMat[5] = 0.0;
    pGdcParam->perspectiveProjMat[6] = 0.0;
    pGdcParam->perspectiveProjMat[7] = 0.0;
    pGdcParam->perspectiveProjMat[8] = 1.0;

    /* mounting geometry and ROI distances */
    pGdcParam->mountHeight = 0.85;    //meters
    pGdcParam->roiDist_ahead = 4.5;   //meters
    pGdcParam->roiDist_left = -1.5;   //meters
    pGdcParam->roiDist_right = 1.5;   //meters
    pGdcParam->roiDist_bottom = 0.65; //meters

    /* peaking (post-correction sharpening) */
    pGdcParam->peaking_en = 1;            //0/1
    pGdcParam->peaking_clamp = 1;         //0/1
    pGdcParam->peak_m = 16;               //[0,63]
    pGdcParam->th_strong_edge = 6;        //[0,15]
    pGdcParam->peak_weights_strength = 2; //[0,15]

    if (pGdcParam->eWarpMode == RT_Gdc_Warp_LDC) {
        pGdcParam->birdsImg_width = 768;
        pGdcParam->birdsImg_height = 1080;
    }
    return 0;
}
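/* Release the video-input module, the finish semaphore and the output file. */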
static void exit_demo(void)
{
    AWVideoInput_DeInit();
    aw_logd("exit: deInit end\n");
    sem_destroy(&finish_sem);
    if (out_file_0) {
        fclose(out_file_0);
        out_file_0 = NULL;
    }
    aw_logd("aw_demo, finish!\n");
}
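/*
 * Demo flow: parse the command-line parameters, configure encoder channel 0
 * (H.264 settings, with a JPEG/MJPEG branch when encodeType == 1), optionally
 * apply the GDC parameters, register the stream callback, start capturing and
 * wait until max_bitstream_count frames have been saved.
 */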
int main(int argc, char** argv)
{
    aw_logw("gdc test start, time: %llu\n", get_cur_time_us());
    pthread_t thread = 0;
    int i = 0;
    int ret = 0;
    int channelId_0 = 0;

    stream_count_0 = 0;
    video_finish_flag = 0;
    save_file_cnt = 0;
    need_save_new_file = 0;

    /* default demo parameters; overridden by the command line below */
    memset(&mparam, 0, sizeof(demo_video_param));
    mparam.c0_encoder_format = -1;
    mparam.pixelformat = RT_PIXEL_NUM;
    mparam.use_vipp_num = 0;
    strcpy(mparam.OutputFilePath, OUT_PUT_FILE_PREFIX);
    mparam.share_buf_num = 2;

    /******** begin parsing the config parameters ********/
    if (argc >= 2) {
        aw_logd("******************************\n");
        for (i = 1; i < (int)argc; i += 2) {
            ParseArgument(&mparam, argv[i], argv[i + 1]);
        }
        aw_logd("******************************\n");
    } else {
        aw_logd("we need more arguments\n");
        PrintDemoUsage();
    }
    check_param(&mparam);

    if (mparam.encoder_num > 0)
        max_bitstream_count = mparam.encoder_num;
    else
        max_bitstream_count = SAVE_BITSTREAM_COUNT;
    channelId_0 = mparam.use_vipp_num;

    /* encoder channel configuration */
    VideoInputConfig config_0;
    memset(&config_0, 0, sizeof(VideoInputConfig));
    config_0.channelId = channelId_0;
    config_0.fps = 15;
    config_0.gop = 30;
    config_0.vbr = 0;
    config_0.qp_range.i_min_qp = 35;
    config_0.qp_range.i_max_qp = 51;
    config_0.qp_range.p_min_qp = 35;
    config_0.qp_range.p_max_qp = 51;
    config_0.qp_range.i_init_qp = 35;
    config_0.profile = AW_Video_H264ProfileMain;
    config_0.level = AW_Video_H264Level51;
    config_0.pixelformat = mparam.pixelformat;
    config_0.enable_sharp = mparam.enable_sharp;
    config_0.bonline_channel = mparam.bonline_channel;
    config_0.share_buf_num = mparam.share_buf_num;
    config_0.breduce_refrecmem = 1;
    config_0.venc_video_signal.video_format = RT_DEFAULT;
    config_0.venc_video_signal.full_range_flag = 1;
    config_0.venc_video_signal.src_colour_primaries = RT_VENC_BT709;
    config_0.venc_video_signal.dst_colour_primaries = RT_VENC_BT709;
    config_0.output_mode = OUTPUT_MODE_STREAM;
    config_0.width = mparam.c0_src_w;
    config_0.height = mparam.c0_src_h;
    config_0.dst_width = mparam.c0_dst_w;
    config_0.dst_height = mparam.c0_dst_h;
    config_0.bitrate = mparam.c0_bitrate/1024; //* kb
    config_0.encodeType = mparam.c0_encoder_format;
    config_0.drop_frame_num = 0;
    config_0.enable_wdr = 0;
    if (config_0.encodeType == 1) { //* jpg encode
        config_0.jpg_quality = 80;
        if (mparam.encoder_num > 1) { //* mjpg
            config_0.jpg_mode = mparam.jpg_mode = 1;
            config_0.bitrate = 12*1024;                   //* kb
            config_0.bit_rate_range.bitRateMin = 10*1024; //* kb
            config_0.bit_rate_range.bitRateMax = 14*1024; //* kb
        }
    }

    AWVideoInput_Init();
    sem_init(&finish_sem, 0, 0);

    if (AWVideoInput_Configure(channelId_0, &config_0)) {
        aw_loge("config err, exit!");
        goto _exit;
    }

    /* apply the GDC correction parameters before starting the channel */
    if (mparam.enable_gdc) {
        RTsGdcParam mGdcParam;
        memset(&mGdcParam, 0, sizeof(RTsGdcParam));
        initGdcFunc(&mGdcParam);
        aw_logd("eMountMode %d peak_weights_strength %d peak_m %d %d", mGdcParam.eMountMode,
                mGdcParam.peak_weights_strength, mGdcParam.peak_m, mGdcParam.th_strong_edge);
        aw_logd("calib_widht %d calib_height %d birdsImg_width %d %d", mGdcParam.calib_widht,
                mGdcParam.calib_height, mGdcParam.birdsImg_width, mGdcParam.birdsImg_height);
        AWVideoInput_SetGdc(channelId_0, &mGdcParam);
    }

    AWVideoInput_CallBack(channelId_0, video_stream_cb, 1);
    AWVideoInput_Start(channelId_0, 1);

    if (thread != 0)
        pthread_join(thread, NULL);

    //* wait for finish
    sem_wait(&finish_sem);
    AWVideoInput_Start(mparam.use_vipp_num, 0);

_exit:
    exit_demo();
    return 0;
}