sdk-hwV1.3/external/fast-user-adapter/rt_media/demo/demo_encoder.c

#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/types.h>
#include <pthread.h>
#include <unistd.h>
#include <sys/time.h>
#include <semaphore.h>
#include <signal.h>
#include <linux/videodev2.h>
#include <media/sunxi_camera_v2.h>
#define LOG_TAG "demo_video"
#include "AW_VideoInput_API.h"
#include "aw_util.h"
#define TEST_FAST_OUTPUT_NEW_STREAM 0
#define TEST_FLIP (0)
#define TEST_FORCE_I_FRAME (0)
#define OUT_PUT_FILE_PREFIX "/tmp/stream0_encoder"
//#define OUT_PUT_FILE_PREFIX_jpg "/tmp/jpg_0.jpg"
//#define OUT_PUT_FILE_PREFIX_mjpeg "/tmp/jpg_0.mjpeg"
static int max_bitstream_count = SAVE_BITSTREAM_COUNT;
FILE *out_file_0 = NULL;
static int stream_count_0 = 0;
static sem_t finish_sem;
static int video_finish_flag = 0;
demo_video_param mparam;
static uint64_t pre_pts = 0;
static int save_file_cnt = 0;
static int need_save_new_file = 0;
static int pre_pts_in_seconds = 0;
static int cb_stream_cnt_in_seconds = 0;
static RTVencRegionD3DResult g_RegionD3DResult;
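/*
 * Stream callback registered with AWVideoInput_CallBack():
 *  - logs every 15th frame and the per-second callback rate;
 *  - rotates the output file every 450 frames, switching on the next keyframe;
 *  - writes SPS/PPS (when provided) followed by all stream slices;
 *  - renames the first file so its name carries the real extension and timestamp;
 *  - polls the RegionD3D static-region result when its buffer was allocated;
 *  - posts finish_sem once max_bitstream_count frames have been written.
 */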
void video_stream_cb(const AWVideoInput_StreamInfo* stream_info)
{
int keyframe = stream_info->keyframe_flag;
if (stream_info->size0 == 0 || stream_info->data0 == NULL) {
aw_logd("stream data error: data = %p, len = %d",stream_info->data0, stream_info->size0);
return ;
}
if(stream_count_0 < max_bitstream_count)
{
uint64_t cur_time = get_cur_time_us();
uint64_t pts = stream_info->pts;
uint64_t pts_in_seconds = pts/1000/1000;
char file_name[128] = {0};
char new_file_name[128] = {0};
if(stream_count_0%15 == 0)
{
aw_logd("*data = %p, len = %d, cnt = %d, kf = %d, pts = %llu us, %llu s; diff = %llu ms, cur_time = %llu us, diff = %llu ms\n",
stream_info->data0, stream_info->size0, stream_count_0, keyframe,
pts, pts/1000000, (pts - pre_pts)/1000, cur_time, (cur_time - pts)/1000);
}
if(pts_in_seconds != pre_pts_in_seconds)
{
aw_logd("get video stream, fps = %d", cb_stream_cnt_in_seconds);
pre_pts_in_seconds = pts_in_seconds;
cb_stream_cnt_in_seconds = 0;
}
cb_stream_cnt_in_seconds++;
pre_pts = pts;
if(stream_count_0%450 == 0)
need_save_new_file = 1;
if((need_save_new_file == 1 && keyframe == 1)
|| out_file_0 == NULL)
{
save_file_cnt++;
sprintf(file_name, "%s_%d.h264", mparam.OutputFilePath, save_file_cnt);
aw_logw("save new file, cnt = %d, file_name = %s", stream_count_0, file_name);
need_save_new_file = 0;
if(out_file_0)
{
fclose(out_file_0);
}
out_file_0 = fopen(file_name, "wb");
}
if(out_file_0) {
if (stream_info->b_insert_sps_pps && stream_info->sps_pps_size && stream_info->sps_pps_buf)
fwrite(stream_info->sps_pps_buf, 1, stream_info->sps_pps_size, out_file_0);
fwrite(stream_info->data0, 1, stream_info->size0, out_file_0);
if (stream_info->size1)
fwrite(stream_info->data1, 1, stream_info->size1, out_file_0);
if (stream_info->size2)
fwrite(stream_info->data2, 1, stream_info->size2, out_file_0);
}
if(stream_count_0 == 0)
{
if (mparam.c0_encoder_format == 1) {
if (mparam.jpg_mode)
{
sprintf(new_file_name, "%s.mjpeg", mparam.OutputFilePath);
rename(file_name, new_file_name);
}
else
{
sprintf(new_file_name, "%s.jpg", mparam.OutputFilePath);
rename(file_name, new_file_name);
}
} else {
uint64_t time = get_cur_time_us();
/* led_fwrite("/sys/class/gpio_sw/PF6/data", "0"); */
aw_logw("route 0 save first stream buffer done, time: %lld.%lldms ", time/1000, time%1000);
if (mparam.c0_encoder_format == 0)
sprintf(new_file_name, "%s_%d_%lld.%lldms.h264", mparam.OutputFilePath, save_file_cnt, time/1000, time%1000);
else
sprintf(new_file_name, "%s_%d_%lld.%lldms.h265", mparam.OutputFilePath, save_file_cnt, time/1000, time%1000);
rename(file_name, new_file_name);
}
}
if (g_RegionD3DResult.region)
{
int channelId_0 = 0;
int ret = AWVideoInput_GetRegionD3DResult(channelId_0, &g_RegionD3DResult);
if (0 == ret)
{
int really_static_num = 0;
int possible_static_num = 0;
int i = 0;
for(i=0; i<g_RegionD3DResult.total_region_num; i++)
{
if(g_RegionD3DResult.region[i].is_really_static)
{
aw_logv("region_%d:[(%d,%d),(%d,%d)]", i,
g_RegionD3DResult.region[i].pix_x_bgn, g_RegionD3DResult.region[i].pix_y_bgn,
g_RegionD3DResult.region[i].pix_x_end, g_RegionD3DResult.region[i].pix_y_end);
really_static_num++;
}
if(g_RegionD3DResult.region[i].is_possible_static)
{
aw_logv("region_%d:[(%d,%d),(%d,%d)]", i,
g_RegionD3DResult.region[i].pix_x_bgn, g_RegionD3DResult.region[i].pix_y_bgn,
g_RegionD3DResult.region[i].pix_x_end, g_RegionD3DResult.region[i].pix_y_end);
possible_static_num++;
}
}
aw_logd("detect RegionD3D: total region:%d, static region:%d, static really:%d possible:%d", g_RegionD3DResult.total_region_num,
g_RegionD3DResult.static_region_num, really_static_num, possible_static_num);
}
}
}
else
{
aw_logd("already arrive max_bitstream_count %d", max_bitstream_count);
return ;
}
stream_count_0++;
if(stream_count_0 >= max_bitstream_count && !video_finish_flag)
{
video_finish_flag = 1;
sem_post(&finish_sem);
}
}
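/*
 * Per-channel event callback registered with AWVideoInput_SetChannelCallback().
 * The demo only logs the events; a real application would, for example, forward
 * the drop-frame request to the other recording channel (see the commented hint).
 */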
static int ChnCbImpl_AWVideoInputCallback(void *pAppData, int channel, AWVideoInput_EventType event, int nData1, int nData2, void *pEventData)
{
int result = 0;
switch(event)
{
case AWVideoInput_Event_DropFrame:
{
int nDropNum = nData1;
aw_logd("recorder[%d] receives dropFrameNum[%d] event, need control other recorder to drop frame!", channel, nDropNum);
//int nOtherRecordChannel = <other record channel id>;
//AWVideoInput_DropFrame(nOtherRecordChannel, nDropNum);
result = 0;
break;
}
case AWVideoInput_Event_StreamReady:
{
aw_logd("need implement to replace Video_Input_cb");
break;
}
case AWVideoInput_Event_WaitErrorExit:
{
aw_logd("need implement to replace Channel_Thread_exit");
break;
}
default:
{
aw_loge("fatal error! unknown event:%d", event);
result = -1;
break;
}
}
return result;
}
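/* Pause the channel, reset its input/output buffers, request an I frame, then restart. */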
static void reset_buffer_and_set_I_frame(int channel_id)
{
AWVideoInput_Start(channel_id, 0);
AWVideoInput_ResetInOutBuffer(channel_id);
AWVideoInput_SetIFrame(channel_id);
AWVideoInput_Start(channel_id, 1);
}
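/*
 * Demo values for the target-bits clip parameters: disable the default table
 * (dis_default_para = 1) and provide GOP bit-ratio thresholds plus coefficient
 * ranges used by rate control.
 */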
static void init_venc_target_bits_clip_param(RT_VENC_TARGET_BITS_CLIP_PARAM *target_bits_clip_param)
{
target_bits_clip_param->dis_default_para = 1;
target_bits_clip_param->mode = 0;
target_bits_clip_param->en_gop_clip = 0;
target_bits_clip_param->gop_bit_ratio_th[0] = 0.58;
target_bits_clip_param->gop_bit_ratio_th[1] = 0.64;
target_bits_clip_param->gop_bit_ratio_th[2] = 1.25;
target_bits_clip_param->coef_th[0][0] = -0.5;
target_bits_clip_param->coef_th[0][1] = -0.2;
target_bits_clip_param->coef_th[1][0] = -0.3;
target_bits_clip_param->coef_th[1][1] = -0.1;
target_bits_clip_param->coef_th[2][0] = -0.3;
target_bits_clip_param->coef_th[2][1] = 0;
target_bits_clip_param->coef_th[3][0] = -0.5;
target_bits_clip_param->coef_th[3][1] = 0.1;
target_bits_clip_param->coef_th[4][0] = 0.4;
target_bits_clip_param->coef_th[4][1] = 0.7;
}
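/* Tear down: de-initialize the video input, destroy the finish semaphore,
 * close the output file and release the RegionD3D result buffer. */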
static void exit_demo()
{
AWVideoInput_DeInit();
aw_logd("exit: deInit end\n");
sem_destroy(&finish_sem);
if(out_file_0)
{
fclose(out_file_0);
out_file_0 = NULL;
}
if(g_RegionD3DResult.region)
{
free(g_RegionD3DResult.region);
g_RegionD3DResult.region = NULL;
}
aw_logd("aw_demo, finish!\n");
return ;
}
int main(int argc, char** argv)
{
aw_logw("encoder test start, time: %lld\n", get_cur_time_us());
int i = 0;
int ret = 0;
int channelId_0 = 0;
stream_count_0 = 0;
video_finish_flag = 0;
save_file_cnt = 0;
need_save_new_file = 0;
memset(&mparam, 0, sizeof(demo_video_param));
mparam.c0_encoder_format = -1;
mparam.pixelformat = RT_PIXEL_NUM;
mparam.use_vipp_num = 0;
strcpy(mparam.OutputFilePath, OUT_PUT_FILE_PREFIX);
/******** begin parse the config paramter ********/
if(argc >= 2)
{
aw_logd("******************************\n");
for(i = 1; i < (int)argc; i += 2)
{
ParseArgument(&mparam, argv[i], argv[i + 1]);
}
aw_logd("******************************\n");
}
else
{
aw_logd(" we need more arguments \n");
PrintDemoUsage();
}
check_param(&mparam);
if(mparam.encoder_num > 0)
max_bitstream_count = mparam.encoder_num;
else
max_bitstream_count = SAVE_BITSTREAM_COUNT;
channelId_0 = mparam.use_vipp_num;
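/*
 * Channel-0 encoder configuration: 15 fps, GOP of 30, vbr disabled, QP range
 * [35, 51]; sizes, pixel format and encoder type come from the parsed
 * command-line parameters.
 */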
VideoInputConfig config_0;
memset(&config_0, 0, sizeof(VideoInputConfig));
config_0.channelId = channelId_0;
config_0.fps = 15;
config_0.gop = 30;
config_0.vbr = 0;
config_0.qp_range.i_min_qp = 35;
config_0.qp_range.i_max_qp = 51;
config_0.qp_range.p_min_qp = 35;
config_0.qp_range.p_max_qp = 51;
config_0.qp_range.i_init_qp = 35;
config_0.pixelformat = mparam.pixelformat;
config_0.enable_sharp = mparam.enable_sharp;
config_0.en_16_align_fill_data = 0;
config_0.bonline_channel = mparam.bonline_channel;
config_0.share_buf_num = mparam.share_buf_num;
config_0.breduce_refrecmem = 1;
config_0.venc_video_signal.video_format = RT_DEFAULT;
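// Map the V4L2 colorspace to the encoder's video signal description
// (full/partial range flag and source/destination colour primaries).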
switch(mparam.color_space)
{
case V4L2_COLORSPACE_JPEG:
{
config_0.venc_video_signal.full_range_flag = 1;
config_0.venc_video_signal.src_colour_primaries = RT_VENC_YCC;
config_0.venc_video_signal.dst_colour_primaries = RT_VENC_YCC;
break;
}
case V4L2_COLORSPACE_REC709:
{
config_0.venc_video_signal.full_range_flag = 1;
config_0.venc_video_signal.src_colour_primaries = RT_VENC_BT709;
config_0.venc_video_signal.dst_colour_primaries = RT_VENC_BT709;
break;
}
case V4L2_COLORSPACE_REC709_PART_RANGE:
{
config_0.venc_video_signal.full_range_flag = 0;
config_0.venc_video_signal.src_colour_primaries = RT_VENC_BT709;
config_0.venc_video_signal.dst_colour_primaries = RT_VENC_BT709;
break;
}
default:
{
config_0.venc_video_signal.full_range_flag = 1;
config_0.venc_video_signal.src_colour_primaries = RT_VENC_BT709;
config_0.venc_video_signal.dst_colour_primaries = RT_VENC_BT709;
break;
}
}
config_0.output_mode = OUTPUT_MODE_STREAM;
config_0.width = mparam.c0_src_w;
config_0.height = mparam.c0_src_h;
config_0.dst_width = mparam.c0_dst_w;
config_0.dst_height = mparam.c0_dst_h;
config_0.bitrate = mparam.c0_bitrate/1024; //* kb
config_0.encodeType = mparam.c0_encoder_format;
config_0.drop_frame_num = 0;
config_0.enable_wdr = 0;
if (config_0.encodeType == 1) {//jpg encode
config_0.jpg_quality = 80;
if (mparam.encoder_num > 1) {//mjpg
config_0.jpg_mode = mparam.jpg_mode = 1;
config_0.bitrate = 12*1024;//kb
config_0.bit_rate_range.bitRateMin = 10*1024;//kb
config_0.bit_rate_range.bitRateMax = 14*1024;//kb
}
}
else if(config_0.encodeType == 0) //h264
{
config_0.profile = AW_Video_H264ProfileMain;
config_0.level = AW_Video_H264Level51;
}
else if(config_0.encodeType == 2) //h265
{
config_0.profile = AW_Video_H265ProfileMain;
config_0.level = AW_Video_H265LevelDefault;
}
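/* Bring up the video-input framework and configure the channel; the per-feature
 * tuning calls below are applied once the channel has been configured. */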
AWVideoInput_Init();
sem_init(&finish_sem, 0, 0);
if(AWVideoInput_Configure(channelId_0, &config_0))
{
aw_loge("config err, exit!");
goto _exit;
}
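// Reference-frame LBC mode for H.264/H.265 (RT_LBC_MODE_1_5X, presumably 1.5x
// reference frame-buffer compression).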
if (0 == config_0.encodeType || 2 == config_0.encodeType)
{
RTeVeLbcMode rec_lbc_mode = RT_LBC_MODE_1_5X;
aw_logd("rec_lbc_mode:%d", rec_lbc_mode);
AWVideoInput_SetRecRefLbcMode(channelId_0, rec_lbc_mode);
}
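// Enable the weak-texture threshold for H.264/H.265; weak_text_th is in [0, 100].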
if (0 == config_0.encodeType || 2 == config_0.encodeType)
{
RTVenWeakTextTh stWeakTextureTh;
memset(&stWeakTextureTh, 0, sizeof(RTVenWeakTextTh));
stWeakTextureTh.en_weak_text_th = 1;
stWeakTextureTh.weak_text_th = 40; // [0,100]
aw_logd("en_weak_text_th:%d, weak_text_th:%.2f", stWeakTextureTh.en_weak_text_th, stWeakTextureTh.weak_text_th);
AWVideoInput_SetWeakTextTh(channelId_0, &stWeakTextureTh);
}
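// Region-based 3D denoise (RegionD3D): split the picture into hor_region_num x
// ver_region_num regions and set the motion/static detection thresholds (valid
// ranges noted inline). A result buffer covering every region is allocated so
// the stream callback can poll the static-region statistics.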
if (0 == config_0.encodeType || 2 == config_0.encodeType)
{
RTVencRegionD3DParam stRegionD3DParam;
memset(&stRegionD3DParam, 0, sizeof(RTVencRegionD3DParam));
stRegionD3DParam.en_region_d3d = 1;
stRegionD3DParam.dis_default_para = 1;
stRegionD3DParam.hor_region_num = 15; // [2,dst_pic_width/128]
stRegionD3DParam.ver_region_num = 8; // [2,dst_pic_height/128]
stRegionD3DParam.hor_expand_num = 1; // [0, hor_region_num]
stRegionD3DParam.ver_expand_num = 1; // [0, ver_region_num]
stRegionD3DParam.motion_coef[0] = 13; // [min_coef, motion_coef[1]]
stRegionD3DParam.motion_coef[1] = 14; // [min_coef, motion_coef[2]]
stRegionD3DParam.motion_coef[2] = 15; // [min_coef, motion_coef[3]]
stRegionD3DParam.motion_coef[3] = 16; // [min_coef, max_coef]
stRegionD3DParam.zero_mv_rate_th[0] = 95; // [0, zero_mv_rate_th[1]]
stRegionD3DParam.zero_mv_rate_th[1] = 96; // [0, zero_mv_rate_th[2]]
stRegionD3DParam.zero_mv_rate_th[2] = 98; // [0, 100]
stRegionD3DParam.static_coef[0] = 5; // [min_coef, static_coef[1]]
stRegionD3DParam.static_coef[1] = 6; // [min_coef, static_coef[2]]
stRegionD3DParam.static_coef[2] = 7; // [min_coef, motion_coef[1]]
stRegionD3DParam.chroma_offset = 1; // [0,16]
aw_logd("en_region_d3d:%d", stRegionD3DParam.en_region_d3d);
AWVideoInput_SetRegionD3DParam(channelId_0, &stRegionD3DParam);
memset(&g_RegionD3DResult, 0, sizeof(RTVencRegionD3DResult));
unsigned int size = stRegionD3DParam.hor_region_num * stRegionD3DParam.ver_region_num * sizeof(RTVencRegionD3DRegion);
if (0 < size)
{
g_RegionD3DResult.region = (RTVencRegionD3DRegion *)malloc(size);
if (NULL == g_RegionD3DResult.region)
{
aw_loge("fatal error! malloc region failed! size=%d", size);
}
else
{
memset(g_RegionD3DResult.region, 0, size);
}
aw_logd("enable RegionD3DResult, and malloc region size %d", size);
}
else
{
aw_loge("fatal error! invalid size=%d, hor_region_num:%d, ver_region_num:%d", size, stRegionD3DParam.hor_region_num, stRegionD3DParam.ver_region_num);
}
}
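// Chroma QP offset relative to luma, valid range [-12, 12].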
if (0 == config_0.encodeType || 2 == config_0.encodeType)
{
int nChromaQpOffset = 6; // [-12,12]
aw_logd("nChromaQpOffset:%d", nChromaQpOffset);
AWVideoInput_SetChromaQPOffset(channelId_0, nChromaQpOffset);
}
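// H.264 constraint_set flags carried in the SPS profile indication (H.264 only).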
if (0 == config_0.encodeType)
{
RTVencH264ConstraintFlag stConstraintFlag;
memset(&stConstraintFlag, 0, sizeof(RTVencH264ConstraintFlag));
stConstraintFlag.constraint_0 = 1; // [0, 1]
stConstraintFlag.constraint_1 = 1; // [0, 1]
stConstraintFlag.constraint_2 = 1; // [0, 1]
stConstraintFlag.constraint_3 = 0; // [0, 1]
stConstraintFlag.constraint_4 = 0; // [0, 1]
stConstraintFlag.constraint_5 = 0; // [0, 1]
aw_logd("constraint:%d %d %d %d %d %d", stConstraintFlag.constraint_0, stConstraintFlag.constraint_1,
stConstraintFlag.constraint_2, stConstraintFlag.constraint_3, stConstraintFlag.constraint_4, stConstraintFlag.constraint_5);
AWVideoInput_SetH264ConstraintFlag(channelId_0, &stConstraintFlag);
}
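// VE-to-ISP D2D limit levels, each in [1, 1024]; presumably bounds the ISP 2D
// denoise strength driven by the encoder.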
if (0 == config_0.encodeType || 2 == config_0.encodeType)
{
RTVencVe2IspD2DLimit stD2DLimit;
memset(&stD2DLimit, 0, sizeof(RTVencVe2IspD2DLimit));
stD2DLimit.en_d2d_limit = 1;
stD2DLimit.d2d_level[0] = 768; // [1, 1024]
stD2DLimit.d2d_level[1] = 640; // [1, 1024]
stD2DLimit.d2d_level[2] = 512; // [1, 1024]
stD2DLimit.d2d_level[3] = 448; // [1, 1024]
stD2DLimit.d2d_level[4] = 384; // [1, 1024]
stD2DLimit.d2d_level[5] = 320; // [1, 1024]
aw_logd("en_d2d_limit:%d, d2d_level:%d %d %d %d %d %d", stD2DLimit.en_d2d_limit,
stD2DLimit.d2d_level[0], stD2DLimit.d2d_level[1], stD2DLimit.d2d_level[2],
stD2DLimit.d2d_level[3], stD2DLimit.d2d_level[4], stD2DLimit.d2d_level[5]);
AWVideoInput_SetVe2IspD2DLimit(channelId_0, &stD2DLimit);
}
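// Apply the demo's target-bits clip parameters (see init_venc_target_bits_clip_param()).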
if (0 == config_0.encodeType || 2 == config_0.encodeType)
{
RT_VENC_TARGET_BITS_CLIP_PARAM venc_target_bits_clip_param;
memset(&venc_target_bits_clip_param, 0, sizeof(RT_VENC_TARGET_BITS_CLIP_PARAM));
init_venc_target_bits_clip_param(&venc_target_bits_clip_param);
AWVideoInput_SetVencTargetBitsClipParam(channelId_0, &venc_target_bits_clip_param);
}
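// Register the stream callback and the channel event callback, then start capture/encode.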
AWVideoInput_CallBack(channelId_0, video_stream_cb, 1);
AWVideoInput_SetChannelCallback(channelId_0, ChnCbImpl_AWVideoInputCallback, NULL);
AWVideoInput_Start(channelId_0, 1);
#if TEST_FAST_OUTPUT_NEW_STREAM
int test_cnt = 0;
while(test_cnt < 30)
{
test_cnt++;
if(test_cnt%5 == 0)
{
aw_logw("channel%d %ds reset stream buffer", channelId_0, test_cnt);
reset_buffer_and_set_I_frame(channelId_0);
// switch a new file after force set i frame
need_save_new_file = 1;
}
usleep(1000*1000);
}
#endif
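// Optional compile-time self-tests: flip the picture and force an I frame while
// the channel is running.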
if(TEST_FLIP != 0)
{
sleep(2);
AWVideoInput_State eState = AWVideoInput_Get_channel_state(channelId_0);
if(VIDEO_INPUT_STATE_EXCUTING == eState || VIDEO_INPUT_STATE_PAUSE == eState)
{
aw_logd("hflip!");
AWVideoInput_SetHFlip(channelId_0, 1);
sleep(2);
aw_logd("vflip!");
AWVideoInput_SetVFlip(channelId_0, 1);
sleep(2);
aw_logd("revert hflip!");
AWVideoInput_SetHFlip(channelId_0, 0);
sleep(2);
aw_logd("revert vflip!");
AWVideoInput_SetVFlip(channelId_0, 0);
}
else
{
aw_loge("fatal error! state[%d] is wrong!", eState);
}
}
if(TEST_FORCE_I_FRAME != 0)
{
sleep(5);
AWVideoInput_State eState = AWVideoInput_Get_channel_state(channelId_0);
if(VIDEO_INPUT_STATE_EXCUTING == eState || VIDEO_INPUT_STATE_PAUSE == eState)
{
aw_logd("force I frame!");
ret = AWVideoInput_SetIFrame(channelId_0);
if(ret != 0)
{
aw_loge("fatal error! force I frame fail[%d]", ret);
}
}
else
{
aw_loge("fatal error! state[%d] is wrong!", eState);
}
}
//* wait for finish
sem_wait(&finish_sem);
AWVideoInput_Start(mparam.use_vipp_num, 0);
_exit:
exit_demo();
return 0;
}