sdk-hwV1.3/external/fast-user-adapter/rt_media/demo/demo_isp_api.c

#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <pthread.h>
#include <unistd.h>
#include <sys/time.h>
#include <semaphore.h>
#include <signal.h>
#define LOG_TAG "demo_video"
#include "AW_VideoInput_API.h"
#include "aw_util.h"
#include <media/rt-media/uapi_rt_media.h>
#define DROP_STREAM_DATA (0)
#define GET_ISP_ATTR_CFG (0)
#define OUT_PUT_FILE_PREFIX "/tmp/stream0"
#define OUT_PUT_FILE_PREFIX_jpg "/tmp/jpg_0.jpg"
#define OUT_PUT_FILE_PREFIX_mjpeg "/tmp/jpg_0.mjpeg"
#define OUT_PUT_FILE_PREFIX_1 "/tmp/stream1"
#define OUT_PUT_FILE_0_1 "/mnt/extsd/stream0-1.h264"
static int max_bitstream_count = SAVE_BITSTREAM_COUNT;
FILE *out_file_0 = NULL;
FILE *out_file_1 = NULL;
static int stream_count_0 = 0;
static int stream_count_1 = 0;
static int64_t time_start = 0;
static int64_t time_end = 0;
static int64_t time_aw_start = 0;
static sem_t finish_sem;
static int video_finish_flag = 0;
pthread_t ispDebugThreadId;
static int IspDebugThreadCreate(pthread_t *threadId);
demo_video_param mparam;
static uint64_t pre_pts = 0;
static int save_file_cnt = 0;
static int need_save_new_file = 0;
static int pre_pts_in_seconds = 0;
static int cb_stream_cnt_in_seconds = 0;
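/*
 * Bitstream callback for encode channel 0. Each encoded frame may arrive in up
 * to three segments (data0/1/2); when b_insert_sps_pps is set, the SPS/PPS
 * header is written in front of the frame data. The output file is rotated
 * roughly every 450 frames, waiting for the next keyframe so every file starts
 * with an IDR frame, and frames are counted per PTS-second to log the
 * effective encode fps.
 */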
void video_stream_cb(const AWVideoInput_StreamInfo* stream_info)
{
int keyframe = stream_info->keyframe_flag;
if (stream_info->size0 == 0 || stream_info->data0 == NULL) {
aw_logd("stream data error: data = %p, len = %d",stream_info->data0, stream_info->size0);
return ;
}
if(stream_count_0 < max_bitstream_count)
{
uint64_t cur_time = get_cur_time_us();
uint64_t pts = stream_info->pts;
uint64_t pts_in_seconds = pts/1000/1000;
char file_name[128] = {0};
char new_file_name[128] = {0};
if(stream_count_0%15 == 0)
{
aw_logd("*data = %p, len = %d, cnt = %d, kf = %d, pts = %llu us, %llu s; diff = %llu ms, cur_time = %llu us, diff = %llu ms\n",
stream_info->data0, stream_info->size0, stream_count_0, keyframe,
pts, pts/1000000, (pts - pre_pts)/1000, cur_time, (cur_time - pts)/1000);
}
if(pts_in_seconds != pre_pts_in_seconds)
{
aw_logd("get video stream, fps = %d", cb_stream_cnt_in_seconds);
pre_pts_in_seconds = pts_in_seconds;
cb_stream_cnt_in_seconds = 0;
}
cb_stream_cnt_in_seconds++;
pre_pts = pts;
time_end = get_cur_time_us();
if(stream_count_0 == 0)
{
aw_logd("time of first stream: = %lld\n",time_end - time_aw_start);
}
//if((time_end - time_start) > 30000)
//if(stream_count_0%200 == 0)
if(stream_count_0 == 0)
{
aw_logd("save first bitstream[0]: count = %d, time = %.3f s\n",
stream_count_0,
((float)(time_end - time_start)/1000/1000));
}
/*
if((time_end - time_start) > 30000)
{
aw_logd("big time [0]: data = %p, len = %d, count = %d, time = %lld\n",
data, len, stream_count_0,
(time_end - time_start));
}
*/
time_start = get_cur_time_us();
#if (DROP_STREAM_DATA == 0)
if(stream_count_0%450 == 0)
need_save_new_file = 1;
if((need_save_new_file == 1 && keyframe == 1)
|| out_file_0 == NULL)
{
save_file_cnt++;
if (mparam.c0_encoder_format == 0)
sprintf(file_name, "%s_%d.h264", OUT_PUT_FILE_PREFIX, save_file_cnt);
else
sprintf(file_name, "%s_%d.h265", OUT_PUT_FILE_PREFIX, save_file_cnt);
aw_logd("save new file, cnt = %d, file_name = %s", stream_count_0, file_name);
need_save_new_file = 0;
if(out_file_0)
{
fclose(out_file_0);
}
out_file_0 = fopen(file_name, "wb");
}
if(out_file_0) {
if (stream_info->b_insert_sps_pps && stream_info->sps_pps_size && stream_info->sps_pps_buf)
fwrite(stream_info->sps_pps_buf, 1, stream_info->sps_pps_size, out_file_0);
fwrite(stream_info->data0, 1, stream_info->size0, out_file_0);
if (stream_info->size1)
fwrite(stream_info->data1, 1, stream_info->size1, out_file_0);
if (stream_info->size2)
fwrite(stream_info->data2, 1, stream_info->size2, out_file_0);
}
if(stream_count_0 == 0)
{
if (mparam.c0_encoder_format == 1) {
if (mparam.jpg_mode)
rename(file_name, OUT_PUT_FILE_PREFIX_mjpeg);
else
rename(file_name, OUT_PUT_FILE_PREFIX_jpg);
} else {
uint64_t time = get_cur_time_us();
aw_logw("route 0 save first stream buffer done, time: %lld.%lldms ", time/1000, time%1000);
if (mparam.c0_encoder_format == 0)
sprintf(new_file_name, "%s_%d_%lld.%lldms.h264", OUT_PUT_FILE_PREFIX, save_file_cnt, time/1000, time%1000);
else
sprintf(new_file_name, "%s_%d_%lld.%lldms.h265", OUT_PUT_FILE_PREFIX, save_file_cnt, time/1000, time%1000);
rename(file_name, new_file_name);
}
}
#endif
}
else
{
aw_logd("already arrive max_bitstream_count %d", max_bitstream_count);
return ;
}
stream_count_0++;
if(stream_count_0 >= max_bitstream_count && !video_finish_flag)
{
video_finish_flag = 1;
sem_post(&finish_sem);
}
}
static int save_file_cnt_1 = 0;
static int need_save_new_file_1 = 0;
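/*
 * Bitstream callback for the second encode route. Note that it is gated on
 * stream_count_0 (channel 0's counter), so both routes stop saving once the
 * primary channel has delivered max_bitstream_count frames.
 */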
void video_stream_cb_1(const AWVideoInput_StreamInfo* stream_info)
{
int keyframe = stream_info->keyframe_flag;
if (stream_info->size0 == 0 || stream_info->data0 == NULL) {
aw_logd("stream data error: data = %p, len = %d",stream_info->data0, stream_info->size0);
return ;
}
if(stream_count_0 < max_bitstream_count)
{
//if(stream_count_1%200 == 0)
//    aw_logd("save bitstream[1]: data = %p, len = %d, count = %d\n",
//        data, len, stream_count_1);
char file_name[128] = {0};
char new_file_name[128] = {0};
#if (DROP_STREAM_DATA == 0)
if(stream_count_1%450 == 0)
need_save_new_file_1 = 1;
if((need_save_new_file_1 == 1 && keyframe == 1)
|| out_file_1 == NULL)
{
save_file_cnt_1++;
sprintf(file_name, "%s_%d.h264", OUT_PUT_FILE_PREFIX_1, save_file_cnt_1);
aw_logd("save new file, cnt = %d, file_name = %s", stream_count_1, file_name);
need_save_new_file_1 = 0;
if(out_file_1)
{
fclose(out_file_1);
}
out_file_1 = fopen(file_name, "wb");
}
if(out_file_1) {
if (stream_info->b_insert_sps_pps && stream_info->sps_pps_size && stream_info->sps_pps_buf)
fwrite(stream_info->sps_pps_buf, 1, stream_info->sps_pps_size, out_file_1);
fwrite(stream_info->data0, 1, stream_info->size0, out_file_1);
if (stream_info->size1)
fwrite(stream_info->data1, 1, stream_info->size1, out_file_1);
if (stream_info->size2)
fwrite(stream_info->data2, 1, stream_info->size2, out_file_1);
}
if(stream_count_1 == 0)
{
uint64_t time = get_cur_time_us();
aw_logw("route 1 save first stream buffer done, time: %lld.%lldms ", time/1000, time%1000);
sprintf(new_file_name, "%s_%d_%lld.%lldms.h264", OUT_PUT_FILE_PREFIX_1, save_file_cnt_1, time/1000, time%1000);
rename(file_name, new_file_name);
}
#endif
stream_count_1++;
}
else
{
aw_logd("already arrive max_bitstream_count %d", max_bitstream_count);
return ;
}
}
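/*
 * When encode_local_yuv is set, this feeds raw frames from InputFileName to
 * the encoder: request an empty YUV frame, fill it from the file (rewinding
 * at EOF so the clip loops), then submit it back. The loop runs until the
 * stream callback sets video_finish_flag.
 */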
static int setParam(demo_video_param* mparam, int channel_id, VideoInputConfig* pConfig)
{
int ret = 0;
if(mparam->encode_local_yuv)
{
FILE * in_yuv_fp = fopen(mparam->InputFileName, "rb");
aw_logd("fopen %s w&h = %d, %d", mparam->InputFileName, pConfig->width, pConfig->height);
if(in_yuv_fp == NULL)
{
aw_loge("fopen failed: %s", mparam->InputFileName);
}
else
{
int yuv_size = pConfig->width*pConfig->height*3/2;
VideoYuvFrame mYuvFrame;
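/*
 * For the LBC (compressed) pixel format the input buffer is not a plain
 * width*height*3/2 frame: the line strides are derived from the 32-pixel
 * aligned width, the bit depth and the even/odd compression ratios, rounded
 * up to 512-bit units, and the total size is (y_stride + yc_stride) over
 * half the picture height.
 */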
if(pConfig->pixelformat == RT_PIXEL_LBC_25X)
{
int y_stride = 0;
int yc_stride = 0;
int bit_depth = 8;
int com_ratio_even = 0;
int com_ratio_odd = 0;
int pic_width_32align = (pConfig->width + 31) & ~31;
int pic_height = pConfig->height;
unsigned int lbc_ext_size = 0;
com_ratio_even = 440;
com_ratio_odd = 380;
y_stride = ((com_ratio_even * pic_width_32align * bit_depth / 1000 +511) & (~511)) >> 3;
yc_stride = ((com_ratio_odd * pic_width_32align * bit_depth / 500 + 511) & (~511)) >> 3;
int total_stream_len = (y_stride + yc_stride) * pic_height / 2;
//* add more 1KB to fix ve-lbc-error
//lbc_ext_size = 1*1024;
total_stream_len += lbc_ext_size;
aw_logd("LBC in buf:com_ratio: %d, %d, w32alin = %d, pic_height = %d, \
y_s = %d, yc_s = %d, total_len = %d,\n",
com_ratio_even, com_ratio_odd,
pic_width_32align, pic_height,
y_stride, yc_stride, total_stream_len);
yuv_size = total_stream_len;
}
aw_logd("* yuv_size = %d, w&h = %d, %d", yuv_size, pConfig->width, pConfig->height);
while (1)
{
if(video_finish_flag == 1)
break;
memset(&mYuvFrame, 0, sizeof(VideoYuvFrame));
ret = AWVideoInput_RequestEmptyYuvFrame(channel_id, &mYuvFrame);
if(ret == 0)
{
ret = fread(mYuvFrame.virAddr[0], 1, yuv_size, in_yuv_fp);
aw_logd("vir_addr = %p, ret = %d, yuv_size = %d",
mYuvFrame.virAddr[0], ret, yuv_size);
if(ret != yuv_size)
{
//* reached end of input: rewind and read the frame again so the clip loops
fseek(in_yuv_fp, 0, SEEK_SET);
ret = fread(mYuvFrame.virAddr[0], 1, yuv_size, in_yuv_fp);
}
AWVideoInput_SubmitFilledYuvFrame(channel_id, &mYuvFrame, yuv_size);
}
else {
usleep(50*1000);
}
}
fclose(in_yuv_fp);
}
}
return 0;
}
static void exit_demo()
{
AWVideoInput_DeInit();
aw_logd("exit: deInit end\n");
sem_destroy(&finish_sem);
if(out_file_0)
{
fclose(out_file_0);
out_file_0 = NULL;
}
if(out_file_1)
{
fclose(out_file_1);
out_file_1 = NULL;
}
aw_logw("aw_demo, finish!\n");
return ;
}
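/* Default colour description for the encoded stream: full-range BT.709. */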
static void init_color_space(RTVencVideoSignal *pvenc_video_signal)
{
pvenc_video_signal->video_format = RT_DEFAULT;
pvenc_video_signal->full_range_flag = 1;
pvenc_video_signal->src_colour_primaries = RT_VENC_BT709;
pvenc_video_signal->dst_colour_primaries = RT_VENC_BT709;
return ;
}
void handler(int sig)
{
aw_logw("rev sig=%d\n", sig);
if (SIGSEGV == sig) {
exit_demo();
_exit(1);
}
}
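/*
 * Called once per second by the ISP debug thread: every third call it toggles
 * RT_ISP_CTRL_IR_STATUS between 1 and 0 through AWVideoInput_SetIspAttrCfg,
 * and with GET_ISP_ATTR_CFG enabled it also reads back the AE EV adjustment
 * and the IR AWB gains.
 */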
static int Isp_DebugFunc(int IspDevId)
{
static int time_s_cnt = 0;
RTIspCtrlAttr isp_ctrl_attr;
memset(&isp_ctrl_attr, 0, sizeof(RTIspCtrlAttr));
int ir_mode[2] = {1, 0};
static int mode_idx = 0;
if(++time_s_cnt >= 3) {
time_s_cnt = 0;
aw_logd("SetIspAttrCfg : RT_ISP_CTRL_IR_STATUS = %d\n", ir_mode[mode_idx]);
isp_ctrl_attr.isp_attr_cfg.cfg_id = RT_ISP_CTRL_IR_STATUS;
isp_ctrl_attr.isp_attr_cfg.ir_status = ir_mode[mode_idx];
AWVideoInput_SetIspAttrCfg(IspDevId, &isp_ctrl_attr);
if (++mode_idx > 1) {
mode_idx = 0;
}
#if GET_ISP_ATTR_CFG
isp_ctrl_attr.isp_attr_cfg.cfg_id = RT_ISP_CTRL_AE_EV_LV_ADJ;
AWVideoInput_GetIspAttrCfg(0, &isp_ctrl_attr);
isp_ctrl_attr.isp_attr_cfg.cfg_id = RT_ISP_CTRL_IR_AWB_GAIN;
AWVideoInput_GetIspAttrCfg(0, &isp_ctrl_attr);
aw_logd("ev_lv_adj = %d, rgain = %d, bgain = %d\n",
isp_ctrl_attr.isp_attr_cfg.ae_ev_lv_adj,
isp_ctrl_attr.isp_attr_cfg.awb_ir_gain.awb_rgain_ir,
isp_ctrl_attr.isp_attr_cfg.awb_ir_gain.awb_bgain_ir);
#endif
}
return 0;
}
void* Isp_DebugThread(void *arg)
{
int isp_dev = 0;
while(1)
{
Isp_DebugFunc(isp_dev);
sleep(1);//1s
}
return NULL;
}
static int IspDebugThreadCreate(pthread_t *threadId)
{
int ret;
ret = pthread_create(threadId, NULL, Isp_DebugThread, NULL);
if (ret != 0) {
aw_loge("error: IspDebugThread could not be created");
return -1;
} else {
aw_logd("IspDebugThreadCreate create ok");
}
return 0;
}
int main(int argc, char** argv)
{
aw_logw("aw_demo start, OUT_PUT_FILE_PREFIX = %s time: %lld clock_gettime: %lld\n", OUT_PUT_FILE_PREFIX, get_timeofday(), get_cur_time_us());
pthread_t thread = 0;
int i = 0;
int ret = 0;
int channelId_0 = 0;
int channelId_1 = 4;
int channelId_2 = 8;
stream_count_1 = 0;
stream_count_0 = 0;
video_finish_flag = 0;
save_file_cnt = 0;
need_save_new_file = 0;
save_file_cnt_1 = 0;
need_save_new_file_1 = 0;
mparam.c0_encoder_format = -1;
mparam.c1_encoder_format = -1;
mparam.pixelformat = RT_PIXEL_NUM;
mparam.pixelformat_1 = RT_PIXEL_NUM;
mparam.pixelformat_2 = RT_PIXEL_NUM;
mparam.use_vipp_num = 0;
mparam.use_vipp_num_1 = 1;
mparam.use_vipp_num_2 = 8;
mparam.jpg_width = 640;
mparam.jpg_heigh = 360;
mparam.share_buf_num = 2;
mparam.enable_sharp = 1;
/******** begin parsing the config parameters ********/
if(argc >= 2)
{
aw_logd("******************************\n");
for(i = 1; i < (int)argc; i += 2)
{
ParseArgument(&mparam, argv[i], argv[i + 1]);
}
aw_logd("******************************\n");
}
else
{
aw_logd(" we need more arguments \n");
PrintDemoUsage();
//return 0;
}
check_param(&mparam);
aw_logd("*demo param: c0: w&h = %d, %d; encode_format = %d, bitrate = %d",
mparam.c0_src_w, mparam.c0_src_h, mparam.c0_encoder_format, mparam.c0_bitrate);
aw_logd("*demo param: c1: w&h = %d, %d; encode_format = %d, bitrate = %d",
mparam.c1_src_w, mparam.c1_src_h, mparam.c1_encoder_format, mparam.c1_bitrate);
time_start = get_cur_time_us();
if(mparam.encoder_num > 0)
max_bitstream_count = mparam.encoder_num;
else
max_bitstream_count = SAVE_BITSTREAM_COUNT;
channelId_0 = mparam.use_vipp_num;
channelId_1 = mparam.use_vipp_num_1;
channelId_2 = mparam.use_vipp_num_2;
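/*
 * Encoder configuration for the primary channel: 15 fps, GOP of 30, constant
 * bitrate (vbr = 0) with a 35~51 QP range, H.264 Main profile at level 5.1.
 * Encoding a local YUV file switches the output mode and relaxes the QP range;
 * encodeType 1 selects JPEG, or MJPEG with a bitrate range when more than one
 * frame is requested.
 */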
VideoInputConfig config_0;
memset(&config_0, 0, sizeof(VideoInputConfig));
config_0.channelId = channelId_0;
config_0.fps = 15;
config_0.gop = 30;
config_0.vbr = 0;
config_0.qp_range.i_min_qp = 35;
config_0.qp_range.i_max_qp = 51;
config_0.qp_range.p_min_qp = 35;
config_0.qp_range.p_max_qp = 51;
config_0.qp_range.i_init_qp = 35;
config_0.profile = AW_Video_H264ProfileMain;
config_0.level = AW_Video_H264Level51;
config_0.pixelformat = mparam.pixelformat;
config_0.enable_sharp = mparam.enable_sharp;
config_0.bonline_channel = mparam.bonline_channel;
config_0.share_buf_num = mparam.share_buf_num;
config_0.en_16_align_fill_data = 0;
init_color_space(&config_0.venc_video_signal);
if(mparam.encode_local_yuv)
{
config_0.qp_range.i_min_qp = 20;
config_0.qp_range.i_max_qp = 45;
config_0.qp_range.p_min_qp = 20;
config_0.qp_range.p_max_qp = 45;
config_0.qp_range.i_init_qp = 20;
config_0.output_mode = OUTPUT_MODE_ENCODE_FILE_YUV;
mparam.en_second_channel = 0;
mparam.en_third_channel = 0;
}
else
{
config_0.output_mode = OUTPUT_MODE_STREAM;
}
config_0.width = mparam.c0_src_w;
config_0.height = mparam.c0_src_h;
config_0.dst_width = mparam.c0_dst_w;
config_0.dst_height = mparam.c0_dst_h;
config_0.bitrate = mparam.c0_bitrate/1024; //* kb
config_0.encodeType = mparam.c0_encoder_format;
config_0.drop_frame_num = 0;
config_0.enable_wdr = 0;
if (config_0.encodeType == 1) {//jpg encode
config_0.jpg_quality = 80;
if (mparam.encoder_num > 1) {//mjpg
config_0.jpg_mode = mparam.jpg_mode = 1;
config_0.bitrate = 12*1024;//kb
config_0.bit_rate_range.bitRateMin = 10*1024;//kb
config_0.bit_rate_range.bitRateMax = 14*1024;//kb
}
}
time_end = get_cur_time_us();
//aw_logd("init start, time = %lld, max_count = %d\n",time_end - time_start, max_bitstream_count);
struct sigaction act;
act.sa_handler = handler;
sigemptyset(&act.sa_mask);
act.sa_flags = 0;
if (sigaction(SIGTERM, &act, NULL) < 0)
aw_loge("install signal error\n");
if (sigaction(SIGINT, &act, NULL) < 0)
aw_loge("install signal error\n");
if (sigaction(SIGABRT, &act, NULL) < 0)
aw_loge("install signal error\n");
if (sigaction(SIGSEGV, &act, NULL) < 0)
aw_loge("install signal error\n");
AWVideoInput_Init();
sem_init(&finish_sem, 0, 0);
int64_t time_end1 = get_cur_time_us();
aw_logd("time of aw_init: %lld\n",time_end1 - time_end);
if(AWVideoInput_Configure(channelId_0, &config_0))
{
aw_loge("config err, exit!");
goto _exit;
}
int64_t time_end2 = get_cur_time_us();
aw_logd("time of aw_config: = %lld\n",time_end2 - time_end1);
AWVideoInput_CallBack(channelId_0, video_stream_cb, 1);
int64_t time_end3 = get_cur_time_us();
//aw_logd("callback end, time = %lld\n",time_end3 - time_end2);
memset(&ispDebugThreadId, 0, sizeof(pthread_t));
IspDebugThreadCreate(&ispDebugThreadId);
AWVideoInput_Start(channelId_0, 1);
int64_t time_end4 = get_cur_time_us();
aw_logd("time of aw_start: = %lld, total_time = %lld\n",
time_end4 - time_end3,
time_end4 - time_start);
time_aw_start = time_end4;
VideoChannelInfo mChannelInfo;
memset(&mChannelInfo, 0, sizeof(VideoChannelInfo));
AWVideoInput_GetChannelInfo(channelId_0, &mChannelInfo);
aw_logd("state = %d, w&h = %d,%d, bitrate = %d kbps, fps = %d, i_qp = %d~%d, p_qp = %d~%d",
mChannelInfo.state,
mChannelInfo.mConfig.width, mChannelInfo.mConfig.height,
mChannelInfo.mConfig.bitrate, mChannelInfo.mConfig.fps,
mChannelInfo.mConfig.qp_range.i_min_qp, mChannelInfo.mConfig.qp_range.i_max_qp,
mChannelInfo.mConfig.qp_range.p_min_qp, mChannelInfo.mConfig.qp_range.p_max_qp);
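/*
 * Optional second encode route: the same rate-control settings as channel 0,
 * but with its own resolution, bitrate, encode format and pixel format,
 * streaming through video_stream_cb_1 into /tmp/stream1_*.h264.
 */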
if (mparam.en_second_channel) {
VideoInputConfig config_1;
memset(&config_1, 0, sizeof(VideoInputConfig));
config_1.channelId = channelId_1;
config_1.fps = 15;
config_1.gop = 30;
config_1.vbr = 0;
config_1.qp_range.i_min_qp = 35;
config_1.qp_range.i_max_qp = 51;
config_1.qp_range.p_min_qp = 35;
config_1.qp_range.p_max_qp = 51;
config_1.qp_range.i_init_qp = 35;
config_1.profile = AW_Video_H264ProfileMain;
config_1.level = AW_Video_H264Level51;
config_1.output_mode = OUTPUT_MODE_STREAM;
config_1.width = mparam.c1_src_w;
config_1.height = mparam.c1_src_h;
config_1.bitrate = mparam.c1_bitrate/1024; //* kb
config_1.encodeType = mparam.c1_encoder_format;
config_1.pixelformat = mparam.pixelformat_1;
config_1.enable_sharp = 1;
init_color_space(&config_1.venc_video_signal);
AWVideoInput_Configure(channelId_1, &config_1);
AWVideoInput_CallBack(channelId_1, video_stream_cb_1, 1);
AWVideoInput_Start(channelId_1, 1);
}
setParam(&mparam, channelId_0, &config_0);
if(thread != 0)
pthread_join(thread, NULL);
//* wait for finish
sem_wait(&finish_sem);
#if 1//ENABLE_FIRST_CHANNEL
aw_logd("exit, stream_count_0 = %d\n", stream_count_0);
AWVideoInput_Start(mparam.use_vipp_num, 0);
#endif
if (mparam.en_second_channel) {
AWVideoInput_Start(mparam.use_vipp_num_1, 0);
}
if (mparam.en_third_channel) {
AWVideoInput_Start(mparam.use_vipp_num_2, 0);
}
_exit:
if (ispDebugThreadId != 0)
pthread_cancel(ispDebugThreadId);
exit_demo();
//isp_config_to_flash();
return 0;
}