/*
 * demo_video_in.c - rt_media video-input demo
 * (sdk-hwV1.3/external/fast-user-adapter/rt_media/demo)
 */
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <pthread.h>
#include <unistd.h>
#include <sys/time.h>
#include <semaphore.h>
#include <signal.h>
#define LOG_TAG "demo_video"
#include "AW_VideoInput_API.h"
#include "aw_util.h"
#include "awnn.h"
#include "rt_awaiisp_common.h"
#define RT_SUPPORT_AWAIISP (1)
#if RT_SUPPORT_AWAIISP
#define MAIN_AWAIISP_DEV_ID 0
#define MAIN_AWAIISP_CFG_BIN_PATH "/mnt/extsd/gc2053_aiisp"
#define MAIN_AWAIISP_LUT_NBG_FILE_PATH "/lib/gamma_1088.nb"
#define MAIN_AWAIISP_NBG_FILE_PATH "/lib/FW100A102W0H0V0GC2053.nb"
#define MAIN_AWAIISP_CFG_BIN_PATH2 "/mnt/extsd/gc2053_day"
#define AWAIISP_SWITCH_NORMAL_MODE AWAIISP_MODE_NORMAL_GAMMA
#define DAY_TO_NIGHT_SIGNAL_CNT 30
#define NIGHT_TO_DAY_SIGNAL_CNT 30
#define DAY_TO_NIGHT_THRESHOLD 60
#define NIGHT_TO_DAY_THRESHOLD 200
#endif
#if 1
#define NETWORK_HUMAN "/lib/1.1.0_Beta.nb"
#define NETWORK_HUMAN_POST_TYPE AWNN_DET_POST_HUMANOID_1
#define HUMAN_DET_WIDTH (320)
#define HUMAN_DET_HEIGHT (192)
#define HUMAN_VIPP_WIDTH (320)
#define HUMAN_VIPP_HEIGHT (180)
#define HUMAN_DET_THRESH (0.35)
#define HUMAN_DET_MEM_SIZE (1862796)
#else
#define NETWORK_HUMAN "/lib/3.0.1_Beta.nb"
#define NETWORK_HUMAN_POST_TYPE AWNN_DET_POST_HUMANOID_3
#define HUMAN_DET_WIDTH (384)
#define HUMAN_DET_HEIGHT (224)
#define HUMAN_VIPP_WIDTH (384)
#define HUMAN_VIPP_HEIGHT (216)
#define HUMAN_DET_THRESH (0.25)
#define HUMAN_DET_MEM_SIZE (2702920)
#endif
#define ENABLE_OSD_FUNCTION (0)
#define ENABLE_CATCH_JPEG (1)
#define ENABLE_GET_BIN_IMAGE_DATA (0)
#define ENABLE_GET_MV_INFO_DATA (0)
#define ENABLE_GET_LV_SET_IR (0)
#define TESET_ROI_FUNC 0
#define ENABLE_SET_HFLIP (0)
#define ENABLE_SET_VFLIP (0)
#define ENABLE_SET_PLF (0)
#define ENABLE_SET_I_FRAME (0)
#define ENABLE_RESET_ENCODER_TYPE (0)
#define ENABLE_RESET_SIZE (0)
#define ENABLE_SET_VBR_PARAM (0)
#define ENABLE_GET_SUM_MAD (0)
#define ENABLE_HUMAN_DETECT 0
#if ENABLE_HUMAN_DETECT
#define ENABLE_GET_MOTION_SEARCH_RESULT 1
#else
#define ENABLE_GET_MOTION_SEARCH_RESULT 0
#endif
#define MOTION_SEARCH_HOR_NUM 16
#define MOTION_SEARCH_VER_NUM 9
#define MOTION_SEARCH_TOTAL_NUM (MOTION_SEARCH_HOR_NUM * MOTION_SEARCH_VER_NUM)
#define ENABLE_DYNAMIC_SET_QP_AND_BITRATE_AND_FPS (0)
#define ENABLE_SET_SUPER_FRAME_PARAM (0)
#define DROP_STREAM_DATA (0)
#define TEST_CROP_FUNC 0
#define TEST_OUTSIDE_YUV 0
#define OUT_PUT_FILE_PREFIX "/tmp/stream0"
#define OUT_PUT_FILE_PREFIX_jpg "/tmp/jpg_0.jpg"
#define OUT_PUT_FILE_PREFIX_mjpeg "/tmp/jpg_0.mjpeg"
#define OUT_PUT_FILE_PREFIX_1 "/tmp/stream1"
#define OUT_PUT_FILE_0_1 "/mnt/extsd/stream0-1.h264"
#define OUT_PUT_FILE_2 "/mnt/extsd/data.yuv"
#define OUT_PUT_FILE_3 "/mnt/extsd/stream0.h265"
#define OUT_PUT_FILE_JPEG "/tmp/data.jpg"
#define OUT_PUT_FILE_BIN_IMAGE "/mnt/extsd/bin_image.data"
#define OUT_PUT_FILE_MV_INFO "/mnt/extsd/mv_info.data"
#define IN_YUV_FILE "/mnt/extsd/1920x1080_nv12.yuv"
//#define IN_OSD_ARGB_FILE "/mnt/extsd/01_argb_464x32_time.dat"
#define IN_OSD_ARGB_FILE "/mnt/extsd/fs_v853/01_argb_464x32_time.dat"
#define IN_OSD_ARGB_FILE_1 "/mnt/extsd/03_argb_864x160_time.argb"
#define ENABLE_ISP_REG_GET (0)
/* fix: dropped the stray trailing ';' -- it would be pasted into any
 * expression using this macro and break compilation. */
#define ISP_REG_SAVE_FILE "/tmp/isp_reg_save.h"
//#define ALIGN_XXB(y, x) (((x) + ((y)-1)) & ~((y)-1))
//#define OUT_PUT_FILE_0 "/tmp/stream.h264"
#define ENABLE_TEST_SET_CAMERA_MOVE_STATUS (0)
#define ENABLE_TEST_SET_CAMERA_ADAPTIVE_MOVING_AND_STATIC (0)
/* Stop after this many frames on route 0 (SAVE_BITSTREAM_COUNT from aw_util.h). */
static int max_bitstream_count = SAVE_BITSTREAM_COUNT;
/* Output files: route-0 stream, route-1 stream, raw YUV dump. */
FILE *out_file_0 = NULL;
FILE *out_file_1 = NULL;
FILE *out_file_2 = NULL;
/* Frames received so far per route (updated only on the callback threads). */
static int stream_count_0 = 0;
static int stream_count_1 = 0;
/* Timestamps (us) used to measure first-frame latency and inter-frame gaps. */
static int64_t time_start = 0;
static int64_t time_end = 0;
static int64_t time_aw_start = 0;
/* Posted when max_bitstream_count frames are saved; main thread waits on it. */
static sem_t finish_sem;
static int video_finish_flag = 0;
/* Parsed command-line configuration for the demo (filled in elsewhere). */
demo_video_param mparam;
/**
 * Write a string value to a sysfs-style control file, like `echo val > path`.
 *
 * @param _path    target file path (e.g. a gpio data node)
 * @param val_str  NUL-terminated value string to write
 * @return 0 on success, -EINVAL on open, short-write or flush failure
 */
static int led_fwrite(const char *_path, const char *val_str)
{
    FILE *stream = NULL;
    size_t len = 0;
    //aw_logd("led_write: path = %s, val = %s\n", _path, val_str);
    stream = fopen(_path, "w");
    if (!stream) {
        fprintf(stderr, "Cannot open:%s\n", _path);
        return -EINVAL;
    }
    len = strlen(val_str);
    if (len != fwrite(val_str, 1, len, stream)) {
        fprintf(stderr, "[err] %s --->fwrite size: %d\n",
            val_str, (int)len);
        fclose(stream);
        return -EINVAL;
    }
    fprintf(stdout, "echo %s > %s\n", val_str, _path);
    /* fix: fclose() flushes buffered data; a failure here means the value
     * may never have reached the device, so report it instead of
     * returning success. */
    if (fclose(stream) != 0) {
        fprintf(stderr, "[err] fclose %s failed\n", _path);
        return -EINVAL;
    }
    return 0;
}
/* Per-callback bookkeeping for route 0: previous pts, file-rotation state
 * and a frames-per-second counter reset once per pts second. */
static uint64_t pre_pts = 0;
static int save_file_cnt = 0;
static int need_save_new_file = 0;
static int pre_pts_in_seconds = 0;
static int cb_stream_cnt_in_seconds = 0;
/* NOTE(review): declared but not used anywhere in this visible chunk. */
static int thread_exit_flag = 0;
/* Wake whoever is blocked on finish_sem so the demo can shut down. */
void channel_thread_exit(void)
{
sem_post(&finish_sem);
}
/**
 * Bitstream callback for encode route 0.
 *
 * Logs frame statistics and measured fps, rotates the output file every
 * 450 frames (cutting only on a keyframe so each file starts decodable),
 * writes the stream (plus optional SPS/PPS header) to disk, renames the
 * first file to its final name, optionally exercises the demo ORL /
 * I-frame APIs, and posts finish_sem once max_bitstream_count frames
 * have been handled.
 *
 * NOTE(review): updates stream_count_0 / out_file_0 and friends without
 * locking -- assumes a single encoder callback thread.
 */
void video_stream_cb(const AWVideoInput_StreamInfo* stream_info)
{
    int keyframe = stream_info->keyframe_flag;
    if (stream_info->size0 == 0 || stream_info->data0 == NULL) {
        aw_logd("stream data error: data = %p, len = %d",stream_info->data0, stream_info->size0);
        return ;
    }
    if (stream_count_0 < max_bitstream_count)
    {
        uint64_t cur_time = get_cur_time_us();
        uint64_t pts = stream_info->pts;
        uint64_t pts_in_seconds = pts/1000/1000;
        /* fix: plain char buffers -- these are handed to snprintf()/fopen()/
         * rename(), which take char*, not unsigned char*; snprintf also
         * bounds the write (OutputFilePath length is caller-controlled). */
        char file_name[128] = {0};
        char new_file_name[128] = {0};
        if (stream_count_0%15 == 0)
        {
            aw_logd("*data = %p, len = %d, cnt = %d, kf = %d, pts = %llu us, %llu s; diff = %llu ms, cur_time = %llu us, diff = %llu ms\n",
                stream_info->data0, stream_info->size0, stream_count_0, keyframe,
                pts, pts/1000000, (pts - pre_pts)/1000, cur_time, (cur_time - pts)/1000);
        }
        /* once per second of pts time, report how many callbacks arrived */
        if (pts_in_seconds != pre_pts_in_seconds)
        {
            aw_logd("get video stream, fps = %d", cb_stream_cnt_in_seconds);
            pre_pts_in_seconds = pts_in_seconds;
            cb_stream_cnt_in_seconds = 0;
        }
        cb_stream_cnt_in_seconds++;
        pre_pts = pts;
        time_end = get_cur_time_us();
        if (stream_count_0 == 0)
        {
            aw_logd("time of first stream: = %lld\n",time_end - time_aw_start);
            /* led_fwrite("/sys/class/gpio_sw/PH11/data", "0"); */
        }
        if (stream_count_0 == 0)
        {
            aw_logd("save first bitstream[0]: count = %d, time = %.3f s\n",
                stream_count_0,
                ((float)(time_end - time_start)/1000/1000));
        }
        time_start = get_cur_time_us();
#if (DROP_STREAM_DATA == 0)
        /* rotate to a new output file every 450 frames */
        if (stream_count_0%450 == 0)
            need_save_new_file = 1;
        if ((need_save_new_file == 1 && keyframe == 1)
            || out_file_0 == NULL)
        {
            save_file_cnt++;
            if (mparam.c0_encoder_format == 0)
                snprintf(file_name, sizeof(file_name), "%s_%d.h264", mparam.OutputFilePath, save_file_cnt);
            else
                snprintf(file_name, sizeof(file_name), "%s_%d.h265", mparam.OutputFilePath, save_file_cnt);
            aw_logd("save new file, cnt = %d, file_name = %s", stream_count_0, file_name);
            need_save_new_file = 0;
            if (out_file_0)
            {
                fclose(out_file_0);
            }
            out_file_0 = fopen(file_name, "wb");
        }
        if (out_file_0) {
            if (stream_info->b_insert_sps_pps && stream_info->sps_pps_size && stream_info->sps_pps_buf)
                fwrite(stream_info->sps_pps_buf, 1, stream_info->sps_pps_size, out_file_0);
            fwrite(stream_info->data0, 1, stream_info->size0, out_file_0);
            if (stream_info->size1)
                fwrite(stream_info->data1, 1, stream_info->size1, out_file_0);
            if (stream_info->size2)
                fwrite(stream_info->data2, 1, stream_info->size2, out_file_0);
        }
        /* first frame: rename the file to its final name (jpg/mjpeg for the
         * jpeg encoder, timestamped h264/h265 otherwise) */
        if (stream_count_0 == 0)
        {
            if (mparam.c0_encoder_format == 1) {
                if (mparam.jpg_mode)
                {
                    snprintf(new_file_name, sizeof(new_file_name), "%s.mjpeg", mparam.OutputFilePath);
                    rename(file_name, new_file_name);
                }
                else
                {
                    snprintf(new_file_name, sizeof(new_file_name), "%s.jpg", mparam.OutputFilePath);
                    rename(file_name, new_file_name);
                }
            } else {
                uint64_t time = get_cur_time_us();
                /* led_fwrite("/sys/class/gpio_sw/PF6/data", "0"); */
                aw_logw("route 0 save first stream buffer done, time: %lld.%lldms ", time/1000, time%1000);
                if (mparam.c0_encoder_format == 0)
                    snprintf(new_file_name, sizeof(new_file_name), "%s_%d_%lld.%lldms.h264", mparam.OutputFilePath, save_file_cnt, time/1000, time%1000);
                else
                    snprintf(new_file_name, sizeof(new_file_name), "%s_%d_%lld.%lldms.h265", mparam.OutputFilePath, save_file_cnt, time/1000, time%1000);
                rename(file_name, new_file_name);
            }
        }
#endif
    }
    else
    {
        aw_logd("already arrive max_bitstream_count %d", max_bitstream_count);
        return ;
    }
#if ENABLE_SET_I_FRAME
    if (stream_count_0 == 2 || stream_count_0 == 10)
        AWVideoInput_SetIFrame(0);
#endif
#if 0
    /* reference code: ircut day/night switching every 50 frames */
    RTIrParam pIrParam;
    if (stream_count_0 == 0) {
        pIrParam.grey = 0;
        pIrParam.ir_on = 0;
        pIrParam.ir_flash_on = 0;
        AWVideoInput_SetIrParam(0, &pIrParam);
        aw_logd("********************%d:change to day mode\n", stream_count_0);
    } else if (stream_count_0 == 50) {
        pIrParam.grey = 1;
        pIrParam.ir_on = 1;
        pIrParam.ir_flash_on = 1;
        AWVideoInput_SetIrParam(0, &pIrParam);
        aw_logd("********************%d:change to night mode\n", stream_count_0);
    } else if (stream_count_0 == 100) {
        pIrParam.grey = 0;
        pIrParam.ir_on = 0;
        pIrParam.ir_flash_on = 0;
        AWVideoInput_SetIrParam(0, &pIrParam);
        aw_logd("********************%d:change to day mode\n", stream_count_0);
    } else if (stream_count_0 == 150) {
        pIrParam.grey = 1;
        pIrParam.ir_on = 1;
        pIrParam.ir_flash_on = 1;
        AWVideoInput_SetIrParam(0, &pIrParam);
        aw_logd("********************%d:change to night mode\n", stream_count_0);
    } else if (stream_count_0 == 200) {
        pIrParam.grey = 0;
        pIrParam.ir_on = 0;
        pIrParam.ir_flash_on = 0;
        AWVideoInput_SetIrParam(0, &pIrParam);
        aw_logd("********************%d:change to day mode\n", stream_count_0);
    }
    /* reference code: dump the 256-bin luma histogram once */
    int i, j = 0;
    unsigned int hist[256];
    memset(hist, 0, 256*sizeof(unsigned int));
    if (stream_count_0 == 50) {
        AWVideoInput_GetHist(0, hist);
        for (i = 0; i < 256; i++) {
            j++;
            printf("0x%8x, ", hist[i]);
            if (j%8 == 0)
                printf("\n");
        }
    }
    if (stream_count_0 == 30)
        AWVideoInput_SetAeMeteringMode(0, RT_AE_METERING_MODE_CENTER);
    RTIspCtrlAttr isp_ctrl_attr;
    isp_ctrl_attr.isp_ctrl_id = RT_ISP_CTRL_HIGH_LIGHT;
    isp_ctrl_attr.value = 15;
    if (stream_count_0 == 20)
        AWVideoInput_SetIspAttrCfg(0, &isp_ctrl_attr);
#endif
    /* demo: exercise the ISP object-rectangle-layer API at a few frame counts */
    if (mparam.enable_orl) {
        int i;
        RTIspOrl isp_orl;
        memset(&isp_orl, 0, sizeof(RTIspOrl));
        isp_orl.on = 1;
        isp_orl.orl_cnt = 16;
        isp_orl.orl_width = 4;
        for (i = 0; i < isp_orl.orl_cnt; i++) {
            isp_orl.orl_win[i].width = 70;
            isp_orl.orl_win[i].height = 60;
            isp_orl.orl_win[i].left = 70*i;
            isp_orl.orl_win[i].top = 60*i;
            /* cycle red -> green -> blue */
            isp_orl.orl_win[i].rgb_orl = 0xff0000 >> ((i % 3)*8);
        }
        if (stream_count_0 == 0) {
            isp_orl.on = 1;
            isp_orl.orl_cnt = 16;
            isp_orl.orl_width = 1;
            AWVideoInput_SetIspOrl(0, &isp_orl);
        } else if (stream_count_0 == 50) {
            isp_orl.on = 1;
            isp_orl.orl_cnt = 10;
            isp_orl.orl_width = 2;
            AWVideoInput_SetIspOrl(0, &isp_orl);
        } else if (stream_count_0 == 100) {
            isp_orl.on = 1;
            isp_orl.orl_cnt = 5;
            isp_orl.orl_width = 4;
            AWVideoInput_SetIspOrl(0, &isp_orl);
        } else if (stream_count_0 == 150) {
            isp_orl.on = 0;
            isp_orl.orl_cnt = 5;
            isp_orl.orl_width = 6;
            AWVideoInput_SetIspOrl(0, &isp_orl);
        }
    }
    stream_count_0++;
    /* all requested frames saved: wake the main thread exactly once */
    if (stream_count_0 >= max_bitstream_count && !video_finish_flag)
    {
        video_finish_flag = 1;
        sem_post(&finish_sem);
    }
}
/* Route-1 file-rotation state, mirroring route 0's counters. */
static int save_file_cnt_1 = 0;
static int need_save_new_file_1 = 0;
/**
 * Bitstream callback for encode route 1 (h264 only). Mirrors
 * video_stream_cb(): rotates the output every 450 frames at a keyframe and
 * renames the first file with a timestamp.
 *
 * NOTE(review): the gate uses stream_count_0 (route 0's counter) --
 * presumably intentional so both routes stop together; confirm.
 */
void video_stream_cb_1(const AWVideoInput_StreamInfo* stream_info)
{
    int keyframe = stream_info->keyframe_flag;
    if (stream_info->size0 == 0 || stream_info->data0 == NULL) {
        aw_logd("stream data error: data = %p, len = %d",stream_info->data0, stream_info->size0);
        return ;
    }
    if (stream_count_0 < max_bitstream_count)
    {
        /* fix: plain char buffers -- handed to snprintf()/fopen()/rename(),
         * which take char*, not unsigned char*; snprintf bounds the write. */
        char file_name[128] = {0};
        char new_file_name[128] = {0};
#if (DROP_STREAM_DATA == 0)
        if (stream_count_1%450 == 0)
            need_save_new_file_1 = 1;
        if ((need_save_new_file_1 == 1 && keyframe == 1)
            || out_file_1 == NULL)
        {
            save_file_cnt_1++;
            snprintf(file_name, sizeof(file_name), "%s_%d.h264", OUT_PUT_FILE_PREFIX_1, save_file_cnt_1);
            aw_logd("save new file, cnt = %d, file_name = %s", stream_count_1, file_name);
            need_save_new_file_1 = 0;
            if (out_file_1)
            {
                fclose(out_file_1);
            }
            out_file_1 = fopen(file_name, "wb");
        }
        if (out_file_1) {
            if (stream_info->b_insert_sps_pps && stream_info->sps_pps_size && stream_info->sps_pps_buf)
                fwrite(stream_info->sps_pps_buf, 1, stream_info->sps_pps_size, out_file_1);
            fwrite(stream_info->data0, 1, stream_info->size0, out_file_1);
            if (stream_info->size1)
                fwrite(stream_info->data1, 1, stream_info->size1, out_file_1);
            if (stream_info->size2)
                fwrite(stream_info->data2, 1, stream_info->size2, out_file_1);
        }
        /* first frame: rename the file to its timestamped final name */
        if (stream_count_1 == 0)
        {
            uint64_t time = get_cur_time_us();
            /* led_fwrite("/sys/class/gpio_sw/PF6/data", "0"); */
            aw_logw("route 1 save first stream buffer done, time: %lld.%lldms ", time/1000, time%1000);
            snprintf(new_file_name, sizeof(new_file_name), "%s_%d_%lld.%lldms.h264", OUT_PUT_FILE_PREFIX_1, save_file_cnt_1, time/1000, time%1000);
            rename(file_name, new_file_name);
        }
        stream_count_1++;
#endif
    }
    else
    {
        aw_logd("already arrive max_bitstream_count %d", max_bitstream_count);
        return ;
    }
}
/* One loaded OSD bitmap: pixel dimensions plus the malloc'd ARGB8888
 * buffer and its size in bytes. */
typedef struct {
unsigned int width;
unsigned int height;
unsigned char* argb_addr;
unsigned int size;
}BitMapInfoS;
/* Upper bound on OSD bitmaps the demo tracks. */
#define MAX_BITMAP_NUM (8)
/* NOTE(review): not written anywhere in this visible chunk -- presumably
 * filled by other demo code; verify before relying on it. */
BitMapInfoS bit_map_info[MAX_BITMAP_NUM];
/**
 * Load the ARGB8888 bitmap for one OSD item and attach it to the overlay
 * description.
 *
 * For RT_NORMAL_OVERLAY / RT_LUMA_REVERSE_OVERLAY items the bitmap is read
 * from IN_OSD_ARGB_FILE into a freshly malloc'd buffer whose ownership
 * moves to pOverlayInfo->item_info[index].data_buf. RT_COVER_OSD items
 * need no pixel data.
 *
 * @param pOverlayInfo  overlay description; item_info[index].widht/height
 *                      must already be filled in
 * @param index         which overlay item to initialise
 * @return 0 on success, -1 on file or allocation failure
 *
 * (cleanup: removed an always-false num_bitMap guard, an unused
 * pBitMapInfo local, dead start_x/start_y stores, and a pointless
 * fseek() right before fclose().)
 */
int init_overlay_info(VideoInputOSD *pOverlayInfo, int index)
{
    int i = index;
    FILE* icon_hdle = NULL;
    int width = pOverlayInfo->item_info[i].widht;
    int height = pOverlayInfo->item_info[i].height;
    unsigned char *argb_addr = NULL;

    if (pOverlayInfo->item_info[index].osd_type == RT_NORMAL_OVERLAY
        || pOverlayInfo->item_info[index].osd_type == RT_LUMA_REVERSE_OVERLAY) {
        int bitmap_size = width * height * 4; /* ARGB8888: 4 bytes/pixel */
        /* fix: binary mode -- the file is raw pixel data, not text */
        icon_hdle = fopen(IN_OSD_ARGB_FILE, "rb");
        aw_logd("icon_hdle = %p",icon_hdle);
        if (icon_hdle == NULL)
        {
            aw_loge("get icon_hdle error: %s\n", IN_OSD_ARGB_FILE);
            return -1;
        }
        argb_addr = malloc(bitmap_size);
        if (argb_addr == NULL)
        {
            aw_logd("malloc bit_map_info[%d].argb_addr fail\n", i);
            fclose(icon_hdle);
            return -1;
        }
        memset(argb_addr, 0, bitmap_size);
        aw_logd("bitMap[%d] size[%d,%d], argb_addr:%p\n", i, width, height, argb_addr);
        int ret = fread(argb_addr, 1, bitmap_size, icon_hdle);
        if (ret != bitmap_size)
            aw_logd("read bitMap[%d] error, ret value:%d\n", i, ret);
        fclose(icon_hdle);
        pOverlayInfo->argb_type = OVERLAY_ARGB8888;
        aw_logd("osd_num:%d, argb_type:%d\n", pOverlayInfo->osd_num, pOverlayInfo->argb_type);
        /* the hardware expects 16-aligned overlay dimensions; warn only */
        if (width%16 !=0 || height%16 !=0)
        {
            aw_logd("error: w or h is not 16_align: %d, %d", width, height);
        }
        pOverlayInfo->item_info[i].data_buf = argb_addr;
        pOverlayInfo->item_info[i].data_size = bitmap_size;
    } else if (pOverlayInfo->item_info[index].osd_type == RT_COVER_OSD) {
        aw_logd("osd_type RT_COVER_OSD!");
    } else {
        aw_logd("not support!");
    }
    aw_logd("osd item info[%d]: x = %d, y = %d, w = %d, h = %d, buf = %p, size = %d",
        i, pOverlayInfo->item_info[i].start_x, pOverlayInfo->item_info[i].start_y,
        pOverlayInfo->item_info[i].widht, pOverlayInfo->item_info[i].height,
        pOverlayInfo->item_info[i].data_buf, pOverlayInfo->item_info[i].data_size);
    return 0;
}
/**
 * Capture a few JPEG snapshots from a running channel and save the last one
 * to OUT_PUT_FILE_JPEG.
 *
 * Waits (bounded) for the CSI/channel to reach the executing state, then
 * grabs 5 JPEGs ~200ms apart; only the final capture is written to disk.
 *
 * @param p_jpg_config  capture parameters (channel id, width, height, ...)
 * @return 0 on success, -1 on timeout, capture or file error
 */
static int catch_jpeg(catch_jpeg_config *p_jpg_config)
{
    char* buf = NULL;
    /* w*h/2 is the demo's assumed worst-case compressed size */
    int bufLen = p_jpg_config->width*p_jpg_config->height/2;
    int ret = 0;
    AWVideoInput_State channel_state = 0;
    int64_t wait_start_time = 0;
    buf = malloc(bufLen);
    if (buf == NULL)
    {
        aw_logd("catch_jpeg, error: malloc failed");
        return -1;
    }
    memset(buf, 0, bufLen);
    int loop_cnt = 5;
    int cur_len = bufLen;
    while (loop_cnt > 0)
    {
        channel_state = AWVideoInput_Get_channel_state(p_jpg_config->channel_id);
        if (AWVideoInput_Get_csi_status(p_jpg_config->channel_id) == 0 || channel_state != VIDEO_INPUT_STATE_EXCUTING) {
            /* fix: bounded wait -- the original could spin here forever if
             * the channel never started (same pattern as get_yuv_frame_data) */
            if (wait_start_time == 0)
                wait_start_time = get_cur_time_us();
            if (get_cur_time_us() - wait_start_time > WAIT_CSI_TIMEOUT_TIME) {
                aw_loge("catch jpeg: wait start timeout\n");
                free(buf);
                return -1;
            }
            usleep(200*1000);
            aw_logd("jpg wait start");
            continue;
        }
        AWVideoInput_CatchJpegConfig(p_jpg_config);
        cur_len = bufLen; /* in: buffer capacity; out: actual jpeg size */
        ret = AWVideoInput_CatchJpeg(buf, &cur_len, p_jpg_config->channel_id);
        aw_logd("catch jpeg: buf = %p, size = %d, ret = %d, w = %d, h = %d, cnt = %d\n",
            buf, cur_len, ret, p_jpg_config->width, p_jpg_config->height, loop_cnt);
        usleep(200*1000);
        if (ret != 0) {
            aw_loge("catch jpeg failed\n");
            free(buf);
            return -1;
        }
        loop_cnt--;
    }
    FILE* file = fopen(OUT_PUT_FILE_JPEG, "wb");
    if (file == NULL)
    {
        aw_logd("fopen failed\n");
        free(buf); /* fix: the capture buffer leaked on this path */
        return -1;
    }
    /* fix: write only the last jpeg's actual size; the original wrote the
     * whole bufLen buffer, appending garbage after the jpeg data */
    fwrite(buf, 1, cur_len, file);
    fclose(file);
    free(buf);
    return 0;
}
/*
 * Set a single pixel (x, y) on an NV12 frame: Y plane gets yColor, the
 * interleaved UV plane gets uColor/vColor for the containing 2x2 block.
 * Returns 0 on success, -1 when (x, y) lies outside the w x h frame.
 */
static int nv12_draw_point(unsigned char* yBuffer, unsigned char* uvBuffer, int w, int h, int x, int y, int yColor, int uColor, int vColor) {
    if (x < 0 || y < 0 || x >= w || y >= h)
        return -1;

    yBuffer[y * w + x] = yColor;

    /* UV plane: one U/V pair per 2x2 luma block, stride equals w bytes */
    int uv_off = (y / 2) * w + (x / 2) * 2;
    uvBuffer[uv_off] = uColor;
    uvBuffer[uv_off + 1] = vColor;
    return 0;
}
/*
 * Outline the rectangle (left, top)-(right, bottom), inclusive, on an NV12
 * frame by plotting its four edges point by point; points outside the frame
 * are silently skipped by nv12_draw_point(). Always returns 0.
 */
static int nv12_draw_rect(unsigned char* yBuffer, unsigned char* uvBuffer, int w, int h, int left, int top, int right, int bottom, int yColor, int uColor, int vColor) {
    int pos;

    /* top and bottom edges */
    for (pos = left; pos <= right; pos++) {
        nv12_draw_point(yBuffer, uvBuffer, w, h, pos, top, yColor, uColor, vColor);
        nv12_draw_point(yBuffer, uvBuffer, w, h, pos, bottom, yColor, uColor, vColor);
    }
    /* left and right edges */
    for (pos = top; pos <= bottom; pos++) {
        nv12_draw_point(yBuffer, uvBuffer, w, h, left, pos, yColor, uColor, vColor);
        nv12_draw_point(yBuffer, uvBuffer, w, h, right, pos, yColor, uColor, vColor);
    }
    return 0;
}
/*
 * Area, in pixels, of the intersection of two rectangles given as
 * {left, top, right, bottom}; 0 when they do not overlap.
 */
static int check_cross_rect(int ra[], int rb[]) {
    int ix0 = (ra[0] > rb[0]) ? ra[0] : rb[0];
    int iy0 = (ra[1] > rb[1]) ? ra[1] : rb[1];
    int ix1 = (ra[2] < rb[2]) ? ra[2] : rb[2];
    int iy1 = (ra[3] < rb[3]) ? ra[3] : rb[3];

    int iw = (ix1 > ix0) ? (ix1 - ix0) : 0;
    int ih = (iy1 > iy0) ? (iy1 - iy0) : 0;
    return iw * ih;
}
/* Motion-search results shared between the encoder callback and the
 * detection loop. useMotionSearch selects which snapshot score_rect()
 * reads; -1 means "no data yet".
 * NOTE(review): no locking visible -- assumes one producer/one consumer. */
RTVencMotionSearchRegion* pMotionSearchResult = NULL;
int curMotionSearch = 0;
int lastMotionSearch = -1;
int useMotionSearch = -1;
/* Sum of the overlap areas (pixels) between rect r {l,t,r,b} and every
 * region the motion search flagged as moving. Returns the full 1920x1080
 * frame area when no motion data is available, so detections are never
 * filtered out in that case.
 * NOTE(review): iterates a hard-coded 50 regions -- presumably the size
 * allocated for pMotionSearchResult elsewhere; compare against
 * MOTION_SEARCH_TOTAL_NUM (16*9 = 144) and confirm. */
static int score_rect(int r[]) {
int i;
if (useMotionSearch < 0 || pMotionSearchResult == NULL) return 1920 * 1080;
RTVencMotionSearchRegion* result = &pMotionSearchResult[useMotionSearch];
int score = 0;
int rect[4];
for(i = 0; i < 50; i++)
{
if (result[i].is_motion) {
rect[0] = result[i].pix_x_bgn;
rect[1] = result[i].pix_y_bgn;
rect[2] = result[i].pix_x_end;
rect[3] = result[i].pix_y_end;
score += check_cross_rect(r, rect);
}
}
return score;
}
/**
 * Pull up to 20 NV12 frames from the given channel and dump them to
 * OUT_PUT_FILE_2. With ENABLE_HUMAN_DETECT the frame is also fed through
 * the awnn humanoid detector and accepted detections are drawn into the
 * frame before it is saved.
 *
 * @param channel  video-input channel id
 * @param config   channel config (width/height used for copy/draw sizes)
 * @return 0 on success, -1 when the CSI never becomes ready or awnn
 *         setup fails
 */
static int get_yuv_frame_data(int channel, VideoInputConfig* config)
{
    int ret = 0;
    VideoYuvFrame mYuvFrame;
    int cnt = 20;
    int bsave_yuv_data = 1;
    int64_t wait_start_time = 0;
    out_file_2 = fopen(OUT_PUT_FILE_2, "wb");
    if (out_file_2 == NULL)
    {
        aw_logd("fopen failed");
    }
#if ENABLE_HUMAN_DETECT
    // only get once for the same nb file.
#if 0
    awnn_info_t * nbinfo;
    nbinfo = awnn_get_info(NETWORK_HUMAN);
    aw_logd("%s, %s, %dx%d, %d, %f\n", nbinfo->name, nbinfo->md5, nbinfo->width, nbinfo->height, nbinfo->mem_size, nbinfo->thresh);
#endif
    // we can get this info from readme.txt for awnn_get_info()
    float thresh = HUMAN_DET_THRESH; //nbinfo->thresh;
    unsigned int awnn_mem_size = HUMAN_DET_MEM_SIZE; //nbinfo->mem_size;
    aw_logw("%s, %dx%d, mem_size %d, thresh %f\n", NETWORK_HUMAN, HUMAN_DET_WIDTH, HUMAN_DET_HEIGHT, HUMAN_DET_MEM_SIZE, HUMAN_DET_THRESH);
    awnn_init(awnn_mem_size);
    Awnn_Context_t *context = awnn_create(NETWORK_HUMAN);
    if (!context) {
        aw_loge("Failed to awnn_create\n");
        if (out_file_2) {
            fclose(out_file_2);
            out_file_2 = NULL;
        }
        awnn_uninit();
        return -1;
    }
    aw_logd("awnn_create %p ok", context);
    unsigned char *human_input_buffers[2];
    unsigned char yBuffer[HUMAN_DET_WIDTH * HUMAN_DET_HEIGHT];
    memset(&yBuffer, 0, sizeof(yBuffer));
    unsigned char uvBuffer[HUMAN_DET_WIDTH * HUMAN_DET_HEIGHT / 2];
    memset(&uvBuffer, 0, sizeof(uvBuffer));
    human_input_buffers[0] = yBuffer;
    human_input_buffers[1] = uvBuffer;
    awnn_set_input_buffers(context, human_input_buffers);
#endif
    while (cnt > 0)
    {
        if (AWVideoInput_Check_Wait_Start(channel)) {
            if (wait_start_time == 0)
                wait_start_time = get_cur_time_us();
            if (get_cur_time_us() - wait_start_time > WAIT_CSI_TIMEOUT_TIME) {
                aw_loge("wait csi ready more than 5s, exit!");
                /* fix: release resources on the timeout path -- the dump
                 * file (and the awnn context when detection is enabled)
                 * used to leak here */
                if (out_file_2) {
                    fclose(out_file_2);
                    out_file_2 = NULL;
                }
#if ENABLE_HUMAN_DETECT
                awnn_destroy(context);
                awnn_uninit();
#endif
                return -1;
            }
            usleep(200*1000);
            aw_logd("wait start");
            continue;
        }
        memset(&mYuvFrame, 0, sizeof(VideoYuvFrame));
        ret = AWVideoInput_GetYuvFrame(channel, &mYuvFrame);
#if ENABLE_HUMAN_DETECT
        if (0 == ret)
        {
            aw_logd("ret = %d, cnt = %d; YuvFrame: buf %dx%d data %dx%d; phyAddr = %p, %p, %p, virAddr = %p, %p, %p\n",
                ret, cnt, mYuvFrame.widht, mYuvFrame.height, config->width, config->height,
                mYuvFrame.phyAddr[0], mYuvFrame.phyAddr[1], mYuvFrame.phyAddr[2],
                mYuvFrame.virAddr[0], mYuvFrame.virAddr[1], mYuvFrame.virAddr[2]);
            /* feed the frame to the detector's input buffers */
            memcpy(yBuffer, mYuvFrame.virAddr[0], config->width * config->height);
            memcpy(uvBuffer, mYuvFrame.virAddr[1], config->width * config->height / 2);
            awnn_run(context);
            Awnn_Post_t post;
            post.type = NETWORK_HUMAN_POST_TYPE;
            post.width = HUMAN_DET_WIDTH;
            post.height = HUMAN_DET_HEIGHT;
            post.thresh = thresh;
            Awnn_Result_t res;
            memset(&res, 0, sizeof(Awnn_Result_t));
            awnn_det_post(context, &post, &res);
            aw_logw("awnn detect valid_cnt = %d, current time:%lld\n", res.valid_cnt, get_cur_time_us());
            for (int i = 0; i < res.valid_cnt; i++) {
                if (res.boxes[i].label == 0) {
                    /* scale the box into source coordinates for motion scoring */
                    int rect[] = {
                        res.boxes[i].xmin * mparam.c0_src_w / config->width,
                        res.boxes[i].ymin * mparam.c0_src_w / config->width,
                        res.boxes[i].xmax * mparam.c0_src_w / config->width,
                        res.boxes[i].ymax * mparam.c0_src_w / config->width};
                    int score = score_rect(rect);
                    int w = res.boxes[i].xmax - res.boxes[i].xmin;
                    int h = res.boxes[i].ymax - res.boxes[i].ymin;
                    /* fix: cast to float -- the original integer division
                     * truncated the area ratio to 0 for any box smaller
                     * than the full frame */
                    float percentage = (float)(w * h) / (config->width * config->height);
                    aw_logw("[%d/%d] cls %d, prob %f, rect %d, %d, %d, %d, score %d, percentage=%.4f\n", i+1, res.valid_cnt, res.boxes[i].label, res.boxes[i].score,
                        res.boxes[i].xmin, res.boxes[i].ymin, res.boxes[i].xmax, res.boxes[i].ymax, score, percentage);
                    if ((score == 0 && res.boxes[i].score < 0.6) || // no motion data: drop when confidence < 0.6
                        (percentage < 0.002 && res.boxes[i].score < 0.6) || // area too small: drop when confidence < 0.6
                        (percentage > 0.8 && res.boxes[i].score < 0.8) || // area too large: drop when confidence < 0.8
                        (w > h && res.boxes[i].score < 0.8)) { // wider than tall: drop when confidence < 0.8
                        aw_logd("skip rect %d %d\n", w, h);
                        continue;
                    }
                    nv12_draw_rect(mYuvFrame.virAddr[0], mYuvFrame.virAddr[1], config->width, config->height,
                        res.boxes[i].xmin, res.boxes[i].ymin, res.boxes[i].xmax, res.boxes[i].ymax, 0x96, 0x2C, 0x15);
                }
            }
            useMotionSearch = lastMotionSearch;
            usleep(100*1000);
        }
        else
        {
            aw_loge("AWVideoInput_GetYuvFrame failed! ret=%d", ret);
        }
#endif
        if (ret == 0)
        {
            int buf_size = mYuvFrame.widht*mYuvFrame.height*3/2;
            if (bsave_yuv_data == 1 && out_file_2 != NULL)
                fwrite(mYuvFrame.virAddr[0], 1, buf_size, out_file_2);
            AWVideoInput_ReleaseYuvFrame(channel, &mYuvFrame);
        }
        if (ret != 0) {
            aw_logd("Getting yuv failed, break!");
            break;
        }
#if 0
        RTIspExpGain expgain;
        AWVideoInput_GetExpGain(channel, &expgain);
        aw_loge("exp:%d, again:%d, rgain:%d, bgain:%d\n",
            expgain.exp_val, expgain.gain_val, expgain.r_gain, expgain.b_gain);
#endif
        cnt--;
    }
#if ENABLE_HUMAN_DETECT
    if (context)
    {
        aw_logd("awnn_destroy %p", context);
        awnn_destroy(context);
        context = NULL;
    }
    aw_logd("awnn_uninit");
    awnn_uninit();
#endif
    if (out_file_2 != NULL) {
        fclose(out_file_2);
        out_file_2 = NULL; /* fix: don't leave a dangling global FILE* */
    }
    return 0;
}
#if ENABLE_GET_BIN_IMAGE_DATA
/*
 * Fetch the encoder's binary-image buffer (2 bytes per 32x32 CTU) for one
 * frame and dump it raw to OUT_PUT_FILE_BIN_IMAGE. The walk over the
 * buffer only demonstrates the addressing scheme -- it reads nothing and
 * merely counts bytes.
 * Returns 0 on success, -1 when the output file cannot be opened;
 * abort()s on allocation failure.
 */
static int get_bin_image_data(int channel, VideoInputConfig* config)
{
int ret = 0;
FILE* file = fopen(OUT_PUT_FILE_BIN_IMAGE, "wb");
if(file == NULL)
{
aw_logd("fopen failed");
return -1;
}
/* frame size in 32x32 CTUs (>> 5 == divide by ctu_size) */
int ctu_size = 32;
int log2_ctu_size_sqrt = 5;
//int w_in_4ctu_align = 0;
int w_in_ctu = 0;
int h_in_ctu = 0;
//w_in_4ctu_align = (ALIGN_XXB(ctu_size*4, config->width)) >> log2_ctu_size_sqrt;
h_in_ctu = (ALIGN_XXB(ctu_size, config->height)) >> log2_ctu_size_sqrt;
w_in_ctu = (ALIGN_XXB(ctu_size, config->width)) >> log2_ctu_size_sqrt;
/* 2 bytes per CTU, CTU count rounded up to an even number */
unsigned int buf_size = ALIGN_XXB(2, w_in_ctu * h_in_ctu) * 2;
unsigned char* tmp_buf = malloc(buf_size);
if(tmp_buf == NULL)
{
aw_logd("malloc failed\n");
abort();
}
memset(tmp_buf, 0, buf_size);
ret = AWVideoInput_GetBinImageData(channel, tmp_buf, buf_size);
aw_logd("get_bin_image_data: buf_size = %d, ret = %d\n", buf_size, ret);
//* analyze the bin image info
int x = 0;
int y = 0;
unsigned char* data_buf = tmp_buf;
int count = 0;
for(y = 0; y < h_in_ctu; y++)
{
//* 1ctu(32x32) = 16x8x8 : 8x8 mb need 1 bit, every ctu need 2 bytes;
for(x = 0; x < w_in_ctu; x++)
{
data_buf++;
data_buf++;
count += 2;
}
}
aw_logd("get_bin_image_data: analyze count = %d\n", count);
fwrite(tmp_buf, 1, buf_size, file);
fclose(file);
free(tmp_buf);
return 0;
}
#endif
#if ENABLE_GET_MV_INFO_DATA
/*
 * Fetch the per-macroblock motion-vector buffer for an h264 channel and
 * dump it raw to OUT_PUT_FILE_MV_INFO. One 64-bit parcel per 16x16 MB,
 * rows padded to a multiple of 4 MBs.
 * The decode loop demonstrates how to unpack each parcel; the extracted
 * mb_type/mvx/mvy values are intentionally discarded (reference code).
 * Returns 0 on success, -1 when the output file cannot be opened;
 * abort()s on allocation failure.
 */
static int get_mv_info_data_h264(int channel, VideoInputConfig* config)
{
int ret = 0;
FILE* file = fopen(OUT_PUT_FILE_MV_INFO, "wb");
if(file == NULL)
{
aw_logd("fopen failed");
return -1;
}
unsigned char* tmp_buf = NULL;
unsigned int mv_info_len = 0;
/* frame size in 16x16 macroblocks; rows padded to 4-MB alignment */
int widthMb = (ALIGN_XXB(16, config->width)) >> 4;
int heightMb = (ALIGN_XXB(16, config->height)) >> 4;
int widthMb_align4 = ALIGN_XXB(4, widthMb); //((widthMb + 3) & (~3));
mv_info_len = widthMb_align4 * heightMb * 8;
tmp_buf = malloc(mv_info_len);
if(tmp_buf == NULL)
{
aw_logd("malloc failed\n");
abort();
}
memset(tmp_buf, 0, mv_info_len);
ret = AWVideoInput_GetMvInfoData(channel, tmp_buf, mv_info_len);
aw_logd("get_mv_info_data_h264: mv_info_len = %d, ret = %d\n", mv_info_len, ret);
//* analyze the mv_info
long long *mv_info_parcel;
int mvx, mvy, mb_type;
int w, h, k;
k = 0;
for(h = 0; h < heightMb; h++)
{
for(w = 0; w < widthMb_align4; w++)
{
mv_info_parcel = (long long *)(tmp_buf) + k;
k++;
if(w < widthMb)// inside the picture
{
mb_type = (((*mv_info_parcel) >> 55) & 0x1); // intra: 1; inter: 0
mvx = ((*mv_info_parcel) >> 47) & (0x00ff);
mvy = ((*mv_info_parcel) >> 40) & (0x007f);
/* sign-extend the 8-bit mvx / 7-bit mvy fields */
mvx = (mvx & 0x80) ? (mvx | (~0xff)) : mvx;
mvy = (mvy & 0x40) ? (mvy | (~0x7f)) : mvy;
}
}
}
aw_logd("get_mv_info_data_h264: analyze mv info, k = %d\n", k);
fwrite(tmp_buf, 1, mv_info_len, file);
fclose(file);
free(tmp_buf);
return 0;
}
/*
 * Fetch the motion-vector buffer for an h265 channel and dump it raw to
 * OUT_PUT_FILE_MV_INFO. One 32-bit parcel per 16x16 block, four blocks
 * per 32x32 CTU, rows padded per the encoder's alignment rules.
 * As in the h264 variant, the decode loop only demonstrates parcel
 * unpacking; mb_type/mvx/mvy are intentionally discarded.
 * Returns 0 on success, -1 when the output file cannot be opened;
 * abort()s on allocation failure.
 */
static int get_mv_info_data_h265(int channel, VideoInputConfig* config)
{
int ret = 0;
FILE* file = fopen(OUT_PUT_FILE_MV_INFO, "wb");
if(file == NULL)
{
aw_logd("fopen failed");
return -1;
}
unsigned char* tmp_buf = NULL;
unsigned int mv_info_len = 0;
/* frame size in 32x32 CTUs, width padded to 4-CTU then 16-CTU alignment */
int ctu_size = 32;
int log2_ctu_size_sqrt = 5;
int w_in_4ctu_align = 0;
int h_in_ctu = 0;
w_in_4ctu_align = (ALIGN_XXB(ctu_size*4, config->width)) >> log2_ctu_size_sqrt;
h_in_ctu = (ALIGN_XXB(ctu_size, config->height)) >> log2_ctu_size_sqrt;
unsigned int width_in_ctu_16align = ALIGN_XXB(16, w_in_4ctu_align);
/* 4 blocks x 4 bytes = 16 bytes per CTU */
mv_info_len = width_in_ctu_16align * h_in_ctu * 16;
tmp_buf = malloc(mv_info_len);
if(tmp_buf == NULL)
{
aw_logd("malloc failed\n");
abort();
}
memset(tmp_buf, 0, mv_info_len);
ret = AWVideoInput_GetMvInfoData(channel, tmp_buf, mv_info_len);
aw_logd("get_mv_info_data_h265: mv_info_len = %d, ret = %d\n", mv_info_len, ret);
//* analyze the mv_info
int mvx, mvy, mb_type;
int w, h, k, n;
k = 0;
unsigned int *mv_info_parcel;
int ctu_width = width_in_ctu_16align;
int ctu_height = h_in_ctu;
for(h = 0; h < ctu_height; h++)
{
for(w = 0; w < ctu_width; w++)
{
for(n = 0; n < 4; n++) // 4 16x16 block in ctu
{
/* pixel position of this 16x16 block inside the frame */
int curx = 32 * w + (n % 2) * 16;
int cury = 32 * h + (n / 2) * 16;
mv_info_parcel = (unsigned int *)tmp_buf + k;
if(curx < config->width && cury < config->height) // inside the picture
{
mb_type = (((*mv_info_parcel) >> 19) & 0x1); // intra: 1; inter: 0
mvx = ((*mv_info_parcel) >> 0) & (0x00ff);
mvy = ((*mv_info_parcel) >> 10) & (0x007f);
/* sign-extend the 8-bit mvx / 7-bit mvy fields */
mvx = (mvx & 0x80) ? (mvx | (~0xff)) : mvx;
mvy = (mvy & 0x40) ? (mvy | (~0x7f)) : mvy;
}
k++;
} // n
} // w
} // h
aw_logd("get_mv_info_data_h265: analyze mv info, k = %d\n", k);
fwrite(tmp_buf, 1, mv_info_len, file);
fclose(file);
free(tmp_buf);
return 0;
}
#endif
/*
 * Demo helper: exercise dynamic resolution switching. Stops the channel,
 * resets the encode size to 1280x720, restarts (re-applying the OSD when
 * enabled), then switches back to 1920x1080 the same way. Always returns 0.
 */
static int reset_size(int channel_id, VideoInputOSD *pOverlayInfo)
{
    int loop_cnt;
    int reset_w = 1920;
    int reset_h = 1080;

    aw_logd("start reset size");
    //usleep(1*1000*1000);
    for (loop_cnt = 1; loop_cnt > 0; loop_cnt--) {
        /* first switch: down to 720p */
        reset_w = 1280;
        reset_h = 720;
        AWVideoInput_Start(channel_id, 0);
        AWVideoInput_ResetSize(channel_id, reset_w, reset_h);
        aw_logd("reset size[%dx%d], loop_cnt = %d -- reopen : %p, path = %s",reset_w, reset_h, loop_cnt, out_file_0, OUT_PUT_FILE_0_1);
        AWVideoInput_Start(channel_id, 1);
        usleep(2*1000*1000);
#if ENABLE_OSD_FUNCTION
        AWVideoInput_SetOSD(channel_id, pOverlayInfo);
#endif
        usleep(1*1000*1000);

        /* second switch: back up to 1080p */
        reset_w = 1920;
        reset_h = 1080;
        AWVideoInput_Start(channel_id, 0);
        AWVideoInput_ResetSize(channel_id, reset_w, reset_h);
        aw_logd("reset size[%dx%d], loop_cnt = %d -- reopen : %p, path = %s",reset_w, reset_h, loop_cnt, out_file_0, OUT_PUT_FILE_0_1);
        AWVideoInput_Start(channel_id, 1);
#if ENABLE_OSD_FUNCTION
        AWVideoInput_SetOSD(channel_id, pOverlayInfo);
#endif
        usleep(1*1000*1000);
    }
    return 0;
}
//int initSharpFunc(RTsEncppSharpParamDynamic *pSharpParamDynamic, RTsEncppSharpParamStatic *pSharpParamStatic)
//{
// pSharpParamDynamic->ss_ns_lw = 0; //[0,255];
// pSharpParamDynamic->ss_ns_hi = 0; //[0,255];
// pSharpParamDynamic->ls_ns_lw = 0; //[0,255];
// pSharpParamDynamic->ls_ns_hi = 0; //[0,255];
// pSharpParamDynamic->ss_lw_cor = 0; //[0,255];
// pSharpParamDynamic->ss_hi_cor = 0; //[0,255];
// pSharpParamDynamic->ls_lw_cor = 0; //[0,255];
// pSharpParamDynamic->ls_hi_cor = 0; //[0,255];
// pSharpParamDynamic->ss_blk_stren = 256; //[0,4095];
// pSharpParamDynamic->ss_wht_stren = 256; //[0,4095];
// pSharpParamDynamic->ls_blk_stren = 256; //[0,4095];
// pSharpParamDynamic->ls_wht_stren = 256; //[0,4095];
// pSharpParamDynamic->wht_clp_para = 256; //[0,1023];
// pSharpParamDynamic->blk_clp_para = 256; //[0,1023];
// pSharpParamDynamic->ss_avg_smth = 0; //[0,255];
// pSharpParamDynamic->hfr_mf_blk_stren = 0; //[0,4095];
// pSharpParamDynamic->hfr_hf_blk_stren = 0; //[0,4095];
// pSharpParamDynamic->hfr_hf_wht_clp = 32; //[0,255];
// pSharpParamDynamic->hfr_hf_cor_ratio = 0; //[0,255];
// pSharpParamDynamic->hfr_mf_mix_ratio = 390; //[0,1023];
// pSharpParamDynamic->ss_dir_smth = 0; //[0,16];
// pSharpParamDynamic->wht_clp_slp = 16; //[0,63];
// pSharpParamDynamic->blk_clp_slp = 8; //[0,63];
// pSharpParamDynamic->max_clp_ratio = 64; //[0,255];
// pSharpParamDynamic->hfr_hf_blk_clp = 32; //[0,255];
// pSharpParamDynamic->hfr_smth_ratio = 0; //[0,32];
// pSharpParamDynamic->dir_smth[0] = 0; //[0,16];
// pSharpParamDynamic->dir_smth[1] = 0; //[0,16];
// pSharpParamDynamic->dir_smth[2] = 0; //[0,16];
// pSharpParamDynamic->dir_smth[3] = 0; //[0,16];
// pSharpParamStatic->ss_shp_ratio = 0; //[0,255];
// pSharpParamStatic->ls_shp_ratio = 0; //[0,255];
// pSharpParamStatic->ss_dir_ratio = 98; //[0,1023];
// pSharpParamStatic->ls_dir_ratio = 90; //[0,1023];
// pSharpParamStatic->ss_crc_stren = 128; //[0,1023];
// pSharpParamStatic->ss_crc_min = 16; //[0,255];
// pSharpParamDynamic->hfr_mf_blk_clp = 32; //[0,255];
// pSharpParamDynamic->hfr_mf_wht_clp = 32; //[0,255];
// pSharpParamDynamic->hfr_hf_wht_stren = 0; //[0,4095];
// pSharpParamDynamic->hfr_mf_wht_stren = 0; //[0,4095];
// pSharpParamDynamic->hfr_mf_cor_ratio = 0; //[0,255];
// pSharpParamDynamic->hfr_hf_mix_ratio = 390; //[0,1023];
// pSharpParamDynamic->hfr_hf_mix_min_ratio = 0; //[0,255];
// pSharpParamDynamic->hfr_mf_mix_min_ratio = 0; //[0,255];
//
// pSharpParamStatic->sharp_ss_value[0]=384;
// pSharpParamStatic->sharp_ss_value[1]=416;
// pSharpParamStatic->sharp_ss_value[2]=471;
// pSharpParamStatic->sharp_ss_value[3]=477;
// pSharpParamStatic->sharp_ss_value[4]=443;
// pSharpParamStatic->sharp_ss_value[5]=409;
// pSharpParamStatic->sharp_ss_value[6]=374;
// pSharpParamStatic->sharp_ss_value[7]=340;
// pSharpParamStatic->sharp_ss_value[8]=306;
// pSharpParamStatic->sharp_ss_value[9]=272;
// pSharpParamStatic->sharp_ss_value[10]=237;
// pSharpParamStatic->sharp_ss_value[11]=203;
// pSharpParamStatic->sharp_ss_value[12]=169;
// pSharpParamStatic->sharp_ss_value[13]=134;
// pSharpParamStatic->sharp_ss_value[14]=100;
// pSharpParamStatic->sharp_ss_value[15]=66;
// pSharpParamStatic->sharp_ss_value[16]=41;
// pSharpParamStatic->sharp_ss_value[17]=32;
// pSharpParamStatic->sharp_ss_value[18]=32;
// pSharpParamStatic->sharp_ss_value[19]=32;
// pSharpParamStatic->sharp_ss_value[20]=32;
// pSharpParamStatic->sharp_ss_value[21]=32;
// pSharpParamStatic->sharp_ss_value[22]=32;
// pSharpParamStatic->sharp_ss_value[23]=32;
// pSharpParamStatic->sharp_ss_value[24]=32;
// pSharpParamStatic->sharp_ss_value[25]=32;
// pSharpParamStatic->sharp_ss_value[26]=32;
// pSharpParamStatic->sharp_ss_value[27]=32;
// pSharpParamStatic->sharp_ss_value[28]=32;
// pSharpParamStatic->sharp_ss_value[29]=32;
// pSharpParamStatic->sharp_ss_value[30]=32;
// pSharpParamStatic->sharp_ss_value[31]=32;
// pSharpParamStatic->sharp_ss_value[32]=32;
//
// pSharpParamStatic->sharp_ls_value[0]=384;
// pSharpParamStatic->sharp_ls_value[1]=395;
// pSharpParamStatic->sharp_ls_value[2]=427;
// pSharpParamStatic->sharp_ls_value[3]=470;
// pSharpParamStatic->sharp_ls_value[4]=478;
// pSharpParamStatic->sharp_ls_value[5]=416;
// pSharpParamStatic->sharp_ls_value[6]=320;
// pSharpParamStatic->sharp_ls_value[7]=224;
// pSharpParamStatic->sharp_ls_value[8]=152;
// pSharpParamStatic->sharp_ls_value[9]=128;
// pSharpParamStatic->sharp_ls_value[10]=128;
// pSharpParamStatic->sharp_ls_value[11]=128;
// pSharpParamStatic->sharp_ls_value[12]=128;
// pSharpParamStatic->sharp_ls_value[13]=128;
// pSharpParamStatic->sharp_ls_value[14]=128;
// pSharpParamStatic->sharp_ls_value[15]=128;
// pSharpParamStatic->sharp_ls_value[16]=128;
// pSharpParamStatic->sharp_ls_value[17]=128;
// pSharpParamStatic->sharp_ls_value[18]=128;
// pSharpParamStatic->sharp_ls_value[19]=128;
// pSharpParamStatic->sharp_ls_value[20]=128;
// pSharpParamStatic->sharp_ls_value[21]=128;
// pSharpParamStatic->sharp_ls_value[22]=128;
// pSharpParamStatic->sharp_ls_value[23]=128;
// pSharpParamStatic->sharp_ls_value[24]=128;
// pSharpParamStatic->sharp_ls_value[25]=128;
// pSharpParamStatic->sharp_ls_value[26]=128;
// pSharpParamStatic->sharp_ls_value[27]=128;
// pSharpParamStatic->sharp_ls_value[28]=128;
// pSharpParamStatic->sharp_ls_value[29]=128;
// pSharpParamStatic->sharp_ls_value[30]=128;
// pSharpParamStatic->sharp_ls_value[31]=128;
// pSharpParamStatic->sharp_ls_value[32]=128;
//
// pSharpParamStatic->sharp_hsv[0]=218;
// pSharpParamStatic->sharp_hsv[1]=206;
// pSharpParamStatic->sharp_hsv[2]=214;
// pSharpParamStatic->sharp_hsv[3]=247;
// pSharpParamStatic->sharp_hsv[4]=282;
// pSharpParamStatic->sharp_hsv[5]=299;
// pSharpParamStatic->sharp_hsv[6]=308;
// pSharpParamStatic->sharp_hsv[7]=316;
// pSharpParamStatic->sharp_hsv[8]=325;
// pSharpParamStatic->sharp_hsv[9]=333;
// pSharpParamStatic->sharp_hsv[10]=342;
// pSharpParamStatic->sharp_hsv[11]=350;
// pSharpParamStatic->sharp_hsv[12]=359;
// pSharpParamStatic->sharp_hsv[13]=367;
// pSharpParamStatic->sharp_hsv[14]=376;
// pSharpParamStatic->sharp_hsv[15]=380;
// pSharpParamStatic->sharp_hsv[16]=375;
// pSharpParamStatic->sharp_hsv[17]=366;
// pSharpParamStatic->sharp_hsv[18]=358;
// pSharpParamStatic->sharp_hsv[19]=349;
// pSharpParamStatic->sharp_hsv[20]=341;
// pSharpParamStatic->sharp_hsv[21]=332;
// pSharpParamStatic->sharp_hsv[22]=324;
// pSharpParamStatic->sharp_hsv[23]=315;
// pSharpParamStatic->sharp_hsv[24]=307;
// pSharpParamStatic->sharp_hsv[25]=298;
// pSharpParamStatic->sharp_hsv[26]=290;
// pSharpParamStatic->sharp_hsv[27]=281;
// pSharpParamStatic->sharp_hsv[28]=273;
// pSharpParamStatic->sharp_hsv[29]=264;
// pSharpParamStatic->sharp_hsv[30]=258;
// pSharpParamStatic->sharp_hsv[31]=256;
// pSharpParamStatic->sharp_hsv[32]=256;
// pSharpParamStatic->sharp_hsv[33]=256;
// pSharpParamStatic->sharp_hsv[34]=256;
// pSharpParamStatic->sharp_hsv[35]=256;
// pSharpParamStatic->sharp_hsv[36]=256;
// pSharpParamStatic->sharp_hsv[37]=256;
// pSharpParamStatic->sharp_hsv[38]=256;
// pSharpParamStatic->sharp_hsv[39]=256;
// pSharpParamStatic->sharp_hsv[40]=256;
// pSharpParamStatic->sharp_hsv[41]=256;
// pSharpParamStatic->sharp_hsv[42]=256;
// pSharpParamStatic->sharp_hsv[43]=256;
// pSharpParamStatic->sharp_hsv[44]=256;
// pSharpParamStatic->sharp_hsv[45]=256;
//
// pSharpParamDynamic->sharp_edge_lum[0]=128;
// pSharpParamDynamic->sharp_edge_lum[1]=144;
// pSharpParamDynamic->sharp_edge_lum[2]=160;
// pSharpParamDynamic->sharp_edge_lum[3]=176;
// pSharpParamDynamic->sharp_edge_lum[4]=192;
// pSharpParamDynamic->sharp_edge_lum[5]=208;
// pSharpParamDynamic->sharp_edge_lum[6]=224;
// pSharpParamDynamic->sharp_edge_lum[7]=240;
// pSharpParamDynamic->sharp_edge_lum[8]=256;
// pSharpParamDynamic->sharp_edge_lum[9]=256;
// pSharpParamDynamic->sharp_edge_lum[10]=256;
// pSharpParamDynamic->sharp_edge_lum[11]=256;
// pSharpParamDynamic->sharp_edge_lum[12]=256;
// pSharpParamDynamic->sharp_edge_lum[13]=256;
// pSharpParamDynamic->sharp_edge_lum[14]=256;
// pSharpParamDynamic->sharp_edge_lum[15]=256;
// pSharpParamDynamic->sharp_edge_lum[16]=256;
// pSharpParamDynamic->sharp_edge_lum[17]=256;
// pSharpParamDynamic->sharp_edge_lum[18]=256;
// pSharpParamDynamic->sharp_edge_lum[19]=256;
// pSharpParamDynamic->sharp_edge_lum[20]=256;
// pSharpParamDynamic->sharp_edge_lum[21]=256;
// pSharpParamDynamic->sharp_edge_lum[22]=256;
// pSharpParamDynamic->sharp_edge_lum[23]=256;
// pSharpParamDynamic->sharp_edge_lum[24]=256;
// pSharpParamDynamic->sharp_edge_lum[25]=256;
// pSharpParamDynamic->sharp_edge_lum[26]=256;
// pSharpParamDynamic->sharp_edge_lum[27]=256;
// pSharpParamDynamic->sharp_edge_lum[28]=256;
// pSharpParamDynamic->sharp_edge_lum[29]=256;
// pSharpParamDynamic->sharp_edge_lum[30]=256;
// pSharpParamDynamic->sharp_edge_lum[31]=256;
// pSharpParamDynamic->sharp_edge_lum[32]=256;
//
//#if 0
// pSharpParamDynamic->roi_num = 0; //<=8
// pSharpParamDynamic->roi_item[0].x = 40;
// pSharpParamDynamic->roi_item[0].y = 59;
// pSharpParamDynamic->roi_item[0].width = 32;
// pSharpParamDynamic->roi_item[0].height = 32;
//
//
// pSharpParam->roi_item[1].x = 40;
// pSharpParam->roi_item[1].y = 59;
// pSharpParam->roi_item[1].width = 32;
// pSharpParam->roi_item[1].height = 32;
//
// pSharpParam->roi_item[2].x = 40;
// pSharpParam->roi_item[2].y = 59;
// pSharpParam->roi_item[2].width = 32;
// pSharpParam->roi_item[2].height = 32;
//
// pSharpParam->roi_item[3].x = 40;
// pSharpParam->roi_item[3].y = 59;
// pSharpParam->roi_item[3].width = 32;
// pSharpParam->roi_item[3].height = 32;
//
// pSharpParam->roi_item[4].x = 40;
// pSharpParam->roi_item[4].y = 59;
// pSharpParam->roi_item[4].width = 32;
// pSharpParam->roi_item[4].height = 32;
//
// pSharpParam->roi_item[5].x = 40;
// pSharpParam->roi_item[5].y = 59;
// pSharpParam->roi_item[5].width = 32;
// pSharpParam->roi_item[5].height = 32;
//
// pSharpParam->roi_item[6].x = 40;
// pSharpParam->roi_item[6].y = 59;
// pSharpParam->roi_item[6].width = 32;
// pSharpParam->roi_item[6].height = 32;
//
// pSharpParam->roi_item[7].x = 40;
// pSharpParam->roi_item[7].y = 59;
// pSharpParam->roi_item[7].width = 32;
// pSharpParam->roi_item[7].height = 32;
//#endif
//
// return 0;
//}
/* Fill in the demo's motion-search configuration: feature enabled,
 * driver defaults disabled, region grid sized by the MOTION_SEARCH_*
 * constants, and hand-tuned motion/SAD thresholds. */
static void init_motion_search_param(RTVencMotionSearchParam *motion_search_param)
{
    RTVencMotionSearchParam *p = motion_search_param;

    p->en_motion_search     = 1;    /* turn motion search on */
    p->dis_default_para     = 1;    /* use the values below, not driver defaults */
    p->hor_region_num       = MOTION_SEARCH_HOR_NUM;
    p->ver_region_num       = MOTION_SEARCH_VER_NUM;
    p->large_mv_th          = 20;
    p->large_mv_ratio_th    = 12.0f;
    p->non_zero_mv_ratio_th = 20.0f;
    p->large_sad_ratio_th   = 30.0f;
}
/* Populate four demo ROI entries:
 *  - region 0: strong quality boost (QP offset -10) on a 150x500 strip
 *    at the top-left corner;
 *  - regions 1-3: identical medium boosts (QP offset -5) on a 320x180
 *    window at (320,180). */
static void init_roi(RTVencROIConfig *sRoiConfig)
{
    int i;

    sRoiConfig[0].bEnable       = 1;
    sRoiConfig[0].index         = 0;
    sRoiConfig[0].nQPoffset     = -10;
    sRoiConfig[0].sRect.nLeft   = 0;
    sRoiConfig[0].sRect.nTop    = 0;
    sRoiConfig[0].sRect.nWidth  = 150;
    sRoiConfig[0].sRect.nHeight = 500;

    /* Regions 1..3 share the same geometry and QP offset. */
    for (i = 1; i <= 3; i++) {
        sRoiConfig[i].bEnable       = 1;
        sRoiConfig[i].index         = i;
        sRoiConfig[i].nQPoffset     = -5;
        sRoiConfig[i].sRect.nLeft   = 320;
        sRoiConfig[i].sRect.nTop    = 180;
        sRoiConfig[i].sRect.nWidth  = 320;
        sRoiConfig[i].sRect.nHeight = 180;
    }
}
/* Fill demo defaults for the encoder's 2D (spatial) and 3D (temporal)
 * noise-reduction filters; both are enabled. */
static void init_2d_3d_param(RTs2DfilterParam *p2DfilterParam, RTs3DfilterParam *p3DfilterParam)
{
    /* 2D filter (2DNR): enabled. */
    p2DfilterParam->enable_2d_filter   = 1;
    p2DfilterParam->filter_strength_y  = 127;
    p2DfilterParam->filter_strength_uv = 128;
    p2DfilterParam->filter_th_y        = 11;
    p2DfilterParam->filter_th_uv       = 7;

    /* 3D filter (3DNR): enabled, pixel-level adjust off, smoothing on. */
    p3DfilterParam->enable_3d_filter        = 1;
    p3DfilterParam->adjust_pix_level_enable = 0;
    p3DfilterParam->smooth_filter_enable    = 1;
    p3DfilterParam->max_pix_diff_th         = 6;
    p3DfilterParam->max_mv_th               = 8;
    p3DfilterParam->max_mad_th              = 16;
    p3DfilterParam->min_coef                = 13;
    p3DfilterParam->max_coef                = 16;
}
/* Polling worker that pulls motion-search results from the encoder into
 * a global triple-buffered region array (pMotionSearchResult), sharing
 * slots with a consumer via curMotionSearch/lastMotionSearch/
 * useMotionSearch indices.  Polls 60 times at ~42 ms intervals.
 * Compiled out unless ENABLE_GET_MOTION_SEARCH_RESULT is set.
 * Returns 0 on success, -1 for (M)JPEG channels or on allocation
 * failure. */
static int motion_search_thread(void* param)
{
#if ENABLE_GET_MOTION_SEARCH_RESULT
    if (mparam.c0_encoder_format == 1)
        return -1;  /* motion search is not available for (M)JPEG */
    usleep(2*1000*1000);
    int channel_id = *(int *)param;
    int ret = 0;
    int i = 0;
    curMotionSearch = 0;
    int count = MOTION_SEARCH_TOTAL_NUM;
    int result_len = count*sizeof(RTVencMotionSearchRegion);
    /* Triple buffer: producer fills one slot while the consumer reads another. */
    pMotionSearchResult = (RTVencMotionSearchRegion *)malloc(result_len*3);
    if (pMotionSearchResult == NULL) {
        /* fix: the malloc result was previously used without a NULL check */
        aw_loge("malloc motion search result failed, len = %d", result_len*3);
        return -1;
    }
    memset(pMotionSearchResult, 0, result_len*3);
    aw_logd("result_len = %d", result_len);
    for (int j = 0; j < 60; j++) {
        RTVencMotionSearchResult sMotion_result;
        RTVencMotionSearchRegion* region = &pMotionSearchResult[curMotionSearch];
        memset(region, 0, result_len);  /* clear just this slot (count regions) */
        memset(&sMotion_result, 0, sizeof(RTVencMotionSearchResult));
        sMotion_result.region = region;
        aw_logd("get motion search result = %d", j);
        ret = AWVideoInput_GetMotionSearchResult(channel_id, &sMotion_result);
        for(i = 0; i < count; i++)
        {
            if (region[i].is_motion)
                aw_logd("i = %d, totalN = %d, intraN = %d, largeMvN = %d, smallMvN = %d, zeroMvN = %d, largeSadN = %d, is_motion = %d, region:%d,%d,%d,%d",
                    i, region[i].total_num, region[i].intra_num,
                    region[i].large_mv_num, region[i].small_mv_num,
                    region[i].zero_mv_num, region[i].large_sad_num,
                    region[i].is_motion,
                    region[i].pix_x_bgn, region[i].pix_x_end,
                    region[i].pix_y_bgn, region[i].pix_y_end);
        }
        /* Advance to the next free slot, never landing on the one the
         * consumer is currently using. */
        if (useMotionSearch < 0) {
            lastMotionSearch = curMotionSearch;
            useMotionSearch = lastMotionSearch;
            curMotionSearch = (curMotionSearch + count) % (count * 3);
        } else {
            lastMotionSearch = curMotionSearch;
            do {
                curMotionSearch = (curMotionSearch + count) % (count * 3);
            } while (curMotionSearch == useMotionSearch);
        }
        aw_logd("cur %d, last %d, use %d", curMotionSearch, lastMotionSearch, useMotionSearch);
        usleep(42*1000);
    }
    free(pMotionSearchResult);
    pMotionSearchResult = NULL;
#endif
    return 0;
}
/* Default write-back YUV settings: write-back enabled with two buffers,
 * no ISP scaling, crop disabled (the crop rect is still pre-filled at
 * 640x640), and the helper info set to fetch 5 frames. */
static void init_wbyuv_param_info(RTsWbYuvParam *pWbYuvParam, WbYuvFuncInfo *pWbYuvFuncInfo)
{
    pWbYuvParam->bEnableWbYuv       = 1;
    pWbYuvParam->nWbBufferNum       = 2;
    pWbYuvParam->scalerRatio        = RTVENC_ISP_SCALER_0;
    pWbYuvParam->bEnableCrop        = 0;
    pWbYuvParam->sWbYuvcrop.nWidth  = 640;
    pWbYuvParam->sWbYuvcrop.nHeight = 640;

    pWbYuvFuncInfo->enable_wbyuv = 1;
    pWbYuvFuncInfo->get_num      = 5;
}
/* Apply the channel settings that must be in place before the channel
 * starts: rotation, chroma-gray mode, and (optionally) YUV write-back.
 * Write-back uses the destination size when both c0_dst_w/h are set,
 * otherwise the source size. */
static void setParamBeforeStart(const demo_video_param* pmparam, int channel_id)
{
    if (pmparam->rotate_angle != 0)
        AWVideoInput_SetRotate(channel_id, pmparam->rotate_angle);
    if (pmparam->enable_gray)
        AWVideoInput_SetChmoraGray(channel_id, pmparam->enable_gray);
    if (pmparam->enable_wbyuv) {
        WbYuvFuncInfo *pWbYuvFuncInfo = NULL;
        RTsWbYuvParam mWbYuvParam;
        pWbYuvFuncInfo = calloc(1, sizeof(WbYuvFuncInfo));
        if (pWbYuvFuncInfo == NULL) {
            /* fix: the calloc result was previously passed on unchecked */
            aw_loge("calloc WbYuvFuncInfo failed, skip wbyuv setup");
            return ;
        }
        memset(&mWbYuvParam, 0, sizeof(RTsWbYuvParam));
        init_wbyuv_param_info(&mWbYuvParam, pWbYuvFuncInfo);
        /* NOTE(review): pWbYuvFuncInfo is never freed here -- presumably
         * ownership passes to AWVideoInput_SetWbYuv; confirm, otherwise
         * this leaks once per call. */
        if (pmparam->c0_dst_w && pmparam->c0_dst_h)
            AWVideoInput_SetWbYuv(channel_id, pWbYuvFuncInfo, &mWbYuvParam, pmparam->c0_dst_w, pmparam->c0_dst_h);
        else
            AWVideoInput_SetWbYuv(channel_id, pWbYuvFuncInfo, &mWbYuvParam, pmparam->c0_src_w, pmparam->c0_src_h);
    }
    return ;
}
/* Feed caller-supplied YUV data into the channel's input queue in a
 * loop until video_finish_flag is raised.  Each pass requests an empty
 * frame, copies 'size' bytes of yuv_data into it and submits it; when
 * no empty frame is available it sleeps 50 ms and retries.
 * Returns 0 on normal exit, -1 when 'size' exceeds one input frame. */
static int queue_outside_yuv(VideoInputConfig* pConfig, unsigned char* yuv_data, int size)
{
    VideoYuvFrame mYuvFrame;
    int channel_id = pConfig->channelId;
    int ret = 0;
    /* Required size of one input frame for this resolution/pixelformat.
     * (fix: the old width*height*3/2 pre-computation was a dead store.) */
    int cal_yuv_size = rt_cal_input_buffer_size(pConfig->width, pConfig->height, pConfig->pixelformat, 0);
    if (size > cal_yuv_size) {
        /* fix: message previously read "... is null", which was misleading */
        aw_loge("size(%d) > cal_yuv_size(%d), reject input", size, cal_yuv_size);
        return -1;
    }
    if (size != cal_yuv_size)
        aw_logw("take care, size(%d) cal_yuv_size(%d) is diff", size, cal_yuv_size);
    aw_logd("* size %d cal_yuv_size = %d, w&h = %d, %d", size, cal_yuv_size, pConfig->width, pConfig->height);
    while (1)
    {
        if(video_finish_flag == 1)
            break;
        memset(&mYuvFrame, 0, sizeof(VideoYuvFrame));
        ret = AWVideoInput_RequestEmptyYuvFrame(channel_id, &mYuvFrame);
        if(ret == 0)
        {
            if (yuv_data) {
                memcpy(mYuvFrame.virAddr[0], yuv_data, size);
            }
            else {
                /* Best-effort: submit the frame anyway so the pipeline
                 * keeps moving, but flag the missing input. */
                aw_loge("yuv_data is null");
            }
            AWVideoInput_SubmitFilledYuvFrame(channel_id, &mYuvFrame, size);
        }
        else
            usleep(50*1000);
    }
    return 0;
}
/* Apply the optional encoder features to an already-configured channel.
 * Most features are compile-time gated by ENABLE_* / TEST_* macros;
 * runtime flags in 'mparam' select 2D/3D NR, cyclic intra refresh and
 * local-YUV-file encoding.  When mparam->encode_local_yuv is set this
 * function blocks here, feeding the whole input file to the encoder
 * until video_finish_flag is raised.  Always returns 0. */
static int setParam(demo_video_param* mparam, int channel_id, VideoInputConfig* pConfig)
{
    int ret = 0;
#if 0//this for user debug, kernel use isp param reality.
    if(mparam->enable_sharp)
    {
        RTsEncppSharp s_Sharp;
        RTsEncppSharpParamDynamic mSharpParamDynamic;
        RTsEncppSharpParamStatic mSharpParamStatic;
        memset(&mSharpParamDynamic, 0, sizeof(RTsEncppSharpParamDynamic));
        memset(&mSharpParamStatic, 0, sizeof(RTsEncppSharpParamStatic));
        memset(&s_Sharp, 0, sizeof(RTsEncppSharp));
        initSharpFunc(&mSharpParamDynamic, &mSharpParamStatic);
        s_Sharp.sEncppSharpParam.pDynamicParam = &mSharpParamDynamic;;
        s_Sharp.sEncppSharpParam.pStaticParam = &mSharpParamStatic;;
        s_Sharp.bsharp = 1;
        AWVideoInput_SetSharp(channel_id, &s_Sharp);
    }
#endif
#if TEST_CROP_FUNC
    /* Exercise dynamic crop: 1280x720, then 640x480, then crop off,
     * one second apart. */
    usleep(1000*1000);
    RTCropInfo rt_crop_info;
    rt_crop_info.enable_crop = 1;
    rt_crop_info.s_crop_rect.nLeft = 80;
    rt_crop_info.s_crop_rect.nTop = 64;
    rt_crop_info.s_crop_rect.nWidth = 1280;
    rt_crop_info.s_crop_rect.nHeight = 720;
    AWVideoInput_SetCrop(channel_id, &rt_crop_info);
    usleep(1000*1000);
    rt_crop_info.enable_crop = 1;
    rt_crop_info.s_crop_rect.nLeft = 80;
    rt_crop_info.s_crop_rect.nTop = 64;
    rt_crop_info.s_crop_rect.nWidth = 640;
    rt_crop_info.s_crop_rect.nHeight = 480;
    AWVideoInput_SetCrop(channel_id, &rt_crop_info);
    usleep(1000*1000);
    rt_crop_info.enable_crop = 0;
    AWVideoInput_SetCrop(channel_id, &rt_crop_info);
#endif
    if (mparam->en_2d_3d_nr) {
        RTs3DfilterParam m3DfilterParam;
        RTs2DfilterParam m2DfilterParam;
        init_2d_3d_param(&m2DfilterParam, &m3DfilterParam);
        AWVideoInput_Set2dNR(channel_id, &m2DfilterParam);
        AWVideoInput_Set3dNR(channel_id, &m3DfilterParam);
    }
    if (mparam->enable_p_intra_refresh) {
        RTVencCyclicIntraRefresh sIntraRefresh;
        sIntraRefresh.bEnable = 1;
        sIntraRefresh.nBlockNumber = 10;
        AWVideoInput_SetPIntraRefresh(channel_id, &sIntraRefresh);
    }
#if ENABLE_GET_MOTION_SEARCH_RESULT
    /* Motion search applies to H.264 (0) and H.265 (2) only. */
    if (pConfig->encodeType == 0 || pConfig->encodeType == 2) {
        init_motion_search_param(&mparam->motion_search_param);
        AWVideoInput_SetMotionSearchParam(channel_id, &mparam->motion_search_param);
    }
#endif
#if TESET_ROI_FUNC
    RTVencROIConfig stRoiConfigs[MAX_ROI_AREA];
    init_roi(stRoiConfigs);
    AWVideoInput_SetRoi(channel_id, stRoiConfigs);
#endif
#if ENABLE_RESET_ENCODER_TYPE
    /* NOTE(review): this branch hard-codes channel 0 and the global
     * out_file_0 rather than using channel_id. */
    AWVideoInput_Start(0, 0);
    AWVideoInput_ResetEncoderType(0, 2);
    fclose(out_file_0);
    out_file_0 = fopen(OUT_PUT_FILE_3, "wb");
    aw_logd("reset encodertype -- reopen : %p, path = %s", out_file_0, OUT_PUT_FILE_3);
    AWVideoInput_Start(0, 1);
#endif
#if ENABLE_GET_BIN_IMAGE_DATA
    get_bin_image_data(channel_id, pConfig);
#endif
#if ENABLE_GET_MV_INFO_DATA
    if(pConfig->encodeType == 0)//* h264
        get_mv_info_data_h264(channel_id, pConfig);
    else if(pConfig->encodeType == 2)//* h265
        get_mv_info_data_h265(channel_id, pConfig);
#endif
#if ENABLE_SET_HFLIP
    usleep(1000*1000);
    aw_logd("set h flip, channelId = %d\n", channel_id);
    AWVideoInput_SetHFlip(channel_id, 1);
#endif
#if ENABLE_SET_VFLIP
    usleep(1000*1000);
    aw_logd("set v flip, channelId = %d\n", channel_id);
    AWVideoInput_SetVFlip(channel_id, 1);
#endif
#if ENABLE_SET_PLF
    AWVideoInput_SetPowerLineFreq(channel_id, RT_FREQUENCY_50HZ);
#endif
#if ENABLE_DYNAMIC_SET_QP_AND_BITRATE_AND_FPS
    /* Set a tight QP range / bitrate / fps, read them back for
     * verification, then restore fps to 15 after 3 s. */
    video_qp_range mqp_range;
    mqp_range.i_min_qp = 35;
    mqp_range.i_max_qp = 51;
    mqp_range.p_min_qp = 35;
    mqp_range.p_max_qp = 51;
    mqp_range.i_init_qp = 35;
    mqp_range.enable_mb_qp_limit = 0;
    AWVideoInput_SetQpRange(channel_id, &mqp_range);
    AWVideoInput_SetBitrate(channel_id, 1536/* kbps */);
    AWVideoInput_SetFps(channel_id, 13);
    memset(&mqp_range, 0, sizeof(video_qp_range));
    AWVideoInput_GetQpRange(channel_id, &mqp_range);
    int bitrate = AWVideoInput_GetBitrate(channel_id);
    int fps = AWVideoInput_GetFps(channel_id);
    aw_logd("get info: i_min&max_qp = %d, %d, p_min&max_qp = %d, %d, bitrate = %d, fps = %d",
        mqp_range.i_min_qp, mqp_range.i_max_qp, mqp_range.p_min_qp, mqp_range.p_max_qp,
        bitrate, fps);
    usleep(3*1000*1000);
    AWVideoInput_SetFps(channel_id, 15);
#endif
#if ENABLE_OSD_FUNCTION
    /* Three overlay items: a normal overlay, a luma-reverse overlay and
     * a cover block.  'widht' is the SDK's own field spelling.
     * NOTE(review): ret from item 0 is overwritten by item 1's
     * assignment below, so a failure on item 0 alone goes unnoticed. */
    VideoInputOSD OverlayInfo;
    int index = 0;
    memset(&OverlayInfo, 0, sizeof(VideoInputOSD));
    OverlayInfo.osd_num = 3;
    OverlayInfo.item_info[index].osd_type = RT_NORMAL_OVERLAY;
    OverlayInfo.item_info[index].start_x = 16;
    OverlayInfo.item_info[index].start_y = 752;
    OverlayInfo.item_info[index].widht = 464;
    OverlayInfo.item_info[index].height = 32;
    ret = init_overlay_info(&OverlayInfo, index);
    index = 1;
    OverlayInfo.item_info[index].osd_type = RT_LUMA_REVERSE_OVERLAY;
    OverlayInfo.item_info[index].start_x = 480;
    OverlayInfo.item_info[index].start_y = 784;
    OverlayInfo.item_info[index].widht = 464;
    OverlayInfo.item_info[index].height = 32;
    OverlayInfo.invert_mode = 3;
    OverlayInfo.invert_threshold = 90;
    ret = init_overlay_info(&OverlayInfo, index);
    index = 2;
    OverlayInfo.item_info[index].osd_type = RT_COVER_OSD;
    OverlayInfo.item_info[index].start_x = 800;
    OverlayInfo.item_info[index].start_y = 512;
    OverlayInfo.item_info[index].widht = 464;
    OverlayInfo.item_info[index].height = 32;
    OverlayInfo.item_info[index].cover_yuv.cover_y = 0;
    OverlayInfo.item_info[index].cover_yuv.cover_u = 0;
    OverlayInfo.item_info[index].cover_yuv.cover_v = 0x64;
    ret |= init_overlay_info(&OverlayInfo, index);
    if(ret == 0)
        AWVideoInput_SetOSD(channel_id, &OverlayInfo);
    usleep(2*1000*1000);
    //AWVideoInput_SetOSD(channel_id, &OverlayInfo);
#if 0
    unsigned int g2d_data_size = 464*32*4;
    unsigned char* g2d_data = malloc(g2d_data_size);
    AWVideoInput_GetG2DData(channelId_0, g2d_data);
    FILE *g2d_file = NULL;
    g2d_file = fopen("/tmp/g2d.dat", "wb");
    fwrite(g2d_data, 1, g2d_data_size, g2d_file);
    usleep(200*1000);
    fclose(g2d_file);
#endif
#endif
#if ENABLE_RESET_SIZE
    /* NOTE(review): OverlayInfo is declared inside the
     * ENABLE_OSD_FUNCTION block above -- enabling ENABLE_RESET_SIZE
     * without ENABLE_OSD_FUNCTION will not compile. */
    reset_size(channel_id, &OverlayInfo);
#endif
    if(mparam->encode_local_yuv)
    {
        /* Blocking loop: read YUV frames from InputFileName and submit
         * them to the encoder until video_finish_flag is set; on EOF the
         * file is rewound so the clip loops forever. */
        FILE * in_yuv_fp = fopen(mparam->InputFileName, "rb");
        aw_logd("fopen %s w&h = %d, %d", mparam->InputFileName, pConfig->width, pConfig->height);
        if(in_yuv_fp == NULL)
        {
            aw_loge("fopen failed: %s", mparam->InputFileName);
        }
        else
        {
            int yuv_size = pConfig->width*pConfig->height*3/2;
            VideoYuvFrame mYuvFrame;
            yuv_size = rt_cal_input_buffer_size(pConfig->width, pConfig->height, pConfig->pixelformat, 1);
            aw_logd("* yuv_size = %d, w&h = %d, %d", yuv_size, pConfig->width, pConfig->height);
            while (1)
            {
                if(video_finish_flag == 1)
                    break;
                memset(&mYuvFrame, 0, sizeof(VideoYuvFrame));
                ret = AWVideoInput_RequestEmptyYuvFrame(channel_id, &mYuvFrame);
                if(ret == 0)
                {
                    ret = fread(mYuvFrame.virAddr[0], 1, yuv_size, in_yuv_fp);
                    aw_logd("vir_addr = %p, ret = %d, yuv_size = %d",
                        mYuvFrame.virAddr[0], ret, yuv_size);
                    if(ret != yuv_size)
                    {
                        /* Short read: rewind and re-read from the start.
                         * The second fread's return value is ignored --
                         * assumed to succeed after rewind; TODO confirm. */
                        fseek(in_yuv_fp, 0, SEEK_SET);
                        fread(mYuvFrame.virAddr[0], 1, yuv_size, in_yuv_fp);
                    }
                    AWVideoInput_SubmitFilledYuvFrame(channel_id, &mYuvFrame, yuv_size);
                }
                else
                    usleep(50*1000);
            }
            fclose(in_yuv_fp);
        }
    }
#if ENABLE_SET_VBR_PARAM
    /* Set VBR tuning parameters, then read them back for verification. */
    RTVencVbrParam mVbrParam;
    memset(&mVbrParam, 0, sizeof(RTVencVbrParam));
    mVbrParam.uMaxBitRate = 2048;
    mVbrParam.nMovingTh = 20;
    mVbrParam.nQuality = 10;
    mVbrParam.nIFrmBitsCoef = 10;
    mVbrParam.nPFrmBitsCoef = 10;
    AWVideoInput_SetVbrParam(channel_id, &mVbrParam);
    memset(&mVbrParam, 0, sizeof(RTVencVbrParam));
    AWVideoInput_GetVbrParam(channel_id, &mVbrParam);
    aw_logd("get vbr param: MaxBitRate = %d, MovingTh = %d, quality = %d, I&PFrmBitsCoef = %d, %d",
        mVbrParam.uMaxBitRate, mVbrParam.nMovingTh,
        mVbrParam.nQuality, mVbrParam.nIFrmBitsCoef, mVbrParam.nPFrmBitsCoef);
#endif
#if ENABLE_GET_SUM_MAD
    usleep(2*1000*1000);
    int sum_mad = AWVideoInput_GetSumMad(channel_id);
    aw_logd("sum_mad = %d",sum_mad);
#endif
#if ENABLE_GET_LV_SET_IR
    /* Read the scene luminance, then force IR mode (grey on, IR LED on,
     * IR flash off). */
    ret = AWVideoInput_GetLuminance(channel_id);
    aw_logd("the lv is : %d\n", ret);
    RTIrParam mIrParam;
    memset(&mIrParam, 0, sizeof(RTIrParam));
    mIrParam.grey = 1;
    mIrParam.ir_on = 1;
    mIrParam.ir_flash_on = 0;
    AWVideoInput_SetIrParam(channel_id, &mIrParam);
#endif
#if ENABLE_SET_SUPER_FRAME_PARAM
    /* Super-frame limits (mode NONE here, so no re-encode is forced). */
    RTVencSuperFrameConfig mSuperConfig;
    memset(&mSuperConfig, 0, sizeof(RTVencSuperFrameConfig));
    mSuperConfig.eSuperFrameMode = RT_VENC_SUPERFRAME_NONE;
    mSuperConfig.nMaxRencodeTimes = 0;
    mSuperConfig.nMaxIFrameBits = 200*1024*8; //* 200 KB
    mSuperConfig.nMaxPFrameBits = mSuperConfig.nMaxIFrameBits / 3;
    mSuperConfig.nMaxP2IFrameBitsRatio = 0.33;
    AWVideoInput_SetSuperFrameParam(channel_id, &mSuperConfig);
#endif
    return 0;
}
/* Thread entry point: snapshot a JPEG using the config passed in
 * (copied to a local before use).  Compiled to a no-op unless
 * ENABLE_CATCH_JPEG is set.  Always returns NULL. */
static void* catch_jpeg_thread(void* param)
{
#if ENABLE_CATCH_JPEG
    catch_jpeg_config jpg_config;
    jpg_config = *((catch_jpeg_config *)param);
    catch_jpeg(&jpg_config);
#endif
    return NULL;
}
#if RT_SUPPORT_AWAIISP
/* Worker thread that switches the AIISP pipeline between NPU mode and
 * the normal (day) mode for one channel.  Two policies:
 *  - mparam.aiisp_auto_switch: poll the AE light level every 100 ms and
 *    flip modes with hysteresis -- a switch only happens after
 *    DAY_TO_NIGHT_SIGNAL_CNT / NIGHT_TO_DAY_SIGNAL_CNT consecutive
 *    readings past the corresponding threshold;
 *  - otherwise: toggle every mparam.aiisp_switch_interval iterations
 *    (manual test mode), pacing the loop by the channel frame period.
 * Runs until the global thread_exit_flag is set.  Returns NULL. */
static void* aiisp_switch_thread(void* param)
{
    int channel_id = *(int *)param;
    int loop_cnt = 0;
    int interval_ms = 0;
    awaiisp_mode mode = mparam.aiisp_mode;
    awaiisp_mode last_aiisp_mode = mode;
    VideoChannelInfo config;
    memset(&config, 0, sizeof(VideoChannelInfo));
    /* NOTE(review): 'config' is filled here but not read afterwards. */
    AWVideoInput_GetChannelInfo(channel_id, &config);
    /* Default loop pacing: one frame period, or 1 s if fps is 0. */
    if (MAIN_CHANNEL_FPS)
        interval_ms = 1000 / MAIN_CHANNEL_FPS;
    if (0 == interval_ms)
        interval_ms = 1000;
    int night_to_day_signal_cnt = 0;
    int day_to_night_signal_cnt = 0;
    int env_light_level = 0;
    while (0 == thread_exit_flag)
    {
        if (mparam.aiisp_auto_switch)
        {
            // switch aiisp by ae param
            /* NOTE(review): only cfg_id is set before the Get call --
             * presumably the getter fills the rest of the struct;
             * confirm no other field is read uninitialized. */
            RTIspCtrlAttr isp_ctrl_attr;
            isp_ctrl_attr.isp_attr_cfg.cfg_id = RT_ISP_CTRL_AE_EV_LV_ADJ;
            AWVideoInput_GetIspAttrCfg(channel_id, &isp_ctrl_attr);
            env_light_level = isp_ctrl_attr.isp_attr_cfg.ae_ev_lv_adj;
            //aw_logw("channel %d, env_light_level:%d, day2nignt:%d, night2day:%d", channel_id, env_light_level, day_to_night_signal_cnt, night_to_day_signal_cnt);
            if (env_light_level < DAY_TO_NIGHT_THRESHOLD)
            {
                /* Dark scene: count towards switching to NPU (night) mode. */
                if (++day_to_night_signal_cnt >= DAY_TO_NIGHT_SIGNAL_CNT)
                {
                    day_to_night_signal_cnt = 0;
                    mode = AWAIISP_MODE_NPU;
                }
                night_to_day_signal_cnt = 0;
            }
            else if (env_light_level > NIGHT_TO_DAY_THRESHOLD)
            {
                /* Bright scene: count towards switching back to day mode. */
                if (++night_to_day_signal_cnt >= NIGHT_TO_DAY_SIGNAL_CNT)
                {
                    night_to_day_signal_cnt = 0;
                    mode = AWAIISP_SWITCH_NORMAL_MODE;
                }
                day_to_night_signal_cnt = 0;
            }
            else
            {
                /* In the dead band between thresholds: reset both counters. */
                night_to_day_signal_cnt = 0;
                day_to_night_signal_cnt = 0;
            }
            interval_ms = 100;
        }
        else
        {
            // for aiisp switch test by manually specifying interval
            if ((mparam.aiisp_switch_interval) && (0 == (++loop_cnt) % mparam.aiisp_switch_interval))
            {
                mode = (AWAIISP_MODE_NPU == last_aiisp_mode) ? AWAIISP_SWITCH_NORMAL_MODE : AWAIISP_MODE_NPU;
            }
        }
        if (last_aiisp_mode != mode)
        {
            if (mparam.aiisp_auto_switch)
                aw_logw("isp%d switch mode %d -> %d, env_light_level:%d", MAIN_AWAIISP_DEV_ID, last_aiisp_mode, mode, env_light_level);
            else
                aw_logw("isp%d switch mode %d -> %d", MAIN_AWAIISP_DEV_ID, last_aiisp_mode, mode);
            rt_awaiisp_common_switch_param switch_param;
            memset(&switch_param, 0, sizeof(rt_awaiisp_common_switch_param));
            switch_param.config.mode = mode;
            if (AWAIISP_MODE_NPU == mode)
            {
                switch_param.isp_cfg_bin_path = mparam.isp_aiisp_bin_path;
                switch_param.config.release_aiisp_resources = 0;
            }
            else
            {
                switch_param.isp_cfg_bin_path = mparam.isp_day_bin_path;
                /**
                    Decide whether to release aiisp resources based on the needs of the scenario.
                    If the memory resources are sufficient, 'release_aiisp_resources=1' is not recommended to release aiisp resources during switching.
                    Because after release, aiisp resources need to be prepared before switching to NPU mode next time.
                    Switching speed will slow down.
                */
                switch_param.config.release_aiisp_resources = 1;
            }
            rt_awaiisp_common_switch_mode(MAIN_AWAIISP_DEV_ID, &switch_param);
            last_aiisp_mode = mode;
        }
        usleep(interval_ms * 1000);
    }
    return NULL;
}
#endif
/* Global teardown: deinitialise the video-input stack, destroy the
 * finish semaphore, and release the OSD bitmap buffers and both output
 * files (free(NULL)/NULL checks make repeated calls harmless). */
static void exit_demo()
{
    AWVideoInput_DeInit();
    aw_logd("exit: deInit end\n");
    sem_destroy(&finish_sem);

    for (int i = 0; i < MAX_BITMAP_NUM; i++) {
        free(bit_map_info[i].argb_addr);   /* free(NULL) is a no-op */
        bit_map_info[i].argb_addr = NULL;
    }

    if (out_file_0 != NULL) {
        fclose(out_file_0);
        out_file_0 = NULL;
    }
    if (out_file_1 != NULL) {
        fclose(out_file_1);
        out_file_1 = NULL;
    }
    aw_logw("aw_demo, finish!\n");
}
/* Load the demo GDC (geometric distortion correction) calibration:
 * wall-mounted LDC warp with a fish-eye lens model; all dynamic view
 * adjustments (pan/tilt/zoom/rotation) stay at neutral values.
 * Always returns 0. */
static int initGdcFunc(RTsGdcParam *pGdcParam)
{
    RTsGdcParam *p = pGdcParam;

    /* Warp mode and mounting. */
    p->bGDC_en    = 1;
    p->eWarpMode  = RT_Gdc_Warp_LDC;
    p->eMountMode = RT_Gdc_Mount_Wall;
    p->bMirror    = 0;

    /* Camera intrinsics, calibrated at 3264x2448. */
    p->calib_widht  = 3264;   /* 'widht' is the SDK's field spelling */
    p->calib_height = 2448;
    p->fx = 2417.19;
    p->fy = 2408.43;
    p->cx = 1631.50;
    p->cy = 1223.50;
    p->fx_scale = 2161.82;
    p->fy_scale = 2153.99;
    p->cx_scale = 1631.50;
    p->cy_scale = 1223.50;

    /* Fish-eye lens distortion model and coefficients. */
    p->eLensDistModel = RT_Gdc_DistModel_FishEye;
    p->distCoef_wide_ra[0] = -0.3849;
    p->distCoef_wide_ra[1] = 0.1567;
    p->distCoef_wide_ra[2] = -0.0030;
    p->distCoef_wide_ta[0] = -0.00005;
    p->distCoef_wide_ta[1] = 0.0016;
    p->distCoef_fish_k[0]  = -0.0024;
    p->distCoef_fish_k[1]  = 0.141;
    p->distCoef_fish_k[2]  = -0.3;
    p->distCoef_fish_k[3]  = 0.2328;

    /* View adjustments, all neutral. */
    p->centerOffsetX        = 0;
    p->centerOffsetY        = 0;
    p->rotateAngle          = 0;    /* [0,360] */
    p->radialDistortCoef    = 0;    /* [-255,255] */
    p->trapezoidDistortCoef = 0;    /* [-255,255] */
    p->fanDistortCoef       = 0;    /* [-255,255] */
    p->pan                  = 0;    /* pano360:[0,360]; others:[-90,90] */
    p->tilt                 = 0;    /* [-90,90] */
    p->zoomH                = 100;  /* [0,100] */
    p->zoomV                = 100;  /* [0,100] */
    p->scale                = 100;  /* [0,100] */
    p->innerRadius          = 0;    /* [0,width/2] */
    p->roll                 = 0;    /* [-90,90] */
    p->pitch                = 0;    /* [-90,90] */
    p->yaw                  = 0;    /* [-90,90] */

    /* Perspective transform: identity matrix. */
    p->perspFunc = RT_Gdc_Persp_Only;
    p->perspectiveProjMat[0] = 1.0;
    p->perspectiveProjMat[1] = 0.0;
    p->perspectiveProjMat[2] = 0.0;
    p->perspectiveProjMat[3] = 0.0;
    p->perspectiveProjMat[4] = 1.0;
    p->perspectiveProjMat[5] = 0.0;
    p->perspectiveProjMat[6] = 0.0;
    p->perspectiveProjMat[7] = 0.0;
    p->perspectiveProjMat[8] = 1.0;

    /* Mounting geometry and ROI distances (meters). */
    p->mountHeight    = 0.85;
    p->roiDist_ahead  = 4.5;
    p->roiDist_left   = -1.5;
    p->roiDist_right  = 1.5;
    p->roiDist_bottom = 0.65;

    /* Post-warp peaking (edge enhancement). */
    p->peaking_en            = 1;   /* 0/1 */
    p->peaking_clamp         = 1;   /* 0/1 */
    p->peak_m                = 16;  /* [0,63] */
    p->th_strong_edge        = 6;   /* [0,15] */
    p->peak_weights_strength = 2;   /* [0,15] */

    /* LDC warp renders into a fixed-size birds-eye image. */
    if (p->eWarpMode == RT_Gdc_Warp_LDC) {
        p->birdsImg_width  = 768;
        p->birdsImg_height = 1080;
    }
    return 0;
}
/* Default colour-space signalling for the encoder: BT.709 primaries for
 * both source and destination, full-range samples. */
static void init_color_space(RTVencVideoSignal *pvenc_video_signal)
{
    pvenc_video_signal->video_format         = RT_DEFAULT;
    pvenc_video_signal->full_range_flag      = 1;
    pvenc_video_signal->src_colour_primaries = RT_VENC_BT709;
    pvenc_video_signal->dst_colour_primaries = RT_VENC_BT709;
}
/* Signal handler installed from main(): on SIGSEGV it tears the demo
 * down and force-exits; every other signal is only logged.
 * NOTE(review): exit_demo() calls fclose()/free(), which are not
 * async-signal-safe -- acceptable for a demo, not for production. */
void handler(int sig)
{
    aw_logw("rev sig=%d\n", sig);
    switch (sig) {
    case SIGSEGV:
        exit_demo();
        _exit(1);
        break;
    default:
        break;
    }
}
int main(int argc, char** argv)
{
aw_logw("aw_demo start, OUT_PUT_FILE_PREFIX = %s time: %lld clock_gettime: %lld", OUT_PUT_FILE_PREFIX, get_timeofday(), get_cur_time_us());
#if 0
int tmp_size = 10*1024*1024;
unsigned char* tmp_buf = malloc(tmp_size);
memset(tmp_buf, 0xf, tmp_size);
#endif
pthread_t thread = 0;
int i = 0;
int ret = 0;
int channelId_0 = MAIN_CHANNEL_CHANNEL_ID;
int channelId_1 = 4;
int channelId_2 = 8;
VideoInputConfig config_1;
memset(&config_1, 0, sizeof(VideoInputConfig));
thread_exit_flag = 0;
stream_count_1 = 0;
stream_count_0 = 0;
video_finish_flag = 0;
save_file_cnt = 0;
need_save_new_file = 0;
save_file_cnt_1 = 0;
need_save_new_file_1 = 0;
memset(&mparam, 0, sizeof(demo_video_param));
memset(bit_map_info, 0, MAX_BITMAP_NUM*sizeof(BitMapInfoS));
mparam.c0_encoder_format = -1;
mparam.c1_encoder_format = -1;
mparam.pixelformat = RT_PIXEL_NUM;
mparam.pixelformat_1 = RT_PIXEL_NUM;
mparam.pixelformat_2 = RT_PIXEL_NUM;
mparam.use_vipp_num = 0;
mparam.use_vipp_num_1 = 4;
mparam.use_vipp_num_2 = 8;
mparam.jpg_width = HUMAN_VIPP_WIDTH;
mparam.jpg_heigh = HUMAN_VIPP_HEIGHT;
mparam.use_vipp_num = 0;
mparam.use_vipp_num_1 = 4;
mparam.use_vipp_num_2 = 8;
mparam.share_buf_num = 2;
mparam.enable_sharp = 1;
mparam.enable_aiisp = MAIN_CHANNEL_AIISP_ENABLE;
mparam.tdm_rxbuf_cnt = MAIN_CHANNEL_TDM_RXBUF_CNT;
mparam.aiisp_switch_interval = 0;
strcpy(mparam.OutputFilePath, OUT_PUT_FILE_PREFIX);
/******** begin parse the config paramter ********/
if(argc >= 2)
{
aw_logd("******************************\n");
for(i = 1; i < (int)argc; i += 2)
{
ParseArgument(&mparam, argv[i], argv[i + 1]);
}
aw_logd("******************************\n");
}
else
{
aw_logd(" we need more arguments \n");
PrintDemoUsage();
//return 0;
}
check_param(&mparam);
aw_logd("*demo param: c0: w&h = %d, %d; encode_format = %d, bitrate = %d",
mparam.c0_src_w, mparam.c0_src_h, mparam.c0_encoder_format, mparam.c0_bitrate);
aw_logd("*demo param: c1: w&h = %d, %d; encode_format = %d, bitrate = %d",
mparam.c1_src_w, mparam.c1_src_h, mparam.c1_encoder_format, mparam.c1_bitrate);
time_start = get_cur_time_us();
if(mparam.encoder_num > 0)
max_bitstream_count = mparam.encoder_num;
else
max_bitstream_count = SAVE_BITSTREAM_COUNT;
channelId_0 = mparam.use_vipp_num;
channelId_1 = mparam.use_vipp_num_1;
channelId_2 = mparam.use_vipp_num_2;
VideoInputConfig config_0;
memset(&config_0, 0, sizeof(VideoInputConfig));
config_0.channelId = channelId_0;
config_0.fps = MAIN_CHANNEL_FPS;
config_0.gop = MAIN_CHANNEL_GOP;
config_0.product_mode = MAIN_CHANNEL_PRODUCT_MODE;
config_0.vbr = MAIN_CHANNEL_VBR;
if (config_0.vbr) {
config_0.vbr_param.uMaxBitRate = mparam.c0_bitrate/1024; //* kb
config_0.vbr_param.nMovingTh = 20;
config_0.vbr_param.nQuality = 10;
config_0.vbr_param.nIFrmBitsCoef = 10;
config_0.vbr_param.nPFrmBitsCoef = 10;
}
#if ENABLE_GET_MOTION_SEARCH_RESULT
    /* Motion-search output is only produced for VBR mode or the IPC/doorbell
     * product modes; warn (demo keeps running) when neither holds. */
    if (!(1 == config_0.vbr ||
        RTVENC_PRODUCT_STATIC_IPC == config_0.product_mode ||
        RTVENC_PRODUCT_MOVING_IPC == config_0.product_mode ||
        RTVENC_PRODUCT_DOORBELL == config_0.product_mode))
    {
        aw_logw("channel %d MOTION SEARCH only for VBR mode or IPC or DOORBELL, vbr=%d, product_mode=%d",
            channelId_0, config_0.vbr, config_0.product_mode);
    }
#endif
    /* QP window and H.264 profile/level for the main (channel 0) encoder. */
    config_0.qp_range.i_min_qp = MAIN_CHANNEL_I_MIN_QP;
    config_0.qp_range.i_max_qp = MAIN_CHANNEL_I_MAX_QP;
    config_0.qp_range.p_min_qp = MAIN_CHANNEL_P_MIN_QP;
    config_0.qp_range.p_max_qp = MAIN_CHANNEL_P_MAX_QP;
    config_0.qp_range.i_init_qp = MAIN_CHANNEL_INIT_QP;
    config_0.profile = AW_Video_H264ProfileMain;
    config_0.level = AW_Video_H264Level51;
    config_0.pixelformat = mparam.pixelformat;
    config_0.enable_sharp = mparam.enable_sharp;
    config_0.bonline_channel = mparam.bonline_channel;
    config_0.share_buf_num = mparam.share_buf_num;
    config_0.en_16_align_fill_data = 0;
    config_0.breduce_refrecmem = MAIN_CHANNEL_REDUCE_REFREC_MEM;
    config_0.enable_aiisp = mparam.enable_aiisp;
    config_0.tdm_rxbuf_cnt = mparam.tdm_rxbuf_cnt;
    aw_logd("ch%d enable_aiisp:%d, tdm_rxbuf_cnt:%d, aiisp_switch_interval:%d", config_0.channelId,
        config_0.enable_aiisp, config_0.tdm_rxbuf_cnt, mparam.aiisp_switch_interval);
    init_color_space(&config_0.venc_video_signal);
    if(mparam.encode_local_yuv)
    {
        /* Local-YUV encode mode: use a fixed QP window, write the encoded
         * stream plus YUV to file, and force the extra channels off. */
        config_0.qp_range.i_min_qp = 20;
        config_0.qp_range.i_max_qp = 45;
        config_0.qp_range.p_min_qp = 20;
        config_0.qp_range.p_max_qp = 45;
        config_0.qp_range.i_init_qp = 20;
        config_0.output_mode = OUTPUT_MODE_ENCODE_FILE_YUV;
        mparam.en_second_channel = 0;
        mparam.en_third_channel = 0;
    }
    else
    {
        config_0.output_mode = OUTPUT_MODE_STREAM;
    }
    /* Source/destination geometry and rate control for channel 0. */
    config_0.width = mparam.c0_src_w;
    config_0.height = mparam.c0_src_h;
    config_0.dst_width = mparam.c0_dst_w;
    config_0.dst_height = mparam.c0_dst_h;
    config_0.bitrate = mparam.c0_bitrate/1024; //* kb
    config_0.encodeType = mparam.c0_encoder_format;
    config_0.drop_frame_num = 0;
    config_0.enable_wdr = 0;
    config_0.dst_fps = mparam.dst_fps;
#if ENABLE_TEST_SET_CAMERA_MOVE_STATUS
    config_0.enable_ve_isp_linkage = 1;
#else
    config_0.enable_ve_isp_linkage = 0;
#endif
    if (config_0.encodeType == 1) {//jpg encode
        config_0.jpg_quality = 80;
        /* encoder_num > 1 presumably selects MJPEG with a target bitrate
         * window around 12 Mb — TODO confirm against the encoder docs. */
        if (mparam.encoder_num > 1) {//mjpg
            config_0.jpg_mode = mparam.jpg_mode = 1;
            config_0.bitrate = 12*1024;//kb
            config_0.bit_rate_range.bitRateMin = 10*1024;//kb
            config_0.bit_rate_range.bitRateMax = 14*1024;//kb
        }
    }
#if ENABLE_GET_BIN_IMAGE_DATA
    config_0.enable_bin_image = 1;
    config_0.bin_image_moving_th = 20;
#endif
#if ENABLE_GET_MV_INFO_DATA
    config_0.enable_mv_info = 1;
#endif
    if (mparam.enable_crop) {
        /* Fixed demo crop window; values are hard-coded for this demo. */
        config_0.s_crop_info.enable_crop = 1;
        config_0.s_crop_info.s_crop_rect.nLeft = 80;
        config_0.s_crop_info.s_crop_rect.nTop = 64;
        config_0.s_crop_info.s_crop_rect.nWidth = 640;
        config_0.s_crop_info.s_crop_rect.nHeight = 320;
    }
    time_end = get_cur_time_us();
    //aw_logd("init start, time = %lld, max_count = %d\n",time_end - time_start, max_bitstream_count);
    /* Install one handler for all termination signals so the demo can tear
     * the pipeline down.  NOTE(review): catching SIGABRT/SIGSEGV and doing
     * cleanup from a handler is best-effort only — most library calls are
     * not async-signal-safe. */
    struct sigaction act;
    act.sa_handler = handler;
    sigemptyset(&act.sa_mask);
    act.sa_flags = 0;
    if (sigaction(SIGTERM, &act, NULL) < 0)
        aw_loge("install signal error\n");
    if (sigaction(SIGINT, &act, NULL) < 0)
        aw_loge("install signal error\n");
    if (sigaction(SIGABRT, &act, NULL) < 0)
        aw_loge("install signal error\n");
    if (sigaction(SIGSEGV, &act, NULL) < 0)
        aw_loge("install signal error\n");
    /* Bring the video-input module up, then configure and start channel 0. */
    AWVideoInput_Init();
    sem_init(&finish_sem, 0, 0); /* waited on below; presumably posted by the signal handler — confirm */
    int64_t time_end1 = get_cur_time_us();
    aw_logd("time of aw_init: %lld\n",time_end1 - time_end);
    if(AWVideoInput_Configure(channelId_0, &config_0))
    {
        aw_loge("config err, exit!");
        goto _exit;
    }
    /* Parameters that must be applied after Configure but before Start. */
    setParamBeforeStart(&mparam, channelId_0);
    int64_t time_end2 = get_cur_time_us();
    aw_logd("time of aw_config: = %lld\n",time_end2 - time_end1);
    if (mparam.enable_gdc) {
        /* Optional geometric-distortion-correction setup. */
        RTsGdcParam mGdcParam;
        memset(&mGdcParam, 0, sizeof(RTsGdcParam));
        initGdcFunc(&mGdcParam);
        AWVideoInput_SetGdc(channelId_0, &mGdcParam);
    }
    /* Register stream/exit callbacks before starting capture. */
    AWVideoInput_CallBack(channelId_0, video_stream_cb, 1);
    AWVideoInput_SetChannelThreadExitCb(channelId_0, channel_thread_exit);
    int64_t time_end3 = get_cur_time_us();
    //aw_logd("callback end, time = %lld\n",time_end3 - time_end2);
    AWVideoInput_Start(channelId_0, 1);
#if RT_SUPPORT_AWAIISP
    pthread_t aiisp_thread = 0;
    if (config_0.enable_aiisp)
    {
        /*
         * AI-ISP bring-up: fill in default file paths for anything the user
         * left empty, hand the common config to the AI-ISP module, then spawn
         * the day/night switch thread.
         *
         * BUGFIX: the previous strncpy(dst, src, MAX) calls left dst without
         * a NUL terminator whenever strlen(src) >= MAX; snprintf always
         * terminates (and truncates safely).
         */
        if (strlen(mparam.isp_aiisp_bin_path) == 0)
            snprintf(mparam.isp_aiisp_bin_path, AW_UTIL_FILE_PATH_MAX_LEN, "%s", MAIN_AWAIISP_CFG_BIN_PATH);
        if (strlen(mparam.isp_day_bin_path) == 0)
            snprintf(mparam.isp_day_bin_path, AW_UTIL_FILE_PATH_MAX_LEN, "%s", MAIN_AWAIISP_CFG_BIN_PATH2);
        if (strlen(mparam.npu_lut_model_file_path) == 0)
            snprintf(mparam.npu_lut_model_file_path, AW_UTIL_FILE_PATH_MAX_LEN, "%s", MAIN_AWAIISP_LUT_NBG_FILE_PATH);
        if (strlen(mparam.npu_model_file_path) == 0)
            snprintf(mparam.npu_model_file_path, AW_UTIL_FILE_PATH_MAX_LEN, "%s", MAIN_AWAIISP_NBG_FILE_PATH);
        rt_awaiisp_common_config_param common_param;
        memset(&common_param, 0, sizeof(rt_awaiisp_common_config_param));
        /* NPU mode runs on the AI-ISP tuning bin; otherwise use the day bin. */
        common_param.isp_cfg_bin_path = (AWAIISP_MODE_NPU == mparam.aiisp_mode) ? mparam.isp_aiisp_bin_path: mparam.isp_day_bin_path;
        snprintf(common_param.config.lut_model_file, AWAIISP_FILE_PATH_MAX, "%s", mparam.npu_lut_model_file_path);
        snprintf(common_param.config.model_file, AWAIISP_FILE_PATH_MAX, "%s", mparam.npu_model_file_path);
        common_param.config.width = config_0.width;
        common_param.config.height = config_0.height;
        common_param.config.tdm_rxbuf_cnt = config_0.tdm_rxbuf_cnt;
        if (AWAIISP_MODE_NPU == mparam.aiisp_mode)
        {
            common_param.config.mode = AWAIISP_MODE_NPU;
            common_param.config.unprepared_aiisp_resources_advance = 0;
        }
        else
        {
            common_param.config.mode = AWAIISP_SWITCH_NORMAL_MODE;
            /**
            decide whether to prepare aiisp resources in advance for certain scenarios.
            If the memory resources are sufficient, it is recommended to prepare aiisp resources in advance.
            If no switching test is conducted, there is no need to prepare.
            */
            if (mparam.aiisp_auto_switch || mparam.aiisp_switch_interval)
                common_param.config.unprepared_aiisp_resources_advance = 0;
            else
                common_param.config.unprepared_aiisp_resources_advance = 1;
        }
        rt_awaiisp_common_enable(MAIN_AWAIISP_DEV_ID, &common_param);
        /* channelId_0 lives until this function returns and the thread is
         * joined before exit, so passing its address is safe here. */
        pthread_create(&aiisp_thread, NULL, aiisp_switch_thread, (void*)&channelId_0);
    }
#endif
    int64_t time_end4 = get_cur_time_us();
    aw_logd("time of aw_start: = %lld, total_time = %lld\n",
        time_end4 - time_end3,
        time_end4 - time_start);
    time_aw_start = time_end4;
    /* Read back and log the channel configuration that was actually applied
     * (may differ from the requested config_0 after driver clamping). */
    VideoChannelInfo mChannelInfo;
    memset(&mChannelInfo, 0, sizeof(VideoChannelInfo));
    AWVideoInput_GetChannelInfo(channelId_0, &mChannelInfo);
    aw_logd("state = %d, w&h = %d,%d, bitrate = %d kbps, fps = %d, i_qp = %d~%d, p_qp = %d~%d",
        mChannelInfo.state,
        mChannelInfo.mConfig.width, mChannelInfo.mConfig.height,
        mChannelInfo.mConfig.bitrate, mChannelInfo.mConfig.fps,
        mChannelInfo.mConfig.qp_range.i_min_qp, mChannelInfo.mConfig.qp_range.i_max_qp,
        mChannelInfo.mConfig.qp_range.p_min_qp, mChannelInfo.mConfig.qp_range.p_max_qp);
if (mparam.en_second_channel) {
config_1.channelId = channelId_1;
config_1.fps = 15;
config_1.gop = 30;
config_1.product_mode = 0;
config_1.vbr = 1;
if (config_1.vbr) {
config_1.vbr_param.uMaxBitRate = mparam.c1_bitrate/1024; //* kb
config_1.vbr_param.nMovingTh = 20;
config_1.vbr_param.nQuality = 10;
config_1.vbr_param.nIFrmBitsCoef = 10;
config_1.vbr_param.nPFrmBitsCoef = 10;
}
config_1.qp_range.i_min_qp = 25;
config_1.qp_range.i_max_qp = 45;
config_1.qp_range.p_min_qp = 25;
config_1.qp_range.p_max_qp = 45;
config_1.qp_range.i_init_qp = 35;
config_1.profile = AW_Video_H264ProfileMain;
config_1.level = AW_Video_H264Level51;
#if TEST_OUTSIDE_YUV
config_1.output_mode = OUTPUT_MODE_ENCODE_OUTSIDE_YUV;
#else
config_1.output_mode = OUTPUT_MODE_STREAM;
#endif
config_1.width = mparam.c1_src_w;
config_1.height = mparam.c1_src_h;
config_1.bitrate = mparam.c1_bitrate/1024; //* kb
config_1.encodeType = mparam.c1_encoder_format;
config_1.pixelformat = mparam.pixelformat_1;
config_1.enable_sharp = 1;
config_1.vin_buf_num = mparam.vin_buf_num;//for example, not set, kernel will set to 3
config_1.breduce_refrecmem = 1;
#if ENABLE_TEST_SET_CAMERA_MOVE_STATUS
config_0.enable_ve_isp_linkage = 1;
#else
config_1.enable_ve_isp_linkage = 0;
#endif
if (mparam.c0_src_w)
config_1.ve_encpp_sharp_atten_coef_per = 100 * mparam.c1_src_w / mparam.c0_src_w;
else
config_1.ve_encpp_sharp_atten_coef_per = 100;
init_color_space(&config_1.venc_video_signal);
AWVideoInput_Configure(channelId_1, &config_1);
AWVideoInput_CallBack(channelId_1, video_stream_cb_1, 1);
AWVideoInput_Start(channelId_1, 1);
}
if (mparam.en_third_channel) {
VideoInputConfig config_2;
memset(&config_2, 0, sizeof(VideoInputConfig));
config_2.channelId = channelId_2;
config_2.encodeType = 0;
config_2.width = HUMAN_VIPP_WIDTH;
config_2.height = HUMAN_VIPP_HEIGHT;
config_2.fps = 15;
config_2.bitrate = 0; //* kb
config_2.gop = 0;
config_2.vbr = 0;
config_2.output_mode = OUTPUT_MODE_YUV;
config_2.pixelformat = mparam.pixelformat_2;
config_2.enable_sharp = 0;
aw_logd("ch%d, %dx%d\n", channelId_2, config_2.width, config_2.height);
init_color_space(&config_2.venc_video_signal);
AWVideoInput_Configure(channelId_2, &config_2);
AWVideoInput_Start(channelId_2, 1);
catch_jpeg_config jpg_config;
memset(&jpg_config, 0, sizeof(catch_jpeg_config));
jpg_config.channel_id = channelId_2;
jpg_config.width = mparam.jpg_width;
jpg_config.height = mparam.jpg_heigh;
jpg_config.qp = 80;
if (get_yuv_frame_data(channelId_2, &config_2) != 0) {
goto _exit;
}
pthread_create(&thread, NULL, catch_jpeg_thread, (void*)&jpg_config);
}
setParam(&mparam, channelId_0, &config_0);
#if TEST_OUTSIDE_YUV
//use snd channel to test. this only a demo, Please modify according to the buffer source
FILE * in_yuv_fp = fopen(mparam.InputFileName, "rb");
if (in_yuv_fp) {
int cal_yuv_size = rt_cal_input_buffer_size(config_1.width, config_1.height, config_1.pixelformat, 0);
unsigned char* vir_addr = aw_rt_ion_alloc_palloc(cal_yuv_size);
fread(vir_addr, 1, config_1.width*config_1.height*3/2, in_yuv_fp);
fclose(in_yuv_fp);
if (OUTPUT_MODE_ENCODE_OUTSIDE_YUV == config_1.output_mode)
queue_outside_yuv(&config_1, vir_addr, cal_yuv_size);
aw_rt_ion_alloc_pfree(vir_addr);
} else {
aw_logd("fopen failed!");
}
#endif
#if ENABLE_GET_MOTION_SEARCH_RESULT
    pthread_t motion_thread = 0;
    pthread_create(&motion_thread, NULL, motion_search_thread, (void*)&channelId_0);
    if(motion_thread != 0)
        pthread_join(motion_thread, (void**)&ret);
#endif
    /* NOTE(review): (void**)&ret is only safe if ret is pointer-sized;
     * verify its declaration (an int* cast to void** is UB on LP64). */
    if(thread != 0)
        pthread_join(thread, (void**)&ret);
#if ENABLE_TEST_SET_CAMERA_MOVE_STATUS
#if ENABLE_TEST_SET_CAMERA_ADAPTIVE_MOVING_AND_STATIC
    sleep(2);
    aw_logw("camera start to adapt moving and static");
    AWVideoInput_SetCameraMoveStatus(mparam.use_vipp_num, RT_VENC_CAMERA_ADAPTIVE_MOVING_AND_STATIC);
#else
    sleep(2);
    aw_logw("camera start to moving");
    AWVideoInput_SetCameraMoveStatus(mparam.use_vipp_num, RT_VENC_CAMERA_FORCE_MOVING);
    sleep(2);
    aw_logd("camera moving stop");
    /* necessary! must set RT_VENC_CAMERA_ADAPTIVE_STATIC */
    AWVideoInput_SetCameraMoveStatus(mparam.use_vipp_num, RT_VENC_CAMERA_ADAPTIVE_STATIC);
#endif
#endif
    //* wait for finish
    sem_wait(&finish_sem);
#if ENABLE_ISP_REG_GET
    //ISP REG Get
    VIN_ISP_REG_GET_CFG isp_reg_get_cfg;
    memset(&isp_reg_get_cfg, 0, sizeof(isp_reg_get_cfg));
#if 1
    aw_logw("ISP REG action: [print]\n");
    isp_reg_get_cfg.flag = 0;
    AWVideoInput_GetISPReg(channelId_0, &isp_reg_get_cfg);
#else
    aw_logw("ISP REG action: [save]\n");
    isp_reg_get_cfg.flag = 1;
    isp_reg_get_cfg.path = ISP_REG_SAVE_FILE;
    isp_reg_get_cfg.len = strlen(isp_reg_get_cfg.path);
    AWVideoInput_GetISPReg(channelId_0, &isp_reg_get_cfg);
#endif
#endif
    /* Teardown: signal worker threads, disable AI-ISP (and join its thread),
     * then stop every channel that was started. */
    thread_exit_flag = 1;
#if RT_SUPPORT_AWAIISP
    if (config_0.enable_aiisp)
    {
        rt_awaiisp_common_disable(MAIN_AWAIISP_DEV_ID);
        if(aiisp_thread != 0)
            pthread_join(aiisp_thread, (void**)&ret);
    }
#endif
#if 1//ENABLE_FIRST_CHANNEL
    aw_logd("exit, stream_count_0 = %d\n", stream_count_0);
    AWVideoInput_Start(mparam.use_vipp_num, 0);
#endif
    if (mparam.en_second_channel) {
        AWVideoInput_Start(mparam.use_vipp_num_1, 0);
    }
    if (mparam.en_third_channel) {
        AWVideoInput_Start(mparam.use_vipp_num_2, 0);
    }
_exit:
    exit_demo();
    //isp_config_to_flash();
    return 0;
}