/* sdk-hwV1.3/external/fast-user-adapter/rt_media/demo/demo_get_yuv.c */

#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/types.h>
#include <pthread.h>
#include <unistd.h>
#include <sys/time.h>
#include <semaphore.h>
#include <signal.h>
#define LOG_TAG "demo_video"
#include <linux/videodev2.h>
#include <media/sunxi_camera_v2.h>
#include "AW_VideoInput_API.h"
#include "aw_util.h"
#define OUT_PUT_FILE_2 "/mnt/extsd"
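/* set to 1 to exercise AWVideoInput_ResetSize() in the middle of the capture loop */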
#define TEST_YUV_RESET_SIZE 0
static int max_bitstream_count = SAVE_BITSTREAM_COUNT;
FILE *out_file_0 = NULL;
demo_video_param mparam;
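
/*
 * Read up to mparam.encoder_num YUV frames from the given channel and append
 * the raw YUV420 data (width * height * 3/2 bytes per frame) to a file under
 * mparam.OutputFilePath. Returns 0.
 */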
static int get_yuv_frame_data(int channel, VideoInputConfig* config)
{
    int ret = 0;
    char file_name[128] = {0};
    FILE *out_yuv_file = NULL;
    VideoYuvFrame mYuvFrame;
    int cnt = mparam.encoder_num;
    int bsave_yuv_data = 1;

    /* name the dump file after the 16-aligned capture size */
    sprintf(file_name, "%s/%dx%d.yuv", mparam.OutputFilePath, ALIGN_XXB(16, config->width), ALIGN_XXB(16, config->height));
    out_yuv_file = fopen(file_name, "wb");
    if(out_yuv_file == NULL)
    {
        aw_loge("fopen failed");
    }

    while(cnt > 0)
    {
        if (AWVideoInput_Check_Wait_Start(channel)) {
            usleep(200*1000);
            aw_logd("wait start");
            continue;
        }

#if TEST_YUV_RESET_SIZE
        if (cnt == 3) {
            int reset_w = 640, reset_h = 360;
            AWVideoInput_Start(channel, 0);
            AWVideoInput_ResetSize(channel, reset_w, reset_h);
            fclose(out_yuv_file);
            sprintf(file_name, "%s/rst_%dx%d.yuv", mparam.OutputFilePath, ALIGN_XXB(16, reset_w), ALIGN_XXB(16, reset_h));
            out_yuv_file = fopen(file_name, "wb");
            if(out_yuv_file == NULL)
            {
                aw_loge("fopen failed");
            }
            AWVideoInput_Start(channel, 1);
        }
        if (cnt == 8) {
            int reset_w = 1280, reset_h = 720;
            AWVideoInput_Start(channel, 0);
            AWVideoInput_ResetSize(channel, reset_w, reset_h);
            fclose(out_yuv_file);
            sprintf(file_name, "%s/rst_%dx%d.yuv", mparam.OutputFilePath, ALIGN_XXB(16, reset_w), ALIGN_XXB(16, reset_h));
            out_yuv_file = fopen(file_name, "wb");
            if(out_yuv_file == NULL)
            {
                aw_loge("fopen failed");
            }
            AWVideoInput_Start(channel, 1);
        }
#endif
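        /* fetch one YUV frame; on success dump it to the file and give the buffer back */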
        memset(&mYuvFrame, 0, sizeof(VideoYuvFrame));
        ret = AWVideoInput_GetYuvFrame(channel, &mYuvFrame);
        aw_logw("ret = %d, cnt = %d; frame info: w&h = %d, %d; phyAddr = %p, %p, %p, virAddr = %p, %p, %p time %lld\n",
            ret, cnt, mYuvFrame.widht, mYuvFrame.height,
            mYuvFrame.phyAddr[0], mYuvFrame.phyAddr[1], mYuvFrame.phyAddr[2],
            mYuvFrame.virAddr[0], mYuvFrame.virAddr[1], mYuvFrame.virAddr[2], get_cur_time_us());
        //usleep(100*1000);
        if(ret == 0)
        {
            int buf_size = mYuvFrame.widht*mYuvFrame.height*3/2;
            if(bsave_yuv_data == 1 && out_yuv_file != NULL)
                fwrite(mYuvFrame.virAddr[0], 1, buf_size, out_yuv_file);
            AWVideoInput_ReleaseYuvFrame(channel, &mYuvFrame);
        }
        if (ret != 0) {
            aw_logd("Getting yuv failed, break!");
            break;
        }
        cnt--;
    }

    if (out_yuv_file != NULL)
        fclose(out_yuv_file);
    return 0;
}
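
/* De-initialize the video input module and close the global output file if it was opened. */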
static void exit_demo()
{
    AWVideoInput_DeInit();
    aw_logd("exit: deInit end\n");
    if(out_file_0)
    {
        fclose(out_file_0);
        out_file_0 = NULL;
    }
    aw_logd("aw_demo, finish!\n");
    return;
}
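
/*
 * Demo flow: parse the command line, configure one video-input channel in
 * YUV output mode, start capture, dump frames with get_yuv_frame_data(),
 * then stop the channel and de-initialize.
 */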
int main(int argc, char** argv)
{
    aw_logw("get yuv test start, time: %lld\n", get_cur_time_us());
    pthread_t thread = 0;
    int i = 0;
    int ret = 0;
    int channelId_0 = 0;

    memset(&mparam, 0, sizeof(demo_video_param));
    mparam.c0_encoder_format = -1;
    mparam.pixelformat = RT_PIXEL_NUM;
    mparam.use_vipp_num = 0;
    strcpy(mparam.OutputFilePath, OUT_PUT_FILE_2);
    /******** begin parsing the config parameters ********/
    if(argc >= 2)
    {
        aw_logd("******************************\n");
        for(i = 1; i < (int)argc; i += 2)
        {
            ParseArgument(&mparam, argv[i], argv[i + 1]);
        }
        aw_logd("******************************\n");
    }
    else
    {
        aw_logd("we need more arguments\n");
        PrintDemoUsage();
    }
    check_param(&mparam);

    if(mparam.encoder_num > 0)
        max_bitstream_count = mparam.encoder_num;
    else
        max_bitstream_count = 1;
    channelId_0 = mparam.use_vipp_num;

    VideoInputConfig config_0;
    memset(&config_0, 0, sizeof(VideoInputConfig));
    config_0.channelId = channelId_0;
    config_0.gop = 30;
    config_0.pixelformat = mparam.pixelformat;
    config_0.venc_video_signal.video_format = RT_DEFAULT;
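    /* map the requested V4L2 colorspace to the encoder's colour primaries
     * and range flag; anything unrecognized falls back to full-range BT.709 */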
    switch(mparam.color_space)
    {
        case V4L2_COLORSPACE_JPEG:
        {
            config_0.venc_video_signal.full_range_flag = 1;
            config_0.venc_video_signal.src_colour_primaries = RT_VENC_YCC;
            config_0.venc_video_signal.dst_colour_primaries = RT_VENC_YCC;
            break;
        }
        case V4L2_COLORSPACE_REC709:
        {
            config_0.venc_video_signal.full_range_flag = 1;
            config_0.venc_video_signal.src_colour_primaries = RT_VENC_BT709;
            config_0.venc_video_signal.dst_colour_primaries = RT_VENC_BT709;
            break;
        }
        case V4L2_COLORSPACE_REC709_PART_RANGE:
        {
            config_0.venc_video_signal.full_range_flag = 0;
            config_0.venc_video_signal.src_colour_primaries = RT_VENC_BT709;
            config_0.venc_video_signal.dst_colour_primaries = RT_VENC_BT709;
            break;
        }
        default:
        {
            config_0.venc_video_signal.full_range_flag = 1;
            config_0.venc_video_signal.src_colour_primaries = RT_VENC_BT709;
            config_0.venc_video_signal.dst_colour_primaries = RT_VENC_BT709;
            break;
        }
    }
    config_0.output_mode = OUTPUT_MODE_YUV;
    config_0.width = mparam.c0_src_w;
    config_0.height = mparam.c0_src_h;
    config_0.bitrate = mparam.c0_bitrate/1024; //* kb
    config_0.encodeType = mparam.c0_encoder_format;

    AWVideoInput_Init();
    if(AWVideoInput_Configure(channelId_0, &config_0))
    {
        aw_loge("config err, exit!");
        goto _exit;
    }
    AWVideoInput_Start(channelId_0, 1);

    if (get_yuv_frame_data(channelId_0, &config_0) != 0) {
        goto _exit;
    }

    if(thread != 0)
        pthread_join(thread, (void**)&ret);

    AWVideoInput_Start(mparam.use_vipp_num, 0);

_exit:
    exit_demo();
    return 0;
}