SL100_FactoryTestTool/FactoryTestTool/SourceCode/Media/VideoDecoder/FFmpegDecoder.cpp

// FFmpegDecoder.cpp
#include "FFmpegDecoder.h"
#include <QApplication>
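// Rough usage sketch (illustrative only -- the widget and path names below are
// assumptions about the caller's code, not definitions made in this file):
//
//   FFmpegDecoder* decoder = new FFmpegDecoder(this);
//   decoder->decodeFile("capture/preview.h264", /*isBackBoardOrAllBoard=*/0,
//                       frontVideoLabel, backVideoLabel,
//                       frontResolutionEdit, backResolutionEdit); // starts/wakes the worker thread
//   ...
//   decoder->stopFFmpegDecoder();                                 // ask the decode loop to stop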
FFmpegDecoder::FFmpegDecoder(QObject* parent) :
    QThread(parent),
    videoLabel(nullptr),
    videoLabel_back(nullptr),
    resolutionEdit(nullptr),
    resolutionEdit_back(nullptr),
    abort(false),
    restart(false),
    formatContext(nullptr),
    codecContext(nullptr),
    frame(nullptr),
    packet(nullptr),
    swsContext(nullptr),
    videoStreamIndex(-1)
{
    av_log_set_level(AV_LOG_QUIET); // Silence FFmpeg's own logging
    avformat_network_init();        // Initialize FFmpeg networking support
    qDebug() << "FFmpegDecoder thread created";
}
FFmpegDecoder::~FFmpegDecoder()
{
    qDebug() << "Destroying FFmpegDecoder thread";
    mutex.lock();
    abort = true;
    isStartVideo = false;
    condition.wakeOne();
    mutex.unlock();
    wait();
    cleanup();
    avformat_network_deinit(); // Deinitialize FFmpeg networking support
    qDebug() << "FFmpegDecoder thread destroyed";
}
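// Shows the stream in a temporary FocusWindowDialog sized to the source aspect
// ratio (9:16, 16:9, 3:4, 4:3, or a 480x640 fallback) and redirects rendering to
// the dialog's label; the original label is restored when the dialog closes.
// Requires a known resolution (FocusWindowWidth/FocusWindowHeight), which is
// captured from the first decoded frame in avFrameToQImage().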
void FFmpegDecoder::processVideo(int itemIndex)
{
    qDebug() << "processVideo running in thread:" << QThread::currentThread();
    qDebug() << "FFmpegDecoder thread:" << this->thread();
    QLabel* originalLabel;
    if (isBackBoardOrAllBoard) {
        originalLabel = this->videoLabel_back;
    }
    else {
        originalLabel = this->videoLabel;
    }
    if ((FocusWindowWidth != 0) && (FocusWindowHeight != 0)) {
        qDebug() << "---1--- processVideo";
        QMetaObject::invokeMethod(qApp, [=]() {
            FocusWindowDialog* dialog = nullptr;
            if (FocusWindowWidth * 16 == FocusWindowHeight * 9) {
                dialog = new FocusWindowDialog(nullptr, QSize(540, 960));
            }
            else if (FocusWindowWidth * 9 == FocusWindowHeight * 16) {
                dialog = new FocusWindowDialog(nullptr, QSize(960, 540));
            }
            else if (FocusWindowWidth * 4 == FocusWindowHeight * 3) {
                dialog = new FocusWindowDialog(nullptr, QSize(480, 640));
            }
            else if (FocusWindowWidth * 3 == FocusWindowHeight * 4) {
                dialog = new FocusWindowDialog(nullptr, QSize(640, 480));
            }
            else {
                qDebug() << "------ Other scaled resolutions use 480x640";
                dialog = new FocusWindowDialog(nullptr, QSize(480, 640));
            }
            // Redirect rendering to the dialog's video label
            mutex.lock();
            this->videoLabelTemp = dialog->videoDisplayLabel;
            this->videoLabelChanged = true;
            mutex.unlock();
            connect(dialog, &QDialog::finished, this, [=](int result) {
                // Restore the original label once the dialog closes
                mutex.lock();
                this->videoLabelTemp = originalLabel;
                this->videoLabelChanged = true;
                mutex.unlock();
                dialog->deleteLater(); // Delete the dialog after it closes
            });
            dialog->open(); // Run open() on the UI thread so this call does not block
        }, Qt::QueuedConnection);
    }
    else {
        qDebug() << "------ Please wait for the video to be decoded and rendered before clicking";
    }
}
void FFmpegDecoder::initialize()
{
    // Initialize the FFmpeg network layer (also called in the constructor)
    avformat_network_init();
}
void FFmpegDecoder::stopFFmpegDecoder()
{
    mutex.lock();
    abort = true;
    isStartVideo = false;
    condition.wakeOne();
    mutex.unlock();
}
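// Hands the file path and target widgets to the worker thread and wakes it.
// Typically called from the GUI thread; the heavy work happens in run().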
void FFmpegDecoder::decodeFile(const QString& videoFilePath, int isBackBoardOrAllBoard,
                               QLabel* videoDisplayLabel, QLabel* videoDisplayLabel_back,
                               QLineEdit* VideoResolutionEdit, QLineEdit* VideoResolutionEdit_back)
{
    QMutexLocker locker(&mutex);
    this->filePath = videoFilePath;
    this->videoLabel = videoDisplayLabel;
    this->videoLabel_back = videoDisplayLabel_back;
    this->resolutionEdit = VideoResolutionEdit;
    this->resolutionEdit_back = VideoResolutionEdit_back;
    this->isBackBoardOrAllBoard = isBackBoardOrAllBoard;
    abort = false; // Clear any previous stop request so decoding can resume after stopFFmpegDecoder()
    if (!isRunning()) {
        qDebug() << "Starting decoder thread";
        start(NormalPriority);
    }
    restart = true;
    condition.wakeOne();
}
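// Worker loop: waits until decodeFile() requests work, opens the file with FFmpeg,
// then polls the file for growth (the source file is apparently still being written),
// demuxes/decodes the newly available packets and posts the scaled frames to the
// active QLabel via queued invokeMethod calls so widget updates run on the GUI thread.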
void FFmpegDecoder::run()
{
    QFile file(filePath);
    qint64 fileSize = 0;
    QLabel* currentVideoLabel = nullptr;
    while (!isInterruptionRequested()) {
        mutex.lock();
        while (!restart && !abort) {
            condition.wait(&mutex);
        }
        if (abort) {
            mutex.unlock();
            qDebug() << "Decoder thread aborting";
            break;
        }
        // Pick the front or back label for this decode pass (may change between restarts)
        currentVideoLabel = isBackBoardOrAllBoard ? videoLabel_back : videoLabel;
        QSize labelSize = currentVideoLabel->size();
        mutex.unlock();
        if (labelSize.width() < 220 || labelSize.height() < 357) {
            labelSize = QSize(220, 357);
            // Resize the label on the GUI thread; widgets must not be touched from a worker thread
            QMetaObject::invokeMethod(currentVideoLabel, [currentVideoLabel, labelSize]() {
                currentVideoLabel->setFixedSize(labelSize);
            }, Qt::QueuedConnection);
            qDebug() << "Adjusting video label size to: Width =" << labelSize.width() << ", Height =" << labelSize.height();
        }
        //qDebug() << "Video label size: Width =" << labelSize.width() << ", Height =" << labelSize.height();
        file.setFileName(filePath); // The path may change between restarts
        fileSize = 0;               // Reset the growth marker for the new file
        if (!file.open(QIODevice::ReadOnly)) {
            qWarning() << "Failed to open file:" << filePath;
            continue;
        }
        if (!initializeFFmpeg(filePath)) {
            //qDebug() << "Failed to initialize FFmpeg for file:" << filePath;
            cleanup();
            file.close();
            continue;
        }
        isStartVideo = true;
        restart = false;
        while (!abort) {
            qint64 currentFileSize = file.size();
            //qDebug() << "Decoder thread currentFileSize:" << currentFileSize;
            //qDebug() << "Decoder thread fileSize:" << fileSize;
            if (currentFileSize > fileSize) {
                fileSize = currentFileSize;
                file.seek(fileSize); // Move the monitoring position to the end of the file
                // Read and decode the packets that are available so far
                while (av_read_frame(formatContext, packet) >= 0) {
                    if (packet->stream_index == videoStreamIndex) {
                        int ret = avcodec_send_packet(codecContext, packet);
                        if (ret < 0) {
                            qWarning() << "Error sending packet for decoding";
                            av_packet_unref(packet);
                            continue;
                        }
                        while (ret >= 0) {
                            ret = avcodec_receive_frame(codecContext, frame);
                            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                                break; // The decoder needs more input (or reached the end)
                            }
                            else if (ret < 0) {
                                qWarning() << "Error during decoding";
                                break;
                            }
                            mutex.lock();
                            if (videoLabelChanged) {
                                currentVideoLabel = videoLabelTemp; // Switch to the newly requested label
                                videoLabelChanged = false;          // Reset the flag
                                labelSize = currentVideoLabel->size();
                            }
                            mutex.unlock();
                            //qWarning() << "-------currentVideoLabel";
                            QImage img = avFrameToQImage(frame);
                            QImage scaledImage = img.scaled(labelSize, Qt::KeepAspectRatio, Qt::SmoothTransformation);
                            //currentVideoLabel->setPixmap(QPixmap::fromImage(scaledImage));
                            QMetaObject::invokeMethod(currentVideoLabel, "setPixmap", Qt::QueuedConnection, Q_ARG(QPixmap, QPixmap::fromImage(scaledImage)));
                            QThread::msleep(10);
                        }
                    }
                    av_packet_unref(packet);
                }
            }
            mutex.lock();
            if (restart) {
                restart = false;
                mutex.unlock();
                break;
            }
            mutex.unlock();
        }
        cleanup();
        file.close();
        mutex.lock();
        if (!restart) {
            condition.wait(&mutex);
        }
        mutex.unlock();
    }
}
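// Standard FFmpeg demuxer/decoder setup: open the input, read the stream info,
// locate the first video stream, create and open a decoder context for it, and
// allocate the reusable AVFrame/AVPacket used by run().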
bool FFmpegDecoder::initializeFFmpeg(const QString& filePath)
{
    if (!QFile::exists(filePath)) {
        qWarning() << "FFmpeg File does not exist:" << filePath;
        return false;
    }
    if (avformat_open_input(&formatContext, filePath.toStdString().c_str(), nullptr, nullptr) != 0) {
        qWarning() << "Failed to open file with FFmpeg:" << filePath;
        return false;
    }
    if (avformat_find_stream_info(formatContext, nullptr) < 0) {
        qWarning() << "Failed to retrieve stream info";
        return false;
    }
    videoStreamIndex = -1;
    for (unsigned int i = 0; i < formatContext->nb_streams; ++i) {
        if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStreamIndex = i;
            break;
        }
    }
    if (videoStreamIndex == -1) {
        qWarning() << "No video stream found";
        return false;
    }
    AVCodecParameters* codecParameters = formatContext->streams[videoStreamIndex]->codecpar;
    const AVCodec* codec = avcodec_find_decoder(codecParameters->codec_id);
    if (!codec) {
        qWarning() << "Unsupported codec";
        return false;
    }
    codecContext = avcodec_alloc_context3(codec);
    if (!codecContext) {
        qWarning() << "Failed to allocate codec context";
        return false;
    }
    if (avcodec_parameters_to_context(codecContext, codecParameters) < 0) {
        qWarning() << "Failed to copy codec parameters to context";
        return false;
    }
    if (avcodec_open2(codecContext, codec, nullptr) < 0) {
        qWarning() << "Failed to open codec";
        return false;
    }
    frame = av_frame_alloc();
    packet = av_packet_alloc();
    if (!frame || !packet) {
        qWarning() << "Failed to allocate frame/packet";
        return false;
    }
    return true;
}
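// Releases everything allocated by initializeFFmpeg()/avFrameToQImage().
// Safe to call repeatedly; every pointer is reset so a later restart can reinitialize.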
void FFmpegDecoder::cleanup()
{
    if (codecContext) {
        avcodec_free_context(&codecContext);
        codecContext = nullptr;
    }
    if (frame) {
        av_frame_free(&frame);
        frame = nullptr;
    }
    if (packet) {
        av_packet_free(&packet);
        packet = nullptr;
    }
    if (swsContext) {
        sws_freeContext(swsContext);
        swsContext = nullptr;
    }
    if (formatContext) {
        avformat_close_input(&formatContext);
        formatContext = nullptr;
    }
}
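// Converts a decoded AVFrame to an RGB24 QImage with libswscale, publishes the
// stream resolution to the appropriate QLineEdit, and records it for the focus
// window. Note the SwsContext is created from the first frame only; if the stream
// resolution could change at runtime, sws_getCachedContext() would be a safer
// choice (not done here).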
QImage FFmpegDecoder::avFrameToQImage(AVFrame* frame)
{
    int width = frame->width;
    int height = frame->height;
    // Note: if switching cameras can change the resolution, isGotResolution must be reset accordingly
    if (!isGotResolution && (width != 0) && (height != 0)) {
        isGotResolution = true;
        FocusWindowWidth = width;
        FocusWindowHeight = height;
    }
    QString resolutionText = QString::number(width) + " x " + QString::number(height);
    if (isBackBoardOrAllBoard) {
        QMetaObject::invokeMethod(resolutionEdit_back, "setText", Qt::QueuedConnection, Q_ARG(QString, resolutionText));
    }
    else {
        QMetaObject::invokeMethod(resolutionEdit, "setText", Qt::QueuedConnection, Q_ARG(QString, resolutionText));
    }
    //qDebug() << "H264 video resolution: Width =" << frame->width << ", Height =" << frame->height;
    AVPixelFormat pixFmt = (AVPixelFormat)frame->format;
    if (!swsContext) {
        swsContext = sws_getContext(width, height, pixFmt, width, height, AV_PIX_FMT_RGB24, SWS_BILINEAR, nullptr, nullptr, nullptr);
        if (!swsContext) {
            qWarning() << "Failed to initialize the conversion context";
            return QImage();
        }
    }
    QImage img(width, height, QImage::Format_RGB888);
    uint8_t* dest[4] = { img.bits(), nullptr, nullptr, nullptr };
    int destLinesize[4] = { img.bytesPerLine(), 0, 0, 0 };
    sws_scale(swsContext, frame->data, frame->linesize, 0, height, dest, destLinesize);
    return img;
}