// FFmpegDecoder.cpp
#include "FFmpegDecoder.h"

#include <QDebug>
#include <QFile>
#include <QImage>
#include <QLabel>
#include <QMetaObject>
#include <QPixmap>

extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavutil/log.h>
}

FFmpegDecoder::FFmpegDecoder(QObject* parent)
    : QThread(parent),
      videoLabel(nullptr),
      abort(false),
      restart(false),
      formatContext(nullptr),
      codecContext(nullptr),
      frame(nullptr),
      packet(nullptr),
      swsContext(nullptr),
      videoStreamIndex(-1) // initialize all members to safe defaults
{
    av_log_set_level(AV_LOG_QUIET); // silence FFmpeg's own logging
    avformat_network_init();        // initialize FFmpeg networking (ref-counted)
    //qDebug() << "FFmpegDecoder created";
}

FFmpegDecoder::~FFmpegDecoder()
{
    qDebug() << "Destroying FFmpegDecoder";
    mutex.lock();
    abort = true;
    condition.wakeOne();
    mutex.unlock();
    wait();                    // block until run() has exited
    cleanup();
    avformat_network_deinit(); // undo avformat_network_init()
    qDebug() << "FFmpegDecoder destroyed";
}

void FFmpegDecoder::initialize()
{
    // Initialize the FFmpeg network layer. The call is ref-counted, so calling
    // it here in addition to the constructor is harmless.
    avformat_network_init();
}

void FFmpegDecoder::decodeFile(const QString& filePath, QLabel* videoDisplayLabel)
{
    QMutexLocker locker(&mutex);
    this->filePath = filePath;
    this->videoLabel = videoDisplayLabel;
    if (!isRunning()) {
        qDebug() << "Starting decoder thread";
        start(NormalPriority);
    }
    restart = true;      // tell an already running thread to pick up the new file
    condition.wakeOne();
}

void FFmpegDecoder::run()
{
    QFile file;

    while (true) {
        mutex.lock();
        while (!restart && !abort) {
            condition.wait(&mutex);
        }
        if (abort) {
            mutex.unlock();
            qDebug() << "Decoder thread aborting";
            break;
        }
        restart = false;
        // Copy the shared state while the mutex is held.
        QString currentFilePath = filePath;
        QLabel* currentVideoLabel = videoLabel;
        QSize labelSize = currentVideoLabel->size();
        mutex.unlock();

        if (labelSize.width() <= 0 || labelSize.height() <= 0) {
            // The label has not been laid out yet; fall back to a default size.
            labelSize = QSize(800, 600);
            // Widgets must only be modified from the GUI thread, so queue the resize.
            QMetaObject::invokeMethod(currentVideoLabel, [currentVideoLabel, labelSize]() {
                currentVideoLabel->setFixedSize(labelSize);
            }, Qt::QueuedConnection);
            qDebug() << "Adjusting video label size to: Width =" << labelSize.width()
                     << ", Height =" << labelSize.height();
        }
        qDebug() << "Video label size: Width =" << labelSize.width()
                 << ", Height =" << labelSize.height();

        file.setFileName(currentFilePath);
        if (!file.open(QIODevice::ReadOnly)) {
            qWarning() << "Failed to open file:" << currentFilePath;
            continue;
        }
        if (!initializeFFmpeg(currentFilePath)) {
            qDebug() << "Failed to initialize FFmpeg for file:" << currentFilePath;
            cleanup();
            file.close();
            continue;
        }

        // The QFile is only used to watch the file size; the actual demuxing and
        // decoding is done by FFmpeg through formatContext.
        qint64 fileSize = 0;

        while (!abort) {
            qint64 currentFileSize = file.size();
            if (currentFileSize > fileSize) {
                fileSize = currentFileSize;
                file.seek(fileSize); // move the watch position to the new end of the growing file

                // Read and decode the packets that are now available.
                while (!abort && av_read_frame(formatContext, packet) >= 0) {
                    if (packet->stream_index == videoStreamIndex) {
                        int ret = avcodec_send_packet(codecContext, packet);
                        if (ret < 0) {
                            qWarning() << "Error sending packet for decoding";
                            av_packet_unref(packet);
                            continue;
                        }
                        while (ret >= 0) {
                            ret = avcodec_receive_frame(codecContext, frame);
                            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                                break; // the decoder needs more input
                            } else if (ret < 0) {
                                qWarning() << "Error during decoding";
                                break;
                            }
                            qDebug() << "H264 video resolution: Width =" << frame->width
                                     << ", Height =" << frame->height;
                            QImage img = avFrameToQImage(frame);
                            QImage scaledImage = img.scaled(labelSize, Qt::KeepAspectRatio,
                                                            Qt::SmoothTransformation);
                            // setPixmap() must run on the GUI thread, so queue the update.
                            QMetaObject::invokeMethod(currentVideoLabel, [currentVideoLabel, scaledImage]() {
                                currentVideoLabel->setPixmap(QPixmap::fromImage(scaledImage));
                            }, Qt::QueuedConnection);
                            QThread::msleep(40); // crude pacing at roughly 25 fps
                        }
                    }
                    av_packet_unref(packet);
                }
            } else {
                QThread::msleep(10); // avoid busy-waiting while the file is not growing
            }

            mutex.lock();
            if (restart) {
                // Leave 'restart' set so the outer loop picks up the new file immediately.
                mutex.unlock();
                break;
            }
            mutex.unlock();
        }

        cleanup();
        file.close();

        mutex.lock();
        if (!restart && !abort) {
            condition.wait(&mutex); // wait for the next decodeFile() call or for shutdown
        }
        mutex.unlock();
    }
}

bool FFmpegDecoder::initializeFFmpeg(const QString& filePath)
{
    if (avformat_open_input(&formatContext, filePath.toStdString().c_str(), nullptr, nullptr) != 0) {
        qWarning() << "Failed to open file with FFmpeg:" << filePath;
        return false;
    }
    if (avformat_find_stream_info(formatContext, nullptr) < 0) {
        qWarning() << "Failed to retrieve stream info";
        return false;
    }

    // Find the first video stream.
    videoStreamIndex = -1;
    for (unsigned int i = 0; i < formatContext->nb_streams; ++i) {
        if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStreamIndex = static_cast<int>(i);
            break;
        }
    }
    if (videoStreamIndex == -1) {
        qWarning() << "No video stream found";
        return false;
    }

    AVCodecParameters* codecParameters = formatContext->streams[videoStreamIndex]->codecpar;
    const AVCodec* codec = avcodec_find_decoder(codecParameters->codec_id);
    if (!codec) {
        qWarning() << "Unsupported codec";
        return false;
    }

    codecContext = avcodec_alloc_context3(codec);
    if (!codecContext) {
        qWarning() << "Failed to allocate codec context";
        return false;
    }
    if (avcodec_parameters_to_context(codecContext, codecParameters) < 0) {
        qWarning() << "Failed to copy codec parameters to context";
        return false;
    }
    if (avcodec_open2(codecContext, codec, nullptr) < 0) {
        qWarning() << "Failed to open codec";
        return false;
    }

    frame = av_frame_alloc();
    packet = av_packet_alloc();
    if (!frame || !packet) {
        qWarning() << "Failed to allocate frame/packet";
        return false;
    }
    return true;
}

void FFmpegDecoder::cleanup()
{
    if (codecContext) {
        avcodec_free_context(&codecContext);  // also resets the pointer to nullptr
    }
    if (frame) {
        av_frame_free(&frame);                // also resets the pointer to nullptr
    }
    if (packet) {
        av_packet_free(&packet);              // also resets the pointer to nullptr
    }
    if (swsContext) {
        sws_freeContext(swsContext);
        swsContext = nullptr;
    }
    if (formatContext) {
        avformat_close_input(&formatContext); // also resets the pointer to nullptr
    }
}

QImage FFmpegDecoder::avFrameToQImage(AVFrame* frame)
{
    const int width = frame->width;
    const int height = frame->height;
    const AVPixelFormat pixFmt = static_cast<AVPixelFormat>(frame->format);

    // sws_getCachedContext() reuses the existing context when the parameters are
    // unchanged and recreates it if they differ (e.g. after a resolution change).
    swsContext = sws_getCachedContext(swsContext, width, height, pixFmt,
                                      width, height, AV_PIX_FMT_RGB24,
                                      SWS_BILINEAR, nullptr, nullptr, nullptr);
    if (!swsContext) {
        qWarning() << "Failed to initialize the conversion context";
        return QImage();
    }

    QImage img(width, height, QImage::Format_RGB888);
    uint8_t* dest[4] = { img.bits(), nullptr, nullptr, nullptr };
    int destLinesize[4] = { static_cast<int>(img.bytesPerLine()), 0, 0, 0 };
    sws_scale(swsContext, frame->data, frame->linesize, 0, height, dest, destLinesize);
    return img;
}
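
// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the decoder). Assuming the
// class declaration in FFmpegDecoder.h matches the definitions above, a
// caller on the GUI thread could drive the decoder like this; the file path
// and the ui->videoLabel widget are hypothetical placeholders:
//
//     auto* decoder = new FFmpegDecoder(this);   // parented to a QWidget
//     decoder->decodeFile("capture.h264",        // hypothetical path
//                         ui->videoLabel);       // hypothetical QLabel
//
// decodeFile() starts the worker thread on first use and, on later calls,
// wakes it via restart/condition, so it can be called again to switch files.
// ---------------------------------------------------------------------------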