// FFmpegDecoder.cpp

#include "FFmpegDecoder.h"

FFmpegDecoder::FFmpegDecoder(QObject* parent) :
    QThread(parent),
    videoLabel(nullptr),
    resolutionEdit(nullptr),
    abort(false),
    restart(false),
    formatContext(nullptr),
    codecContext(nullptr),
    frame(nullptr),
    packet(nullptr),
    swsContext(nullptr),
    videoStreamIndex(-1)
{
    av_log_set_level(AV_LOG_QUIET); // Set the FFmpeg log level to quiet
    avformat_network_init();        // Initialize FFmpeg networking
    qDebug() << "FFmpegDecoder thread created";
}

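/*
 * Typical usage from the UI thread (illustrative sketch only; the owning
 * widget and the "decoder", "videoLabel", and "resolutionEdit" names are
 * assumptions, not part of this file):
 *
 *   FFmpegDecoder* decoder = new FFmpegDecoder(this);
 *   decoder->decodeFile("video.h264", videoLabel, resolutionEdit); // starts/wakes the worker thread
 *   // ...
 *   decoder->stopFFmpegDecoder(); // request the run() loop to exit
 */
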
FFmpegDecoder::~FFmpegDecoder()
{
    qDebug() << "Destroying FFmpegDecoder thread";
    mutex.lock();
    abort = true;
    condition.wakeOne();
    mutex.unlock();
    wait();
    cleanup();
    avformat_network_deinit(); // Deinitialize FFmpeg networking
    qDebug() << "FFmpegDecoder thread destroyed";
}

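/*
 * Opens a FocusWindowDialog sized to match the decoded video's aspect ratio
 * and temporarily redirects rendering to the dialog's videoDisplayLabel via
 * videoLabelTemp/videoLabelChanged (both assumed to be declared in
 * FFmpegDecoder.h). The original label is restored when the dialog closes.
 * itemIndex is currently unused.
 */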
void FFmpegDecoder::processVideo(int itemIndex)
{
    QLabel* originalLabel = this->videoLabel;
    if ((FocusWindowWidth != 0) && (FocusWindowHeight != 0)) {
        qDebug() << "------ processVideo";
        FocusWindowDialog* dialog = nullptr;
        if (FocusWindowWidth * 16 == FocusWindowHeight * 9) {
            dialog = new FocusWindowDialog(nullptr, QSize(540, 960));
        }
        else if (FocusWindowWidth * 9 == FocusWindowHeight * 16) {
            dialog = new FocusWindowDialog(nullptr, QSize(960, 540));
        }
        else if (FocusWindowWidth * 4 == FocusWindowHeight * 3) {
            dialog = new FocusWindowDialog(nullptr, QSize(480, 640));
        }
        else if (FocusWindowWidth * 3 == FocusWindowHeight * 4) {
            dialog = new FocusWindowDialog(nullptr, QSize(640, 480));
        }
        else {
            qDebug() << "------ Other scaled resolutions use 480x640";
            dialog = new FocusWindowDialog(nullptr, QSize(480, 640));
        }
#if 0
        // Switch the video display QLabel to the dialog's videoDisplayLabel
        this->videoLabel = dialog->videoDisplayLabel;
        int result = dialog->exec();
        if ((result == QDialog::Accepted) || (result == QDialog::Rejected)) {
            this->videoLabel = originalLabel;
        }
#else
        mutex.lock();
        this->videoLabelTemp = dialog->videoDisplayLabel; // Update the temporary label
        this->videoLabelChanged = true;                   // Flag that the label has changed
        mutex.unlock();

        // Show the dialog modally once; restore the original label when it closes.
        int result = dialog->exec();
        if ((result == QDialog::Accepted) || (result == QDialog::Rejected)) {
            mutex.lock();
            this->videoLabelTemp = originalLabel; // Restore the original label
            this->videoLabelChanged = true;       // Flag that the label has changed
            mutex.unlock();
        }
#endif
        delete dialog;
    }
    else {
        qDebug() << "------ Please wait for the video to be decoded and rendered before clicking";
    }
}

void FFmpegDecoder::initialize()
{
    // Initialize FFmpeg networking support
    avformat_network_init();
}

void FFmpegDecoder::stopFFmpegDecoder()
{
    mutex.lock();
    abort = true;
    condition.wakeOne();
    mutex.unlock();
}

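/*
 * Stores the target file path and the UI widgets used for output, then
 * starts the worker thread if it is not already running, or wakes it for a
 * restart. Called from the UI thread; the mutex guards the shared members
 * later read by run().
 */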
void FFmpegDecoder::decodeFile(const QString& videoFilePath, QLabel* videoDisplayLabel, QLineEdit* VideoResolutionEdit)
{
    QMutexLocker locker(&mutex);
    this->filePath = videoFilePath;
    this->videoLabel = videoDisplayLabel;
    this->resolutionEdit = VideoResolutionEdit;
    if (!isRunning()) {
        qDebug() << "Starting decoder thread";
        start(NormalPriority);
    }
    restart = true;
    condition.wakeOne();
}

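/*
 * Worker loop: sleeps on the condition variable until decodeFile() sets
 * restart (or the destructor sets abort), then opens the file, initializes
 * the FFmpeg contexts, and tails the file as it grows, reading packets,
 * decoding frames, and posting scaled pixmaps to the current QLabel with a
 * queued invokeMethod so painting happens on the GUI thread.
 */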
void FFmpegDecoder::run()
{
    QFile file(filePath);
    qint64 fileSize = 0;
    QLabel* currentVideoLabel = videoLabel;
    while (!isInterruptionRequested()) {
        mutex.lock();
        while (!restart && !abort) {
            condition.wait(&mutex);
        }
        if (abort) {
            mutex.unlock();
            qDebug() << "Decoder thread aborting";
            break;
        }
        /*QLabel* currentVideoLabel = videoLabel;*/
        QSize labelSize = currentVideoLabel->size();
        mutex.unlock();

        if (labelSize.width() < 220 || labelSize.height() < 357) {
            labelSize = QSize(220, 357);
            currentVideoLabel->setFixedSize(labelSize);
            qDebug() << "Adjusting video label size to: Width =" << labelSize.width() << ", Height =" << labelSize.height();
        }
        qDebug() << "Video label size: Width =" << labelSize.width() << ", Height =" << labelSize.height();

        if (!file.open(QIODevice::ReadOnly)) {
            qWarning() << "Failed to open file:" << filePath;
            continue;
        }
        if (!initializeFFmpeg(filePath)) {
            qDebug() << "Failed to initialize FFmpeg for file:" << filePath;
            cleanup();
            file.close();
            continue;
        }
        restart = false;
        while (!abort) {
            qint64 currentFileSize = file.size();
            //qDebug() << "Decoder thread currentFileSize:" << currentFileSize;
            //qDebug() << "Decoder thread fileSize:" << fileSize;
            if (currentFileSize > fileSize) {
                fileSize = currentFileSize;
                file.seek(fileSize); // Set the file read position to the end
                // Read and process packets
                while (av_read_frame(formatContext, packet) >= 0) {
                    if (packet->stream_index == videoStreamIndex) {
                        int ret = avcodec_send_packet(codecContext, packet);
                        if (ret < 0) {
                            qWarning() << "Error sending packet for decoding";
                            av_packet_unref(packet);
                            continue;
                        }
                        while (ret >= 0) {
                            ret = avcodec_receive_frame(codecContext, frame);
                            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                                // Decoder needs more input (or is flushed); fetch the next packet
                                break;
                            }
                            else if (ret < 0) {
                                qWarning() << "Error during decoding";
                                break;
                            }
                            mutex.lock();
                            if (videoLabelChanged) {
                                currentVideoLabel = videoLabelTemp; // Update currentVideoLabel
                                videoLabelChanged = false;          // Reset the flag
                                labelSize = currentVideoLabel->size();
                            }
                            mutex.unlock();

                            QImage img = avFrameToQImage(frame);
                            QImage scaledImage = img.scaled(labelSize, Qt::KeepAspectRatio, Qt::SmoothTransformation);
                            //currentVideoLabel->setPixmap(QPixmap::fromImage(scaledImage));
                            QMetaObject::invokeMethod(currentVideoLabel, "setPixmap", Qt::QueuedConnection, Q_ARG(QPixmap, QPixmap::fromImage(scaledImage)));
                            QThread::msleep(10); // Throttle rendering (~10 ms between frames)
                        }
                    }
                    av_packet_unref(packet);
                }
            }
            mutex.lock();
            if (restart) {
                restart = false;
                mutex.unlock();
                break;
            }
            mutex.unlock();
        }
        cleanup();
        file.close();

        mutex.lock();
        if (!restart) {
            condition.wait(&mutex);
        }
        mutex.unlock();
    }
}

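/*
 * Opens the input with libavformat, locates the first video stream, and
 * sets up the decoder context, frame, and packet. Returns false on any
 * failure; the caller (run()) invokes cleanup() to release whatever was
 * allocated before the failure.
 */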
bool FFmpegDecoder::initializeFFmpeg(const QString& filePath)
{
    if (!QFile::exists(filePath)) {
        qWarning() << "FFmpeg File does not exist:" << filePath;
        return false;
    }
    if (avformat_open_input(&formatContext, filePath.toStdString().c_str(), nullptr, nullptr) != 0) {
        qWarning() << "Failed to open file with FFmpeg:" << filePath;
        return false;
    }

    if (avformat_find_stream_info(formatContext, nullptr) < 0) {
        qWarning() << "Failed to retrieve stream info";
        return false;
    }

    videoStreamIndex = -1;
    for (unsigned int i = 0; i < formatContext->nb_streams; ++i) {
        if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStreamIndex = i;
            break;
        }
    }
    if (videoStreamIndex == -1) {
        qWarning() << "No video stream found";
        return false;
    }

    AVCodecParameters* codecParameters = formatContext->streams[videoStreamIndex]->codecpar;
    const AVCodec* codec = avcodec_find_decoder(codecParameters->codec_id);
    if (!codec) {
        qWarning() << "Unsupported codec";
        return false;
    }

    codecContext = avcodec_alloc_context3(codec);
    if (!codecContext) {
        qWarning() << "Failed to allocate codec context";
        return false;
    }

    if (avcodec_parameters_to_context(codecContext, codecParameters) < 0) {
        qWarning() << "Failed to copy codec parameters to context";
        return false;
    }

    if (avcodec_open2(codecContext, codec, nullptr) < 0) {
        qWarning() << "Failed to open codec";
        return false;
    }

    frame = av_frame_alloc();
    packet = av_packet_alloc();
    if (!frame || !packet) {
        qWarning() << "Failed to allocate frame/packet";
        return false;
    }

    return true;
}

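/*
 * Releases every FFmpeg object owned by this class. Safe to call multiple
 * times: each pointer is checked and reset to nullptr after being freed.
 */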
void FFmpegDecoder::cleanup()
{
    if (codecContext) {
        avcodec_free_context(&codecContext);
        codecContext = nullptr;
    }
    if (frame) {
        av_frame_free(&frame);
        frame = nullptr;
    }
    if (packet) {
        av_packet_free(&packet);
        packet = nullptr;
    }
    if (swsContext) {
        sws_freeContext(swsContext);
        swsContext = nullptr;
    }
    if (formatContext) {
        avformat_close_input(&formatContext);
        formatContext = nullptr;
    }
}

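/*
 * Converts a decoded AVFrame to a QImage via libswscale (source pixel
 * format to RGB24). The SwsContext is created lazily on the first frame
 * and reused; note it is not recreated if the stream resolution or pixel
 * format changes mid-stream. Also publishes the resolution text to
 * resolutionEdit with a queued invokeMethod.
 */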
QImage FFmpegDecoder::avFrameToQImage(AVFrame* frame)
{
    int width = frame->width;
    int height = frame->height;
    // Note: if switching cameras changes the resolution, remember to reset isGotResolution here
    if (!isGotResolution && (width != 0) && (height != 0)) {
        isGotResolution = true;
        FocusWindowWidth = width;
        FocusWindowHeight = height;
    }
    QString resolutionText = QString::number(width) + " x " + QString::number(height);
    //resolutionEdit->setText(resolutionText);
    QMetaObject::invokeMethod(resolutionEdit, "setText", Qt::QueuedConnection, Q_ARG(QString, resolutionText));
    //qDebug() << "H264 video resolution: Width =" << frame->width << ", Height =" << frame->height;

    AVPixelFormat pixFmt = (AVPixelFormat)frame->format;
    if (!swsContext) {
        swsContext = sws_getContext(width, height, pixFmt, width, height, AV_PIX_FMT_RGB24, SWS_BILINEAR, nullptr, nullptr, nullptr);
        if (!swsContext) {
            qWarning() << "Failed to initialize the conversion context";
            return QImage();
        }
    }

    QImage img(width, height, QImage::Format_RGB888);
    uint8_t* dest[4] = { img.bits(), nullptr, nullptr, nullptr };
    int destLinesize[4] = { static_cast<int>(img.bytesPerLine()), 0, 0, 0 };

    sws_scale(swsContext, frame->data, frame->linesize, 0, height, dest, destLinesize);

    return img;
}