
V0.6.1
1. Added a new project that draws an image with OpenGL; the image now renders successfully.

Apple 2 weeks ago
parent
commit
09b5416064
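
For reference, a minimal sketch of how the new widget can be driven from a demo entry point. The demo's main.cpp is not part of this diff, so the file below is an assumption based on PlayerGLWidget.h/.cpp (the constructor already loads <appDir>/0.jpg, converts it to YUV420 and calls updateFrame(), so showing the widget is enough to draw the image):

    // Hypothetical demo entry point -- not part of this commit.
    #include <QApplication>
    #include "PlayerGLWidget.h"

    int main(int argc, char *argv[])
    {
        QApplication app(argc, argv);

        PlayerGLWidget widget;      // constructor loads 0.jpg next to the executable
        widget.resize(800, 600);
        widget.show();

        return app.exec();
    }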

+ 4 - 1
CMakeLists.txt

@@ -151,5 +151,8 @@ file(GLOB GLOBAL_SRC
 # add_subdirectory(${CMAKE_SOURCE_DIR}/demo/time)
 # add_subdirectory(${CMAKE_SOURCE_DIR}/demo/VideoPlayer)
 # add_subdirectory(${CMAKE_SOURCE_DIR}/demo/xlsx)
-add_subdirectory(${CMAKE_SOURCE_DIR}/demo/DesignerPattern)
+# add_subdirectory(${CMAKE_SOURCE_DIR}/demo/DesignerPattern)
+# add_subdirectory(${CMAKE_SOURCE_DIR}/demo/ViewModel)
+add_subdirectory(${CMAKE_SOURCE_DIR}/demo/VideoPlayerGL)
+
 

+ 1 - 1
External

@@ -1 +1 @@
-Subproject commit cc1b2f06472f420a7e0027b9131e3ee86a6ad285
+Subproject commit bedfa527c59b4af203efa45077833d1c9e3e5cd2

+ 100 - 0
demo/VideoPlayerGL/CMakeLists.txt

@@ -0,0 +1,100 @@
+cmake_minimum_required(VERSION 3.5)
+
+set(this_exe PlayerGL)
+
+
+# Source files
+file(GLOB LOCAL_SRC
+    ${CMAKE_CURRENT_SOURCE_DIR}/*.qrc
+    ${CMAKE_CURRENT_SOURCE_DIR}/*.rc
+    ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp
+    ${CMAKE_CURRENT_SOURCE_DIR}/*.ui
+    # ${CMAKE_CURRENT_SOURCE_DIR}/VideoPlayer/*.cpp
+    # ${CMAKE_CURRENT_SOURCE_DIR}/demo/*.cpp
+    ${CMAKE_CURRENT_SOURCE_DIR}/Player/*.cpp
+
+    ${CMAKE_SOURCE_DIR}/External/module/Logs/*.cpp
+    ${CMAKE_SOURCE_DIR}/External/module/ThreadPool/*.cpp
+    ${CMAKE_SOURCE_DIR}/External/module/VideoPlayer/*.cpp
+
+    
+    
+)
+
+
+# Build the executable
+
+add_executable(${this_exe}
+    # WIN32
+    ${GLOBAL_SRC}
+    ${LOCAL_SRC} 
+)
+
+# set_target_properties(${this_exe} PROPERTIES
+    
+# )
+
+
+# Header include paths
+target_include_directories(${this_exe} PRIVATE
+
+    ${CMAKE_CURRENT_SOURCE_DIR}
+    ${CMAKE_CURRENT_SOURCE_DIR}/Player
+    # ${CMAKE_CURRENT_SOURCE_DIR}/VideoPlayer
+    # ${CMAKE_CURRENT_SOURCE_DIR}/demo
+    ${CMAKE_SOURCE_DIR}/External/common
+    ${CMAKE_SOURCE_DIR}/External/module
+    ${CMAKE_SOURCE_DIR}/External/module/ThreadPool
+    ${CMAKE_SOURCE_DIR}/External/module/RingQueue
+    ${CMAKE_SOURCE_DIR}/External/module/VideoPlayer
+    
+    
+    ${CURL_INCLUDE_DIR}
+    ${FFMPEG_INCLUDE_DIR}
+    ${spdlog_INCLUDE_DIR}
+)
+
+target_link_libraries(${this_exe} PRIVATE
+    Qt5::Widgets
+    Qt5::Core
+    Qt5::Network
+    # Qt5::Multimedia
+    # Qt5::Xml
+    # Qt5::Sql
+)
+
+target_link_libraries(${this_exe} PRIVATE 
+    # fmt::fmt
+    # spdlog::spdlog
+    ${CURL_LIBRARY}
+    ${FFMPEG_LIBRARY}
+    ${spdlog_LIBRARY}
+)
+
+find_package(OpenGL REQUIRED)
+target_link_libraries(${this_exe} PRIVATE ${OPENGL_LIBRARIES})
+
+if(CMAKE_CXX_COMPILER_VERSION VERSION_LESS 9.0) # GCC < 9 needs an explicit link to stdc++fs
+    target_link_libraries(${this_exe} PRIVATE
+        stdc++fs
+    )
+endif()
+
+# target_link_libraries(${this_exe} PRIVATE
+#     GL
+#     GLU
+# )
+# message(STATUS "CURL_LIBRARY: ${CURL_LIBRARY}")
+
+
+# if(CMAKE_CXX_COMPILER_ID MATCHES MSVC)
+#     target_link_libraries(${this_exe} PRIVATE
+#         # debug spdlogd.lib
+#         # optimized spdlog.lib
+#     )
+# elseif(CMAKE_CXX_COMPILER_ID MATCHES GNU)
+#     target_link_libraries(${this_exe} PRIVATE
+#         # debug 
+#         # optimized ${SM_DLL}
+#     )
+# endif()

+ 149 - 0
demo/VideoPlayerGL/Player/PlayerGLWidget(副本).cpp_

@@ -0,0 +1,149 @@
+#include "PlayerGLWidget.h"
+
+
+PlayerGLWidget::PlayerGLWidget(QWidget *parent) : QOpenGLWidget(parent)
+{
+
+}
+
+PlayerGLWidget::~PlayerGLWidget()
+{
+
+}
+
+/* Refresh one frame */
+void PlayerGLWidget::updateFrame(Image_YUV420& image)
+{
+    yData = image.yData;
+    uData = image.uData;
+    vData = image.vData;
+    update();
+}
+
+
+void PlayerGLWidget::initShaders()
+{
+    // Vertex shader
+    const char *vshader = R"(
+        #version 330 core
+        layout(location = 0) in vec4 vertexIn;
+        layout(location = 1) in vec2 textureIn;
+        out vec2 textureOut;
+        void main(void)
+        {
+            gl_Position = vertexIn;
+            textureOut = textureIn;
+        })";
+    // Fragment shader (YUV420P to RGB)
+    const char *fshader = R"(
+        #version 330 core
+        in vec2 textureOut;
+        out vec4 fragColor;
+        uniform sampler2D tex_y;
+        uniform sampler2D tex_u;
+        uniform sampler2D tex_v;
+        void main(void)
+        {
+            float y = texture(tex_y, textureOut).r;
+            float u = texture(tex_u, textureOut).r - 0.5;
+            float v = texture(tex_v, textureOut).r - 0.5;
+            float r = y + 1.403 * v;
+            float g = y - 0.344 * u - 0.714 * v;
+            float b = y + 1.770 * u;
+            fragColor = vec4(r, g, b, 1.0);
+        })";
+    // Create the shader program
+    program.addShaderFromSourceCode(QOpenGLShader::Vertex, vshader);
+    program.addShaderFromSourceCode(QOpenGLShader::Fragment, fshader);
+    program.link();
+}
+
+void PlayerGLWidget::initTextures()
+{
+    // Create the YUV textures
+    textureY = new QOpenGLTexture(QOpenGLTexture::Target2D);
+    textureU = new QOpenGLTexture(QOpenGLTexture::Target2D);
+    textureV = new QOpenGLTexture(QOpenGLTexture::Target2D);
+
+    textureY->create();
+    textureU->create();
+    textureV->create();
+}
+
+// void PlayerGLWidget::initVertex()
+// {
+
+// }
+
+// void PlayerGLWidget::initFrameBuffer()
+// {
+
+// }
+
+void PlayerGLWidget::renderYUV420P()
+{
+    if (yData.isEmpty() || uData.isEmpty() || vData.isEmpty())
+        return;
+
+    // Bind the textures
+    textureY->bind(0);
+    textureU->bind(1);
+    textureV->bind(2);
+
+    // Upload the YUV planes to the textures
+    textureY->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt8, yData.constData());
+    textureU->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt8, uData.constData());
+    textureV->setData(QOpenGLTexture::Red, QOpenGLTexture::UInt8, vData.constData());
+
+    // Bind the shader program and point the samplers at texture units 0..2
+    program.bind();
+    program.setUniformValue("tex_y", 0);
+    program.setUniformValue("tex_u", 1);
+    program.setUniformValue("tex_v", 2);
+
+    // Draw a full-screen quad
+    static const GLfloat vertices[] = {
+        -1.0f, -1.0f, 0.0f,
+         1.0f, -1.0f, 0.0f,
+        -1.0f,  1.0f, 0.0f,
+         1.0f,  1.0f, 0.0f
+    };
+
+    static const GLfloat texCoords[] = {
+        0.0f, 1.0f,
+        1.0f, 1.0f,
+        0.0f, 0.0f,
+        1.0f, 0.0f
+    };
+
+    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, vertices);
+    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, texCoords);
+    glEnableVertexAttribArray(0);
+    glEnableVertexAttribArray(1);
+
+    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
+
+    program.release();
+}
+
+void PlayerGLWidget::initializeGL()
+{
+    initializeOpenGLFunctions();
+    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+    // Initialize the shaders
+    initShaders();
+    initTextures();
+}
+
+void PlayerGLWidget::resizeGL(int w, int h)
+{
+    glViewport(0, 0, w, h);
+}
+
+void PlayerGLWidget::paintGL()
+{
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    // Render the YUV data
+    renderYUV420P();
+}

+ 54 - 0
demo/VideoPlayerGL/Player/PlayerGLWidget(副本).h_

@@ -0,0 +1,54 @@
+#ifndef PLAYEROPENGLWIDGET_H
+#define PLAYEROPENGLWIDGET_H
+
+
+#include <QOpenGLWidget>
+#include <QOpenGLFunctions>
+#include <QOpenGLShaderProgram>
+#include <QOpenGLTexture>
+#include <QTimer>
+
+#include "PlayerGlobalInfo.h"
+
+
+class PlayerGLWidget : public QOpenGLWidget , public QOpenGLFunctions
+{
+    Q_OBJECT
+public:
+    explicit PlayerGLWidget(QWidget *parent = nullptr);
+    ~PlayerGLWidget();
+
+    /* 刷新一帧 */
+    void updateFrame(Image_YUV420& image);
+
+
+private:
+    void initShaders();
+    void initTextures();
+    // void initVertex();
+    // void initFrameBuffer();
+
+    void renderYUV420P();
+
+protected:
+    void initializeGL() override;
+    void resizeGL(int w, int h) override;
+    void paintGL() override;
+
+private:
+
+    QOpenGLShaderProgram program;
+    QOpenGLTexture *textureY = nullptr;
+    QOpenGLTexture *textureU = nullptr;
+    QOpenGLTexture *textureV = nullptr;
+
+    QByteArray yData;
+    QByteArray uData;
+    QByteArray vData;
+
+    int width = 0;
+    int height = 0;
+
+};
+
+#endif /* PLAYEROPENGLWIDGET_H */

+ 221 - 0
demo/VideoPlayerGL/Player/PlayerGLWidget.cpp

@@ -0,0 +1,221 @@
+#include "PlayerGLWidget.h"
+#include <QOpenGLFunctions> // GL function wrappers
+#include <QApplication>
+
+PlayerGLWidget::PlayerGLWidget(QWidget *parent) : QOpenGLWidget(parent)
+{
+    QString imagePath = QApplication::applicationDirPath() + "/0.jpg";
+    QImage image(imagePath);
+    convertQImageToYUV420(image, m_YUV420);
+    /* Display the image */
+    updateFrame(m_YUV420);
+}
+
+PlayerGLWidget::~PlayerGLWidget()
+{
+
+}
+
+/* Refresh one frame */
+void PlayerGLWidget::updateFrame(Image_YUV420& image)
+{
+    yData = image.yData;
+    uData = image.uData;
+    vData = image.vData;
+    imageSize_ = QSize(image.width, image.height);
+
+    update();
+}
+
+/* Refresh one frame from a QImage */
+void PlayerGLWidget::updateFrame(Image_QImage& image)
+{
+    m_image = image;
+
+    update();
+}
+
+void PlayerGLWidget::convertQImageToYUV420(const QImage& image, Image_YUV420& yuv420)
+{
+    int width = image.width();
+    int height = image.height();
+    int ySize = width * height;
+    int uvSize = ySize / 4;
+
+    yuv420.width = width;
+    yuv420.height = height;
+
+    yuv420.yData.resize(ySize);
+    yuv420.uData.resize(uvSize);
+    yuv420.vData.resize(uvSize);
+
+    for (int y = 0; y < height; ++y) {
+        for (int x = 0; x < width; ++x) {
+            QColor color = image.pixelColor(x, y);
+            int r = color.red();
+            int g = color.green();
+            int b = color.blue();
+
+            int yIndex = y * width + x;
+            yuv420.yData[yIndex] = static_cast<unsigned char>((0.257 * r) + (0.504 * g) + (0.098 * b) + 16);
+
+            if (y % 2 == 0 && x % 2 == 0) {
+                int uvIndex = (y / 2) * (width / 2) + (x / 2);
+                yuv420.uData[uvIndex] = static_cast<unsigned char>((-0.148 * r) - (0.291 * g) + (0.439 * b) + 128);
+                yuv420.vData[uvIndex] = static_cast<unsigned char>((0.439 * r) - (0.368 * g) - (0.071 * b) + 128);
+            }
+        }
+    }
+}
+
+void PlayerGLWidget::initializeGL()
+{
+    initializeOpenGLFunctions(); // resolve the OpenGL 3.3 core function pointers
+
+    // Create and compile the vertex shader
+    const char* vertexShaderSource = R"(
+        #version 330 core
+        layout(location = 0) in vec3 aPos;
+        layout(location = 1) in vec2 aTexCoord;
+        out vec2 TexCoord;
+        void main()
+        {
+            gl_Position = vec4(aPos, 1.0);
+            TexCoord = vec2(aTexCoord.x, 1.0 - aTexCoord.y); // flip the texture coordinate vertically
+        }
+    )";
+    GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
+    glShaderSource(vertexShader, 1, &vertexShaderSource, nullptr);
+    glCompileShader(vertexShader);
+
+    // Create and compile the fragment shader
+    const char* fragmentShaderSource = R"(
+        #version 330 core
+        out vec4 FragColor;
+        in vec2 TexCoord;
+        uniform sampler2D textureY;
+        uniform sampler2D textureU;
+        uniform sampler2D textureV;
+        void main()
+        {
+            float y = texture(textureY, TexCoord).r;
+            float u = texture(textureU, TexCoord).r - 0.5;
+            float v = texture(textureV, TexCoord).r - 0.5;
+            float r = y + 1.402 * v;
+            float g = y - 0.344 * u - 0.714 * v;
+            float b = y + 1.772 * u;
+            FragColor = vec4(r, g, b, 1.0);
+        }
+    )";
+    GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
+    glShaderSource(fragmentShader, 1, &fragmentShaderSource, nullptr);
+    glCompileShader(fragmentShader);
+
+    // Link the shader program
+    shaderProgram = glCreateProgram();
+    glAttachShader(shaderProgram, vertexShader);   // attach the compiled shader objects to the program
+    glAttachShader(shaderProgram, fragmentShader);
+    glLinkProgram(shaderProgram);
+
+    // The shader objects can be deleted once the program is linked
+    glDeleteShader(vertexShader);
+    glDeleteShader(fragmentShader);
+
+    // Create the Y/U/V textures
+    glGenTextures(1, &textureIdY_);
+    glGenTextures(1, &textureIdU_);
+    glGenTextures(1, &textureIdV_);
+
+    glBindTexture(GL_TEXTURE_2D, textureIdY_);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+
+    glBindTexture(GL_TEXTURE_2D, textureIdU_);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+
+    glBindTexture(GL_TEXTURE_2D, textureIdV_);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+
+    // Set up vertex data and buffers
+    float vertices[] = {
+        // positions          // texture coords
+        -1.0f, -1.0f, 0.0f,  0.0f, 0.0f,
+         1.0f, -1.0f, 0.0f,  1.0f, 0.0f,
+         1.0f,  1.0f, 0.0f,  1.0f, 1.0f,
+        -1.0f,  1.0f, 0.0f,  0.0f, 1.0f
+    };
+    unsigned int indices[] = {
+        0, 1, 2,
+        2, 3, 0
+    };
+
+    glGenVertexArrays(1, &VAO);
+    glGenBuffers(1, &VBO);
+    glGenBuffers(1, &EBO);
+
+    glBindVertexArray(VAO);
+
+    glBindBuffer(GL_ARRAY_BUFFER, VBO);
+    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
+
+    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
+    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
+
+    // Position attribute
+    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)0);
+    glEnableVertexAttribArray(0);
+    // Texture-coordinate attribute
+    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)(3 * sizeof(float)));
+    glEnableVertexAttribArray(1);
+
+    glBindBuffer(GL_ARRAY_BUFFER, 0);
+    glBindVertexArray(0);
+}
+
+void PlayerGLWidget::resizeGL(int w, int h)
+{
+    Ortho2DSize_.setWidth(w);
+    Ortho2DSize_.setHeight(h);
+    glViewport(0, 0, w, h);
+    glMatrixMode(GL_PROJECTION);
+    glLoadIdentity();
+    glOrtho(0, Ortho2DSize_.width(), Ortho2DSize_.height(), 0, -1, 1);
+    glMatrixMode(GL_MODELVIEW);
+}
+
+void PlayerGLWidget::paintGL()
+{
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    if (yData.isEmpty() || uData.isEmpty() || vData.isEmpty()) {
+        return;
+    }
+
+    // Use the shader program
+    glUseProgram(shaderProgram);
+
+    // Upload the YUV planes to the textures
+    glActiveTexture(GL_TEXTURE0);
+    glBindTexture(GL_TEXTURE_2D, textureIdY_);
+    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, imageSize_.width(), imageSize_.height(), 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, yData.data());
+
+    glActiveTexture(GL_TEXTURE1);
+    glBindTexture(GL_TEXTURE_2D, textureIdU_);
+    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, imageSize_.width() / 2, imageSize_.height() / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, uData.data());
+
+    glActiveTexture(GL_TEXTURE2);
+    glBindTexture(GL_TEXTURE_2D, textureIdV_);
+    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, imageSize_.width() / 2, imageSize_.height() / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, vData.data());
+
+    // Point the sampler uniforms at texture units 0..2
+    glUniform1i(glGetUniformLocation(shaderProgram, "textureY"), 0);
+    glUniform1i(glGetUniformLocation(shaderProgram, "textureU"), 1);
+    glUniform1i(glGetUniformLocation(shaderProgram, "textureV"), 2);
+
+    // Draw the quad
+    glBindVertexArray(VAO);
+    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
+    glBindVertexArray(0);
+}

+ 78 - 0
demo/VideoPlayerGL/Player/PlayerGLWidget.h

@@ -0,0 +1,78 @@
+#ifndef PLAYEROPENGLWIDGET_H
+#define PLAYEROPENGLWIDGET_H
+
+
+#include <QOpenGLWidget>
+#include <QOpenGLFunctions>
+#include <QOpenGLFunctions_3_3_Core> // OpenGL 3.3 core profile functions used by this widget
+#include <QOpenGLTexture>
+#include "PlayerGlobalInfo.h"
+
+/**
+ * @brief OpenGL widget that renders one YUV420 / QImage frame through a GL 3.3 core shader pipeline
+ * 
+ */
+
+
+class PlayerGLWidget : public QOpenGLWidget, protected QOpenGLFunctions_3_3_Core // uses the 3.3 core function set
+{
+    Q_OBJECT
+
+    enum
+    {
+        Left_Bottom_X,
+        Left_Bottom_Y,
+        Right_Bottom_X,
+        Right_Bottom_Y,
+        Right_Top_X,
+        Right_Top_Y,
+        Left_Top_X,
+        Left_Top_Y,
+        Pos_Max
+    };
+
+
+public:
+    explicit PlayerGLWidget(QWidget *parent = nullptr);
+    ~PlayerGLWidget();
+
+    /* Refresh one frame */
+    void updateFrame(Image_YUV420& image);
+    /* Refresh one frame from a QImage */
+    void updateFrame(Image_QImage& image);
+
+
+private:
+    /* Convert a QImage to YUV420 */
+    void convertQImageToYUV420(const QImage& image, Image_YUV420& yuv420);
+
+protected:
+    void initializeGL() override;
+    void resizeGL(int w, int h) override;
+    void paintGL() override;
+
+    // void setImage(const QImage &image);
+
+
+private:
+
+    Image_QImage m_image;
+    Image_YUV420 m_YUV420;
+
+    QByteArray yData;
+    QByteArray uData;
+    QByteArray vData;
+
+    QOpenGLTexture* textureY_;
+    QOpenGLTexture* textureU_;
+    QOpenGLTexture* textureV_;
+    GLuint textureIdY_;
+    GLuint textureIdU_;
+    GLuint textureIdV_;
+    QSize Ortho2DSize_;
+    QSize imageSize_;       // size of the source image
+    GLuint shaderProgram;   // shader program object
+    GLuint VAO, VBO, EBO;   // vertex array / vertex buffer / element buffer objects
+};
+
+#endif /* PLAYEROPENGLWIDGET_H */

+ 137 - 0
demo/VideoPlayerGL/Player/PlayerGLWidget2.cpp_

@@ -0,0 +1,137 @@
+#include "PlayerGLWidget.h"
+
+
+PlayerGLWidget::PlayerGLWidget(QWidget *parent) : QOpenGLWidget(parent)
+{
+
+}
+
+PlayerGLWidget::~PlayerGLWidget()
+{
+
+}
+
+/* 刷新一帧 */
+void PlayerGLWidget::updateFrame(Image_YUV420& image)
+{
+    // yData = image.yData;
+    // uData = image.uData;
+    // vData = image.vData;
+    update();
+}
+
+/* 刷新一帧QImage */
+void PlayerGLWidget::updateFrame(Image_QImage& image)
+{
+    m_image = image;
+
+    texture->setData(m_image.image);
+    /* 设置纹理细节 */
+    texture->setLevelofDetailBias(-1);
+    update();
+}
+
+
+void PlayerGLWidget::initShaders()
+{
+    //纹理坐标
+    texCoords.append(QVector2D(0, 1)); //左上
+    texCoords.append(QVector2D(1, 1)); //右上
+    texCoords.append(QVector2D(0, 0)); //左下
+    texCoords.append(QVector2D(1, 0)); //右下
+    //顶点坐标
+    vertices.append(QVector3D(-1, -1, 1));//左下
+    vertices.append(QVector3D(1, -1, 1)); //右下
+    vertices.append(QVector3D(-1, 1, 1)); //左上
+    vertices.append(QVector3D(1, 1, 1));  //右上
+    QOpenGLShader *vshader = new QOpenGLShader(QOpenGLShader::Vertex, this);
+    const char *vsrc =
+            "attribute vec4 vertex;\n"
+            "attribute vec2 texCoord;\n"
+            "varying vec2 texc;\n"
+            "void main(void)\n"
+            "{\n"
+            "    gl_Position = vertex;\n"
+            "    texc = texCoord;\n"
+            "}\n";
+    vshader->compileSourceCode(vsrc);//编译顶点着色器代码
+ 
+    QOpenGLShader *fshader = new QOpenGLShader(QOpenGLShader::Fragment, this);
+    const char *fsrc =
+            "uniform sampler2D texture;\n"
+            "varying vec2 texc;\n"
+            "void main(void)\n"
+            "{\n"
+            "    gl_FragColor = texture2D(texture,texc);\n"
+            "}\n";
+    fshader->compileSourceCode(fsrc); //编译纹理着色器代码
+ 
+    program.addShader(vshader);//添加顶点着色器
+    program.addShader(fshader);//添加纹理碎片着色器
+    program.bindAttributeLocation("vertex", 0);//绑定顶点属性位置
+    program.bindAttributeLocation("texCoord", 1);//绑定纹理属性位置
+    // 链接着色器管道
+    if (!program.link())
+        close();
+    // 绑定着色器管道
+    if (!program.bind())
+        close();
+}
+
+void PlayerGLWidget::initTextures()
+{
+    // 加载 Avengers.jpg 图片
+    texture = new QOpenGLTexture(QOpenGLTexture::Target2D);
+    texture->setMinificationFilter(QOpenGLTexture::LinearMipMapLinear);
+    texture->setMagnificationFilter(QOpenGLTexture::Linear);
+    //重复使用纹理坐标
+    //纹理坐标(1.1, 1.2)与(0.1, 0.2)相同
+    texture->setWrapMode(QOpenGLTexture::Repeat);
+    //设置纹理大小
+    texture->setSize(this->width(), this->height());
+    //分配储存空间
+    texture->allocateStorage();
+
+}
+
+
+void PlayerGLWidget::initializeGL()
+{
+    initializeOpenGLFunctions(); //初始化OPenGL功能函数
+    glClearColor(0, 0, 0, 0);    //设置背景为黑色
+    glEnable(GL_TEXTURE_2D);     //设置纹理2D功能可用
+    initTextures();              //初始化纹理设置
+    initShaders();               //初始化shaders
+
+}
+
+void PlayerGLWidget::resizeGL(int w, int h)
+{
+    // 计算窗口横纵比
+    qreal aspect = qreal(w) / qreal(h ? h : 1);
+    // 设置近平面值 3.0, 远平面值 7.0, 视场45度
+    const qreal zNear = 3.0, zFar = 7.0, fov = 45.0;
+    // 重设投影
+    projection.setToIdentity();
+    // 设置透视投影
+    projection.perspective(fov, static_cast<float>(aspect), zNear, zFar);
+
+}
+
+void PlayerGLWidget::paintGL()
+{
+    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); //清除屏幕缓存和深度缓冲
+    QMatrix4x4 matrix;
+    matrix.translate(0.0, 0.0, -5.0);                   //矩阵变换
+    program.enableAttributeArray(0);
+    program.enableAttributeArray(1);
+    program.setAttributeArray(0, vertices.constData());
+    program.setAttributeArray(1, texCoords.constData());
+    program.setUniformValue("texture", 0); //将当前上下文中位置的统一变量设置为value
+    texture->bind();  //绑定纹理
+    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);//绘制纹理
+    texture->release(); //释放绑定的纹理
+    texture->destroy(); //消耗底层的纹理对象
+    texture->create();
+
+}

+ 55 - 0
demo/VideoPlayerGL/Player/PlayerGLWidget2.h_

@@ -0,0 +1,55 @@
+#ifndef PLAYEROPENGLWIDGET_H
+#define PLAYEROPENGLWIDGET_H
+
+
+#include <QOpenGLWidget>
+#include <QOpenGLFunctions>
+#include <QOpenGLShaderProgram>
+#include <QOpenGLTexture>
+#include <QTimer>
+
+#include "PlayerGlobalInfo.h"
+
+/**
+ * @brief Plays video, but CPU usage is even higher than the pure-CPU (QPainter) rendering path
+ * 
+ */
+
+
+class PlayerGLWidget : public QOpenGLWidget , public QOpenGLFunctions
+{
+    Q_OBJECT
+public:
+    explicit PlayerGLWidget(QWidget *parent = nullptr);
+    ~PlayerGLWidget();
+
+    /* 刷新一帧 */
+    void updateFrame(Image_YUV420& image);
+    /* 刷新一帧QImage */
+    void updateFrame(Image_QImage& image);
+
+
+private:
+    void initShaders();
+    void initTextures();
+
+protected:
+    void initializeGL() override;
+    void resizeGL(int w, int h) override;
+    void paintGL() override;
+
+    // void setImage(const QImage &image);
+
+
+private:
+
+    QVector<QVector3D> vertices;
+    QVector<QVector2D> texCoords;
+    QOpenGLShaderProgram program;
+    QOpenGLTexture *texture;
+    QMatrix4x4 projection;
+
+    Image_QImage m_image;
+};
+
+#endif /* PLAYEROPENGLWIDGET_H */

+ 103 - 0
demo/VideoPlayerGL/Player/PlayerWidget.cpp

@@ -0,0 +1,103 @@
+#include "PlayerWidget.h"
+
+#include <algorithm>
+#include <QResizeEvent>
+#include <QPainter>
+
+
+PlayerWidget::PlayerWidget(QWidget *parent) : QWidget(parent)
+{
+    m_windowSize = QSize(0, 0);
+}
+
+PlayerWidget::~PlayerWidget()
+{
+
+}
+
+/* Refresh one frame */
+void PlayerWidget::updateFrame(Image_YUV420& yuvData)
+{
+    YUV420ToQImage(yuvData, m_image);
+    if(m_windowSize.width() == 0 || m_windowSize.height() == 0)
+    {
+        m_imageWidth = yuvData.width;
+        m_imageHeight = yuvData.height;
+    }
+    update();
+}
+
+void PlayerWidget::updateFrame(Image_QImage& imageData)
+{
+    if(m_windowSize.width() == 0 || m_windowSize.height() == 0)
+    {
+        m_imageWidth = imageData.width;
+        m_imageHeight = imageData.height;
+    }
+    m_image = std::move(imageData.image);
+    update();
+}
+
+
+/* Paint event */
+void PlayerWidget::paintEvent(QPaintEvent *event)
+{
+
+    // SPDLOG_TRACE("Start drawing the frame...");
+    /* Scale the image to the window size */
+    if(m_windowSize.width() != m_imageWidth || m_windowSize.height() != m_imageHeight)
+    {
+        m_image = m_image.scaled(m_windowSize.width(), m_windowSize.height(), Qt::KeepAspectRatio, Qt::SmoothTransformation);
+    }
+    QPainter painter(this);
+    painter.drawImage(0, 0, m_image);
+}
+
+/* Resize event */
+void PlayerWidget::resizeEvent(QResizeEvent *event)
+{
+    m_windowSize.setWidth(event->size().width());
+    m_windowSize.setHeight(event->size().height());
+
+    QWidget::resizeEvent(event);
+}
+
+/* Convert YUV420 to QImage */
+void PlayerWidget::YUV420ToQImage(Image_YUV420& yuvData, QImage& image)
+{
+    int width = yuvData.width;
+    int height = yuvData.height;
+    int frameSize = yuvData.width * yuvData.height;
+    int chromaSize = frameSize / 4;
+
+    QImage image_rgb(width, height, QImage::Format_RGB888);
+
+    for (int y = 0; y < height; ++y) {
+        for (int x = 0; x < width; ++x) {
+            int yIndex = y * width + x;
+            int uIndex = (y / 2) * (width / 2) + (x / 2);
+            int vIndex = uIndex;
+
+            uint8_t Y = yuvData.yData.at(yIndex);
+            uint8_t U = yuvData.uData[uIndex];
+            uint8_t V = yuvData.vData[vIndex];
+
+            int C = Y - 16;
+            int D = U - 128;
+            int E = V - 128;
+
+            int R = (298 * C + 409 * E + 128) >> 8;
+            int G = (298 * C - 100 * D - 208 * E + 128) >> 8;
+            int B = (298 * C + 516 * D + 128) >> 8;
+
+            R = std::clamp(R, 0, 255);
+            G = std::clamp(G, 0, 255);
+            B = std::clamp(B, 0, 255);
+
+            image_rgb.setPixel(x, y, qRgb(R, G, B));
+        }
+    }
+
+    image = image_rgb;
+}
+

+ 38 - 0
demo/VideoPlayerGL/Player/PlayerWidget.h

@@ -0,0 +1,38 @@
+#ifndef PLAYERWIDGET_H
+#define PLAYERWIDGET_H
+
+#include <QWidget>
+#include "PlayerGlobalInfo.h"
+
+class PlayerWidget : public QWidget
+{
+    Q_OBJECT
+public:
+    PlayerWidget(QWidget *parent = nullptr);
+    ~PlayerWidget();
+
+    /* Refresh one frame */
+    void updateFrame(Image_YUV420& yuvData);
+    void updateFrame(Image_QImage& imageData);
+
+protected:
+    /* Paint event */
+    void paintEvent(QPaintEvent *event) override;
+    /* Resize event */
+    void resizeEvent(QResizeEvent *event) override;
+
+private:
+    /* Convert YUV420 to QImage */
+    void YUV420ToQImage(Image_YUV420& yuvData, QImage& image);
+
+private:
+
+    int m_imageWidth = 0;
+    int m_imageHeight = 0;
+    QImage m_image;
+    // Image_QImage m_image;
+
+    QSize m_windowSize;         /* window size */
+};
+
+#endif /* PLAYERWIDGET_H */
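
For context, a minimal sketch of how a caller might pump decoded frames from DecodeVedio (added later in this commit) into PlayerWidget. The startFramePump() helper, its include paths and the 25 fps fallback are assumptions, not part of this diff; the ownership rule follows the getOneImage() documentation in DecodeVedio.cpp:

    // Hypothetical glue code: poll the decoder's frame queue and hand frames to the widget.
    #include <QTimer>
    #include "PlayerWidget.h"
    #include "VideoPlayer/DecodeVedio.h"   // assumed header path for the DecodeVedio class

    void startFramePump(DecodeVedio *decoder, PlayerWidget *widget)
    {
        QTimer *timer = new QTimer(widget);
        QObject::connect(timer, &QTimer::timeout, widget, [=]() {
            // getOneImage() pops one frame and transfers ownership (or returns nullptr).
            QImage *frame = decoder->getOneImage();
            if (frame == nullptr)
                return;
            Image_QImage img;
            img.width  = frame->width();
            img.height = frame->height();
            img.image  = *frame;
            widget->updateFrame(img);   // PlayerWidget moves the QImage out of img
            delete frame;               // caller releases the frame, per the getOneImage() contract
        });
        timer->start(1000 / 25);        // assume 25 fps when the real frame rate is unknown
    }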

+ 76 - 0
demo/VideoPlayerGL/PlayerGlobalInfo.cpp

@@ -0,0 +1,76 @@
+#include "PlayerGlobalInfo.h"
+
+
+
+/* Move constructor */
+Image_YUV420::Image_YUV420(Image_YUV420&& other)
+{
+    yData = std::move(other.yData);
+    uData = std::move(other.uData);
+    vData = std::move(other.vData);
+    width = other.width;
+    height = other.height;
+}
+/* Copy constructor */
+Image_YUV420::Image_YUV420(const Image_YUV420& other)
+{
+    yData = other.yData;
+    uData = other.uData;
+    vData = other.vData;
+    width = other.width;
+    height = other.height;
+}
+
+
+/* Copy assignment operator */
+Image_YUV420& Image_YUV420::operator=(const Image_YUV420& other)
+{
+    yData = other.yData;
+    uData = other.uData;
+    vData = other.vData;
+    width = other.width;
+    height = other.height;
+    return *this;
+}
+
+Image_QImage::Image_QImage()
+{
+    width = 0;
+    height = 0;
+
+    /* Default to RGB888 */
+    image =  QImage(1, 1, QImage::Format_RGB888);
+}
+
+/* Move constructor */
+Image_QImage::Image_QImage(Image_QImage&& other)
+{
+    width = other.width;
+    height = other.height;
+    image = std::move(other.image);
+}
+
+/* Copy constructor */
+Image_QImage::Image_QImage(const Image_QImage& other)
+{
+    width = other.width;
+    height = other.height;
+    image = other.image;
+}
+
+/* Copy assignment operator */
+Image_QImage& Image_QImage::operator=(const Image_QImage& other)
+{
+    width = other.width;
+    height = other.height;
+    image = other.image;
+    return *this;
+}
+
+/* Move-assign helper */
+void Image_QImage::moveFrom(Image_QImage& other)
+{
+    width = other.width;
+    height = other.height;
+    image = std::move(other.image);
+}

+ 51 - 0
demo/VideoPlayerGL/PlayerGlobalInfo.h

@@ -0,0 +1,51 @@
+#ifndef PLAYERGLOBALINFO_H
+#define PLAYERGLOBALINFO_H
+
+// #include <vector>
+#include <QByteArray>
+#include <QImage>
+
+/**
+ * @brief One frame of planar YUV420 image data
+ * 
+ */
+struct Image_YUV420
+{
+    QByteArray yData;
+    QByteArray uData;
+    QByteArray vData;
+
+    int width;          /* image width */
+    int height;         /* image height */
+
+    Image_YUV420() : width(0), height(0) {}
+    ~Image_YUV420() {}
+    /* Move constructor */
+    Image_YUV420(Image_YUV420&& other);
+    /* Copy constructor */
+    Image_YUV420(const Image_YUV420& other);
+    /* Copy assignment operator */
+    Image_YUV420& operator=(const Image_YUV420& other);
+};
+
+struct Image_QImage
+{
+    QImage image;
+    int width;
+    int height;
+
+    Image_QImage();
+    ~Image_QImage() {}
+    /* Move constructor */
+    Image_QImage(Image_QImage&& other);
+    /* Copy constructor */
+    Image_QImage(const Image_QImage& other);
+    /* Copy assignment operator */
+    Image_QImage& operator=(const Image_QImage& other);
+    /* Move-assign helper */
+    void moveFrom(Image_QImage& other);
+};
+
+
+
+#endif /* PLAYERGLOBALINFO_H */
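
A minimal sketch of filling the Image_YUV420 exchange struct declared above from raw planar buffers. The makeFrame() helper is hypothetical; the plane sizes (width*height for Y, width*height/4 for U and V) are assumptions matching convertQImageToYUV420() in PlayerGLWidget.cpp:

    // Hypothetical helper -- not part of this commit.
    #include <QByteArray>
    #include "PlayerGlobalInfo.h"

    Image_YUV420 makeFrame(const char *y, const char *u, const char *v, int width, int height)
    {
        Image_YUV420 frame;
        frame.width  = width;
        frame.height = height;
        frame.yData  = QByteArray(y, width * height);          // Y plane: one byte per pixel
        frame.uData  = QByteArray(u, width * height / 4);      // U plane: 2x2 subsampled
        frame.vData  = QByteArray(v, width * height / 4);      // V plane: 2x2 subsampled
        return frame;   // relies on the copy/move members declared in PlayerGlobalInfo.h
    }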

+ 1069 - 0
demo/VideoPlayerGL/VideoPlayer/DecodeVedio.cpp

@@ -0,0 +1,1069 @@
+#include "DecodeVedio.h"
+#include "spdlog/spdlog.h"
+#include "FmtLog/fmtlog.h"
+
+#include <QThread>
+
+extern "C"
+{
+#include <libavcodec/avcodec.h>
+#include <libavformat/avformat.h>
+#include <libswscale/swscale.h>
+#include <libavutil/imgutils.h>
+}
+
+/*=================================================================================================
+* @brief  Callback used by FFmpeg to select the GPU hardware-decoded frame format
+===================================================================================================*/
+static enum AVPixelFormat g_hw_pix_fmt;
+
+/**
+ * @brief Callback that returns the pixel format of GPU hardware-decoded frames
+ * 
+ * @param ctx 
+ * @param pix_fmts 
+ * @return AVPixelFormat 
+ */
+AVPixelFormat get_hw_format(AVCodecContext *ctx, const AVPixelFormat *pix_fmts)
+{
+    Q_UNUSED(ctx)
+    const AVPixelFormat* p;
+    for(p = pix_fmts; *p != -1; p++)
+    {
+        if(*p == g_hw_pix_fmt)
+        {
+            return *p;
+        }
+    }
+    SPDLOG_WARN("无法获取硬件解码器表面格式。");
+    return AV_PIX_FMT_NONE;
+}
+
+
+/**
+ * @brief Construct a new Decode Vedio:: Decode Vedio object
+ * 
+ * @param thread 
+ * @param parent 
+ */
+DecodeVedio::DecodeVedio(QThread* thread, QObject* parent) : QObject(parent) , m_thread(thread)
+{
+    /* 在连接之前调用,移动到新的线程 */
+    this->moveToThread(thread);
+    connect(this, &DecodeVedio::signal_startDecode, this, &DecodeVedio::do_startDecodeVedio);
+    thread->start();
+    findHWDecoder();
+    // if(m_supportHWDecoder)
+    // {
+    //     SPDLOG_DEBUG("支持的硬件解码器:");
+    //     for(auto it : m_listDecoderName)
+    //     {
+    //         SPDLOG_DEBUG("{}", it.toStdString());
+    //     }
+    // }else {
+    //     SPDLOG_WARN("未找到可用的硬件解码器。");
+    // }
+}
+
+DecodeVedio::~DecodeVedio()
+{
+    stopDecodeVedio();
+    if(m_thread != nullptr)
+    {
+        if(m_thread->isRunning())
+        {
+            m_thread->quit();
+            m_thread->wait();
+        }
+    }
+
+}
+
+
+
+/* Start decoding the video */
+void DecodeVedio::startDecodeVedio()
+{
+    if(m_threadRuning)
+    {
+        return;
+    }
+    if(!m_initFFmpeg)
+    {
+        SPDLOG_WARN("未初始化FFMPEG...");
+        return;
+    }
+    m_threadRuning = true;
+    // decodeUsingCPU();
+    /* 发送信号,开启新线程 */
+    emit signal_startDecode();
+}
+
+/* Stop decoding: the work function returns, but the thread itself is not stopped */
+void DecodeVedio::stopDecodeVedio()
+{
+    if(!m_threadRuning)
+    {
+        return;
+    }
+    exitThread();
+
+    /* 唤醒阻塞住的解码线程 */
+    // /* 等待线程执行结束 */
+    while(m_decodeState.load() != DecodeState::DecodeExit)
+    {
+        std::this_thread::sleep_for(std::chrono::milliseconds(5));
+    }
+    freeAll();
+    m_threadRuning = false;
+}
+
+/**
+ * @brief Set the current playback position in milliseconds.
+ *        The duration covered by frames already sitting in the queue has to be subtracted first.
+ * 
+ * @param pos Target position, in the range 0~duration
+ */
+void DecodeVedio::setCurrentPos(qint64 pos)
+{
+    if(!m_threadRuning)
+    {
+        return;
+    }
+    m_isSeek = true;
+    /* 先暂停解码 */
+    pauseDecode();
+    
+    SPDLOG_DEBUG("跳转到:{}ms",pos);
+    /*在环形队列中有已解码的视频帧数,需要去掉已有的帧数所占的时间 */
+    pos = pos - m_queueImage.QueueSize() * (1000 / m_fps);
+    if(pos < 0) {
+        pos = 0;
+    }
+    if(pos > m_duration) {
+        pos = m_duration;
+    }
+    pos = pos + m_startPos;
+    qint64 targetPos = qRound64((double)pos / (1000 * rationalToDouble(&m_pFormatContext->streams[m_videoStream]->time_base)));
+    /* 开始跳转,这里设置标志为AVSEEK_FLAG_BACKWARD,跳转到目标位置的前一个关键帧中,然后开始解码,直到到达目标位置为止 */
+    int ret = av_seek_frame(m_pFormatContext, m_videoStream, targetPos, AVSEEK_FLAG_BACKWARD);
+    if(ret < 0)
+    {
+        SPDLOG_ERROR("跳转失败!");
+    }
+    m_targetPos = pos;
+    /* 刷新解码器缓冲区 */
+    m_flushDecoder.store(true);
+    /* 清空环形队列中的视频 */
+    SPDLOG_DEBUG("清空环形队列中的视频。");
+    QImage* image = 0;
+    while (m_queueImage.QueueSize() > 0)
+    {
+        image = nullptr;
+        m_queueImage.front_pop_NoBlock(image);
+        if(image != nullptr)
+        {
+            delete image;
+        }
+    }
+    
+    /* 继续解码 */
+    continueDecode();
+}
+
+/* 获取当前播放位置,单位ms */
+qint64 DecodeVedio::getCurrentPos()
+{
+    return m_pts.load() - m_startPos;
+}
+
+/* 获取视频时长 */
+qint64 DecodeVedio::getDuration()
+{
+    return m_duration;
+}
+
+/**
+ * @brief Get one frame; returns nullptr if the queue is empty. Intended to be called from the UI thread.
+ * @warning The frame is popped from the queue when returned; the caller owns the memory and must delete it.
+ * @return QImage* pointer to one frame
+ */
+QImage* DecodeVedio::getOneImage()
+{
+    if(!m_threadRuning)
+    {
+        return nullptr;
+    }
+    QImage* image = nullptr;
+    if(!m_queueImage.front_pop_NoBlock(image))
+    {
+        return nullptr;
+    }
+    return image;
+}
+
+/**
+ * @brief Get one frame, waiting until one is available
+ * 
+ * @param timeOut -1 waits indefinitely; a positive value is the wait time in ms
+ * @return QImage* 
+ */
+QImage* DecodeVedio::getOneImageUntilHave(int timeOut)
+{
+    if(!m_threadRuning)
+    {
+        return nullptr;
+    }
+    if(timeOut < 0)
+    {
+        QImage* image = m_queueImage.front_pop();
+        return image;
+    }
+    for(int i = 0; i < timeOut; i++)
+    {
+        QImage* image = nullptr;
+        if(m_queueImage.front_pop_NoBlock(image))
+        {
+            return image;
+        }
+        std::this_thread::sleep_for(std::chrono::milliseconds(1));
+    }
+    return nullptr;
+}
+
+
+/* 查找硬件解码器 */
+void DecodeVedio::findHWDecoder(QStringList& listDecoderName)
+{
+    AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;
+    listDecoderName.clear();
+    while( (type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
+    {
+        /* 获取硬件解码器的名称 */
+        const char* typeName = av_hwdevice_get_type_name(type);
+        if(typeName)
+        {
+            listDecoderName.append(QString(typeName));
+        }
+    }
+}
+
+
+/* 获取硬件解码器 */
+void DecodeVedio::findHWDecoder()
+{
+    AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;
+    // QStringList strTypes;
+    m_listDecoderName.clear();
+    while( (type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
+    {
+        m_listHWDeviceType.append(type);
+        /* 获取硬件解码器的名称 */
+        const char* typeName = av_hwdevice_get_type_name(type);
+        if(typeName)
+        {
+            m_listDecoderName.append(QString(typeName));
+        }
+    }
+    if(m_listHWDeviceType.isEmpty())
+    {
+        m_supportHWDecoder = false;
+    }else {
+        m_supportHWDecoder = true;
+    }
+}
+
+
+
+/* 初始化硬件解码器 */
+void DecodeVedio::initHWDecoder(const AVCodec* codec)
+{
+    if(codec == nullptr)
+    {
+        return;
+    }
+    for(int i = 0;;i++)
+    {
+        /* 获取编解码器支持的硬件配置 */
+        const AVCodecHWConfig* hwConfig = avcodec_get_hw_config(codec, 0);
+        if(hwConfig == nullptr)
+        {
+            SPDLOG_WARN("没有找到支持{}的硬件配置", codec->name);
+            return;
+        }
+        /* 判断是否是设备类型 */
+        if(hwConfig->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX)
+        {
+            /* 在已有的解码器列表中查找 */
+            for(auto i : m_listHWDeviceType)
+            {
+                if(hwConfig->device_type == AVHWDeviceType(i))
+                {
+                    /* 获取像素格式 */
+                    g_hw_pix_fmt = hwConfig->pix_fmt;
+
+                    /* 打开指定类型的设备,并为其创建AVHWDeviceContext */
+                    int ret = av_hwdevice_ctx_create(&m_hw_device_ctx, hwConfig->device_type, nullptr, nullptr, 0);
+                    if(ret < 0)
+                    {
+                        SPDLOG_ERROR("打开硬件解码器失败!");
+                        return;
+                    }
+                    SPDLOG_INFO("打开硬件解码器:{}", av_hwdevice_get_type_name(hwConfig->device_type));
+                    m_pCodecContext->hw_device_ctx = av_buffer_ref(m_hw_device_ctx);
+                    m_pCodecContext->get_format = get_hw_format;
+                    return;
+                }
+            }
+        }
+    }
+    
+}
+
+
+/* Copy frame data from GPU memory into system memory */
+bool DecodeVedio::copyDataFromGPU(AVFrame* pFrameHW, AVFrame* pFrameSRC)
+{
+    if(m_pFrameSRC->format != g_hw_pix_fmt)
+    {
+        av_frame_unref(m_pFrameSRC);
+        return false;
+    }
+    /* 这一步可能会耗费较长时间 */
+    int ret = av_hwframe_transfer_data(pFrameHW, pFrameSRC, 0);
+    if(ret < 0)
+    {
+        SPDLOG_ERROR("从GPU拷贝数据失败!");
+        av_frame_unref(pFrameSRC);
+        return false;
+    }
+    av_frame_copy_props(pFrameHW, pFrameSRC);
+    return true;
+}
+
+
+/* Open the video and initialize the decoder */
+void DecodeVedio::openVedio(const QString& fileName)
+{
+    if(fileName.isEmpty())
+    {
+        SPDLOG_WARN("文件名为空...");
+        return;
+    }
+    m_fileName = fileName;
+    if(m_initFFmpeg)
+    {
+        freeAll();
+    }
+
+    /* 清空队列 */
+    if(m_queueImage.QueueSize() > 0)
+    {
+        for(int i = 0; i < m_queueImage.QueueSize(); i++)
+        {
+            QImage* image = nullptr;
+            if (m_queueImage.front_pop_NoBlock(image))
+            {
+                delete image;
+            }
+        }
+        m_queueImage.clearQueue();
+    }
+    m_queueImage.setQueueCapacity(30);
+
+    SPDLOG_DEBUG("开始初始化FFMPEG");
+    /* 设置网络缓冲区大小和错误恢复选项 */
+    AVDictionary *options = nullptr;
+    /* 设置接收缓冲区 */
+    av_dict_set(&options, "buffer_size", "1024000", 0);
+    /* 设置最大复用或解复用延迟(以微秒为单位)。当通过【UDP】 接收数据时,解复用器尝试重新排序接收到的数据包
+     * (因为它们可能无序到达,或者数据包可能完全丢失)。这可以通过将最大解复用延迟设置为零
+     * (通过max_delayAVFormatContext 字段)来禁用。 */
+    av_dict_set(&options, "max_delay", "500000", 0);
+    /* 设置rtsp流使用tcp打开,如果打开失败错误信息为【Error number -135 occurred】可以切换(UDP、tcp、udp_multicast、http),比如vlc推流就需要使用udp打开 */
+    av_dict_set(&options, "rtsp_transport", "tcp", 0);
+    /* 以微秒为单位设置套接字 TCP I/O 超时,如果等待时间过短,也可能会还没连接就返回了。 */
+    av_dict_set(&options, "stimeout", "20000000", 0);
+
+    /************ 存储文件格式信息 ************/
+    /* 打开文件,读取视频文件的头信息,放在第一个参数的结构体中 */
+    int ret = avformat_open_input(  &m_pFormatContext,                  /* 解封装上下文 */
+                                    m_fileName.toStdString().data(),    /* 打开视频地址 */
+                                    nullptr,                            /* 这个参数强制使用特定的输入格式,可以设置为null */
+                                    &options);                             /* 参数设置 */
+    if(ret != 0)
+    {
+        SPDLOG_WARN("打开视频文件错误,错误代码:{}",ret);
+        freeAll();
+        return;
+    }
+    /* 释放参数 */
+    if(options != nullptr)
+    {
+        av_dict_free(&options);
+    }
+
+    /************ 找到视频流 ************/
+    /* 检查视频容器内部的流信息,将所有流存储到了pFormatContext->streams中
+     * 查找到视频流,并获取视频时长相关的信息 */
+    ret = 0;
+    ret = avformat_find_stream_info(m_pFormatContext, nullptr);
+    if(ret < 0)
+    {
+        SPDLOG_WARN("获取视频流错误,错误代码:{}",ret);
+        freeAll();
+        return;
+    }
+    m_duration = m_pFormatContext->duration / (AV_TIME_BASE / 1000);  /* 获取视频时长,单位是毫秒 */
+    m_startPos = m_pFormatContext->start_time / (AV_TIME_BASE / 1000);  /* 获取视频开始时间,单位是毫秒 */
+    // SPDLOG_DEBUG("开始时间:{} 时长:{}",m_startPos, m_duration);
+    
+    /* 一个调试函数,将流信息输出到控制台 */
+    av_dump_format(m_pFormatContext, 0, m_fileName.toStdString().c_str(), 0);
+
+    /* 找到视频流 */
+    m_videoStream = av_find_best_stream(m_pFormatContext, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
+    if(m_videoStream < 0)
+    {
+        SPDLOG_WARN("没有找到视频流");
+        freeAll();
+        return;
+    }
+    SPDLOG_DEBUG("找到视频流");
+    /* 获取视频流的编解码信息,主要是分辨率等信息 */
+    AVStream *pStream = m_pFormatContext->streams[m_videoStream];   /* 获取视频流 */
+    AVCodecParameters* pCodecParams = pStream->codecpar;            /* 获取视频流的编解码信息 */
+    SPDLOG_DEBUG("获取视频流参数成功!");
+    /* 获取视频相关信息 */
+    m_srcSize.setWidth(pCodecParams->width);
+    m_srcSize.setHeight(pCodecParams->height);
+    m_fps = rationalToDouble(&pStream->avg_frame_rate);
+    m_totalFrame = m_pFormatContext->streams[m_videoStream]->nb_frames;
+    m_pts.store(0);
+
+    /************ 查找并设置解码器 ************/
+    /* 找到解码器 */
+    const AVCodec* pCodec = avcodec_find_decoder(pCodecParams->codec_id);
+    if(pCodec == nullptr)
+    {
+        SPDLOG_WARN("没有找到解码器");
+        freeAll();
+        return;
+    }
+    m_decoderName = pCodec->name;
+    // SPDLOG_INFO("找到解码器:{}",pCodec->name);
+    /* 获取视频信息的上下文,先分配空间,后面记得释放空间 */
+    m_pCodecContext = avcodec_alloc_context3(pCodec);
+    /* 将视频流中的编码器参数拷贝下来,这个函数不是线程安全的 */
+    if(avcodec_parameters_to_context(m_pCodecContext, pCodecParams) != 0)
+    {
+        SPDLOG_WARN("复制上下文错误");
+        freeAll();
+        return;
+    }
+    SPDLOG_DEBUG("设置解码器参数成功!");
+    // m_pCodecContext->flags2 |= AV_CODEC_FLAG2_FAST;  /* 使用快速解码(允许使用不符合规范的解码) */
+    m_pCodecContext->thread_count = 8;      /* 使用8线程解码 */
+
+    /* 初始化硬件解码器 */
+    if(m_supportHWDecoder)
+    {
+        initHWDecoder(pCodec);
+    }
+
+    /* 打开解码器,(初始化解码器上下文,如果调用了avcodec_alloc_context3,第二个参数可以设置为nullptr) */
+    if(avcodec_open2(m_pCodecContext, nullptr, nullptr) < 0)
+    {
+        SPDLOG_ERROR("打开解码器错误");
+        freeAll();
+        return;
+    }
+    SPDLOG_DEBUG("打开解码器成功!");
+
+    /************ 初始化数据包 ************/
+    m_packet = av_packet_alloc();
+    av_new_packet(m_packet, m_pCodecContext->width * m_pCodecContext->height);
+
+    /* 创建两个pFrame,一个存放原始数据,一个存放转换后的RGB数据 */
+    m_pFrameSRC = av_frame_alloc();
+    if(m_pFrameSRC == nullptr)
+    {
+        SPDLOG_ERROR("创建pFrame错误");
+        freeAll();
+        return;
+    }
+    // m_pFrameRGB = av_frame_alloc();
+    // if(m_pFrameRGB == nullptr)
+    // {
+    //     SPDLOG_ERROR("创建pFrameRGB错误");
+    //     freeAll();
+    //     return;
+    // }
+    m_pFrameHW = av_frame_alloc();
+    if(m_pFrameHW == nullptr)
+    {
+        SPDLOG_ERROR("创建pFrameHW错误");
+        freeAll();
+        return;
+    }
+
+    if(m_buffer != nullptr)
+    {
+        av_free(m_buffer);
+        m_buffer = nullptr;
+    }
+    
+    /* 分配图像空间 */
+    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, m_pCodecContext->width, m_pCodecContext->height, 1);
+    /* 这里多分配了一些空间,防止某些视频图像在使用sws_scale()拷贝后超出数组长度 */
+    m_buffer = (uint8_t *)av_malloc(numBytes + 1000);
+
+    m_initFFmpeg = true;
+    SPDLOG_INFO("FFMPEG初始化完成!");
+    // SPDLOG_INFO("视频宽度:{} 高度:{} 帧率:{} 总时长:{} 总帧数:{}",m_srcSize.width(), m_srcSize.height(), m_fps, m_duration, m_totalFrame);
+    /* 再次判断帧数是否正常,如果没读取到,就使用 总帧数 / 时长 */
+    if(m_fps == 0)
+    {
+        if((m_duration > 0) && (m_totalFrame > 0))
+        {
+            m_fps = m_totalFrame / (m_duration / 1000.0);
+        }
+        /* 到这一步,无法确定帧数了,就按照25帧来计算了 */
+        if(m_fps == 0)
+        {
+            m_fps = 25;
+        }
+    }
+}
+
+
+/**
+ * @brief Software decoding thread: decodes on the CPU and stores the decoded frames in the ring queue
+ * 
+ */
+void DecodeVedio::threadDecodeUsingCPU()
+{
+    /******** 初始化局部变量 ********/
+    bool isEnd = false;
+    int ret = 0;
+    int retFrame = 0;
+    int retPacket = 0;
+    m_pauseDecode = false;
+    m_decodeStatus = true;
+    m_decodeState.store(DecodeState::DecodeRun);
+
+    /* 开始解码 */
+    SPDLOG_DEBUG("开始解码...");
+    while(m_threadRuning)
+    {
+        /******** 判断是否在暂停状态 ********/
+        while(m_pauseDecode)
+        {
+            m_decodeState.store(DecodeState::DecodePause);
+            std::this_thread::sleep_for(std::chrono::microseconds(100));
+        }
+        m_decodeState.store(DecodeState::DecodeRun);
+        /* 刷新解码器缓冲区,清除掉里面残留的解码文件 */
+        if(m_flushDecoder.load())
+        {
+            avcodec_flush_buffers(m_pCodecContext);
+            m_flushDecoder.store(false);
+        }
+        /******** 读取数据包 av_read_frame ********/
+        int retRead = av_read_frame(m_pFormatContext, m_packet);
+        if(retRead == AVERROR_EOF)
+        {
+            /* 读取到末尾后,需要传入一个空的packet,才能读取到最后几帧 */
+            avcodec_send_packet(m_pCodecContext, m_packet);
+        }
+        else if(retRead < 0)
+        {
+            SPDLOG_ERROR("读取帧错误...");
+            break;
+        }
+        else
+        {
+            if(m_packet->stream_index == m_videoStream)
+            {
+                // SPDLOG_DEBUG("源pts:{}", m_packet->pts);
+                /*  pts 表示显示时间戳(Presentation Timestamp),它指示解码后的帧应该在什么时候显示。pts 是用于同步音视频的关键时间戳。
+                    dts 表示解码时间戳(Decoding Timestamp),它指示解码器应该在什么时候解码这个数据包。dts 用于确保解码器按照正确的顺序解码数据包。 */
+            #if 1
+                /* 计算当前帧时间,两种方法,第一种适用于所有场景,但是存在一定的误差 */
+                m_packet->pts = qRound64(m_packet->pts * (1000 * rationalToDouble(&m_pFormatContext->streams[m_videoStream]->time_base)));
+                m_packet->dts = qRound64(m_packet->dts * (1000 * rationalToDouble(&m_pFormatContext->streams[m_videoStream]->time_base)));
+            #else
+                /* 适用于本地视频,直接计算本地视频文件的每一帧时间 */
+                m_currentFrame++;
+                // m_packet->pts = qRound64(m_currentFrame * (qreal(m_duration) / m_totalFrame));
+            #endif
+                
+                /* 将数据传给解码器 */
+                int ret = avcodec_send_packet(m_pCodecContext, m_packet);
+                if(ret < 0)
+                {
+                    SPDLOG_ERROR("发送数据包错误...");
+                    av_packet_unref(m_packet);
+                    continue;
+                }
+            }else {
+                // SPDLOG_INFO("不是视频流。");
+                av_packet_unref(m_packet);
+                continue;
+            }
+        }
+
+        // SPDLOG_DEBUG("读取到数据包packet,pts:{}",m_packet->pts);
+        /* 解码packet包的内容,一个packet包内可能包含好多帧视频 */
+        while(m_threadRuning)
+        {
+            /* 读取出解码器返回的帧 avcodec_receive_frame */
+            int ret = avcodec_receive_frame(m_pCodecContext, m_pFrameSRC);
+            if(ret == AVERROR_EOF)
+            {
+                SPDLOG_INFO("读取到视频流末尾。");
+                isEnd = true;
+                break;
+            }
+            else if (ret == AVERROR(EAGAIN))
+            {
+                /* The packet did not decode into a complete frame and the decoder needs more input;
+                 * a few frames may already have been produced, or there may not yet be enough data for one.
+                 * Either way, more packets must be sent to the decoder. */
+                // SPDLOG_WARN("packet无法解码成一帧,需要更多的输入包");
+                av_frame_unref(m_pFrameSRC);
+                break;
+            }
+            else if(ret < 0)
+            {
+                av_frame_unref(m_pFrameSRC);
+                if(retRead < 0)
+                {
+                    SPDLOG_ERROR("读取错误,错误值:{}",ret);
+                    break;
+                }
+            }
+            /* 解码成功,获取当前时间,现在已经是准的时间了
+             * 如果在跳转状态,在这里判断是否到了目标位置 */
+            m_pts = m_pFrameSRC->pts;
+            if(m_isSeek.load())
+            {
+                if(m_pts < m_targetPos)
+                {
+                    SPDLOG_DEBUG("目标位置:{} 当前位置:{}",m_targetPos, m_pts.load());
+                    av_frame_unref(m_pFrameSRC);
+                    continue;
+                }else {
+                    m_isSeek = false;
+                    m_targetPos = -1;
+                    SPDLOG_INFO("跳转结束。");
+                }
+            }
+            // SPDLOG_DEBUG("当前帧的pts:{}", m_pts.load());
+
+            /* 转换解码后的帧格式,转换成RGBA格式,Qt可以识别 */
+            if(m_sws_ctx == nullptr)
+            {
+                /* 选择在这里创建,为了兼容硬件解码,硬件解码出来的格式可能和解码器传出的不一样 */
+                m_sws_ctx = sws_getCachedContext(m_sws_ctx, 
+                                                m_pFrameSRC->width, m_pFrameSRC->height,    /* 原图像大小和格式 */
+                                                m_pCodecContext->pix_fmt,                   /* 输入图像的像素格式 */
+                                                m_srcSize.width(), m_srcSize.height(),      /* 目标图像的大小 */
+                                                AV_PIX_FMT_RGBA,                            /* 目标图像的格式 */
+                                                SWS_BILINEAR,                               /* 图像缩放算法,双线性 */
+                                                nullptr,                                    /* 输入图像的滤波器信息,不需要传NULL */
+                                                nullptr,                                    /* 输出图像的滤波器信息,不需要传NULL */
+                                                nullptr);                                   /* 特定缩放算法需要的参数,不需要传NULL */
+                if(m_sws_ctx == nullptr)
+                {
+                    SPDLOG_ERROR("创建SwsContext错误...");
+                    goto label_ThreadDecodeExit;
+                }
+                SPDLOG_INFO("创建SwsContext成功...");
+            }
+            /* 转换成RGBA格式 */
+            // uint8_t* data[1] = { m_buffer };
+            int lines[4];
+            /* 使用像素格式pix_fmt和宽度填充图像的平面线条的大小(一行大小?) */
+            av_image_fill_linesizes(lines, AV_PIX_FMT_RGBA, m_pFrameSRC->width);
+
+            sws_scale(  m_sws_ctx,              /* 缩放的上下文 */
+                        m_pFrameSRC->data,      /* 源图像数组 */
+                        m_pFrameSRC->linesize,  /* 包含源图像每个平面步幅的数组 */
+                        0,                      /* 开始位置 */
+                        m_pFrameSRC->height,    /* 行数 */
+                        &m_buffer,                   /* 目标图像数组 */
+                        lines); /* 目标图像行数 */
+            if(m_buffer != nullptr)
+            {
+                /* 将数据拷贝到QImage中 */
+                auto image = new QImage(m_buffer, m_srcSize.width(), m_srcSize.height(), QImage::Format_RGBA8888);
+                /* 如果队列满,线程会阻塞在这里 */
+                m_queueImage.push(image);
+                // av_frame_unref(m_pFrameRGB);
+                // SPDLOG_DEBUG("一帧视频入队");
+            }
+            av_frame_unref(m_pFrameSRC);
+            /* 如果在跳转过程中,直接退出,防止一个packet中有多个视频帧,再次阻塞在上面 */
+            if(m_isSeek)
+            {
+                break;
+            }
+        }
+        av_packet_unref(m_packet);    /* 释放数据包,引用计数-1,为0时释放空间 */
+        if(isEnd)
+        {
+            emit signal_playCompleted();
+            m_decodeState.store(DecodeState::DecodeStop);
+            /* 读取到结尾,但是不退出解码线程,可能还会使用倒退功能,后退读取 */
+            while(m_decodeState.load() != DecodeState::DecodeRun)
+            {
+                std::this_thread::sleep_for(std::chrono::milliseconds(2));
+            }
+            isEnd = false;
+        }
+    }
+label_ThreadDecodeExit:
+    /* 释放空间 */
+    av_packet_free(&m_packet);
+    m_decodeState.store(DecodeState::DecodeExit);
+    m_threadRuning = false;
+}
+
+/* 退出线程,将所有可能暂停线程运行的条件全部唤醒 */
+void DecodeVedio::exitThread()
+{
+    if(m_threadRuning)
+    {
+        m_threadRuning = false;
+    }
+    /* 设置成运行状态,唤醒可能阻塞在了解码结束的位置 */
+    m_decodeState.store(DecodeState::DecodeRun);
+    m_pauseDecode = false;
+    /* 先退出可能阻塞住的解码线程 */
+    m_queueImage.exit();
+}
+
+/**
+ * @brief Hardware decoding thread: decodes on the GPU and stores the decoded frames in the ring queue
+ * 
+ */
+void DecodeVedio::threadDecodeUsingGPU()
+{
+    /******** 初始化局部变量 ********/
+    bool isEnd = false;
+    int ret = 0;
+    int retFrame = 0;
+    int retPacket = 0;
+    m_pauseDecode = false;
+    m_decodeStatus = true;
+    m_decodeState.store(DecodeState::DecodeRun);
+
+    /* 开始解码 */
+    SPDLOG_DEBUG("开始解码...");
+    while(m_threadRuning)
+    {
+        /******** 判断是否在暂停状态 ********/
+        while(m_pauseDecode)
+        {
+            m_decodeState.store(DecodeState::DecodePause);
+            std::this_thread::sleep_for(std::chrono::microseconds(100));
+        }
+        m_decodeState.store(DecodeState::DecodeRun);
+        /* 刷新解码器缓冲区,清除掉里面残留的解码文件 */
+        if(m_flushDecoder.load())
+        {
+            avcodec_flush_buffers(m_pCodecContext);
+            m_flushDecoder.store(false);
+        }
+        /******** 读取数据包 av_read_frame ********/
+        int retRead = av_read_frame(m_pFormatContext, m_packet);
+        if(retRead == AVERROR_EOF)
+        {
+            /* 读取到末尾后,需要传入一个空的packet,才能读取到最后几帧 */
+            avcodec_send_packet(m_pCodecContext, m_packet);
+        }
+        else if(retRead < 0)
+        {
+            SPDLOG_ERROR("读取帧错误...");
+            break;
+        }
+        else
+        {
+            if(m_packet->stream_index == m_videoStream)
+            {
+                // SPDLOG_DEBUG("源pts:{}", m_packet->pts);
+                /*  pts 表示显示时间戳(Presentation Timestamp),它指示解码后的帧应该在什么时候显示。pts 是用于同步音视频的关键时间戳。
+                    dts 表示解码时间戳(Decoding Timestamp),它指示解码器应该在什么时候解码这个数据包。dts 用于确保解码器按照正确的顺序解码数据包。 */
+            #if 1
+                /* 计算当前帧时间,两种方法,第一种适用于所有场景,但是存在一定的误差 */
+                m_packet->pts = qRound64(m_packet->pts * (1000 * rationalToDouble(&m_pFormatContext->streams[m_videoStream]->time_base)));
+                m_packet->dts = qRound64(m_packet->dts * (1000 * rationalToDouble(&m_pFormatContext->streams[m_videoStream]->time_base)));
+            #else
+                /* 适用于本地视频,直接计算本地视频文件的每一帧时间 */
+                m_currentFrame++;
+                // m_packet->pts = qRound64(m_currentFrame * (qreal(m_duration) / m_totalFrame));
+            #endif
+                
+                /* 将数据传给解码器 */
+                int ret = avcodec_send_packet(m_pCodecContext, m_packet);
+                if(ret < 0)
+                {
+                    SPDLOG_ERROR("发送数据包错误...");
+                    av_packet_unref(m_packet);
+                    continue;
+                }
+            }else {
+                // SPDLOG_INFO("不是视频流。");
+                av_packet_unref(m_packet);
+                continue;
+            }
+        }
+
+        // SPDLOG_DEBUG("读取到数据包packet,pts:{}",m_packet->pts);
+        /* 解码packet包的内容,一个packet包内可能包含好多帧视频 */
+        while(m_threadRuning)
+        {
+            /* 读取出解码器返回的帧 avcodec_receive_frame */
+            int ret = avcodec_receive_frame(m_pCodecContext, m_pFrameSRC);
+            if(ret == AVERROR_EOF)
+            {
+                SPDLOG_INFO("读取到视频流末尾。");
+                isEnd = true;
+                break;
+            }
+            else if (ret == AVERROR(EAGAIN))
+            {
+                /* The packet did not decode into a complete frame and the decoder needs more input;
+                 * a few frames may already have been produced, or there may not yet be enough data for one.
+                 * Either way, more packets must be sent to the decoder. */
+                // SPDLOG_WARN("packet无法解码成一帧,需要更多的输入包");
+                av_frame_unref(m_pFrameSRC);
+                break;
+            }
+            else if(ret < 0)
+            {
+                av_frame_unref(m_pFrameSRC);
+                if(retRead < 0)
+                {
+                    SPDLOG_ERROR("读取错误,错误值:{}",ret);
+                    break;
+                }
+            }
+            
+            /* 硬件解码,上面读取到的帧m_pFrameSRC->data[0] = nullptr
+             * 数据要从GPU中拷贝出来 */
+            if(!copyDataFromGPU(m_pFrameHW, m_pFrameSRC))
+            {
+                continue;
+            }
+
+            /* 解码成功,获取当前时间,现在已经是准的时间了
+             * 如果在跳转状态,在这里判断是否到了目标位置 */
+            m_pts = m_pFrameHW->pts;
+            if(m_isSeek.load())
+            {
+                if(m_pts < m_targetPos)
+                {
+                    SPDLOG_DEBUG("目标位置:{} 当前位置:{}",m_targetPos, m_pts.load());
+                    av_frame_unref(m_pFrameHW);
+                    continue;
+                }else {
+                    m_isSeek = false;
+                    m_targetPos = -1;
+                    SPDLOG_INFO("跳转结束。");
+                }
+            }
+            // SPDLOG_DEBUG("当前帧的pts:{}", m_pts.load());
+
+            /* 转换解码后的帧格式,转换成RGBA格式,Qt可以识别 */
+            if(m_sws_ctx == nullptr)
+            {
+                /* 选择在这里创建,为了兼容硬件解码,硬件解码出来的格式可能和解码器传出的不一样 */
+                m_sws_ctx = sws_getCachedContext(m_sws_ctx, 
+                                                m_pFrameHW->width, m_pFrameHW->height,    /* 原图像大小和格式 */
+                                                m_pCodecContext->pix_fmt,                   /* 输入图像的像素格式 */
+                                                m_srcSize.width(), m_srcSize.height(),      /* 目标图像的大小 */
+                                                AV_PIX_FMT_RGBA,                            /* 目标图像的格式 */
+                                                SWS_BILINEAR,                               /* 图像缩放算法,双线性 */
+                                                nullptr,                                    /* 输入图像的滤波器信息,不需要传NULL */
+                                                nullptr,                                    /* 输出图像的滤波器信息,不需要传NULL */
+                                                nullptr);                                   /* 特定缩放算法需要的参数,不需要传NULL */
+                if(m_sws_ctx == nullptr)
+                {
+                    SPDLOG_ERROR("创建SwsContext错误...");
+                    goto label_ThreadDecodeExit;
+                }
+                SPDLOG_INFO("创建SwsContext成功...");
+            }
+            /* Convert to RGBA */
+            // uint8_t* data[1] = { m_buffer };
+            int lines[4];
+            /* Fill in the per-plane line sizes (bytes per row) for the given pixel format and width;
+             * this has to describe the destination image that sws_scale writes into. */
+            av_image_fill_linesizes(lines, AV_PIX_FMT_RGBA, m_srcSize.width());
+
+            sws_scale(  m_sws_ctx,              /* scaling context */
+                        m_pFrameHW->data,       /* source image planes */
+                        m_pFrameHW->linesize,   /* strides of the source planes */
+                        0,                      /* first source row to process */
+                        m_pFrameHW->height,     /* number of source rows */
+                        &m_buffer,              /* destination image planes */
+                        lines);                 /* strides of the destination planes (not a row count) */
+            if(m_buffer != nullptr)
+            {
+                /* 将数据拷贝到QImage中 */
+                auto image = new QImage(m_buffer, m_srcSize.width(), m_srcSize.height(), QImage::Format_RGBA8888);
+                /* 如果队列满,线程会阻塞在这里 */
+                m_queueImage.push(image);
+                // av_frame_unref(m_pFrameRGB);
+                // SPDLOG_DEBUG("一帧视频入队");
+            }
+            av_frame_unref(m_pFrameSRC);
+            /* 如果在跳转过程中,直接退出,防止一个packet中有多个视频帧,再次阻塞在上面 */
+            if(m_isSeek)
+            {
+                break;
+            }
+        }
+        av_packet_unref(m_packet);    /* 释放数据包,引用计数-1,为0时释放空间 */
+        if(isEnd)
+        {
+            emit signal_playCompleted();
+            m_decodeState.store(DecodeState::DecodeStop);
+            /* 读取到结尾,但是不退出解码线程,可能还会使用倒退功能,后退读取 */
+            while(m_decodeState.load() != DecodeState::DecodeRun)
+            {
+                std::this_thread::sleep_for(std::chrono::milliseconds(2));
+            }
+            isEnd = false;
+        }
+    }
+label_ThreadDecodeExit:
+    /* 释放空间 */
+    av_packet_free(&m_packet);
+    m_decodeState.store(DecodeState::DecodeExit);
+    m_threadRuning = false;
+}
+
+/* 暂停解码,会阻塞到线程暂停为止 */
+void DecodeVedio::pauseDecode()
+{
+    if(!m_threadRuning)
+    {
+        return;
+    }
+    if( (m_decodeState.load() == DecodeState::DecodeExit) 
+     || (m_decodeState.load() == DecodeState::DecodePause) )
+    {
+        return;
+    }
+    /* 设置成运行状态,唤醒可能阻塞在了解码结束的位置 */
+    m_decodeState.store(DecodeState::DecodeRun);
+    m_pauseDecode = true;
+    /* Pop up to two images off the queue so the decode thread cannot stay blocked on a full ring queue */
+    for(int i = 0; i < 2; i++)
+    {
+        QImage* image = nullptr;
+        m_queueImage.front_pop_NoBlock(image);
+        if(image != nullptr)
+        {
+            delete image;
+        }
+    }
+    /* 等待线程状态变为暂停为止 */
+    while (m_decodeState.load() != DecodeState::DecodePause)
+    {
+        std::this_thread::sleep_for(std::chrono::microseconds(100));
+    }
+}
+
+/* 继续解码 */
+void DecodeVedio::continueDecode()
+{
+    m_pauseDecode = false;
+    m_decodeState.store(DecodeState::DecodeRun);
+}
+
+/**
+ * @brief AVRational存储的是分子和分母,这里相除转换为double,用于计算帧率
+ *        这个函数就等同于av_q2d()
+ * @param rational
+ * @return
+ */
+qreal DecodeVedio::rationalToDouble(AVRational* rational)
+{
+    return ( (rational->den == 0) ? 0 : (qreal(rational->num) / rational->den) );
+}
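+/*
+ * Example (hypothetical values): a 25 fps stream reports AVRational{25, 1},
+ * so rationalToDouble(&r) returns 25.0, the same result av_q2d(r) would give.
+ */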
+
+
+/* 开启解码 */
+void DecodeVedio::do_startDecodeVedio()
+{
+    SPDLOG_DEBUG("解码线程ID:{}",QThread::currentThreadId());
+    // if(!m_initFFmpeg)
+    // {
+    //     initFFmpeg();
+    // }
+    m_threadRuning = true;
+    m_pauseDecode = false;
+    /* 进入解码,直到播放完成或者手动退出 */
+    threadDecodeUsingCPU();
+    SPDLOG_TRACE("Decode解码结束。");
+
+}
+
+/* 释放所有资源 */
+void DecodeVedio::freeAll()
+{
+    if(m_sws_ctx)
+    {
+        sws_freeContext(m_sws_ctx);
+        m_sws_ctx = nullptr;
+    }
+    // if(m_pFrameRGB)
+    // {
+    //     av_frame_free(&m_pFrameRGB);
+    // }
+    if(m_pFrameSRC)
+    {
+        av_frame_free(&m_pFrameSRC);
+    }
+    if(m_pFrameHW)
+    {
+        av_frame_free(&m_pFrameHW);
+    }
+    if(m_packet)
+    {
+        av_packet_free(&m_packet);
+    }
+    if(m_pCodecContext)
+    {
+        avcodec_free_context(&m_pCodecContext);
+    }
+    if(m_pFormatContext)
+    {
+        avformat_close_input(&m_pFormatContext);
+    }
+    if(m_buffer)
+    {
+        av_free(m_buffer);
+        m_buffer = nullptr;
+    }
+    if(m_hw_device_ctx)
+    {
+        av_buffer_unref(&m_hw_device_ctx);
+    }
+
+    for(int i = 0; i < m_queueImage.QueueSize(); i++)
+    {
+        QImage* image = nullptr;
+        if (m_queueImage.front_pop_NoBlock(image))
+        {
+            delete image;
+        }
+    }
+    m_queueImage.clearQueue();
+}
+

+ 165 - 0
demo/VideoPlayerGL/VideoPlayer/DecodeVedio.h

@@ -0,0 +1,165 @@
+#ifndef DECODEVEDIO_H
+#define DECODEVEDIO_H
+
+#include <QObject>
+#include <QQueue>
+#include <QTimer>
+#include <QMutex>
+#include <QWaitCondition>
+#include <QImage>
+
+#include "RingQueue/RingQueue.hpp"
+
+// #include "threadcontroller.h"
+
+extern "C"
+{
+// #include <libavcodec/avcodec.h>
+#include <libavformat/avformat.h>
+// #include <libswscale/swscale.h>
+// #include <libavutil/imgutils.h>
+}
+
+
+
+/**
+ * Usage:
+ *      1. Open the video file (this also initialises FFmpeg and the decoder): openVedio()
+ *      2. Start the decode thread: startDecodeVedio()
+ *      3. Fetch decoded frames one at a time: getOneImage()
+ *      4. Stop the decode thread: stopDecodeVedio()
+ *      5. Between step 1 and step 2 the video's width/height information can be read and,
+ *         if needed, set.
+ *
+ */
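+/*
+ * Minimal usage sketch (an assumption about the call order, based on the steps above and on how
+ * VideoPlayer uses this class in this commit; the file name is a placeholder):
+ *
+ *     QThread* thread = new QThread;
+ *     DecodeVedio* decoder = new DecodeVedio(thread);
+ *     decoder->openVedio("test.mp4");                      // open the file, probe size/fps/duration
+ *     QSize size = decoder->getSrcVideoSize();             // readable between open and start
+ *     decoder->startDecodeVedio();                         // start the decode thread
+ *     QImage* frame = decoder->getOneImageUntilHave(100);  // block up to 100 ms for one frame
+ *     // ... display the frame, delete it when done ...
+ *     decoder->stopDecodeVedio();                          // stop the decode thread
+ */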
+class DecodeVedio : public QObject
+{
+    Q_OBJECT
+
+enum class DecodeState
+{
+    NONE = 0,
+    DecodeRun,          /* 解码运行中 */
+    DecodePause,        /* 暂停解码 */
+    DecodeSeek,         /* 跳转中 */
+    DecodeStop,         /* 停止解码,但是并没有退出解码线程 */
+    DecodeExit          /* 退出解码 */
+};
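+/* State transitions visible in this commit's code: the decode loop parks itself in DecodeStop when the
+ * stream ends and resumes once the state is set back to DecodeRun; pauseDecode() waits until the loop
+ * reports DecodePause, continueDecode() switches back to DecodeRun, and the thread sets DecodeExit on exit. */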
+
+public:
+    explicit DecodeVedio(QThread* thread, QObject* parent = nullptr);
+    ~DecodeVedio();
+
+    /* Open the video and initialise the decoder at the same time */
+    void openVedio(const QString& fileName);
+    
+    /* 开始解码视频,开始前先打开视频文件 */
+    void startDecodeVedio();
+    /* 停止解码视频,也是停止线程 */
+    void stopDecodeVedio();
+    /* 获取解码状态 */
+    bool isDecoding() { return m_threadRuning; }
+
+    /* 设置当前播放位置,单位ms */
+    void setCurrentPos(qint64 pos);
+    /* 获取当前播放位置,单位ms */
+    qint64 getCurrentPos();
+    /* 获取视频时长 */
+    qint64 getDuration();
+    qint64 getTotalFrame() { return m_totalFrame; }
+    
+
+    /* 获取一帧图像 */
+    QImage* getOneImage();
+    /* 获取一帧图像,直到有图像为止,可以设置超时时间 */
+    QImage* getOneImageUntilHave(int timeOut = -1);
+
+    /* 获取帧数 */
+    int getFPS() const { return m_fps; }
+    /* 设置帧数 */
+    void setFPS(int fps) { m_fps = fps; }
+    /* Get the source video resolution (width and height) */
+    QSize getSrcVideoSize() const {return m_srcSize; }
+    /* 获取解码器名称(编码格式) */
+    QString getDecoderName() const { return m_decoderName; }
+    /* 获取硬件解码器名称列表 */
+    QStringList getHWDecoderList() const { return m_listDecoderName; }
+    /* 查找硬件解码器 */
+    static void findHWDecoder(QStringList& listDecoderName);
+
+signals:
+    void signal_oneImage();                         /* 一帧图像信号 */
+    void signal_playCompleted();                    /* 播放完成信号 */
+    void signal_startDecode();                      /* 开始解码信号 */
+private:
+    /* 查找硬件解码器 */
+    void findHWDecoder();
+    /* 初始化硬件解码器 */
+    void initHWDecoder(const AVCodec* codec);
+    /* 拷贝数据,从GPU显存拷贝到内存中 */
+    bool copyDataFromGPU(AVFrame* pFrameHW, AVFrame* pFrameSRC);
+                
+    /* 软解码线程 */       
+    void threadDecodeUsingCPU();     
+    /* 硬件解码线程 */
+    void threadDecodeUsingGPU();
+    /* 退出线程 */      
+    void exitThread();
+    /* 暂停解码 */
+    void pauseDecode();
+    /* 继续解码 */
+    void continueDecode();
+    /* 将AVRational转换为double */
+    qreal rationalToDouble(AVRational* rational);
+    /* 释放所有资源 */
+    void freeAll();
+
+private slots:
+    void do_startDecodeVedio();                     /* 开启解码 */
+
+private:
+    QThread* m_thread = nullptr;                    /* 解码线程 */
+    /* 线程状态 */
+    std::atomic_bool m_threadRuning = false;        /* flag: the decode thread is running */
+    std::atomic_bool m_initFFmpeg = false;          /* ffmpeg初始化标志 */
+    std::atomic_bool m_pauseDecode = false;         /* 暂停解码 */
+    std::atomic_bool m_decodeStatus = false;        /* 解码状态,这里主要是检测是否暂停解码 */
+    std::atomic_bool m_isSeek = false;              /* 是否在跳转中 */
+    std::atomic_bool m_flushDecoder = false;        /* 刷新解码器 */
+    std::atomic<DecodeState> m_decodeState = DecodeState::NONE;
+    /* 视频解码相关变量信息 */
+    QString m_fileName;                             /* 解码的视频文件名称 */
+    AVFormatContext *m_pFormatContext = nullptr;    /* 格式上下文,贯穿全局 */
+    AVCodecContext *m_pCodecContext = nullptr;      /* 解码器上下文 */
+    AVPacket* m_packet = nullptr;                   /* 存储解码前的数据,一个数据包 */
+    AVFrame* m_pFrameSRC = nullptr;                 /* one decoded frame in the decoder's native output format */
+    AVFrame* m_pFrameHW = nullptr;                  /* 存储解码后的一帧数据,硬件解码 */
+    struct SwsContext *m_sws_ctx = nullptr;         /* 视频转换上下文 */
+    uint8_t *m_buffer = nullptr;                    /* 存储解码后的一帧数据,RGB格式 */
+    int m_videoStream = -1;                         /* 记录视频流是第几个流 */
+    
+    bool m_supportHWDecoder = false;                /* 是否使用硬件解码 */
+    AVBufferRef* m_hw_device_ctx = nullptr;         /* 对数据缓冲区的引用 */
+    QList<int> m_listHWDeviceType;                  /* 保存当前环境支持的硬件解码器 */
+    QStringList m_listDecoderName;                  /* 硬件解码器列表名称 */
+
+    /* 视频相关信息 */
+    QSize m_srcSize;                                /* 原始视频分辨率大小 */
+    qint64 m_totalFrame = 0;                        /* 视频总帧数 */
+    int m_fps = 0;                                  /* 每秒的帧数 */
+    qint64 m_duration = 0;                          /* 视频时长,单位毫秒 */
+    qint64 m_startPos = 0;                          /* 开始播放的位置,摄像机视频的位置不是从0开始的,需要在初始化的时候取出这个值 */
+    std::atomic<qint64> m_pts = 0;                  /* 当前帧显示时间,也就是当前的进度时间 */
+    
+    qint64 m_targetPos = -1;                        /* 跳转的目标播放位置 */
+    qint64 m_currentFrame = 0;                      /* 当前已播放的帧数 */
+    QString m_decoderName;                          /* 解码器名称 */
+
+    RingQueue<QImage*> m_queueImage;                /* 环形队列,存储生成的图像 */
+    
+};
+
+
+
+
+
+#endif /* DECODEVEDIO_H */

+ 618 - 0
demo/VideoPlayerGL/VideoPlayer/VideoPlayer.cpp

@@ -0,0 +1,618 @@
+#include "VideoPlayer.h"
+
+#include "DecodeVedio.h"
+
+#include <QPainter>
+#include <QResizeEvent>
+#include <QEventLoop>
+#include <QVBoxLayout>
+
+#include "spdlog/spdlog.h"
+#include "FmtLog/fmtlog.h"
+
+
+VideoPlayer::VideoPlayer(QWidget *parent) : QWidget(parent)
+{
+    // /* 初始化解码线程 */
+    // m_threadDecode = new QThread(this);
+    // m_decodeVedio = new DecodeVedio(m_threadDecode);
+
+    m_previewImage = 2;
+    m_fps = 0;
+
+    m_semRefresh = new QSemaphore(0);
+
+    m_timerRefreshUI.setSingleShot(false);
+    /* 设置精度毫秒级 */
+    m_timerRefreshUI.setTimerType(Qt::PreciseTimer);
+    connect(&m_timerRefreshUI, &QTimer::timeout, this, &VideoPlayer::do_refreshUI);
+    connect(this, &VideoPlayer::signal_refreshImage, this, &VideoPlayer::do_refreshSamImage);
+
+    SPDLOG_TRACE("播放器线程ID:{}", QThread::currentThreadId());
+    QStringList listDecoder;
+    DecodeVedio::findHWDecoder(listDecoder);
+    if(listDecoder.isEmpty())
+    {
+        SPDLOG_WARN("没有找到硬件解码器");
+    }else {
+        SPDLOG_DEBUG("支持的硬件解码器:");
+        for(auto it : listDecoder)
+        {
+            SPDLOG_DEBUG("{}", it.toStdString());
+        }
+    }
+}
+
+VideoPlayer::~VideoPlayer()
+{
+    if(m_timerRefreshUI.isActive())
+    {
+        m_timerRefreshUI.stop();
+    }
+    delete m_decodeVedio;
+    if(m_image)
+    {
+        delete m_image;
+    }
+}
+
+/**
+ * @brief 设置播放视频,启动定时器,定时器间隔决定播放的速度
+ *        视频的宽和高使用QImage进行缩放
+ *        视频大小在直接设置这个类的resize即可,有最小大小限制
+ * 
+ * @param fileName 
+ */
+void VideoPlayer::openPlayVedio(const QString& fileName)
+{
+    if(m_isOpenFile)
+    {
+        m_isOpenFile = false;
+        stop();
+    }
+    if(m_decodeVedio == nullptr)
+    {
+        /* 初始化解码线程 */
+        m_threadDecode = new QThread(this);
+        m_decodeVedio = new DecodeVedio(m_threadDecode);
+        connect(m_decodeVedio, &DecodeVedio::signal_playCompleted, this, &VideoPlayer::do_playCompleted);
+    }
+    if(m_decodeVedio->isDecoding())
+    {
+        m_decodeVedio->stopDecodeVedio();
+    }
+    if(fileName.isEmpty())
+    {
+        SPDLOG_WARN("文件名为空");
+        return;
+    }
+    m_fileName = fileName;
+    m_isOpenFile = true;
+    m_isLocalFile = isLocalFile(fileName);
+
+    m_decodeVedio->openVedio(fileName);
+    /* 获取原始视频信息 */
+    m_srcWidth = m_decodeVedio->getSrcVideoSize().width();
+    m_srcHeight = m_decodeVedio->getSrcVideoSize().height();
+    m_fps = m_decodeVedio->getFPS();
+    m_duration = m_decodeVedio->getDuration();
+    auto totalFrame = m_decodeVedio->getTotalFrame();
+    SPDLOG_INFO("视频编码格式:{}", m_decodeVedio->getDecoderName().toStdString());
+    int hh = m_duration / 3600000;
+    int mm = (m_duration % 3600000) / 60000;
+    int ss = (m_duration % 60000) / 1000;
+    int ms = m_duration % 1000;
+    SPDLOG_INFO("视频分辨率:{}x{} 帧率:{} 总帧数:{}", m_srcWidth, m_srcHeight, m_fps, totalFrame);
+    SPDLOG_INFO("时长:{}h:{}m:{}.{}s 总时长:{}ms", hh, mm, ss, ms, m_duration);
+
+    /* 设置视频宽和高的最小大小 */
+    this->setMinimumSize(160,90);
+    /* 开启定时器刷新 */
+    if(m_fps <= 0)
+    {
+        m_fps = 25;
+    }
+
+    /* 开启解码,手动刷新第一帧 */
+    m_decodeVedio->startDecodeVedio();
+    m_semRefresh->release(2);
+    emit signal_refreshImage();
+}
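+/*
+ * Minimal usage sketch for VideoPlayer (hypothetical call order; "parentWidget" and the file path
+ * are placeholders, not code from this commit):
+ *
+ *     VideoPlayer* player = new VideoPlayer;
+ *     player->setPlayWidget(parentWidget, true);    // let the player fill parentWidget via a layout
+ *     player->openPlayVedio("/path/to/video.mp4");  // open the file and show the first preview frames
+ *     player->play();                               // start the UI refresh timer at the video's fps
+ *     player->setCurrentPos(60 * 1000);             // seek to 1 minute (milliseconds)
+ *     player->pause();                              // pause (local files only)
+ *     player->stop();                               // stop, tear down the decoder, draw a black frame
+ */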
+
+/* 播放视频 */
+bool VideoPlayer::play()
+{
+    if(!m_isOpenFile)
+    {
+        SPDLOG_ERROR("未打开视频文件!");
+        return false;
+    }
+    if(m_playStatus)
+    {
+        return false;
+    }
+    
+    /* 设置刷新时间 */
+    m_timerRefreshUI.setSingleShot(false);
+    m_interval = qRound64(1000.0 / m_fps);
+    SPDLOG_DEBUG("刷新UI的定时间隔:{}",m_interval);
+    m_timerRefreshUI.start(m_interval);
+    m_playStatus = true;
+    
+    return true;
+}
+
+
+/* 暂停播放 */
+void VideoPlayer::pause()
+{
+    if(!m_isOpenFile)
+    {
+        SPDLOG_ERROR("未打开视频文件!");
+        return;
+    }
+    if(!m_isLocalFile)
+    {
+        SPDLOG_ERROR("不是本地视频文件,无法暂停!");
+        return;
+    }
+    if(!m_playStatus)
+    {
+        return;
+    }
+    m_timerRefreshUI.stop();
+    m_playStatus = false;
+}
+
+/* 停止播放,停止后停止解码,将时间等复位到开始时间 */
+void VideoPlayer::stop()
+{
+    if(!m_isOpenFile)
+    {
+        SPDLOG_ERROR("未打开视频文件!");
+        return;
+    }
+    SPDLOG_DEBUG("...停止播放...");
+    m_fileName = QString();
+    if(m_timerRefreshUI.isActive())
+    {
+        m_timerRefreshUI.stop();
+    }
+    // SPDLOG_DEBUG("...停止解码...");
+    /* 删除解码器 */
+    delete m_decodeVedio;
+    m_decodeVedio = nullptr;
+    delete m_threadDecode;
+    m_threadDecode = nullptr;
+    
+    m_playStatus = false;
+    m_isOpenFile = false;
+    /* Draw a black frame; release the previous image first so it is not leaked */
+    SPDLOG_DEBUG("绘制黑帧");
+    if(m_image != nullptr)
+    {
+        delete m_image;
+        m_image = nullptr;
+    }
+    m_image = new QImage(m_nowWidth, m_nowHeight, QImage::Format_RGB32);
+    m_image->fill(Qt::black);
+    update();
+    
+}
+
+/* 后退,单位ms */
+void VideoPlayer::backward(qint64 ms)
+{
+    if(!m_isOpenFile)
+    {
+        SPDLOG_ERROR("未打开视频文件!");
+        return;
+    }
+    if(!m_isLocalFile)
+    {
+        SPDLOG_ERROR("不是本地视频文件,无法后退!");
+        return;
+    }
+    /* 获取当前位置 */
+    qint64 pos = m_decodeVedio->getCurrentPos();
+    pos = pos - ms;
+    if(pos < 0)
+    {
+        pos = 0;
+    }
+
+    setCurrentPos(pos);
+
+}
+
+/* 前进,单位ms */
+void VideoPlayer::forward(qint64 ms)
+{
+    if(!m_isOpenFile)
+    {
+        SPDLOG_ERROR("未打开视频文件!");
+        return;
+    }
+    if(!m_isLocalFile)
+    {
+        SPDLOG_ERROR("不是本地视频文件,无法前进!");
+        return;
+    }
+    /* 获取当前位置 */
+    qint64 pos = m_decodeVedio->getCurrentPos();
+    SPDLOG_DEBUG("pos:{} ms:{}", pos, ms);
+    pos = pos + ms;
+    
+    setCurrentPos(pos);
+
+}
+
+/* 获取视频时长 */
+qint64 VideoPlayer::getDuration()
+{
+    if(!m_isOpenFile)
+    {
+        SPDLOG_ERROR("未打开视频文件!");
+        return -1;
+    }
+    auto duration = m_decodeVedio->getDuration();
+    if(duration <= 0)
+    {
+        return 0;
+    }
+    return duration;
+}
+
+/* 获取当前播放位置 */
+qint64 VideoPlayer::getCurrentPos()
+{
+    if(!m_isOpenFile)
+    {
+        SPDLOG_ERROR("未打开视频文件!");
+        return -1;
+    }
+    auto pos = m_decodeVedio->getCurrentPos();
+    if(pos < 0)
+    {
+        return 0;
+    }
+    return pos;
+}
+
+/* 设置当前播放位置,单位ms */
+void VideoPlayer::setCurrentPos(qint64 pos)
+{
+    if(!m_isOpenFile)
+    {
+        SPDLOG_ERROR("未打开视频文件!");
+        return;
+    }
+    if(!m_isLocalFile)
+    {
+        SPDLOG_ERROR("不是本地视频文件,无法设置播放位置!");
+        return;
+    }
+    if(pos < 0)
+    {
+        pos = 0;
+    }
+    /* 先停止播放 */
+    bool temp = m_playStatus;
+    if(m_playStatus)
+    {
+        m_timerRefreshUI.stop();
+        m_playStatus = false;
+    }
+    m_decodeVedio->setCurrentPos(pos);
+    /* 继续播放 */
+    if(temp)
+    {
+        // SPDLOG_INFO("..........开启定时器..........");
+        m_timerRefreshUI.start(m_interval);
+        m_playStatus = true;
+    }else
+    {
+        /* Refresh a couple of frames (m_previewImage of them) so the new position is shown while paused */
+        m_semRefresh->release(m_previewImage);
+        emit signal_refreshImage();
+    }
+}
+
+/* 设置播放视频大小 */
+void VideoPlayer::setPlayWidgetSize(int width,int height)
+{
+    /* Scale the width and height while keeping the aspect ratio, and centre the result.
+     * First compute the requested ratio and compare it with the source video's ratio (not a fixed 16/9):
+     * if the request is wider than the source, the height is the limit, so derive the width and x offset;
+     * if it is narrower, the width is the limit, so derive the height and y offset. */
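+    /* Worked example with hypothetical numbers: source 1920x1080 (srcRatio ≈ 1.78), requested size
+     * 1000x700 (ratio ≈ 1.43 < 1.78), so the width is the limit: h1 = 1000 / 1.78 ≈ 562,
+     * y1 = (700 - 562) / 2 = 69, w1 = 1000, x1 = srcX. */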
+    double srcRatio = m_srcWidth*1.0 / m_srcHeight;
+    double ratio = width*1.0 / height;
+    long w1 = 0, h1 = 0;
+    int srcX = this->pos().rx(), srcY = this->pos().ry();
+    int x1 = srcX, y1 = srcY;
+    if(ratio > srcRatio)
+    {
+        w1 = height * srcRatio;
+        x1 = (width - w1) / 2;
+        h1 = height;
+        y1 = srcY;
+    }
+    else if(ratio < srcRatio)
+    {
+        h1 = width / srcRatio;
+        y1 = (height - h1) / 2;
+        w1 = width;
+        x1 = srcX;
+    }else {
+        w1 = width;
+        h1 = height;
+        x1 = srcX;
+        y1 = srcY;
+    }
+    this->move(x1, y1);
+    
+    m_nowWidth = w1;
+    m_nowHeight = h1;
+    this->resize(w1, h1);
+    // SPDLOG_DEBUG("设置窗口位置:{}x{}, 大小:{}x{}, 传入大小:{}x{}", x1, y1, w1, h1, width, height);
+    SPDLOG_DEBUG("现在位置和大小:{}x{}, {}x{}", this->pos().rx(), this->pos().ry(), this->width(), this->height());
+}
+
+/**
+ * @brief Set the widget the player is displayed in. When the player takes the widget over exclusively,
+ *        a layout is installed automatically so the player follows the size of the outer widget.
+ *
+ * @param widget
+ * @param flag
+ *  @arg true:  take the widget over exclusively and install a layout; the player resizes with the widget
+ *  @arg false: do not take it over; the player is only reparented and sized once
+ */
+void VideoPlayer::setPlayWidget(QWidget* widget, bool flag)
+{
+    if(widget == nullptr)
+    {
+        SPDLOG_WARN("传入的widget为空");
+        return;
+    }
+    if(flag)
+    {
+        /* 设置布局 */
+        QVBoxLayout* layout = new QVBoxLayout(widget);
+        layout->addWidget(this);
+        layout->setMargin(0);
+        layout->setSpacing(0);
+        widget->setLayout(layout);
+    }else 
+    {
+        this->setParent(widget);
+        /* 设置窗口大小 */
+        setPlayWidgetSize(widget->width(), widget->height());
+    }
+    
+}
+
+
+/**
+ * @brief 设置预览图片数目,在暂停时跳转,可能会有花屏或者黑帧,可以设置跳转图片个数跳过黑帧
+ *        默认是2帧
+ * 
+ * @param num 
+ */
+void VideoPlayer::setPreviewImage(int num)
+{
+    m_previewImage = num;
+}
+
+/**
+ * @brief 设置帧率,有些视频无法获取到帧率,就会使用默认的25fps,如果需要,可以通过这个函数设置
+ *        注意:这个函数需要在打开视频文件之后设置,打开一次视频文件会覆盖这个参数
+ * 
+ * @param fps 
+ */
+void VideoPlayer::setFPS(int fps)
+{
+    m_fps = fps;
+    if(m_decodeVedio != nullptr)
+    {
+        m_decodeVedio->setFPS(fps);
+    }
+    if(m_timerRefreshUI.isActive())
+    {
+        m_timerRefreshUI.stop();
+        m_interval = qRound64(1000.0 / m_fps);
+        m_timerRefreshUI.start(m_interval);
+    }
+}
+
+
+/* 设置播放回调函数 */
+// void VideoPlayer::setPlayCallBack(std::function<Play_CallBack> playCallBack,void* context)
+// {
+//     m_funcPlayCB = playCallBack;
+//     m_context = context;
+// }
+
+
+void VideoPlayer::paintEvent(QPaintEvent *event)
+{
+    if(m_image != nullptr)
+    {
+        // SPDLOG_TRACE("开始绘制画面...");
+        /* 对图像进行缩放 */
+        if(m_srcWidth != m_nowWidth || m_srcHeight != m_nowHeight)
+        {
+            *m_image = m_image->scaled(m_nowWidth, m_nowHeight, Qt::KeepAspectRatio, Qt::SmoothTransformation);
+        }
+        QPainter painter(this);
+        painter.drawImage(0, 0, *m_image);
+    }
+}
+
+void VideoPlayer::resizeEvent(QResizeEvent *event)
+{
+    SPDLOG_TRACE("窗口大小改变...");
+    m_nowWidth = event->size().width();
+    m_nowHeight = event->size().height();
+
+    QWidget::resizeEvent(event);
+}
+
+/* 刷新一张图片,直到有图片为止 */
+void VideoPlayer::refreshOneUIUntilHave()
+{
+    if(m_decodeVedio != nullptr)
+    {
+        // SPDLOG_DEBUG("取出一帧图片...");
+        /* 删除上一帧图片 */
+        if(m_image != nullptr)
+        {
+            delete m_image;
+            m_image = nullptr;
+        }
+        /* 如果没有图片,这个函数会阻塞 */
+        m_image = m_decodeVedio->getOneImageUntilHave();
+        
+        if(m_image)
+        {
+            if(m_image->isNull())
+            {
+                SPDLOG_WARN("取出的图片为空...");
+                return;
+            }
+            // SPDLOG_DEBUG("绘制画面...");
+            update();
+        }
+    }
+}
+
+
+/* 双击事件函数 */
+void VideoPlayer::mouseDoubleClickEvent(QMouseEvent *event)
+{
+    if(event->button() == Qt::LeftButton)
+    {
+        // SPDLOG_DEBUG("双击事件...");
+        // if(m_funcPlayCB != nullptr)
+        // {
+        //     m_funcPlayCB(this, 5, nullptr, 0, m_context);
+        // }else {
+        //     SPDLOG_INFO("没有设置回调函数");
+        // }
+    }
+}
+
+/* 取出画面,刷新UI */
+void VideoPlayer::do_refreshUI()
+{
+    if(m_decodeVedio != nullptr)
+    {
+        // SPDLOG_DEBUG("取出一帧图片...");
+        /* 删除上一帧图片 */
+        if(m_image != nullptr)
+        {
+            delete m_image;
+            m_image = nullptr;
+        }
+        m_image = m_decodeVedio->getOneImage();
+        
+        if(m_image)
+        {
+            if(m_image->isNull())
+            {
+                SPDLOG_WARN("取出的图片为空...");
+                return;
+            }
+            // SPDLOG_DEBUG("绘制画面...");
+            update();
+        }
+        // m_decodeVedio->wakeUpCondQueueNoEmpty();
+    }
+}
+
+
+/* Refresh the preview image(s) via the signal; drains all pending refresh requests */
+void VideoPlayer::do_refreshSamImage()
+{
+    if(!m_isOpenFile)
+    {
+        return;
+    }
+    while(m_semRefresh->tryAcquire(1))
+    {
+        /* 取出第一张 */
+        if(m_decodeVedio != nullptr)
+        {
+            // SPDLOG_DEBUG("取出一帧图片...");
+            /* 删除上一帧图片 */
+            if(m_image != nullptr)
+            {
+                delete m_image;
+                m_image = nullptr;
+            }
+            /* Wait for an image, at most 100 ms */
+            m_image = m_decodeVedio->getOneImageUntilHave(100);
+            if(m_image)
+            {
+                if(m_image->isNull())
+                {
+                    SPDLOG_WARN("取出的图片为空...");
+                    return;
+                }
+                SPDLOG_DEBUG("绘制预览画面。");
+                update();
+            }
+        }
+    }
+}
+
+/* 播放完成 */
+void VideoPlayer::do_playCompleted()
+{
+    SPDLOG_INFO("视频播放完成。");
+    m_timerRefreshUI.stop();
+    /* 手动刷新剩余的环形队列中的图片 */
+    while(true)
+    {
+        if(m_decodeVedio != nullptr)
+        {
+            QImage* image = nullptr;
+            image = m_decodeVedio->getOneImage();
+            if(image == nullptr)
+            {
+                break;
+            }
+            /* 删除上一帧图片 */
+            if(m_image != nullptr)
+            {
+                delete m_image;
+                m_image = nullptr;
+            }
+            m_image = image;
+            
+            if(m_image->isNull())
+            {
+                SPDLOG_WARN("取出的图片为空...");
+                return;
+            }
+            // SPDLOG_DEBUG("绘制画面...");
+            update();
+        }
+    }
+    m_playStatus = false;
+    // if(m_funcPlayCB != nullptr)
+    // {
+    //     /* 播放完成的回调函数 */
+    //     m_funcPlayCB(this, 2, nullptr, 0, m_context);
+    // }
+}
+
+/* 判断是否是本地文件 */
+bool VideoPlayer::isLocalFile(const QString& fileName)
+{
+    if(fileName.isEmpty())
+    {
+        return false;
+    }
+    if(fileName.startsWith("http://") || fileName.startsWith("rtsp://")
+     || fileName.startsWith("rtmp://") || fileName.startsWith("https://"))
+    {
+        return false;
+    }
+    return true;
+}
+
+

+ 82 - 0
demo/VideoPlayerGL/VideoPlayer/VideoPlayer.h

@@ -0,0 +1,82 @@
+#ifndef VideoPlayer_H
+#define VideoPlayer_H
+
+#include <QWidget>
+#include <QThread>
+#include <QTimer>
+#include <QSemaphore>
+
+class DecodeVedio;
+
+class VideoPlayer : public QWidget
+{
+    Q_OBJECT
+public:
+    explicit VideoPlayer(QWidget *parent = nullptr);
+    ~VideoPlayer();
+
+    void openPlayVedio(const QString& fileName);    /* 打开播放视频 */
+    bool play();                                    /* 播放视频 */
+    void pause();                                   /* 暂停播放 */
+    void stop();                                    /* 停止播放 */
+    
+    void backward(qint64 ms);                       /* 后退,单位ms */
+    void forward(qint64 ms);                        /* 前进,单位ms */
+    bool getPlayStatus() { return m_playStatus; }   /* 获取播放状态 */
+    qint64 getDuration();                           /* 获取视频时长 */
+    qint64 getCurrentPos();                         /* 获取当前播放位置 */
+    void setCurrentPos(qint64 pos);                 /* 设置当前播放位置 */
+
+    void setPlayWidgetSize(int width,int height);   /* 设置播放视频窗口的大小 */
+    void setPlayWidget(QWidget* widget, bool flag = false); /* 设置播放窗口 */
+    void setPreviewImage(int num = 2);              /* 设置预览图片数目,在暂停时跳转,可能会有花屏或者黑帧,可以设置跳转图片个数跳过黑帧 */
+    void setFPS(int fps);                            /* 设置帧率 */
+
+    // void setPlayCallBack(std::function<Play_CallBack> playCallBack,void* context);  /* 设置播放回调函数 */
+signals:
+    void signal_playCompleted();                    /* 播放完成信号 */
+    void signal_refreshImage();                     /* 刷新图片信号 */
+
+protected:
+    void paintEvent(QPaintEvent *event) override;
+    void resizeEvent(QResizeEvent *event) override;
+    void refreshOneUIUntilHave();                   /* 刷新一张图片,直到有图片为止 */
+    /* 双击事件函数 */
+    void mouseDoubleClickEvent(QMouseEvent *event) override;
+
+private slots:
+    void do_refreshUI();                            /* 取出画面,刷新UI */
+    void do_refreshSamImage();                      /* 通过信号刷新图片 */
+    void do_playCompleted();                        /* 播放完成 */
+
+private:
+    bool isLocalFile(const QString& fileName);      /* 判断是否是本地文件 */
+
+private:
+    bool m_isLocalFile = false;                     /* 是否是本地文件 */
+    QString m_fileName;
+    QTimer m_timerRefreshUI;                        /* 定时器,用于刷新界面 */
+    int m_srcWidth = 0;                             /* 视频原本大小 */
+    int m_srcHeight = 0;
+    int m_nowWidth = 0;                             /* 现在大小 */
+    int m_nowHeight = 0;
+    int m_fps = 0;                                  /* 帧数 */
+    int m_interval = 0;                             /* 定时器定时间隔,帧率的倒数,单位ms */
+    qint64 m_duration = 0;                          /* 时长,单位ms */
+
+    DecodeVedio* m_decodeVedio = nullptr;
+    QThread* m_threadDecode = nullptr;              /* 解码器所在的线程 */
+    QImage* m_image = nullptr;                      /* 画面 */
+    bool m_playStatus = false;                      /* 是否正在播放 */
+    bool m_isOpenFile = false;                      /* 是否打开了视频文件,未打开视频文件也就是未初始化解码线程 */
+    QSemaphore* m_semRefresh = nullptr;             /* 刷新信号量 */
+
+    int m_previewImage = 0;                         /* 预览图片数目 */
+
+    // std::function<Play_CallBack> m_funcPlayCB = nullptr;  /* 播放回调函数 */
+    // void* m_context = nullptr;                      /* 上下文 */
+};
+
+
+
+#endif /* VideoPlayer_H */

+ 26 - 0
demo/VideoPlayerGL/main.cpp

@@ -0,0 +1,26 @@
+#include "widget.h"
+
+#include <QApplication>
+#include "Logs/loginit.h"
+#include "spdlog/spdlog.h"
+#include "FmtLog/fmtlog.h"
+
+extern "C" {
+#include <libavcodec/avcodec.h>
+#include <libavutil/hwcontext.h>
+}
+
+int main(int argc, char *argv[])
+{
+    QApplication a(argc, argv);
+    init_log();
+
+    SPDLOG_INFO("********** VideoPlayer **********");
+
+
+
+    Widget w;
+    w.show();
+
+    return a.exec();
+}

+ 43 - 0
demo/VideoPlayerGL/widget.cpp

@@ -0,0 +1,43 @@
+#include "widget.h"
+#include "./ui_widget.h"
+
+#include <QTimer>
+#include <QFileDialog>
+
+#include "spdlog/spdlog.h"
+// #include "fmtlog.h"
+
+// #include "VideoPlayer1.h"
+
+
+Widget::Widget(QWidget *parent)
+    : QWidget(parent)
+    , ui(new Ui::Widget)
+{
+    ui->setupUi(this);
+
+    m_videoPlayer = std::make_shared<PlayerGLWidget>(ui->widget_display);
+    m_videoPlayer->resize(1280, 720);
+
+    // m_videoPlayer1 = std::make_shared<VideoPlayer1>();
+    // m_videoPlayer1->setParent(ui->widget_display);
+    SPDLOG_INFO("***** Qt Library *****");
+
+    /* 显示预览图片 */
+    QString imagePath = QApplication::applicationDirPath() + "/0.jpg";
+    QImage image(imagePath);
+    image = image.scaled(1280, 720);
+    ui->label->setPixmap(QPixmap::fromImage(image));
+    
+}
+
+Widget::~Widget()
+{
+
+    delete ui;
+}
+
+
+
+
+

+ 32 - 0
demo/VideoPlayerGL/widget.h

@@ -0,0 +1,32 @@
+#ifndef WIDGET_H
+#define WIDGET_H
+
+#include <QWidget>
+#include "VideoPlayer.h"
+// #include "VideoPlayer1.h"
+#include "Player/PlayerGLWidget.h"
+
+QT_BEGIN_NAMESPACE
+namespace Ui { class Widget; }
+QT_END_NAMESPACE
+
+class Widget : public QWidget
+{
+    Q_OBJECT
+
+public:
+    Widget(QWidget *parent = nullptr);
+    ~Widget();
+
+private slots:
+
+    
+private:
+    Ui::Widget *ui;
+
+    bool m_isPlay = false;
+    // std::shared_ptr<VideoPlayer1> m_videoPlayer1 = nullptr;
+    std::shared_ptr<PlayerGLWidget> m_videoPlayer = nullptr;
+
+};
+#endif // WIDGET_H

+ 43 - 0
demo/VideoPlayerGL/widget.ui

@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<ui version="4.0">
+ <class>Widget</class>
+ <widget class="QWidget" name="Widget">
+  <property name="geometry">
+   <rect>
+    <x>0</x>
+    <y>0</y>
+    <width>1600</width>
+    <height>900</height>
+   </rect>
+  </property>
+  <property name="windowTitle">
+   <string>Widget</string>
+  </property>
+  <layout class="QVBoxLayout" name="verticalLayout">
+   <item>
+    <widget class="QLabel" name="label">
+     <property name="minimumSize">
+      <size>
+       <width>1280</width>
+       <height>720</height>
+      </size>
+     </property>
+     <property name="maximumSize">
+      <size>
+       <width>1280</width>
+       <height>720</height>
+      </size>
+     </property>
+     <property name="text">
+      <string/>
+     </property>
+    </widget>
+   </item>
+   <item>
+    <widget class="QWidget" name="widget_display" native="true"/>
+   </item>
+  </layout>
+ </widget>
+ <resources/>
+ <connections/>
+</ui>

+ 0 - 0
demo/VideoPlayerGL/解码路线图.xmind


+ 60 - 0
demo/ViewModel/Base/viewmodelbase.cpp

@@ -0,0 +1,60 @@
+#include "viewmodelbase.h"
+#include "ui_viewmodelbase.h"
+
+ViewModelBase::ViewModelBase(QWidget *parent) :
+    QWidget(parent),
+    ui(new Ui::ViewModelBase)
+{
+    ui->setupUi(this);
+
+    this->resize(800, 600);
+
+    /* 初始化表格 */
+    initViewModel();
+
+    addSomeData();
+
+}
+
+ViewModelBase::~ViewModelBase()
+{
+    delete ui;
+}
+
+
+/* 初始化视图模型 */
+void ViewModelBase::initViewModel()
+{
+    m_layout = new QVBoxLayout(this);
+    m_tableView = std::make_shared<QTableView>();
+    m_model = std::make_shared<QStandardItemModel>();
+    m_tableView->setModel(m_model.get());
+
+    /* 设置布局 */
+    m_layout->addWidget(m_tableView.get());
+    this->setLayout(m_layout);
+
+    /*********** 设置表格内容 **********/
+
+    /* 设置列数和标题 */
+    m_model->setColumnCount(4);
+    m_model->setHeaderData(0,Qt::Horizontal, "序号", Qt::DisplayRole);
+    m_model->setHorizontalHeaderItem(1, new QStandardItem("通道号"));
+    m_model->setHeaderData(2, Qt::Horizontal, "通道名称", Qt::DisplayRole);
+    m_model->setHeaderData(3, Qt::Horizontal, "颜色", Qt::DisplayRole);
+
+}
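+/*
+ * A small sketch (assumption, not part of this commit) of reading the table data back through the
+ * same model/view API, e.g. from a slot in this class (with <QDebug> included):
+ *
+ *     QString name = m_model->item(0, 2)->text();   // text of row 0 in the "通道名称" column
+ *
+ *     connect(m_tableView.get(), &QTableView::clicked, [this](const QModelIndex& index) {
+ *         qDebug() << index.row() << index.column() << m_model->data(index).toString();
+ *     });
+ */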
+
+/* 添加一些数据 */
+void ViewModelBase::addSomeData()
+{
+    /* 添加数据 */
+    for (int i = 0; i < 8; i++)
+    {
+        m_model->setItem(i, 0, new QStandardItem(QString::number(i + 1)));
+        m_model->setItem(i, 1, new QStandardItem(QString::number(i + 1)));
+        m_model->setItem(i, 2, new QStandardItem(QString("通道%1").arg(i)));
+        m_model->setItem(i, 3, new QStandardItem(QString("颜色%1").arg(i)));
+    }
+}
+

+ 39 - 0
demo/ViewModel/Base/viewmodelbase.h

@@ -0,0 +1,39 @@
+#ifndef VIEWMODELBASE_H
+#define VIEWMODELBASE_H
+
+#include <QWidget>
+#include <QLayout>
+#include <QStandardItemModel>
+#include <QTableView>
+
+#include <memory>
+
+namespace Ui {
+class ViewModelBase;
+}
+
+class ViewModelBase : public QWidget
+{
+    Q_OBJECT
+
+public:
+    explicit ViewModelBase(QWidget *parent = nullptr);
+    ~ViewModelBase();
+
+
+private:
+    /* 初始化视图模型 */
+    void initViewModel();
+    /* 添加一些数据 */
+    void addSomeData();
+
+private:
+    Ui::ViewModelBase *ui;
+
+    QVBoxLayout* m_layout = nullptr;
+    std::shared_ptr<QTableView> m_tableView = nullptr;
+    std::shared_ptr<QStandardItemModel> m_model = nullptr;
+
+};
+
+#endif // VIEWMODELBASE_H

+ 21 - 0
demo/ViewModel/Base/viewmodelbase.ui

@@ -0,0 +1,21 @@
+<ui version="4.0">
+ <author/>
+ <comment/>
+ <exportmacro/>
+ <class>ViewModelBase</class>
+ <widget name="ViewModelBase" class="QWidget">
+  <property name="geometry">
+   <rect>
+    <x>0</x>
+    <y>0</y>
+    <width>400</width>
+    <height>300</height>
+   </rect>
+  </property>
+  <property name="windowTitle">
+   <string>Form</string>
+  </property>
+ </widget>
+ <pixmapfunction/>
+ <connections/>
+</ui>

+ 80 - 0
demo/ViewModel/CMakeLists.txt

@@ -0,0 +1,80 @@
+cmake_minimum_required(VERSION 3.5)
+
+set(this_exe ViewModel)
+
+
+#包含源文件
+file(GLOB LOCAL_SRC
+    ${CMAKE_SOURCE_DIR}/External/module/Logs/*.cpp
+
+    ${CMAKE_CURRENT_SOURCE_DIR}/*.qrc
+    ${CMAKE_CURRENT_SOURCE_DIR}/*.rc
+    ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp
+    ${CMAKE_CURRENT_SOURCE_DIR}/*.ui
+
+    ${CMAKE_CURRENT_SOURCE_DIR}/Base/*.cpp
+)
+
+
+
+# 生成可执行程序
+
+add_executable(${this_exe}
+    # WIN32
+    ${LOCAL_SRC} 
+)
+
+
+#添加头文件
+target_include_directories(${this_exe} PRIVATE
+
+    ${CMAKE_CURRENT_SOURCE_DIR}
+    ${CMAKE_SOURCE_DIR}/External/common
+    ${CMAKE_SOURCE_DIR}/External/module
+    ${CMAKE_SOURCE_DIR}/External/module/Logs
+
+    ${CMAKE_CURRENT_SOURCE_DIR}/Base
+
+    ${spdlog_INCLUDE_DIR}
+)
+
+target_link_libraries(${this_exe} PRIVATE
+    Qt5::Widgets
+    Qt5::Core
+    Qt5::Network
+    # Qt5::Multimedia
+    # Qt5::Xml
+    # Qt5::Sql
+)
+
+target_link_libraries(${this_exe} PRIVATE 
+    # ${CURL_LIBRARY}
+    ${spdlog_LIBRARY}
+    # ${OpenSSL-1.1.1_LIB_LIBRARY}
+    # CURL::libcurl
+)
+
+if(CMAKE_CXX_COMPILER_VERSION LESS 9.0)
+    target_link_libraries(${this_exe} PRIVATE
+        stdc++fs
+    )
+endif()
+
+# target_link_libraries(${this_exe} PRIVATE
+#     ${CURL_LIBRARY}
+    
+# )
+# message(STATUS "CURL_LIBRARY: ${CURL_LIBRARY}")
+
+
+# if(CMAKE_CXX_COMPILER_ID MATCHES MSVC)
+#     target_link_libraries(${this_exe} PRIVATE
+#         # debug spdlogd.lib
+#         # optimized spdlog.lib
+#     )
+# elseif(CMAKE_CXX_COMPILER_ID MATCHES GNU)
+#     target_link_libraries(${this_exe} PRIVATE
+#         # debug 
+#         # optimized ${SM_DLL}
+#     )
+# endif()

+ 18 - 0
demo/ViewModel/main.cpp

@@ -0,0 +1,18 @@
+#include "widget.h"
+
+#include <QApplication>
+#include "loginit.h"
+
+#include "Base/viewmodelbase.h"
+
+
+int main(int argc, char *argv[])
+{
+    QApplication a(argc, argv);
+    init_log();
+    
+    ViewModelBase viewmodel;
+    viewmodel.show();
+
+    return a.exec();
+}

+ 31 - 0
demo/ViewModel/widget.cpp

@@ -0,0 +1,31 @@
+#include "widget.h"
+#include "./ui_widget.h"
+
+#include "spdlog/spdlog.h"
+
+#include "nlohmann/json.hpp"
+#define nJson nlohmann::json
+
+Widget::Widget(QWidget *parent)
+    : QWidget(parent)
+    , ui(new Ui::Widget)
+{
+    ui->setupUi(this);
+
+
+
+    SPDLOG_INFO("✨✨✨✨✨ Qt Library ✨✨✨✨✨");
+}
+
+Widget::~Widget()
+{
+
+    delete ui;
+}
+
+
+
+
+
+
+

+ 25 - 0
demo/ViewModel/widget.h

@@ -0,0 +1,25 @@
+#ifndef WIDGET_H
+#define WIDGET_H
+
+#include <QWidget>
+
+QT_BEGIN_NAMESPACE
+namespace Ui { class Widget; }
+QT_END_NAMESPACE
+
+class Widget : public QWidget
+{
+    Q_OBJECT
+
+public:
+    Widget(QWidget *parent = nullptr);
+    ~Widget();
+
+private slots:
+
+
+private:
+    Ui::Widget *ui;
+
+};
+#endif // WIDGET_H

+ 30 - 0
demo/ViewModel/widget.ui

@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<ui version="4.0">
+ <class>Widget</class>
+ <widget class="QWidget" name="Widget">
+  <property name="geometry">
+   <rect>
+    <x>0</x>
+    <y>0</y>
+    <width>800</width>
+    <height>600</height>
+   </rect>
+  </property>
+  <property name="windowTitle">
+   <string>Widget</string>
+  </property>
+  <widget class="QWidget" name="widget_pBtn" native="true">
+   <property name="geometry">
+    <rect>
+     <x>30</x>
+     <y>30</y>
+     <width>681</width>
+     <height>311</height>
+    </rect>
+   </property>
+   <layout class="QGridLayout" name="gridLayout"/>
+  </widget>
+ </widget>
+ <resources/>
+ <connections/>
+</ui>