QT + OpenGL + FFmpeg写的一个全景视频播放器

2023-05-16

临时被分配了一个任务 写一个C++版本的全景视频播放器   

网上搜了搜  基于前辈的基础上 写的差不多了 

测试视频源是用ffmpeg拉RTSP的流  

最终是要嵌入到别的一个视频播放器模块 所以解码这块我不用太关注 只要实现渲染就可以了

效果如下 左边的窗口用于输入视频源 以及显示一些关于视频的一些信息

看下视频演示效果

QT加OpenGL+ffmpeg 写的全景视频播放器

 点击播放之后默认是非全景模式 按空格键切换到全景模式

 切换到全景模式之后 随着鼠标的拖动 左边的窗口会显示对应的全景的一些坐标信息

 对比了下 和insta360的还是不一样  和Detu的播放器效果是一样的 

由于用的是ffmpeg 所以可以支持rtsp rtmp等实时流媒体播放 

ffmpeg用的是软解 也没有做视频同步之类的  但是播放实时视频没有啥问题 

我们测试的RTSP是4K的 软解也没啥问题 记录下

绿色主窗口代码头文件:

#pragma once

#include <QtWidgets/QWidget>
#include "ui_QTPlayer.h"
#include "windows.h"
#include "glew.h"

#include <QPushButton>
#include <QLineEdit>
#include <QLabel>
#include <QMessageBox>

#include <QLabel>

#include "PlayWindow.h"

// Main control window: hosts the media-URL input, Play/Close and Reset
// buttons, and a label that is refreshed periodically with stream/camera
// info pulled from the detached OpenGL render window (PlayWindow).
class QTPlayer : public QWidget
{
    Q_OBJECT

public:
    QTPlayer(QWidget *parent = nullptr);
    ~QTPlayer();

    // Closes the detached render window together with the main window.
    void closeEvent(QCloseEvent* event);

private slots:

    // Timer slot (200 ms): refreshes label_info with current media info.
    void Refresh();

private:
    Ui::QTPlayerClass ui;


   

    QLabel* label;          // "Media:" caption
    QLineEdit* input;       // media URL / file path input
    QPushButton* open;      // toggles between "Play" and "Close"
    QPushButton* reset;     // resets the 360 camera pose and FOV
    QLabel* label_info;     // periodically updated media info text
    PlayWindow* play;       // detached top-level OpenGL render window (no Qt parent)
};

主窗口cpp代码

#include "QTPlayer.h"

#include <QTimer>
#include <QDateTime>
#include <iostream>




// Builds the control UI, creates the (initially hidden) render window, and
// wires up the Play/Close and Reset actions plus a 200 ms info-refresh timer.
QTPlayer::QTPlayer(QWidget *parent)
    : QWidget(parent,Qt::MSWindowsOwnDC)
{
    ui.setupUi(this);

    // Default media source pre-filled into the input box.
    std::string filename = "rtsp://admin:thinker13@192.168.0.240:554/streaming/channels/701";

    // Detached render window; intentionally has no parent (top-level window),
    // so it is deleted explicitly in ~QTPlayer.
    play = new PlayWindow();

    QFont ft;
    ft.setPointSize(14);

    label = new QLabel("Media:", this);
    label->setFont(ft);
    label->move(10, 15);

    label_info = new QLabel( this);
    label_info->move(20, 120);
    label_info->setFont(ft);
    label_info->resize(660, 560);

    // Green background / dark-red text for the info panel.
    label_info->setStyleSheet("QLabel{background:#9BCD9B;color:#8B2500;}");

    input = new QLineEdit(this);
    input->move(150, 10);
    input->resize(300, 50);
    input->setText(QString(filename.c_str()));
    input->setFont(ft);

    //Play button
    open = new QPushButton("Play", this);
    open->setFont(ft);
    open->move(520, 15);

    //Reset button
    reset = new QPushButton("Reset", this);
    reset->setFont(ft);
    reset->move(175, 60);

    connect(reset, &QPushButton::clicked, play, [=]() {
        play->ResetCamera();
        });

    // Main window setup: fixed size, no resizing.
    this->resize(700, 700);
    setWindowFlags(Qt::MSWindowsFixedSizeDialogHint);
    setWindowTitle("QMCY");

    // Parent the timer to this window so it is destroyed with it.
    // (It was previously parented to `parent`, which leaks the timer when
    // the window is created with parent == nullptr — the usual case.)
    QTimer* timer_info = new QTimer(this);
    connect(timer_info, &QTimer::timeout, this, &QTPlayer::Refresh);
    timer_info->start(200);

    // Play/Close toggle: first click opens the stream and shows the render
    // window; the next click tears it down again.
    connect(open, &QPushButton::clicked,play, [=]() {
        if (open->text() == QString("Close"))
        {
            play->DeInitGL();
            play->close();
            open->setText("Play");
        }
        else
        {
            play->hide();
            if (play->OpenFile(input->text().toStdString()))
            {
                play->show();
                open->setText("Close");
            }
            else
            {
                QMessageBox::about(nullptr, "ERROR", "Input file is not valid");
                open->setText("Play");
            }
        }
});

}

// Tear down the manually-created widgets. The child widgets are parented to
// this window, so deleting them here merely removes them from Qt's child
// list before ~QWidget runs; `play` has no parent and must be freed here.
QTPlayer::~QTPlayer()
{
    for (QObject* owned : { static_cast<QObject*>(label),
                            static_cast<QObject*>(label_info),
                            static_cast<QObject*>(input),
                            static_cast<QObject*>(reset),
                            static_cast<QObject*>(open),
                            static_cast<QObject*>(play) })
    {
        delete owned;
    }
}





// Propagate close to the detached render window so it does not outlive the
// control window. The event itself is accepted by the base implementation.
void QTPlayer::closeEvent(QCloseEvent* event)
{
    if (!play)
        return;

    play->close();
}




void QTPlayer::Refresh()
{
    std::string info;
    play->GetMediaInfo(info);
    label_info->setText(info.c_str());
}

播放窗口头文件

#pragma once

#include "windows.h"

#include "glew.h"
#include "camera.h"
#include "shader.h"
#include "texture2d.h"
#include "model.h"
#include "videocapture.h"
#include <QtWidgets/QWidget>
#include <iostream>

// Top-level OpenGL render window. It owns a native WGL context and paints
// itself (paintEngine() returns NULL, WA_PaintOnScreen is set), pulls decoded
// frames from a VideoCapture and renders them either on a flat quad or, in
// 360 mode, on a sphere the camera sits inside.
class PlayWindow :public QWidget
{
    Q_OBJECT

public:
    PlayWindow(QWidget* parent = nullptr);
    ~PlayWindow();

    // Disable Qt's paint engine: all drawing goes through raw OpenGL.
    virtual QPaintEngine* paintEngine() const { return NULL; }
    virtual void resizeEvent(QResizeEvent* event);


    // Appends a textual description of the stream/camera state to the argument.
    void GetMediaInfo(std::string &);
    void ResetCamera();
    bool CreateGLContext();

    void Render();


    // Intercepts the custom GL_UPDATE event posted by GLUpdate().
    bool event(QEvent* event);

    void showEvent(QShowEvent* event);

    // Posts an asynchronous repaint request to self.
    void GLUpdate();
    bool OpenFile(std::string url);
    void InitModel();
    void InitGL();
    void DeInitGL();

    // Space toggles flat / 360 rendering.
    void keyReleaseEvent(QKeyEvent* event);

    void mousePressEvent(QMouseEvent* event);
    void mouseMoveEvent(QMouseEvent* event);
    void wheelEvent(QWheelEvent* event);
private slots:
    // Timer-driven render trigger.
    void Tick();

private:
    std::string play_url;
    HDC dc;       // native device context of this window
    HGLRC rc;     // WGL rendering context
    HWND hwnd;    // native window handle (winId())

    bool hasVideo = false;   // true once OpenFile() succeeded

    Camera3D m_camera;

    
    float speed = 0.1;
    float rotate_speed = 0.05;   // rotation per pixel of mouse drag — presumably degrees; confirm in Camera3D
    QPoint last_pos;             // last mouse position during a drag



    std::shared_ptr<Model> flatModel;    // quad for normal playback
    std::shared_ptr<Model> sphereModel;  // sphere for 360 playback
    std::shared_ptr<Shader> shader;      // selected per pixel format in InitGL()

    std::shared_ptr<VideoCapture> video;


    // Per-plane textures; raw OWNING pointers created in InitGL().
    Texture2D* Y = NULL;
    Texture2D* U = NULL;
    Texture2D* V = NULL;


    bool isVR360 = false;   // false: flat quad, true: 360 sphere
    float fov = 60;         // perspective FOV used in 360 mode

};

播放窗口cpp文件

#include "PlayWindow.h"
#include "windows.h"
#include <QPushButton>
#include <QDebug>
#include "QtEvent.h"
#include <QResizeEvent>
#include <QTimer>
#include <QDateTime>
#include <iostream>
#include <QApplication>

// Creates the native GL window: an own-DC widget that paints itself, builds
// a legacy WGL context, initializes GLEW, and starts a 0 ms timer that
// drives the render loop.
PlayWindow::PlayWindow(QWidget* parent)
    : QWidget(parent, Qt::MSWindowsOwnDC)
{
    // Raw OpenGL rendering: Qt must not paint or clear this widget.
    setAttribute(Qt::WA_PaintOnScreen);
    setAttribute(Qt::WA_NoSystemBackground);
    setAutoFillBackground(true);

    // Minimal window chrome: only minimize/maximize buttons.
    setWindowFlags(Qt::CustomizeWindowHint | Qt::WindowMinMaxButtonsHint);

    this->resize(800, 600);
    setWindowTitle("Play");

    hwnd = (HWND)winId();
    CreateGLContext();
    wglMakeCurrent(dc, rc);   // context stays current on the GUI thread

    if (glewInit() != GLEW_OK)
    {
        throw "glewInit failed!!!";
    }

    // Parent the timer to this window so it dies with it. (It was previously
    // parented to `parent`, leaking the timer when parent == nullptr — which
    // is how QTPlayer creates this window.)
    QTimer* timer = new QTimer(this);
    connect(timer, &QTimer::timeout, this, &PlayWindow::Tick);
    timer->start();   // 0 ms interval: fires once per event-loop pass
    m_camera.SetRotation(0, 0, 0);
    InitModel();
}


// Releases per-stream GL objects first, then tears down the WGL context
// and the device context (order matters: textures need a current context).
PlayWindow::~PlayWindow()
{

    DeInitGL();

    wglMakeCurrent(NULL, NULL);
    if (rc)wglDeleteContext(rc);
    if (dc) ReleaseDC(hwnd, dc);   // no-op for a CS_OWNDC window, but harmless
}



bool PlayWindow::CreateGLContext()
{
    dc = GetDC(hwnd);

    PIXELFORMATDESCRIPTOR pfd;
    ZeroMemory(&pfd, sizeof(pfd));

    pfd.nSize = sizeof(pfd);
    pfd.nVersion = 1;
    pfd.cColorBits = 32;
    pfd.cDepthBits = 24;
    pfd.cStencilBits = 8;
    pfd.iPixelType = PFD_TYPE_RGBA;
    pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER;

    int format = 0;

    format = ChoosePixelFormat(dc, &pfd);

    if (!format)
    {
        throw "ChoosePixelFormat failed!!!";
    }

    SetPixelFormat(dc, format, &pfd);
    rc = wglCreateContext(dc);
    return true;
}

// Restore the default field of view and put the camera back into its
// initial pose.
void PlayWindow::ResetCamera()
{
    constexpr float kDefaultFov = 60.0f;
    fov = kDefaultFov;
    m_camera.Reset();
}

void PlayWindow::GetMediaInfo(std::string &out)
{
    if (video)
    {
        out.append("Video mode:");
        if (isVR360)
        {
            out.append(" Sphere \n");
        }
        else
        {
            out.append(" Flat \n");
        }
        out.append("Video format:");
        out.append(video->format);
        out.append("\nWidth:");
        out.append(std::to_string(video->width));

        out.append("   Height:");
        out.append(std::to_string(video->height));

        out.append("\nEncode:");
        out.append(video->encoder);
        out.append("\nReceive size:");
        if (video->recv_size > 1024 * 1024 * 1024)
        {
            out.append(std::to_string(video->recv_size/(1024 * 1024 * 1024.0f)));
            out.append("GB");
        }
        else if (video->recv_size > 1024 * 1024)
        {
            out.append(std::to_string(video->recv_size / (1024 * 1024.0f)));
            out.append("MB");
        }
        else if (video->recv_size > 1024)
        {
            out.append(std::to_string(video->recv_size / (1024.0f)));
            out.append("KB");
        }

        if (isVR360)
        {
            out.append("\nFOV:");
            out.append(std::to_string(fov));
            out.append("\nX:");
            out.append(std::to_string(m_camera.GetPosition().x()));
            out.append("\nY:");
            out.append(std::to_string(m_camera.GetPosition().y()));
            out.append("\nZ:");
            out.append(std::to_string(m_camera.GetPosition().z()));

            out.append("\nPitch:");
            out.append(std::to_string(m_camera.GetRotation().x()));
            out.append("\nYaw:");
            out.append(std::to_string(m_camera.GetRotation().y()));
            out.append("\nRoll:");
            out.append(std::to_string(m_camera.GetRotation().z()));

        }

    }
    else
    {
        out.append("Video is not avaiable");
    }
    
}



// Keep the GL viewport in sync with the widget size, then request a redraw.
void PlayWindow::resizeEvent(QResizeEvent* event)
{
    const QSize sz = event->size();
    glViewport(0, 0, sz.width(), sz.height());
    GLUpdate();
}
// Handle the custom GL_UPDATE event by rendering a frame (only once a
// stream is open); everything else is forwarded to the base class.
bool PlayWindow::event(QEvent* event)
{
    if (event->type() == QtEvent::GL_UPDATE && hasVideo)
    {
        Render();
    }
    return QWidget::event(event);
}

// Requests an asynchronous repaint by posting the custom GL_UPDATE event to
// ourselves; event() picks it up and calls Render() on the GUI thread.
void PlayWindow::GLUpdate()
{
    QApplication::postEvent(this, new QtEvent(QtEvent::GL_UPDATE));
}


// Kick off a first repaint as soon as the window becomes visible.
void PlayWindow::showEvent(QShowEvent* event)
{
    GLUpdate();
}



// Space toggles between flat and 360 (sphere) rendering.
void PlayWindow::keyReleaseEvent(QKeyEvent* event)
{
    if (event->key() == Qt::Key_Space)
    {
        isVR360 = !isVR360;   // was the odd `1 - isVR360` on a bool
        GLUpdate();
    }
}


// Remember the press position so mouseMoveEvent can compute drag deltas.
void PlayWindow::mousePressEvent(QMouseEvent* event)
{
    last_pos = event->pos();
}

// Drag handler (mouse tracking is off, so this only fires while a button is
// held): convert the cursor delta into pitch/yaw rotation of the camera.
void PlayWindow::mouseMoveEvent(QMouseEvent* event)
{
    const QPoint delta = event->pos() - last_pos;
    last_pos = event->pos();
    m_camera.Rotate(delta.y() * rotate_speed, delta.x() * rotate_speed, 0);
    GLUpdate();
}

// Mouse wheel zooms by changing the FOV, clamped to [45, 120] degrees.
// The original clamped BEFORE applying the wheel delta, so the FOV could
// step outside the range (e.g. stick at 44) and stay there; clamp after
// updating instead.
void PlayWindow::wheelEvent(QWheelEvent* event)
{
    fov -= event->angleDelta().y() / 120.0f;   // one notch == 120 units
    if (fov < 45)
    {
        fov = 45;
    }
    else if (fov > 120)
    {
        fov = 120;
    }
    GLUpdate();
}


// Timer slot: drives the render loop by posting a repaint request.
// The previous frame-interval bookkeeping (static timestamp + unused
// `interval` local) was dead code and has been removed.
void PlayWindow::Tick()
{
    GLUpdate();
}
// Releases per-stream resources. The plane textures are raw OWNING pointers:
// the original only set them to NULL, leaking the Texture2D objects (and
// their GL textures) on every re-open. The shader and capture are
// shared_ptrs and are released via reset().
void PlayWindow::DeInitGL()
{
    delete Y;
    Y = nullptr;

    delete U;
    U = nullptr;

    delete V;
    V = nullptr;

    shader.reset();
    video.reset();
}

// Loads the two projection surfaces: a quad for flat playback and a sphere
// (viewed from inside) for 360 playback.
void PlayWindow::InitModel()
{
    flatModel = std::make_shared<Model>("res/model/quad.obj");
    sphereModel = std::make_shared<Model>("res/model/sphere.obj");
}
// Selects a fragment shader and allocates plane textures matching the
// stream's pixel format, then sets fixed GL state. Texture sizes follow the
// chroma layout of each format (e.g. half-width/half-height U and V for
// 4:2:0; interleaved formats use a single packed texture).
void PlayWindow::InitGL()
{
    ResetCamera();
    
    // Planar 4:2:0 — three planes, chroma at half width and half height.
    if (video->formatType == PIX_FMT_YUV420P || video->formatType == PIX_FMT_YUVJ420P)
    {
        shader = std::make_shared<Shader>("res/glsl/vertexshader.glsl", "res/glsl/fragmentyuv.glsl");
        Y = new Texture2D(video->width, video->height, GL_LUMINANCE, GL_LUMINANCE, NULL);
        U = new Texture2D(video->width / 2, video->height / 2, GL_LUMINANCE, GL_LUMINANCE, NULL);
        V = new Texture2D(video->width / 2, video->height / 2, GL_LUMINANCE, GL_LUMINANCE, NULL);
    }
    // Planar 4:2:2 — chroma at half width, full height.
    else if (video->formatType == PIX_FMT_YUV422P || video->formatType == PIX_FMT_YUVJ422P)
    {
        shader = std::make_shared<Shader>("res/glsl/vertexshader.glsl", "res/glsl/fragmentyuv.glsl");
        Y = new Texture2D(video->width, video->height, GL_LUMINANCE, GL_LUMINANCE, NULL);
        U = new Texture2D(video->width / 2, video->height, GL_LUMINANCE, GL_LUMINANCE, NULL);
        V = new Texture2D(video->width / 2, video->height, GL_LUMINANCE, GL_LUMINANCE, NULL);
    }
    // Planar 4:4:4 — all three planes full size.
    else if (video->formatType == PIX_FMT_YUV444P || video->formatType == PIX_FMT_YUVJ444P)
    {
        shader = std::make_shared<Shader>("res/glsl/vertexshader.glsl", "res/glsl/fragmentyuv.glsl");
        Y = new Texture2D(video->width, video->height, GL_LUMINANCE, GL_LUMINANCE, NULL);
        U = new Texture2D(video->width, video->height, GL_LUMINANCE, GL_LUMINANCE, NULL);
        V = new Texture2D(video->width, video->height, GL_LUMINANCE, GL_LUMINANCE, NULL);
    }
    // Packed 4:2:2 (YUYV): one interleaved buffer sampled via two views.
    else if (video->formatType == PIX_FMT_YUYV422)
    {
        shader = std::make_shared<Shader>("res/glsl/vertexshader.glsl", "res/glsl/fragment_yuyv422.glsl");
        Y = new Texture2D(video->width, video->height, GL_LUMINANCE_ALPHA, GL_LUMINANCE_ALPHA, NULL);
        U = new Texture2D(video->width / 2, video->height, GL_RGBA, GL_RGBA, NULL);
    }
    // Packed 4:2:2 (UYVY): same layout, different byte order handled in GLSL.
    else if (video->formatType == PIX_FMT_UYVY422)
    {
        shader = std::make_shared<Shader>("res/glsl/vertexshader.glsl", "res/glsl/fragment_uyvy422.glsl");
        Y = new Texture2D(video->width, video->height, GL_LUMINANCE_ALPHA, GL_LUMINANCE_ALPHA, NULL);
        U = new Texture2D(video->width / 2, video->height, GL_RGBA, GL_RGBA, NULL);
    }
    // Semi-planar 4:2:0 — Y plane plus interleaved UV plane.
    else if (video->formatType == PIX_FMT_NV12)
    {
        shader = std::make_shared<Shader>("res/glsl/vertexshader.glsl", "res/glsl/fragment_nv12.glsl");
        Y = new Texture2D(video->width, video->height, GL_LUMINANCE, GL_LUMINANCE, NULL);
        U = new Texture2D(video->width / 2, video->height / 2, GL_LUMINANCE_ALPHA, GL_LUMINANCE_ALPHA, NULL);
    }
    else if (video->formatType == PIX_FMT_NV21)
    {
        shader = std::make_shared<Shader>("res/glsl/vertexshader.glsl", "res/glsl/fragment_nv21.glsl");
        Y = new Texture2D(video->width, video->height, GL_LUMINANCE, GL_LUMINANCE, NULL);
        U = new Texture2D(video->width / 2, video->height / 2, GL_LUMINANCE_ALPHA, GL_LUMINANCE_ALPHA, NULL);
    }
    // Single-plane formats: only Y is used.
    else if (video->formatType == PIX_FMT_GRAY)
    {
        shader = std::make_shared<Shader>("res/glsl/vertexshader.glsl", "res/glsl/fragment_gray.glsl");
        Y = new Texture2D(video->width, video->height, GL_LUMINANCE, GL_LUMINANCE, NULL);
    }
    else if (video->formatType == PIX_FMT_RGB)
    {
        shader = std::make_shared<Shader>("res/glsl/vertexshader.glsl", "res/glsl/fragment_rgb.glsl");
        Y = new Texture2D(video->width, video->height, GL_RGB, GL_RGB, NULL);
    }
    else if (video->formatType == PIX_FMT_BGR)
    {
        shader = std::make_shared<Shader>("res/glsl/vertexshader.glsl", "res/glsl/fragment_bgr.glsl");
        Y = new Texture2D(video->width, video->height, GL_RGB, GL_RGB, NULL);
    }
    else if (video->formatType == PIX_FMT_RGBA)
    {
        shader = std::make_shared<Shader>("res/glsl/vertexshader.glsl", "res/glsl/fragment_rgba.glsl");
        Y = new Texture2D(video->width, video->height, GL_RGBA, GL_RGBA, NULL);
    }
    else if (video->formatType == PIX_FMT_BGRA)
    {
        shader = std::make_shared<Shader>("res/glsl/vertexshader.glsl", "res/glsl/fragment_bgra.glsl");
        Y = new Texture2D(video->width, video->height, GL_RGBA, GL_RGBA, NULL);
    }
    else if (video->formatType == PIX_FMT_ARGB)
    {
        shader = std::make_shared<Shader>("res/glsl/vertexshader.glsl", "res/glsl/fragment_argb.glsl");
        Y = new Texture2D(video->width, video->height, GL_RGBA, GL_RGBA, NULL);
    }
    else if (video->formatType == PIX_FMT_ABGR)
    {
        shader = std::make_shared<Shader>("res/glsl/vertexshader.glsl", "res/glsl/fragment_abgr.glsl");
        Y = new Texture2D(video->width, video->height, GL_RGBA, GL_RGBA, NULL);
    }
    // NOTE(review): there is no fallback branch — for an unsupported
    // formatType `shader` stays null and Render() will use it anyway;
    // confirm VideoCapture can only produce the formats handled above.

    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);
    glCullFace(GL_BACK);

    glPolygonMode(GL_FRONT, GL_FILL);
    CheckError();

}


void PlayWindow::Render()
{
    QMatrix4x4 projectMat;

    projectMat.perspective(fov, width() / (float)height(), 0.1f, 100);


    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

RETRY:
    AVFrame* frame = nullptr;
    if (video == nullptr)
    {
        goto EMPTY;
    }
    int ret = video->Retrieve(frame);
    if (ret < 0)
    {
        goto EMPTY;
    };
    if (ret == 0)
    {
        video->Seek(0);
        goto RETRY;
    }

    //qDebug() << "frame:pts=" << frame->pts;

#if 1

    Model* model = isVR360 ? sphereModel.get() : flatModel.get();
    const float* videoMat = isVR360 ? m_camera.GetViewMat().constData() : QMatrix4x4().constData();
    const float* _projMat = isVR360 ? projectMat.constData() : QMatrix4x4().constData();

    if (video->formatType == PIX_FMT_YUV420P || video->formatType == PIX_FMT_YUVJ420P)
    {
        Y->UpdateTexture2D(frame->width, frame->height, frame->linesize[0], frame->data[0]);
        U->UpdateTexture2D(frame->width / 2, frame->height / 2, frame->linesize[1], frame->data[1]);
        V->UpdateTexture2D(frame->width / 2, frame->height / 2, frame->linesize[2], frame->data[2]);

        model->ApplyShader(shader.get());
        model->SetTexture2D("VIDEO_Y", Y);
        model->SetTexture2D("VIDEO_U", U);
        model->SetTexture2D("VIDEO_V", V);

    }
    else if (video->formatType == PIX_FMT_YUV422P || video->formatType == PIX_FMT_YUVJ422P)
    {
        Y->UpdateTexture2D(frame->width, frame->height, frame->linesize[0], frame->data[0]);
        U->UpdateTexture2D(frame->width / 2, frame->height, frame->linesize[1], frame->data[1]);
        V->UpdateTexture2D(frame->width / 2, frame->height, frame->linesize[2], frame->data[2]);

        model->ApplyShader(shader.get());
        model->SetTexture2D("VIDEO_Y", Y);
        model->SetTexture2D("VIDEO_U", U);
        model->SetTexture2D("VIDEO_V", V);

    }
    else if (video->formatType == PIX_FMT_YUV444P || video->formatType == PIX_FMT_YUVJ444P)
    {
        Y->UpdateTexture2D(frame->width, frame->height, frame->linesize[0], frame->data[0]);
        U->UpdateTexture2D(frame->width, frame->height, frame->linesize[1], frame->data[1]);
        V->UpdateTexture2D(frame->width, frame->height, frame->linesize[2], frame->data[2]);

        model->ApplyShader(shader.get());
        model->SetTexture2D("VIDEO_Y", Y);
        model->SetTexture2D("VIDEO_U", U);
        model->SetTexture2D("VIDEO_V", V);
    }
    else if (video->formatType == PIX_FMT_YUYV422)
    {
        Y->UpdateTexture2D(frame->width, frame->height, frame->linesize[0] / 2, frame->data[0]);
        U->UpdateTexture2D(frame->width / 2, frame->height, frame->linesize[0] / 4, frame->data[0]);

        model->ApplyShader(shader.get());
        model->SetTexture2D("VIDEO_Y", Y);
        model->SetTexture2D("VIDEO_U", U);
    }
    else if (video->formatType == PIX_FMT_UYVY422)
    {
        Y->UpdateTexture2D(frame->width, frame->height, frame->linesize[0] / 2, frame->data[0]);
        U->UpdateTexture2D(frame->width / 2, frame->height, frame->linesize[0] / 4, frame->data[0]);

        model->ApplyShader(shader.get());
        model->SetTexture2D("VIDEO_Y", Y);
        model->SetTexture2D("VIDEO_U", U);
    }

    else if (video->formatType == PIX_FMT_NV12)
    {
        Y->UpdateTexture2D(frame->width, frame->height, frame->linesize[0], frame->data[0]);
        U->UpdateTexture2D(frame->width / 2, frame->height / 2, frame->linesize[1] / 2, frame->data[1]);

        model->ApplyShader(shader.get());
        model->SetTexture2D("VIDEO_Y", Y);
        model->SetTexture2D("VIDEO_U", U);
    }
    else if (video->formatType == PIX_FMT_NV21)
    {
        Y->UpdateTexture2D(frame->width, frame->height, frame->linesize[0], frame->data[0]);
        U->UpdateTexture2D(frame->width / 2, frame->height / 2, frame->linesize[1] / 2, frame->data[1]);

        model->ApplyShader(shader.get());
        model->SetTexture2D("VIDEO_Y", Y);
        model->SetTexture2D("VIDEO_U", U);
    }
    else if (video->formatType == PIX_FMT_GRAY)
    {
        Y->UpdateTexture2D(frame->width, frame->height, frame->linesize[0], frame->data[0]);

        model->ApplyShader(shader.get());
        model->SetTexture2D("VIDEO_Y", Y);
    }
    else if (video->formatType == PIX_FMT_RGB || video->formatType == PIX_FMT_BGR)
    {
        Y->UpdateTexture2D(frame->width, frame->height, frame->linesize[0] / 3, frame->data[0]);

        model->ApplyShader(shader.get());
        model->SetTexture2D("VIDEO_Y", Y);
    }
    else if (video->formatType == PIX_FMT_RGBA || video->formatType == PIX_FMT_BGRA
        || video->formatType == PIX_FMT_ARGB || video->formatType == PIX_FMT_ABGR)
    {
        Y->UpdateTexture2D(frame->width, frame->height, frame->linesize[0] / 4, frame->data[0]);

        model->ApplyShader(shader.get());
        model->SetTexture2D("VIDEO_Y", Y);
    }



    model->Blit(videoMat, _projMat);
#endif
EMPTY:
    SwapBuffers(dc);
    av_frame_unref(frame);
}


// Opens `url` with the FFmpeg-backed capture, tearing down any previous
// stream first. On failure the window is hidden and false is returned so the
// caller can show an error; on success GL resources for the new format are
// created and true is returned.
bool PlayWindow::OpenFile(std::string url)
{
    DeInitGL();

    video = std::make_shared<VideoCapture>();
    if (!video->Open(url.c_str(), PIX_FMT_AUTO))
    {
        video = nullptr;
        hasVideo = false;
        this->hide();
        return false;
    }

    hasVideo = true;
    InitGL();
    return true;
}

别的就是 camera的移动  以及openglmodel shader相关的代码了 

完整代码 付费

本文内容由网友自发贡献,版权归原作者所有,本站不承担相应法律责任。如您发现有涉嫌抄袭侵权的内容,请联系:hwhale#tublm.com(使用前将#替换为@)

QT + OpenGL + FFmpeg写的一个全景视频播放器 的相关文章

  • freertos入门之创建Task

    基于esp32平台 span class token macro property span class token directive hash span span class token directive keyword includ
  • freertos入门之queue

    基于esp32 平台 参考 xff1a https www freertos org fr content src uploads 2018 07 161204 Mastering the FreeRTOS Real Time Kernel
  • freertos入门之binary semaphore

    基于esp32 平台 参考 xff1a https www freertos org fr content src uploads 2018 07 161204 Mastering the FreeRTOS Real Time Kernel
  • freertos入门之EventGroup

    span class token macro property span class token directive hash span span class token directive keyword include span spa
  • freertos入门之Timer

    span class token macro property span class token directive hash span span class token directive keyword include span spa
  • freertos入门之StreamBuffer

    span class token macro property span class token directive hash span span class token directive keyword include span spa
  • 设置cpp-httplib 服务器模式模式下的线程池大小 以及如何增加默认处理函数 以便能够实现http请求转发

    先说说默认的创建的线程池数量 原因是某天调试在gdb调试下 一启动程序发现 开启了好多线程 如下图 因为我们程序 没几个线程 数了下 居然有60多个线程 不需要那么多 所以看下 httplib的源码 构造函数的时候 设置了最大线程池数量 看
  • freertos入门之StreamBuffer

    span class token macro property span class token directive hash span span class token directive keyword include span spa
  • arduino-esp32 入门之wifi连接热点

    参考 xff1a https github com espressif arduino esp32 blob master libraries WiFi examples WiFiClient WiFiClient ino span cla
  • esp32-arduino入门之点亮led

    参考 xff1a https learncplusplus org how to program arduino with c span class token macro property span class token directi
  • stm32 学习资料汇总

    外设库以及example xff1a Google 搜索 STM32 Standard Peripheral Libraries
  • HttpClient学习研究---第四章:HTTP authenticationHTTP身份验证

    第四章 HTTP authentication HTTP身份验证 HttpClient provides full support for authentication schemes defined by the HTTP standar
  • Linux系统下常用的3个网络测试工具!

    在Linux系统中 xff0c 有很多用于管理和监测网络连接的命令 xff0c 其中ping traceroute和nslookup是比较常用的网络命令 xff0c 可以用来测试网络 诊断网络故障等等 xff0c 以下是详细的内容 xff1
  • TCP.02.SELECT模型

    文章目录 SELECT模型简介SELECT模型流程SELECT原理SELECT代码实现fd set 数组及基本操作SELECT函数参数2 xff08 重点 xff09 参数3参数4 关闭所有SOCKET句柄处理控制台窗口关闭事件整体代码思考
  • Node.js http 模块详解:request 对象

    前言 前文介绍了 http 模块的基本用法 xff0c 主要就是调用 createServer 和 listen 方法来创建和启动服务 要处理具体的 HTTP 请求 xff0c 就要在 createServer 方法中写点什么 本文来介绍处
  • 如何确认串口波特率

    文章目录 1 盲扫一遍2 示波器测量1bit时间3 逻辑分析仪确认 背景 xff1a 手上有一个模块使用串口通信但是不知道其波特率 xff0c 如何确认它的波特率呢 xff1f 1 盲扫一遍 波特率有常用的配置9600 115200 230
  • curl命令常用参数

    curl命令常用参数 curl简介常用方法将远程文件下载到本地 o并指定名称指定请求方式 X显示响应结果 v携带用户名 密码 u携带请求头 H查看服务端响应头 i只显示http response的头信息 I自动跳转 L模拟dns解析 res
  • 学习ZLmediaKit流媒体服务器时候遇到的问题

    照zlmediakit的源码 自己复制了一份 然后有的地方编译不过修改了部分 测试的时候发现有两个问题 第一是 ffmpeg的ffplay 能播放 vlc不能播放 第二个问题是directProxy设置为0的时候 推流的时候 然后用ffpl
  • 如何在C/C++中使用pi (π) 值

    在math h有一个宏定义M PI if defined USE MATH DEFINES amp amp defined MATH DEFINES DEFINED define MATH DEFINES DEFINED Definitio
  • 关于#include<bits/stdc++.h>

    偶然发现 span class hljs preprocessor include lt bits stdc 43 43 h gt span 包括了C 43 43 几乎所有的头文件 xff0c 感觉以后可以返璞归真了 回顾自己不长的竞赛历程

随机推荐

  • 单片机STM32直连电调控制航模涵道电机的方法总结

    单片机STM32直连电调控制航模涵道电机的方法总结 文章目录 单片机STM32直连电调控制航模涵道电机的方法总结前言一 硬件情况二 涵道电机两种常见的驱动方式1 有线控制方式2 无线控制方案 解决方案 前言 由于项目需要 xff0c 我需要
  • PX4之常用函数解读

    PX4Firmware 经常有人将Pixhawk PX4 APM还有ArduPilot弄混 这里首先还是简要说明一下 xff1a Pixhawk是飞控硬件平台 xff0c PX4和ArduPilot都是开源的可以烧写到Pixhawk飞控中的
  • PX4项目学习::(七)飞控栈:commander

    PX4的飞行控制程序通过模块来实现 xff0c 与飞控相关的模块主要有commander xff0c navigator xff0c pos control xff0c att control这几个 xff0c 分别可以在src modul
  • PX4项目学习::(五)模块代码启动流程

    54条消息 PX4 模块代码启动流程 zhao23333的博客 CSDN博客
  • TX2指南(一)TX2接显示器的问题

    TX2开发板一定要适配HDMI显示器 xff0c 使用转接头在VGA显示器会显示 input signal out of range xff01 所以目前来看手上的这套TX2只能适配HDMI显示器 xff0c 目前还不清楚是不是所有的TX2
  • 推荐定位信息(GPRMC)

    推荐定位信息 GPRMC GPRMC lt 1 gt lt 2 gt lt 3 gt lt 4 gt lt 5 gt lt 6 gt lt 7 gt lt 8 gt lt 9 gt lt 10 gt lt 11 gt lt 12 gt hh
  • linux中使用shell命令打开指定文件夹(Nautilus@GNOME)

    在GNOME中是Nautilus 鹦鹉螺 xff0c 而KDE中是Konqueror nautilus 图形化桌面包括了一个叫做 Nautilus 的文件管理器 它给你提供了系统和个人文件的图形化显示 然而 xff0c Nautilus 不
  • 在ubuntu20.4下安装ardupilot 4.3.6

    这次重新安装真的是遇到了好多坑啊 xff01 从github上靠过来按照之前的那篇文章流程做完之后 xff0c 还会有一些别的问题 首先是module里面的包都没有拷过来 xff0c 所以需要用git add将文件都添加过来 之后进行编译时
  • Visual Studio 2022 搭建GLFW OpenGL开发环境

    最近工作需要 需要写一个全景的视频播放器 网上搜了下大概解决方案是 ffmpeg 43 opengl b站有很多视频 按照视频 搭建了OpenGL的开发环境 先去GLFW的网站下载 windows平台的库文件 为什么使用GLFW 因为GLF
  • Pixhawk原生固件PX4之自定义MAVLink消息

    欢迎交流 个人 Gitter 交流平台 xff0c 点击直达 xff1a 本着想在PX4基础上加点什么东西的我又开始折腾了 xff0c 先尝试用串口加传感器通过QGC查看 xff0c 要是能在原固件上加点内容就棒哉了 先立Flag 自定义u
  • Pixhawk原生固件PX4之MAVLink协议解析

    欢迎交流 个人 Gitter 交流平台 xff0c 点击直达 xff1a PX4 对Mavlink 协议提供了良好的原生支持 该协议既可以用于地面站 Ground ControlStation GCS 对无人机 UAV 的控制 xff0c
  • GPS和RTK的基本知识

    RTK的基本原理介绍 xff0c RTK一般由基站 移动站以及数据链路组成 下文摘自天宝 Trimble 官网 原文链接 xff1a http www trimble com OEM ReceiverHelp V4 44 en What i
  • freeRTOS系统栈与任务栈

    中断过来之后 xff0c 由任务栈切换到main stack xff08 系统栈 xff09 任务栈保存 系统栈的地址范围为0xfede8000 4K xff0c 向下生长 xff0c 所以按照ld的定义 xff0c 0xfede9000
  • ROS下上位机和stm32单片机通信

    1 需要实例化串口节点建立监听者listener和发布之publisher 2 上位机通过游戏手柄发布自定义消息类型control int64 mode 手柄模式切换 int64 lidar 雷达数据 int64 gamePad x 控制前
  • 奇偶校验码

    偶校验为例 xff1a 例图中 xff0c 下划线为校验位 xff0c 其余为信息位 检错步骤如下 xff1a 1 根据信息位算出校验位 xff08 通过异或运算 xff1a 相同为0 xff0c 不同为1 xff09 xff1a 得出校验
  • C++中#define和const的区别

    一 define是预处理指令 xff08 pre processor directive xff09 而const是关键字 define用于给一些值定义名称 字符串 xff0c 这样定义的字符串在C C 43 43 中称为宏定义 xff0c
  • select函数实现tcp服务器与客户端随时收发

    服务器 include lt stdio h gt include lt sys types h gt include lt sys socket h gt include lt arpa inet h gt include lt neti
  • STM32F10X库函数逻辑

    define PERIPH BASE unsigned int 0x40000000 定义外围总线基地址 define APB1PERIPH BASE PERIPH BASE xff09 APB1总线开始与外围总线基地址 define AP
  • STM32F10x外部中断EXTI

    目录 一 EXTI是什么 xff1f 二 使用方法 1 功能框图及寄存器 2 库函数编程 总结 提示 xff1a 以下是本篇文章正文内容 xff0c 下面案例可供参考 一 EXTI是什么 xff1f EXTI External interr
  • QT + OpenGL + FFmpeg写的一个全景视频播放器

    临时被分配了一个任务 写一个C 43 43 版本的全景视频播放器 网上搜了搜 基于前辈的基础上 写的差不多了 测试视频源是用ffmpeg拉RTSP的流 最终是要嵌入到别的一个视频播放器模块 所以解码这块我不用太关注 只要实现渲染就可以了 效