Ubuntu v4l2 视频流花屏问题

2023-05-16

(原文此处有三张示例截图,展示视频流花屏现象)
之前解析 YUV、MJPEG 都正常,换了核心板(OpenCV 从 3.4.6 升级到 4.5.5)之后就不行了,Mat、CvMat、IplImage 之间的类型转换也失效了(OpenCV 4 移除了旧的 C API)。

//old version
#include "camera_thread.h"
#include <QByteArray>
#include <QImageReader>
#include <QBuffer>
#include <QImage>
#include <QDebug>
#include <QMutex>
#include <ctime>
#include <QString>
#include <QTimer>

#include <utils.h>
#include <fcntl.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <stdbool.h>
#include <unistd.h>

#include <linux/videodev2.h>

#include<opencv2/opencv.hpp>
#include<opencv2/highgui/highgui.hpp>
//#include "opencv2/core/types_c.h"

#include "device.h"
#include "qtimer.h"
#include "camera2.h"
#include "numeric"


int init_v4l2(void);
int v4l2_grab(void);

#define IMAGEWIDTH 640
#define IMAGEHEIGHT 480

#define FILE_VIDEO1 "/dev/video10"
#define TRUE 1
#define FALSE 0



char *deviceid;


using namespace cv;

QImageReader reader;



// Construct the capture thread with the loop-exit flag cleared,
// so run() will keep capturing until quit is requested.
video_thread::video_thread() : QThread(), quit_flag(false)
{
}

// Signal the capture loop to stop, then block until run() has returned.
video_thread::~video_thread()
{
    quit_flag = true;
    show_picture_flag = false;
    this->quit();
    this->wait();
}

// Capture loop: dequeue MJPEG frames from the V4L2 device, decode them
// with the OpenCV 4 C++ API, and emit each frame as a QImage.
//
// Fixes over the old version:
//  * no per-frame `new QImage` (the old code leaked one QImage per frame);
//  * drops the removed OpenCV-1.x C API (cvMat/cvDecodeImage/IplImage),
//    which no longer compiles or works correctly under OpenCV 4;
//  * decodes using buf.bytesused (the actual compressed size) instead of
//    pretending the MJPEG bitstream is a raw HxWx3 matrix;
//  * converts BGR -> RGB before building a Format_RGB888 QImage
//    (the old code emitted channel-swapped colors);
//  * checks the VIDIOC_DQBUF result instead of ignoring it.
void video_thread::run()
{
    int close_ret = 0;

    // Select the capture node for this channel.
    if (device_video == 1) {
        deviceid = "/dev/video0";
    } else {
        deviceid = "/dev/video2";
    }

    if (init_v4l2() == FALSE) {
        quit_flag = true;
        exit(1);
    }

    if (v4l2_grab() == FALSE) {
        quit_flag = true;
        exit(2);
    }

    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    isopen = true;

    while (!quit_flag)
    {
        msleep(1);
        // Display disabled: skip capture work this iteration.
        if (!show_picture_flag)
            continue;

        if (ioctl(fd, VIDIOC_DQBUF, &buf) == -1) {
            qDebug("CAM2 VIDIOC_DQBUF failed\n");
            continue;
        }
        buf.index = 0;

        // Wrap the compressed frame (bytesused bytes) and decode it.
        cv::Mat raw(1, (int)buf.bytesused, CV_8UC1, (void*)buffer);
        cv::Mat MImg = cv::imdecode(raw, cv::IMREAD_COLOR); // BGR

        if (MImg.empty()) {
            qDebug("CAM2 No img\n");
            ioctl(fd, VIDIOC_QBUF, &buf);
            continue;
        }

        cv::rotate(MImg, MImg, cv::ROTATE_180);
        cv::cvtColor(MImg, MImg, cv::COLOR_BGR2RGB);

        // Deep-copy before MImg (and the driver buffer) are reused.
        QImage frame = QImage((const unsigned char*)MImg.data,
                              MImg.cols, MImg.rows, (int)MImg.step,
                              QImage::Format_RGB888).copy();

        // Hand the buffer back to the driver as soon as we own a copy.
        ioctl(fd, VIDIOC_QBUF, &buf);

        if (device_video == 1) {
            emit image_data(frame);
        }
        if (device_video == 2) {
            emit image_data1(frame);
        }
    }

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    close_ret = ioctl(fd, VIDIOC_STREAMOFF, &type);

    close_ret = close(fd);
    if (close_ret == 0)
    {
        qDebug("close success!");
    }
}

// Open the capture device and negotiate a 640x480 MJPEG format.
// Returns TRUE on success, FALSE on any ioctl/open failure.
int video_thread::init_v4l2(void)
{
    fd = open(deviceid, O_RDWR);
    if (fd == -1)
        return FALSE;

    // Best effort: stop the descriptor leaking into exec'ed children.
    (void)fcntl(fd, F_SETFD, FD_CLOEXEC);

    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == -1)
        return FALSE;

    // Walk the driver's format list (results intentionally unused).
    fmtdesc.index = 0;
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    while (ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) != -1)
        fmtdesc.index++;

    // Ask for 640x480 progressive MJPEG.
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = IMAGEWIDTH;
    fmt.fmt.pix.height = IMAGEHEIGHT;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; //*************************V4L2_PIX_FMT_YUYV****************
    fmt.fmt.pix.field = V4L2_FIELD_NONE;

    if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1)
        return FALSE;

    // Read back what the driver actually accepted.
    if (ioctl(fd, VIDIOC_G_FMT, &fmt) == -1)
        return FALSE;

    return TRUE;
}





int video_thread::v4l2_grab(void){
    //struct v4l2_requestbuffers req = {0};
    //4  request for 4 buffers
    req.count = 1;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;

    if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1)
    {

        return FALSE;
    }
    //5 mmap for buffers
    buffer = (uchar*)malloc(req.count * sizeof(*buffer));
    if(!buffer){

        return FALSE;
    }
    unsigned int n_buffers;
    for(n_buffers = 0;n_buffers < req.count; n_buffers++){
    //struct v4l2_buffer buf = {0};
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = n_buffers;
    if(ioctl(fd, VIDIOC_QUERYBUF, &buf) == -1){

        return FALSE;
        }



    buffer = (uchar*)mmap (NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);

    if(buffer == MAP_FAILED){

        return FALSE;
        }

    }
    //6 queue
    for(n_buffers = 0;n_buffers <req.count;n_buffers++){
        buf.index = n_buffers;
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if(ioctl(fd,VIDIOC_QBUF,&buf)){
//            qDebug("query buffer error\n");
            return FALSE;
        }
    }
    //7 starting
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if(ioctl(fd,VIDIOC_STREAMON,&type) == -1){
//        qDebug("stream on error\n");
        return FALSE;
    }
    return TRUE;
}

于是换一种打开方式:
头文件:

#ifndef MajorImageProcessingThread_H
#define MajorImageProcessingThread_H

#include <QThread>
#include <QImage>
#include <QDebug>
#include <QGuiApplication>
#include <QScreen>
#include <QFile>
#include <QCoreApplication>

#include <QMainWindow>
#include <QVBoxLayout>
#include <QHBoxLayout>
#include <QPushButton>
#include <QLabel>
#include <QScrollArea>
#include <QDebug>
#include <QTimer>

#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <string.h>
#include <errno.h>
#include <sys/mman.h>
#include <sys/select.h>
#include <sys/time.h>
#include <linux/videodev2.h>
#include <stdbool.h>
#include <string>

#include "LPF_V4L2.h"
#define IMAGEWIDTH 640
#define IMAGEHEIGHT 480
// Worker thread that opens a V4L2 capture device, grabs MJPEG frames and
// emits them as QImage via SendMajorImageProcessing().
class MajorImageProcessingThread : public QThread
{
    Q_OBJECT
public:
    MajorImageProcessingThread();

    QImage majorImage;
    void stop();
    void init(int index);

    /* Frame descriptor: one captured (compressed) frame and its used length.
     * buf is sized for a worst-case raw 640x480 RGB frame. */
    typedef struct Frame_Buffer{
        unsigned char buf[IMAGEWIDTH*IMAGEHEIGHT*3];
        int length;
    }FrameBuffer;


    ///

//    static struct buffer{
//        void *start;
//        unsigned int length;
//    }*buffers;

    /* One mmap'ed driver buffer: mapped start address and mapped length.
     * The array itself is malloc'ed in StartVideoPrePare()/v4l2_init_buffer(). */
    struct buffer{
        void *start;
        unsigned int length;
    }*buffers;



    int buffers_length;


    // Scratch V4L2 structures reused across the ioctl calls below.
    struct v4l2_capability cap;
    struct v4l2_fmtdesc fmtdesc;
    struct v4l2_frmsizeenum frmsizeenum;
    struct v4l2_format format;
    struct v4l2_queryctrl  queryctrl;
    struct v4l2_requestbuffers reqbuf;
    struct v4l2_buffer buffer;
    struct v4l2_streamparm streamparm;

    // User-class control IDs, discovered by LPF_GetDevControlAll()
    // (-1 / unset means the device does not expose the control).
    __u32 brightness_id;    // brightness
    __u32 contrast_id ;  // contrast
    __u32 saturation_id ;    // saturation
    __u32 hue_id ;   // hue
    __u32 white_balance_temperature_auto_id; // auto white-balance temperature
    __u32 white_balance_temperature_id ; // white-balance temperature
    __u32 gamma_id ; // gamma
    __u32 power_line_frequency_id ;  // power-line frequency (anti-flicker)
    __u32 sharpness_id; // sharpness
    __u32 backlight_compensation_id ;    // backlight compensation
    // Camera-class (extended) control IDs
    __u32 exposure_auto_id ;  // auto exposure
    __u32 exposure_absolute_id ;



    char runningDev[15]="";   // device node currently streaming, e.g. "/dev/video0"
    char devName[15] ="";     // last resolved device node name
    char camName[32] ="";     // camera card name from VIDIOC_QUERYCAP
    char devFmtDesc[4]="" ;   // FourCC of the first enumerated pixel format

    int fd;                   // V4L2 device file descriptor (-1 = closed)
    int videoIsRun ;          // >0 while streaming, -1 otherwise
    unsigned char *rgb24 ;    // scratch buffer for YUV->RGB conversion
    int WIDTH, HEIGHT;        // resolution actually accepted by the driver


//    unsigned char *rgb24;
//    int videoIsRun;

    int majorindex;           // device index chosen via init(); -1 = not initialized
    bool stopped;             // set by stop() to end the run() loop


    bool isopen;
    QImage img;
    int device;


    int deviceIsOpen ;        // 1 after open() succeeds, -1 otherwise


    void init_value();//init
    int LPF_GetDeviceCount();
    char *LPF_GetDeviceName(int index);
    char *LPF_GetCameraName(int index);

    int LPF_StartRun(int index);
//    int LPF_GetFrame();
    int LPF_GetFrame(FrameBuffer *framebuf);

    int LPF_StopRun();

    char *LPF_GetDevFmtDesc(int index);

    int LPF_GetDevFmtWidth();
    int LPF_GetDevFmtHeight();
    int LPF_GetDevFmtSize();
    int LPF_GetDevFmtBytesLine();

    int LPF_GetResolutinCount();
    int LPF_GetResolutionWidth(int index);
    int LPF_GetResolutionHeight(int index);
    int LPF_GetCurResWidth();
    int LPF_GetCurResHeight();

    int init_format();

    int init_formatNew();
    int v4l2_init_buffer();
    void StartVideoPrePareNew();

    void StartVideoPrePare();
    void StartVideoStream();
    void EndVideoStream();
    void EndVideoStreamClear();
    int test_device_exist(char *devName);

    void LPF_GetDevControlAll();




    ///

protected:


    void run();

private:
//    volatile int majorindex;
//    volatile bool stopped;



signals:
    // Emitted once per grab attempt: the decoded image and the
    // LPF_GetFrame() result (0 = ok, -1 = timeout, errno otherwise).
    void SendMajorImageProcessing(QImage image, int result);
//    void SendMajorImageProcessing(QPixmap pix, int result);




};

#endif // MajorImageProcessingThread_H

源文件:

#include "majorimageprocessingthread.h"
#define IMAGEWIDTH 640
#define IMAGEHEIGHT 480
#define TRUE 1
#define FALSE 0

#define FRAMEBUFFER_COUNT 4


// Construct in the "not initialized" state: no device selected yet and
// the run loop not flagged to stop.
MajorImageProcessingThread::MajorImageProcessingThread()
    : stopped(false),
      majorindex(-1)
{
}

// Request the capture loop in run() to exit after its current iteration.
// NOTE(review): `stopped` is a plain bool written from another thread;
// consider std::atomic<bool> for a guaranteed-visible cross-thread flag.
void MajorImageProcessingThread::stop()
{
    stopped = true;
}

// Prepare the thread for device `index`: clear the stop flag, remember the
// index and reset all per-device state before the thread is started.
void MajorImageProcessingThread::init(int index)
{
    stopped = false;
    majorindex = index;
    qDebug()<<"MajorImageProcessingThread::init";
    init_value();
    // Fix: the second trace message was garbled ("MajorImageProceddddssing...").
    qDebug()<<"MajorImageProcessingThread::init done";

}

// Grab loop: fetch one MJPEG frame per iteration (~30 fps pacing), decode
// it into a QImage and emit it together with the LPF_GetFrame() result.
//
// Fixes: QPixmap is only safe in the GUI thread in Qt, so the old
// QPixmap::loadFromData() + toImage() round-trip in this worker thread was
// undefined behavior — the frame is now decoded directly via
// QImage::loadFromData(). Unused WV/HV locals removed.
void MajorImageProcessingThread::run()
{
    if(majorindex != -1)
    {
        while(!stopped)
        {
            msleep(1000/30);          // pace roughly at 30 fps
            FrameBuffer frame;
            QImage img;

            int ret = LPF_GetFrame(&frame);
            if(ret == 0)
            {
                // Decode the compressed frame straight into a QImage
                // (thread-safe, unlike QPixmap).
                img.loadFromData(frame.buf, frame.length);
            }
            emit SendMajorImageProcessing(img, ret);

        }
    }
}



///

// Reset all per-device state to "not opened / not running / not discovered".
void MajorImageProcessingThread::init_value()
{
    fd = -1;
    videoIsRun = -1;
    deviceIsOpen = -1;
    rgb24 = NULL;

    // User-class control IDs: -1 means "not discovered yet"
    // (LPF_GetDevControlAll() fills these in).
    brightness_id = -1;
    contrast_id = -1;
    saturation_id = -1;
    hue_id = -1;
    white_balance_temperature_auto_id = -1;
    white_balance_temperature_id = -1;
    gamma_id = -1;
    power_line_frequency_id = -1;
    sharpness_id = -1;
    backlight_compensation_id = -1;

    // Camera-class (extended) control IDs.
    exposure_auto_id = -1;
    exposure_absolute_id = -1;
}

/* Convert one YUV triple to RGB packed into an int via an in-memory byte
 * pun (so the packing follows host endianness, exactly as before).
 * Components are clamped to [0, 255]. */
static int convert_yuv_to_rgb_pixel(int y, int u, int v)
{
    unsigned int packed = 0;
    unsigned char *bytes = (unsigned char *)&packed;

    int r = y + (1.370705 * (v-128));
    int g = y - (0.698001 * (v-128)) - (0.337633 * (u-128));
    int b = y + (1.732446 * (u-128));

    r = r > 255 ? 255 : (r < 0 ? 0 : r);
    g = g > 255 ? 255 : (g < 0 ? 0 : g);
    b = b > 255 ? 255 : (b < 0 ? 0 : b);

    bytes[0] = r;
    bytes[1] = g;
    bytes[2] = b;
    return packed;
}

/* Convert a YUYV (YUV 4:2:2) buffer of width*height pixels into a packed
 * 24-bit RGB buffer. Each 4 input bytes (Y0 U Y1 V) yield two RGB pixels
 * that share the same chroma. Always returns 0. */
static int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
    unsigned int src;
    unsigned int dst = 0;

    for (src = 0; src < width * height * 2; src += 4)
    {
        // YUYV layout: Y0 at +0, U at +1, Y1 at +2, V at +3.
        int y0 = yuv[src + 0];
        int u  = yuv[src + 1];
        int y1 = yuv[src + 2];
        int v  = yuv[src + 3];

        unsigned int px = convert_yuv_to_rgb_pixel(y0, u, v);
        rgb[dst++] = px & 0x000000ff;
        rgb[dst++] = (px & 0x0000ff00) >> 8;
        rgb[dst++] = (px & 0x00ff0000) >> 16;

        px = convert_yuv_to_rgb_pixel(y1, u, v);
        rgb[dst++] = px & 0x000000ff;
        rgb[dst++] = (px & 0x0000ff00) >> 8;
        rgb[dst++] = (px & 0x00ff0000) >> 16;
    }
    return 0;
}

// Set the capture format, then request, mmap and queue the driver's frame
// buffers, and allocate the RGB scratch buffer. Must be called before
// StartVideoStream().
void MajorImageProcessingThread::StartVideoPrePare()
{
    init_format();
    qDebug("当前视频帧大小<%d * %d>, 颜色空间:%d\n", format.fmt.pix.width, format.fmt.pix.height,format.fmt.pix.colorspace);

    ///

    // Request 4 mmap-capable frame buffers from the driver.
    memset (&reqbuf, 0, sizeof (reqbuf));
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuf.memory = V4L2_MEMORY_MMAP;
    reqbuf.count = 4;

    if (-1 == ioctl (fd, VIDIOC_REQBUFS, &reqbuf)) {
        if (errno == EINVAL){
            qDebug()<<  "Video capturing or mmap-streaming is not supported";
            printf ("Video capturing or mmap-streaming is not supported\n");
        }

        else{
            perror ("VIDIOC_REQBUFS");
            qDebug()<<  "buffers is Video capturing or mmap-streaming is not supported";
        }
        qDebug()<<  "buffers error";
        return;
    }

    // Allocate the bookkeeping array for the mapped buffers.
    // NOTE(review): freed later in EndVideoStreamClear(); confirm it is
    // released on every teardown path.
//    buffers = calloc (reqbuf.count, sizeof (*buffers));
    buffers = static_cast<struct buffer*>(malloc(sizeof (*buffers)*reqbuf.count));//release
    if(buffers == NULL){
        perror("buffers is NULL");
        qDebug()<<  "buffers is NULL";
    }else
        assert (buffers != NULL);

    // Query each buffer's offset/length and mmap it into this process.
    int i;
    for (i = 0; i < (int)reqbuf.count; i++) {
        memset (&buffer, 0, sizeof (buffer));
        buffer.type = reqbuf.type;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.index = i;

        if (-1 == ioctl (fd, VIDIOC_QUERYBUF, &buffer)) {
            qDebug()<<  "perror VIDIOC_QUERYBUFVIDIOC_QUERYBUF buffers is NULL";
            perror ("VIDIOC_QUERYBUF");

            return;
        }

        buffers[i].length = buffer.length;

        buffers[i].start = mmap (NULL, buffer.length,
                                 PROT_READ | PROT_WRITE,
                                 MAP_SHARED,
                                 fd, buffer.m.offset);

        if (MAP_FAILED == buffers[i].start) {
            qDebug()<<  "perror MAP_FAILED MAP_FAILED is MAP_FAILED";
            perror ("mmap");
            return;
        }
    }



    // Queue every buffer so the driver can fill them once streaming starts.
    unsigned int ii;
    for(ii = 0; ii < reqbuf.count; ii++){
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.index = ii;
        if (ioctl(fd,VIDIOC_QBUF,&buffer)==-1){
            qDebug()<<  "perror VIDIOC_QBUF failed";
            perror("VIDIOC_QBUF failed");
        }
    }


    // Cache the accepted resolution and allocate the YUV->RGB scratch buffer.
    WIDTH = LPF_GetCurResWidth();
    HEIGHT = LPF_GetCurResHeight();
    qDebug()<<"WIDTH"<<WIDTH<<"  HEIGHT"<<HEIGHT;
    rgb24 = (unsigned char*)malloc(WIDTH*HEIGHT*3*sizeof(char));
    assert(rgb24 != NULL);
}

// Switch the driver into streaming mode (success path first).
void MajorImageProcessingThread::StartVideoStream()
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_STREAMON, &type) != -1) {
        qDebug()<<"VIDIOC_STREAMON success"<<rgb24;
    } else {
        qDebug()<<"VIDIOC_STREAMON failed"<<rgb24;
        perror("VIDIOC_STREAMON failed");
    }
}

// Ask the driver to stop capturing.
void MajorImageProcessingThread::EndVideoStream()
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_STREAMOFF, &type) == -1)
        perror("VIDIOC_STREAMOFF failed");
}

// Release everything StartVideoPrePare() allocated: unmap the driver
// buffers, free the bookkeeping array and the RGB scratch buffer.
//
// Fix: the `buffers` array itself is malloc()'ed in StartVideoPrePare()
// and was previously never freed (leaked once per stop/start cycle).
void MajorImageProcessingThread::EndVideoStreamClear()
{
    int i;
    for (i = 0; i < (int)reqbuf.count; i++)
        munmap (buffers[i].start, buffers[i].length);
    free(buffers);
    buffers = NULL;
    free(rgb24);
    rgb24 = NULL;
}

// Probe which controls the device supports and remember their IDs:
// first the user-class controls (brightness, contrast, ...), then the
// camera-class extended controls (exposure).
void MajorImageProcessingThread::LPF_GetDevControlAll()
{
    int i = 0;
    // User-class controls live in the contiguous ID range
    // [V4L2_CID_BASE, V4L2_CID_LASTP1]; query each one.
    for(i = V4L2_CID_BASE; i <= V4L2_CID_LASTP1; i++)
    {
        queryctrl.id = i;
        if(0 == ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl))
        {
            // Control exists but is disabled for this device: skip it.
            if(queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)
                continue;

            if(queryctrl.id == V4L2_CID_BRIGHTNESS)
                brightness_id = i;
            if(queryctrl.id == V4L2_CID_CONTRAST)
                contrast_id = i;
            if(queryctrl.id == V4L2_CID_SATURATION)
                saturation_id = i;
            if(queryctrl.id == V4L2_CID_HUE)
                hue_id = i;
            if(queryctrl.id == V4L2_CID_AUTO_WHITE_BALANCE)
                white_balance_temperature_auto_id = i;
            if(queryctrl.id == V4L2_CID_WHITE_BALANCE_TEMPERATURE)
                white_balance_temperature_id = i;
            if(queryctrl.id == V4L2_CID_GAMMA)
                gamma_id = i;
            if(queryctrl.id == V4L2_CID_POWER_LINE_FREQUENCY)
                power_line_frequency_id = i;
            if(queryctrl.id == V4L2_CID_SHARPNESS)
                sharpness_id = i;
            if(queryctrl.id == V4L2_CID_BACKLIGHT_COMPENSATION)
                backlight_compensation_id = i;
        }
        else
        {
            // EINVAL just means this ID is not implemented; keep scanning.
            if(errno == EINVAL)
                continue;
            qDebug()<<"VIDIOC_QUERYCTRL";
            perror("VIDIOC_QUERYCTRL");
            return;
        }
    }

    // Walk the camera control class with the NEXT_CTRL flag until the
    // enumeration leaves that class.
    queryctrl.id = V4L2_CTRL_CLASS_CAMERA | V4L2_CTRL_FLAG_NEXT_CTRL;
    while (0 == ioctl (fd, VIDIOC_QUERYCTRL, &queryctrl)) {
        if (V4L2_CTRL_ID2CLASS (queryctrl.id) != V4L2_CTRL_CLASS_CAMERA)
            break;

        if(queryctrl.id == V4L2_CID_EXPOSURE_AUTO)
            exposure_auto_id = queryctrl.id;
        if(queryctrl.id == V4L2_CID_EXPOSURE_ABSOLUTE)
            exposure_absolute_id = queryctrl.id;

        queryctrl.id |= V4L2_CTRL_FLAG_NEXT_CTRL;
    }
}

// Return 0 if devName exists and is a character device node, else -1.
int MajorImageProcessingThread::test_device_exist(char *devName)
{
    struct stat st;
    if (stat(devName, &st) == -1 || !S_ISCHR(st.st_mode))
        return -1;
    return 0;
}
// Probe /dev/video0 .. /dev/video99 and count how many exist.
int MajorImageProcessingThread::LPF_GetDeviceCount()
{
    int count = 0;
    char devname[15] = "";
    for (int i = 0; i < 100; i++)
    {
        sprintf(devname, "%s%d", "/dev/video", i);
        if (test_device_exist(devname) == 0)
            count++;
        memset(devname, 0, sizeof(devname));
    }
    return count;
}

//根据索引获取设备名称
// Resolve the index-th existing /dev/videoN node into devName and return it.
// If index is out of range devName ends up empty (or holds the last
// existing node probed) — identical to the previous behavior.
char* MajorImageProcessingThread::LPF_GetDeviceName(int index)
{
    memset(devName, 0, sizeof(devName));

    char devname[15] = "";
    int seen = 0;
    for (int i = 0; i < 100; i++)
    {
        sprintf(devname, "%s%d", "/dev/video", i);
        if (test_device_exist(devname) != 0)
        {
            memset(devname, 0, sizeof(devname));
            continue;
        }
        if (seen == index)
            break;
        seen++;
    }

    strcpy(devName, devname);
    qDebug()<<"LPF_GetDeviceName:"<<devName;

    return devName;
}

//根据索引获取摄像头名称
char* MajorImageProcessingThread::LPF_GetCameraName(int index)
{
    if(videoIsRun > 0)
        return "";

    memset(camName, 0, sizeof(camName));

    char devname[15] = "";
    strcpy(devname, LPF_GetDeviceName(index));

    int fd = open(devname, O_RDWR);
    if(ioctl(fd, VIDIOC_QUERYCAP, &cap) != -1)
    {
        strcpy(camName, (char *)cap.card);
    }else{
        qDebug()<<"error:LPF_GetCameraName";
    }
    close(fd);

    qDebug()<<"LPF_GetCameraName"<<camName;

    return camName;
}

//运行指定索引的视频
// Open and start streaming the index-th video device.
// Returns 0 on success, -1 if already running or the node cannot be opened.
int MajorImageProcessingThread::LPF_StartRun(int index)
{
    // Refuse to start while another stream is active.
    if(videoIsRun > 0 ) {//weiyl
        qDebug()<<"LPF_StopRun";
//        LPF_StopRun();
        return -1;
    }

    // Resolve and open the index-th /dev/videoN node.
    char *devname = LPF_GetDeviceName(index);
    fd = open(devname, O_RDWR);
    if(fd == -1)
        return -1;

    deviceIsOpen = 1;

    // Query camera name, supported controls and the first format's FourCC.
    LPF_GetCameraName(index);
    LPF_GetDevControlAll();
    LPF_GetDevFmtDesc(index);

    // Negotiate the format and request/map/queue the frame buffers...
    StartVideoPrePare();
//    StartVideoPrePareNew();

    // ...then switch the driver into streaming mode.
    StartVideoStream();
    strcpy(runningDev, devname);
    videoIsRun = 1;


    return 0;
}

// Wait for and copy one captured frame into framebuf.
// Returns 0 on success (also when the video is not running, as before),
// -1 on select() timeout, or errno on select/ioctl failure.
//
// Fix: the memcpy previously used buffer.bytesused unchecked — a frame
// larger than framebuf->buf would overflow the destination. The copy is
// now clamped to the destination capacity.
int MajorImageProcessingThread::LPF_GetFrame(FrameBuffer *framebuf)
{
    if(videoIsRun > 0)
    {
        fd_set fds;
        struct timeval tv;
        int r;

        FD_ZERO (&fds);
        FD_SET (fd, &fds);

        /* Wait up to 7 s for a frame: 0 = timeout, -1 = select error. */
        tv.tv_sec = 7;
        tv.tv_usec = 0;

        r = select (fd + 1, &fds, NULL, NULL, &tv);

        if (0 == r)
            return -1;
        else if(-1 == r)
            return errno;

        memset(&buffer, 0, sizeof(buffer));
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buffer.memory = V4L2_MEMORY_MMAP;
        if (ioctl(fd, VIDIOC_DQBUF, &buffer) == -1) {
            perror("GetFrame VIDIOC_DQBUF Failed");
            qDebug()<<"VIDIOC_DQBUF Failed";
            return errno;
        }

        // bytesused is how much of the mmap'ed buffer the driver filled;
        // clamp it so an oversized frame cannot overflow framebuf->buf.
        size_t n = buffer.bytesused;
        if (n > sizeof(framebuf->buf))
            n = sizeof(framebuf->buf);
        memcpy(framebuf->buf, (char *)buffers[buffer.index].start, n);
        framebuf->length = (int)n;

        // Re-queue the buffer for the driver.
        if (ioctl(fd, VIDIOC_QBUF, &buffer) < 0) {
            perror("GetFrame VIDIOC_QBUF Failed");
            return errno;
        }

        return 0;
    }

    return 0;
}

// Stop streaming (if running), release buffers, reset state and close the
// device node. Returns 0 on success, -1 if close() fails.
int MajorImageProcessingThread::LPF_StopRun()
{
    if (videoIsRun > 0)
    {
        EndVideoStream();
        EndVideoStreamClear();
    }

    memset(runningDev, 0, sizeof(runningDev));
    videoIsRun = -1;
    deviceIsOpen = -1;

    return (close(fd) == 0) ? 0 : -1;
}

// Return the FourCC string of pixel format `index` in devFmtDesc
// (left zeroed when the enumeration fails).
char *MajorImageProcessingThread::LPF_GetDevFmtDesc(int index)
{
    memset(devFmtDesc, 0, sizeof(devFmtDesc));
    fmtdesc.index = index;
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) != -1)
    {
        // A FourCC is four ASCII bytes packed little-endian into a u32.
        __u32 pf = fmtdesc.pixelformat;
        char fmt[5] = "";
        sprintf(fmt, "%c%c%c%c",
                (__u8)(pf & 0XFF),
                (__u8)((pf >> 8) & 0XFF),
                (__u8)((pf >> 16) & 0XFF),
                (__u8)((pf >> 24) & 0XFF));

        strncpy(devFmtDesc, fmt, 4);
    }

    return devFmtDesc;
}

//获取图像的格式属性相关
// Query the current capture format and return its width, or -1 on failure.
int MajorImageProcessingThread::LPF_GetDevFmtWidth()
{
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_G_FMT, &format) != -1)
        return format.fmt.pix.width;
    perror("GetDevFmtWidth:");
    return -1;
}
// Query the current capture format and return its height, or -1 on failure.
int MajorImageProcessingThread::LPF_GetDevFmtHeight()
{
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_G_FMT, &format) != -1)
        return format.fmt.pix.height;
    perror("GetDevFmtHeight:");
    return -1;
}
// Query the current capture format and return its image size in bytes,
// or -1 on failure.
int MajorImageProcessingThread::LPF_GetDevFmtSize()
{
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_G_FMT, &format) != -1)
        return format.fmt.pix.sizeimage;
    perror("GetDevFmtSize:");
    return -1;
}
// Query the current capture format and return its bytes-per-line stride,
// or -1 on failure.
int MajorImageProcessingThread::LPF_GetDevFmtBytesLine()
{
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_G_FMT, &format) != -1)
        return format.fmt.pix.bytesperline;
    perror("GetDevFmtBytesLine:");
    return -1;
}

//设备分辨率相关
// Count the frame sizes supported by the first enumerated pixel format,
// or return -1 when the format enumeration itself fails.
int MajorImageProcessingThread::LPF_GetResolutinCount()
{
    fmtdesc.index = 0;
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) == -1)
        return -1;

    frmsizeenum.pixel_format = fmtdesc.pixelformat;

    int count = 0;
    frmsizeenum.index = count;
    while (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) != -1)
    {
        count++;
        frmsizeenum.index = count;
    }
    return count;
}
// Width of the index-th frame size of the first pixel format, -1 on failure.
int MajorImageProcessingThread::LPF_GetResolutionWidth(int index)
{
    fmtdesc.index = 0;
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) == -1)
        return -1;

    frmsizeenum.pixel_format = fmtdesc.pixelformat;
    frmsizeenum.index = index;
    if (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) == -1)
        return -1;
    return frmsizeenum.discrete.width;
}
// Height of the index-th frame size of the first pixel format, -1 on failure.
int MajorImageProcessingThread::LPF_GetResolutionHeight(int index)
{
    fmtdesc.index = 0;
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) == -1)
        return -1;

    frmsizeenum.pixel_format = fmtdesc.pixelformat;
    frmsizeenum.index = index;
    if (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) == -1)
        return -1;
    return frmsizeenum.discrete.height;
}
// Width of the currently configured capture resolution, -1 on failure.
int MajorImageProcessingThread::LPF_GetCurResWidth()
{
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    return (ioctl(fd, VIDIOC_G_FMT, &format) == -1) ? -1 : format.fmt.pix.width;
}
// Height of the currently configured capture resolution, -1 on failure.
int MajorImageProcessingThread::LPF_GetCurResHeight()
{
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    return (ioctl(fd, VIDIOC_G_FMT, &format) == -1) ? -1 : format.fmt.pix.height;
}

// Negotiate 640x480 MJPEG with the driver, verify the driver did not
// substitute another format, fetch streaming parameters and cache the
// accepted resolution. Returns 0 on success, -1 on failure.
int MajorImageProcessingThread::init_format()
{
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    format.fmt.pix.width = IMAGEWIDTH;
    format.fmt.pix.height = IMAGEHEIGHT;
    format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
    format.fmt.pix.field = V4L2_FIELD_NONE;

    if (ioctl(fd, VIDIOC_S_FMT, &format) < 0) {
        qDebug("set format failed\n");
        return -1;
    }

    /* The driver may silently substitute another format; reject non-MJPEG. */
    if (V4L2_PIX_FMT_MJPEG != format.fmt.pix.pixelformat) {
        qDebug("Error: the device does not support MJPEG format!\n");
        return -1;
    }

    /* Fetch the streaming parameters (frame-timing capabilities). */
    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_G_PARM, &streamparm) < 0) {
        qDebug("get parm failed\n");
        return -1;
    }

    /* Cache the resolution the driver actually accepted. */
    WIDTH = LPF_GetCurResWidth();
    HEIGHT = LPF_GetCurResHeight();

    LPF_GetResolutinCount(); //review here

    return 0;
}


// Newer format-negotiation path: print device capabilities, enumerate the
// supported formats/sizes/frame rates, then request 640x480 MJPEG and
// 30 fps. Returns 0 on success, -1 on failure.
//
// Fix: cam_fmts[] holds only 10 entries, but the enumeration loop indexed
// it with the unbounded fmtdesc.index — a device exposing more than 10
// pixel formats overflowed the array (and the print loop could read past
// the end). Both loops are now bounded.
int MajorImageProcessingThread::init_formatNew(){

    // Verify this node is actually a video capture device.
    if (!(V4L2_CAP_VIDEO_CAPTURE & cap.capabilities)) {
        printf("Error:No capture video device!\n");
        return -1;
    }
    printf("Device Information:\n");
    printf("driver name: %s\ncard name: %s\n",cap.driver,cap.card);
    printf("------------------------------------\n");

    /* Enumerate every pixel format the camera supports. */
    struct v4l2_fmtdesc fmtdesc;
    fmtdesc.index = 0;
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    printf("Support Format:\n");

    /* A pixel format together with its human-readable description. */
    typedef struct camera_format {
        unsigned char description[32];
        unsigned int pixelformat;
    } cam_fmt;
    cam_fmt cam_fmts[10];
    memset(&cam_fmts,0,sizeof(cam_fmts));
    while (0 == ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)) {
        /* Fix: bound the store — >10 formats previously overflowed cam_fmts[]. */
        if (fmtdesc.index < 10) {
            cam_fmts[fmtdesc.index].pixelformat = fmtdesc.pixelformat;
            memcpy(cam_fmts[fmtdesc.index].description, fmtdesc.description,sizeof(fmtdesc.description));
        }
        fmtdesc.index++;
    }

    /* Print each format with its frame sizes and frame rates. */
    struct v4l2_frmsizeenum frmsize;
    struct v4l2_frmivalenum frmival;
    int i;

    frmsize.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    frmival.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    /* Fix: bound the print loop at the array size as well. */
    for(i = 0; i < 10 && cam_fmts[i].pixelformat; i++){
        printf("format<0x%x>, description<%s>\n", cam_fmts[i].pixelformat, cam_fmts[i].description);

        /* Enumerate the capture resolutions this format supports. */
        frmsize.index = 0;
        frmsize.pixel_format = cam_fmts[i].pixelformat;
        frmival.pixel_format = cam_fmts[i].pixelformat;
        while (0 == ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize)) {

            printf("size<%d*%d> ",frmsize.discrete.width,frmsize.discrete.height);
            frmsize.index++;

            /* Enumerate the frame intervals for this resolution. */
            frmival.index = 0;
            frmival.width = frmsize.discrete.width;
            frmival.height = frmsize.discrete.height;
            while (0 == ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmival)) {

                printf("<%dfps>", frmival.discrete.denominator / frmival.discrete.numerator);
                frmival.index++;
            }
            printf("\n");
        }
        printf("\n");
    }
    printf("-------------------------------------\n");

    // Request 640x480 MJPEG.
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    format.fmt.pix.width  = IMAGEWIDTH;
    format.fmt.pix.height = IMAGEHEIGHT;
    format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
    if (0 > ioctl(fd, VIDIOC_S_FMT, &format)) {
        printf("set format failed\n");
        return -1;
    }
    printf("set format success\n");

    /* The driver may substitute another format; reject non-MJPEG. */
    if (V4L2_PIX_FMT_MJPEG != format.fmt.pix.pixelformat) {
        printf("Error: the device does not support MJPEG format!\n");
        return -1;
    }
    /* Report the frame size the driver actually accepted. */
    printf("当前视频帧大小<%d * %d>, 颜色空间:%d\n", format.fmt.pix.width, format.fmt.pix.height,format.fmt.pix.colorspace);

    /* Fetch the streaming parameters. */
    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (0 > ioctl(fd, VIDIOC_G_PARM, &streamparm)) {
        printf("get parm failed\n");
        return -1;
    }

    /* Ask for 30 fps when the driver supports per-frame timing. */
    if (V4L2_CAP_TIMEPERFRAME & streamparm.parm.capture.capability) {
        streamparm.parm.capture.timeperframe.numerator = 1;
        streamparm.parm.capture.timeperframe.denominator = 30;//30fps
        if (0 > ioctl(fd, VIDIOC_S_PARM, &streamparm)) {
            printf("Error:device do not support set fps\n");
            return -1;
        }
    }

    return 0;
}

// Request FRAMEBUFFER_COUNT mmap buffers, map them into buffers[] and
// queue them all. Returns 0 on success, -1 on failure.
//
// Fixes: the local v4l2_requestbuffers was used without zeroing it first
// (V4L2 requires the reserved fields to be cleared), and the malloc()
// result was used unchecked.
int MajorImageProcessingThread::v4l2_init_buffer(){
    struct v4l2_requestbuffers reqbuf;
    memset(&reqbuf, 0, sizeof(reqbuf));     // clear reserved fields
    reqbuf.count = FRAMEBUFFER_COUNT;       // number of frame buffers
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuf.memory = V4L2_MEMORY_MMAP;
    if (0 > ioctl(fd, VIDIOC_REQBUFS, &reqbuf)) {
        printf("request buffer failed\n");
        return -1;
    }
    printf("request buffer success\n");

    /* Map each driver buffer and record it in buffers[]. */
    struct v4l2_buffer buf;
    unsigned int n_buffers = 0;
    buffers = static_cast<struct buffer*>(malloc(sizeof (*buffers)*reqbuf.count));//release
    if (buffers == NULL) {
        printf("malloc buffers failed\n");
        return -1;
    }

    for (n_buffers = 0; n_buffers < FRAMEBUFFER_COUNT; n_buffers++) {
        memset(&buf,0,sizeof(buf));
        buf.index = n_buffers;
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if(0 > ioctl(fd, VIDIOC_QUERYBUF, &buf)){
            printf("VIDIOC_QUERYBUF failed\n");
            return -1;
        }

        buffers[n_buffers].start = mmap(NULL, buf.length,PROT_READ | PROT_WRITE, MAP_SHARED,fd, buf.m.offset);
        buffers[n_buffers].length = buf.length;

        if (MAP_FAILED == buffers[n_buffers].start) {
            printf("mmap error\n");
            return -1;
        }
    }
    printf("memory map success\n");

    /* Queue every buffer; set type/memory explicitly rather than relying
     * on leftovers from the loop above. */
    for (buf.index = 0; buf.index < FRAMEBUFFER_COUNT; buf.index++) {
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if (0 > ioctl(fd, VIDIOC_QBUF, &buf)) {
            printf("入队失败\n");
            return -1;
        }
    }

    return 0;
}



void MajorImageProcessingThread::StartVideoPrePareNew()
{
    /* Configure the capture format, then request/map the driver buffers.
     * Both helpers return -1 on failure; previously their return codes were
     * ignored, so a failed setup silently fell through and later frame grabs
     * produced garbage. Bail out early instead. */
    if (init_formatNew() < 0) {
        printf("StartVideoPrePareNew: init_formatNew failed\n");
        return;
    }
    if (v4l2_init_buffer() < 0) {
        printf("StartVideoPrePareNew: v4l2_init_buffer failed\n");
        return;
    }

    WIDTH = LPF_GetCurResWidth();
    HEIGHT = LPF_GetCurResHeight();
    /* RGB888 destination buffer: 3 bytes per pixel. */
    rgb24 = (unsigned char*)malloc(WIDTH*HEIGHT*3*sizeof(char));
//    qDebug()<<"rgb24"<<rgb24;
    assert(rgb24 != NULL);
}




cam_recv头文件:

void ReceiveMajorImageL(QImage image, int result);
void ReceiveMajorImageR(QImage image, int result);

cam_recv源文件:

//相关部分
if(video_camera_left->deviceIsOpen || video_camera_right->deviceIsOpen){
        connect(video_camera_left, SIGNAL(SendMajorImageProcessing(QImage, int)),
                this, SLOT(ReceiveMajorImageL(QImage, int)));




///

void cam_recv::ReceiveMajorImageL(QImage image, int result)
{
    // result == -1 means the grab timed out — VIDIOC_DQBUF would block
    // forever, so tear the left camera stream down and tell the user.
    if(result == -1)
    {
        video_camera_left->stop();
        video_camera_left->wait();
        video_camera_left->LPF_StopRun();
        ui->cameraLeft->clear();
        ui->cameraLeft->setText("获取设备图像超时!");
    }

    // Nothing to display — keep whatever the label currently shows.
    if(image.isNull())
        return;

    ui->cameraLeft->clear();
    switch(result)
    {
    case 0:     // Success: reset error counters and paint the frame.
        err11 = err19 = 0;
        if(image.isNull())
            ui->cameraLeft->setText("画面丢失!");
        else
            ui->cameraLeft->setPixmap(
                QPixmap::fromImage(image.scaled(ui->cameraLeft->size())));
        break;
    case 11:    // EAGAIN: resource temporarily unavailable — warn after 10 in a row.
        if(++err11 == 10)
        {
            ui->cameraLeft->clear();
            ui->cameraLeft->setText("设备已打开,但获取视频失败!\n请尝试切换USB端口后断开重试!");
        }
        break;
    case 19:    // ENODEV: the device disappeared — warn after 10 in a row.
        if(++err19 == 10)
        {
            ui->cameraLeft->clear();
            ui->cameraLeft->setText("设备丢失!");
        }
        break;
    }
}

void cam_recv::ReceiveMajorImageR(QImage image, int result)
{
    // result == -1 means the grab timed out — VIDIOC_DQBUF would block
    // forever, so tear the right camera stream down and tell the user.
    if(result == -1)
    {
        video_camera_right->stop();
        video_camera_right->wait();
        video_camera_right->LPF_StopRun();

        ui->cameraRight->clear();
        ui->cameraRight->setText("获取设备图像超时!");
    }

    if(!image.isNull())
    {
        ui->cameraRight->clear();
        switch(result)
        {
        case 0:     //Success
            err11 = err19 = 0;
            if(image.isNull())
                ui->cameraRight->setText("画面丢失!");
            else{
                // BUG FIX: this previously scaled to ui->cameraLeft->size(),
                // a copy-paste from the left-camera slot — the right frame
                // must be scaled to the RIGHT label's size.
                ui->cameraRight->setPixmap(QPixmap::fromImage(image.scaled(ui->cameraRight->size())));
            }

            break;
        case 11:    //Resource temporarily unavailable — warn after 10 in a row.
            err11++;
            if(err11 == 10)
            {
                ui->cameraRight->clear();
                ui->cameraRight->setText("设备已打开,但获取视频失败!\n请尝试切换USB端口后断开重试!");
            }
            break;
        case 19:    //No such device — warn after 10 in a row.
            err19++;
            if(err19 == 10)
            {
                ui->cameraRight->clear();
                ui->cameraRight->setText("设备丢失!");
            }
            break;
        }
    }
}

但是依然出现视频流花屏的问题,但是用系统自带的相机软件打开后,再运行程序又是正常的了。
debug 许久,一个个句柄排查,发现句柄顺序流程都是对的,最终终于发现花屏的原因了:
convert_yuv_to_rgb_buffer 解析数据流有问题导致花屏,改成:

//将buffer_infos中已使用的字节数copy到framebuf中
            memcpy(framebuf->buf,(char *)buffers[buffer.index].start,buffer.bytesused);//bytesused 表示buf中已经使用的字节数
            framebuf->length = buffer.bytesused;

QPixmap 再转化QImage就可以了:

// Pull one captured frame and let Qt decode it into a displayable image.
FrameBuffer frame;
QImage img;
QPixmap pix;
int ret = LPF_GetFrame(&frame);   // 0 on success — presumably fills frame.buf/frame.length with one MJPEG frame; confirm against LPF_GetFrame
if(ret == 0)
{

    int WV = LPF_GetCurResWidth();   // current capture width (unused here)
    int HV = LPF_GetCurResHeight();  // current capture height (unused here)
    // QPixmap::loadFromData auto-detects the compressed format (JPEG for
    // an MJPEG frame) and decodes it — this replaces the broken manual
    // convert_yuv_to_rgb_buffer path that caused the corrupted picture.
    pix.loadFromData(frame.buf, frame.length);
    img= pix.toImage();
}
本文内容由网友自发贡献,版权归原作者所有,本站不承担相应法律责任。如您发现有涉嫌抄袭侵权的内容,请联系:hwhale#tublm.com(使用前将#替换为@)

Ubuntu v4l2 视频流花屏问题 的相关文章

随机推荐

  • 【Jmeter】跨线程组共享数据

    背景 在性能测试中 xff0c 经常会遇见使用多线程组的情况 xff0c 例如用户登陆成功后 xff0c 对某个查询接口使用500个线程来进行压测 xff0c 这个时候就需要使用多线程组 设计说明 首先 xff0c 需要使用setUp Th
  • 【playwright】使用pytest-playwright执行用例时频繁打开浏览器

    背景说明 安装pytest playwright之后 xff0c 执行多个用例频繁打开浏览器 xff0c 而且无法给对应的fixture的scope设置为session 原因说明 pytest playwright定义了fixture的sc
  • (转)注册JNI函数的两种方式

    原文地址 http blog csdn net wwj 748 article details 52347341 前言 前面介绍过如何实现在Android Studio中制作我们自己的so库 xff0c 相信大家看过之后基本清楚如何在And
  • 【playwright】使用playwright实现拖动功能

    思路说明 使用locator定位到要拖动滑块元素 xff0c 如元素名叫ele 获取元素ele的bounding box含4分属性值 xff1a x xff0c y xff0c width xff0c height 把鼠标移动到元素ele的
  • 【playwright】pytest-playwright与allure结合,生成报告带有图片和录屏

    依赖的环境 需要安装allure命令行工具以及allure pytest插件 pytest playwright需要升级0 3 0版本 xff0c 改版本支持如下参数 xff1a Playwright browser 61 chromium
  • 【性能测试】缓慢的磁盘问题分析套路

    该文基于 性能之巅 xff1a 洞悉系统 企业与云计算 的1 9 1 缓慢的磁盘章节调整而来 用户问题 Scott 是一家中型公司里的系统管理员 数据库团队报告了一个支持ticket xff08 工单 xff09 xff0c 抱怨他们有一台
  • 【虚拟机】win 10的virtual box打开虚拟机报VERR_MEM_INIT_FAILED

    错误信息 解决办法 进入到 控制面板 程序 程序和功能 xff1a 选择 启用或关闭 Windows 功能 xff1a 重启电脑 xff0c 重新打开virtual下的虚拟机 xff0c 能够正常启动 xff1a
  • 【playwright】pytest-playwright增加代理服务选项

    playwright的代理设置参数为 xff1a https playwright dev python docs network http proxy 对pytest playwright py进行如下调整 xff0c 在browser
  • 【镜像源】分享三个镜像链接

    兰州大学 xff1a https mirror lzu edu cn 南京大学 xff1a https mirror nju edu cn docker官方镜像库 xff1a https github com docker library
  • 【Docker】基于系统iso构建docker基础镜像

    1 搭建本地yum源 1 xff09 将镜像通过光盘或U盘挂载到 mnt目录下 mount media kylin xxx iso mnt kylin 2 xff09 修改 etc yum repo d kylin x86 64 local
  • 【数据库】Oracle 12透明网关查询postgresql表某些字段不展示问题处理

    前置条件 1 对应版本列表 服务 版本 Oracle 12C 12 2 0 1 0 Heterogeneous Agent 12 2 0 1 0 odbc 2 3 1 unixODBC 2 3 6 psqlodbc 9 2 24 查看命令
  • 【机器学习】lightGBM是什么?

    梯度提升法 Gradient Boosting Machine 简记 GBM 以非参数方法 不假设函数形式 估计基函数 并在 函数空间 使用 梯度下降 进行近似求解 非参数方法包括K近邻法 决策树 以及基于决策树的装袋法 随机森林与提升法等
  • 【HttpRunner】学习准备

    1 安装python 3 7及以上版本 xff1a 2 安装fastapi xff1a pip install fastapi all 3 把如下代码粘贴复制到main py文件中 xff1a span class token keywor
  • Android中锁定文件的方法

    androidSDK中并没有锁定文件相关的api 但是android是基于linux操作系统的 linux比较底层 灵活性也更大 为了实现锁定文件的效果 大概有以下几种办法 用chmod命令修改文件读写权限利用linux中的多线程独占锁 启
  • 远程控制Ubuntu

    远程控制Ubuntu 在Ubuntu上安装team viewer或者向日葵 xff0c 进行远程控制 xff0c 这里记录采用team viewer方式的配置过程 xff0c 向日葵等远程控制类似 安装Ubuntu 官方下载Ubuntu系统
  • 信号降噪方法

    傅里叶变换 只能获取一段信号总体上包含哪些频率的成分 xff0c 但是对各成分出现的时刻并无所知 对非平稳过程 xff0c 傅里叶变换有局限性 短时傅里叶变换 xff08 Short time Fourier Transform STFT
  • C++ 带通滤波

    Butterworth Filter Coefficients The following files are for a library of functions to calculate Butterworth filter coeff
  • python之collections

    collections是日常工作中的重点 高频模块 xff0c 包含了一些特殊的容器 xff0c 针对Python内置的容器 xff0c 例如list dict set和tuple xff0c 常用类型有 xff1a namedtuple
  • git 指定下载文件,目录

    1 创建路径 mkdir gitfile cd lt 路径 gt eg xff1a cd home gitfile 2 创建一个空的本地仓库 git init 3 连接远程仓库GitHub git remote add f origin l
  • Ubuntu v4l2 视频流花屏问题

    之前用的好好解析YUV、MJPEG,换了个核心板就不好使了,opencv3.4.6 >>> opencv4.5.5,Mat、cvMat、IplImage 的类型转换也不好