移植opencv到开发板,摄像头在开发板6410上的采集使用(2)

时间:2022-08-05 19:36:21

在虚拟机搭建好系统后,真正的使用才刚刚开始。

在使用摄像头的时候,首先看自己的摄像头插上去是显示jpeg的还是yuv的

YUV 格式的摄像头需要经过 UVC 采集→转 QImage→转 IplImage 这样的流程才能使用;JPEG 格式的很多人不会用,以为没用,其实最好用,显示一点都不卡。YUV 格式的会有点卡。

 

我用的是 YUV 格式的摄像头;以前朋友用的是 JPEG 格式的。

代码是用的网上的一个代码修改的

1. [文件] videodevice.h


#ifndef VIDEODEVICE_H#define VIDEODEVICE_H#include <string.h>#include <stdlib.h>#include <errno.h>#include <fcntl.h> #include <sys/ioctl.h>#include <sys/mman.h>#include <asm/types.h>#include <linux/videodev2.h>#include <QString>#include <QObject>#define CLEAR(x) memset(&(x), 0, sizeof(x))classVideoDevice :publicQObject{     Q_OBJECTpublic:    VideoDevice(QString dev_name);    //VideoDevice();   int open_device();    intclose_device();    intinit_device();    intstart_capturing();    intstop_capturing();    intuninit_device();    intget_frame(void**,size_t*);    intunget_frame(); private:    intinit_mmap();      structbuffer    {        void* start;        size_tlength;    };    QString dev_name;    intfd;    buffer* buffers;    unsignedintn_buffers;    intindex;  signals:     voiddisplay_error(QString);  };  #endif // VIDEODEVICE_H

2. [文件] videodevice.cpp 

  #include "videodevice.h"VideoDevice::VideoDevice(QString dev_name){     this->dev_name = dev_name;    this->fd = -1;    this->buffers = NULL;    this->n_buffers = 0;    this->index = -1;  }  intVideoDevice::open_device(){     fd = open(dev_name.toStdString().c_str(), O_RDWR/*|O_NONBLOCK*/, 0);   // fd = open(dev_name.toStdString().c_str(), O_RDWR|O_NONBLOCK, 0);    if(-1 == fd)    {        emit display_error(tr("open: %1").arg(QString(strerror(errno))));        return-1;    }    return0;}  intVideoDevice::close_device(){     if(-1 == close(fd))    {        emit display_error(tr("close: %1").arg(QString(strerror(errno))));        return-1;    }    return0;}  intVideoDevice::init_device(){     v4l2_capability cap;    v4l2_cropcap cropcap;    v4l2_crop crop;    v4l2_format fmt;      if(-1 == ioctl(fd, VIDIOC_QUERYCAP, &cap))    {        if(EINVAL ==errno)        {            emit display_error(tr("%1 is no V4l2 device").arg(dev_name));        }        else        {            emit display_error(tr("VIDIOC_QUERYCAP: %1").arg(QString(strerror(errno))));        }        return-1;    }      if(!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))    {        emit display_error(tr("%1 is no video capture device").arg(dev_name));        return-1;    }      if(!(cap.capabilities & V4L2_CAP_STREAMING))    {        emit display_error(tr("%1 does not support streaming i/o").arg(dev_name));        return-1;    }      CLEAR(cropcap);      cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;      if(0 == ioctl(fd, VIDIOC_CROPCAP, &cropcap))    {        CLEAR(crop);        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;        crop.c = cropcap.defrect;          if(-1 == ioctl(fd, VIDIOC_S_CROP, &crop))        {            if(EINVAL ==errno)            {//                emit display_error(tr("VIDIOC_S_CROP not supported"));           }             else            {                emit display_error(tr("VIDIOC_S_CROP: %1").arg(QString(strerror(errno))));                return-1;            }        }    
}    else    {        emit display_error(tr("VIDIOC_CROPCAP: %1").arg(QString(strerror(errno))));        return-1;    }      CLEAR(fmt);      fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;    fmt.fmt.pix.width = 640;    fmt.fmt.pix.height = 480;    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;      if(-1 == ioctl(fd, VIDIOC_S_FMT, &fmt))    {        emit display_error(tr("VIDIOC_S_FMT").arg(QString(strerror(errno))));        return-1;    }      if(-1 == init_mmap())    {        return-1;    }      return0;}  intVideoDevice::init_mmap(){     v4l2_requestbuffers req;    CLEAR(req);      req.count = 4;    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;    req.memory = V4L2_MEMORY_MMAP;      if(-1 == ioctl(fd, VIDIOC_REQBUFS, &req))    {        if(EINVAL ==errno)        {            emit display_error(tr("%1 does not support memory mapping").arg(dev_name));            return-1;        }        else        {            emit display_error(tr("VIDIOC_REQBUFS %1").arg(QString(strerror(errno))));            return-1;        }    }      if(req.count < 2)    {        emit display_error(tr("Insufficient buffer memory on %1").arg(dev_name));        return-1;    }      buffers = (buffer*)calloc(req.count,sizeof(*buffers));      if(!buffers)    {        emit display_error(tr("out of memory"));        return-1;    }      for(n_buffers = 0; n_buffers < req.count; ++n_buffers)    {        v4l2_buffer buf;        CLEAR(buf);          buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;        buf.memory = V4L2_MEMORY_MMAP;        buf.index = n_buffers;          if(-1 == ioctl(fd, VIDIOC_QUERYBUF, &buf))        {            emit display_error(tr("VIDIOC_QUERYBUF: %1").arg(QString(strerror(errno))));            return-1;        }          buffers[n_buffers].length = buf.length;        buffers[n_buffers].start =                mmap(NULL,// start anywhere                     buf.length,                      PROT_READ | PROT_WRITE,                     MAP_SHARED,          
           fd, buf.m.offset);          if(MAP_FAILED == buffers[n_buffers].start)        {            emit display_error(tr("mmap %1").arg(QString(strerror(errno))));            return-1;        }    }    return0;  }  intVideoDevice::start_capturing(){     unsignedinti;     for(i = 0; i < n_buffers; ++i)    {        v4l2_buffer buf;        CLEAR(buf);          buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;        buf.memory =V4L2_MEMORY_MMAP;        buf.index = i;//        fprintf(stderr, "n_buffers: %d\n", i);        if(-1 == ioctl(fd, VIDIOC_QBUF, &buf))        {            emit display_error(tr("VIDIOC_QBUF: %1").arg(QString(strerror(errno))));            return-1;        }    }      v4l2_buf_type type;    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;      if(-1 == ioctl(fd, VIDIOC_STREAMON, &type))    {        emit display_error(tr("VIDIOC_STREAMON: %1").arg(QString(strerror(errno))));        return-1;    }    return0;}  intVideoDevice::stop_capturing(){     v4l2_buf_type type;    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;      if(-1 == ioctl(fd, VIDIOC_STREAMOFF, &type))    {        emit display_error(tr("VIDIOC_STREAMOFF: %1").arg(QString(strerror(errno))));        return-1;    }    return0;}  intVideoDevice::uninit_device(){     unsignedinti;     for(i = 0; i < n_buffers; ++i)    {        if(-1 == munmap(buffers[i].start, buffers[i].length))        {            emit display_error(tr("munmap: %1").arg(QString(strerror(errno))));            return-1;        }      }    free(buffers);    return0;}  intVideoDevice::get_frame(void**frame_buf,size_t* len){     v4l2_buffer queue_buf;    CLEAR(queue_buf);      queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;    queue_buf.memory = V4L2_MEMORY_MMAP;      if(-1 == ioctl(fd, VIDIOC_DQBUF, &queue_buf))    {        switch(errno)        {        caseEAGAIN://            perror("dqbuf");           return -1;         caseEIO:            return-1 ;        default:            emit display_error(tr("VIDIOC_DQBUF: %1").arg(QString(strerror(errno))));      
      return-1;        }    }      *frame_buf = buffers[queue_buf.index].start;    *len = buffers[queue_buf.index].length;    index = queue_buf.index;      return0;  }  intVideoDevice::unget_frame(){     if(index != -1)    {        v4l2_buffer queue_buf;        CLEAR(queue_buf);          queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;        queue_buf.memory = V4L2_MEMORY_MMAP;        queue_buf.index = index;          if(-1 == ioctl(fd, VIDIOC_QBUF, &queue_buf))        {            emit display_error(tr("VIDIOC_QBUF: %1").arg(QString(strerror(errno))));            return-1;        }        return0;    }    return-1;}

3. [文件] processImage.h 


#ifndef PROCESSIMAGE_H#define PROCESSIMAGE_H#include <QtGui>#include "videodevice.h"classProcessImage :publicQWidget{     Q_OBJECTpublic:    ProcessImage(QWidget *parent=0);    ~ProcessImage(); private:    QPainter *painter;    QLabel *label;    QImage *frame;    //QPixmap *frame;   QTimer *timer;     intrs;    uchar *pp;    uchar * p;    unsignedintlen;     intconvert_yuv_to_rgb_pixel(inty,int u, int v);     intconvert_yuv_to_rgb_buffer(unsignedchar*yuv, unsignedchar*rgb, unsignedintwidth, unsignedintheight);     VideoDevice *vd; privateslots:    voidpaintEvent(QPaintEvent *);    voiddisplay_error(QString err);    };  #endif

4. [文件] processImage.cpp


#include <QtGui>#include "processImage.h"#include "videodevice.h"extern"C"{ #include <stdio.h>#include <stdlib.h>}ProcessImage::ProcessImage(QWidget *parent):QWidget(parent){     pp = (unsignedchar*)malloc(640 * 480/*QWidget::width()*QWidget::height()*/* 3 *sizeof(char));    painter =newQPainter(this);    frame =newQImage(pp,640,480,QImage::Format_RGB888);   // frame = new QPixmap(640,320);   label = newQLabel();    vd =newVideoDevice(tr("/dev/video0"));      connect(vd, SIGNAL(display_error(QString)),this,SLOT(display_error(QString)));    rs = vd->open_device();    if(-1==rs)    {        QMessageBox::warning(this,tr("error"),tr("open /dev/dsp error"),QMessageBox::Yes);         vd->close_device();    }      rs = vd->init_device();    if(-1==rs)    {        QMessageBox::warning(this,tr("error"),tr("init failed"),QMessageBox::Yes);         vd->close_device();    }      rs = vd->start_capturing();    if(-1==rs)    {        QMessageBox::warning(this,tr("error"),tr("start capture failed"),QMessageBox::Yes);         vd->close_device();    }      if(-1==rs)    {        QMessageBox::warning(this,tr("error"),tr("get frame failed"),QMessageBox::Yes);         vd->stop_capturing();    }      timer =newQTimer(this);    connect(timer,SIGNAL(timeout()),this,SLOT(update()));    timer->start(30);      QHBoxLayout *hLayout =newQHBoxLayout();    hLayout->addWidget(label);    setLayout(hLayout);    setWindowTitle(tr("Capture"));}   ProcessImage::~ProcessImage(){     rs = vd->stop_capturing();    rs = vd->uninit_device();    rs = vd->close_device();}  voidProcessImage::paintEvent(QPaintEvent *){     rs = vd->get_frame((void**)&p,&len);    convert_yuv_to_rgb_buffer(p,pp,640,480/*QWidget::width(),QWidget::height()*/);    frame->loadFromData((uchar *)pp,/*len*/640 * 480 * 3 *sizeof(char)); //    painter->begin(this);//    painter->drawImage(0,0,*frame);//    painter->end();//    rs = vd->unget_frame();  // frame->load("./img3.jpg");     
label->setPixmap(QPixmap::fromImage(*frame,Qt::AutoColor));   // label->show();   rs = vd->unget_frame();    // label->drawFrame();    //    QPixmap *pixImage = new QPixmap();//    pixImage->loadFromData((uchar *)pp,sizeof(pp),0,Qt::AutoColor);//    QPainter painter(this);//    painter.begin(this);//    painter.drawPixmap(0,0,QWidget::width(),QWidget::height(),*pixImage);//    painter.end();} voidProcessImage::display_error(QString err){     QMessageBox::warning(this,tr("error"), err,QMessageBox::Yes); }  /*yuv格式转换为rgb格式*/intProcessImage::convert_yuv_to_rgb_buffer(unsignedchar*yuv, unsignedchar*rgb, unsignedintwidth, unsignedintheight) {  unsignedintin, out = 0; unsignedintpixel_16; unsignedcharpixel_24[3]; unsignedintpixel32;  inty0, u, y1, v; for(in = 0; in < width * height * 2; in += 4) {  pixel_16 =   yuv[in + 3] << 24 |   yuv[in + 2] << 16 |   yuv[in + 1] <<  8 |   yuv[in + 0];  y0 = (pixel_16 & 0x000000ff);  u  = (pixel_16 & 0x0000ff00) >>  8;  y1 = (pixel_16 & 0x00ff0000) >> 16;  v  = (pixel_16 & 0xff000000) >> 24;  pixel32 = convert_yuv_to_rgb_pixel(y0, u, v);  pixel_24[0] = (pixel32 & 0x000000ff);  pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;  pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;  rgb[out++] = pixel_24[0];  rgb[out++] = pixel_24[1];  rgb[out++] = pixel_24[2];  pixel32 = convert_yuv_to_rgb_pixel(y1, u, v);  pixel_24[0] = (pixel32 & 0x000000ff);  pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;  pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;  rgb[out++] = pixel_24[0];  rgb[out++] = pixel_24[1];  rgb[out++] = pixel_24[2]; } return0;}  intProcessImage::convert_yuv_to_rgb_pixel(inty,int u, int v) {  unsignedintpixel32 = 0; unsignedchar*pixel = (unsignedchar*)&pixel32; intr, g, b; r = y + (1.370705 * (v-128)); g = y - (0.698001 * (v-128)) - (0.337633 * (u-128)); b = y + (1.732446 * (u-128)); if(r > 255) r = 255; if(g > 255) g = 255; if(b > 255) b = 255; if(r < 0) r = 0; if(g < 0) g = 0; if(b < 0) b = 0; pixel[0] = r * 220 / 256; pixel[1] = g * 220 / 256; pixel[2] = 
b * 220 / 256; returnpixel32;} /*yuv格式转换为rgb格式*/

5. [文件] main.cpp 


#include <QtGui>
#include "processImage.h"

// Entry point: create the capture widget at the camera resolution and
// hand control to the Qt event loop.
int main(int argc, char *argv[])
{
    QApplication app(argc, argv);

    ProcessImage viewer;
    viewer.resize(640, 480);
    viewer.show();

    return app.exec();
}

可以复制也可以到我的资源里去找有的我上传了字库的移植,还有这个代码。

先在虚拟机上跑起来一般没问题 记得设备号在虚拟机为/dev/video0 至于虚拟机用摄像头看我别的博文

跑好了再交叉编译记得改设备号为/dev/video2具体的看你自己插拔摄像头在dev里是哪个。

然后有的人直接可以使用了,但是有的一堆问题的我就是

第一个问题::s3c-fimc: invalid target size

把这句话添加进去就没有问题了
fmt.fmt.pix.priv=1;

这一句加在 videodevice.cpp 的 init_device() 里设置 fmt.fmt.pix 各字段的地方(即调用 VIDIOC_S_FMT 之前),那里差不多全是这种赋值语句。

第二个问题就是 :: segmentation fault

我够倒霉的全遇到了 解决花了一周。最后没办法我单步调试的

移植opencv到开发板,摄像头在开发板6410上的采集使用(2)

单步到 in=155644 时就会出错。我检查了缓冲区大小的计算,发现问题出在 640*480 这个分辨率上:转换循环容易越界访问内存。改成 320*240 之后循环不会超过 155644,问题就解决了。

当时还有点小激动呢。图片小了你可以用opencv再放大的。

最后开发板图像出来了 这里不拍照随便拿个把

移植opencv到开发板,摄像头在开发板6410上的采集使用(2)需要的到我资源里下载代码 字库什么的