Browse Source

add project driver_camera_rk3399.

yuchuli 4 years ago
parent
commit
02622bbb06

+ 47 - 0
src/driver/driver_camera_rk3399/driver_camera_rk3399.pro

@@ -0,0 +1,47 @@
+
+# qmake project for driver_camera_rk3399: a console (non-GUI) Qt application
+# that captures camera frames and publishes them as rawpic protobuf messages.
+QT -= gui
+
+CONFIG += c++11 console
+CONFIG -= app_bundle
+
+# The following define makes your compiler emit warnings if you use
+# any feature of Qt which has been marked deprecated (the exact warnings
+# depend on your compiler). Please consult the documentation of the
+# deprecated API in order to know how to port your code away from it.
+DEFINES += QT_DEPRECATED_WARNINGS
+
+# You can also make your code fail to compile if you use deprecated APIs.
+# In order to do so, uncomment the following line.
+# You can also select to disable deprecated APIs only up to a certain version of Qt.
+#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000    # disables all the APIs deprecated before Qt 6.0.0
+
+# NOTE(review): SOURCES/HEADERS use ../../include while the .pri includes below
+# use ../../../include — verify both depths are correct relative to
+# src/driver/driver_camera_rk3399/.
+SOURCES += main.cpp \
+    ../../include/msgtype/rawpic.pb.cc
+
+
+!include(../../../include/common.pri ) {
+    error( "Couldn't find the common.pri file!" )
+}
+
+!include(../../../include/ivopencv.pri ) {
+    error( "Couldn't find the ivopencv.pri file!" )
+}
+
+!include(../../../include/ivprotobuf.pri ) {
+    error( "Couldn't find the ivprotobuf.pri file!" )
+
+}
+
+
+# Board-specific OpenCV 3 install location on the RK3399 target.
+INCLUDEPATH += /home/linaro/opencv3
+
+
+# Uncommenting this would define USE_OPENCV4 on aarch64 hosts, enabling the
+# OpenCV4-only code paths in main.cpp (legacy constants header + GStreamer open).
+#contains(QMAKE_HOST.arch, aarch64){
+#    DEFINES += USE_OPENCV4
+#}
+
+
+HEADERS += \
+    ../../include/msgtype/rawpic.pb.h
+
+
+

+ 11 - 0
src/driver/driver_camera_rk3399/driver_camera_rk3399.xml

@@ -0,0 +1,11 @@
+<xml>	
+	<!-- Runtime configuration for driver_camera_rk3399; read via iv::xmlparam in main.cpp. -->
+	<node name="driver_camera_rk3399">
+		<!-- msgname: modulecomm shared-memory channel the frames are published on -->
+		<param name="msgname" value="usbpic" />
+		<!-- Requested capture resolution -->
+		<param name="width" value = "1280" />
+		<param name="height" value="720" />
+		<!-- index: /dev/video index used by the non-OpenCV4 capture path -->
+		<param name="index" value="0" />
+		<!-- bcompress: 1 = JPEG-encode each frame, 0 = publish raw pixel data -->
+		<param name="bcompress" value="1" />
+		<!-- NOTE(review): main.cpp's OpenCV4 path builds "v4l2src device=/dev/" + cameraname,
+		     so "/dev/video5" would expand to "/dev//dev/video5"; the non-OpenCV4 build
+		     ignores this value and uses "index" instead. Verify the intended value
+		     (probably "video5"). -->
+		<param name="cameraname" value="/dev/video5" />
+		<!-- compquality: JPEG quality 0-100, used when bcompress=1 -->
+		<param name="compquality" value="60" />
+	</node>
+</xml>

+ 428 - 0
src/driver/driver_camera_rk3399/main.cpp

@@ -0,0 +1,428 @@
+#include <QCoreApplication>
+#include <thread>
+
+#include <opencv2/opencv.hpp>
+#include <opencv2/core.hpp>
+
+#ifdef USE_OPENCV4
+#include "opencv2/imgcodecs/legacy/constants_c.h"   //OpenCV4 use this line
+#include <opencv2/imgproc/types_c.h>   //OpenCV4 use this line
+#endif
+
+#include <opencv2/videoio.hpp>
+
+#include <QTime>
+
+#include "modulecomm.h"
+#include "rawpic.pb.h"
+#include "xmlparam.h"
+
+#include "ivversion.h"
+
+// Handle of the modulecomm send channel created in main(); used by VideoThread
+// to publish serialized frames.
+void * gpa;
+
+// Monotonically increasing frame counter stamped into each rawpic message.
+int gindex = 0;
+
+// Capture parameters; these defaults are overwritten from the XML file in main().
+int gwidth = 1920;
+int gheight = 1080;
+int gcamindex = 1;      // /dev/video index used by the non-OpenCV4 capture path
+int gcompress = 1;      // 1 = JPEG-compress frames, 0 = send raw pixels
+int gcompquality = 95;  // JPEG quality, 0-100
+std::string gstrcamera = "";   // camera device name; only used in the USE_OPENCV4 branch
+
+std::string gmsgname = "usbpic";  // modulecomm channel name
+
+// NOTE(review): appears unused anywhere in this file — verify before removing.
+std::string gstrdevname = "video0";
+
+
+// Compatibility shim: OpenCV 4 removed the legacy CV_* constants, so map the
+// ones used below onto their cv:: equivalents when building against OpenCV 4.
+// NOTE(review): the FPS/FOURCC mappings lack the cv:: qualifier, and
+// CV_IMWRITE_JPEG_QUALITY (used in VideoThread) is not covered by this shim —
+// confirm a USE_OPENCV4 build actually compiles.
+#ifndef CV_CAP_PROP_FRAME_WIDTH
+#define CV_CAP_PROP_FRAME_WIDTH cv::CAP_PROP_FRAME_WIDTH
+#define CV_CAP_PROP_FRAME_HEIGHT cv::CAP_PROP_FRAME_HEIGHT
+#define CV_CAP_PROP_FPS CAP_PROP_FPS
+#define CV_CAP_PROP_FOURCC CAP_PROP_FOURCC
+#define CV_BGR2GRAY cv::COLOR_BGR2GRAY
+#endif
+
+// Capture loop for a dedicated worker thread (parameter n is unused).
+// Grabs frames with cv::VideoCapture, optionally JPEG-encodes them, wraps
+// each in an iv::vision::rawpic protobuf and publishes the serialized bytes
+// on the modulecomm channel `gpa`. Runs forever; never returns.
+void VideoThread(int n)
+{
+    QTime xT;
+    xT.start();
+    // Fixed serialization buffer reused for every frame for the thread's lifetime.
+    // NOTE(review): nSize is never checked against nserbufsize before
+    // SerializeToArray below — verify a frame can never exceed 20 MB.
+    int nserbufsize = 20000000;
+
+    char * strser = new char[nserbufsize];
+
+    cv::VideoCapture capture;
+
+//If OpenCV4 use this
+//    std::string gst_f = "v4l2src device=/dev/video0 ! video/x-raw, width=(int)"+std::to_string(gwidth)+", height=(int)"+std::to_string(gheight)+" ! videoconvert ! appsink";
+    // GStreamer pipeline string; only actually used in the USE_OPENCV4 branch below.
+    std::string gstr_f = "v4l2src device=/dev/video"+std::to_string(gcamindex)+" ! video/x-raw, width=(int)"+std::to_string(gwidth)+", height=(int)"+std::to_string(gheight)+" ! videoconvert ! appsink";
+//    capture.open(gstr_f);
+
+#ifdef USE_OPENCV4
+    // NOTE(review): "/dev/" is prepended to gstrcamera here, but the shipped
+    // XML sets cameraname="/dev/video5", which would yield "/dev//dev/video5".
+    if(gstrcamera.length() > 2)
+    {
+        gstr_f = "v4l2src device=/dev/"+gstrcamera+" ! video/x-raw, width=(int)"+std::to_string(gwidth)+", height=(int)"+std::to_string(gheight)+" ! videoconvert ! appsink";
+        capture.open(gstr_f);
+    }
+    else
+        capture.open(gcamindex);
+
+#else
+
+        capture.open(gcamindex);
+#endif
+    // Request the configured capture resolution from the driver.
+    capture.set(CV_CAP_PROP_FRAME_WIDTH,gwidth);
+    capture.set(CV_CAP_PROP_FRAME_HEIGHT,gheight);
+    while(true)
+    {
+        cv::Mat mat1;
+        capture>>mat1;   // grab the next frame (empty Mat if none available)
+
+
+
+        if(!mat1.empty())
+        {
+
+            QTime timex;
+            timex.start();
+//            int nlen = 2+3*sizeof(int) + mat1.rows*mat1.cols*mat1.elemSize();
+//            if((nlen+20)>nserbufsize)
+//            {
+//                nserbufsize = nlen+1000;
+//                delete strser;
+//                strser = new char[nserbufsize];
+//            }
+//            qint64 time = QDateTime::currentMSecsSinceEpoch();
+//            int npos = 0;
+//            memcpy(strser+npos,&time,sizeof(qint64));npos = npos+sizeof(qint64);
+// //           char * strout = new char[nlen];
+//            memcpy(strser+npos,&gindex,sizeof(int));npos = npos+sizeof(int);
+//            memcpy(strser+npos,"mb",2);npos = npos+2;
+//            memcpy(strser+npos,&mat1.rows,sizeof(int));npos = npos+sizeof(int);
+//            memcpy(strser+npos,&mat1.cols,sizeof(int));npos = npos+sizeof(int);
+//            int ntype = mat1.type();
+//            memcpy(strser+npos,&ntype,sizeof(int));npos = npos+sizeof(int);
+//            memcpy(strser+npos,mat1.data,mat1.rows*mat1.cols*mat1.elemSize());npos = npos+mat1.rows*mat1.cols*mat1.elemSize();
+
+
+//            iv::modulecomm::ModuleSendMsg(gpa,strser,npos);
+//            qDebug("timex is %d",timex.elapsed());
+
+
+            iv::vision::rawpic pic;
+
+            qint64 time = QDateTime::currentMSecsSinceEpoch();
+            QTime xg;
+            xg.start();
+            pic.set_time(time);
+            pic.set_index(gindex);gindex++;   // frame sequence number
+
+
+            // Record the Mat geometry so the receiver can reconstruct the image.
+            pic.set_elemsize(mat1.elemSize());
+            pic.set_width(mat1.cols);
+            pic.set_height(mat1.rows);
+            pic.set_mattype(mat1.type());
+
+//            cv::Mat matcompress;
+//            cv::resize(mat1,matcompress,cv::Size(640,360));
+
+
+            std::vector<int> param = std::vector<int>(2);
+            param[0] = CV_IMWRITE_JPEG_QUALITY;
+            param[1] = gcompquality; // default(95) 0-100
+            std::vector<unsigned char> buff;
+            if(gcompress == 1)
+            {
+                // JPEG path: type 2 marks a compressed payload.
+                cv::imencode(".jpg", mat1, buff, param);
+//                cv::imencode(".jpg", matcompress, buff, param);
+                pic.set_picdata(buff.data(),buff.size());
+                buff.clear();
+                pic.set_type(2);
+            }
+            else
+            {
+                // Raw path: type 1 marks uncompressed pixel data.
+                pic.set_picdata(mat1.data,mat1.rows*mat1.cols*mat1.elemSize());
+//                pic.set_picdata(matcompress.data,matcompress.rows*matcompress.cols*matcompress.elemSize());
+                pic.set_type(1);
+            }
+
+
+//            qDebug("buff size is %d",buff.size());
+
+
+            // NOTE(review): ByteSize() is the legacy protobuf API (newer
+            // releases use ByteSizeLong()) — fine for the pinned version.
+            int nSize = pic.ByteSize();
+
+  //          char * strser = new char[nSize];
+            bool bser = pic.SerializeToArray(strser,nSize);
+            qDebug("pac time is %d size is %d",xg.elapsed(),nSize);
+
+    //        iv::modulecomm::ModuleSendMsg(gpa,str.data(),str.length());
+            if(bser)iv::modulecomm::ModuleSendMsg(gpa,strser,nSize);
+            else
+            {
+                std::cout<<"usbcamera "<< "serialize error. "<<std::endl;
+            }
+ //           delete strser;
+ //           delete strout;
+
+
+
+   //         qDebug("%d have a pic. len is %d",xT.elapsed(),str.length());
+//            qDebug("now is %d",xT.elapsed());
+//            gw->mMutex.lock();
+//            mat1.copyTo(gw->msrcImage);
+//            gw->mMutex.unlock();
+
+//            gMutexInput.lock();
+//            mat1.copyTo(gsrcImage);
+//            g_CapIndex++;
+//            gMutexInput.unlock();
+        }
+        else
+        {
+ //           qDebug("no data.");
+            // No frame available: back off briefly instead of busy-spinning.
+            std::this_thread::sleep_for(std::chrono::milliseconds(1));
+        }
+    }
+}
+
+// Standalone V4L2 multi-planar capture experiment for the RK3399 camera:
+// opens /dev/video5 directly, mmaps driver buffers, converts NV12 frames to
+// BGR and displays them, then dumps the last buffer to ./my.yuyv.
+// NOTE(review): not invoked anywhere in this file, and it relies on WIDTH,
+// HEIGHT, FMT, COUNT plus the open/ioctl/mmap headers, none of which are
+// visible here — confirm they come from an included header.
+int test()
+{
+    unsigned char *datas[COUNT];            // start address of each mmap'ed buffer
+    int ret, i;
+    int fd;
+
+    /* Step 1: open the camera device node */
+    fd = open("/dev/video5", O_RDWR);       // check the actual device name on the board
+    if (-1 == fd)
+    {
+        perror("open /dev/video0");         // NOTE(review): stale message — the device opened is /dev/video5
+        return -1;
+    }
+
+    /* Step 2: configure the capture frame format */
+    // NOTE(review): `format` is not zero-initialized before VIDIOC_S_FMT, so
+    // unset fields contain stack garbage.
+    struct v4l2_format format;
+    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; //V4L2_BUF_TYPE_VIDEO_CAPTURE;  // capture type: multi-planar video capture
+    format.fmt.pix.width = WIDTH;               // frame width
+    format.fmt.pix.height = HEIGHT;             // frame height
+    format.fmt.pix.pixelformat = FMT;           // pixel format
+    format.fmt.pix_mp.field = V4L2_FIELD_NONE; // progressive (non-interlaced) scan
+    ret = ioctl(fd, VIDIOC_S_FMT, &format);     // apply the format (Set)
+    if (-1 == ret)
+    {
+        perror("ioctl VIDIOC_S_FMT");
+        close(fd);
+        return -2;
+    }
+
+    /* Step 3: read the format back to verify it was accepted */
+    ret = ioctl(fd, VIDIOC_G_FMT, &format);     // Get
+    if (-1 == ret)
+    {
+        perror("ioctl VIDIOC_G_FMT");
+        close(fd);
+        return -3;
+    }
+    if (format.fmt.pix.pixelformat == FMT)
+    {
+        printf("ioctl VIDIOC_S_FMT sucessful\n");
+    }
+    else
+    {
+        printf("ioctl VIDIOC_S_FMT failed\n");
+    }
+
+    // Enumerate and log every pixel format the driver supports (debug aid).
+    v4l2_fmtdesc formatDescriptions;
+    formatDescriptions.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    for (i = 0; true; i++)
+    {
+        formatDescriptions.index = i;
+        if (ioctl(fd, VIDIOC_ENUM_FMT, &formatDescriptions) == 0) {
+            qDebug("[native_camera]  %2d: %s 0x%08X 0x%X",
+                        i,  formatDescriptions.description, formatDescriptions.pixelformat, formatDescriptions.flags);
+        } else {
+            // No more formats available
+            break;
+        }
+    }
+
+
+
+    /* Step 4: ask the driver to allocate image buffers */
+    struct v4l2_requestbuffers reqbuf;
+    reqbuf.count = COUNT;                       // number of buffers
+    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;  // buffer type
+    reqbuf.memory = V4L2_MEMORY_MMAP;           // buffers will be accessed via mmap
+    ret = ioctl(fd, VIDIOC_REQBUFS, &reqbuf);
+    if (-1 == ret)
+    {
+        perror("ioctl VIDIOC_REQBUFS");
+        close(fd);
+        return -4;
+    }
+
+    /* Step 5: query each buffer and mmap it into this process */
+
+    struct v4l2_plane  planes[2];
+
+    // NOTE(review): `buff` is not memset to zero before VIDIOC_QUERYBUF;
+    // reserved fields should be cleared first.
+    struct v4l2_buffer buff;
+    buff.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    buff.memory = V4L2_MEMORY_MMAP;
+ //   buff.memory   = V4L2_MEMORY_USERPTR;
+    buff.index    = 0;
+    buff.length   = 2;		// 1 — number of elements in the planes[] array
+    buff.m.planes = &planes[0];
+
+    for (i = 0; i < COUNT; i++)
+    {
+        buff.index = i;
+
+//        ret = ioctl(fd, VIDIOC_QUERYBUF, &buff);
+        ret = ioctl(fd, VIDIOC_QUERYBUF, &buff);
+        if (-1 == ret)                          // query failed
+        {
+            break;
+        }
+
+        /* Print the buffer's length and offset */
+        printf("buf[%d]: len = %d offset: %d\n", i, buff.length, buff.m.offset);
+
+        /* Map each buffer into the current process */
+ //       datas[i] = (unsigned char *)mmap(NULL, buff.length, PROT_READ, MAP_SHARED, fd, buff.m.offset);
+        // Multi-planar API: length/offset come from the plane, not the buffer itself.
+        datas[i] = (unsigned char *)mmap(NULL, buff.m.planes->length, PROT_READ, MAP_SHARED, fd, buff.m.planes->m.mem_offset);
+//        datas[i] = (unsigned char *)mmap(NULL, buff.length, PROT_READ, MAP_SHARED, fd, buff.m.planes->m.mem_offset);
+        if (MAP_FAILED == datas[i])             // mapping failed
+        {
+            perror("mmap failed");
+            return -5;
+        }
+
+        /* Enqueue the mapped buffer into the driver's capture queue */
+        ret = ioctl(fd, VIDIOC_QBUF, &buff);    // Queue
+        if (-1 == ret)
+        {
+            perror("VIDIOC_QBUF");
+            return -6;
+        }
+    }
+
+    /* Step 6: start streaming */
+    int on = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;       // stream type to start
+    ret = ioctl(fd, VIDIOC_STREAMON, &on);      // start the camera stream
+    if (-1 == ret)
+    {
+        perror("ioctl VIDIOC_STREAMON");
+        return -7;
+    }
+
+    /* Step 7: dequeue filled buffers, convert and display them */
+
+        IplImage *srcImg;
+
+
+
+        // Legacy OpenCV 1.x image whose pixel storage backs the cv::Mat below.
+        srcImg = cvCreateImage(cvSize(WIDTH, HEIGHT), 8, 3);
+        cvZero(srcImg);
+        cv::Mat destination(cvSize(WIDTH, HEIGHT), CV_8UC3,srcImg->imageData) ;//(cvSize(WIDTH, HEIGHT), CV_8UC3);//, frame.imageData);;
+
+    int k;
+    int nlen = WIDTH * HEIGHT *3;
+    // NOTE(review): strRGB is allocated but never used or freed below.
+    char * strRGB = new char[nlen];
+    for(k=0;k<100000;k++)
+    {
+        // NOTE(review): `buf`/`planest` are prepared here but never passed to
+        // an ioctl — the dequeue below reuses `buff` (whose m.planes still
+        // points at `planes` from Step 5). Verify which struct was intended.
+        struct v4l2_buffer buf;
+        memset(&buf, 0, sizeof(buf));
+        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+        buf.memory = V4L2_MEMORY_MMAP;
+        buf.index    = k;
+        buf.length   = 2;		// 1
+        struct v4l2_plane  planest[2];
+
+        buf.m.planes = &planest[0];
+        buff.index = 0;
+        buff.length = 2;
+        ret = ioctl(fd, VIDIOC_DQBUF, &buff);       // Dequeue
+        if (-1 == ret)
+        {
+            perror("ioctl VIDIOC_DQUF");
+            return -8;
+        }
+
+        qDebug("index is %d",buff.index);
+        QTime x;
+        x.start();
+  //      nv122rgb((char *)datas[buff.index],strRGB,nlen/3);
+
+ //       nv122rgb((char *)datas[buff.index],(unsigned char *)srcImg->imageData,nlen/3);
+
+
+        // Convert the NV12 frame (HEIGHT*3/2 rows of Y then interleaved UV)
+        // to BGR into `destination`, which shares storage with srcImg.
+        cv::cvtColor(cv::Mat(HEIGHT * 3 / 2, WIDTH, CV_8U,(char *)datas[buff.index]), destination,
+                     cv::COLOR_YUV2BGR_NV12);
+ //       clipNv12ToJpgFile("/home/linaro/code/common/testv4l2/1.jpg",datas[buff.index],WIDTH,HEIGHT);
+ //       NV12_To_RGB(WIDTH,HEIGHT,datas[buff.index],(unsigned char *)srcImg->imageData);
+        qDebug(" 2rgb tims is %d",x.elapsed());
+        ioctl(fd, VIDIOC_QBUF, &buff);          // hand the buffer back to the driver
+        qDebug("%d:%ld",k,QDateTime::currentMSecsSinceEpoch());
+
+        cvNamedWindow("YUV2RGB", 1);            // NOTE(review): recreated every iteration; could be hoisted out of the loop
+        IplImage * img;
+//        *img = destination;
+ //       *img = IplImage(destination);
+
+//        cvShowImage("YUV2RGB", img);
+  //      cvShowImage("YUV2RGB", &destination);
+        cvShowImage("YUV2RGB", srcImg);
+        cvWaitKey(1);
+    }
+
+    /* Step 8: save the last dequeued buffer's data to a file */
+    FILE *fl;
+    fl = fopen("./my.yuyv", "w");
+    if (NULL == fl)
+    {
+        fprintf(stderr, "open write file failed.");
+    }
+    // NOTE(review): for MPLANE buffers the payload size is per-plane
+    // (planes[i].bytesused), not buff.bytesused — confirm the byte count.
+    // Also missing on this path: VIDIOC_STREAMOFF, munmap of datas[], and
+    // delete[] strRGB.
+    fwrite(datas[buff.index], buff.bytesused, 1, fl);
+
+    fclose(fl);                                 // close the output file
+    close(fd);                                  // close the device
+    return 0;
+}
+
+
+
+// Entry point: loads capture parameters from an XML file (path from argv[1],
+// else <appdir>/driver_camera_usb.xml), registers the modulecomm send channel
+// and starts the capture thread, then enters the Qt event loop.
+int main(int argc, char *argv[])
+{
+    // NOTE(review): the version tag and the default XML file name below still
+    // say "driver_camera_usb" although this project is driver_camera_rk3399
+    // (whose config ships as driver_camera_rk3399.xml) — verify intended.
+    showversion("driver_camera_usb");
+    QCoreApplication a(argc, argv);
+
+    QString strpath = QCoreApplication::applicationDirPath();
+//    QString apppath = strpath;
+    if(argc < 2)
+        strpath = strpath + "/driver_camera_usb.xml";
+    else
+        strpath = argv[1];
+    std::cout<<strpath.toStdString()<<std::endl;
+    iv::xmlparam::Xmlparam xp(strpath.toStdString());
+
+    std::string xmlmsgname = xp.GetParam("msgname","compresspic");   // default here differs from the global's initial "usbpic"
+    gmsgname = xmlmsgname;
+    std::string strwidth = xp.GetParam("width","1280");
+    std::string strheight = xp.GetParam("height","720");
+    std::string strcamindex = xp.GetParam("index","0");
+    std::string strcompress = xp.GetParam("bcompress","1");
+    std::string strcompquality = xp.GetParam("compquality","95");
+    gcompquality = atoi(strcompquality.data());
+
+    // NOTE(review): VideoThread prepends "/dev/" to this value, while the
+    // shipped XML sets cameraname="/dev/video5" — that combination would
+    // produce "/dev//dev/video5". Confirm the expected format ("video5").
+    gstrcamera = xp.GetParam("cameraname","video0");
+
+    gwidth = atoi(strwidth.data());
+    gheight = atoi(strheight.data());
+    gcamindex = atoi(strcamindex.data());
+    gcompress = atoi(strcompress.data());
+
+    // 20 MB shared-memory channel, queue depth 1; handle used by VideoThread.
+    gpa = iv::modulecomm::RegisterSend(gmsgname.data(),20000000,1);
+    // The thread object is intentionally leaked (never joined or deleted);
+    // it runs for the lifetime of the process.
+    std::thread * mthread = new std::thread(VideoThread,0);
+
+    return a.exec();
+}