#include #include #include #include #include #ifdef USE_OPENCV4 #include "opencv2/imgcodecs/legacy/constants_c.h" //OpenCV4 use this line #include //OpenCV4 use this line #endif #include #include #include #include #include // 下面四个头文件是linux系统编程特有的 #include #include #include #include #include // 操作摄像头设备 #include "modulecomm.h" #include "rawpic.pb.h" #include "xmlparam.h" #include "ivversion.h" #define COUNT 8 // 缓冲区个数 void * gpa; int gindex = 0; int gwidth = 1920; int gheight = 1080; int gcamindex = 1; int gcompress = 1; int gcompquality = 95; std::string gstrcamera = ""; std::string gmsgname = "usbpic"; std::string gstrdevname = "/dev/video5"; QMutex gMutexRawData; char * gRawData; int gnLenRaw; bool gbUpdate = false; QMutex gMutexCompress; cv::Mat gMatPic; bool gbUpdateMat = false; #ifndef CV_CAP_PROP_FRAME_WIDTH #define CV_CAP_PROP_FRAME_WIDTH cv::CAP_PROP_FRAME_WIDTH #define CV_CAP_PROP_FRAME_HEIGHT cv::CAP_PROP_FRAME_HEIGHT #define CV_CAP_PROP_FPS CAP_PROP_FPS #define CV_CAP_PROP_FOURCC CAP_PROP_FOURCC #define CV_BGR2GRAY cv::COLOR_BGR2GRAY #endif void VideoThread(int n) { QTime xT; xT.start(); int nserbufsize = 20000000; char * strser = new char[nserbufsize]; cv::VideoCapture capture; //If OpenCV4 use this // std::string gst_f = "v4l2src device=/dev/video0 ! video/x-raw, width=(int)"+std::to_string(gwidth)+", height=(int)"+std::to_string(gheight)+" ! videoconvert ! appsink"; std::string gstr_f = "v4l2src device=/dev/video"+std::to_string(gcamindex)+" ! video/x-raw, width=(int)"+std::to_string(gwidth)+", height=(int)"+std::to_string(gheight)+" ! videoconvert ! appsink"; // capture.open(gstr_f); #ifdef USE_OPENCV4 if(gstrcamera.length() > 2) { gstr_f = "v4l2src device=/dev/"+gstrcamera+" ! video/x-raw, width=(int)"+std::to_string(gwidth)+", height=(int)"+std::to_string(gheight)+" ! videoconvert ! appsink"; capture.open(gstr_f); } else capture.open(gcamindex); #else capture.open(gcamindex); #endif capture.set(CV_CAP_PROP_FRAME_WIDTH,gwidth); capture.set(CV_CAP_PROP_FRAME_HEIGHT,gheight); while(true) { cv::Mat mat1; capture>>mat1; if(!mat1.empty()) { QTime timex; timex.start(); // int nlen = 2+3*sizeof(int) + mat1.rows*mat1.cols*mat1.elemSize(); // if((nlen+20)>nserbufsize) // { // nserbufsize = nlen+1000; // delete strser; // strser = new char[nserbufsize]; // } // qint64 time = QDateTime::currentMSecsSinceEpoch(); // int npos = 0; // memcpy(strser+npos,&time,sizeof(qint64));npos = npos+sizeof(qint64); // // char * strout = new char[nlen]; // memcpy(strser+npos,&gindex,sizeof(int));npos = npos+sizeof(int); // memcpy(strser+npos,"mb",2);npos = npos+2; // memcpy(strser+npos,&mat1.rows,sizeof(int));npos = npos+sizeof(int); // memcpy(strser+npos,&mat1.cols,sizeof(int));npos = npos+sizeof(int); // int ntype = mat1.type(); // memcpy(strser+npos,&ntype,sizeof(int));npos = npos+sizeof(int); // memcpy(strser+npos,mat1.data,mat1.rows*mat1.cols*mat1.elemSize());npos = npos+mat1.rows*mat1.cols*mat1.elemSize(); // iv::modulecomm::ModuleSendMsg(gpa,strser,npos); // qDebug("timex is %d",timex.elapsed()); iv::vision::rawpic pic; qint64 time = QDateTime::currentMSecsSinceEpoch(); QTime xg; xg.start(); pic.set_time(time); pic.set_index(gindex);gindex++; pic.set_elemsize(mat1.elemSize()); pic.set_width(mat1.cols); pic.set_height(mat1.rows); pic.set_mattype(mat1.type()); // cv::Mat matcompress; // cv::resize(mat1,matcompress,cv::Size(640,360)); std::vector param = std::vector(2); param[0] = CV_IMWRITE_JPEG_QUALITY; param[1] = gcompquality; // default(95) 0-100 std::vector buff; if(gcompress == 1) { cv::imencode(".jpg", mat1, buff, param); // cv::imencode(".jpg", matcompress, buff, param); pic.set_picdata(buff.data(),buff.size()); buff.clear(); pic.set_type(2); } else { pic.set_picdata(mat1.data,mat1.rows*mat1.cols*mat1.elemSize()); // pic.set_picdata(matcompress.data,matcompress.rows*matcompress.cols*matcompress.elemSize()); pic.set_type(1); } // qDebug("buff size is %d",buff.size()); int nSize = pic.ByteSize(); // char * strser = new char[nSize]; bool bser = pic.SerializeToArray(strser,nSize); qDebug("pac time is %d size is %d",xg.elapsed(),nSize); // iv::modulecomm::ModuleSendMsg(gpa,str.data(),str.length()); if(bser)iv::modulecomm::ModuleSendMsg(gpa,strser,nSize); else { std::cout<<"usbcamera "<< "serialize error. "<mMutex.lock(); // mat1.copyTo(gw->msrcImage); // gw->mMutex.unlock(); // gMutexInput.lock(); // mat1.copyTo(gsrcImage); // g_CapIndex++; // gMutexInput.unlock(); } else { // qDebug("no data."); std::this_thread::sleep_for(std::chrono::milliseconds(1)); } } } void threadCompress() { cv::Mat mat1; int nserbufsize = 20000000; char * strser = new char[nserbufsize]; while(1) { if(gbUpdateMat == false) { std::this_thread::sleep_for(std::chrono::milliseconds(1)); } else { gMutexCompress.lock(); gMatPic.copyTo(mat1); gbUpdateMat = false; gMutexCompress.unlock(); iv::vision::rawpic pic; qint64 time = QDateTime::currentMSecsSinceEpoch(); QTime xg; xg.start(); pic.set_time(time); pic.set_index(gindex);gindex++; pic.set_elemsize(mat1.elemSize()); pic.set_width(mat1.cols); pic.set_height(mat1.rows); pic.set_mattype(mat1.type()); // cv::Mat matcompress; // cv::resize(mat1,matcompress,cv::Size(640,360)); std::vector param = std::vector(2); param[0] = CV_IMWRITE_JPEG_QUALITY; param[1] = gcompquality; // default(95) 0-100 std::vector buff; if(gcompress == 1) { cv::imencode(".jpg", mat1, buff, param); // cv::imencode(".jpg", matcompress, buff, param); pic.set_picdata(buff.data(),buff.size()); buff.clear(); pic.set_type(2); } else { pic.set_picdata(mat1.data,mat1.rows*mat1.cols*mat1.elemSize()); // pic.set_picdata(matcompress.data,matcompress.rows*matcompress.cols*matcompress.elemSize()); pic.set_type(1); } int nSize = pic.ByteSize(); // char * strser = new char[nSize]; bool bser = pic.SerializeToArray(strser,nSize); qDebug("pac time is %d size is %d",xg.elapsed(),nSize); // iv::modulecomm::ModuleSendMsg(gpa,str.data(),str.length()); if(bser)iv::modulecomm::ModuleSendMsg(gpa,strser,nSize); else { std::cout<<"usbcamera "<< "serialize error. "<imageData) ;//(cvSize(WIDTH, HEIGHT), CV_8UC3);//, frame.imageData);; while(1) { if(gbUpdate == false) { std::this_thread::sleep_for(std::chrono::milliseconds(1)); } else { gMutexRawData.lock(); nlen = gnLenRaw; memcpy(strRawData,gRawData,gnLenRaw); gbUpdate = false; gMutexRawData.unlock(); if(nlen > 0) { QTime x; x.start(); cv::cvtColor(cv::Mat(gheight * 3 / 2, gwidth, CV_8U,strRawData), mat1, cv::COLOR_YUV2BGR_NV12); qDebug(" 2rgb tims is %d",x.elapsed()); gMutexCompress.lock(); mat1.copyTo(gMatPic); gbUpdateMat = true; gMutexCompress.unlock(); } } } } int threadcapture() { unsigned char *datas[COUNT]; // 缓冲区数据地址 int ret, i; int fd; /* 第一步:打开摄像头设备文件 */ fd = open("/dev/video5", O_RDWR); // 注意查看摄像头设备名 if (-1 == fd) { perror("open /dev/video0"); return -1; } /* 第二步:设置捕捉图片帧格式 */ struct v4l2_format format; format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; //V4L2_BUF_TYPE_VIDEO_CAPTURE; // 操作类型为获取图片 format.fmt.pix.width = gwidth; // 图片的宽度 format.fmt.pix.height = gheight; // 图片的高度 format.fmt.pix.pixelformat = V4L2_PIX_FMT_NV12; // 图片格式 format.fmt.pix_mp.field = V4L2_FIELD_NONE; // 逐行扫描 ret = ioctl(fd, VIDIOC_S_FMT, &format); // 进行设置(Set) if (-1 == ret) { perror("ioctl VIDIOC_S_FMT"); close(fd); return -2; } /* 第三步:检查是否设置成功 */ ret = ioctl(fd, VIDIOC_G_FMT, &format); // Get if (-1 == ret) { perror("ioctl VIDIOC_G_FMT"); close(fd); return -3; } if (format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV12) { printf("ioctl VIDIOC_S_FMT sucessful\n"); } else { printf("ioctl VIDIOC_S_FMT failed\n"); } v4l2_fmtdesc formatDescriptions; formatDescriptions.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; for (i = 0; true; i++) { formatDescriptions.index = i; if (ioctl(fd, VIDIOC_ENUM_FMT, &formatDescriptions) == 0) { qDebug("[native_camera] %2d: %s 0x%08X 0x%X", i, formatDescriptions.description, formatDescriptions.pixelformat, formatDescriptions.flags); } else { // No more formats available break; } } /* 第四步:让摄像头驱动申请存放图像数据的缓冲区 */ struct v4l2_requestbuffers reqbuf; reqbuf.count = COUNT; // 缓冲区个数 reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; // 缓冲区类型 reqbuf.memory = V4L2_MEMORY_MMAP; // 缓冲区的用途:用于内存映射 ret = ioctl(fd, VIDIOC_REQBUFS, &reqbuf); if (-1 == ret) { perror("ioctl VIDIOC_REQBUFS"); close(fd); return -4; } /* 第五步:查询每个缓冲区的信息,同时进行内存映射 */ struct v4l2_plane planes[2]; struct v4l2_buffer buff; buff.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; buff.memory = V4L2_MEMORY_MMAP; // buff.memory = V4L2_MEMORY_USERPTR; buff.index = 0; buff.length = 2; // 1 buff.m.planes = &planes[0]; for (i = 0; i < COUNT; i++) { buff.index = i; // ret = ioctl(fd, VIDIOC_QUERYBUF, &buff); ret = ioctl(fd, VIDIOC_QUERYBUF, &buff); if (-1 == ret) // 操作失败 { break; } /* 打印缓冲区的长度和偏移量 */ printf("buf[%d]: len = %d offset: %d\n", i, buff.length, buff.m.offset); /* 把每块缓冲区映射到当前进程来 */ // datas[i] = (unsigned char *)mmap(NULL, buff.length, PROT_READ, MAP_SHARED, fd, buff.m.offset); datas[i] = (unsigned char *)mmap(NULL, buff.m.planes->length, PROT_READ, MAP_SHARED, fd, buff.m.planes->m.mem_offset); // datas[i] = (unsigned char *)mmap(NULL, buff.length, PROT_READ, MAP_SHARED, fd, buff.m.planes->m.mem_offset); if (MAP_FAILED == datas[i]) // 映射失败 { perror("mmap failed"); return -5; } /* 把映射成功的缓冲区加入到摄像头驱动的图像数据采集队列里 */ ret = ioctl(fd, VIDIOC_QBUF, &buff); // Queue if (-1 == ret) { perror("VIDIOC_QBUF"); return -6; } } /* 第六步:启动采集 */ int on = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; // 设置启动标志位 ret = ioctl(fd, VIDIOC_STREAMON, &on); // 开启摄像头流 if (-1 == ret) { perror("ioctl VIDIOC_STREAMON"); return -7; } /* 第七步:让已经采集好的数据缓冲退出队列 */ IplImage *srcImg; srcImg = cvCreateImage(cvSize(gwidth, gheight), 8, 3); cvZero(srcImg); cv::Mat destination(cvSize(gwidth, gheight), CV_8UC3,srcImg->imageData) ;//(cvSize(WIDTH, HEIGHT), CV_8UC3);//, frame.imageData);; int k; int nlen = gwidth * gheight *3; // for(k=0;k<100000;k++) while(1) { struct v4l2_buffer buf; memset(&buf, 0, sizeof(buf)); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; buf.memory = V4L2_MEMORY_MMAP; buf.index = k; buf.length = 2; // 1 struct v4l2_plane planest[2]; buf.m.planes = &planest[0]; buff.index = 0; buff.length = 2; ret = ioctl(fd, VIDIOC_DQBUF, &buff); // Dequeue if (-1 == ret) { perror("ioctl VIDIOC_DQUF"); return -8; } gMutexRawData.lock(); memcpy(gRawData,(char *)datas[buff.index],gheight*gwidth*3/2); gbUpdate = true; gnLenRaw = gheight * gwidth *3/2; gMutexRawData.unlock(); qDebug("index is %d",buff.index); // QTime x; // x.start(); // cv::cvtColor(cv::Mat(gheight * 3 / 2, gwidth, CV_8U,(char *)datas[buff.index]), destination, // cv::COLOR_YUV2BGR_NV12); // qDebug(" 2rgb tims is %d",x.elapsed()); ioctl(fd, VIDIOC_QBUF, &buff); qDebug("%d:%ld",k,QDateTime::currentMSecsSinceEpoch()); // cvNamedWindow("YUV2RGB", 1); // cvShowImage("YUV2RGB", srcImg); // cvWaitKey(1); } return 0; } int main(int argc, char *argv[]) { showversion("driver_camera_rk3399"); QCoreApplication a(argc, argv); gRawData = new char[20000000]; QString strpath = QCoreApplication::applicationDirPath(); // QString apppath = strpath; if(argc < 2) strpath = strpath + "/driver_camera_rk3399.xml"; else strpath = argv[1]; std::cout<