main.cpp

#include <QCoreApplication>
#include <QDateTime>
#include <QMutex>
#include <QTime>

#include <chrono>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <thread>

#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#ifdef USE_OPENCV4
#include "opencv2/imgcodecs/legacy/constants_c.h" // OpenCV 4 needs the legacy constants
#include <opencv2/imgproc/types_c.h>              // OpenCV 4 needs the legacy C types
#endif
#include <opencv2/videoio.hpp>

#include <stdio.h>
#include <unistd.h>
#include <sys/types.h> // The next four headers are specific to Linux system programming
#include <sys/stat.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <fcntl.h>
#include <linux/videodev2.h> // V4L2 camera device interface

#include "modulecomm.h"
#include "rawpic.pb.h"
#include "xmlparam.h"
#include "ivversion.h"
#define COUNT 8 // number of V4L2 capture buffers

void * gpa;
int gindex = 0;
int gwidth = 1920;
int gheight = 1080;
int gcamindex = 1;
int gcompress = 1;
int gcompquality = 95;
std::string gstrcamera = "";
std::string gmsgname = "usbpic";
std::string gstrdevname = "/dev/video5";

QMutex gMutexRawData;   // guards gRawData / gnLenRaw / gbUpdate
char * gRawData;        // raw NV12 frame shared between capture and convert threads
int gnLenRaw;
bool gbUpdate = false;

QMutex gMutexCompress;  // guards gMatPic / gbUpdateMat
cv::Mat gMatPic;        // BGR frame shared between convert and compress threads
bool gbUpdateMat = false;
#ifndef CV_CAP_PROP_FRAME_WIDTH
#define CV_CAP_PROP_FRAME_WIDTH cv::CAP_PROP_FRAME_WIDTH
#define CV_CAP_PROP_FRAME_HEIGHT cv::CAP_PROP_FRAME_HEIGHT
#define CV_CAP_PROP_FPS cv::CAP_PROP_FPS
#define CV_CAP_PROP_FOURCC cv::CAP_PROP_FOURCC
#define CV_BGR2GRAY cv::COLOR_BGR2GRAY
#endif
#ifndef CV_IMWRITE_JPEG_QUALITY
#define CV_IMWRITE_JPEG_QUALITY cv::IMWRITE_JPEG_QUALITY
#endif
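
/*
 * Pipeline started from main():
 *   threadcapture  - reads NV12 frames from the V4L2 device into gRawData
 *   threadConvert  - converts the NV12 buffer to a BGR cv::Mat (gMatPic)
 *   threadCompress - packs gMatPic into an iv::vision::rawpic message
 *                    (JPEG when gcompress==1, raw otherwise) and publishes it
 *                    through iv::modulecomm::ModuleSendMsg.
 */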
void VideoThread(int n)
{
    QTime xT;
    xT.start();
    int nserbufsize = 20000000;
    char * strser = new char[nserbufsize];
    cv::VideoCapture capture;
    // If OpenCV 4 is used, open the camera through a GStreamer pipeline:
    // std::string gst_f = "v4l2src device=/dev/video0 ! video/x-raw, width=(int)"+std::to_string(gwidth)+", height=(int)"+std::to_string(gheight)+" ! videoconvert ! appsink";
    std::string gstr_f = "v4l2src device=/dev/video"+std::to_string(gcamindex)+" ! video/x-raw, width=(int)"+std::to_string(gwidth)+", height=(int)"+std::to_string(gheight)+" ! videoconvert ! appsink";
    // capture.open(gstr_f);
#ifdef USE_OPENCV4
    if(gstrcamera.length() > 2)
    {
        gstr_f = "v4l2src device=/dev/"+gstrcamera+" ! video/x-raw, width=(int)"+std::to_string(gwidth)+", height=(int)"+std::to_string(gheight)+" ! videoconvert ! appsink";
        capture.open(gstr_f);
    }
    else
        capture.open(gcamindex);
#else
    capture.open(gcamindex);
#endif
    capture.set(CV_CAP_PROP_FRAME_WIDTH,gwidth);
    capture.set(CV_CAP_PROP_FRAME_HEIGHT,gheight);
    while(true)
    {
        cv::Mat mat1;
        capture>>mat1;
        if(!mat1.empty())
        {
            QTime timex;
            timex.start();
            // Old hand-rolled serialization, kept for reference:
            // int nlen = 2+3*sizeof(int) + mat1.rows*mat1.cols*mat1.elemSize();
            // if((nlen+20)>nserbufsize)
            // {
            //     nserbufsize = nlen+1000;
            //     delete strser;
            //     strser = new char[nserbufsize];
            // }
            // qint64 time = QDateTime::currentMSecsSinceEpoch();
            // int npos = 0;
            // memcpy(strser+npos,&time,sizeof(qint64));npos = npos+sizeof(qint64);
            // // char * strout = new char[nlen];
            // memcpy(strser+npos,&gindex,sizeof(int));npos = npos+sizeof(int);
            // memcpy(strser+npos,"mb",2);npos = npos+2;
            // memcpy(strser+npos,&mat1.rows,sizeof(int));npos = npos+sizeof(int);
            // memcpy(strser+npos,&mat1.cols,sizeof(int));npos = npos+sizeof(int);
            // int ntype = mat1.type();
            // memcpy(strser+npos,&ntype,sizeof(int));npos = npos+sizeof(int);
            // memcpy(strser+npos,mat1.data,mat1.rows*mat1.cols*mat1.elemSize());npos = npos+mat1.rows*mat1.cols*mat1.elemSize();
            // iv::modulecomm::ModuleSendMsg(gpa,strser,npos);
            // qDebug("timex is %d",timex.elapsed());
            iv::vision::rawpic pic;
            qint64 time = QDateTime::currentMSecsSinceEpoch();
            QTime xg;
            xg.start();
            pic.set_time(time);
            pic.set_index(gindex);gindex++;
            pic.set_elemsize(mat1.elemSize());
            pic.set_width(mat1.cols);
            pic.set_height(mat1.rows);
            pic.set_mattype(mat1.type());
            // cv::Mat matcompress;
            // cv::resize(mat1,matcompress,cv::Size(640,360));
            std::vector<int> param = std::vector<int>(2);
            param[0] = CV_IMWRITE_JPEG_QUALITY;
            param[1] = gcompquality; // JPEG quality, 0-100, default 95
            std::vector<unsigned char> buff;
            if(gcompress == 1)
            {
                cv::imencode(".jpg", mat1, buff, param);
                // cv::imencode(".jpg", matcompress, buff, param);
                pic.set_picdata(buff.data(),buff.size());
                buff.clear();
                pic.set_type(2);
            }
            else
            {
                pic.set_picdata(mat1.data,mat1.rows*mat1.cols*mat1.elemSize());
                // pic.set_picdata(matcompress.data,matcompress.rows*matcompress.cols*matcompress.elemSize());
                pic.set_type(1);
            }
            // qDebug("buff size is %d",buff.size());
            int nSize = pic.ByteSize();
            // char * strser = new char[nSize];
            bool bser = pic.SerializeToArray(strser,nSize);
            qDebug("pack time is %d size is %d",xg.elapsed(),nSize);
            // iv::modulecomm::ModuleSendMsg(gpa,str.data(),str.length());
            if(bser)iv::modulecomm::ModuleSendMsg(gpa,strser,nSize);
            else
            {
                std::cout<<"usbcamera "<< "serialize error. "<<std::endl;
            }
            // delete strser;
            // delete strout;
            // qDebug("%d have a pic. len is %d",xT.elapsed(),str.length());
            // qDebug("now is %d",xT.elapsed());
            // gw->mMutex.lock();
            // mat1.copyTo(gw->msrcImage);
            // gw->mMutex.unlock();
            // gMutexInput.lock();
            // mat1.copyTo(gsrcImage);
            // g_CapIndex++;
            // gMutexInput.unlock();
        }
        else
        {
            // qDebug("no data.");
            std::this_thread::sleep_for(std::chrono::milliseconds(1));
        }
    }
}
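
// Note: VideoThread() above is an alternative cv::VideoCapture-based capture
// path; main() does not start it -- it launches threadcapture, threadConvert
// and threadCompress instead.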
void threadCompress()
{
    cv::Mat mat1;
    int nserbufsize = 20000000;
    char * strser = new char[nserbufsize];
    while(1)
    {
        if(gbUpdateMat == false)
        {
            std::this_thread::sleep_for(std::chrono::milliseconds(1));
        }
        else
        {
            gMutexCompress.lock();
            gMatPic.copyTo(mat1);
            gbUpdateMat = false;
            gMutexCompress.unlock();
            iv::vision::rawpic pic;
            qint64 time = QDateTime::currentMSecsSinceEpoch();
            QTime xg;
            xg.start();
            pic.set_time(time);
            pic.set_index(gindex);gindex++;
            pic.set_elemsize(mat1.elemSize());
            pic.set_width(mat1.cols);
            pic.set_height(mat1.rows);
            pic.set_mattype(mat1.type());
            // cv::Mat matcompress;
            // cv::resize(mat1,matcompress,cv::Size(640,360));
            std::vector<int> param = std::vector<int>(2);
            param[0] = CV_IMWRITE_JPEG_QUALITY;
            param[1] = gcompquality; // JPEG quality, 0-100, default 95
            std::vector<unsigned char> buff;
            if(gcompress == 1)
            {
                cv::imencode(".jpg", mat1, buff, param);
                // cv::imencode(".jpg", matcompress, buff, param);
                pic.set_picdata(buff.data(),buff.size());
                buff.clear();
                pic.set_type(2);
            }
            else
            {
                pic.set_picdata(mat1.data,mat1.rows*mat1.cols*mat1.elemSize());
                // pic.set_picdata(matcompress.data,matcompress.rows*matcompress.cols*matcompress.elemSize());
                pic.set_type(1);
            }
            int nSize = pic.ByteSize();
            // char * strser = new char[nSize];
            bool bser = pic.SerializeToArray(strser,nSize);
            qDebug("pack time is %d size is %d",xg.elapsed(),nSize);
            // iv::modulecomm::ModuleSendMsg(gpa,str.data(),str.length());
            if(bser)iv::modulecomm::ModuleSendMsg(gpa,strser,nSize);
            else
            {
                std::cout<<"usbcamera "<< "serialize error. "<<std::endl;
            }
        }
    }
}
void threadConvert()
{
    char * strRawData = new char[gwidth*gheight*4];
    int nlen = 0;
    cv::Mat mat1 = cv::Mat::zeros(gheight, gwidth, CV_8UC3); // BGR destination frame
    while(1)
    {
        if(gbUpdate == false)
        {
            std::this_thread::sleep_for(std::chrono::milliseconds(1));
        }
        else
        {
            gMutexRawData.lock();
            nlen = gnLenRaw;
            memcpy(strRawData,gRawData,gnLenRaw);
            gbUpdate = false;
            gMutexRawData.unlock();
            if(nlen > 0)
            {
                QTime x;
                x.start();
                // NV12 -> BGR: the source Mat views the raw buffer as height*3/2 rows of bytes
                cv::cvtColor(cv::Mat(gheight * 3 / 2, gwidth, CV_8U,strRawData), mat1,
                             cv::COLOR_YUV2BGR_NV12);
                qDebug(" 2rgb time is %d",x.elapsed());
                gMutexCompress.lock();
                mat1.copyTo(gMatPic);
                gbUpdateMat = true;
                gMutexCompress.unlock();
            }
        }
    }
}
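
/*
 * threadcapture() drives the camera directly through the V4L2 multi-planar API:
 *   1. open the device node
 *   2. VIDIOC_S_FMT   - request gwidth x gheight NV12 frames
 *   3. VIDIOC_G_FMT   - read the format back to confirm it was accepted
 *   4. VIDIOC_REQBUFS - ask the driver for COUNT mmap-able buffers
 *   5. VIDIOC_QUERYBUF + mmap + VIDIOC_QBUF for each buffer
 *   6. VIDIOC_STREAMON
 *   7. loop: VIDIOC_DQBUF, copy the NV12 frame to gRawData, VIDIOC_QBUF
 */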
int threadcapture()
{
    unsigned char *datas[COUNT]; // mapped address of each capture buffer
    int ret, i;
    int fd;
    /* Step 1: open the camera device file */
    fd = open(gstrcamera.data(), O_RDWR); // device from the "cameraname" XML parameter, default /dev/video5
    if (-1 == fd)
    {
        perror("open camera device");
        return -1;
    }
    /* Step 2: set the capture frame format */
    struct v4l2_format format;
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;  // multi-planar capture (not V4L2_BUF_TYPE_VIDEO_CAPTURE)
    format.fmt.pix_mp.width = gwidth;                  // frame width
    format.fmt.pix_mp.height = gheight;                // frame height
    format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12; // pixel format
    format.fmt.pix_mp.field = V4L2_FIELD_NONE;         // progressive, no interlacing
    ret = ioctl(fd, VIDIOC_S_FMT, &format);            // apply the format (Set)
    if (-1 == ret)
    {
        perror("ioctl VIDIOC_S_FMT");
        close(fd);
        return -2;
    }
    /* Step 3: read the format back and check it was accepted */
    ret = ioctl(fd, VIDIOC_G_FMT, &format); // Get
    if (-1 == ret)
    {
        perror("ioctl VIDIOC_G_FMT");
        close(fd);
        return -3;
    }
    if (format.fmt.pix_mp.pixelformat == V4L2_PIX_FMT_NV12)
    {
        printf("ioctl VIDIOC_S_FMT successful\n");
    }
    else
    {
        printf("ioctl VIDIOC_S_FMT failed\n");
    }
    /* Enumerate the pixel formats the device actually supports (debug output) */
    struct v4l2_fmtdesc formatDescriptions;
    memset(&formatDescriptions, 0, sizeof(formatDescriptions));
    formatDescriptions.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    for (i = 0; true; i++)
    {
        formatDescriptions.index = i;
        if (ioctl(fd, VIDIOC_ENUM_FMT, &formatDescriptions) == 0) {
            qDebug("[native_camera] %2d: %s 0x%08X 0x%X",
                   i, formatDescriptions.description, formatDescriptions.pixelformat, formatDescriptions.flags);
        } else {
            // No more formats available
            break;
        }
    }
    /* Step 4: ask the driver to allocate buffers for the image data */
    struct v4l2_requestbuffers reqbuf;
    memset(&reqbuf, 0, sizeof(reqbuf));
    reqbuf.count = COUNT;                             // number of buffers
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; // buffer type
    reqbuf.memory = V4L2_MEMORY_MMAP;                 // buffers will be memory-mapped
    ret = ioctl(fd, VIDIOC_REQBUFS, &reqbuf);
    if (-1 == ret)
    {
        perror("ioctl VIDIOC_REQBUFS");
        close(fd);
        return -4;
    }
    /* Step 5: query each buffer and mmap it into this process */
    struct v4l2_plane planes[2];
    struct v4l2_buffer buff;
    memset(&planes, 0, sizeof(planes));
    memset(&buff, 0, sizeof(buff));
    buff.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    buff.memory = V4L2_MEMORY_MMAP;
    // buff.memory = V4L2_MEMORY_USERPTR;
    buff.index = 0;
    buff.length = 2; // number of planes (1 for some NV12 variants)
    buff.m.planes = &planes[0];
    for (i = 0; i < COUNT; i++)
    {
        buff.index = i;
        ret = ioctl(fd, VIDIOC_QUERYBUF, &buff);
        if (-1 == ret) // query failed
        {
            break;
        }
        /* Print the plane length and offset of this buffer */
        printf("buf[%d]: len = %u offset: %u\n", i, buff.m.planes[0].length, buff.m.planes[0].m.mem_offset);
        /* Map the buffer into the current process */
        // datas[i] = (unsigned char *)mmap(NULL, buff.length, PROT_READ, MAP_SHARED, fd, buff.m.offset);
        datas[i] = (unsigned char *)mmap(NULL, buff.m.planes->length, PROT_READ, MAP_SHARED, fd, buff.m.planes->m.mem_offset);
        // datas[i] = (unsigned char *)mmap(NULL, buff.length, PROT_READ, MAP_SHARED, fd, buff.m.planes->m.mem_offset);
        if (MAP_FAILED == datas[i]) // mapping failed
        {
            perror("mmap failed");
            return -5;
        }
        /* Queue the mapped buffer back into the driver's capture queue */
        ret = ioctl(fd, VIDIOC_QBUF, &buff); // Queue
        if (-1 == ret)
        {
            perror("VIDIOC_QBUF");
            return -6;
        }
    }
    /* Step 6: start streaming */
    int on = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; // STREAMON takes the buffer type
    ret = ioctl(fd, VIDIOC_STREAMON, &on);       // start the camera stream
    if (-1 == ret)
    {
        perror("ioctl VIDIOC_STREAMON");
        return -7;
    }
    /* Step 7: dequeue filled buffers, copy the frame out, then re-queue them */
    cv::Mat destination = cv::Mat::zeros(gheight, gwidth, CV_8UC3); // only used by the commented-out local conversion below
    int k = 0; // frame counter, debug output only
    int nlen = gwidth * gheight *3;
    // for(k=0;k<100000;k++)
    while(1)
    {
        // Reuse buff (and its plane array) from Step 5 for the dequeue/queue cycle.
        buff.index = 0;
        buff.length = 2;
        ret = ioctl(fd, VIDIOC_DQBUF, &buff); // Dequeue
        if (-1 == ret)
        {
            perror("ioctl VIDIOC_DQBUF");
            return -8;
        }
        gMutexRawData.lock();
        memcpy(gRawData,(char *)datas[buff.index],gheight*gwidth*3/2); // NV12 frame is width*height*3/2 bytes
        gbUpdate = true;
        gnLenRaw = gheight * gwidth *3/2;
        gMutexRawData.unlock();
        qDebug("index is %d",buff.index);
        // QTime x;
        // x.start();
        // cv::cvtColor(cv::Mat(gheight * 3 / 2, gwidth, CV_8U,(char *)datas[buff.index]), destination,
        //              cv::COLOR_YUV2BGR_NV12);
        // qDebug(" 2rgb time is %d",x.elapsed());
        ioctl(fd, VIDIOC_QBUF, &buff); // give the buffer back to the driver
        qDebug("%d:%ld",k,QDateTime::currentMSecsSinceEpoch());
        k++;
        // cvNamedWindow("YUV2RGB", 1);
        // cvShowImage("YUV2RGB", srcImg);
        // cvWaitKey(1);
    }
    return 0;
}
int main(int argc, char *argv[])
{
    showversion("driver_camera_rk3399");
    QCoreApplication a(argc, argv);
    gRawData = new char[20000000];
    QString strpath = QCoreApplication::applicationDirPath();
    // QString apppath = strpath;
    if(argc < 2)
        strpath = strpath + "/driver_camera_rk3399.xml";
    else
        strpath = argv[1];
    std::cout<<strpath.toStdString()<<std::endl;

    iv::xmlparam::Xmlparam xp(strpath.toStdString());
    std::string xmlmsgname = xp.GetParam("msgname","image00");
    gmsgname = xmlmsgname;
    std::string strwidth = xp.GetParam("width","1280");
    std::string strheight = xp.GetParam("height","720");
    std::string strcamindex = xp.GetParam("index","0");
    std::string strcompress = xp.GetParam("bcompress","1");
    std::string strcompquality = xp.GetParam("compquality","95");
    gcompquality = atoi(strcompquality.data());
    gstrcamera = xp.GetParam("cameraname","/dev/video5");
    gwidth = atoi(strwidth.data());
    gheight = atoi(strheight.data());
    gcamindex = atoi(strcamindex.data());
    gcompress = atoi(strcompress.data());

    gpa = iv::modulecomm::RegisterSend(gmsgname.data(),20000000,1);
    std::thread * mthread = new std::thread(threadcapture); //new std::thread(VideoThread,0);
    std::thread * conthread = new std::thread(threadConvert);
    std::thread * compressthread = new std::thread(threadCompress);
    (void)mthread;
    (void)conthread;
    (void)compressthread;
    return a.exec();
}
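
/*
 * Configuration keys read in main() (key : default):
 *   msgname : "image00", width : "1280", height : "720", index : "0",
 *   bcompress : "1", compquality : "95", cameraname : "/dev/video5"
 *
 * A possible driver_camera_rk3399.xml is sketched below. The key names and
 * defaults come from the GetParam() calls above; the surrounding element
 * layout is only an assumption, since the actual schema is defined by
 * iv::xmlparam::Xmlparam and is not visible in this file:
 *
 *   <xml>
 *     <node name="driver_camera_rk3399">
 *       <param name="msgname"     value="usbpic" />
 *       <param name="width"       value="1920" />
 *       <param name="height"      value="1080" />
 *       <param name="index"       value="0" />
 *       <param name="bcompress"   value="1" />
 *       <param name="compquality" value="95" />
 *       <param name="cameraname"  value="/dev/video5" />
 *     </node>
 *   </xml>
 */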