乡下人产国偷v产偷v自拍,国产午夜片在线观看,婷婷成人亚洲综合国产麻豆,久久综合给合久久狠狠狠9

  • <output id="e9wm2"></output>
    <s id="e9wm2"><nobr id="e9wm2"><ins id="e9wm2"></ins></nobr></s>

    • 分享

      關于opencv更改攝像頭參數(幀率,分辨率,曝光度……)的幾個問題

       taotao_2016 2020-03-13

      1,適用于VideoCapture打開的攝像頭

      VideoCapture capture(0); 設(shè)置攝像頭參數(shù) 不要隨意修改

      capture.set(CV_CAP_PROP_FRAME_WIDTH, 1080);//寬度

      capture.set(CV_CAP_PROP_FRAME_HEIGHT, 960);//高度

      capture.set(CV_CAP_PROP_FPS, 30);//幀率 幀/秒

      capture.set(CV_CAP_PROP_BRIGHTNESS, 1);//亮度 

      capture.set(CV_CAP_PROP_CONTRAST,40);//對比度 40

      capture.set(CV_CAP_PROP_SATURATION, 50);//飽和度 50

      capture.set(CV_CAP_PROP_HUE, 50);//色調(diào) 50

      capture.set(CV_CAP_PROP_EXPOSURE, 50);//曝光 50 獲取攝像頭參數(shù)

      得到攝像頭的參數(shù)

      capture.get(CV_CAP_PROP_FRAME_WIDTH);

      capture.get(CV_CAP_PROP_FRAME_HEIGHT);

      capture.get(CV_CAP_PROP_FPS);

      capture.get(CV_CAP_PROP_BRIGHTNESS);

      capture.get(CV_CAP_PROP_CONTRAST);

      capture.get(CV_CAP_PROP_SATURATION);

      capture.get(CV_CAP_PROP_HUE);

      capture.get(CV_CAP_PROP_EXPOSURE); 獲取視頻參數(shù):

      capture.get(CV_CAP_PROP_FRAME_COUNT);//視頻幀數(shù) 

      然后你會發現除了個別參數你能更改之外(如曝光度),大部分你是不能更改的,甚至都沒辦法得到,這種并不適用

      2,不做開發(fā),只是單純的更改

      那么推薦一個軟件,amcap,百度網(wǎng)盤鏈接,https://pan.baidu.com/s/1pL8nq0V#list/path=%2F,很簡單很容易上手。

      補(bǔ),現(xiàn)在突然想起來我的一個學(xué)長告訴我的,利用這個軟件調(diào)節(jié)攝像頭的曝光度,可以改變幀率,且攝像頭會記住曝光度的設(shè)置(其他特性就沒有這個特點)。-2019.3.12

      3,修改opencv的文件,不過效果可能和第一個差不多

      大概是在opencv的這個位置,找一下,modules/highgui/src/cap_v4l.cpp,里面有關于參數的設置,位置比較靠前,可以搜索,也可以直接找到

      大致在200多行

      4,v4l2

      下面是我找到的一篇參考,可以突破幀率的限制,當然前提是攝像頭支持

      https://blog.csdn.net/c406495762/article/details/72732135

      目前只適用于Linux系統,本人試驗過,120幀的攝像頭在只打開攝像頭時可以達到100幀左右,設置的圖片分辨率越小,能達到的幀率越高

#include <errno.h>
#include <error.h>
#include <fcntl.h>
#include <math.h>
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <linux/videodev2.h>
#include <iomanip>
#include <iostream>
#include <string>
// fix: the OpenCV includes below used '…' quotes in the scraped source
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
using namespace std;
using namespace cv;
      24. #define CLEAR(x) memset(&(x), 0, sizeof(x))
      25. #define IMAGEWIDTH 3264
      26. #define IMAGEHEIGHT 2448
      27. #define WINDOW_NAME1 '【原始圖】' //為窗口標(biāo)題定義的宏
      28. #define WINDOW_NAME2 '【圖像輪廓】' //為窗口標(biāo)題定義的宏
      29. Mat g_srcImage; Mat g_grayImage;
      30. int g_nThresh = 90;
      31. int g_nMaxThresh = 255;
      32. RNG g_rng(12345);
      33. Mat g_cannyMat_output;
      34. vector<vector<Point> > g_vContours;
      35. vector<Vec4i> g_vHierarchy;
      36. Point point1[100000];
      37. Point point2[100000];
      38. Point point3[100000];
      39. int ii,iii;
      40. int flag2 = 0;//避障用
      41. float number = 0;
      42. int fps=0;
      43. class V4L2Capture {
      44. public:
      45. V4L2Capture(char *devName, int width, int height);
      46. virtual ~V4L2Capture();
      47. int openDevice();
      48. int closeDevice();
      49. int initDevice();
      50. int startCapture();
      51. int stopCapture();
      52. int freeBuffers();
      53. int getFrame(void **,size_t *);
      54. int backFrame();
      55. static void test();
      56. private:
      57. int initBuffers();
      58. struct cam_buffer
      59. {
      60. void* start;
      61. unsigned int length;
      62. };
      63. char *devName;
      64. int capW;
      65. int capH;
      66. int fd_cam;
      67. cam_buffer *buffers;
      68. unsigned int n_buffers;
      69. int frameIndex;
      70. };
      71. V4L2Capture::V4L2Capture(char *devName, int width, int height) {
      72. // TODO Auto-generated constructor stub
      73. this->devName = devName;
      74. this->fd_cam = -1;
      75. this->buffers = NULL;
      76. this->n_buffers = 0;
      77. this->frameIndex = -1;
      78. this->capW=width;
      79. this->capH=height;
      80. }
      81. V4L2Capture::~V4L2Capture() {
      82. // TODO Auto-generated destructor stub
      83. }
      84. int V4L2Capture::openDevice() {
      85. /*設(shè)備的打開*/
      86. printf('video dev : %s\n', devName);
      87. fd_cam = open(devName, O_RDWR);
      88. if (fd_cam < 0) {
      89. perror('Can't open video device');
      90. }
      91. return 0;
      92. }
      93. int V4L2Capture::closeDevice() {
      94. if (fd_cam > 0) {
      95. int ret = 0;
      96. if ((ret = close(fd_cam)) < 0) {
      97. perror('Can't close video device');
      98. }
      99. return 0;
      100. } else {
      101. return -1;
      102. }
      103. }
      104. int V4L2Capture::initDevice() {
      105. int ret;
      106. struct v4l2_capability cam_cap; //顯示設(shè)備信息
      107. struct v4l2_cropcap cam_cropcap; //設(shè)置攝像頭的捕捉能力
      108. struct v4l2_fmtdesc cam_fmtdesc; //查詢所有支持的格式:VIDIOC_ENUM_FMT
      109. struct v4l2_crop cam_crop; //圖像的縮放
      110. struct v4l2_format cam_format; //設(shè)置攝像頭的視頻制式、幀格式等
      111. /* 使用IOCTL命令VIDIOC_QUERYCAP,獲取攝像頭的基本信息*/
      112. ret = ioctl(fd_cam, VIDIOC_QUERYCAP, &cam_cap);
      113. if (ret < 0) {
      114. perror('Can't get device information: VIDIOCGCAP');
      115. }
      116. printf(
      117. 'Driver Name:%s\nCard Name:%s\nBus info:%s\nDriver Version:%u.%u.%u\n',
      118. cam_cap.driver, cam_cap.card, cam_cap.bus_info,
      119. (cam_cap.version >> 16) & 0XFF, (cam_cap.version >> 8) & 0XFF,
      120. cam_cap.version & 0XFF);
      121. /* 使用IOCTL命令VIDIOC_ENUM_FMT,獲取攝像頭所有支持的格式*/
      122. cam_fmtdesc.index = 0;
      123. cam_fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      124. printf('Support format:\n');
      125. while (ioctl(fd_cam, VIDIOC_ENUM_FMT, &cam_fmtdesc) != -1) {
      126. printf('\t%d.%s\n', cam_fmtdesc.index + 1, cam_fmtdesc.description);
      127. cam_fmtdesc.index++;
      128. }
      129. /* 使用IOCTL命令VIDIOC_CROPCAP,獲取攝像頭的捕捉能力*/
      130. cam_cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      131. if (0 == ioctl(fd_cam, VIDIOC_CROPCAP, &cam_cropcap)) {
      132. printf('Default rec:\n\tleft:%d\n\ttop:%d\n\twidth:%d\n\theight:%d\n',
      133. cam_cropcap.defrect.left, cam_cropcap.defrect.top,
      134. cam_cropcap.defrect.width, cam_cropcap.defrect.height);
      135. /* 使用IOCTL命令VIDIOC_S_CROP,獲取攝像頭的窗口取景參數(shù)*/
      136. cam_crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      137. cam_crop.c = cam_cropcap.defrect; //默認(rèn)取景窗口大小
      138. if (-1 == ioctl(fd_cam, VIDIOC_S_CROP, &cam_crop)) {
      139. //printf('Can't set crop para\n');
      140. }
      141. } else {
      142. printf('Can't set cropcap para\n');
      143. }
      144. /* 使用IOCTL命令VIDIOC_S_FMT,設(shè)置攝像頭幀信息*/
      145. cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      146. cam_format.fmt.pix.width = capW;
      147. cam_format.fmt.pix.height = capH;
      148. cam_format.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; //要和攝像頭支持的類型對應(yīng)
      149. cam_format.fmt.pix.field = V4L2_FIELD_INTERLACED;
      150. ret = ioctl(fd_cam, VIDIOC_S_FMT, &cam_format);
      151. if (ret < 0) {
      152. perror('Can't set frame information');
      153. }
      154. /* 使用IOCTL命令VIDIOC_G_FMT,獲取攝像頭幀信息*/
      155. cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      156. ret = ioctl(fd_cam, VIDIOC_G_FMT, &cam_format);
      157. if (ret < 0) {
      158. perror('Can't get frame information');
      159. }
      160. printf('Current data format information:\n\twidth:%d\n\theight:%d\n',
      161. cam_format.fmt.pix.width, cam_format.fmt.pix.height);
      162. ret = initBuffers();
      163. if (ret < 0) {
      164. perror('Buffers init error');
      165. //exit(-1);
      166. }
      167. return 0;
      168. }
      169. int V4L2Capture::initBuffers() {
      170. int ret;
      171. /* 使用IOCTL命令VIDIOC_REQBUFS,申請幀緩沖*/
      172. struct v4l2_requestbuffers req;
      173. CLEAR(req);
      174. req.count = 4;
      175. req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      176. req.memory = V4L2_MEMORY_MMAP;
      177. ret = ioctl(fd_cam, VIDIOC_REQBUFS, &req);
      178. if (ret < 0) {
      179. perror('Request frame buffers failed');
      180. }
      181. if (req.count < 2) {
      182. perror('Request frame buffers while insufficient buffer memory');
      183. }
      184. buffers = (struct cam_buffer*) calloc(req.count, sizeof(*buffers));
      185. if (!buffers) {
      186. perror('Out of memory');
      187. }
      188. for (n_buffers = 0; n_buffers < req.count; n_buffers++) {
      189. struct v4l2_buffer buf;
      190. CLEAR(buf);
      191. // 查詢序號為n_buffers 的緩沖區(qū),得到其起始物理地址和大小
      192. buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      193. buf.memory = V4L2_MEMORY_MMAP;
      194. buf.index = n_buffers;
      195. ret = ioctl(fd_cam, VIDIOC_QUERYBUF, &buf);
      196. if (ret < 0) {
      197. printf('VIDIOC_QUERYBUF %d failed\n', n_buffers);
      198. return -1;
      199. }
      200. buffers[n_buffers].length = buf.length;
      201. //printf('buf.length= %d\n',buf.length);
      202. // 映射內(nèi)存
      203. buffers[n_buffers].start = mmap(
      204. NULL, // start anywhere
      205. buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd_cam,
      206. buf.m.offset);
      207. if (MAP_FAILED == buffers[n_buffers].start) {
      208. printf('mmap buffer%d failed\n', n_buffers);
      209. return -1;
      210. }
      211. }
      212. return 0;
      213. }
      214. int V4L2Capture::startCapture() {
      215. unsigned int i;
      216. for (i = 0; i < n_buffers; i++) {
      217. struct v4l2_buffer buf;
      218. CLEAR(buf);
      219. buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      220. buf.memory = V4L2_MEMORY_MMAP;
      221. buf.index = i;
      222. if (-1 == ioctl(fd_cam, VIDIOC_QBUF, &buf)) {
      223. printf('VIDIOC_QBUF buffer%d failed\n', i);
      224. return -1;
      225. }
      226. }
      227. enum v4l2_buf_type type;
      228. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      229. if (-1 == ioctl(fd_cam, VIDIOC_STREAMON, &type)) {
      230. printf('VIDIOC_STREAMON error');
      231. return -1;
      232. }
      233. return 0;
      234. }
      235. int V4L2Capture::stopCapture() {
      236. enum v4l2_buf_type type;
      237. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      238. if (-1 == ioctl(fd_cam, VIDIOC_STREAMOFF, &type)) {
      239. printf('VIDIOC_STREAMOFF error\n');
      240. return -1;
      241. }
      242. return 0;
      243. }/*ok*/
      244. int V4L2Capture::freeBuffers() {
      245. unsigned int i;
      246. for (i = 0; i < n_buffers; ++i) {
      247. if (-1 == munmap(buffers[i].start, buffers[i].length)) {
      248. printf('munmap buffer%d failed\n', i);
      249. return -1;
      250. }
      251. }
      252. free(buffers);
      253. return 0;
      254. }
      255. int V4L2Capture::getFrame(void **frame_buf, size_t* len) {
      256. struct v4l2_buffer queue_buf;
      257. CLEAR(queue_buf);
      258. queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      259. queue_buf.memory = V4L2_MEMORY_MMAP;
      260. if (-1 == ioctl(fd_cam, VIDIOC_DQBUF, &queue_buf)) {
      261. printf('VIDIOC_DQBUF error\n');
      262. return -1;
      263. }
      264. *frame_buf = buffers[queue_buf.index].start;
      265. *len = buffers[queue_buf.index].length;
      266. frameIndex = queue_buf.index;
      267. return 0;
      268. }
      269. int V4L2Capture::backFrame() {
      270. if (frameIndex != -1) {
      271. struct v4l2_buffer queue_buf;
      272. CLEAR(queue_buf);
      273. queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      274. queue_buf.memory = V4L2_MEMORY_MMAP;
      275. queue_buf.index = frameIndex;
      276. if (-1 == ioctl(fd_cam, VIDIOC_QBUF, &queue_buf)) {
      277. printf('VIDIOC_QBUF error\n');
      278. return -1;
      279. }
      280. return 0;
      281. }
      282. return -1;
      283. }
      284. void V4L2Capture::test() {
      285. unsigned char *yuv422frame = NULL;
      286. unsigned long yuvframeSize = 0;
      287. string videoDev='/dev/video0';
      288. V4L2Capture *vcap = new V4L2Capture(const_cast<char*>(videoDev.c_str()),
      289. 1920, 1080);
      290. vcap->openDevice();
      291. vcap->initDevice();
      292. vcap->startCapture();
      293. vcap->getFrame((void **) &yuv422frame, (size_t *)&yuvframeSize);
      294. vcap->backFrame();
      295. vcap->freeBuffers();
      296. vcap->closeDevice();
      297. }
      298. void line2(Point point3[100000], int n)
      299. {
      300. float aa, bb, cc, dd, ee, ff, gg;
      301. int jj = 0;
      302. for (;jj <n;jj++)
      303. {
      304. aa += point3[jj].x*point3[jj].x;
      305. bb += point3[jj].x;
      306. cc += point3[jj].x*point3[jj].y;
      307. dd += point3[jj].y;
      308. }
      309. ee = aa*n - bb*bb;
      310. if ((int)(ee* 100) != 0)
      311. {
      312. ff = (n*cc - bb*dd) / ee;
      313. gg = (dd - bb*ff) / n;
      314. }
      315. else {
      316. ff = 0;
      317. gg = 1;
      318. }
      319. Point point0, pointn;
      320. point0.y = 0;
      321. point0.x = gg;
      322. pointn.y = (n-1);
      323. pointn.x = ((n-1) * ff + gg);
      324. Mat draw_ing2 = Mat::zeros(g_cannyMat_output.size(), CV_8UC3);
      325. line(draw_ing2, point0, pointn, (255, 255, 255));
      326. imshow('10', draw_ing2);
      327. //cout << '\n'<<ff <<' '<< gg << endl;
      328. float the =180*atan(ff)/3.14159;
      329. float dis = ff * 160+gg - 160;
      330. cout << the << ' ' << dis << endl;
      331. //正中心ff=0,gg=160,逆時ff為正,順時ff為負(fù)
      332. }
      333. void findcolor(cv::Mat &image)
      334. {
      335. cv::Mat_<cv::Vec3b>::iterator it = image.begin<cv::Vec3b>();
      336. cv::Mat_<cv::Vec3b>::iterator itend = image.end<cv::Vec3b>();
      337. ii = 0;
      338. iii = 0;
      339. int flagg = 0;
      340. cv::Mat srcX(image.rows, image.cols , CV_32F);
      341. cv::Mat srcY(image.rows, image.cols, CV_32F);
      342. for (int i = 0;i < image.rows;i++)
      343. {
      344. for (int j = 0;j < image.cols;j++)
      345. {
      346. if (flagg == 0)/*這樣遍歷水平方向無法得到有效數(shù)據(jù)*/
      347. {
      348. if ((*it)[0] == 255 && (*it)[1] == 0 && (*it)[2] == 255)
      349. {
      350. flagg = 1;
      351. point1[ii].x = i;
      352. point1[ii].y = j;
      353. ii++;
      354. }
      355. }
      356. else
      357. {
      358. if ((*it)[0] == 255 && (*it)[1] == 0 && (*it)[2] == 255)
      359. {
      360. flagg = 0;
      361. point2[iii].x = i;
      362. point2[iii].y = j;
      363. iii++;
      364. }
      365. }
      366. if (it == itend)
      367. break;
      368. else it++;
      369. }
      370. }
      371. IplImage pImg = IplImage(image);
      372. CvArr* arr = (CvArr*)&pImg;
      373. int nn = ii;
      374. for (;ii > 0;ii--)
      375. {
      376. point3[ii].x = (point1[ii].x + point2[ii].x) / 2;
      377. point3[ii].y = (point1[ii].y + point2[ii].y) / 2;
      378. //circle(image, point3[ii], 1, (255, 255, 255));
      379. cvSet2D(arr, point3[ii].x, point3[ii].y, Scalar(255, 255, 255));
      380. }
      381. line2(point3, nn);
      382. }
      383. void on_ThreshChange(int, void* )
      384. {
      385. // 使用Canndy檢測邊緣
      386. Canny( g_grayImage, g_cannyMat_output, g_nThresh, g_nThresh*2, 3 );
      387. // 找到輪廓
      388. findContours( g_cannyMat_output, g_vContours, g_vHierarchy, RETR_TREE, CHAIN_APPROX_SIMPLE, Point(0, 0) );
      389. // 計算矩
      390. vector<Moments> mu(g_vContours.size() );
      391. for(unsigned int i = 0; i < g_vContours.size(); i++ )
      392. { mu[i] = moments( g_vContours[i], false ); }
      393. // 計算中心矩
      394. vector<Point2f> mc( g_vContours.size() );
      395. for( unsigned int i = 0; i < g_vContours.size(); i++ )
      396. { mc[i] = Point2f( static_cast<float>(mu[i].m10/mu[i].m00), static_cast<float>(mu[i].m01/mu[i].m00 )); }
      397. // 繪制輪廓
      398. Mat drawing = Mat::zeros(g_cannyMat_output.size(), CV_8UC3);
      399. for( unsigned int i = 0; i< g_vContours.size(); i++ )
      400. {
      401. //Scalar color = Scalar( g_rng.uniform(0, 255), g_rng.uniform(0,255), g_rng.uniform(0,255) );//隨機(jī)生成顏色值
      402. Scalar color = Scalar(255, 0, 255);
      403. drawContours( drawing, g_vContours, i, color, 2, 8, g_vHierarchy, 0, Point() );//繪制外層和內(nèi)層輪廓
      404. circle( drawing, mc[i], 4, color, -1, 8, 0 );;//繪制圓
      405. }
      406. findcolor(drawing);
      407. //line1(point1,point2,ii,iii);
      408. // 顯示到窗口中
      409. // namedWindow( WINDOW_NAME2, WINDOW_AUTOSIZE );
      410. imshow( WINDOW_NAME2, drawing );
      411. }
      412. void findline(Mat image)
      413. {
      414. cv::Mat_<cv::Vec3b>::iterator it = image.begin<cv::Vec3b>();
      415. cv::Mat_<cv::Vec3b>::iterator itend = image.end<cv::Vec3b>();
      416. for (;it != itend;it++)
      417. {
      418. if ((*it)[1] == 0 && (*it)[2] >= 100)//條件可能需要改變
      419. {
      420. if(flag2==0)
      421. {
      422. flag2 = 1;
      423. cout << '注意line1,避障'<<endl;
      424. //向主控發(fā)送消息
      425. }
      426. else
      427. {
      428. cout << '注意line2,避障' << endl;
      429. //向主控發(fā)送消息
      430. //避障一與避障二中間要隔一段時間
      431. }
      432. }
      433. }
      434. }
      435. void wave(const cv::Mat &image, cv::Mat &result)
      436. {
      437. cv::Mat srcX(image.rows / 2, image.cols / 2, CV_32F);
      438. cv::Mat srcY(image.rows / 2, image.cols / 2, CV_32F);
      439. for (int i = 0;i<image.rows /2;i++)
      440. for (int j = 0;j < image.cols /2;j++)
      441. {
      442. srcX.at<float>(i, j) = 2 * j;
      443. srcY.at<float>(i, j) = 2 * i;
      444. }
      445. cv::remap(image, result, srcX, srcY, cv::INTER_LINEAR);
      446. }
      447. void VideoPlayer() {
      448. unsigned char *yuv422frame = NULL;
      449. unsigned long yuvframeSize = 0;
      450. string videoDev = '/dev/video0';
      451. V4L2Capture *vcap = new V4L2Capture(const_cast<char*>(videoDev.c_str()), 640, 480);
      452. vcap->openDevice();
      453. vcap->initDevice();
      454. vcap->startCapture();
      455. cvNamedWindow('Capture',CV_WINDOW_AUTOSIZE);
      456. IplImage* img;
      457. CvMat cvmat;
      458. double t;
      459. clock_t start, end;
      460. double number=0;
      461. int fps=0;
      462. while(1){
      463. start=clock();
      464. t = (double)cvGetTickCount();
      465. vcap->getFrame((void **) &yuv422frame, (size_t *)&yuvframeSize);
      466. cvmat = cvMat(IMAGEHEIGHT,IMAGEWIDTH,CV_8UC3,(void*)yuv422frame); //CV_8UC3
      467. //解碼
      468. img = cvDecodeImage(&cvmat,1);
      469. if(!img){
      470. printf('DecodeImage error!\n');
      471. }
      472. cv::Mat g_srcImage = cv::cvarrToMat(img,true);
      473. cvShowImage('Capture',img);
      474. cvReleaseImage(&img);
      475. vcap->backFrame();
      476. if((cvWaitKey(1)&255) == 27){
      477. exit(0);
      478. }
      479. wave(g_srcImage, g_srcImage);
      480. findline(g_srcImage);
      481. // 把原圖像轉(zhuǎn)化成灰度圖像并進(jìn)行平滑
      482. cvtColor(g_srcImage, g_grayImage, COLOR_BGR2GRAY);
      483. blur(g_grayImage, g_grayImage, Size(3, 3));
      484. //創(chuàng)建滾動條并進(jìn)行初始化
      485. createTrackbar(' 閾值', WINDOW_NAME1, &g_nThresh, g_nMaxThresh, on_ThreshChange);
      486. on_ThreshChange(0, 0);
      487. t = (double)cvGetTickCount() - t;
      488. printf('Used time is %g ms\n', (t / (cvGetTickFrequency() * 1000)));
      489. end =clock();
      490. number=number+end-start;
      491. fps++;
      492. if (number/ CLOCKS_PER_SEC>= 0.25)//windows10 for CLK_TCK
      493. {
      494. cout<<fps<<endl;
      495. fps = 0;
      496. number = 0;
      497. }
      498. }
      499. vcap->stopCapture();
      500. vcap->freeBuffers();
      501. vcap->closeDevice();
      502. }
      503. int main() {
      504. VideoPlayer();
      505. return 0;
      506. }

        本站是提供個人知識管理的網(wǎng)絡(luò)存儲空間,所有內(nèi)容均由用戶發(fā)布,不代表本站觀點。請注意甄別內(nèi)容中的聯(lián)系方式、誘導(dǎo)購買等信息,謹(jǐn)防詐騙。如發(fā)現(xiàn)有害或侵權(quán)內(nèi)容,請點擊一鍵舉報。
        轉(zhuǎn)藏 分享 獻(xiàn)花(0

        0條評論

        發(fā)表

        請遵守用戶 評論公約

        類似文章 更多