videocapture_linux.cpp 32 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193
  1. #include"videocapture_linux.h"
  2. #include "../../libvideoframework/videoutil.h"
  3. #include "../../libvideoframework/aligned_malloc.h"
  4. #include <stdlib.h>
  5. #include <string.h>
  6. #include <errno.h>
  7. #include <fcntl.h>
  8. #include <linux/videodev2.h>
  9. #include <stdio.h>
  10. #include <string.h>
  11. #include <sys/ioctl.h>
  12. #include <sys/mman.h>
  13. #include <sys/stat.h>
  14. #include <unistd.h>
  15. #include <memory>
  16. #ifdef __cplusplus
  17. extern "C" {
  18. #endif
  19. #include <libavcodec/avcodec.h>
  20. #include <libavformat/avformat.h>
  21. #include <libswscale/swscale.h>
  22. #ifdef __cplusplus
  23. }
  24. #endif
  25. #define BUFFER_DEBUG_FMT_STR \
  26. "buffer#%u @ %p type=%u bytesused=%u length=%u flags=%x " \
  27. "field=%u timestamp= %lld.%06lld sequence=%u"
  28. #define BUFFER_DEBUG_FMT_ARGS(buf) \
  29. (buf)->index, (buf), (buf)->type, (buf)->bytesused, (buf)->length, \
  30. (buf)->flags, (buf)->field, \
  31. (long long)(buf)->timestamp.tv_sec, \
  32. (long long)(buf)->timestamp.tv_usec, (buf)->sequence
  33. static const int kBufferAlignment = 64;
  34. // Get FourCC code as a string.
  35. int GetFourccName(char* strbuf, uint32_t ulen, uint32_t fourcc)
  36. {
  37. int iret = -1;
  38. if (NULL == strbuf) {
  39. return iret;
  40. }
  41. for (uint32_t i = 0; i < sizeof(uint32_t) && i < ulen; i++) {
  42. uint32_t uindex = i * 8;
  43. strbuf[i] = (fourcc >> uindex) & 0xFF;
  44. }
  45. iret = 0;
  46. return iret;
  47. }
  48. VideoCaptureImpl::VideoCaptureImpl(videocap_callback_t* pCallback)
  49. {
  50. memcpy(&m_callback, pCallback, sizeof(videocap_callback_t));
  51. m_capture = NULL;
  52. m_bCaptureStarted = false;
  53. m_deviceId = -1;
  54. m_deviceFd = -1;
  55. m_in_cap_width = 0;
  56. m_in_cap_height = 0;
  57. m_real_cap_width = 0;
  58. m_real_cap_height = 0;
  59. m_out_cap_width = 0;
  60. m_out_cap_height = 0;
  61. m_rotate = libyuv::kRotate0;
  62. m_frame_fmt = VIDEO_FORMAT_I420;
  63. m_captureVideoType = VideoType::kI420;
  64. m_currentFrameRate = -1;
  65. m_buffersAllocatedByDevice = -1;
  66. m_pool = NULL;
  67. m_CaptureThreadId = 0;
  68. m_bStopCapture = false;
  69. m_i420 = NULL;
  70. m_opti420 = NULL;
  71. m_rgb24 = NULL;
  72. m_iminbrightness = 0;
  73. m_imaxbrightness = 0;
  74. m_ilogcount = 0;
  75. }
// Destructor: stops capture, closes the device fd and releases the capture
// context.
VideoCaptureImpl::~VideoCaptureImpl()
{
    m_ilogcount = 0;
    m_bCaptureStarted = false;
    // NOTE(review): m_bStopCapture is set to *false* here, yet the capture
    // thread loops while GetStopCaptureFlag() is false -- presumably
    // StopVideoCapture() raises the flag itself before joining; confirm.
    m_bStopCapture = false;
    StopVideoCapture();
    if (m_deviceFd != -1) {
        close(m_deviceFd);
    }
    if (NULL != m_capture){
        // NOTE(review): cap_frame/res_frame/sws_context inside m_capture are
        // not released here -- verify StopVideoCapture() (not visible in this
        // chunk) frees them, along with m_i420/m_opti420/m_rgb24.
        free(m_capture);
        m_capture = NULL;
    }
}
  90. int VideoCaptureImpl::VideoCaptureSetParam(videocap_param_t* param)
  91. {
  92. /* check param */
  93. if (NULL == param) {
  94. return -1;
  95. }
  96. if (param->cap_mode < 0 || param->cap_mode >= VIDEOCAP_MAX_MODE) {
  97. return -1;
  98. }
  99. if (param->frame_fmt != VIDEO_FORMAT_I420 && param->frame_fmt != VIDEO_FORMAT_RGB24) {
  100. return -1;
  101. }
  102. if (param->fps < 1.0 || param->fps > 50.0) {
  103. return -1;
  104. }
  105. if (param->pre_hwnd){
  106. if (param->pre_width < 0 || param->pre_height < 0) {
  107. return -1;
  108. }
  109. }
  110. if (param->dev_id >= 0) {
  111. m_deviceId = param->dev_id;
  112. }
  113. else {
  114. return -1;
  115. }
  116. if (param->frame_fmt == VIDEO_FORMAT_I420 && !(param->option & VIDEOCAP_OPT_EANBLE_RESIZE)) {
  117. param->res_mode = param->cap_mode;
  118. param->option |= VIDEOCAP_OPT_EANBLE_RESIZE;
  119. }
  120. if (param->option & VIDEOCAP_OPT_EANBLE_RESIZE) {
  121. if (param->res_mode < VIDEOCAP_FRAME_SQCIF || param->res_mode > VIDEOCAP_FRAME_SVGA) {
  122. return -1;
  123. }
  124. }
  125. else {
  126. //CapLog("%s", "param->option & VIDEOCAP_OPT_EANBLE_RESIZE success.");
  127. }
  128. m_capture = (videocap_t*)malloc(sizeof(videocap_t));
  129. if (!m_capture) {
  130. return -1;
  131. }
  132. memset((void*)m_capture, 0, sizeof(videocap_t));
  133. memcpy(&m_capture->param, param, sizeof(videocap_param_t));
  134. if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) {
  135. int width = mode_width[param->cap_mode];
  136. int height = mode_height[param->cap_mode];
  137. if (video_frame_alloc(width, height, param->frame_fmt, &m_capture->cap_frame) != 0) {
  138. free(m_capture);
  139. return -1;
  140. }
  141. video_frame_fill_black(&m_capture->cap_frame);
  142. }
  143. if (param->option & VIDEOCAP_OPT_ENABLE_ASYNC_GRAB) {
  144. }
  145. if (param->option & VIDEOCAP_OPT_EANBLE_RESIZE) {
  146. int width = mode_width[param->res_mode];
  147. int height = mode_height[param->res_mode];
  148. if (video_frame_alloc(width, height, param->frame_fmt, &m_capture->res_frame) != 0) {
  149. if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) {
  150. video_frame_free(&m_capture->res_frame);
  151. }
  152. free(m_capture);
  153. return -1;
  154. }
  155. video_frame_fill_black(&m_capture->res_frame);
  156. m_capture->sws_context = sws_getContext(mode_width[param->cap_mode],
  157. mode_height[param->cap_mode],
  158. AV_PIX_FMT_BGR24,
  159. mode_width[param->res_mode],
  160. mode_height[param->res_mode],
  161. m_capture->param.frame_fmt == VIDEO_FORMAT_RGB24 ? AV_PIX_FMT_BGR24 : AV_PIX_FMT_YUV420P,
  162. SWS_FAST_BILINEAR,
  163. NULL,
  164. NULL,
  165. NULL);
  166. if (!m_capture->sws_context) {
  167. video_frame_free(&m_capture->res_frame);
  168. if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) {
  169. video_frame_free(&m_capture->cap_frame);
  170. }
  171. free(m_capture);
  172. return -1;
  173. }
  174. }
  175. m_rotate = RotateTrans(param->irotate);
  176. m_in_cap_width = m_out_cap_width = mode_width[m_capture->param.cap_mode];
  177. m_in_cap_height = m_out_cap_height = mode_height[m_capture->param.cap_mode];
  178. if (libyuv::kRotate90 == m_rotate || libyuv::kRotate270 == m_rotate){
  179. m_out_cap_width = mode_height[m_capture->param.cap_mode];
  180. m_out_cap_height = mode_width[m_capture->param.cap_mode];
  181. }
  182. return 0;
  183. }
  184. int ConvertVideoType(VideoType video_type) {
  185. switch (video_type) {
  186. case VideoType::kUnknown:
  187. return libyuv::FOURCC_ANY;
  188. case VideoType::kI420:
  189. return libyuv::FOURCC_I420;
  190. case VideoType::kIYUV: // same as VideoType::kYV12
  191. case VideoType::kYV12:
  192. return libyuv::FOURCC_YV12;
  193. case VideoType::kRGB24:
  194. return libyuv::FOURCC_24BG;
  195. case VideoType::kABGR:
  196. return libyuv::FOURCC_ABGR;
  197. case VideoType::kRGB565:
  198. return libyuv::FOURCC_RGBP;
  199. case VideoType::kYUY2:
  200. return libyuv::FOURCC_YUY2;
  201. case VideoType::kUYVY:
  202. return libyuv::FOURCC_UYVY;
  203. case VideoType::kMJPEG:
  204. return libyuv::FOURCC_MJPG;
  205. case VideoType::kNV21:
  206. return libyuv::FOURCC_NV21;
  207. case VideoType::kNV12:
  208. return libyuv::FOURCC_NV12;
  209. case VideoType::kARGB:
  210. return libyuv::FOURCC_ARGB;
  211. case VideoType::kBGRA:
  212. return libyuv::FOURCC_BGRA;
  213. case VideoType::kARGB4444:
  214. return libyuv::FOURCC_R444;
  215. case VideoType::kARGB1555:
  216. return libyuv::FOURCC_RGBO;
  217. }
  218. return libyuv::FOURCC_ANY;
  219. }
  220. size_t CalcBufferSize(VideoType type, int width, int height)
  221. {
  222. size_t buffer_size = 0;
  223. switch (type) {
  224. case VideoType::kI420:
  225. case VideoType::kNV12:
  226. case VideoType::kNV21:
  227. case VideoType::kIYUV:
  228. case VideoType::kYV12: {
  229. int half_width = (width + 1) >> 1;
  230. int half_height = (height + 1) >> 1;
  231. buffer_size = width * height + half_width * half_height * 2;
  232. break;
  233. }
  234. case VideoType::kARGB4444:
  235. case VideoType::kRGB565:
  236. case VideoType::kARGB1555:
  237. case VideoType::kYUY2:
  238. case VideoType::kUYVY:
  239. buffer_size = width * height * 2;
  240. break;
  241. case VideoType::kRGB24:
  242. buffer_size = width * height * 3;
  243. break;
  244. case VideoType::kBGRA:
  245. case VideoType::kARGB:
  246. buffer_size = width * height * 4;
  247. break;
  248. default:
  249. break;
  250. }
  251. return buffer_size;
  252. }
  253. int I420DataSize(int height, int stride_y, int stride_u, int stride_v) {
  254. return stride_y * height + (stride_u + stride_v) * ((height + 1) / 2);
  255. }
  256. int RGB24DataSize(int height, int stride_y, int stride_u, int stride_v) {
  257. return stride_y * height * 2 + ((stride_u + stride_v) * ((height + 1) / 2) * 2);
  258. }
  259. bool CheackRotateParam(int width, int height, libyuv::RotationMode eRotate, int dst_width, int dst_height)
  260. {
  261. bool bret = false;
  262. if (width == dst_width && height == dst_height){
  263. if (libyuv::kRotate0 == eRotate || libyuv::kRotate180 == eRotate){
  264. bret = true;
  265. }
  266. }
  267. else {
  268. if (width == dst_height && height == dst_width){
  269. if (libyuv::kRotate90 == eRotate || libyuv::kRotate270 == eRotate) {
  270. bret = true;
  271. }
  272. }
  273. }
  274. return bret;
  275. }
// Return the mmap'd V4L2 buffer pool. Owned by this object; NULL until
// AllocateVideoBuffers() succeeds and invalid after DeAllocateVideoBuffers().
Buffer* VideoCaptureImpl::GetCaptureBuffer()
{
    return m_pool;
}
// Process one captured frame: validate its length, optionally hand the raw
// buffer to the YUY2 callback, convert to I420 (into m_i420), apply the
// configured rotation, convert to RGB24 (into m_rgb24) and deliver the result
// through on_frame. Returns 0 on success, -1 on any conversion failure.
int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
    size_t videoFrameLength,
    const VideoCaptureCapability& frameInfo,
    int64_t captureTime /*=0*/)
{
    const int32_t width = frameInfo.width;
    const int32_t height = frameInfo.height;
    // One-shot diagnostic log, gated by m_ilogcount.
    if (0 == m_ilogcount){
        // NOTE(review): format specifiers do not match argument types here
        // (%d for the int64_t captureTime, %ld for the enum videoType, %d for
        // the size_t videoFrameLength) -- confirm CapLog tolerates this.
        CapLog(VIDEOCAP_LOG_DEBUG, "IncomingFrame capture_time is %d, videoType=%ld, rotate=%d, videoFrameLength=%d, width=%d, height=%d, and destination width=%d, height=%d.", captureTime, frameInfo.videoType, m_rotate, videoFrameLength, width, height, m_out_cap_width, m_out_cap_height);
        m_ilogcount++;
    }
    // Not encoded, convert to I420.
    // Reject frames whose payload does not match the expected raw size;
    // for YUY2 the length is clamped to the computed size instead.
    if (frameInfo.videoType != VideoType::kMJPEG &&
        CalcBufferSize(frameInfo.videoType, width, abs(height)) != videoFrameLength) {
        if (VideoType::kYUY2 == frameInfo.videoType) {
            videoFrameLength = CalcBufferSize(frameInfo.videoType, width, abs(height));
        }
        else{
            CapLog(VIDEOCAP_LOG_ERROR, "Wrong incoming frame length.");
            return -1;
        }
    }
    // Optional raw tap: deliver the untouched capture buffer as YUY2.
    if (NULL != m_capture->param.on_frame_yuy2) {
        video_frame frm = { 0 };
        frm.data[0] = videoFrame;
        frm.linesize[0] = width * 2; // YUY2 packs 2 bytes per pixel
        frm.width = width;
        frm.height = height;
        frm.format = VIDEO_FORMAT_YUY2;
        m_capture->param.on_frame_yuy2(m_capture->param.user_data, &frm);
    }
    // Strides for the capture-sized I420 scratch buffer m_i420
    // (allocated in AlignedMallocVideoBuffer with these same strides).
    int stride_y = m_in_cap_width;
    int stride_u = (m_in_cap_width + 1)/2;
    int stride_v = (m_in_cap_width + 1)/2;
    //uint8_t* i420y = (uint8_t*)AlignedMalloc(I420DataSize(height, stride_y, stride_u, stride_v), kBufferAlignment);
    //uint8_t* brg24 = (uint8_t*)AlignedMalloc(RGB24DataSize(m_dest_cap_height, m_dest_cap_width, (m_dest_cap_width+1)/2, (m_dest_cap_width + 1) / 2), kBufferAlignment);
    // Convert the source frame into m_i420.
    // NOTE(review): rotation here is hard-coded to kRotate180 (m_rotate is
    // applied later via I420Rotate), and the source is cropped vertically by
    // (height - m_in_cap_height)/2 despite the "No Cropping" comment --
    // presumably deliberate sensor-orientation compensation; confirm.
    int conversionResult = libyuv::ConvertToI420(videoFrame, videoFrameLength,
        m_i420,
        stride_y,
        m_i420 + stride_y * m_in_cap_height,
        stride_u,
        m_i420 + stride_y * m_in_cap_height + stride_u * ((m_in_cap_height + 1) / 2),
        stride_v,
        0,
        (height - m_in_cap_height) / 2, // No Cropping
        width,
        height,
        width,
        m_in_cap_height,
        libyuv::kRotate180,
        ConvertVideoType(frameInfo.videoType)
        );
    if (conversionResult < 0) {
        CapLog(VIDEOCAP_LOG_ERROR, "Failed to convert capture frame from type %d to I420 for %s.", static_cast<int>(frameInfo.videoType), strerror(errno));
        return -1;
    }
    //{
    // video_frame frmi420 = { 0 };
    // frmi420.data[0] = m_i420;
    // frmi420.linesize[0] = m_in_cap_height * 3 / 2;
    // frmi420.width = m_in_cap_width;
    // frmi420.height = m_in_cap_height;
    // frmi420.format = VIDEO_FORMAT_I420;
    // //m_capture->param.on_frame_i420(m_capture->param.user_data, &frmi420);
    // char stroptname[260] = { 0 };
    // snprintf(stroptname, 260, "%d_%d_%d_%d_i420.bmp", m_ilogcount, (int)m_rotate, m_in_cap_width, m_in_cap_height);
    // video_frame_save_bmpfile(stroptname, &frmi420);
    //}
    // 0/180-degree rotation: dimensions unchanged, convert straight to RGB24.
    if (libyuv::kRotate0 == m_rotate || libyuv::kRotate180 == m_rotate){
        conversionResult = libyuv::ConvertFromI420(m_i420,
            stride_y,
            m_i420 + stride_y * m_in_cap_height,
            stride_u,
            m_i420 + stride_y * m_in_cap_height + stride_u * ((m_in_cap_height + 1) / 2),
            stride_v,
            m_rgb24,
            m_out_cap_width * 3,
            m_out_cap_width,
            m_out_cap_height,
            ConvertVideoType(kRGB24));
        if (conversionResult < 0) {
            CapLog(VIDEOCAP_LOG_ERROR, "Failed to convert capture frame from I420 to RGB24 for %s.", strerror(errno));
            return -1;
        }
    }
    else {
        // 90/270-degree rotation: rotate into m_opti420 (swapped dimensions),
        // then convert to RGB24.
        if (libyuv::kRotate90 == m_rotate || libyuv::kRotate270 == m_rotate) {
            // NOTE(review): the applied rotation is the inverse of m_rotate
            // (90 -> 270, 270 -> 90) -- presumably compensating for the
            // kRotate180 applied during ConvertToI420 above; confirm.
            libyuv::RotationMode erotate = libyuv::kRotate90;
            if (libyuv::kRotate90 == m_rotate) {
                erotate = libyuv::kRotate270;
            }
            // Strides for the rotated (output-sized) scratch buffer m_opti420.
            int opt_stride_y = m_out_cap_width;
            int opt_stride_u = (m_out_cap_width + 1) / 2;
            int opt_stride_v = (m_out_cap_width + 1) / 2;
            //uint8_t* iopt420 = (uint8_t*)AlignedMalloc(I420DataSize(m_dest_cap_height, opt_stride_y, opt_stride_u, opt_stride_v), kBufferAlignment);
            int rotateResult = libyuv::I420Rotate(m_i420,
                stride_y,
                m_i420 + stride_y * m_in_cap_height,
                stride_u,
                m_i420 + stride_y * m_in_cap_height + stride_u * ((m_in_cap_height + 1) / 2),
                stride_v,
                m_opti420,
                opt_stride_y,
                m_opti420 + opt_stride_y * m_out_cap_height,
                opt_stride_u,
                m_opti420 + opt_stride_y * m_out_cap_height + opt_stride_u * ((m_out_cap_height + 1) / 2),
                opt_stride_v,
                m_in_cap_width,
                m_in_cap_height,
                erotate);
            if (rotateResult < 0) {
                CapLog(VIDEOCAP_LOG_ERROR, "Failed to Rotate Frame %d for %s.", (int)erotate, strerror(errno));
                return -1;
            }
            //{
            // video_frame frmi420 = { 0 };
            // frmi420.data[0] = m_opti420;
            // frmi420.linesize[0] = m_out_cap_width * 3 / 2;
            // frmi420.width = m_out_cap_width;
            // frmi420.height = m_out_cap_height;
            // frmi420.format = VIDEO_FORMAT_I420;
            // //m_capture->param.on_frame_i420(m_capture->param.user_data, &frmi420);
            // char stroptname[260] = { 0 };
            // snprintf(stroptname, 260, "%d_%d_%d_%d_i420.bmp", m_ilogcount, (int)m_rotate, m_out_cap_width, m_out_cap_height);
            // video_frame_save_bmpfile(stroptname, &frmi420);
            //}
            //yu12_to_dib24(brg24, iopt420, m_dest_cap_width, m_dest_cap_height);
            conversionResult = libyuv::ConvertFromI420(m_opti420,
                opt_stride_y,
                m_opti420 + opt_stride_y * m_out_cap_height,
                opt_stride_u,
                m_opti420 + opt_stride_y * m_out_cap_height + opt_stride_u * ((m_out_cap_height + 1) / 2),
                opt_stride_v,
                m_rgb24,
                m_out_cap_width * 3,
                m_out_cap_width,
                m_out_cap_height,
                ConvertVideoType(kRGB24));
            if (conversionResult < 0) {
                CapLog(VIDEOCAP_LOG_ERROR, "Failed to convert capture frame from I420 to RGB24 for %s.", strerror(errno));
                return -1;
            }
            //AlignedFree(iopt420);
            //iopt420 = NULL;
        }
    }
    // Deliver the final RGB24 frame to the client callback.
    if (NULL != m_capture->param.on_frame) {
        video_frame frm = { 0 };
        frm.data[0] = m_rgb24;
        frm.linesize[0] = m_out_cap_width * 3; // RGB24 packs 3 bytes per pixel
        frm.width = m_out_cap_width;
        frm.height = m_out_cap_height;
        frm.format = VIDEO_FORMAT_RGB24;
        m_capture->param.on_frame(m_capture->param.user_data, &frm);
        //char strrgbname[260] = { 0 };
        //snprintf(strrgbname, 260, "%d_%d_%d_%d_rgb.bmp", m_ilogcount, (int)m_rotate, m_out_cap_width, m_out_cap_height);
        //video_frame_save_bmpfile(strrgbname, &frm);
        //m_ilogcount++;
    }
    //AlignedFree(i420y);
    //i420y = NULL;
    //AlignedFree(brg24);
    //brg24 = NULL;
    return 0;
}
// Capture thread entry point. Loops until the stop flag is raised: waits for
// a readable frame with select() (5s timeout), dequeues a filled V4L2 buffer,
// forwards it to IncomingFrame(), then re-queues the buffer. Returns NULL on
// stop, select failure/timeout, or an unrecoverable DQBUF error.
static void* VideoCaptureProcess(void *arg)
{
    int retVal = 0;
    fd_set rSet;
    struct timeval timeout;
    VideoCaptureImpl* pVideoCapture = (VideoCaptureImpl*)arg;
    int iDeviceFd = pVideoCapture->GetCaptureVideoFd();
    while (false == pVideoCapture->GetStopCaptureFlag())
    {
        FD_ZERO(&rSet);
        FD_SET(iDeviceFd, &rSet);
        timeout.tv_sec = 5;
        timeout.tv_usec = 0;
        retVal = select(iDeviceFd + 1, &rSet, NULL, NULL, &timeout);
        if (retVal < 0 && errno != EINTR) // continue if interrupted
        {
            // select failed
            // NOTE(review): pVideoCapture was already dereferenced above, so
            // these null checks are redundant -- harmless but misleading.
            if (pVideoCapture){
                pVideoCapture->CapLog(VIDEOCAP_LOG_INFO, "exit for select failed.");
            }
            return NULL;
        }
        else if (retVal == 0) {
            // select timed out
            // NOTE(review): a single 5-second timeout terminates the capture
            // thread permanently -- confirm this is intended rather than a
            // retry/continue.
            if (pVideoCapture){
                pVideoCapture->CapLog(VIDEOCAP_LOG_INFO, "exit for select timed out.");
            }
            return NULL;
        }
        else if (!FD_ISSET(iDeviceFd, &rSet)) {
            // not event on camera handle
            if (pVideoCapture){
                pVideoCapture->CapLog(VIDEOCAP_LOG_INFO, "exit for not event on camera handle.");
            }
            return NULL;
        }
        if (pVideoCapture->VideoCaptureStarted()) {
            struct v4l2_buffer buf;
            memset(&buf, 0, sizeof(struct v4l2_buffer));
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            // dequeue a buffer - repeat until dequeued properly!
            // (EINTR restarts the ioctl; any other error ends the thread)
            while (ioctl(iDeviceFd, VIDIOC_DQBUF, &buf) < 0) {
                if (errno != EINTR) {
                    if (pVideoCapture){
                        pVideoCapture->CapLog(VIDEOCAP_LOG_INFO, "could not sync on a buffer on device %s.", strerror(errno));
                    }
                    return NULL;
                }
            }
            if (pVideoCapture) {
                pVideoCapture->CapLog(VIDEOCAP_LOG_DEBUG, "deviceid(%d), DQBUF(CAPTURE, index=%u) -> " BUFFER_DEBUG_FMT_STR, pVideoCapture->GetCaptureVideoId(), buf.index, BUFFER_DEBUG_FMT_ARGS(&buf));
            }
            VideoCaptureCapability frameInfo;
            frameInfo.width = pVideoCapture->GetCapture_Width();
            frameInfo.height = pVideoCapture->GetCapture_Height();
            frameInfo.videoType = pVideoCapture->GetCaptureVideoType();
            //// convert to to I420 if needed
            Buffer* buffer_pool = pVideoCapture->GetCaptureBuffer();
            // NOTE(review): buf.length is the mmap'd buffer capacity, while
            // buf.bytesused is the actual payload size the driver filled --
            // confirm passing the capacity here is intended.
            pVideoCapture->IncomingFrame((unsigned char*)buffer_pool[buf.index].start, buf.length, frameInfo);
            // enqueue the buffer again
            if (ioctl(iDeviceFd, VIDIOC_QBUF, &buf) == -1) {
                if (pVideoCapture){
                    pVideoCapture->CapLog(VIDEOCAP_LOG_INFO, "Failed to enqueue capture buffer");
                }
            }
            else {
                if (pVideoCapture) {
                    pVideoCapture->CapLog(VIDEOCAP_LOG_DEBUG, "deviceid(%d), QBUF(CAPTURE, index=%u) -> " BUFFER_DEBUG_FMT_STR "\n", pVideoCapture->GetCaptureVideoId(), buf.index, BUFFER_DEBUG_FMT_ARGS(&buf));
                }
            }
        }
    }
    usleep(0);
    return NULL;
}
  521. int VideoCaptureImpl::StartVideoCapture()
  522. {
  523. if (m_bCaptureStarted){
  524. if (m_real_cap_width == mode_width[m_capture->param.cap_mode] &&
  525. m_real_cap_height == mode_height[m_capture->param.cap_mode] &&
  526. m_frame_fmt == m_capture->param.frame_fmt){
  527. return 0;
  528. }
  529. else {
  530. StopVideoCapture();
  531. }
  532. }
  533. // first open /dev/video device
  534. char device[20] = {0};
  535. snprintf(device, 20,"/dev/video%d", (int)m_deviceId);
  536. if ((m_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) {
  537. CapLog(VIDEOCAP_LOG_ERROR, "error in opening %s for %s.", device, strerror(errno));
  538. return -1;
  539. }
  540. // Supported video formats in preferred order.
  541. // If the requested resolution is larger than VGA, we prefer MJPEG. Go for
  542. // I420 otherwise.
  543. const int nFormats = 5;
  544. unsigned int fmts[nFormats];
  545. if (mode_width[m_capture->param.cap_mode] > 640 || mode_height[m_capture->param.cap_mode] > 480) {
  546. fmts[0] = V4L2_PIX_FMT_MJPEG;
  547. fmts[1] = V4L2_PIX_FMT_YUV420;
  548. fmts[2] = V4L2_PIX_FMT_YUYV;
  549. fmts[3] = V4L2_PIX_FMT_UYVY;
  550. fmts[4] = V4L2_PIX_FMT_JPEG;
  551. }
  552. else {
  553. fmts[0] = V4L2_PIX_FMT_YUV420;
  554. fmts[1] = V4L2_PIX_FMT_YUYV;
  555. fmts[2] = V4L2_PIX_FMT_UYVY;
  556. fmts[3] = V4L2_PIX_FMT_MJPEG;
  557. fmts[4] = V4L2_PIX_FMT_JPEG;
  558. }
  559. // Enumerate image formats.
  560. struct v4l2_fmtdesc fmt;
  561. int fmtsIdx = nFormats;
  562. memset(&fmt, 0, sizeof(fmt));
  563. fmt.index = 0;
  564. fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  565. CapLog(VIDEOCAP_LOG_DEBUG, "Video Capture enumerates supported image formats:");
  566. while (ioctl(m_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) {
  567. char strformat[32] = { 0 };
  568. GetFourccName(strformat, 32, fmt.pixelformat);
  569. CapLog(VIDEOCAP_LOG_DEBUG, "pixelformat=%s, description='%s'", strformat, fmt.description);
  570. // Match the preferred order.
  571. for (int i = 0; i < nFormats; i++) {
  572. if (fmt.pixelformat == fmts[i] && i < fmtsIdx)
  573. fmtsIdx = i;
  574. }
  575. // Keep enumerating.
  576. fmt.index++;
  577. }
  578. if (fmtsIdx == nFormats) {
  579. CapLog(VIDEOCAP_LOG_INFO, "no supporting video formats found");
  580. close(m_deviceFd);
  581. return -1;
  582. }
  583. else {
  584. char strformat[32] = { 0 };
  585. GetFourccName(strformat, 32, fmts[fmtsIdx]);
  586. CapLog(VIDEOCAP_LOG_DEBUG, "we prefer format %s.", strformat);
  587. }
  588. struct v4l2_format video_fmt;
  589. memset(&video_fmt, 0, sizeof(v4l2_format));
  590. video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  591. video_fmt.fmt.pix.field = V4L2_FIELD_ANY;
  592. video_fmt.fmt.pix.width = mode_width[m_capture->param.cap_mode];
  593. video_fmt.fmt.pix.height = mode_height[m_capture->param.cap_mode];
  594. video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx];
  595. CapLog(VIDEOCAP_LOG_DEBUG, "video_fmt.fmt.pix.width = %d, video_fmt.fmt.pix.height = %d.", video_fmt.fmt.pix.width, video_fmt.fmt.pix.height);
  596. if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
  597. m_captureVideoType = VideoType::kYUY2;
  598. }
  599. else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420) {
  600. m_captureVideoType = VideoType::kI420;
  601. }
  602. else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY) {
  603. m_captureVideoType = VideoType::kUYVY;
  604. }
  605. else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG ||
  606. video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG) {
  607. m_captureVideoType = VideoType::kMJPEG;
  608. }
  609. // set format and frame size now
  610. if (ioctl(m_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) {
  611. CapLog(VIDEOCAP_LOG_ERROR, "error in VIDIOC_S_FMT for %s.", strerror(errno));
  612. close(m_deviceFd);
  613. return -1;
  614. }
  615. else
  616. {
  617. if (ioctl(m_deviceFd, VIDIOC_G_FMT, &video_fmt) < 0){
  618. CapLog(VIDEOCAP_LOG_ERROR, "error in VIDIOC_G_FMT for %s.", strerror(errno));
  619. close(m_deviceFd);
  620. return -1;
  621. }
  622. else
  623. {
  624. // initialize current width and height
  625. m_real_cap_width = video_fmt.fmt.pix.width;
  626. m_real_cap_height = video_fmt.fmt.pix.height;
  627. char strformat[32] = { 0 };
  628. GetFourccName(strformat, 32, video_fmt.fmt.pix.pixelformat);
  629. CapLog(VIDEOCAP_LOG_DEBUG, "real camera capture format is %s, m_capture_width = %d, m_capture_height = %d.", strformat, m_real_cap_width, m_real_cap_height);
  630. }
  631. }
  632. // Trying to set frame rate, before check driver capability.
  633. bool driver_framerate_support = true;
  634. struct v4l2_streamparm streamparms;
  635. memset(&streamparms, 0, sizeof(streamparms));
  636. streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  637. if (ioctl(m_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) {
  638. CapLog(VIDEOCAP_LOG_ERROR, "error in VIDIOC_G_PARM,and error info is %s.", strerror(errno));
  639. driver_framerate_support = false;
  640. // continue
  641. }
  642. else {
  643. // check the capability flag is set to V4L2_CAP_TIMEPERFRAME.
  644. if (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
  645. // driver supports the feature. Set required framerate.
  646. memset(&streamparms, 0, sizeof(streamparms));
  647. streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  648. streamparms.parm.capture.timeperframe.numerator = 1;
  649. streamparms.parm.capture.timeperframe.denominator = (int32_t)m_capture->param.fps;
  650. if (ioctl(m_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) {
  651. CapLog(VIDEOCAP_LOG_INFO, "Failed to set the framerate. error info is %s.", strerror(errno));
  652. driver_framerate_support = false;
  653. }
  654. else {
  655. m_currentFrameRate = (int32_t)m_capture->param.fps;
  656. CapLog(VIDEOCAP_LOG_DEBUG, "Set Camera video capture rate to %d, and numerator is %d, denominator is %d.", m_currentFrameRate, streamparms.parm.capture.timeperframe.numerator, streamparms.parm.capture.timeperframe.denominator);
  657. if (ioctl(m_deviceFd, VIDIOC_G_PARM, &streamparms) == 0) {
  658. CapLog(VIDEOCAP_LOG_DEBUG, "Get video capture numerator is %d, denominator is %d.", streamparms.parm.capture.timeperframe.numerator, streamparms.parm.capture.timeperframe.denominator);
  659. }
  660. }
  661. }
  662. }
  663. // If driver doesn't support framerate control, need to hardcode.
  664. // Hardcoding the value based on the frame size.
  665. if (!driver_framerate_support) {
  666. if (m_in_cap_width >= 800 && m_captureVideoType != VideoType::kMJPEG) {
  667. m_currentFrameRate = 15;
  668. }
  669. else {
  670. m_currentFrameRate = 5;
  671. CapLog(VIDEOCAP_LOG_INFO, "The Camera not support set video capture framerate, set capture rate to %d.", m_currentFrameRate);
  672. }
  673. }
  674. if (false == GetCamBrightnessInfo()) {
  675. CapLog(VIDEOCAP_LOG_ERROR, "get camea brightness info failed.");
  676. }
  677. if (!AllocateVideoCapturebuffer()) {
  678. CapLog(VIDEOCAP_LOG_ERROR, "failed to allocate video capture buffers.");
  679. close(m_deviceFd);
  680. return -1;
  681. }
  682. if (-1 == pthread_create(&m_CaptureThreadId, NULL, VideoCaptureProcess, this)) {
  683. CapLog(VIDEOCAP_LOG_ERROR, "Create Video Capture Thread Failed!");
  684. close(m_deviceFd);
  685. return -1;
  686. }
  687. // Needed to start UVC camera - from the uvcview application
  688. enum v4l2_buf_type type;
  689. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  690. if (ioctl(m_deviceFd, VIDIOC_STREAMON, &type) == -1) {
  691. CapLog(VIDEOCAP_LOG_ERROR, "failed to turn on stream for %s.", strerror(errno));
  692. close(m_deviceFd);
  693. return -1;
  694. }
  695. else {
  696. CapLog(VIDEOCAP_LOG_DEBUG, "succeed to turn on stream.");
  697. }
  698. m_bCaptureStarted = true;
  699. return 0;
  700. }
  701. bool VideoCaptureImpl::AllocateVideoCapturebuffer()
  702. {
  703. return AllocateVideoBuffers() && AlignedMallocVideoBuffer();
  704. }
  705. //critical section protected by the caller
  706. bool VideoCaptureImpl::AllocateVideoBuffers()
  707. {
  708. struct v4l2_requestbuffers rbuffer;
  709. memset(&rbuffer, 0, sizeof(v4l2_requestbuffers));
  710. rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; //缓冲帧数据格式
  711. rbuffer.memory = V4L2_MEMORY_MMAP; //是内存映射还是用户指针方式
  712. rbuffer.count = kNoOfV4L2Bufffers; //缓冲区缓冲帧的数目
  713. //向设备申请缓冲区
  714. if (ioctl(m_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0){
  715. CapLog(VIDEOCAP_LOG_ERROR, "Could not get buffers from device for %s.", strerror(errno));
  716. return false;
  717. }
  718. else {
  719. CapLog(VIDEOCAP_LOG_DEBUG, "req buffers count is %d.", rbuffer.count);
  720. }
  721. if (rbuffer.count > kNoOfV4L2Bufffers) {
  722. rbuffer.count = kNoOfV4L2Bufffers;
  723. }
  724. m_buffersAllocatedByDevice = rbuffer.count;
  725. //Map the buffers
  726. m_pool = new Buffer[rbuffer.count];
  727. for (unsigned int i = 0; i < rbuffer.count; i++)
  728. {
  729. struct v4l2_buffer buffer;
  730. memset(&buffer, 0, sizeof(v4l2_buffer));
  731. buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  732. buffer.memory = V4L2_MEMORY_MMAP;
  733. buffer.index = i;
  734. //获取缓冲帧的地址,长度
  735. if (ioctl(m_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0){
  736. return false;
  737. }
  738. else {
  739. CapLog(VIDEOCAP_LOG_DEBUG, "QUERYBUF(CAPTURE, index=%u) -> " BUFFER_DEBUG_FMT_STR, buffer.index, BUFFER_DEBUG_FMT_ARGS(&buffer));
  740. }
  741. m_pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, m_deviceFd, buffer.m.offset);
  742. if (MAP_FAILED == m_pool[i].start){
  743. for (unsigned int j = 0; j < i; j++) {
  744. munmap(m_pool[j].start, m_pool[j].length);
  745. }
  746. return false;
  747. }
  748. m_pool[i].length = buffer.length;
  749. if (ioctl(m_deviceFd, VIDIOC_QBUF, &buffer) < 0){
  750. return false;
  751. }
  752. else {
  753. CapLog(VIDEOCAP_LOG_DEBUG, "%s:%d deviceid(%d), QBUF(CAPTURE, index=%u) -> " BUFFER_DEBUG_FMT_STR, __FUNCTION__, __LINE__, m_deviceId, buffer.index, BUFFER_DEBUG_FMT_ARGS(&buffer));
  754. }
  755. }
  756. return true;
  757. }
  758. bool VideoCaptureImpl::DeAllocateVideoBuffers()
  759. {
  760. // unmap buffers
  761. for (int i = 0; i < m_buffersAllocatedByDevice; i++) {
  762. munmap(m_pool[i].start, m_pool[i].length);
  763. }
  764. delete[] m_pool;
  765. // turn off stream
  766. enum v4l2_buf_type type;
  767. type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  768. if (ioctl(m_deviceFd, VIDIOC_STREAMOFF, &type) < 0){
  769. CapLog(VIDEOCAP_LOG_ERROR, "VIDIOC_STREAMOFF error. error no: %d", errno);
  770. }
  771. else {
  772. CapLog(VIDEOCAP_LOG_DEBUG, "succeed to turn off stream.");
  773. }
  774. return true;
  775. }
  776. bool VideoCaptureImpl::AlignedMallocVideoBuffer()
  777. {
  778. bool bret = false;
  779. int stride_y = m_in_cap_width;
  780. int stride_u = (m_in_cap_width + 1) / 2;
  781. int stride_v = (m_in_cap_width + 1) / 2;
  782. m_i420 = (uint8_t*)AlignedMalloc(I420DataSize(m_in_cap_height, stride_y, stride_u, stride_v), kBufferAlignment);
  783. m_rgb24 = (uint8_t*)AlignedMalloc(RGB24DataSize(m_out_cap_height, m_out_cap_width, (m_out_cap_width + 1) / 2, (m_out_cap_width + 1) / 2), kBufferAlignment);
  784. int opt_stride_y = m_out_cap_width;
  785. int opt_stride_u = (m_out_cap_width + 1) / 2;
  786. int opt_stride_v = (m_out_cap_width + 1) / 2;
  787. m_opti420 = (uint8_t*)AlignedMalloc(I420DataSize(m_out_cap_height, opt_stride_y, opt_stride_u, opt_stride_v), kBufferAlignment);
  788. if (m_i420 && m_rgb24 && m_opti420){
  789. bret = true;
  790. }
  791. return bret;
  792. }
  793. bool VideoCaptureImpl::FreeAlignedMallocVideoBuffer()
  794. {
  795. if (NULL != m_i420){
  796. AlignedFree(m_i420);
  797. m_i420 = NULL;
  798. }
  799. if (NULL != m_rgb24) {
  800. AlignedFree(m_rgb24);
  801. m_rgb24 = NULL;
  802. }
  803. if (NULL != m_opti420) {
  804. AlignedFree(m_opti420);
  805. m_opti420 = NULL;
  806. }
  807. return true;
  808. }
  809. bool VideoCaptureImpl::VideoCaptureStarted()
  810. {
  811. return m_bCaptureStarted;
  812. }
  813. int VideoCaptureImpl::GetCaptureVideoFd()
  814. {
  815. return m_deviceFd;
  816. }
  817. int VideoCaptureImpl::GetCaptureVideoId()
  818. {
  819. return m_deviceId;
  820. }
  821. VideoType VideoCaptureImpl::GetCaptureVideoType()
  822. {
  823. return m_captureVideoType;
  824. }
  825. int VideoCaptureImpl::GetCapture_Width()
  826. {
  827. return m_real_cap_width;
  828. }
  829. int VideoCaptureImpl::GetCapture_Height()
  830. {
  831. return m_real_cap_height;
  832. }
  833. bool VideoCaptureImpl::GetStopCaptureFlag()
  834. {
  835. return m_bStopCapture;
  836. }
  837. int VideoCaptureImpl::StopVideoCapture()
  838. {
  839. if (m_bCaptureStarted){
  840. m_bCaptureStarted = false;
  841. m_bStopCapture = true;
  842. if (0 == pthread_join(m_CaptureThreadId, NULL)) {
  843. m_CaptureThreadId = 0;
  844. }
  845. else {
  846. CapLog(VIDEOCAP_LOG_ERROR, "thread join video capture thread failed for %s.", strerror(errno));
  847. }
  848. DeAllocateVideoBuffers();
  849. FreeAlignedMallocVideoBuffer();
  850. close(m_deviceFd);
  851. m_deviceFd = -1;
  852. }
  853. return 0;
  854. }
  855. void VideoCaptureImpl::VideoCaptureDestroy()
  856. {
  857. delete this;
  858. }
  859. int VideoCaptureImpl::GetCamBrightness(int* ibright, bool bRawRange)
  860. {
  861. int iret = -1;
  862. struct v4l2_control ctrl;
  863. ctrl.id = V4L2_CID_BRIGHTNESS;
  864. if (ioctl(m_deviceFd,VIDIOC_G_CTRL,&ctrl) == -1){
  865. CapLog(VIDEOCAP_LOG_ERROR, "VIDIOC_S_CTRL get V4L2_CID_BRIGHTNESS error for %s", strerror(errno));
  866. }
  867. else {
  868. if (bRawRange) {
  869. *ibright = ctrl.value;
  870. }
  871. else {
  872. *ibright = TransFromRealBrightnessValue(ctrl.value);
  873. }
  874. iret = 0;
  875. }
  876. return iret;
  877. }
  878. int VideoCaptureImpl::SetCamBrightness(int ibright, bool bRawRange)
  879. {
  880. int iret = -1;
  881. struct v4l2_control ctrl;
  882. ctrl.id = V4L2_CID_BRIGHTNESS;
  883. if (bRawRange) {
  884. ctrl.value = ibright;
  885. }
  886. else {
  887. ctrl.value = TransToRealBrightnessValue(ibright);
  888. }
  889. if (ioctl(m_deviceFd, VIDIOC_S_CTRL, &ctrl) == -1){
  890. CapLog(VIDEOCAP_LOG_ERROR, "VIDIOC_S_CTRL set V4L2_CID_BRIGHTNESS error for %s.", strerror(errno));
  891. }
  892. else{
  893. iret = 0;
  894. }
  895. return iret;
  896. }
  897. int VideoCaptureImpl::SetCamAutoBrightness()
  898. {
  899. int iret = -1;
  900. struct v4l2_control ctrl;
  901. ctrl.id = V4L2_CID_BRIGHTNESS;
  902. ctrl.value = m_idefaultbrightness;
  903. if (ioctl(m_deviceFd, VIDIOC_S_CTRL, &ctrl) == -1) {
  904. CapLog(VIDEOCAP_LOG_ERROR, "VIDIOC_S_CTRL set V4L2_CID_AUTOBRIGHTNESS error for %s", strerror(errno));
  905. }
  906. else {
  907. iret = 0;
  908. }
  909. iret = 0;
  910. return iret;
  911. }
  912. bool VideoCaptureImpl::GetCamBrightnessInfo()
  913. {
  914. bool bret = false;
  915. struct v4l2_queryctrl qctrl;
  916. qctrl.id = V4L2_CID_BRIGHTNESS;
  917. if (ioctl(m_deviceFd, VIDIOC_QUERYCTRL, &qctrl) == -1) {
  918. CapLog(VIDEOCAP_LOG_ERROR, "VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS error for %s", strerror(errno));
  919. }
  920. else {
  921. CapLog(VIDEOCAP_LOG_DEBUG, "VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS success {min(%d) - max(%d)},default is %d", qctrl.minimum, qctrl.maximum, qctrl.default_value);
  922. m_idefaultbrightness = qctrl.default_value;
  923. m_iminbrightness = qctrl.minimum;
  924. m_imaxbrightness = qctrl.maximum;
  925. bret = true;
  926. }
  927. return bret;
  928. }
  929. bool VideoCaptureImpl::GetCamRawBrightnessRange(int* imin, int* imax)
  930. {
  931. bool bret = false;
  932. struct v4l2_queryctrl qctrl;
  933. qctrl.id = V4L2_CID_BRIGHTNESS;
  934. if (ioctl(m_deviceFd, VIDIOC_QUERYCTRL, &qctrl) == -1) {
  935. CapLog(VIDEOCAP_LOG_ERROR, "VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS error for %s", strerror(errno));
  936. }
  937. else {
  938. CapLog(VIDEOCAP_LOG_DEBUG, "VIDIOC_QUERYCTRL get V4L2_CID_BRIGHTNESS success {min(%d) - max(%d)}, default is %d.", qctrl.minimum, qctrl.maximum, qctrl.default_value);
  939. *imin = qctrl.minimum;
  940. *imax = qctrl.maximum;
  941. bret = true;
  942. }
  943. return bret;
  944. }
  945. //100 to real brightness value
  946. int VideoCaptureImpl::TransToRealBrightnessValue(int ibright)
  947. {
  948. float fvalue = ibright * (m_imaxbrightness - m_iminbrightness) / 10;
  949. int ivalue = fvalue;
  950. int ilast = ivalue % 10;
  951. int inum = ivalue / 10;
  952. if (ilast >= 5) {
  953. inum++;
  954. }
  955. inum += m_iminbrightness;
  956. if (inum < m_iminbrightness){
  957. inum = m_iminbrightness;
  958. }
  959. if (inum > m_imaxbrightness){
  960. inum = m_imaxbrightness;
  961. }
  962. return inum;
  963. }
  964. //real brightness value to [0-100]
  965. int VideoCaptureImpl::TransFromRealBrightnessValue(int ibright)
  966. {
  967. int itotal = m_imaxbrightness - m_iminbrightness;
  968. int ivalue = ibright - m_iminbrightness;
  969. float fvalue = ivalue * 1000 / itotal;
  970. ivalue = fvalue;
  971. int ilast = ivalue % 10;
  972. int inum = ivalue / 10;
  973. if (ilast >= 5) {
  974. inum++;
  975. }
  976. return inum;
  977. }
  978. libyuv::RotationMode VideoCaptureImpl::RotateTrans(int irotate)
  979. {
  980. libyuv::RotationMode rotation_mode = libyuv::kRotate0;
  981. switch (irotate) {
  982. case 0:
  983. rotation_mode = libyuv::kRotate0;
  984. break;
  985. case 90:
  986. rotation_mode = libyuv::kRotate90;
  987. break;
  988. case 180:
  989. rotation_mode = libyuv::kRotate180;
  990. break;
  991. case 270:
  992. rotation_mode = libyuv::kRotate270;
  993. break;
  994. }
  995. return rotation_mode;
  996. }
  997. void VideoCaptureImpl::CapLog(videocap_loglevel elevel, const char* fmt, ...)
  998. {
  999. if (m_callback.debug) {
  1000. va_list arg;
  1001. va_start(arg, fmt);
  1002. (*m_callback.debug)(m_callback.user_data, elevel, fmt, arg);
  1003. va_end(arg);
  1004. }
  1005. }
  1006. void VideoCaptureImpl::CapLogEvent(int itype, const char* strmessage)
  1007. {
  1008. if (m_callback.logevent) {
  1009. (*m_callback.logevent)(itype, m_deviceId, strmessage);
  1010. }
  1011. }