// videocap.cpp

#include "precompile.h"
#include "videocap.h"
#include "ListEntry.h"
#include "qedit.h"
#include <dshow.h> // needs the DirectShow SDK
#include <videohorflip.h>
#include "libyuv/convert.h"
#include "libyuv/video_common.h"
#include "libyuv/scale.h"

// helper
#define swap(t, x, y) \
{ \
    t v;    \
    v = x;  \
    x = y;  \
    y = v;  \
}
HRESULT get_output_mediatype(videocap *vcap);

static void capDbg(videocap_param *cap, const char *fmt, ...)
{
    va_list arg;
    va_start(arg, fmt);
    if (cap->dbg) {
        (*cap->dbg)(fmt, arg);
    }
    va_end(arg);
}

int I420DataSize(int height, int stride_y, int stride_u, int stride_v) {
    return stride_y * height + (stride_u + stride_v) * ((height + 1) / 2);
}

int RGB24DataSize(int height, int stride_y, int stride_u, int stride_v) {
    return stride_y * height * 2 + ((stride_u + stride_v) * ((height + 1) / 2) * 2);
}

static void capLogEvent(videocap_param *cap, int ilogtype, const char* strmessage)
{
    if (cap->logevent) {
        (*cap->logevent)(ilogtype, cap->dev_id, strmessage);
    }
}
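/* Flip an I420 frame vertically in place by pointing each plane at its last
 * row and negating the stride; the U and V planes are swapped as well. */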
static void i420_flip_vertical(video_frame *frame)
{
    if (frame->format == VIDEO_FORMAT_I420) {
        frame->data[0] = frame->data[0] + frame->linesize[0]*(frame->height-1);
        frame->data[1] = frame->data[1] + frame->linesize[1]*((frame->height>>1)-1);
        frame->data[2] = frame->data[2] + frame->linesize[2]*((frame->height>>1)-1);
        frame->linesize[0] = -frame->linesize[0];
        frame->linesize[1] = -frame->linesize[1];
        frame->linesize[2] = -frame->linesize[2];
        swap(unsigned char*, frame->data[1], frame->data[2]);
        swap(int, frame->linesize[1], frame->linesize[2]);
    }
}
static HRESULT AddToRot(IUnknown *pUnkGraph, DWORD *pdwRegister)
{
    IMoniker *pMoniker;
    IRunningObjectTable *pROT;
    WCHAR wsz[256];
    HRESULT hr;
    if (FAILED(GetRunningObjectTable(0, &pROT))) {
        return E_FAIL;
    }
    wsprintfW(wsz, L"FilterGraph %08x pid %08x", (DWORD_PTR)pUnkGraph, GetCurrentProcessId());
    hr = CreateItemMoniker(L"!", wsz, &pMoniker);
    if (SUCCEEDED(hr)) {
        pROT->Register(ROTFLAGS_REGISTRATIONKEEPSALIVE, pUnkGraph,
                       pMoniker, pdwRegister);
        pMoniker->Release();
    }
    pROT->Release();
    return hr;
}

static void RemoveFromRot(DWORD pdwRegister)
{
    IRunningObjectTable *pROT;
    if (SUCCEEDED(GetRunningObjectTable(0, &pROT))) {
        pROT->Revoke(pdwRegister);
        pROT->Release();
    }
}
static char* GuidToString(const GUID guid)
{
    int buf_len = 64;
    char *buf = (char *)malloc(buf_len);
    _snprintf(
        buf,
        buf_len,
        "{%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X}",
        guid.Data1, guid.Data2, guid.Data3,
        guid.Data4[0], guid.Data4[1],
        guid.Data4[2], guid.Data4[3],
        guid.Data4[4], guid.Data4[5],
        guid.Data4[6], guid.Data4[7]);
    //printf("%s\n",buf);
    return buf;
}
static void FreeMediaTypeEx(AM_MEDIA_TYPE *pmt)
{
    if (pmt) {
        if (pmt->cbFormat) {
            CoTaskMemFree((PVOID)pmt->pbFormat);
            pmt->cbFormat = 0;
            pmt->pbFormat = NULL;
        }
        if (pmt->pUnk) {
            pmt->pUnk->Release();
            pmt->pUnk = NULL;
        }
        CoTaskMemFree(pmt);
    }
}
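/* Return the first pin on pFilter with the requested direction that is not
 * yet connected; the caller must Release() the returned pin. */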
static HRESULT GetUnconnectedPin(IBaseFilter *pFilter, PIN_DIRECTION PinDir, IPin **ppPin)
{
    IEnumPins *pEnum = 0;
    IPin *pPin = 0;
    HRESULT hr = pFilter->EnumPins(&pEnum);
    *ppPin = 0;
    if (FAILED(hr))
        return hr;
    while (pEnum->Next(1, &pPin, NULL) == S_OK) {
        PIN_DIRECTION ThisPinDir;
        pPin->QueryDirection(&ThisPinDir);
        if (ThisPinDir == PinDir) {
            IPin *pTmp = 0;
            hr = pPin->ConnectedTo(&pTmp);
            if (SUCCEEDED(hr)) { // Already connected, not the pin we want.
                pTmp->Release();
            } else {             // Unconnected, this is the pin we want.
                pEnum->Release();
                *ppPin = pPin;
                return S_OK;
            }
        }
        pPin->Release();
    }
    pEnum->Release();
    // Did not find a matching pin.
    return E_FAIL;
}
static HRESULT ConnectFilters1(IGraphBuilder *pGraph, // Filter Graph Manager.
                               IPin *pOut,            // Output pin on the upstream filter.
                               IBaseFilter *pDest)    // Downstream filter.
{
    IPin *pIn;
    HRESULT hr;
    if ((pGraph == NULL) || (pOut == NULL) || (pDest == NULL))
    {
        return E_POINTER;
    }
    // Find an input pin on the downstream filter.
    pIn = 0;
    hr = GetUnconnectedPin(pDest, PINDIR_INPUT, &pIn);
    if (FAILED(hr))
    {
        return hr;
    }
    // Try to connect them.
    hr = pGraph->ConnectDirect(pOut, pIn, NULL);
    if (FAILED(hr)) {
        hr = pGraph->Connect(pOut, pIn);
    }
    pIn->Release();
    return hr;
}

static HRESULT ConnectFilters2(IGraphBuilder *pGraph, IBaseFilter *pSrc, IBaseFilter *pDest)
{
    IPin *pOut;
    HRESULT hr;
    if ((pGraph == NULL) || (pSrc == NULL) || (pDest == NULL))
        return E_POINTER;
    // Find an output pin on the first filter.
    pOut = 0;
    hr = GetUnconnectedPin(pSrc, PINDIR_OUTPUT, &pOut);
    if (FAILED(hr))
        return hr;
    hr = ConnectFilters1(pGraph, pOut, pDest);
    pOut->Release();
    return hr;
}
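/* Enumerate every moniker in the given DirectShow filter category and invoke
 * OnDeviceCB for each one; enumeration stops as soon as the callback returns
 * S_OK (device accepted) or a failure code. */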
static HRESULT WalkFilterCategory(REFCLSID category,
    HRESULT (*OnDeviceCB)(int index, IMoniker *pMoniker, VOID *pUserData1, VOID *pUserData2),
    VOID *pUserData1,
    VOID *pUserData2)
{
    HRESULT hr;
    ICreateDevEnum *pDevEnum = NULL;
    IEnumMoniker *pEnumMoniker = NULL;
    IMoniker *pMoniker = NULL;
    BOOL quit;
    int i = 0;
    if (!OnDeviceCB)
        return E_INVALIDARG;
    hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                          IID_ICreateDevEnum, (void**)&pDevEnum);
    if (FAILED(hr))
        return hr;
    hr = pDevEnum->CreateClassEnumerator(category, &pEnumMoniker, 0);
    if (FAILED(hr)) {
        pDevEnum->Release();
        return hr;
    } else if (hr == S_OK) {
        pEnumMoniker->Reset();
        quit = FALSE;
        while (pEnumMoniker->Next(1, &pMoniker, NULL) == S_OK && !quit) {
            hr = OnDeviceCB(i++, pMoniker, pUserData1, pUserData2);
            quit = FAILED(hr) || (hr == S_OK);
            pMoniker->Release();
        }
        pEnumMoniker->Release();
        pDevEnum->Release();
    } else {
        if (pEnumMoniker)
            pEnumMoniker->Release();
        pDevEnum->Release();
    }
    return hr;
}
static HRESULT CreateFilterByFriendlyNameCB(int index, IMoniker *pMoniker, void *pUserData1, void *pUserData2)
{
    HRESULT hr;
    IBaseFilter *pBaseFilter = NULL;
    WCHAR *szName = (WCHAR*)pUserData2;
    if (pMoniker) {
        IPropertyBag *pPropBag;
        VARIANT name;
        hr = pMoniker->BindToStorage(NULL, NULL,
                                     IID_IPropertyBag, (void**)&pPropBag);
        if (FAILED(hr))
            return S_FALSE;
        VariantInit(&name);
        name.vt = VT_BSTR;
        hr = pPropBag->Read(L"FriendlyName", &name, NULL);
        if (FAILED(hr)) {
            pPropBag->Release();
            VariantClear(&name);
            return S_FALSE;
        }
        if (SysStringByteLen(name.bstrVal) && wcscmp(name.bstrVal, szName) == 0) {
            hr = pMoniker->BindToObject(NULL, NULL,
                                        IID_IBaseFilter, (void**)&pBaseFilter);
            if (SUCCEEDED(hr)) {
                *(IBaseFilter**)pUserData1 = pBaseFilter;
                hr = S_OK;
            }
        }
        pPropBag->Release();
        VariantClear(&name);
    } else {
        hr = E_POINTER;
    }
    return hr;
}

static HRESULT CreateFilterByFriendlyName(REFCLSID category, void **ppBaseFilter, WCHAR* szName)
{
    HRESULT hr = WalkFilterCategory(category,
        &CreateFilterByFriendlyNameCB, ppBaseFilter, szName);
    return hr == S_FALSE ? E_NOINTERFACE : hr;
}
static HRESULT CreateFilterByIndexCB(int index, IMoniker *pMoniker, void *pUserData1, void *pUserData2)
{
    HRESULT hr;
    IBaseFilter *pBaseFilter = NULL;
    int *pidx = (int*)pUserData2;
    if (pMoniker && index == *pidx) {
        hr = pMoniker->BindToObject(NULL, NULL,
                                    IID_IBaseFilter, (void**)&pBaseFilter);
        if (FAILED(hr))
            return hr;
        *(IBaseFilter**)pUserData1 = pBaseFilter;
        return S_OK;
    }
    return S_FALSE;
}

static HRESULT CreateFilterByIndex(REFCLSID category,
                                   void **ppBaseFilter,
                                   int idx)
{
    HRESULT hr = WalkFilterCategory(category, &CreateFilterByIndexCB,
                                    ppBaseFilter, &idx);
    return hr == S_FALSE ? E_NOINTERFACE : hr;
}
static HRESULT RemoveGraphAllFilters(IGraphBuilder *pGraphBuilder)
{
    HRESULT hr;
    IEnumFilters *pEnumFilters;
    if (!pGraphBuilder)
        return E_POINTER;
    hr = pGraphBuilder->EnumFilters(&pEnumFilters);
    if (SUCCEEDED(hr)) {
        IBaseFilter *pFilter;
        pEnumFilters->Reset();
        while (pEnumFilters->Next(1, &pFilter, NULL) == S_OK) {
            pGraphBuilder->RemoveFilter(pFilter);
            pFilter->Release();
            pEnumFilters->Reset();
        }
        pEnumFilters->Release();
    }
    return hr;
}
////////////////////////////////////////////////////////////////////////////////////////////////
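/* Width/height lookup tables indexed by capture mode (the VIDEOCAP_FRAME_* enum). */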
static const int mode_width[VIDEOCAP_MAX_MODE] = {
    VIDEOCAP_SQCIF_WIDTH, VIDEOCAP_QQVGA_WIDTH,
    VIDEOCAP_QCIF_WIDTH,  VIDEOCAP_QVGA_WIDTH,
    VIDEOCAP_CIF_WIDTH,   VIDEOCAP_VGA_WIDTH,
    VIDEOCAP_4CIF_WIDTH,  VIDEOCAP_SVGA_WIDTH,
    VIDEOCAP_NHD_WIDTH,   VIDEOCAP_SXGA_WIDTH,
    VIDEOCAP_720P_WIDTH,  VIDEOCAP_1080P_WIDTH,
};
static const int mode_height[VIDEOCAP_MAX_MODE] = {
    VIDEOCAP_SQCIF_HEIGHT, VIDEOCAP_QQVGA_HEIGHT,
    VIDEOCAP_QCIF_HEIGHT,  VIDEOCAP_QVGA_HEIGHT,
    VIDEOCAP_CIF_HEIGHT,   VIDEOCAP_VGA_HEIGHT,
    VIDEOCAP_4CIF_HEIGHT,  VIDEOCAP_SVGA_HEIGHT,
    VIDEOCAP_NHD_HEIGHT,   VIDEOCAP_SXGA_HEIGHT,
    VIDEOCAP_720P_HEIGHT,  VIDEOCAP_1080P_HEIGHT,
};

typedef struct async_cap_t
{
    LIST_ENTRY entry;
    HANDLE evt;
    int result;
    video_frame *ref_cap_frame;
} async_cap_t;
/**
 * we use DirectShow to capture video frames
 */
typedef struct videocap
{
    IBaseFilter *sourcefilter;
    IBaseFilter *cpcfilter;      // color space converter if necessary
    IBaseFilter *avidecfilter;
    IBaseFilter *grabberfilter;
    IBaseFilter *horflipfilter;
    IBaseFilter *renderfilter;
    ISampleGrabber *grabber;
    IGraphBuilder *graphbuilder;
    IMediaControl *mc;
    IMediaEvent *me;
    IVideoWindow *videowindow;
#ifdef _DEBUG
    DWORD dwROTRegister;         // Running Object Table registration
#endif
    videocap_param param;
    int running;
    HANDLE thread_background;
    HANDLE evt_thread_exit;
    int cap_index;
    CRITICAL_SECTION cap_cs;
    video_frame cap_frame;
    CRITICAL_SECTION async_cap_cs;
    LIST_ENTRY async_cap_list;
    LONG grab_cb_count;
    CRITICAL_SECTION res_cs;
    video_frame res_frame;
    struct SwsContext *sws_context; /* for image scaling and format conversion */
    int iout_width;
    int iout_height;
    bool bloged;
} videocap;

HRESULT set_video_source_format(videocap *);
void release_all_interfaces(videocap *);

typedef struct ISampleGrabberCBImpl
{
    struct ISampleGrabberCB *pCb;
    struct videocap* vcap;
    //struct ISampleGrabberCBVtbl vtbl;
} ISampleGrabberCBImpl;
class RvcSampleGrabberCB : public ISampleGrabberCB
{
public:
    RvcSampleGrabberCB(struct videocap* vcap);
    HRESULT STDMETHODCALLTYPE SampleCB(
        double SampleTime,
        IMediaSample *pSample);
    HRESULT STDMETHODCALLTYPE BufferCB(
        double SampleTime,
        BYTE *pBuffer,
        long BufferLen);
    HRESULT STDMETHODCALLTYPE QueryInterface(
        /* [in] */ REFIID riid,
        /* [iid_is][out] */ __RPC__deref_out void __RPC_FAR *__RPC_FAR *ppvObject);
    ULONG STDMETHODCALLTYPE AddRef(void);
    ULONG STDMETHODCALLTYPE Release(void);
private:
    struct videocap* m_vcap;
};

RvcSampleGrabberCB::RvcSampleGrabberCB(struct videocap* vcap)
{
    m_vcap = vcap;
}

HRESULT STDMETHODCALLTYPE RvcSampleGrabberCB::SampleCB(
    double SampleTime,
    IMediaSample *pSample)
{
    return S_OK;
}
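/* Sample grabber callback path for RGB24 (BGR in memory) frames. If the
 * buffer delivered by the driver does not match the requested capture mode,
 * the frame is center-cropped to the target aspect ratio and rescaled with
 * swscale before being handed to the grab / async-grab / on_frame callbacks. */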
static HRESULT Handle_BGR_Frame_CallBack(videocap *vcap, BYTE *pBuffer, long BufferLen)
{
    int linesize = BufferLen / mode_height[vcap->param.cap_mode];
    struct SwsContext *sws;
    char *Buffertmp = (char*)malloc(mode_width[vcap->param.cap_mode]*mode_height[vcap->param.cap_mode]*3);
    uint8_t *src_data[4];
    int src_linesize[4];
    unsigned char *dst[4] = {(unsigned char*)Buffertmp, NULL, NULL, NULL};
    int dst_linesize[4] = {mode_width[vcap->param.cap_mode]*3, 0, 0, 0};
    char *buffer;
    int oriLen = mode_height[vcap->param.cap_mode]*mode_width[vcap->param.cap_mode]*3;
    if (BufferLen != oriLen)
    {
        // compute the target image aspect ratio
        int srcH;
        float fDstScale = (float)mode_width[vcap->param.cap_mode]/(float)mode_height[vcap->param.cap_mode];
        float fSrcScale = (float)vcap->iout_width/(float)vcap->iout_height;
        if (fSrcScale != fDstScale)
        {
            // compute the crop offset
            int nWidth, nHeight, nOffset = 0;
            if (fSrcScale > fDstScale)
            {
                free(Buffertmp);
                return 0;
            }
            else if (fSrcScale < fDstScale)
            {
                // source is too tall: crop the height down to the target aspect ratio
                nWidth = vcap->iout_width;
                nHeight = (int)ceil(vcap->iout_width/fDstScale);
                nOffset = (vcap->iout_height - nHeight)/2*nWidth*3;
            }
            // SwsContext for the aspect-preserving conversion
            sws = sws_getContext(nWidth, nHeight, AV_PIX_FMT_BGR24, mode_width[vcap->param.cap_mode], mode_height[vcap->param.cap_mode], AV_PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
            av_image_alloc(src_data, src_linesize, vcap->iout_width, vcap->iout_height, AV_PIX_FMT_BGR24, 1);
            memcpy(src_data[0], pBuffer+nOffset, nWidth*nHeight*3);
            srcH = nHeight;
        }
        else
        {
            // SwsContext for the plain resolution conversion
            sws = sws_getContext(vcap->iout_width, vcap->iout_height, AV_PIX_FMT_BGR24, mode_width[vcap->param.cap_mode], mode_height[vcap->param.cap_mode], AV_PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
            av_image_alloc(src_data, src_linesize, vcap->iout_width, vcap->iout_height, AV_PIX_FMT_BGR24, 1);
            memcpy(src_data[0], pBuffer, BufferLen);
            srcH = vcap->iout_height;
        }
        // rescale to the requested capture resolution
        sws_scale(sws, src_data, src_linesize, 0, srcH, dst, dst_linesize);
        sws_freeContext(sws);
        av_freep(&src_data[0]);
        buffer = Buffertmp;
        BufferLen = mode_width[vcap->param.cap_mode]*mode_height[vcap->param.cap_mode]*3;
    }
    else
    {
        buffer = (char*)pBuffer;
    }
    linesize = BufferLen / mode_height[vcap->param.cap_mode];
    if (vcap->param.option & VIDEOCAP_OPT_ENABLE_GRAB)
    {
        if (vcap->cap_index++ % 3 == 0)
        { /* 3 is the grabbing interval */
            EnterCriticalSection(&vcap->cap_cs);
            //memcpy(vcap->cap_frame.data[0], pBuffer, BufferLen);
            memcpy(vcap->cap_frame.data[0], buffer, BufferLen);
            LeaveCriticalSection(&vcap->cap_cs);
        }
    }
    if (vcap->grab_cb_count)
    {
        if (vcap->param.on_grab)
        {
            video_frame frm = {0};
            frm.data[0] = (unsigned char*)buffer;
            frm.linesize[0] = linesize;
            frm.width = mode_width[vcap->param.cap_mode];
            frm.height = mode_height[vcap->param.cap_mode];
            frm.format = VIDEO_FORMAT_RGB24;
            vcap->param.on_grab(vcap->param.user_data, &frm);
        }
        InterlockedDecrement(&vcap->grab_cb_count);
    }
    if (vcap->param.option & VIDEOCAP_OPT_ENABLE_ASYNC_GRAB)
    {
        async_cap_t *pos, *n;
        EnterCriticalSection(&vcap->async_cap_cs);
        ListEntry_ForEachSafe(pos, n, &vcap->async_cap_list, async_cap_t, entry)
        {
            ListEntry_DeleteNode(&pos->entry);
            pos->result = 0;
            memcpy(pos->ref_cap_frame->data[0], buffer, BufferLen);
            SetEvent(pos->evt);
        }
        LeaveCriticalSection(&vcap->async_cap_cs);
    }
    if (vcap->param.on_frame_raw)
    {
        video_frame frm = {0};
        frm.data[0] = (unsigned char*)buffer;
        frm.linesize[0] = linesize;
        frm.width = mode_width[vcap->param.cap_mode];
        frm.height = mode_height[vcap->param.cap_mode];
        frm.format = VIDEO_FORMAT_RGB24;
        vcap->param.on_frame_raw(vcap->param.user_data, &frm);
    }
    if (vcap->param.option & VIDEOCAP_OPT_EANBLE_RESIZE)
    { /* user enabled resizing */
        unsigned char *src_data[4];
        int src_linesize[4] = {linesize, 0, 0, 0};
        src_data[0] = (unsigned char*)buffer;
        src_data[1] = NULL;
        src_data[2] = NULL;
        src_data[3] = NULL;
        EnterCriticalSection(&vcap->res_cs);
        if (vcap->param.option & VIDEOCAP_OPT_ENABLE_FLIP)
        {
            src_data[0] += (mode_height[vcap->param.cap_mode]-1)*src_linesize[0];
            src_linesize[0] = -src_linesize[0];
            sws_scale(vcap->sws_context, src_data, src_linesize, 0, mode_height[vcap->param.cap_mode], vcap->res_frame.data, vcap->res_frame.linesize);
        }
        else
        {
            sws_scale(vcap->sws_context, src_data, src_linesize,
                      0, mode_height[vcap->param.cap_mode], vcap->res_frame.data, vcap->res_frame.linesize);
        }
        if (vcap->param.on_frame)
        {
            vcap->param.on_frame(vcap->param.user_data, &vcap->res_frame);
        }
        LeaveCriticalSection(&vcap->res_cs);
    }
    else
    {
        if (vcap->param.on_frame)
        {
            video_frame frame;
            memset(&frame, 0, sizeof(video_frame));
            frame.width = mode_width[vcap->param.cap_mode];
            frame.height = mode_height[vcap->param.cap_mode];
            frame.data[0] = (unsigned char*)buffer;
            frame.linesize[0] = linesize;
            frame.format = VIDEO_FORMAT_RGB24;
            vcap->param.on_frame(vcap->param.user_data, &frame);
        }
    }
    free(Buffertmp);
    return S_OK;
}
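/* Sample grabber callback path for YUY2 frames. The buffer is converted to
 * I420 with libyuv (cropping the height when the source aspect ratio is
 * taller than the target, and rotating 180 degrees), scaled to the capture
 * mode when needed, converted to RGB24 and delivered via on_frame. */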
static HRESULT Handle_YUY2_Frame_CallBack(videocap *vcap, BYTE *pBuffer, long BufferLen)
{
    float fDstScale = (float)mode_width[vcap->param.cap_mode]/(float)mode_height[vcap->param.cap_mode];
    float fSrcScale = (float)vcap->iout_width/(float)vcap->iout_height;
    int used_width = vcap->iout_width;
    int used_height = vcap->iout_height;
    if (fSrcScale < fDstScale) {
        used_height = (int)ceil(vcap->iout_width/fDstScale);
    }
    else if (fSrcScale > fDstScale) {
        return S_FALSE;
    }
    int stride_y = used_width;
    int stride_u = (used_width + 1)/2;
    int stride_v = (used_width + 1)/2;
    uint8_t* m_i420 = (uint8_t*)malloc(I420DataSize(used_height, stride_y, stride_u, stride_v));
    int conversionResult = libyuv::ConvertToI420(pBuffer, BufferLen,
        m_i420,
        stride_y,
        m_i420 + stride_y * used_height,
        stride_u,
        m_i420 + stride_y * used_height + stride_u * ((used_height + 1) / 2),
        stride_v,
        0,
        (vcap->iout_height - used_height)/2, // vertical crop offset
        vcap->iout_width,
        vcap->iout_height,
        vcap->iout_width,
        used_height,
        libyuv::kRotate180,
        libyuv::FOURCC_YUY2
        );
    if (0 != conversionResult) {
        //capDbg(&vcap->param, "Failed to convert capture frame from type FOURCC_YUY2, conversionResult = %d, pBuffer = 0x%08x, BufferLen = %ld.", conversionResult, pBuffer, BufferLen);
        if (NULL != m_i420) {
            free(m_i420);
            m_i420 = NULL;
        }
        return S_FALSE;
    }
    char *buffertmp = (char*)malloc(mode_width[vcap->param.cap_mode]*mode_height[vcap->param.cap_mode]*3);
    char *buffer = NULL;
    // check whether resolution scaling is needed
    if (used_width == mode_width[vcap->param.cap_mode]) {
        conversionResult = libyuv::ConvertFromI420(m_i420,
            stride_y,
            m_i420 + stride_y * mode_height[vcap->param.cap_mode],
            stride_u,
            m_i420 + stride_y * mode_height[vcap->param.cap_mode] + stride_u * ((mode_height[vcap->param.cap_mode] + 1) / 2),
            stride_v,
            (uint8_t*)buffertmp,
            mode_width[vcap->param.cap_mode] * 3,
            mode_width[vcap->param.cap_mode],
            mode_height[vcap->param.cap_mode],
            libyuv::FOURCC_24BG);
        if (conversionResult < 0) {
            //capDbg(&vcap->param, "Failed to convert capture frame from I420 to RGB24, conversionResult = %d.", conversionResult);
            if (NULL != m_i420) {
                free(m_i420);
                m_i420 = NULL;
            }
            if (NULL != buffertmp) {
                free(buffertmp);
                buffertmp = NULL;
            }
            return S_FALSE;
        }
    }
    else {
        int dest_stride_y = mode_width[vcap->param.cap_mode];
        int dest_stride_u = (mode_width[vcap->param.cap_mode] + 1)/2;
        int dest_stride_v = (mode_width[vcap->param.cap_mode] + 1)/2;
        uint8_t* m_desti420 = (uint8_t*)malloc(I420DataSize(mode_height[vcap->param.cap_mode], dest_stride_y, dest_stride_u, dest_stride_v));
        conversionResult = libyuv::I420Scale(m_i420, used_width,
            m_i420 + used_width * used_height, used_width / 2,
            m_i420 + used_width * used_height * 5 / 4, used_width / 2,
            used_width, used_height,
            m_desti420, mode_width[vcap->param.cap_mode],
            m_desti420 + mode_width[vcap->param.cap_mode] * mode_height[vcap->param.cap_mode], mode_width[vcap->param.cap_mode] / 2,
            m_desti420 + mode_width[vcap->param.cap_mode] * mode_height[vcap->param.cap_mode] * 5 / 4, mode_width[vcap->param.cap_mode] / 2,
            mode_width[vcap->param.cap_mode], mode_height[vcap->param.cap_mode],
            libyuv::kFilterNone);
        if (conversionResult < 0) {
            //capDbg(&vcap->param, "Failed to I420Scale, conversionResult = %d.", conversionResult);
            if (NULL != m_desti420) {
                free(m_desti420);
                m_desti420 = NULL;
            }
            if (NULL != buffertmp) {
                free(buffertmp);
                buffertmp = NULL;
            }
            if (NULL != m_i420) {
                free(m_i420);
                m_i420 = NULL;
            }
            return S_FALSE;
        }
        conversionResult = libyuv::ConvertFromI420(m_desti420,
            dest_stride_y,
            m_desti420 + dest_stride_y * mode_height[vcap->param.cap_mode],
            dest_stride_u,
            m_desti420 + dest_stride_y * mode_height[vcap->param.cap_mode] + dest_stride_u * ((mode_height[vcap->param.cap_mode] + 1) / 2),
            dest_stride_v,
            (uint8_t*)buffertmp,
            mode_width[vcap->param.cap_mode] * 3,
            mode_width[vcap->param.cap_mode],
            mode_height[vcap->param.cap_mode],
            libyuv::FOURCC_24BG);
        if (NULL != m_desti420) {
            free(m_desti420);
            m_desti420 = NULL;
        }
        if (conversionResult < 0) {
            //capDbg(&vcap->param, "Failed to convert capture frame from I420 to RGB24, conversionResult = %d.", conversionResult);
            if (NULL != buffertmp) {
                free(buffertmp);
                buffertmp = NULL;
            }
            if (NULL != m_i420) {
                free(m_i420);
                m_i420 = NULL;
            }
            return S_FALSE;
        }
    }
    buffer = buffertmp;
    if (NULL != m_i420) {
        free(m_i420);
        m_i420 = NULL;
    }
    if (vcap->param.on_frame)
    {
        video_frame frame;
        memset(&frame, 0, sizeof(video_frame));
        frame.width = mode_width[vcap->param.cap_mode];
        frame.height = mode_height[vcap->param.cap_mode];
        frame.data[0] = (unsigned char*)buffer;
        frame.linesize[0] = mode_width[vcap->param.cap_mode]*3;
        frame.format = VIDEO_FORMAT_RGB24;
        vcap->param.on_frame(vcap->param.user_data, &frame);
    }
    if (NULL != buffertmp) {
        free(buffertmp);
        buffertmp = NULL;
    }
    return S_OK;
}
HRESULT STDMETHODCALLTYPE RvcSampleGrabberCB::BufferCB(
    double SampleTime,
    BYTE *pBuffer,
    long BufferLen)
{
    if (false == m_vcap->bloged) {
        get_output_mediatype(m_vcap);
        m_vcap->bloged = true;
    }
    if ((m_vcap->iout_width > 3 * mode_width[m_vcap->param.cap_mode]) && (m_vcap->iout_height > 3 * mode_height[m_vcap->param.cap_mode])) {
        return S_FALSE;
    }
    if (VIDEO_FORMAT_YUY2 == m_vcap->param.cap_frame_format) {
        return Handle_YUY2_Frame_CallBack(m_vcap, pBuffer, BufferLen);
    }
    else if (VIDEO_FORMAT_RGB24 == m_vcap->param.cap_frame_format) {
        return Handle_BGR_Frame_CallBack(m_vcap, pBuffer, BufferLen);
    }
    else {
        return S_FALSE;
    }
}
HRESULT STDMETHODCALLTYPE RvcSampleGrabberCB::QueryInterface(
    /* [in] */ REFIID riid,
    /* [iid_is][out] */ __RPC__deref_out void __RPC_FAR *__RPC_FAR *ppvObject)
{
    if (ppvObject == NULL)
        return E_POINTER;
    if (IsEqualIID(riid, IID_IUnknown)) {
        *ppvObject = (void*)this;
        return S_OK;
    } else if (IsEqualIID(riid, IID_ISampleGrabberCB)) {
        *ppvObject = (void*)this;
        return S_OK;
    }
    return E_NOTIMPL;
}

// Reference counting is deliberately simplified: AddRef is a no-op and the
// first Release() deletes the object.
ULONG STDMETHODCALLTYPE RvcSampleGrabberCB::AddRef(void)
{
    return 1;
}

ULONG STDMETHODCALLTYPE RvcSampleGrabberCB::Release(void)
{
    delete this;
    return 0;
}
static HRESULT STDMETHODCALLTYPE QueryInterface(ISampleGrabberCB * This,
                                                REFIID riid,
                                                void** ppvObject)
{
    if (This == NULL || ppvObject == NULL)
        return E_POINTER;
    if (IsEqualIID(riid, IID_IUnknown)) {
        *ppvObject = (void*)This;
        return S_OK;
    } else if (IsEqualIID(riid, IID_ISampleGrabberCB)) {
        *ppvObject = (void*)This;
        return S_OK;
    }
    return E_NOTIMPL;
}

static ULONG STDMETHODCALLTYPE AddRef(ISampleGrabberCB * This)
{
    return 1;
}

static ULONG STDMETHODCALLTYPE Release(ISampleGrabberCB * This)
{
    free(This);
    return 0;
}

static HRESULT STDMETHODCALLTYPE SampleCB(ISampleGrabberCB * This, double SampleTime, IMediaSample *pSample)
{
    return E_NOTIMPL;
}
static HRESULT STDMETHODCALLTYPE BufferCB(ISampleGrabberCB * This, double SampleTime, BYTE *pBuffer, long BufferLen)
{
    ISampleGrabberCBImpl *pImpl = (ISampleGrabberCBImpl *)This;
    videocap *vcap = pImpl->vcap;
    int linesize = BufferLen / mode_height[vcap->param.cap_mode];
    struct SwsContext *sws;
    int ibitcount = vcap->param.cap_frame_format == VIDEO_FORMAT_RGB24 ? 3 : 2;
    char *Buffertmp = (char*)malloc(mode_width[vcap->param.cap_mode]*mode_height[vcap->param.cap_mode]*3);
    uint8_t *src_data[4];
    int src_linesize[4];
    unsigned char *dst[4] = {(unsigned char*)Buffertmp, NULL, NULL, NULL};
    int dst_linesize[4] = {mode_width[vcap->param.cap_mode]*3, 0, 0, 0};
    char *buffer;
    int oriLen = mode_height[vcap->param.cap_mode]*mode_width[vcap->param.cap_mode]*3;
    if (false == vcap->bloged) {
        get_output_mediatype(vcap);
        vcap->bloged = true;
    }
    if (BufferLen != oriLen)
    {
        // compute the target image aspect ratio
        int srcH;
        float fDstScale = (float)mode_width[vcap->param.cap_mode]/(float)mode_height[vcap->param.cap_mode];
        float fSrcScale = (float)vcap->iout_width/(float)vcap->iout_height;
        if (fSrcScale != fDstScale)
        {
            // compute the crop offset
            int nWidth, nHeight, nOffset = 0;
            if (fSrcScale > fDstScale)
            {
                free(Buffertmp);
                return 0;
            }
            else if (fSrcScale < fDstScale)
            {
                // source is too tall: crop the height down to the target aspect ratio
                nWidth = vcap->iout_width;
                nHeight = (int)ceil(vcap->iout_width/fDstScale);
                nOffset = (vcap->iout_height - nHeight)/2*nWidth*3;
            }
            // SwsContext for the aspect-preserving conversion
            sws = sws_getContext(nWidth, nHeight, AV_PIX_FMT_BGR24, mode_width[vcap->param.cap_mode], mode_height[vcap->param.cap_mode], AV_PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
            av_image_alloc(src_data, src_linesize, vcap->iout_width, vcap->iout_height, AV_PIX_FMT_BGR24, 1);
            memcpy(src_data[0], pBuffer+nOffset, nWidth*nHeight*3);
            srcH = nHeight;
        }
        else
        {
            // SwsContext for the plain resolution conversion
            sws = sws_getContext(vcap->iout_width, vcap->iout_height, AV_PIX_FMT_BGR24, mode_width[vcap->param.cap_mode], mode_height[vcap->param.cap_mode], AV_PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
            av_image_alloc(src_data, src_linesize, vcap->iout_width, vcap->iout_height, AV_PIX_FMT_BGR24, 1);
            memcpy(src_data[0], pBuffer, BufferLen);
            srcH = vcap->iout_height;
        }
        // rescale to the requested capture resolution
        sws_scale(sws, src_data, src_linesize, 0, srcH, dst, dst_linesize);
        sws_freeContext(sws);
        av_freep(&src_data[0]);
        buffer = Buffertmp;
        BufferLen = mode_width[vcap->param.cap_mode]*mode_height[vcap->param.cap_mode]*3;
    }
    else
    {
        buffer = (char*)pBuffer;
    }
    linesize = BufferLen / mode_height[vcap->param.cap_mode];
    if (vcap->param.option & VIDEOCAP_OPT_ENABLE_GRAB)
    {
        if (vcap->cap_index++ % 3 == 0)
        { /* 3 is the grabbing interval */
            EnterCriticalSection(&vcap->cap_cs);
            //memcpy(vcap->cap_frame.data[0], pBuffer, BufferLen);
            memcpy(vcap->cap_frame.data[0], buffer, BufferLen);
            LeaveCriticalSection(&vcap->cap_cs);
        }
    }
    if (vcap->grab_cb_count)
    {
        if (vcap->param.on_grab)
        {
            video_frame frm = {0};
            frm.data[0] = (unsigned char*)buffer;
            frm.linesize[0] = linesize;
            frm.width = mode_width[vcap->param.cap_mode];
            frm.height = mode_height[vcap->param.cap_mode];
            frm.format = VIDEO_FORMAT_RGB24;
            vcap->param.on_grab(vcap->param.user_data, &frm);
        }
        InterlockedDecrement(&vcap->grab_cb_count);
    }
    if (vcap->param.option & VIDEOCAP_OPT_ENABLE_ASYNC_GRAB)
    {
        async_cap_t *pos, *n;
        EnterCriticalSection(&vcap->async_cap_cs);
        ListEntry_ForEachSafe(pos, n, &vcap->async_cap_list, async_cap_t, entry)
        {
            ListEntry_DeleteNode(&pos->entry);
            pos->result = 0;
            memcpy(pos->ref_cap_frame->data[0], buffer, BufferLen);
            SetEvent(pos->evt);
        }
        LeaveCriticalSection(&vcap->async_cap_cs);
    }
    if (vcap->param.on_frame_raw)
    {
        video_frame frm = {0};
        frm.data[0] = (unsigned char*)buffer;
        frm.linesize[0] = linesize;
        frm.width = mode_width[vcap->param.cap_mode];
        frm.height = mode_height[vcap->param.cap_mode];
        frm.format = VIDEO_FORMAT_RGB24;
        vcap->param.on_frame_raw(vcap->param.user_data, &frm);
    }
    if (vcap->param.option & VIDEOCAP_OPT_EANBLE_RESIZE)
    { /* user enabled resizing */
        unsigned char *src_data[4];
        int src_linesize[4] = {linesize, 0, 0, 0};
        src_data[0] = (unsigned char*)buffer;
        src_data[1] = NULL;
        src_data[2] = NULL;
        src_data[3] = NULL;
        EnterCriticalSection(&vcap->res_cs);
        if (vcap->param.option & VIDEOCAP_OPT_ENABLE_FLIP)
        {
            src_data[0] += (mode_height[vcap->param.cap_mode]-1)*src_linesize[0];
            src_linesize[0] = -src_linesize[0];
            sws_scale(vcap->sws_context, src_data, src_linesize, 0, mode_height[vcap->param.cap_mode], vcap->res_frame.data, vcap->res_frame.linesize);
        }
        else
        {
            sws_scale(vcap->sws_context, src_data, src_linesize,
                      0, mode_height[vcap->param.cap_mode], vcap->res_frame.data, vcap->res_frame.linesize);
        }
        if (vcap->param.on_frame)
        {
            vcap->param.on_frame(vcap->param.user_data, &vcap->res_frame);
        }
        LeaveCriticalSection(&vcap->res_cs);
    }
    else
    {
        if (vcap->param.on_frame)
        {
            video_frame frame;
            memset(&frame, 0, sizeof(video_frame));
            frame.width = mode_width[vcap->param.cap_mode];
            frame.height = mode_height[vcap->param.cap_mode];
            frame.data[0] = (unsigned char*)buffer;
            frame.linesize[0] = linesize;
            frame.format = VIDEO_FORMAT_RGB24;
            vcap->param.on_frame(vcap->param.user_data, &frame);
        }
    }
    free(Buffertmp);
    return S_OK;
}
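/* Connect the given filters in order (pFilters[0] -> pFilters[1] -> ...);
 * fails as soon as any pair cannot be connected. */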
static HRESULT TryConnectFilters(IGraphBuilder *pGraphBuilder, int cnt, IBaseFilter *pFilters[])
{
    HRESULT hr = S_OK;
    int i;
    if (!pGraphBuilder)
        return E_POINTER;
    if (cnt == 0)
        return S_OK;
    if (!pFilters)
        return E_POINTER;
    for (i = 1; i < cnt; ++i)
    {
        IBaseFilter *src = pFilters[i-1];
        IBaseFilter *dst = pFilters[i];
        if (!src || !dst)
            return E_POINTER;
        hr = ConnectFilters2(pGraphBuilder, src, dst);
        if (FAILED(hr))
        {
            return hr;
        }
    }
    return hr;
}
////////////////////////////////////////////////////////////////////////////////////////////////
/* used for device-lost event listening */
static DWORD WINAPI BackgroundThread(LPVOID *param)
{
    struct videocap *vcap = (struct videocap *)param;
    int quit = 0;
    HANDLE t;
    HANDLE evts[2] = {0};
    HANDLE hProcess = GetCurrentProcess();
    HRESULT hr;
    hr = vcap->me->GetEventHandle((OAEVENT*)&t);
    if (FAILED(hr))
        return 0;
    if (DuplicateHandle(hProcess, t, hProcess, &evts[0], DUPLICATE_SAME_ACCESS, FALSE, DUPLICATE_SAME_ACCESS) == FALSE)
        return 0;
    evts[1] = vcap->evt_thread_exit;
    while (!quit)
    {
        DWORD dwResult = WaitForMultipleObjects(2, evts, FALSE, INFINITE) - WAIT_OBJECT_0;
        if (0 == dwResult)
        {
            long lEventCode;
            LONG_PTR lParam1;
            LONG_PTR lParam2;
            vcap->me->GetEvent(&lEventCode, &lParam1, &lParam2, INFINITE);
            if (lEventCode == EC_DEVICE_LOST || lEventCode == EC_ERRORABORT)
            {
                if (vcap->param.on_device_lost)
                    (*vcap->param.on_device_lost)(vcap->param.user_data);
                quit = 1;
            }
            vcap->me->FreeEventParams(lEventCode, lParam1, lParam2);
        }
        else
        {
            quit = 1;
        }
    }
    CloseHandle(evts[0]);
    return 0;
}
////////////////////////////////////////////////////////////////////////////////////////////////
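/* Validate the capture parameters, allocate the videocap instance and set up
 * the optional grab / async-grab / resize state (frames, critical sections,
 * SwsContext). The DirectShow graph itself is built later in videocap_start(). */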
int videocap_create(videocap_t *h, videocap_param *param)
{
    videocap *vcap;
    //capDbg(param,"videocap_create, fps = %f.", param->fps);
    /* check param */
    if (!param)
        return -1;
    if (param->cap_mode < 0 || param->cap_mode >= VIDEOCAP_MAX_MODE)
        return -1;
    if (param->frame_fmt != VIDEO_FORMAT_I420 && param->frame_fmt != VIDEO_FORMAT_RGB24)
        return -1;
    if (param->fps < 1.0 ||
        param->fps > 100.0)
        return -1;
    if (param->pre_hwnd)
    {
        if (!IsWindow(param->pre_hwnd))
            return -1;
        if (param->pre_width < 0)
            return -1;
        if (param->pre_height < 0)
            return -1;
    }
    if (param->dev_id < 0)
        return -1;
    if (param->frame_fmt == VIDEO_FORMAT_I420 && !(param->option & VIDEOCAP_OPT_EANBLE_RESIZE)) {
        param->res_mode = param->cap_mode;
        param->option |= VIDEOCAP_OPT_EANBLE_RESIZE;
    }
    if (param->option & VIDEOCAP_OPT_EANBLE_RESIZE) {
        if (param->res_mode < VIDEOCAP_FRAME_SQCIF || param->res_mode > VIDEOCAP_FRAME_SVGA)
            return -1;
    }
    vcap = (videocap*)malloc(sizeof(videocap));
    if (!vcap)
        return -1;
    ZeroMemory((void*)vcap, sizeof(videocap));
    memcpy(&vcap->param, param, sizeof(videocap_param));
    if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) {
        int width = mode_width[param->cap_mode];
        int height = mode_height[param->cap_mode];
        if (video_frame_alloc(width, height, VIDEO_FORMAT_RGB24, &vcap->cap_frame) != 0) {
            free(vcap);
            return -1;
        }
        video_frame_fill_black(&vcap->cap_frame);
        InitializeCriticalSection(&vcap->cap_cs);
    }
    if (param->option & VIDEOCAP_OPT_ENABLE_ASYNC_GRAB) {
        InitializeCriticalSection(&vcap->async_cap_cs);
    }
    if (param->option & VIDEOCAP_OPT_EANBLE_RESIZE) {
        int width = mode_width[param->res_mode];
        int height = mode_height[param->res_mode];
        if (video_frame_alloc(width, height, param->frame_fmt, &vcap->res_frame) != 0) {
            if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) {
                DeleteCriticalSection(&vcap->cap_cs);
                video_frame_free(&vcap->cap_frame);
            }
            free(vcap);
            return -1;
        }
        video_frame_fill_black(&vcap->res_frame);
        vcap->sws_context = sws_getContext(mode_width[param->cap_mode],
                                           mode_height[param->cap_mode],
                                           AV_PIX_FMT_BGR24,
                                           mode_width[param->res_mode],
                                           mode_height[param->res_mode],
                                           vcap->param.frame_fmt == VIDEO_FORMAT_RGB24 ? AV_PIX_FMT_BGR24 : AV_PIX_FMT_YUV420P,
                                           SWS_FAST_BILINEAR,
                                           NULL,
                                           NULL,
                                           NULL);
        if (!vcap->sws_context) {
            video_frame_free(&vcap->res_frame);
            if (param->option & VIDEOCAP_OPT_ENABLE_GRAB) {
                DeleteCriticalSection(&vcap->cap_cs);
                video_frame_free(&vcap->cap_frame);
            }
            free(vcap);
            return -1;
        }
        InitializeCriticalSection(&vcap->res_cs);
    }
    vcap->bloged = false;
    vcap->iout_width = mode_width[param->cap_mode];
    vcap->iout_height = mode_height[param->cap_mode];
    *h = vcap;
    return 0;
}
void videocap_destroy(videocap_t h)
{
    if (h->param.option & VIDEOCAP_OPT_ENABLE_GRAB) {
        DeleteCriticalSection(&h->cap_cs);
        video_frame_free(&h->cap_frame);
    }
    if (h->param.option & VIDEOCAP_OPT_ENABLE_ASYNC_GRAB) {
        DeleteCriticalSection(&h->async_cap_cs);
    }
    if (h->param.option & VIDEOCAP_OPT_EANBLE_RESIZE) {
        sws_freeContext(h->sws_context);
        DeleteCriticalSection(&h->res_cs);
        video_frame_free(&h->res_frame);
    }
    free(h);
}
// map a brightness value in [0-100] to the device's real range
static int transtorealbrightnessvalue(int ibright, int imaxbrightness, int iminbrightness)
{
    float fvalue = ibright * (imaxbrightness - iminbrightness) / 10;
    int ivalue = fvalue;
    int ilast = ivalue % 10;
    int inum = ivalue / 10;
    if (ilast >= 5) { // round to nearest
        inum++;
    }
    inum += iminbrightness;
    if (inum < iminbrightness) {
        inum = iminbrightness;
    }
    if (inum > imaxbrightness) {
        inum = imaxbrightness;
    }
    return inum;
}

// map a real brightness value back to [0-100]
static int transfromrealbrightnessvalue(int ibright, int imaxbrightness, int iminbrightness)
{
    int itotal = imaxbrightness - iminbrightness;
    int ivalue = ibright - iminbrightness;
    float fvalue = ivalue * 1000 / itotal;
    ivalue = fvalue;
    int ilast = ivalue % 10;
    int inum = ivalue / 10;
    if (ilast >= 5) { // round to nearest
        inum++;
    }
    return inum;
}
int videocap_adj_brightness(videocap_t h, int nValue)
{
    if (NULL == h) {
        return S_FALSE;
    }
    IAMVideoProcAmp *pProcAmp = 0;
    HRESULT hr = h->sourcefilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pProcAmp);
    if (SUCCEEDED(hr))
    {
        long Min, Max, Step, Default, Flags, Val;
        // brightness
        if ((nValue >= 0) && (nValue <= 100))
        {
            hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step,
                                    &Default, &Flags);
            if (SUCCEEDED(hr))
            {
                Flags = VideoProcAmp_Flags_Manual;
                hr = pProcAmp->Get(VideoProcAmp_Brightness, &Val, &Flags);
                Val = transtorealbrightnessvalue((int)nValue, (int)Max, (int)Min);
                hr = pProcAmp->Set(VideoProcAmp_Brightness, Val, Flags);
            }
        }
        pProcAmp->Release();
    }
    return hr;
}

int videocap_set_autobrightness(videocap_t h)
{
    if (NULL == h) {
        return S_FALSE;
    }
    IAMVideoProcAmp *pProcAmp = 0;
    HRESULT hr = h->sourcefilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pProcAmp);
    if (SUCCEEDED(hr))
    {
        long Min, Max, Step, Default, Flags, Val;
        // brightness
        hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step,
                                &Default, &Flags);
        if (SUCCEEDED(hr))
        {
            Flags = VideoProcAmp_Flags_Auto;
            Val = Min + (Max - Min)/2;
            hr = pProcAmp->Set(VideoProcAmp_Brightness, Val, Flags);
        }
        pProcAmp->Release();
    }
    return hr;
}

int videocap_get_brightness(videocap_t h, int *nValue)
{
    if (NULL == h) {
        return S_FALSE;
    }
    IAMVideoProcAmp *pProcAmp = 0;
    HRESULT hr = h->sourcefilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pProcAmp);
    if (SUCCEEDED(hr))
    {
        long Min, Max, Step, Default, Flags, Val;
        // brightness
        hr = pProcAmp->GetRange(VideoProcAmp_Brightness, &Min, &Max, &Step, &Default, &Flags);
        if (SUCCEEDED(hr))
        {
            hr = pProcAmp->Get(VideoProcAmp_Brightness, &Val, &Flags);
            *nValue = transfromrealbrightnessvalue((int)Val, (int)Max, (int)Min);
        }
        pProcAmp->Release();
    }
    return hr;
}
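/* Build and run the DirectShow graph: create the filter graph, the capture
 * source selected by dev_id, an optional color space converter / AVI decoder,
 * the sample grabber (RGB24 or YUY2), an optional horizontal-flip filter and
 * a video or null renderer, then try several filter orderings until one
 * connects and start the graph. */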
  1197. int videocap_start(videocap_t h)
  1198. {
  1199. HRESULT hr = S_OK;
  1200. if (!h) {
  1201. return VIDEOCAP_ERROR;
  1202. }
  1203. h->grab_cb_count = 0;
  1204. ListEntry_InitHead(&h->async_cap_list);
  1205. // create filter graph
  1206. hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
  1207. IID_IGraphBuilder, (void**)&h->graphbuilder);
  1208. if (FAILED(hr)){
  1209. capDbg(&h->param,"CoCreateInstance failed.");
  1210. goto on_error;
  1211. }
  1212. //else{
  1213. // capDbg(&h->param,"CoCreateInstance success, and fps = %f.", h->param.fps);
  1214. //}
  1215. hr = h->graphbuilder->QueryInterface(
  1216. IID_IMediaControl, (void**)&h->mc);
  1217. if (FAILED(hr)){
  1218. capDbg(&h->param,"QueryInterface failed.");
  1219. goto on_error;
  1220. }
  1221. hr = h->graphbuilder->QueryInterface(
  1222. IID_IMediaEvent, (void**)&h->me);
  1223. if (FAILED(hr)){
  1224. capDbg(&h->param,"QueryInterface IID_IMediaEvent failed.");
  1225. goto on_error;
  1226. }
  1227. #ifdef _DEBUG
  1228. {
  1229. IUnknown *pUnk;
  1230. hr = h->graphbuilder->QueryInterface(
  1231. IID_IUnknown, (void**)&pUnk);
  1232. if (FAILED(hr)){
  1233. goto on_error;
  1234. }
  1235. hr = AddToRot(pUnk, &h->dwROTRegister);
  1236. pUnk->Release();
  1237. if (FAILED(hr)){
  1238. goto on_error;
  1239. }
  1240. }
  1241. #endif
  1242. /* create video source filter and add to graph */
  1243. hr = CreateFilterByIndex(CLSID_VideoInputDeviceCategory,
  1244. (void**)&h->sourcefilter, h->param.dev_id);
  1245. if (FAILED(hr)){
  1246. goto on_error;
  1247. }
  1248. hr = h->graphbuilder->AddFilter(h->sourcefilter, L"videosource");
  1249. if (FAILED(hr)){
  1250. goto on_error;
  1251. }
  1252. hr = set_video_source_format((videocap*)h);
  1253. if (FAILED(hr)){
  1254. goto on_error;
  1255. }
  1256. hr = CoCreateInstance(CLSID_Colour, NULL, CLSCTX_INPROC_SERVER,
  1257. IID_IBaseFilter, (void**) &h->cpcfilter);
  1258. if (FAILED(hr)){
  1259. goto on_error;
  1260. }
  1261. hr = h->graphbuilder->AddFilter(h->cpcfilter, L"cpcfilter");
  1262. if (FAILED(hr)){
  1263. goto on_error;
  1264. }
  1265. hr = CoCreateInstance(CLSID_AVIDec, NULL, CLSCTX_INPROC_SERVER,
  1266. IID_IBaseFilter, (void**) &h->avidecfilter);
  1267. if (FAILED(hr)){
  1268. goto on_error;
  1269. }
  1270. hr = h->graphbuilder->AddFilter(h->avidecfilter, L"avidecfilter");
  1271. if (FAILED(hr)){
  1272. goto on_error;
  1273. }
  1274. // create grabber filter and add to graph
  1275. hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,
  1276. IID_IBaseFilter, (void**) &h->grabberfilter);
  1277. if (FAILED(hr)){
  1278. goto on_error;
  1279. }
  1280. hr = h->graphbuilder->AddFilter(h->grabberfilter, L"grabber");
  1281. if (FAILED(hr)){
  1282. goto on_error;
  1283. }
  1284. hr = h->grabberfilter->QueryInterface(
  1285. IID_ISampleGrabber, (void**)&h->grabber);
  1286. if (FAILED(hr)){
  1287. goto on_error;
  1288. }
  1289. {
  1290. int ibitcount = 3;
  1291. int width = mode_width[h->param.cap_mode];
  1292. int height = mode_height[h->param.cap_mode];
  1293. AM_MEDIA_TYPE *mt = (AM_MEDIA_TYPE*)CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE));
  1294. VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER*)CoTaskMemAlloc(sizeof(VIDEOINFOHEADER));
  1295. if (VIDEO_FORMAT_YUY2 == h->param.cap_frame_format){
  1296. ibitcount = 2;
  1297. }
  1298. memset(mt, 0, sizeof(AM_MEDIA_TYPE));
  1299. memset(pvi, 0, sizeof(VIDEOINFOHEADER));
  1300. mt->lSampleSize = width * height * ibitcount;
  1301. mt->majortype = MEDIATYPE_Video;
  1302. mt->subtype = MEDIASUBTYPE_RGB24;
  1303. if (VIDEO_FORMAT_YUY2 == h->param.cap_frame_format){
  1304. mt->subtype = MEDIASUBTYPE_YUY2;
  1305. }
  1306. mt->formattype = FORMAT_VideoInfo;
  1307. mt->bFixedSizeSamples = TRUE;
  1308. mt->bTemporalCompression = FALSE;
  1309. mt->cbFormat = sizeof(VIDEOINFOHEADER);
  1310. mt->pbFormat = (BYTE*)pvi;
  1311. pvi->bmiHeader.biWidth = width;
  1312. pvi->bmiHeader.biHeight = height;
  1313. pvi->bmiHeader.biSizeImage = width * height * ibitcount;
  1314. pvi->bmiHeader.biBitCount = ibitcount*8;
  1315. pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
  1316. pvi->AvgTimePerFrame = (REFERENCE_TIME)(10000000 / h->param.fps);
  1317. pvi->dwBitRate = (DWORD)(width*height*ibitcount*8*h->param.fps);
  1318. //capDbg(&h->param,"width = %d, height = %d, BitRate = %d, fps = %f.", width, height, pvi->dwBitRate, h->param.fps);
  1319. hr = h->grabber->SetMediaType(mt);
  1320. FreeMediaTypeEx(mt);
  1321. if (FAILED(hr)){
  1322. capDbg(&h->param,"SetMediaType error, err=0x%08x", hr);
  1323. goto on_error;
  1324. }
  1325. }
  1326. h->grabber->SetOneShot(FALSE);
  1327. {
  1328. ISampleGrabberCB *pImpl = new RvcSampleGrabberCB(h);
  1329. if (!pImpl)
  1330. goto on_error;
  1331. // 0: SampleCB
  1332. // 1: BufferCB
  1333. h->grabber->SetCallback( pImpl, 1);////////////////////////
  1334. }
  1335. if (h->param.option & VIDEOCAP_OPT_HOZFLIP) {
  1336. h->horflipfilter = (IBaseFilter*)videohorflip_create_filter();
  1337. if (!h->horflipfilter){
  1338. capDbg(&h->param,"videohorflip_create_filter error");
  1339. goto on_error;
  1340. }
  1341. hr = h->graphbuilder->AddFilter(h->horflipfilter, L"horflip");
  1342. if (FAILED(hr)){
  1343. capDbg(&h->param,"AddFilter error, err=0x%08x", hr);
  1344. goto on_error;
  1345. }
  1346. }
  1347. // create render filter and add to graph
  1348. {
  1349. hr = CoCreateInstance(h->param.pre_hwnd ? CLSID_VideoRendererDefault : CLSID_NullRenderer,
  1350. NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**) &h->renderfilter);
  1351. if (FAILED(hr)){
  1352. capDbg(&h->param,"CoCreateInstance error, err=0x%08x", hr);
  1353. goto on_error;
  1354. }
  1355. }
  1356. hr = h->graphbuilder->AddFilter(h->renderfilter, L"renderfilter");
  1357. if (FAILED(hr)){
  1358. capDbg(&h->param,"AddFilter error, err=0x%08x", hr);
  1359. goto on_error;
  1360. }
    // try to connect the filters, preferring the shortest chain that works
    if (!(h->param.option & VIDEOCAP_OPT_HOZFLIP)) {
        IBaseFilter *Filter1[] = {h->sourcefilter, h->grabberfilter, h->renderfilter};
        IBaseFilter *Filter2[] = {h->sourcefilter, h->cpcfilter, h->grabberfilter, h->renderfilter};
        IBaseFilter *Filter3[] = {h->sourcefilter, h->avidecfilter, h->grabberfilter, h->renderfilter};
        IBaseFilter *Filter4[] = {h->sourcefilter, h->avidecfilter, h->cpcfilter, h->grabberfilter, h->renderfilter};
        struct {
            int cnt;
            IBaseFilter **Filters;
        } tpl[] = {
            {ARRAYSIZE(Filter1), Filter1},
            {ARRAYSIZE(Filter2), Filter2},
            {ARRAYSIZE(Filter3), Filter3},
            {ARRAYSIZE(Filter4), Filter4},
        };
        int i;
        for (i = 0; i < ARRAYSIZE(tpl); ++i) {
            Sleep(50);
            hr = TryConnectFilters(h->graphbuilder, tpl[i].cnt, tpl[i].Filters);
            if (SUCCEEDED(hr)) {
                break;
            }
        }
        if (i == ARRAYSIZE(tpl)) {
            char strinfo[MAX_PATH] = { 0 };
            _snprintf(strinfo, MAX_PATH - 1, "!(h->param.option & VIDEOCAP_OPT_HOZFLIP) TryConnectFilters failed, hr = 0x%08x.", hr);
            capDbg(&h->param, strinfo);
            capLogEvent(&h->param, 2, strinfo);
            goto on_error;
        }
    }
    else {
        IBaseFilter *Filter1[] = {h->sourcefilter, h->grabberfilter, h->horflipfilter, h->renderfilter};
        IBaseFilter *Filter2[] = {h->sourcefilter, h->cpcfilter, h->grabberfilter, h->horflipfilter, h->renderfilter};
        IBaseFilter *Filter3[] = {h->sourcefilter, h->avidecfilter, h->grabberfilter, h->horflipfilter, h->renderfilter};
        IBaseFilter *Filter4[] = {h->sourcefilter, h->avidecfilter, h->cpcfilter, h->grabberfilter, h->horflipfilter, h->renderfilter};
        struct {
            int cnt;
            IBaseFilter **Filters;
        } tpl[] = {
            {ARRAYSIZE(Filter1), Filter1},
            {ARRAYSIZE(Filter2), Filter2},
            {ARRAYSIZE(Filter3), Filter3},
            {ARRAYSIZE(Filter4), Filter4},
        };
        int i;
        for (i = 0; i < ARRAYSIZE(tpl); ++i) {
            Sleep(50);
            hr = TryConnectFilters(h->graphbuilder, tpl[i].cnt, tpl[i].Filters);
            if (SUCCEEDED(hr)) {
                break;
            }
        }
        if (i == ARRAYSIZE(tpl)) {
            char strerror[MAX_PATH] = { 0 };
            _snprintf(strerror, MAX_PATH - 1, "TryConnectFilters failed, hr = 0x%08x.", hr);
            capDbg(&h->param, strerror);
            capLogEvent(&h->param, 2, strerror);
            goto on_error;
        }
    }
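    /*
     * Attach the renderer's video window to the caller-supplied preview HWND:
     * the window becomes a child of the host window, sized to the smaller of
     * the requested preview size and the host client area, and its input
     * messages are drained back to the host window.
     */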
    if (h->param.pre_hwnd) {
        RECT rc;
        hr = h->renderfilter->QueryInterface(
            IID_IVideoWindow, (void**)&h->videowindow);
        if (FAILED(hr)) {
            capDbg(&h->param, "renderfilter QueryInterface failed!");
            goto on_error;
        }
        GetClientRect(h->param.pre_hwnd, &rc);
        h->videowindow->put_Owner((OAHWND)h->param.pre_hwnd);
        h->videowindow->put_Visible(OAFALSE);
        h->videowindow->put_Left(0);
        h->videowindow->put_Top(0);
        h->videowindow->put_Height(min(h->param.pre_height, rc.bottom - rc.top));
        h->videowindow->put_Width(min(h->param.pre_width, rc.right - rc.left));
        h->videowindow->put_WindowStyle(
            WS_CHILD | WS_VISIBLE | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
        h->videowindow->put_MessageDrain((OAHWND)h->param.pre_hwnd);
        h->videowindow->put_Visible(OATRUE);
    }
    if (h->param.on_device_lost) {
        //h->thread_background = CreateThread(NULL, 0, &BackgroundThread, h, 0, 0);
        //h->evt_thread_exit = CreateEvent(NULL, FALSE, FALSE, NULL);
    }
    hr = h->mc->Run();
    if (FAILED(hr)) {
        char strmsg[MAX_PATH] = {0};
        _snprintf(strmsg, MAX_PATH - 1, "h->mc->Run() failed, hr = 0x%08x.", hr);
        capDbg(&h->param, strmsg);
        capLogEvent(&h->param, 2, strmsg);
        goto on_error;
    }
    h->running = TRUE;
    return VIDEOCAP_OK;
on_error:
    release_all_interfaces(h);
    return VIDEOCAP_ERROR;
}
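/*
 * Stop the running capture graph.  Any async grab requests still queued are
 * cancelled (result = -1) and their events signalled so callers blocked in
 * videocap_async_grab() can return.  All DirectShow interfaces are released
 * afterwards, so the handle has to be started again before further grabs.
 */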
int videocap_stop(videocap_t h)
{
    long code;
    if (!h || !h->running)
        return -1;
    if (h->param.on_device_lost) {
        /* the background thread is only created in a disabled code path above,
           so guard against NULL handles before signalling/waiting on them */
        if (h->evt_thread_exit)
            SetEvent(h->evt_thread_exit);
        if (h->thread_background) {
            WaitForSingleObject(h->thread_background, INFINITE);
            CloseHandle(h->thread_background);
            h->thread_background = NULL;
        }
        if (h->evt_thread_exit) {
            CloseHandle(h->evt_thread_exit);
            h->evt_thread_exit = NULL;
        }
    }
    h->mc->Stop();
    h->me->WaitForCompletion(INFINITE, &code);
    h->running = FALSE;
    if (h->param.option & VIDEOCAP_OPT_ENABLE_ASYNC_GRAB) {
        EnterCriticalSection(&h->async_cap_cs);
        {
            async_cap_t *pos, *n;
            ListEntry_ForEachSafe(pos, n, &h->async_cap_list, async_cap_t, entry) {
                ListEntry_DeleteNode(&pos->entry);
                pos->result = -1; // cancel
                SetEvent(pos->evt);
            }
        }
        LeaveCriticalSection(&h->async_cap_cs);
    }
#ifdef _DEBUG
    RemoveFromRot(h->dwROTRegister);
    h->dwROTRegister = 0;
#endif
    release_all_interfaces(h);
    return VIDEOCAP_OK;
}
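/*
 * Synchronous grab: copies the most recent frame delivered by the sample
 * grabber callback (h->cap_frame) under h->cap_cs.  Requires the capture to
 * have been started with VIDEOCAP_OPT_ENABLE_GRAB.
 */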
int videocap_grab(videocap_t h, video_frame *frame)
{
    int ret;
    if (!h)
        return -1;
    if (!h->running)
        return -1;
    if (!(h->param.option & VIDEOCAP_OPT_ENABLE_GRAB)) /* initialized without the grab option */
        return -1;
    EnterCriticalSection(&h->cap_cs);
    ret = video_frame_copy(frame, &h->cap_frame);
    LeaveCriticalSection(&h->cap_cs);
    return ret;
}
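/*
 * Asynchronous grab: queues a request that the grabber callback is expected
 * to fill directly into the caller's frame, then blocks on a per-request
 * event until the callback (or videocap_stop) signals it.
 */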
int videocap_async_grab(videocap_t h, video_frame *frame)
{
    async_cap_t *ac;
    int ret;
    if (!h)
        return -1;
    if (!h->running)
        return -1;
    if (!(h->param.option & VIDEOCAP_OPT_ENABLE_ASYNC_GRAB)) /* initialized without the async grab option */
        return -1;
    ac = (async_cap_t*)malloc(sizeof(async_cap_t));
    if (!ac)
        return -1;
    ac->result = 0;
    ac->ref_cap_frame = frame;
    ac->evt = CreateEvent(NULL, FALSE, FALSE, NULL);
    if (!ac->evt) {
        free(ac);
        return -1;
    }
    EnterCriticalSection(&h->async_cap_cs);
    ListEntry_AddTail(&h->async_cap_list, &ac->entry);
    LeaveCriticalSection(&h->async_cap_cs);
    WaitForSingleObject(ac->evt, INFINITE);
    ret = ac->result; /* -1 if the request was cancelled by videocap_stop() */
    CloseHandle(ac->evt);
    free(ac);
    return ret;
}
int videocap_incrment_grab_cb(videocap_t h)
{
    if (!h)
        return -1;
    if (!h->running)
        return -1;
    InterlockedIncrement(&h->grab_cb_count);
    return 0;
}
int videocap_get_frame(videocap_t h, video_frame *frame)
{
    int ret;
    if (!h)
        return -1;
    if (!(h->param.option & VIDEOCAP_OPT_EANBLE_RESIZE))
        return -1;
    if (!h->running)
        return -1;
    EnterCriticalSection(&h->res_cs);
    ret = video_frame_copy(frame, &h->res_frame);
    LeaveCriticalSection(&h->res_cs);
    return ret;
}
int videocap_is_running(videocap_t h, BOOL *state)
{
    if (!h)
        return VIDEOCAP_ERROR;
    if (state)
        *state = h->running ? TRUE : FALSE;
    return h->running;
}
int videocap_set_preview_wnd_visible(videocap_t h, BOOL visible)
{
    if (!h || !h->videowindow)
        return VIDEOCAP_ERROR;
    h->videowindow->put_Visible(
        visible ? OATRUE : OAFALSE);
    return VIDEOCAP_OK;
}
int videocap_get_preview_wnd_visible(videocap_t h, BOOL *visible)
{
    long l = OAFALSE;
    if (!h || !h->videowindow)
        return VIDEOCAP_ERROR;
    h->videowindow->get_Visible(&l);
    *visible = l ? TRUE : FALSE;
    return VIDEOCAP_OK;
}
int videocap_set_preview_wnd_width(videocap_t h, int width)
{
    if (!h)
        return -1;
    if (!h->param.pre_hwnd || !h->videowindow)
        return -1;
    h->videowindow->put_Width(width);
    return 0;
}
int videocap_set_preview_wnd_height(videocap_t h, int height)
{
    if (!h)
        return -1;
    if (!h->param.pre_hwnd || !h->videowindow)
        return -1;
    h->videowindow->put_Height(height);
    return 0;
}
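/*
 * Walk the capture pin's stream capabilities and pick the entry whose
 * resolution matches the requested capture mode (and, for YUY2 capture, the
 * matching subtype), then apply it with the requested frame rate via
 * IAMStreamConfig::SetFormat.
 */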
static HRESULT set_video_source_format(videocap *vcap)
{
    HRESULT hr;
    IAMStreamConfig *config;
    ICaptureGraphBuilder2 *capturebuilder;
    VIDEOINFOHEADER *pvi;
    int i, count, size = 0;
    BOOL alreadyset = FALSE;
    // create capture builder
    hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER,
        IID_ICaptureGraphBuilder2, (void**)&capturebuilder);
    if (FAILED(hr))
        return hr;
    hr = capturebuilder->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, vcap->sourcefilter,
        IID_IAMStreamConfig, (void**)&config);
    if (FAILED(hr)) {
        capturebuilder->Release(); /* do not leak the builder on failure */
        return hr;
    }
    config->GetNumberOfCapabilities(&count, &size);
    if (size == sizeof(VIDEO_STREAM_CONFIG_CAPS)) {
        int width = mode_width[vcap->param.cap_mode];
        int height = mode_height[vcap->param.cap_mode];
        for (i = 0; i < count && !alreadyset; ++i)
        {
            VIDEO_STREAM_CONFIG_CAPS scc;
            AM_MEDIA_TYPE *pmtConfig;
            hr = config->GetStreamCaps(i, &pmtConfig, (BYTE*)&scc);
            if (FAILED(hr))
                continue;
            if (IsEqualIID(pmtConfig->majortype, MEDIATYPE_Video) &&
                IsEqualIID(pmtConfig->formattype, FORMAT_VideoInfo) &&
                (pmtConfig->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
                pmtConfig->pbFormat != NULL)
            {
                if (vcap->param.cap_frame_format == VIDEO_FORMAT_YUY2) {
                    if (!IsEqualIID(pmtConfig->subtype, MEDIASUBTYPE_YUY2)) {
                        FreeMediaTypeEx(pmtConfig); /* was leaked by the bare continue */
                        continue;
                    }
                }
                pvi = (VIDEOINFOHEADER*)pmtConfig->pbFormat;
                if (pvi->bmiHeader.biWidth == width && pvi->bmiHeader.biHeight == height)
                {
                    pvi->AvgTimePerFrame = (REFERENCE_TIME)(10000000 / vcap->param.fps);
                    hr = config->SetFormat(pmtConfig);
                    if (SUCCEEDED(hr))
                    {
                        char *subtype_str = GuidToString(pmtConfig->subtype);
                        //char strmsg[MAX_PATH] = {0};
                        //_snprintf(strmsg, MAX_PATH,"[{32595559-0000-0010-8000-00AA00389B71->MEDIASUBTYPE_YUY2},{e436eb7d-524f-11ce-9f53-0020af0ba770->MEDIASUBTYPE_RGB24}]match video format is %s, width is %d, height is %d.", subtype_str, pvi->bmiHeader.biWidth, pvi->bmiHeader.biHeight);
                        //capLogEvent(&vcap->param, 0, strmsg);
                        free(subtype_str);
                        alreadyset = TRUE;
                    }
                    else
                    {
                        alreadyset = FALSE;
                    }
                }
            }
            FreeMediaTypeEx(pmtConfig);
        }
    }
    config->Release();
    capturebuilder->Release();
    return hr;
}
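/*
 * Read back the media type actually negotiated on the sample grabber's input
 * pin and record its dimensions; if they differ from the requested capture
 * mode, the full format description is logged for diagnosis.
 */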
HRESULT get_output_mediatype(videocap *vcap)
{
    AM_MEDIA_TYPE mt;
    HRESULT hr;
    VIDEOINFOHEADER *videoHeader = NULL;
    ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
    hr = vcap->grabber->GetConnectedMediaType(&mt);
    if (FAILED(hr))
    {
        capDbg(&vcap->param, "GetConnectedMediaType Failed, hr=0x%08x!", hr);
        return hr;
    }
    videoHeader = (VIDEOINFOHEADER*)(mt.pbFormat);
    vcap->iout_width = videoHeader->bmiHeader.biWidth;
    vcap->iout_height = videoHeader->bmiHeader.biHeight;
    if ((mode_width[vcap->param.cap_mode] != videoHeader->bmiHeader.biWidth) || (mode_height[vcap->param.cap_mode] != videoHeader->bmiHeader.biHeight)) {
        char *subtype_str = NULL;
        char strmsg[MAX_PATH * 2] = { 0 };
        subtype_str = GuidToString(mt.subtype);
        _snprintf(strmsg, MAX_PATH * 2 - 1, "grabber Format Width=%d, Height=%d, biBitCount=%d, biSizeImage=%d, biCompression=%d, biPlanes=%d, biSize=%d, subtype=%s, newPmt->lSampleSize=%d, newPmt->bFixedSizeSamples=%d, newPmt->bTemporalCompression=%d",
            videoHeader->bmiHeader.biWidth,
            videoHeader->bmiHeader.biHeight,
            videoHeader->bmiHeader.biBitCount,
            videoHeader->bmiHeader.biSizeImage,
            videoHeader->bmiHeader.biCompression,
            videoHeader->bmiHeader.biPlanes,
            videoHeader->bmiHeader.biSize,
            subtype_str,
            mt.lSampleSize,
            mt.bFixedSizeSamples,
            mt.bTemporalCompression);
        capLogEvent(&vcap->param, 1, strmsg);
        // escalate only when both dimensions differ from the requested mode
        if ((mode_width[vcap->param.cap_mode] != videoHeader->bmiHeader.biWidth) && (mode_height[vcap->param.cap_mode] != videoHeader->bmiHeader.biHeight)) {
            capLogEvent(&vcap->param, 3, strmsg);
        }
        free(subtype_str);
    }
    // release the format block copied by GetConnectedMediaType
    if (mt.cbFormat != 0 && mt.pbFormat != NULL)
        CoTaskMemFree((PVOID)mt.pbFormat);
    if (mt.pUnk != NULL)
        mt.pUnk->Release();
    return S_OK;
}
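/*
 * Release every DirectShow interface held by the handle.  The video window's
 * owner is cleared before release so the preview HWND is not torn down with
 * the renderer, and all filters are removed from the graph before the graph
 * builder itself is released.
 */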
static void release_all_interfaces(videocap *h)
{
    if (h->mc) {
        h->mc->Release();
        h->mc = NULL;
    }
    if (h->me) {
        h->me->Release();
        h->me = NULL;
    }
    if (h->videowindow) {
        h->videowindow->put_Owner((OAHWND)NULL);
        h->videowindow->Release();
        h->videowindow = NULL;
    }
    if (h->graphbuilder) {
        RemoveGraphAllFilters(h->graphbuilder);
        h->graphbuilder->Release();
        h->graphbuilder = NULL;
    }
    if (h->sourcefilter) {
        h->sourcefilter->Release();
        h->sourcefilter = NULL;
    }
    if (h->avidecfilter) {
        h->avidecfilter->Release();
        h->avidecfilter = NULL;
    }
    if (h->cpcfilter) {
        h->cpcfilter->Release();
        h->cpcfilter = NULL;
    }
    if (h->grabber) {
        h->grabber->Release();
        h->grabber = NULL;
    }
    if (h->grabberfilter) {
        h->grabberfilter->Release();
        h->grabberfilter = NULL;
    }
    if (h->horflipfilter) {
        h->horflipfilter->Release();
        h->horflipfilter = NULL;
    }
    if (h->renderfilter) {
        h->renderfilter->Release();
        h->renderfilter = NULL;
    }
}
////////////////////////////////////////////////////////////
static HRESULT get_device_count_cb(int index, IMoniker *pMoniker, void *pUserData1, void *pUserData2)
{
    if (pMoniker)
        ++*(int*)pUserData1;
    return S_FALSE;
}
int videocap_get_device_count()
{
    int count = 0;
    HRESULT hr = WalkFilterCategory(CLSID_VideoInputDeviceCategory,
        &get_device_count_cb, &count, NULL);
    return SUCCEEDED(hr) ? count : 0;
}
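/*
 * Device property helpers.  struct _buf carries the caller's buffer and its
 * capacity into the WalkFilterCategory callbacks; on return, buf.len holds
 * the byte size of the requested property (or -1 on failure), and the value
 * is copied only if it fits.  The callbacks return S_FALSE for monikers they
 * skip and S_OK once the requested device index has been handled.
 */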
struct _buf {
    WCHAR *buf;
    int len;
};
static HRESULT get_device_name_cb(int index, IMoniker *pMoniker, void *pUserData1, void *pUserData2)
{
    if (pMoniker) {
        int *p_id = (int*)pUserData1;
        struct _buf *buf = (struct _buf*)pUserData2;
        HRESULT hr;
        if (index == *p_id) {
            IPropertyBag *pPropBag;
            VARIANT name;
            int copylen;
            hr = pMoniker->BindToStorage(NULL, NULL,
                IID_IPropertyBag, (void**)&pPropBag);
            if (FAILED(hr)) {
                buf->len = -1; /* failed */
                return hr;
            }
            VariantInit(&name);
            name.vt = VT_BSTR;
            hr = pPropBag->Read(L"FriendlyName", &name, NULL);
            if (FAILED(hr)) {
                pPropBag->Release();
                VariantClear(&name);
                buf->len = -1;
                return hr;
            }
            copylen = (int)(SysStringByteLen(name.bstrVal) + sizeof(WCHAR)); /* include terminator */
            if (buf->buf && copylen <= buf->len) /* buf->len initially holds the caller's capacity */
                memcpy(buf->buf, name.bstrVal, copylen);
            buf->len = copylen;
            pPropBag->Release();
            VariantClear(&name);
            return S_OK;
        }
    }
    return S_FALSE;
}
int videocap_get_device_name(int device_id, WCHAR *buf, int len)
{
    struct _buf x = {buf, len};
    int dev_id = device_id;
    HRESULT hr = WalkFilterCategory(CLSID_VideoInputDeviceCategory,
        &get_device_name_cb, &dev_id, &x);
    if (hr == S_OK)
        return x.len;
    return -1;
}
static HRESULT get_device_path_cb(int index, IMoniker *pMoniker, void *pUserData1, void *pUserData2)
{
    if (pMoniker) {
        int *p_id = (int*)pUserData1;
        struct _buf *buf = (struct _buf*)pUserData2;
        HRESULT hr;
        if (index == *p_id) {
            IPropertyBag *pPropBag;
            VARIANT path;
            int copylen;
            hr = pMoniker->BindToStorage(NULL, NULL,
                IID_IPropertyBag, (void**)&pPropBag);
            if (FAILED(hr)) {
                buf->len = -1; /* failed */
                return hr;
            }
            VariantInit(&path);
            path.vt = VT_BSTR;
            hr = pPropBag->Read(L"DevicePath", &path, NULL);
            if (FAILED(hr)) {
                pPropBag->Release();
                VariantClear(&path);
                buf->len = -1;
                return hr;
            }
            copylen = (int)(SysStringByteLen(path.bstrVal) + sizeof(WCHAR)); /* include terminator */
            if (buf->buf && copylen <= buf->len) /* buf->len initially holds the caller's capacity */
                memcpy(buf->buf, path.bstrVal, copylen);
            buf->len = copylen;
            pPropBag->Release();
            VariantClear(&path);
            return S_OK;
        }
    }
    return S_FALSE;
}
int videocap_get_device_path(int device_id, WCHAR *buf, int len)
{
    struct _buf x = {buf, len};
    int dev_id = device_id;
    HRESULT hr = WalkFilterCategory(CLSID_VideoInputDeviceCategory,
        &get_device_path_cb, &dev_id, &x);
    if (hr == S_OK)
        return x.len;
    return -1;
}
static HRESULT get_device_instanceid_cb(int index, IMoniker* pMoniker, void* pUserData1, void* pUserData2)
{
    if (pMoniker) {
        int* p_id = (int*)pUserData1;
        struct _buf* buf = (struct _buf*)pUserData2;
        HRESULT hr;
        if (index == *p_id) {
            IPropertyBag* pPropBag;
            VARIANT path;
            int copylen;
            hr = pMoniker->BindToStorage(NULL, NULL,
                IID_IPropertyBag, (void**)&pPropBag);
            if (FAILED(hr)) {
                buf->len = -1; /* failed */
                return hr;
            }
            VariantInit(&path);
            path.vt = VT_BSTR;
            hr = pPropBag->Read(L"InstanceId", &path, NULL);
            if (FAILED(hr)) {
                pPropBag->Release();
                VariantClear(&path);
                buf->len = -1;
                return hr;
            }
            copylen = (int)(SysStringByteLen(path.bstrVal) + sizeof(WCHAR)); /* include terminator */
            if (buf->buf && copylen <= buf->len) /* buf->len initially holds the caller's capacity */
                memcpy(buf->buf, path.bstrVal, copylen);
            buf->len = copylen;
            pPropBag->Release();
            VariantClear(&path);
            return S_OK;
        }
    }
    return S_FALSE;
}
int videocap_get_device_instanceid(int device_id, WCHAR* buf, int len)
{
    struct _buf x = { buf, len };
    int dev_id = device_id;
    HRESULT hr = WalkFilterCategory(CLSID_VideoInputDeviceCategory,
        &get_device_instanceid_cb, &dev_id, &x);
    if (hr == S_OK)
        return x.len;
    return -1;
}
static int is_support(int min, int max, int granularity, int x)
{
    if (x < min || x > max)
        return 0;
    if (x == min || x == max)
        return 1;
    if (granularity == 0)
        return 0;
    if ((x - min) / granularity * granularity != (x - min))
        return 0;
    if ((max - x) / granularity * granularity != (max - x))
        return 0;
    return 1;
}
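/*
 * Example: with min = 160, max = 640 and granularity = 80, x = 320 is
 * supported ((320-160) and (640-320) are both multiples of 80), while
 * x = 300 is not (300-160 = 140 is not a multiple of 80).
 */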
static int is_support_size(VIDEO_STREAM_CONFIG_CAPS *pscc, int width, int height)
{
    return is_support(pscc->MinOutputSize.cx, pscc->MaxOutputSize.cx, pscc->OutputGranularityX, width) &&
        is_support(pscc->MinOutputSize.cy, pscc->MaxOutputSize.cy, pscc->OutputGranularityY, height);
}
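/*
 * Probe a capture device's RGB24 stream capabilities and report which of the
 * predefined capture modes it can deliver: cap->mode[] receives the indices
 * of the supported modes (cap->mode_cnt of them) together with the
 * corresponding min/max frame intervals in 100-ns units.
 */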
int videocap_get_device_cap(int device_id, videocap_device_cap *cap)
{
    int error = -1, i, count, size = 0, j;
    HRESULT hr;
    ICaptureGraphBuilder2 *pBuilder = NULL;
    IBaseFilter *pSourceFilter = NULL;
    IAMStreamConfig *config = NULL;
    videocap_device_cap tmp_cap = {0};
    if (!cap)
        return error;
    hr = CreateFilterByIndex(CLSID_VideoInputDeviceCategory, (void**)&pSourceFilter, device_id);
    if (FAILED(hr))
        return error;
    hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER,
        IID_ICaptureGraphBuilder2, (void**)&pBuilder);
    if (FAILED(hr))
        goto on_error; /* release the source filter instead of leaking it */
    ZeroMemory(cap, sizeof(videocap_device_cap));
    hr = pBuilder->FindInterface(&PIN_CATEGORY_CAPTURE,
        &MEDIATYPE_Video, pSourceFilter, IID_IAMStreamConfig, (void**)&config);
    if (FAILED(hr))
        goto on_error;
    config->GetNumberOfCapabilities(&count, &size);
    if (size == sizeof(VIDEO_STREAM_CONFIG_CAPS)) {
        for (i = 0; i < count; ++i) {
            VIDEO_STREAM_CONFIG_CAPS scc;
            AM_MEDIA_TYPE *pmtConfig;
            hr = config->GetStreamCaps(i, &pmtConfig, (BYTE*)&scc);
            if (FAILED(hr))
                continue;
            if (IsEqualIID(pmtConfig->majortype, MEDIATYPE_Video) &&
                IsEqualIID(pmtConfig->formattype, FORMAT_VideoInfo) &&
                (pmtConfig->cbFormat >= sizeof(VIDEOINFOHEADER)) &&
                pmtConfig->pbFormat != NULL) {
                if (IsEqualIID(pmtConfig->subtype, MEDIASUBTYPE_RGB24)) {
                    for (j = 0; j < VIDEOCAP_MAX_MODE; ++j) {
                        if (is_support_size(&scc, mode_width[j], mode_height[j])) {
                            tmp_cap.mode[j] = 1;
                            tmp_cap.min_frame_interval[j] = (int)scc.MinFrameInterval;
                            tmp_cap.max_frame_interval[j] = (int)scc.MaxFrameInterval;
                        }
                    }
                }
            }
            FreeMediaTypeEx(pmtConfig);
        }
    }
    config->Release();
    // compact the supported modes into cap; the intervals are read from
    // tmp_cap, which is where they were collected above
    for (i = 0, j = 0; i < VIDEOCAP_MAX_MODE; ++i) {
        if (tmp_cap.mode[i]) {
            cap->mode[j] = i;
            cap->min_frame_interval[j] = tmp_cap.min_frame_interval[i];
            cap->max_frame_interval[j] = tmp_cap.max_frame_interval[i];
            j++;
        }
    }
    cap->mode_cnt = j;
    error = 0;
on_error:
    if (pBuilder)
        pBuilder->Release();
    if (pSourceFilter)
        pSourceFilter->Release();
    return error;
}
int videocap_get_mode_width(int mode)
{
    if (mode < 0 || mode >= VIDEOCAP_MAX_MODE)
        return -1;
    return mode_width[mode];
}
int videocap_get_mode_height(int mode)
{
    if (mode < 0 || mode >= VIDEOCAP_MAX_MODE)
        return -1;
    return mode_height[mode];
}
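/*
 * Usage sketch (not compiled): enumerating capture devices and printing the
 * modes each one reports through the helpers above.  Error handling is
 * minimal, wprintf assumes <stdio.h>, and the open/start/grab/stop sequence
 * is omitted because those entry points are declared in the header.
 */
#if 0
static void dump_capture_devices(void)
{
    int count = videocap_get_device_count();
    for (int id = 0; id < count; ++id) {
        WCHAR name[256] = { 0 };
        videocap_device_cap cap;
        if (videocap_get_device_name(id, name, sizeof(name)) > 0)
            wprintf(L"device %d: %s\n", id, name);
        if (videocap_get_device_cap(id, &cap) == 0) {
            for (int m = 0; m < cap.mode_cnt; ++m) {
                int mode = cap.mode[m];
                wprintf(L"  mode %d: %dx%d\n", mode,
                    videocap_get_mode_width(mode),
                    videocap_get_mode_height(mode));
            }
        }
    }
}
#endif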