
Z991239-5614 #comment Fix the entity-lost problem when the document camera is configured as top and bottom cameras

80274480 1 year ago
parent commit 6a90f0e550
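
The heart of the change: the file-scope g_width/g_height globals become per-instance fields (iout_width, iout_height) on the videocap struct, and the one-shot logging flag becomes a bool (bloged). With globals, two capture instances opened at the same time, which is what happens when the document camera is exposed as a top and a bottom camera, overwrite each other's negotiated output size and then scale frames with the wrong dimensions. A minimal sketch of the before/after pattern, using a hypothetical stand-in struct rather than the real videocap definition:

    // Before: one negotiated size shared by every open device.
    // int g_width  = 640;
    // int g_height = 360;   // a second device clobbers the first one's values

    // After: each capture instance carries its own negotiated output size.
    struct videocap_sketch {     // hypothetical stand-in for the real videocap struct
        int  iout_width;         // negotiated output width for this device only
        int  iout_height;        // negotiated output height for this device only
        bool bloged;             // connected media type queried and logged once per instance
    };

    // Seed the fields from the requested capture mode at creation time; they are
    // overwritten later with the dimensions reported by GetConnectedMediaType().
    void init_output_size(videocap_sketch &v, int mode_w, int mode_h) {
        v.iout_width  = mode_w;
        v.iout_height = mode_h;
        v.bloged      = false;
    }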
1 changed file with 61 additions and 52 deletions

+ 61 - 52
Other/win/libvideoframework/videocap.cpp

@@ -18,9 +18,6 @@
 	y = v; \
 }
 
-int g_width= 640;
-int g_height= 360;
-
 HRESULT get_output_mediatype(videocap *vcap);
 
 static void capDbg(videocap_param *cap, const char *fmt, ...)
@@ -398,7 +395,9 @@ typedef struct videocap
     CRITICAL_SECTION res_cs;
     video_frame res_frame;
     struct SwsContext *sws_context; /* for image scaling and format converting */
-	int ioutloged;
+	int iout_width;
+	int iout_height;
+	bool bloged;
 } videocap;
 
 HRESULT set_video_source_format(videocap *);
@@ -470,7 +469,7 @@ static HRESULT Handle_BGR_Frame_CallBack(videocap *vcap, BYTE *pBuffer, long Buf
 		//Compute the target image aspect ratio
 		int srcH;
 		float fDstScale = (float)mode_width[vcap->param.cap_mode]/(float)mode_height[vcap->param.cap_mode];
-		float fSrcScale = (float)g_width/(float)g_height;
+		float fSrcScale = (float)vcap->iout_width /(float)vcap->iout_height;
 		if (fSrcScale != fDstScale)
 		{
 			//Compute the offset
@@ -482,23 +481,23 @@ static HRESULT Handle_BGR_Frame_CallBack(videocap *vcap, BYTE *pBuffer, long Buf
 			else if (fSrcScale < fDstScale)
 			{
 				//Height is too long
-				nWidth = g_width;
-				nHeight = (int)ceil(g_width/fDstScale);
-				nOffset = (g_height-nHeight)/2*nWidth*3;
+				nWidth = vcap->iout_width;
+				nHeight = (int)ceil(vcap->iout_width/fDstScale);
+				nOffset = (vcap->iout_height -nHeight)/2*nWidth*3;
 			}
 			//Compute the SWS context needed for the aspect-preserving conversion
 			sws=sws_getContext(nWidth,nHeight,AV_PIX_FMT_BGR24, mode_width[vcap->param.cap_mode], mode_height[vcap->param.cap_mode], AV_PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
-			av_image_alloc(src_data, src_linesize,g_width,g_height, AV_PIX_FMT_BGR24, 1);
+			av_image_alloc(src_data, src_linesize, vcap->iout_width, vcap->iout_height, AV_PIX_FMT_BGR24, 1);
 			memcpy(src_data[0],pBuffer+nOffset,nWidth*nHeight*3);                    //Y
 			srcH = nHeight;
 		}
 		else
 		{
 			//Compute the SWS context needed for the aspect-preserving conversion
-			sws=sws_getContext(g_width,g_height,AV_PIX_FMT_BGR24, mode_width[vcap->param.cap_mode], mode_height[vcap->param.cap_mode], AV_PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
-			av_image_alloc(src_data, src_linesize,g_width,g_height, AV_PIX_FMT_BGR24, 1);
+			sws=sws_getContext(vcap->iout_width, vcap->iout_height, AV_PIX_FMT_BGR24, mode_width[vcap->param.cap_mode], mode_height[vcap->param.cap_mode], AV_PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
+			av_image_alloc(src_data, src_linesize, vcap->iout_width, vcap->iout_height, AV_PIX_FMT_BGR24, 1);
 			memcpy(src_data[0],pBuffer,BufferLen);                    //Y
-			srcH = g_height;
+			srcH = vcap->iout_height;
 		}
 
 		//Resolution conversion
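
Both BGR callback paths (Handle_BGR_Frame_CallBack above and the C-style BufferCB further down) compute the same vertically centered crop when the source frame is taller than the target aspect ratio: keep the full width, crop the height to ceil(width / dstScale), and skip the rows above the crop. A standalone sketch of that arithmetic; the sample numbers in the trailing comment are illustrative only:

    #include <cmath>
    #include <cstddef>

    // Byte offset of the first kept row in a packed BGR24 buffer (3 bytes per pixel)
    // when a src_w x src_h frame is cropped to the dst_w:dst_h aspect ratio,
    // keeping the full width and centering the crop vertically.
    size_t centered_crop_offset(int src_w, int src_h, int dst_w, int dst_h,
                                int *crop_h_out) {
        float dst_scale = (float)dst_w / (float)dst_h;
        int crop_h = (int)std::ceil(src_w / dst_scale);       // height matching the target ratio
        *crop_h_out = crop_h;
        return (size_t)((src_h - crop_h) / 2) * src_w * 3;    // rows skipped above the crop
    }

    // Example: a 640x480 (4:3) source cropped for a 640x360 (16:9) target keeps a
    // band of roughly 640x360 and skips about (480-360)/2 = 60 rows from the top.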
@@ -617,12 +616,12 @@ static HRESULT Handle_BGR_Frame_CallBack(videocap *vcap, BYTE *pBuffer, long Buf
 static HRESULT Handle_YUY2_Frame_CallBack(videocap *vcap, BYTE *pBuffer, long BufferLen)
 {
 	float fDstScale = (float)mode_width[vcap->param.cap_mode]/(float)mode_height[vcap->param.cap_mode];
-	float fSrcScale = (float)g_width/(float)g_height;
+	float fSrcScale = (float)vcap->iout_width /(float)vcap->iout_height;
 
-	int used_width = g_width;
-	int used_height = g_height;
+	int used_width = vcap->iout_width;
+	int used_height = vcap->iout_height;
 	if (fSrcScale < fDstScale){
-		used_height = (int)ceil(g_width/fDstScale);
+		used_height = (int)ceil(vcap->iout_width /fDstScale);
 	}
 	else if (fSrcScale > fDstScale){
 		return S_FALSE;
@@ -641,10 +640,10 @@ static HRESULT Handle_YUY2_Frame_CallBack(videocap *vcap, BYTE *pBuffer, long Bu
 		m_i420 + stride_y * used_height + stride_u * ((used_height + 1) / 2),
 		stride_v,
 		0, 
-		(g_height-used_height)/2,  // No Cropping
-		g_width, 
-		g_height,
-		g_width,
+		(vcap->iout_height -used_height)/2,  // No Cropping
+		vcap->iout_width,
+		vcap->iout_height,
+		vcap->iout_width,
 		used_height,
 		libyuv::kRotate180,
 		libyuv::FOURCC_YUY2
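
In the YUY2 path the same centering is expressed through libyuv's crop arguments: crop_x = 0, crop_y = (source height - used_height)/2, the full source size, a full-width crop of used_height rows, rotated 180 degrees. A self-contained sketch of an equivalent call, assuming the libyuv umbrella header is available; the buffer sizing is illustrative and the function name is hypothetical:

    #include "libyuv.h"     // assumes libyuv headers are on the include path
    #include <cstdint>
    #include <vector>

    // Convert a packed YUY2 frame to I420, cropping a vertically centered band of
    // crop_h rows and rotating 180 degrees, in the spirit of the patched
    // Handle_YUY2_Frame_CallBack. Returns true on success.
    bool yuy2_to_i420_centered(const uint8_t *yuy2, size_t yuy2_len,
                               int src_w, int src_h, int crop_h,
                               std::vector<uint8_t> &i420) {
        const int stride_y  = src_w;
        const int stride_uv = (src_w + 1) / 2;
        i420.resize(stride_y * crop_h + 2 * stride_uv * ((crop_h + 1) / 2));
        uint8_t *y = i420.data();
        uint8_t *u = y + stride_y * crop_h;
        uint8_t *v = u + stride_uv * ((crop_h + 1) / 2);
        return libyuv::ConvertToI420(yuy2, yuy2_len,
                                     y, stride_y, u, stride_uv, v, stride_uv,
                                     /*crop_x=*/0,
                                     /*crop_y=*/(src_h - crop_h) / 2,   // centered band
                                     src_w, src_h,
                                     src_w, crop_h,
                                     libyuv::kRotate180,
                                     libyuv::FOURCC_YUY2) == 0;
    }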
@@ -790,9 +789,13 @@ HRESULT STDMETHODCALLTYPE RvcSampleGrabberCB::BufferCB(
 	BYTE *pBuffer,
 	long BufferLen)
 {
-	if (0 == m_vcap->ioutloged){
+	if (false== m_vcap->bloged){
 		get_output_mediatype(m_vcap);
-		m_vcap->ioutloged = 1;
+		m_vcap->bloged = true;
+	}
+
+	if ((m_vcap->iout_width > 3 * mode_width[m_vcap->param.cap_mode]) && (m_vcap->iout_height > 3* mode_height[m_vcap->param.cap_mode])) {
+		return S_FALSE;
 	}
 
 	if (VIDEO_FORMAT_YUY2 == m_vcap->param.cap_frame_format){
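
The other functional addition is the guard in RvcSampleGrabberCB::BufferCB: once the connected media type has been read, a frame whose negotiated size exceeds three times the requested mode in both dimensions is rejected with S_FALSE, presumably to avoid processing frames that belong to the other logical camera. A one-line predicate expressing the same check; the 3x factor comes straight from the diff and the function name is hypothetical:

    // True when the negotiated output size is implausibly large for the requested
    // capture mode (both dimensions more than 3x the mode); the callback then
    // returns S_FALSE instead of scaling a frame that cannot be meant for this device.
    static bool frame_size_implausible(int out_w, int out_h, int mode_w, int mode_h) {
        return (out_w > 3 * mode_w) && (out_h > 3 * mode_h);
    }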
@@ -881,9 +884,9 @@ static HRESULT STDMETHODCALLTYPE BufferCB(ISampleGrabberCB * This, double Sample
 	char*buffer;
 	int oriLen = mode_height[vcap->param.cap_mode]*mode_width[vcap->param.cap_mode]*3;
 	
-	if (0 == vcap->ioutloged){
+	if (false == vcap->bloged){
 		get_output_mediatype(vcap);
-		vcap->ioutloged = 1;
+		vcap->bloged = true;
 	}
 
 	if (BufferLen != oriLen)
@@ -891,7 +894,7 @@ static HRESULT STDMETHODCALLTYPE BufferCB(ISampleGrabberCB * This, double Sample
 		//Compute the target image aspect ratio
 		int srcH;
 		float fDstScale = (float)mode_width[vcap->param.cap_mode]/(float)mode_height[vcap->param.cap_mode];
-		float fSrcScale = (float)g_width/(float)g_height;
+		float fSrcScale = (float)vcap->iout_width/(float)vcap->iout_height;
 		if (fSrcScale != fDstScale)
 		{
 			//Compute the offset
@@ -903,23 +906,23 @@ static HRESULT STDMETHODCALLTYPE BufferCB(ISampleGrabberCB * This, double Sample
 			else if (fSrcScale < fDstScale)
 			{
 				//Height is too long
-				nWidth = g_width;
-				nHeight = (int)ceil(g_width/fDstScale);
-				nOffset = (g_height-nHeight)/2*nWidth*3;
+				nWidth = vcap->iout_width;
+				nHeight = (int)ceil(vcap->iout_width/fDstScale);
+				nOffset = (vcap->iout_height -nHeight)/2*nWidth*3;
 			}
 			//Compute the SWS context needed for the aspect-preserving conversion
 			sws=sws_getContext(nWidth,nHeight,AV_PIX_FMT_BGR24, mode_width[vcap->param.cap_mode], mode_height[vcap->param.cap_mode], AV_PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
-			av_image_alloc(src_data, src_linesize,g_width,g_height, AV_PIX_FMT_BGR24, 1);
+			av_image_alloc(src_data, src_linesize, vcap->iout_width , vcap->iout_height, AV_PIX_FMT_BGR24, 1);
 			memcpy(src_data[0],pBuffer+nOffset,nWidth*nHeight*3);                    //Y
 			srcH = nHeight;
 		}
 		else
 		{
 			//Compute the SWS context needed for the aspect-preserving conversion
-			sws=sws_getContext(g_width,g_height,AV_PIX_FMT_BGR24, mode_width[vcap->param.cap_mode], mode_height[vcap->param.cap_mode], AV_PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
-			av_image_alloc(src_data, src_linesize,g_width,g_height, AV_PIX_FMT_BGR24, 1);
+			sws=sws_getContext(vcap->iout_width, vcap->iout_height,AV_PIX_FMT_BGR24, mode_width[vcap->param.cap_mode], mode_height[vcap->param.cap_mode], AV_PIX_FMT_BGR24, SWS_BICUBIC, NULL, NULL, NULL);
+			av_image_alloc(src_data, src_linesize, vcap->iout_width, vcap->iout_height, AV_PIX_FMT_BGR24, 1);
 			memcpy(src_data[0],pBuffer,BufferLen);                    //Y
-			srcH = g_height;
+			srcH = vcap->iout_height;
 		}
 
 		//Resolution conversion
@@ -1211,7 +1214,9 @@ int videocap_create(videocap_t *h ,videocap_param *param)
         }
         InitializeCriticalSection(&vcap->res_cs);
     }
-	vcap->ioutloged = 0;
+	vcap->bloged = false;
+	vcap->iout_width = mode_width[param->cap_mode];
+	vcap->iout_height = mode_height[param->cap_mode];
     *h = vcap;
     return 0;
 }
@@ -1847,8 +1852,6 @@ HRESULT get_output_mediatype(videocap *vcap)
 	AM_MEDIA_TYPE   mt;
 	HRESULT hr;
 	VIDEOINFOHEADER *videoHeader = NULL;
-	char *subtype_str = NULL;
-	//char strmsg[MAX_PATH*2] = {0};
 
 	ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));
 	hr = vcap->grabber->GetConnectedMediaType(&mt);
@@ -1859,24 +1862,30 @@ HRESULT get_output_mediatype(videocap *vcap)
 	}
 	
 	videoHeader = (VIDEOINFOHEADER*)(mt.pbFormat);
-	g_width = videoHeader->bmiHeader.biWidth;
-	g_height = videoHeader->bmiHeader.biHeight;
-
-	subtype_str=GuidToString(mt.subtype);
+	vcap->iout_width = videoHeader->bmiHeader.biWidth;
+	vcap->iout_height = videoHeader->bmiHeader.biHeight;
+
+	if (mode_width[vcap->param.cap_mode] != videoHeader->bmiHeader.biWidth || mode_height[vcap->param.cap_mode] != videoHeader->bmiHeader.biHeight) {
+		char* subtype_str = NULL;
+		char strmsg[MAX_PATH * 2] = { 0 };
+		subtype_str = GuidToString(mt.subtype);
+
+		_snprintf(strmsg, MAX_PATH * 2, "grabber Format Width=%d, Height=%d, biBitCount=%d, biSizeImage=%d, biCompression=%d, biPlanes=%d, biSize=%d, subtype=%s, newPmt->lSampleSize=%d, newPmt->bFixedSizeSamples=%d, newPmt->bTemporalCompression=%d",
+			videoHeader->bmiHeader.biWidth,
+			videoHeader->bmiHeader.biHeight,
+			videoHeader->bmiHeader.biBitCount,
+			videoHeader->bmiHeader.biSizeImage,
+			videoHeader->bmiHeader.biCompression,
+			videoHeader->bmiHeader.biPlanes,
+			videoHeader->bmiHeader.biSize,
+			subtype_str,
+			mt.lSampleSize,
+			mt.bFixedSizeSamples,
+			mt.bTemporalCompression);
+		capLogEvent(&vcap->param, 2, strmsg);
+	}
 	
-	//_snprintf(strmsg, MAX_PATH*2, "grabber Format Width=%d, Height=%d, biBitCount=%d, biSizeImage=%d, biCompression=%d, biPlanes=%d, biSize=%d, subtype=%s, newPmt->lSampleSize=%d, newPmt->bFixedSizeSamples=%d, newPmt->bTemporalCompression=%d",
-	//	videoHeader->bmiHeader.biWidth,
-	//	videoHeader->bmiHeader.biHeight,
-	//	videoHeader->bmiHeader.biBitCount,
-	//	videoHeader->bmiHeader.biSizeImage,
-	//	videoHeader->bmiHeader.biCompression,
-	//	videoHeader->bmiHeader.biPlanes,
-	//	videoHeader->bmiHeader.biSize,
-	//	subtype_str,
-	//	mt.lSampleSize,
-	//	mt.bFixedSizeSamples,
-	//	mt.bTemporalCompression);
-	//capLogEvent(&vcap->param, 1, strmsg);
+
 
 	return S_OK;
 }