#include "stdafx.h" #include #include #include #include #include #include #include "y2k_time.h" #include "../../Other/include/rvc_media_common.h" #include "../../Other/libvideohorflip/videohorflip.h" #include "Event.h" #include "capture.h" using namespace MediaController; #define av_always_inline __inline #define inline __inline #include "../../Other/libvideoframework/video_common/ffmpeg_api_cpp_adapter.h" static int Bin2Str(unsigned char *x, int xlen, char *str, int str_size) { static const char *hex2char = "0123456789ABCDEF"; int i, k = 0; if (str_size <= xlen * 2) return -1; for (i = 0; i < xlen; ++i) { int h = x[i] >> 4; int l = x[i] & 0xf; str[k++] = hex2char[h]; str[k++] = hex2char[l]; } str[k] = 0; return k; } static int translate_id(int in_direction, int idx) { int i, n, ii; n = Pa_GetDeviceCount(); for (i = 0, ii = 0; i < n; ++i) { const PaDeviceInfo *info = Pa_GetDeviceInfo(i); if (in_direction) { if (info->maxInputChannels) { if (ii == idx) { return i; } ii++; } } else { if (info->maxOutputChannels) { if (ii == idx) { return i; } ii++; } } } return -1; } static int StreamCallback(const void *input, void *output, unsigned long frameCount, const PaStreamCallbackTimeInfo* timeInfo, PaStreamCallbackFlags statusFlags, void *userData) { audio_capture_t *audio_cap = (audio_capture_t*)userData; if (input) { audio_frame frm; frm.bitspersample = 16; frm.format = 1; frm.data = (char*)const_cast(input); frm.framesize = frameCount << 1; frm.nchannels = 1; frm.samplespersec = CAPTURE_CLOCK; frm.iseriesnumber = 0; if (!audio_cap->shm_queue->InsertAudio(&frm)) { Dbg("[StreamCallback] InsertAudio to shm_queue failed! frameCount:%d", frameCount); } if (!audio_cap->salesol_shm_queue->InsertAudio(&frm)) { Dbg("[StreamCallback] InsertAudio to salesol_shm_queue failed! frameCount:%d", frameCount); } } if (output) { memset(output, 0, frameCount<<1); } return paContinue; } static int Sales_StreamCallback(const void *input, void *output, unsigned long frameCount, const PaStreamCallbackTimeInfo* timeInfo, PaStreamCallbackFlags statusFlags, void *userData) { rvc_audio_capture_t *audio_cap = (rvc_audio_capture_t*)userData; if ((NULL != audio_cap) && (NULL != audio_cap->audio_shm_queue) && (NULL != input)){ audio_frame frm; frm.bitspersample = 16; frm.format = 1; frm.data = (char*)const_cast(input); frm.framesize = frameCount << 1; frm.nchannels = 1; frm.samplespersec = audio_cap->iaudio_capture_samplerate; frm.iseriesnumber = audio_cap->iseriesnumber++; if (!audio_cap->audio_shm_queue->InsertAudio(&frm)) { Dbg("[Sales_StreamCallback] InsertAudio failed! frameCount:%d", frameCount); } else{ if ((audio_cap->iaudio_capture_peroid) > 0 && (0 == frm.iseriesnumber%audio_cap->iaudio_capture_peroid)){ if (audio_cap->iseriesnumber > INT_MAX){ audio_cap->iseriesnumber = 0; } //Dbg("current audio frame series number is %d.", frm.iseriesnumber); } } if (eSingleWriteLocal == audio_cap->eType){ fwrite(frm.data, frm.framesize, 1,(FILE*)(audio_cap->pdata)); } } if (output) { memset(output, 0, frameCount<<1); } return paContinue; } static rvc_audio_capture_t *salesrecord_audio_capture_create(rvc_sales_audio_capture_t *cap) { rvc_audio_capture_t *audio_cap = ZALLOC_T(rvc_audio_capture_t); if (audio_cap) { audio_cap->parent = cap; audio_cap->audio_shm_queue = new Clibaudioqueue(REC_COMMON_AUDIO_SALES_SHM_QUEUE); audio_cap->iseriesnumber = 0; audio_cap->eType = eUnKnown; audio_cap->pdata = NULL; } return audio_cap; } static void salesrecord_audio_capture_destroy(rvc_audio_capture_t *audio_cap) { if (NULL != audio_cap){ if (NULL != audio_cap->audio_shm_queue){ delete audio_cap->audio_shm_queue; audio_cap->audio_shm_queue = NULL; Dbg("set audio_cap audio_shm_queue null"); } free(audio_cap); } } static audio_capture_t *audio_capture_create(capture_t *cap) { audio_capture_t *audio_cap = ZALLOC_T(audio_capture_t); if (audio_cap) { audio_cap->parent = cap; audio_cap->shm_queue = new Clibaudioqueue(REC_COMMON_AUDIO_SHM_QUEUE); audio_cap->salesol_shm_queue = new Clibaudioqueue(REC_COMMON_AUDIO_SALESOL_SHM_QUEUE); //audio_cap->sales_shm_queue = new Clibaudioqueue(REC_COMMON_AUDIO_SALES_SHM_QUEUE); } return audio_cap; } static void audio_capture_destroy(audio_capture_t *audio_cap) { delete audio_cap->shm_queue; delete audio_cap->salesol_shm_queue; //delete audio_cap->sales_shm_queue; free(audio_cap); } static int audio_capture_start(audio_capture_t *audio_cap) { capture_t *cap = audio_cap->parent; PaStreamParameters inParam = {0}; PaStreamParameters salesInParam = {0}; PaStreamParameters outParam = {0}; PaError paError; const PaDeviceInfo *info; int nId = capture_get_audio_device_id(true, cap->config.strAudioIn); if (nId == -1) { //需要立即处理的告警使用Severity_High LogError(Severity_High,Error_DevMedia,ERROR_MOD_MEDIACONTROLLER_HANDFREEIN_INITFAIL,"hand free in device config error,please check"); return Error_AudioIN; } int in_dev_id = translate_id(TRUE, nId); if (in_dev_id < 0) { Dbg("audio in device translate failed!"); return Error_AudioIN; } info = Pa_GetDeviceInfo(in_dev_id); if (!info) { Dbg("get device info failed!"); return Error_AudioIN; } inParam.channelCount = 1; inParam.device = in_dev_id; inParam.suggestedLatency = info->defaultLowInputLatency; inParam.sampleFormat = paInt16; inParam.hostApiSpecificStreamInfo = NULL; if (Pa_IsFormatSupported(&inParam, NULL, CAPTURE_CLOCK) != paNoError) { Dbg("audio capture create error, cannot open audio input device"); return Error_AudioIN; } nId = capture_get_audio_device_id(false, cap->config.strAudioOut); if (nId == -1) { //需要立即处理的告警使用Severity_High LogError(Severity_High,Error_DevMedia,ERROR_MOD_MEDIACONTROLLER_HANDFREEOUT_INITFAIL,"hand free out device config error,please check"); return Error_AudioOut; } int out_dev_id = translate_id(FALSE, nId); if (out_dev_id < 0) { Dbg("audio out device translate failed!"); return Error_AudioOut; } info = Pa_GetDeviceInfo(out_dev_id); if (!info) { Dbg("get device info failed!"); return Error_AudioOut; } outParam.channelCount = 1; outParam.device = out_dev_id; outParam.suggestedLatency = info->defaultLowOutputLatency; outParam.sampleFormat = paInt16; outParam.hostApiSpecificStreamInfo = NULL; if (Pa_IsFormatSupported(NULL, &outParam, CAPTURE_CLOCK) != paNoError) { Dbg("audio capture create error, cannot open audio input device"); return Error_AudioOut; } //打开流设备,可以用以下代码替换paError = Pa_OpenStream(&audio_cap->stream, &inParam, &outParam, CAPTURE_CLOCK, //CAPTURE_FRAME_TIME * CAPTURE_CLOCK/1000, paClipOff|paDitherOff, &StreamCallback, audio_cap); paError = Pa_OpenStream(&audio_cap->stream, &inParam, NULL, CAPTURE_CLOCK, CAPTURE_FRAME_TIME * CAPTURE_CLOCK/1000, paClipOff|paDitherOff, &StreamCallback, audio_cap); if (paError != paNoError) { Dbg("port audio open stream failed! paError = %d", paError); return Error_AudioIN; } paError = Pa_StartStream(audio_cap->stream); if (paError != paNoError) { Dbg("port audio start stream failed! paError = %d", paError); return Error_AudioIN; } return Error_Succeed; } static void audio_capture_stop(audio_capture_t *audio_cap) { if (audio_cap->stream) { Pa_AbortStream(audio_cap->stream); Pa_CloseStream(audio_cap->stream); audio_cap->stream = NULL; } //if (audio_cap->sales_stream) { // Pa_AbortStream(audio_cap->sales_stream); // Pa_CloseStream(audio_cap->sales_stream); // audio_cap->sales_stream = NULL; //} } static int record_audio_capture_start(rvc_audio_capture_t *audio_cap) { rvc_sales_audio_capture_t *cap = audio_cap->parent; PaStreamParameters salesInParam = {0}; PaError paError; const PaDeviceInfo *info; int nId = capture_get_audio_device_id(true, cap->rvc_audio_config.strAudioIn); if (nId == -1) { //需要立即处理的告警使用Severity_High LogError(Severity_High,Error_DevMedia,ERROR_MOD_MEDIACONTROLLER_HANDFREEIN_INITFAIL,"hand free in device config error,please check"); return Error_AudioIN; } int in_dev_id = translate_id(TRUE, nId); if (in_dev_id < 0) { Dbg("sales audio in device translate failed!"); return Error_AudioIN; } info = Pa_GetDeviceInfo(in_dev_id); if (!info) { Dbg("get device info failed!"); return Error_AudioIN; } salesInParam.channelCount = 1; salesInParam.device = in_dev_id; salesInParam.suggestedLatency = info->defaultLowInputLatency; salesInParam.sampleFormat = paInt16; salesInParam.hostApiSpecificStreamInfo = NULL; if (Pa_IsFormatSupported(&salesInParam, NULL, audio_cap->iaudio_capture_samplerate) != paNoError) { Dbg("sales audio capture create error, cannot open audio input device, and current capture sample rate is %d.",audio_cap->iaudio_capture_samplerate); return Error_AudioIN; } paError = Pa_OpenStream(&audio_cap->stream, &salesInParam, NULL, audio_cap->iaudio_capture_samplerate, audio_cap->iaudio_capture_peroid * audio_cap->iaudio_capture_samplerate/1000, paClipOff|paDitherOff, &Sales_StreamCallback, audio_cap); if (paError != paNoError) { Dbg("port audio open sales stream failed! paError = %d", paError); return Error_AudioIN; } paError = Pa_StartStream(audio_cap->stream); if (paError != paNoError) { Dbg("port audio start sales stream failed! paError = %d", paError); return Error_AudioIN; } return Error_Succeed; } static void record_audio_capture_stop(rvc_audio_capture_t *audio_cap) { if (NULL != audio_cap){ if (audio_cap->stream) { PaError Error = Pa_AbortStream(audio_cap->stream); if (paNoError == Error){ Dbg("Pa_AbortStream no error."); } Error = Pa_CloseStream(audio_cap->stream); if (paNoError == Error){ Dbg("Pa_CloseStream no error."); } audio_cap->stream = NULL; } audio_cap->iseriesnumber = 0; } } static int calc_capture_mode(int width, int height, int *mode) { const struct { int mode; int width; int height; } modes [] = { {VIDEOCAP_FRAME_SQCIF, VIDEOCAP_SQCIF_WIDTH, VIDEOCAP_SQCIF_HEIGHT}, {VIDEOCAP_FRAME_QQVGA, VIDEOCAP_QQVGA_WIDTH, VIDEOCAP_QQVGA_HEIGHT}, {VIDEOCAP_FRAME_QCIF, VIDEOCAP_QCIF_WIDTH, VIDEOCAP_QCIF_HEIGHT}, {VIDEOCAP_FRAME_QVGA, VIDEOCAP_QVGA_WIDTH, VIDEOCAP_QVGA_HEIGHT}, {VIDEOCAP_FRAME_CIF, VIDEOCAP_CIF_WIDTH, VIDEOCAP_CIF_HEIGHT}, {VIDEOCAP_FRAME_VGA, VIDEOCAP_VGA_WIDTH, VIDEOCAP_VGA_HEIGHT}, {VIDEOCAP_FRAME_4CIF, VIDEOCAP_4CIF_WIDTH, VIDEOCAP_4CIF_HEIGHT}, {VIDEOCAP_FRAME_SVGA, VIDEOCAP_SVGA_WIDTH, VIDEOCAP_SVGA_HEIGHT}, {VIDEOCAP_FRAME_NHD, VIDEOCAP_NHD_WIDTH, VIDEOCAP_NHD_HEIGHT}, {VIDEOCAP_FRAME_SXGA, VIDEOCAP_SXGA_WIDTH, VIDEOCAP_SXGA_HEIGHT}, {VIDEOCAP_FRAME_720P, VIDEOCAP_720P_WIDTH, VIDEOCAP_720P_HEIGHT}, {VIDEOCAP_FRAME_1080P, VIDEOCAP_1080P_WIDTH, VIDEOCAP_1080P_HEIGHT}, }; int i; for (i = 0; i < array_size(modes); ++i) { if (modes[i].width == width && modes[i].height == height) { *mode = modes[i].mode; return 0; } } return Error_NotExist; } static int video_shm_enqueue(Clibvideoqueue *shm_queue, video_frame *frame, int flags) { videoq_frame tmp_frm; tmp_frm.data = frame->data[0]; tmp_frm.framesize = frame->width * frame->height * 3; tmp_frm.format = VIDEOQ_FORMAT_RGB24; tmp_frm.width = frame->width; tmp_frm.height = frame->height; unsigned int nowtime = y2k_time_now(); if (!shm_queue->InsertVideo(&tmp_frm, flags,nowtime)) { Dbg("caution: insert shm video failed!"); return Error_Unexpect; } else { //Dbg("insert shm video ok!"); return Error_Succeed; } } static void env_cap_on_frame(void *user_data, video_frame *frame) { video_capture_t *video_cap = (video_capture_t *)user_data; capture_t *cap = video_cap->parent; int rc; video_cap->frame_id++; //Dbg("start env on frame, id=%d, tick=%d", video_cap->frame_id, GetTickCount());; //IplImage*img = NULL; //img = cvCreateImage(cvSize(frame->width,frame->height),IPL_DEPTH_8U,3); //img->imageData = (char*)frame->data[0]; //cvSaveImage("d:\\env.jpg", img,0); //cvReleaseImageHeader(&img); rc = video_shm_enqueue(video_cap->snapshot_shm_queue, frame, VIDEOQUEUE_FLAG_VERTICAL_FLIP); if (rc != Error_Succeed) { Dbg("env snapshot queue enqueue shm failed! Error = %d, camera_type=%d", rc, video_cap->camera_type); } // snapshot if (rc==Error_Succeed) { if (*cap->config.ref_env_capture_count) { Dbg("env camera ref_env_capture_count=%d",*cap->config.ref_env_capture_count); InterlockedDecrement(cap->config.ref_env_capture_count); LogEvent(Severity_Middle, MOD_EVENT_MEDIACONTROLLER_FINISHED_CAPTURE_ENV, "agent capture env ok, and capture env finished!"); } else if (*cap->config.ref_envopt_capture_count & 2) { Dbg("env camera ref_envopt_capture_count=%d",*cap->config.ref_envopt_capture_count); _InterlockedAnd(cap->config.ref_envopt_capture_count, 0xfffffffD); if (*cap->config.ref_envopt_capture_count == 0) { LogEvent(Severity_Middle, MOD_EVENT_MEDIACONTROLLER_FINISHED_CAPTURE_ENVOPT, "agent capture env ok, and capture envopt finished!"); } } } // preview { video_frame preview_frame; video_frame_alloc(REC_COMMON_VIDEO_PREVIEW_WIDTH, REC_COMMON_VIDEO_PREVIEW_HEIGHT, VIDEO_FORMAT_RGB24, &preview_frame); uint8_t *src_data[4] = {frame->data[0] + 80*3, 0, 0, 0}; sws_scale(video_cap->preview_sws_ctx, src_data, frame->linesize, 0, frame->height, preview_frame.data, preview_frame.linesize); video_shm_enqueue(video_cap->preview_shm_queue, &preview_frame, 0); video_frame_free(&preview_frame); } // rtp { video_frame rtp_frame; video_frame_alloc(REC_COMMON_VIDEO_RTP_ENV_WIDTH, REC_COMMON_VIDEO_RTP_ENV_HEIGHT, VIDEO_FORMAT_RGB24, &rtp_frame); uint8_t *src_data[4] = {frame->data[0] + (frame->height-1) * frame->linesize[0], 0, 0, 0}; int src_linesize[4] = {-frame->linesize[0], 0, 0, 0}; sws_scale(video_cap->rtp_sws_ctx, src_data, src_linesize, 0, frame->height, rtp_frame.data, rtp_frame.linesize); video_shm_enqueue(video_cap->rtp_shm_queue, &rtp_frame, 0); #if 0 static int i = 0; if (i == 0 && 0) { video_frame tmp_frame; video_frame_alloc(320, 180, VIDEO_FORMAT_RGB24, &tmp_frame); video_frame_fill_black(&tmp_frame); videoq_frame frm; frm.data = tmp_frame.data[0]; video_cap->rtp_shm_queue->GetVideo(&frm, 0); video_frame_save_bmpfile("d:\\abc.bmp", &tmp_frame); video_frame_free(&tmp_frame); //video_frame_save_bmpfile("d:\\ab.bmp", &rtp_frame); } #endif video_frame_free(&rtp_frame); } //Dbg("end env on frame, id=%d, tick=%d", video_cap->frame_id, GetTickCount());; } static void opt_cap_on_frame(void *user_data, video_frame *frame) { video_capture_t *video_cap = (video_capture_t *)user_data; capture_t *cap = video_cap->parent; int rc; video_frame rframe; int rotate = 0; //Dbg("opt on frame!"); if (cap->config.video_opt_rotate == 90) { rotate = 1; } else if (cap->config.video_opt_rotate == 270) { rotate = -1; } else { return; } video_cap->frame_id++; // prepare for rotation video_frame_alloc(frame->height, frame->width, frame->format, &rframe); #if 1 // use IPP, it's fast { IppiSize srcSize; srcSize.width = frame->width; srcSize.height = frame->height; IppiRect srcROI; srcROI.width = frame->width; srcROI.height = frame->height; srcROI.x = 0; srcROI.y = 0; IppiRect dstROI; dstROI.width = frame->height; dstROI.height = frame->width; dstROI.x = 0; dstROI.y = 0; ippiTranspose_8u_C3R(frame->data[0], frame->linesize[0], rframe.data[0], rframe.linesize[0], srcSize); IppiAxis flip; if (rotate == 1) { flip = ippAxsVertical; ippiMirror_8u_C3IR(rframe.data[0], rframe.linesize[0], srcSize, flip); } else { IppiSize Size; Size.width = rframe.width; Size.height = rframe.height; //flip = ippAxsHorizontal; flip = ippAxsBoth; ippiMirror_8u_C3IR(rframe.data[0], rframe.linesize[0], Size, flip); } } #else videohorflip_rotate(frame->data[0], frame->width, frame->height, rframe.data[0], rotate); #endif rc = video_shm_enqueue(video_cap->snapshot_shm_queue, &rframe, 0); if (rc != Error_Succeed) { Dbg("opt snapshot queue enqueue shm failed! Error = %d, camera_type=%d", rc, video_cap->camera_type); } // snapshot if (rc==Error_Succeed) { if (*cap->config.ref_opt_capture_count) { Dbg("opt camera ref_opt_capture_count=%d",*cap->config.ref_opt_capture_count); InterlockedDecrement(cap->config.ref_opt_capture_count); LogEvent(Severity_Middle, MOD_EVENT_MEDIACONTROLLER_FINISHED_CAPTURE_OPT, "agent capture opt ok, and capture opt finished!"); } else if (*cap->config.ref_envopt_capture_count&1) { Dbg("opt camera ref_envopt_capture_count=%d",*cap->config.ref_envopt_capture_count); if (InterlockedDecrement(cap->config.ref_envopt_capture_count) == 0) { LogEvent(Severity_Middle, MOD_EVENT_MEDIACONTROLLER_FINISHED_CAPTURE_ENVOPT, "agent capture opt ok, and capture envopt finished!"); } } } // rtp { video_frame rtp_frame = {0}; video_frame_alloc(REC_COMMON_VIDEO_RTP_OPT_WIDTH, REC_COMMON_VIDEO_RTP_OPT_HEIGHT, VIDEO_FORMAT_RGB24, &rtp_frame); sws_scale(video_cap->rtp_sws_ctx, rframe.data, rframe.linesize, 0, rframe.height, rtp_frame.data, rtp_frame.linesize); video_shm_enqueue(video_cap->rtp_shm_queue, &rtp_frame, 0); video_frame_free(&rtp_frame); } video_frame_free(&rframe); } static video_capture_t *video_capture_create(capture_t *cap, int camera_type) { video_capture_t *video_cap = ZALLOC_T(video_capture_t); if (video_cap) { video_cap->parent = cap; video_cap->camera_type = camera_type; video_cap->frame_id = 0; if (camera_type == CAMERA_TYPE_ENV) { video_cap->snapshot_shm_queue = new Clibvideoqueue(REC_COMMON_VIDEO_ENV_SHM_SNAPSHOT_QUEUE); video_cap->rtp_shm_queue = new Clibvideoqueue(REC_COMMON_VIDEO_ENV_SHM_RTP_QUEUE); video_cap->rtp_sws_ctx = sws_getContext(REC_COMMON_VIDEO_SNAPSHOT_WIDTH, REC_COMMON_VIDEO_SNAPSHOT_HEIGHT, PIX_FMT_BGR24, REC_COMMON_VIDEO_RTP_ENV_WIDTH, REC_COMMON_VIDEO_RTP_ENV_HEIGHT, PIX_FMT_BGR24, SWS_POINT, NULL, NULL, NULL); video_cap->preview_shm_queue = new Clibvideoqueue(REC_COMMON_VIDEO_ENV_SHM_PREVIEW_QUEUE); video_cap->preview_sws_ctx = sws_getContext(REC_COMMON_VIDEO_SNAPSHOT_PREVIEW_WIDTH, REC_COMMON_VIDEO_SNAPSHOT_PREVIEW_HEIGHT, PIX_FMT_BGR24, REC_COMMON_VIDEO_PREVIEW_WIDTH, REC_COMMON_VIDEO_PREVIEW_HEIGHT, PIX_FMT_BGR24, SWS_FAST_BILINEAR, NULL, NULL, NULL); } else { video_cap->snapshot_shm_queue = new Clibvideoqueue(REC_COMMON_VIDEO_OPT_SHM_SNAPSHOT_QUEUE); video_cap->rtp_shm_queue = new Clibvideoqueue(REC_COMMON_VIDEO_OPT_SHM_RTP_QUEUE); video_cap->rtp_sws_ctx = sws_getContext(REC_COMMON_VIDEO_SNAPSHOT_HEIGHT, REC_COMMON_VIDEO_SNAPSHOT_WIDTH, PIX_FMT_BGR24, REC_COMMON_VIDEO_RTP_OPT_WIDTH, REC_COMMON_VIDEO_RTP_OPT_HEIGHT, PIX_FMT_BGR24, SWS_POINT, NULL, NULL, NULL); //video_cap->preview_shm_queue = NULL; //video_cap->preview_sws_ctx = NULL; } } return video_cap; } static void video_capture_destroy(video_capture_t *video_cap) { if (video_cap) { if (video_cap->preview_sws_ctx) { sws_freeContext(video_cap->preview_sws_ctx); video_cap->preview_sws_ctx = NULL; } if (video_cap->rtp_sws_ctx) { sws_freeContext(video_cap->rtp_sws_ctx); video_cap->rtp_sws_ctx = NULL; } if (video_cap->snapshot_shm_queue) { delete video_cap->snapshot_shm_queue; video_cap->snapshot_shm_queue = NULL; } if (video_cap->rtp_shm_queue) { delete video_cap->rtp_shm_queue; video_cap->rtp_shm_queue = NULL; } if (video_cap->preview_shm_queue) { delete video_cap->preview_shm_queue; video_cap->preview_shm_queue = NULL; } free(video_cap); } } static int video_capture_start(video_capture_t *video_cap) { capture_config_t *conf = &video_cap->parent->config; int dev_id; if (video_cap->camera_type == CAMERA_TYPE_ENV) { dev_id = capture_get_video_device_id(conf->strVideoEnv); if (dev_id == -1) { Dbg("No environment camera,please check config file or device!"); return -1; } } else { dev_id = capture_get_video_device_id(conf->strVideoOpt); if (dev_id == -1) { Dbg("No operation camera,please check config file or device!"); return -1; } } videocap_param param = {0}; int cap_mode; int rc = -1; rc = calc_capture_mode(REC_COMMON_VIDEO_SNAPSHOT_WIDTH, REC_COMMON_VIDEO_SNAPSHOT_HEIGHT, &cap_mode); if (rc != 0) { Dbg("calc cap_mode failed!"); return rc; } param.cap_mode = cap_mode; param.dev_id = dev_id; param.frame_fmt = VIDEO_FORMAT_RGB24; if ((ePadtype == g_eDeviceType)||(eMobilePadType == g_eDeviceType)||(eDesk2SType == g_eDeviceType)||(eDesk1SType == g_eDeviceType)||(eDesk2SIntegratedType == g_eDeviceType)) { param.fps = REC_COMMON_VIDEO_PADRAW_FPS; } else { param.fps = REC_COMMON_VIDEO_RAW_FPS; } param.on_frame = (video_cap->camera_type == CAMERA_TYPE_ENV ? &env_cap_on_frame : &opt_cap_on_frame); param.user_data = video_cap; param.option = 0; rc = videocap_create(&video_cap->cap, ¶m); if (rc != 0) { Dbg("videocap create failed!"); return rc; } rc = videocap_start(video_cap->cap); if (rc != 0) { Dbg("videocap start failed!"); videocap_destroy(video_cap->cap); video_cap->cap = NULL; return rc; } else{ Dbg("videocap start success!"); } return 0; } static void video_capture_stop(video_capture_t *video_cap) { if (video_cap->cap) { videocap_stop(video_cap->cap); videocap_destroy(video_cap->cap); video_cap->cap = NULL; } } static int audio_get_dev_count(int *in_cnt, int *out_cnt) { int icnt = 0, ocnt = 0; int cnt = Pa_GetDeviceCount(); for (int i = 0; i < cnt; ++i) { const PaDeviceInfo *info = Pa_GetDeviceInfo(i); if (info->maxInputChannels) icnt ++; if (info->maxOutputChannels) ocnt ++; } if (in_cnt) *in_cnt = icnt; if (out_cnt) *out_cnt = ocnt; return 0; } static CSimpleStringA audio_get_dev_name(bool in_direction, int idx) { int cnt = Pa_GetDeviceCount(); int ii, i; for (i = 0, ii = 0; i < cnt; ++i) { const PaDeviceInfo *info = Pa_GetDeviceInfo(i); if (in_direction) { if (info->maxInputChannels) { if (idx == ii) { return CSimpleStringA(info->name); } ii++; } } else { if (info->maxOutputChannels) { if (idx == ii) { return CSimpleStringA(info->name); } ii++; } } } return CSimpleStringA(); } namespace MediaController { DeviceTypeEnum g_eDeviceType; int capture_create(const capture_config_t *config, capture_t **p_cap) { capture_t *cap = ZALLOC_T(capture_t); cap->audio = NULL; cap->env_video = NULL; cap->opt_video = NULL; memcpy(&cap->config, config, sizeof(capture_config_t)); cap->audio = audio_capture_create(cap); if (!cap->audio) { Dbg("create audio capture object failed!"); return Error_Unexpect; } int dev_id_env = capture_get_video_device_id(config->strVideoEnv); if (dev_id_env != -1) { cap->env_video = video_capture_create(cap, CAMERA_TYPE_ENV); if (!cap->env_video) { Dbg("create env video object failed!"); return Error_Unexpect; } } int dev_id_opt = -1; if (eStand2sType == g_eDeviceType) { dev_id_opt = capture_get_video_device_id(config->strVideoOpt); } if (dev_id_opt != -1) { cap->opt_video = video_capture_create(cap, CAMERA_TYPE_OPT); if (!cap->opt_video) { Dbg("create opt video object failed!"); return Error_Unexpect; } } if((dev_id_env == -1)&&(dev_id_opt == -1)) { Dbg("all camera device id error!"); capture_destroy(cap); return Error_Unexpect; } else { *p_cap = cap; return 0; } } ErrorCodeEnum capture_create(const capture_config_t *config,capture_t *cap,int nCamera) { int dev_id1 = capture_get_video_device_id(config->strVideoEnv); int dev_id2 = capture_get_video_device_id(config->strVideoOpt); if((dev_id1 != -1)&&(nCamera==ENVCAMERA)&&(cap->env_video == NULL)) { cap->env_video = video_capture_create(cap, CAMERA_TYPE_ENV); if (!cap->env_video) { Dbg("create env video object failed!"); return Error_Unexpect; } } else if((dev_id2 != -1)&&(nCamera == OPTCAMERA)&&(cap->opt_video == NULL)) { cap->opt_video = video_capture_create(cap, CAMERA_TYPE_OPT); if (!cap->opt_video) { Dbg("create opt video object failed!"); return Error_Unexpect; } } else { return Error_Unexpect; } return Error_Succeed; } int salesaudio_capture_create(rvc_audio_capture_config_t *config, rvc_sales_audio_capture_t **p_cap) { rvc_sales_audio_capture_t *cap = ZALLOC_T(rvc_sales_audio_capture_t); cap->rvc_audio = NULL; memcpy(&cap->rvc_audio_config, config, sizeof(rvc_audio_capture_config_t)); cap->rvc_audio = salesrecord_audio_capture_create(cap); if (!cap->rvc_audio) { Dbg("create sales record audio capture object failed!"); salesaudio_capture_destroy(cap); cap = NULL; return Error_Unexpect; } else { cap->rvc_audio->iaudio_capture_peroid = config->audio_capture_period; cap->rvc_audio->iaudio_capture_samplerate = config->audio_capture_samplerate; *p_cap = cap; Dbg("create sales record audio capture object(%0x) success, capture sample rate is %d, and rvc audio(%0x) in device is %s.", cap, cap->rvc_audio->iaudio_capture_samplerate, cap->rvc_audio, cap->rvc_audio_config.strAudioIn.GetData()); return 0; } } void salesaudio_capture_destroy(rvc_sales_audio_capture_t *cap) { if (NULL != cap){ Dbg("sales audio capture destroy, and cap addr is %0x.", cap); if (cap->rvc_audio) { Dbg("sales audio capture rvc_audio destroy, and rvc_audio addr is %0x.", cap->rvc_audio); salesrecord_audio_capture_destroy(cap->rvc_audio); cap->rvc_audio = NULL; Dbg("set rvc_audio to null."); } free(cap); } } void capture_destroy(capture_t *cap) { if (cap) { if (cap->env_video) { video_capture_destroy(cap->env_video); cap->env_video = NULL; } if (cap->opt_video) { video_capture_destroy(cap->opt_video); cap->opt_video = NULL; } if (cap->audio) { audio_capture_destroy(cap->audio); cap->audio = NULL; } free(cap); } } void capture_destroy(capture_t *cap,int nCamera) { if (cap) { if((cap->env_video)&&(nCamera == ENVCAMERA)) { video_capture_destroy(cap->env_video); cap->env_video = NULL; } else if((cap->opt_video)&&(nCamera==OPTCAMERA)) { video_capture_destroy(cap->opt_video); cap->opt_video = NULL; } } } ErrorCodeEnum start_audio_capture(audio_capture_t *paudio) { ErrorCodeEnum rslt = Error_Succeed; int rc = 0; if (NULL != paudio) { rc = audio_capture_start(paudio); if (rc != Error_Succeed) { rslt = (ErrorCodeEnum)rc; if (rslt == Error_AudioIN){ Dbg("start audio In object failed! rc:%d", rc); LogError(Severity_High,Error_NotInit,ERROR_MOD_MEDIACONTROLLER_HANDFREE_OPENFAIL,"open audioIn device fail,please check device"); } else{ Dbg("start audio Out object failed! rc:%d", rc); LogError(Severity_High,Error_NotInit,ERROR_MOD_MEDIACONTROLLER_HANDFREE_OPENFAIL,"open audioOut device fail,please check device"); } } } else{ Dbg("audio_capture_t is null."); rslt = Error_AudioIN; } return rslt; } ErrorCodeEnum capture_start(capture_t *cap) { int rc = 0; ErrorCodeEnum rslt = start_audio_capture(cap->audio); if (Error_Succeed != rslt){ return rslt; } if (cap->env_video) { rc = video_capture_start(cap->env_video); if (rc != Error_Succeed) { Dbg("start env video capture object failed! rc:%d", rc); char strMessage[MAX_PATH*2] = {0}; get_camera_exception_message(strMessage, MAX_PATH*2, cap->config.strVideoEnv, "open environ camera fail,please check device."); LogError(Severity_High,Error_NotInit,ERROR_MOD_MEDIACONTROLLER_ENVCAM_OPEN,strMessage); if (cap->opt_video) { if (cap->env_video) { ::Sleep(3000); } rc = video_capture_start(cap->opt_video); if (rc != Error_Succeed) { Dbg("start opt video capture object failed! rc:%d", rc); char strMessage[MAX_PATH*2] = {0}; get_camera_exception_message(strMessage, MAX_PATH*2, cap->config.strVideoOpt, "open operate camera fail,please check device."); LogError(Severity_High,Error_NotInit,ERROR_MOD_MEDIACONTROLLER_OPTCAM_OPEN, strMessage); return Error_AllCamera; } else { Dbg("start env video capture object failed!"); return Error_EnvCamera; } } else { Dbg("start all video capture object failed!"); return Error_AllCamera; } } else { if (cap->opt_video) { if (cap->env_video) { ::Sleep(3000); } rc = video_capture_start(cap->opt_video); if (rc != Error_Succeed) { Dbg("start opt video capture object failed! rc:%d", rc); char strMessage[MAX_PATH*2] = {0}; get_camera_exception_message(strMessage, MAX_PATH*2, cap->config.strVideoOpt, "open operate camera fail,please check device."); LogError(Severity_High,Error_NotInit,ERROR_MOD_MEDIACONTROLLER_OPTCAM_OPEN,strMessage); return Error_OptCamera; } else { return Error_Succeed; } } else { Dbg("opt_video = Null"); return Error_OptCamera; } } } else { if (cap->opt_video) { if (cap->env_video) { ::Sleep(3000); } rc = video_capture_start(cap->opt_video); if (rc != Error_Succeed) { Dbg("start opt video capture object failed! rc:%d", rc); char strMessage[MAX_PATH*2] = {0}; get_camera_exception_message(strMessage, MAX_PATH*2, cap->config.strVideoOpt, "open operate camera fail,please check device."); LogError(Severity_High,Error_NotInit,ERROR_MOD_MEDIACONTROLLER_OPTCAM_OPEN,strMessage); return Error_AllCamera; } else { Dbg("start env video capture object failed!"); return Error_EnvCamera; } } else { Dbg("start env video Error_AllCamera"); return Error_AllCamera; } } } ErrorCodeEnum capture_start(capture_t *cap,int nCamera) { int rc = 0; if (cap->env_video&&(nCamera==ENVCAMERA)) { rc = video_capture_start(cap->env_video); if (rc != Error_Succeed) { return Error_Hardware; } else { return Error_Succeed; } } else if(cap->opt_video&&(nCamera==OPTCAMERA)) { rc = video_capture_start(cap->opt_video); if (rc != Error_Succeed) { return Error_Hardware; } else { return Error_Succeed; } } else { return Error_Unexpect; } } ErrorCodeEnum salesrecord_audio_capture_start(rvc_sales_audio_capture_t *cap) { auto rc = Error_Param; if (NULL == cap){ return rc; } if (cap->rvc_audio) { int rslt = record_audio_capture_start(cap->rvc_audio); if (rslt != Error_Succeed) { if (Error_AudioIN == rslt){ Dbg("start audio In object failed! rc:%d", rc); LogError(Severity_High,Error_NotInit,ERROR_MOD_MEDIACONTROLLER_HANDFREE_OPENFAIL,"open audioIn device fail,please check device"); } rc = (ErrorCodeEnum)rslt; } else{ rc = Error_Succeed; } } return rc; } void salesrecord_audio_capture_stop(rvc_sales_audio_capture_t *cap) { if (NULL != cap){ if (cap->rvc_audio){ if (cap->rvc_audio->pdata){ if (eSingleWriteLocal == cap->rvc_audio->eType){ fclose((FILE*)cap->rvc_audio->pdata); cap->rvc_audio->pdata = NULL; } } record_audio_capture_stop(cap->rvc_audio); } } else{ Dbg("sales record audio capture stop failed for param error."); } } void capture_stop(capture_t *cap) { if (cap->audio) { audio_capture_stop(cap->audio); } if (cap->env_video) { video_capture_stop(cap->env_video); } if (cap->opt_video) { video_capture_stop(cap->opt_video); } } int capture_detect_camera_bug(capture_t *cap, int *env_n, int *opt_n,BOOL IsPad) { *env_n = 0; *opt_n = 0; if (cap->env_video) { if (cap->env_video->rtp_shm_queue) { *env_n = cap->env_video->rtp_shm_queue->GetVideoLens(); } } else { *env_n = -1; } if (cap->opt_video) { if (cap->opt_video->rtp_shm_queue) { *opt_n = cap->opt_video->rtp_shm_queue->GetVideoLens(); } } else { if (!IsPad) { *opt_n = -1; } else { *opt_n = 1; } } return 0; } int capture_get_last_frametime(capture_t *cap, DWORD *env_n, DWORD *opt_n,BOOL IsPad) { *env_n = 0; *opt_n = 0; if (cap->env_video) { if (cap->env_video->rtp_shm_queue) { *env_n = cap->env_video->rtp_shm_queue->GetLastFrameTime(); } } else { *env_n = 0; } if (cap->opt_video) { if (cap->opt_video->rtp_shm_queue) { *opt_n = cap->opt_video->rtp_shm_queue->GetLastFrameTime(); } } else { *opt_n = 0; } return 0; } int capture_get_audio_device_id(bool in_direction, const char *dev_name) { int cnt = Pa_GetDeviceCount(); int ii, i; for (i = 0, ii = 0; i < cnt; ++i) { const PaDeviceInfo *info = Pa_GetDeviceInfo(i); if (in_direction) { if (info->maxInputChannels) { if (strstr(info->name, dev_name) != NULL) { return ii; } ii++; } } else { if (info->maxOutputChannels) { if (strstr(info->name, dev_name) != NULL) { return ii; } ii++; } } } return -1; } int capture_get_video_device_id(const char *dev_name) { int i, n; n = videocap_get_device_count(); for (i = 0; i < n; ++i) { WCHAR tmp[256]; char t[256]; WCHAR tmp1[256]; char t1[256]; videocap_get_device_name(i, tmp, ARRAYSIZE(tmp)); WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL); videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1)); WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL); // save DevicePath (add by ly at 20160725) char t2[256]; strcpy(t2,t1); for (int j = 0; j < strlen(t2); ++j) { t2[j] = toupper(t2[j]); if (t2[j] == '#') t2[j] = '\\'; } { unsigned char x[MD5_DIGESTSIZE]; md5_ctx_t ctx; md5_init(&ctx); md5(x, t1, strlen(t1)); Bin2Str(x, sizeof(x), t1, sizeof(t1)); } if (dev_name != NULL && strlen(dev_name) > 1 && strstr(dev_name, ";") == NULL) // 外接摄像头 add by ly at 20160531 { /*if (strstr(t, dev_name) != NULL) { Dbg("[dbg] %s founded in %d cameras.", dev_name, n); return i; }*/ Dbg("[dbg] device_path: %s",t2); if (strstr(t2,dev_name) != NULL) // 判断外接摄像头DeviceLocationPaths是否是DevicePath的子串 { Dbg("[dbg] %s founded in %d cameras.", dev_name, n); return i; } if (strcmp(dev_name, t) == 0) // 如果是直接用友好名称查询(适用于高拍仪) add by ly 2017/11/08 return i; } else { strcat(t, ";"); strcat(t, t1); if (strcmp(dev_name, t) == 0) return i; } } return -1; // not found } int capture_lib_init() // edit by ly at 20160401 { HRESULT hr = CoInitialize(NULL); int rc; { HMODULE hModule = GetModuleHandleA("MSVCR100.dll"); if (hModule) { typedef char *(*f_setlocale)(int, const char*); f_setlocale f = (f_setlocale)GetProcAddress(hModule, "setlocale"); (*f)(LC_ALL, "chs"); } } if (SUCCEEDED(hr)) { PaError Error; Error = Pa_Initialize(); if (Error == paNoError) { rc = videoframework_init(); if (rc != 0) { Dbg("videoframework_init failed, rc=%d", rc); return Error_Resource; } } else { Dbg("PaInitialize failed, rc=%d", Error); return Error_Resource; } } else { Dbg("coinitialze failed! hr:%d", hr); return Error_Resource; } { int i, n; n = videocap_get_device_count(); for (i = 0; i < n; ++i) { WCHAR tmp[256]; char t[256]; WCHAR tmp1[256]; char t1[256]; videocap_get_device_name(i, tmp, ARRAYSIZE(tmp)); WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL); videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1)); WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL); { unsigned char x[MD5_DIGESTSIZE]; md5_ctx_t ctx; md5_init(&ctx); md5(x, t1, strlen(t1)); Bin2Str(x, sizeof(x), t1, sizeof(t1)); } Dbg("%d = %s;%s", i, t, t1); } } { int icnt, ocnt; rc = audio_get_dev_count(&icnt, &ocnt); if (rc == 0) { int i; Dbg("audio input devices(%d):", icnt); for (i = 0; i < icnt; ++i) { CSimpleStringA str = audio_get_dev_name(true, i); Dbg("%d = %s", i, (LPCSTR)str); } Dbg("audio output devices(%d):", ocnt); for (i = 0; i < ocnt; ++i) { CSimpleStringA str = audio_get_dev_name(false, i); Dbg("%d = %s", i, (LPCSTR)str); } } } return Error_Succeed; } int videocap_innerdev_fetch(CSimpleStringA&frontcam,CSimpleStringA&rearcam) // edit by ly at 20160401 { int i, n; frontcam = "$", rearcam = "$"; n = videocap_get_device_count(); if (n == 2) // 未插入外接时 { for (i = 0; i < n; ++i) { WCHAR tmp[256]; char t[256]; WCHAR tmp1[256]; char t1[256]; videocap_get_device_name(i, tmp, ARRAYSIZE(tmp)); WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL); videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1)); WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL); { unsigned char x[MD5_DIGESTSIZE]; md5_ctx_t ctx; md5_init(&ctx); md5(x, t1, strlen(t1)); Bin2Str(x, sizeof(x), t1, sizeof(t1)); } strcat(t, ";"); strcat(t, t1); if (i==0) { frontcam = t; } else if(i==1) { rearcam = t; } } } else { Dbg("[dbg] detect %d cameras.", n); } return n; } int videocap_outerdev_fetch( CSimpleStringA envcam,CSimpleStringA optcam,CSimpleStringA ewscam,CAutoArray &hspcams,CSimpleStringA &outercam ) { int i, n; outercam = "$"; n = videocap_get_device_count(); int m = hspcams.GetCount(); // 高拍仪的个数 add by ly 2017/11/07 if (ewscam.GetLength() <= 1) // 如果外部广角摄像头未配置 { if (n <= m || n > 3+m) { Dbg("[dbg] [videocap_outerdev_fetch] detect %d cameras.", n); } else { int cnt = 0; for (i = 0; i < n; ++i) { WCHAR tmp[256]; char t[256]; WCHAR tmp1[256]; char t1[256]; videocap_get_device_name(i, tmp, ARRAYSIZE(tmp)); WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL); videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1)); WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL); // save DevicePath (add by ly at 20160725) char t2[256]; strcpy(t2,t1); for (int j = 0; j < strlen(t2); ++j) { t2[j] = toupper(t2[j]); if (t2[j] == '#') t2[j] = '\\'; } { unsigned char x[MD5_DIGESTSIZE]; md5_ctx_t ctx; md5_init(&ctx); md5(x, t1, strlen(t1)); Bin2Str(x, sizeof(x), t1, sizeof(t1)); } // 检查是否为高拍仪摄像头,若是则直接枚举下一个摄像头 bool isHspCam = false; for (int k = 0; k < m; ++k) { if (!strcmp((LPCTSTR)hspcams[k],t)) { isHspCam = true; break; } } if (isHspCam) { continue; } strcat(t, ";"); strcat(t, t1); if(strcmp(t,(LPCTSTR)envcam) && strcmp(t,(LPCTSTR)optcam)) { if (0 == cnt) { outercam = t2; } ++cnt; } } if(cnt != 1) { outercam = "$"; } } } else // 如果外部广角摄像头已配置 { if (n <= m || n > 4+m) { Dbg("[dbg] [videocap_outerdev_fetch] detect %d cameras.", n); } else { int cnt = 0; for (i = 0; i < n; ++i) { WCHAR tmp[256]; char t[256]; WCHAR tmp1[256]; char t1[256]; videocap_get_device_name(i, tmp, ARRAYSIZE(tmp)); WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL); videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1)); WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL); // save DevicePath (add by ly at 20160725) char t2[256]; strcpy(t2,t1); for (int j = 0; j < strlen(t2); ++j) { t2[j] = toupper(t2[j]); if (t2[j] == '#') t2[j] = '\\'; } { unsigned char x[MD5_DIGESTSIZE]; md5_ctx_t ctx; md5_init(&ctx); md5(x, t1, strlen(t1)); Bin2Str(x, sizeof(x), t1, sizeof(t1)); } // 检查是否为高拍仪摄像头,若是则直接枚举下一个摄像头 bool isHspCam = false; for (int k = 0; k < m; ++k) { if (!strcmp((LPCTSTR)hspcams[k],t)) { isHspCam = true; break; } } if (isHspCam) { continue; } strcat(t, ";"); strcat(t, t1); if(strcmp(t,(LPCTSTR)envcam) && strcmp(t,(LPCTSTR)optcam) && !strstr(t2,(LPCTSTR)ewscam)) { if (0 == cnt) { outercam = t2; } ++cnt; } } if(cnt != 1) { outercam = "$"; } } } return n; } void capture_lib_term() { videoframework_term(); Pa_Terminate(); CoUninitialize(); } bool capture_adj_brightness(capture_t *cap,int nvalue,ErrorCodeEnum nCode) { HRESULT rst = S_OK; if (cap->env_video&&(nCode!=Error_EnvCamera)&&(nCode!=Error_AllCamera)) { rst = videocap_adj_brightness(cap->env_video->cap,nvalue); } HRESULT rst2 = S_OK; if (cap->opt_video->cap&&(nCode!=Error_OptCamera)&&(nCode!=Error_AllCamera)) { rst2 = videocap_adj_brightness(cap->opt_video->cap,nvalue); } if (SUCCEEDED(rst)&&SUCCEEDED(rst2)) return true; else return false; } bool capture_set_autobrightness(capture_t *cap,ErrorCodeEnum nCode) { HRESULT rst = S_OK; HRESULT rst2 = S_OK; if (cap->env_video&&(nCode!=Error_EnvCamera)&&(nCode!=Error_AllCamera)) { rst = videocap_set_autobrightness(cap->env_video->cap); } if (cap->opt_video&&(nCode!=Error_OptCamera)&&(nCode!=Error_AllCamera)) { rst2 = videocap_set_autobrightness(cap->opt_video->cap); } if (SUCCEEDED(rst)&&SUCCEEDED(rst2)) return true; else return false; } int capture_get_brightness(capture_t *cap,ErrorCodeEnum nCode) { int nValue1=0; int nValue2=0; HRESULT rst = S_OK; if (cap->env_video&&(nCode!=Error_EnvCamera)&&(nCode!=Error_AllCamera)) { HRESULT rst = videocap_get_brightness(cap->env_video->cap,&nValue1); } HRESULT rst2 = S_OK; if (cap->opt_video&&(nCode!=Error_OptCamera)&&(nCode!=Error_AllCamera)) { rst2 = videocap_get_brightness(cap->opt_video->cap,&nValue2); } else { return -1; } if ((cap->opt_video == NULL)&&cap->env_video) { return nValue1; } else if ((cap->env_video == NULL)&&cap->opt_video) { return nValue2; } else { if (SUCCEEDED(rst)&&SUCCEEDED(rst2)) return (nValue1<=nValue2)?nValue1:nValue2; else return -1; } } int StopCamera(capture_t *cap,int nCamera) { if((nCamera == ENVCAMERA)&&cap->env_video) { video_capture_stop(cap->env_video); video_capture_destroy(cap->env_video); cap->env_video = NULL; return 0; } else if((nCamera == OPTCAMERA)&&cap->opt_video) { video_capture_stop(cap->opt_video); video_capture_destroy(cap->opt_video); cap->opt_video = NULL; return 0; } else { return -1; } } //根据frontcam完整名(包含设备名+逗号+设备路径MD5)、ewscam设备路径、hspcams设备名,找到rearcam的完整名 //排除以上设备后的唯一设备即为rearcam int videocap_optdev_fetch( CSimpleStringA frontcam,CSimpleStringA ewscam,CAutoArray &hspcams,CSimpleStringA &rearcam ) { int i, n; rearcam = "$"; n = videocap_get_device_count(); int m = hspcams.GetCount(); // 高拍仪的个数 add by ly 2017/11/07 if (ewscam.GetLength() <= 1) // 如果外部广角摄像头未配置 { if (n <= 2+m) { int cnt = 0; for (i = 0; i < n; ++i) { WCHAR tmp[256]; char t[256]; WCHAR tmp1[256]; char t1[256]; videocap_get_device_name(i, tmp, ARRAYSIZE(tmp)); WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL); videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1)); WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL); { unsigned char x[MD5_DIGESTSIZE]; md5_ctx_t ctx; md5_init(&ctx); md5(x, t1, strlen(t1)); Bin2Str(x, sizeof(x), t1, sizeof(t1)); } // 检查是否为高拍仪摄像头,若是则直接枚举下一个摄像头 bool isHspCam = false; for (int k = 0; k < m; ++k) { if (!strcmp((LPCTSTR)hspcams[k],t)) { isHspCam = true; break; } } if (isHspCam) { continue; } strcat(t, ";"); strcat(t, t1); if (strcmp((LPCTSTR)frontcam, t)) { if (0 == cnt) { rearcam = t; } cnt++; } } if (1 != cnt) // 未找到或无法识别内置后摄像头 { rearcam = "$"; } } else { Dbg("[dbg] [videocap_optdev_fetch] detect %d cameras.", n); } } else // 如果外部广角摄像头已配置 { if (n <= 3+m) { int cnt = 0; for (i = 0; i < n; ++i) { WCHAR tmp[256]; char t[256]; WCHAR tmp1[256]; char t1[256]; videocap_get_device_name(i, tmp, ARRAYSIZE(tmp)); WideCharToMultiByte(CP_ACP, 0, tmp, -1, t, sizeof(t), 0, NULL); videocap_get_device_path(i, tmp1, ARRAYSIZE(tmp1)); WideCharToMultiByte(CP_ACP, 0, tmp1, -1, t1, sizeof(t1), 0, NULL); // 检查是否为高拍仪摄像头,若是则直接枚举下一个摄像头 bool isHspCam = false; for (int k = 0; k < m; ++k) { if (!strcmp((LPCTSTR)hspcams[k],t)) { isHspCam = true; break; } } if (isHspCam) { continue; } // save DevicePath (add by ly at 20160725) char t2[256]; strcpy(t2,t1); for (int j = 0; j < strlen(t2); ++j) { t2[j] = toupper(t2[j]); if (t2[j] == '#') t2[j] = '\\'; } { unsigned char x[MD5_DIGESTSIZE]; md5_ctx_t ctx; md5_init(&ctx); md5(x, t1, strlen(t1)); Bin2Str(x, sizeof(x), t1, sizeof(t1)); } strcat(t, ";"); strcat(t, t1); if (strcmp((LPCTSTR)frontcam, t) && !strstr(t2,(LPCTSTR)ewscam)) { if (0 == cnt) { rearcam = t; } cnt++; } } if (1 != cnt) // 未找到或无法识别内置后摄像头 { rearcam = "$"; } } else { Dbg("[dbg] [videocap_optdev_fetch] detect %d cameras.", n); } } return n; } void capture_clearsnapshotvideo(capture_t *cap,int nCamera) { if((nCamera == ENVCAMERA)&&cap->env_video) { Dbg("clear ENV snapshot video!"); cap->env_video->snapshot_shm_queue->ClearVideoQueue(); return; } else if((nCamera == OPTCAMERA)&&cap->opt_video) { Dbg("clear OPT snapshot video!"); cap->opt_video->snapshot_shm_queue->ClearVideoQueue(); return; } else if(nCamera == AlLCAMERA) { if (cap->env_video) { Dbg("clear ENV snapshot video!"); cap->env_video->snapshot_shm_queue->ClearVideoQueue(); } if (cap->opt_video) { Dbg("clear OPT snapshot video!"); cap->opt_video->snapshot_shm_queue->ClearVideoQueue(); } return; } else { return ; } } int get_camera_exception_message(char* pBuffer, size_t uLen, CSimpleStringA strDeviceName, const char* strErrorMessage) { int iRet = 0; if (strDeviceName.GetLength() > 0){ const char* strCameraName = strDeviceName.GetData(); char strBuffer[MAX_PATH] = {0}; if (sprintf_s(strBuffer, MAX_PATH, "%s", strCameraName) > 0){ char *pIndex = NULL; if (pIndex = (char*)strstr(strBuffer, ";")){ *pIndex = '\0'; } } if (NULL != strErrorMessage){ size_t uDataLen = strlen(strBuffer); size_t uErrorLen = strlen(strErrorMessage); if (uLen > uDataLen + uErrorLen + 10){ iRet = sprintf_s(pBuffer, uLen, "[%s] %s", strBuffer, strErrorMessage); } } } if (0 == iRet){ if (NULL != strErrorMessage){ iRet = sprintf_s(pBuffer, uLen, "%s", strErrorMessage); } } return iRet; } }