DirectShow: Webcam preview and image capture

Posted: 2011-09-26 04:24:57

Tags: c++ windows video webcam directshow

After looking at a very similar question and seeing nearly identical code, I decided to ask this as a separate question. I want to display a preview of the webcam's video stream in the default window that DirectShow uses, and I also want to be able to "take a picture" of the video stream at any given moment.

I started from the DirectShow samples on MSDN, as well as the AMCap sample code, and I thought I had the preview part working, but I don't. I haven't found any example of grabbing an image from the video stream other than ones that use SampleGrabber, which is deprecated, so I am trying not to use it.

Below is my code, wrapped for readability. Note that most of the code in EnumerateCameras is commented out; that code would be for attaching to another window, which I don't want to do. The MSDN documentation explicitly states that the VMR_7 creates its own window to display the video stream. My application gets no errors, but this window never appears.

My question is this: what am I doing wrong? Alternatively, if you know of a simple example of what I am trying to do, please link me to it. AMCap is not a simple example, for reference.

NOTE: InitializeVMR is for running in windowless mode, which is my ultimate goal (integration into a DirectX game). For now, though, I just want it running in the simplest way possible.

EDIT: The first part of this question, previewing the camera stream, is solved. I am now just looking for an alternative to the deprecated SampleGrabber class so that I can snap a picture at any time and save it to a file.

EDIT: After searching Google for the better part of an hour, the general consensus seems to be that you have to use ISampleGrabber. Please let me know if you find anything different.
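
For the record, here is roughly what the ISampleGrabber snapshot path would look like. This is only a sketch under assumptions: it presumes a Sample Grabber filter (CLSID_SampleGrabber) has already been created, added to the graph, and connected between the capture filter and a renderer, and SnapFrame and its buffer handling are illustrative names, not part of my code below.

// Sketch only: needs the deprecated qedit.h header and a Sample Grabber
// filter already inserted and connected in the running graph.
#include <dshow.h>
#include <qedit.h>   // ISampleGrabber
#include <vector>

HRESULT SnapFrame(ISampleGrabber* pGrabber, std::vector<BYTE>& frame)
{
    // Ask the grabber to keep a copy of the most recent sample.
    HRESULT hr = pGrabber->SetBufferSamples(TRUE);
    if (FAILED(hr)) return hr;

    // First call with a NULL buffer just reports the required size.
    long cbBuffer = 0;
    hr = pGrabber->GetCurrentBuffer(&cbBuffer, NULL);
    if (FAILED(hr)) return hr;
    if (cbBuffer <= 0) return E_FAIL;

    frame.resize(cbBuffer);
    hr = pGrabber->GetCurrentBuffer(&cbBuffer, reinterpret_cast<long*>(&frame[0]));
    if (FAILED(hr)) return hr;

    // The connected media type carries the dimensions needed to save
    // the raw buffer out as a BMP.
    AM_MEDIA_TYPE mt;
    hr = pGrabber->GetConnectedMediaType(&mt);
    if (SUCCEEDED(hr) && mt.formattype == FORMAT_VideoInfo && mt.pbFormat != NULL)
    {
        VIDEOINFOHEADER* pVih = reinterpret_cast<VIDEOINFOHEADER*>(mt.pbFormat);
        // pVih->bmiHeader.biWidth / biHeight describe the frame; write a
        // BITMAPFILEHEADER followed by bmiHeader and the buffer to get a BMP.
        (void)pVih;
        CoTaskMemFree(mt.pbFormat);
    }
    return hr;
}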

Test code (main.cpp):

CWebcam* camera = new CWebcam();
HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
MessageBox(NULL, L"text", L"caption", NULL);
if (SUCCEEDED(hr))
{
    camera->Create();
    camera->EnumerateCameras();
    camera->StartCamera();
}
int d;
cin >> d;

Webcam.cpp:

#include "Webcam.h"

CWebcam::CWebcam() {
    HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
    //m_pTexInst = nullptr;
    //m_pTexRes = nullptr;
}

CWebcam::~CWebcam() {
    CoUninitialize();
    m_pDeviceMonikers->Release();
    m_pMediaController->Release();
}

BOOL CWebcam::Create() {
    InitCaptureGraphBuilder(&m_pFilterGraph, &m_pCaptureGraph);
    hr = m_pFilterGraph->QueryInterface(IID_IMediaControl, (void **)&m_pMediaController);
    return TRUE;
}

void CWebcam::Destroy() {
}

void CWebcam::EnumerateCameras() {  
    HRESULT hr = EnumerateDevices(CLSID_VideoInputDeviceCategory, &m_pDeviceMonikers);
    if (SUCCEEDED(hr))
    {
        //DisplayDeviceInformation(m_pDeviceMonikers);
        //m_pDeviceMonikers->Release();

        IMoniker *pMoniker = NULL;
        if(m_pDeviceMonikers->Next(1, &pMoniker, NULL) == S_OK)
        {
            hr = pMoniker->BindToObject(0, 0, IID_IBaseFilter, (void**)&m_pCameraFilter);
            if (SUCCEEDED(hr))
            {
                hr = m_pFilterGraph->AddFilter(m_pCameraFilter, L"Capture Filter");
            }
        }

        // connect the output pin to the video renderer
        if(SUCCEEDED(hr))
        {
            hr = m_pCaptureGraph->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, 
                m_pCameraFilter, NULL, NULL);
        }
        //InitializeVMR(hwnd, m_pFilterGraph, &m_pVMRControl, 1, FALSE);
        //get the video window that will be displayed from the filter graph
        IVideoWindow *pVideoWindow = NULL;
        hr = m_pFilterGraph->QueryInterface(IID_IVideoWindow, (void **)&pVideoWindow);
        /*if(hr != NOERROR)
        {
            printf("This graph cannot preview properly");
        }
        else
        {
            //get the video stream configurations
            hr = m_pCaptureGraph->FindInterface(&PIN_CATEGORY_CAPTURE,
                &MEDIATYPE_Video, m_pCameraFilter,
                IID_IAMStreamConfig, (void **)&m_pVideoStreamConfig);

            //Find out if this is a DV stream
            AM_MEDIA_TYPE *pMediaTypeDV;

            //fake window handle
            HWND window = NULL;
            if(m_pVideoStreamConfig && SUCCEEDED(m_pVideoStreamConfig->GetFormat(&pMediaTypeDV)))
            {
                if(pMediaTypeDV->formattype == FORMAT_DvInfo)
                {
                    // in this case we want to set the size of the parent window to that of
                    // current DV resolution.
                    // We get that resolution from the IVideoWindow.
                    IBasicVideo* pBasivVideo;

                    // If we got here, gcap.pVW is not NULL 
                    //ASSERT(pVideoWindow != NULL);
                    hr = pVideoWindow->QueryInterface(IID_IBasicVideo, (void**)&pBasivVideo);

                    /*if(SUCCEEDED(hr))
                    {
                        HRESULT hr1, hr2;
                        long lWidth, lHeight;

                        hr1 = pBasivVideo->get_VideoHeight(&lHeight);
                        hr2 = pBasivVideo->get_VideoWidth(&lWidth);
                        if(SUCCEEDED(hr1) && SUCCEEDED(hr2))
                        {
                            ResizeWindow(lWidth, abs(lHeight));
                        }
                    }
                }
            }

            RECT rc;
            pVideoWindow->put_Owner((OAHWND)window);    // We own the window now
            pVideoWindow->put_WindowStyle(WS_CHILD);    // you are now a child

            GetClientRect(window, &rc);
            pVideoWindow->SetWindowPosition(0, 0, rc.right, rc.bottom); // be this big
            pVideoWindow->put_Visible(OATRUE);
        }*/
    }   
}

BOOL CWebcam::StartCamera() {
    if(m_bIsStreaming == FALSE)
    {
        m_bIsStreaming = TRUE;
        hr = m_pMediaController->Run();
        if(FAILED(hr))
        {
            // stop parts that ran
            m_pMediaController->Stop();
            return FALSE;
        }
        return TRUE;
    }
    return FALSE;
}

void CWebcam::EndCamera() {
    if(m_bIsStreaming)
    {
        hr = m_pMediaController->Stop();
        m_bIsStreaming = FALSE;
        //invalidate client rect as well so that it must redraw
    }
}

BOOL CWebcam::CaptureToTexture() {
    return TRUE;
}

HRESULT CWebcam::InitCaptureGraphBuilder(
  IGraphBuilder **ppGraph,  // Receives the pointer.
  ICaptureGraphBuilder2 **ppBuild  // Receives the pointer.
)
{
    if (!ppGraph || !ppBuild)
    {
        return E_POINTER;
    }
    IGraphBuilder *pGraph = NULL;
    ICaptureGraphBuilder2 *pBuild = NULL;

    // Create the Capture Graph Builder.
    HRESULT hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, 
        CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void**)&pBuild );
    if (SUCCEEDED(hr))
    {
        // Create the Filter Graph Manager.
        hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,
            IID_IGraphBuilder, (void**)&pGraph);
        if (SUCCEEDED(hr))
        {
            // Initialize the Capture Graph Builder.
            pBuild->SetFiltergraph(pGraph);

            // Return both interface pointers to the caller.
            *ppBuild = pBuild;
            *ppGraph = pGraph; // The caller must release both interfaces.
            return S_OK;
        }
        else
        {
            pBuild->Release();
        }
    }
    return hr; // Failed
}

HRESULT CWebcam::EnumerateDevices(REFGUID category, IEnumMoniker **ppEnum)
{
    // Create the System Device Enumerator.
    ICreateDevEnum *pSystemDeviceEnumerator;
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,  
        CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&pSystemDeviceEnumerator));

    if (SUCCEEDED(hr))
    {
        // Create an enumerator for the category.
        hr = pSystemDeviceEnumerator->CreateClassEnumerator(category, ppEnum, 0);
        if (hr == S_FALSE)
        {
            hr = VFW_E_NOT_FOUND;  // The category is empty. Treat as an error.
        }
        pSystemDeviceEnumerator->Release();
    }
    return hr;
}

void CWebcam::DisplayDeviceInformation(IEnumMoniker *pEnum)
{
    IMoniker *pMoniker = NULL;
    int counter = 0;

    while (pEnum->Next(1, &pMoniker, NULL) == S_OK)
    {
        IPropertyBag *pPropBag;
        HRESULT hr = pMoniker->BindToStorage(0, 0, IID_PPV_ARGS(&pPropBag));
        if (FAILED(hr))
        {
            pMoniker->Release();
            continue;  
        } 

        VARIANT var;
        VariantInit(&var);

        // Get description or friendly name.
        hr = pPropBag->Read(L"Description", &var, 0);
        if (FAILED(hr))
        {
            hr = pPropBag->Read(L"FriendlyName", &var, 0);
        }
        if (SUCCEEDED(hr))
        {
            printf("%d: %S\n", counter, var.bstrVal);
            VariantClear(&var); 
        }

        hr = pPropBag->Write(L"FriendlyName", &var);

        // WaveInID applies only to audio capture devices.
        hr = pPropBag->Read(L"WaveInID", &var, 0);
        if (SUCCEEDED(hr))
        {
            printf("%d: WaveIn ID: %d\n", counter, var.lVal);
            VariantClear(&var); 
        }

        hr = pPropBag->Read(L"DevicePath", &var, 0);
        if (SUCCEEDED(hr))
        {
            // The device path is not intended for display.
            printf("%d: Device path: %S\n", counter, var.bstrVal);
            VariantClear(&var); 
        }

        pPropBag->Release();
        pMoniker->Release();
        counter++;
    }
}

HRESULT CWebcam::InitializeVMR(
    HWND hwndApp,         // Application window.
    IGraphBuilder* pFG,    // Pointer to the Filter Graph Manager.
    IVMRWindowlessControl** ppWc,  // Receives the interface.
    DWORD dwNumStreams,  // Number of streams to use.
    BOOL fBlendAppImage  // Are we alpha-blending a bitmap?
    )
{
    IBaseFilter* pVmr = NULL;
    IVMRWindowlessControl* pWc = NULL;
    *ppWc = NULL;

    // Create the VMR and add it to the filter graph.
    HRESULT hr = CoCreateInstance(CLSID_VideoMixingRenderer, NULL,
       CLSCTX_INPROC, IID_IBaseFilter, (void**)&pVmr);
    if (FAILED(hr))
    {
        return hr;
    }
    hr = pFG->AddFilter(pVmr, L"Video Mixing Renderer");
    if (FAILED(hr))
    {
        pVmr->Release();
        return hr;
    }

    // Set the rendering mode and number of streams.  
    IVMRFilterConfig* pConfig;
    hr = pVmr->QueryInterface(IID_IVMRFilterConfig, (void**)&pConfig);
    if (SUCCEEDED(hr)) 
    {
        pConfig->SetRenderingMode(VMRMode_Windowless);

        // Set the VMR-7 to mixing mode if you want more than one video
        // stream, or you want to mix a static bitmap over the video.
        // (The VMR-9 defaults to mixing mode with four inputs.)
        if (dwNumStreams > 1 || fBlendAppImage) 
        {
            pConfig->SetNumberOfStreams(dwNumStreams);
        }
        pConfig->Release();

        hr = pVmr->QueryInterface(IID_IVMRWindowlessControl, (void**)&pWc);
        if (SUCCEEDED(hr)) 
        {
            pWc->SetVideoClippingWindow(hwndApp);
            *ppWc = pWc;  // The caller must release this interface.
        }
    }
    pVmr->Release();

    // Now the VMR can be connected to other filters.
    return hr;
}

1 Answer:

Answer 0 (score: 4)

In windowless mode the VMR does not create a separate window of its own. Since you start initializing windowless mode, you have to follow your SetVideoClippingWindow call with IVMRWindowlessControl::SetVideoPosition to provide the position of the video within the window; see VMR Windowless Mode on MSDN.
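
As a rough sketch (not a drop-in fix: it assumes the VMR input pin is already connected, and it simply stretches the whole source frame over the client area of hwndApp), the call would sit right after SetVideoClippingWindow in your InitializeVMR:

// Sketch: after SetVideoClippingWindow, tell the windowless VMR where to
// draw inside the owning window. Without this call nothing is rendered.
LONG lWidth = 0, lHeight = 0;
// GetNativeVideoSize only succeeds once the VMR input pin is connected.
hr = pWc->GetNativeVideoSize(&lWidth, &lHeight, NULL, NULL);
if (SUCCEEDED(hr))
{
    RECT rcSrc = { 0, 0, lWidth, lHeight };   // use the whole source frame
    RECT rcDest;
    GetClientRect(hwndApp, &rcDest);          // stretch to the client area
    hr = pWc->SetVideoPosition(&rcSrc, &rcDest);
}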

Another sample code snippet: http://www.assembla.com/code/roatl-utilities/subversion/nodes/trunk/FullScreenWindowlessVmrSample01/MainDialog.h#ln188
