我在使用DirectShow創建的視頻聊天應用程序中使用Logitech C930e網絡攝像頭。到目前爲止,我能夠使用YUY2或mJPEG格式的原始數據流。不過,我發現攝像頭通過UVC接口支持硬件H264編碼。如何通過UVC使用硬件編碼的H264?
現在我使用非標準方法獲取可能的攝像頭捕捉引腳配置,但是沒有H264引腳。
void list_cameras {
ICreateDevEnum *pDevEnum = nullptr;
IEnumMoniker *pEnum = nullptr;
// Create the System Device Enumerator.
HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, nullptr,
CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
reinterpret_cast<void**>(&pDevEnum));
if (SUCCEEDED(hr)) {
// Create an enumerator for the video capture category.
hr = pDevEnum->CreateClassEnumerator(
CLSID_VideoInputDeviceCategory,
&pEnum, 0);
if (hr == S_FALSE) {
return;
}
}
IMoniker *pMoniker = nullptr; // IMoniker is the device
int index = 0;
// for each device
while (pEnum->Next(1, &pMoniker, nullptr) == S_OK) {
// bind camera to filter to be able to use it
if (cam.device->BindToObject(nullptr, nullptr, IID_IBaseFilter, reinterpret_cast<void**>(&_pCapture)) != S_OK) {
continue;
}
// fetch the configuration interface
IAMStreamConfig *pConfig = nullptr;
HRESULT hr = _capture->FindInterface(
&PIN_CATEGORY_CAPTURE, // Preview pin.
nullptr, // Any media type.
_pCapture, // Pointer to the capture filter.
IID_IAMStreamConfig, reinterpret_cast<void**>(&pConfig));
if (FAILED(hr)) {
continue;
}
// fetch possible configurations
int iCount = 0, iSize = 0;
if (pConfig->GetNumberOfCapabilities(&iCount, &iSize) != S_OK) {
continue;
}
// store each configuration
AM_MEDIA_TYPE *pmtConfig;
for (int iFormat = 0; iFormat < iCount; iFormat++) {
// get config
VIDEO_STREAM_CONFIG_CAPS scc;
if (pConfig->GetStreamCaps(iFormat, &pmtConfig, reinterpret_cast<BYTE*>(&scc)) != S_OK) {
continue;
}
// copy config data
VIDEOINFOHEADER *pVih = new VIDEOINFOHEADER(); // deleted afterwards
*pVih = *reinterpret_cast<VIDEOINFOHEADER *>(pmtConfig->pbFormat);
AM_MEDIA_TYPE mt;
mt = *pmtConfig;
mt.pbFormat = reinterpret_cast<BYTE *>(pVih);
auto fcc = FOURCCMap(pVih->bmiHeader.biCompression);
// wrap it
CameraConfig config = { mt, pVih->bmiHeader.biWidth, pVih->bmiHeader.biHeight, 1000/(static_cast<float>(pVih->AvgTimePerFrame)/10000), fcc };
// if resolution is bad (or unsupported), skip this configuration
if (config.width == 0 || config.height == 0) // bad
continue;
cam.configurations.push_back(config);
}
_cameras.push_back(cam);
pConfig->Release();
_pCapture->Release();
}
pEnum->Release();
pDevEnum->Release();
}
_cameras
是相機的矢量,定義如下:
/**
 * Describes one enumerated capture device together with every stream
 * configuration it advertises.
 */
struct Camera {
    DSDevice device;                     //!< Handle to the DirectShow device.
    string name;                         //!< Human-readable camera name.
    vector<CameraConfig> configurations; //!< Every capture format the driver reported.
    int selected;                        //!< Index into `configurations` of the active format.
};
_pCapture
是指向所創建的捕獲過濾器。 CameraConfig
定義如下:
/**
 * One capture format advertised by a camera's capture pin.
 * Member order matters: instances are aggregate-initialized.
 */
struct CameraConfig {
    AM_MEDIA_TYPE _mediaType; //!< Full DirectShow media type for this format.
    int width;                //!< Output frame width in pixels.
    int height;               //!< Output frame height in pixels.
    float fps;                //!< Output framerate in frames per second.
    FOURCCMap compression;    //!< Compression FOURCC. YUY2 and mJPEG are supported for now.
};
如何實現對UVC設備的支持?硬件編碼器的哪些參數可以控制?
謝謝。
您正在通過DirectShow API使用攝像頭。相機要麼以您已經熟悉的標準方式(通過'IAMStreamConfig',如YUY2、MJPG那樣)公開H264能力,要麼以某種特定於該型號的供應商專有方式(SDK等)公開。 –
謝謝。是否有任何可靠的信息來源,哪些攝像頭通過DirectShow API公開硬件編碼的H264? –
如果攝像機以「標準方式」展示了H.264捕獲上限,您應該可以通過列舉其功能來查看它。看到它的方法之一是['DirectShowCaptureCapabilities'](http://www.alax.info/svn/public/trunk/Toolbox/)工具,通過'IAMStreamConfig'界面顯示你編程的所有東西。另一種方法是使用['GraphStudioNext'](https://code.google.com/p/graph-studio-next/)插入捕獲過濾器,並通過引腳介質類型進行漫遊。無論採用哪種方式,您都應該看到「H264」或「AVC1」子類型或「biCompression」值。 –