qcap: Use a function table for video capture driver operations.

Signed-off-by: Zebediah Figura <z.figura12@gmail.com>
Signed-off-by: Alexandre Julliard <julliard@winehq.org>
Zebediah Figura authored on 2020-06-29 10:26:16 -05:00; committed by Alexandre Julliard
parent 9e035293b1
commit 8ad3707443
3 changed files with 151 additions and 178 deletions
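In effect, the capture filter stops calling the qcap_driver_*() functions directly (for example qcap_driver_set_prop(filter->driver_info, property, value, flags)) and instead dispatches through a per-device function table: filter->device->ops->set_prop(filter->device, property, value, flags). The following standalone sketch shows the shape of that pattern; the struct names mirror the diff below, while the sketch_* callbacks and main() are invented for illustration and are not part of this commit.

```c
/* Standalone sketch of the function-table pattern introduced below; the
 * sketch_* callbacks and main() are illustrative, not Wine code. */
#include <stdio.h>

struct video_capture_device;

struct video_capture_device_ops
{
    void (*init_stream)(struct video_capture_device *device);
    void (*destroy)(struct video_capture_device *device);
};

struct video_capture_device
{
    const struct video_capture_device_ops *ops;
};

static void sketch_init_stream(struct video_capture_device *device)
{
    printf("init_stream dispatched through the ops table\n");
}

static void sketch_destroy(struct video_capture_device *device)
{
    printf("destroy dispatched through the ops table\n");
}

static const struct video_capture_device_ops sketch_ops =
{
    .init_stream = sketch_init_stream,
    .destroy = sketch_destroy,
};

int main(void)
{
    struct video_capture_device device = { &sketch_ops };

    /* The caller only knows the base struct; the table decides what runs. */
    device.ops->init_stream(&device);
    device.ops->destroy(&device);
    return 0;
}
```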


@@ -40,23 +40,29 @@ HRESULT file_writer_create(IUnknown *outer, IUnknown **out) DECLSPEC_HIDDEN;
HRESULT smart_tee_create(IUnknown *outer, IUnknown **out) DECLSPEC_HIDDEN;
HRESULT vfw_capture_create(IUnknown *outer, IUnknown **out) DECLSPEC_HIDDEN;
typedef struct _Capture Capture;
struct video_capture_device
{
const struct video_capture_device_ops *ops;
};
Capture *qcap_driver_init(struct strmbase_source *pin, USHORT card) DECLSPEC_HIDDEN;
HRESULT qcap_driver_destroy(Capture *device) DECLSPEC_HIDDEN;
HRESULT qcap_driver_check_format(Capture *device, const AM_MEDIA_TYPE *mt) DECLSPEC_HIDDEN;
HRESULT qcap_driver_set_format(Capture *device, AM_MEDIA_TYPE *mt) DECLSPEC_HIDDEN;
HRESULT qcap_driver_get_caps(Capture *device, LONG index, AM_MEDIA_TYPE **mt,
VIDEO_STREAM_CONFIG_CAPS *vscc) DECLSPEC_HIDDEN;
LONG qcap_driver_get_caps_count(Capture *device) DECLSPEC_HIDDEN;
HRESULT qcap_driver_get_format(const Capture *device, AM_MEDIA_TYPE **mt) DECLSPEC_HIDDEN;
HRESULT qcap_driver_get_prop_range(Capture *device, VideoProcAmpProperty property,
LONG *min, LONG *max, LONG *step, LONG *default_value, LONG *flags) DECLSPEC_HIDDEN;
HRESULT qcap_driver_get_prop(Capture *device, VideoProcAmpProperty property, LONG *value, LONG *flags) DECLSPEC_HIDDEN;
HRESULT qcap_driver_set_prop(Capture *device, VideoProcAmpProperty property, LONG value, LONG flags) DECLSPEC_HIDDEN;
void qcap_driver_init_stream(Capture *device) DECLSPEC_HIDDEN;
void qcap_driver_start_stream(Capture *device) DECLSPEC_HIDDEN;
void qcap_driver_stop_stream(Capture *device) DECLSPEC_HIDDEN;
void qcap_driver_cleanup_stream(Capture *device) DECLSPEC_HIDDEN;
struct video_capture_device_ops
{
void (*destroy)(struct video_capture_device *device);
HRESULT (*check_format)(struct video_capture_device *device, const AM_MEDIA_TYPE *mt);
HRESULT (*set_format)(struct video_capture_device *device, const AM_MEDIA_TYPE *mt);
HRESULT (*get_format)(struct video_capture_device *device, AM_MEDIA_TYPE **mt);
HRESULT (*get_caps)(struct video_capture_device *device, LONG index, AM_MEDIA_TYPE **mt, VIDEO_STREAM_CONFIG_CAPS *caps);
LONG (*get_caps_count)(struct video_capture_device *device);
HRESULT (*get_prop_range)(struct video_capture_device *device, VideoProcAmpProperty property,
LONG *min, LONG *max, LONG *step, LONG *default_value, LONG *flags);
HRESULT (*get_prop)(struct video_capture_device *device, VideoProcAmpProperty property, LONG *value, LONG *flags);
HRESULT (*set_prop)(struct video_capture_device *device, VideoProcAmpProperty property, LONG value, LONG flags);
void (*init_stream)(struct video_capture_device *device);
void (*start_stream)(struct video_capture_device *device);
void (*stop_stream)(struct video_capture_device *device);
void (*cleanup_stream)(struct video_capture_device *device);
};
struct video_capture_device *v4l_device_create(struct strmbase_source *pin, USHORT card);
#endif
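With the table defined in this header, the capture filter (the VfwCapture code in the third file below) only ever holds a struct video_capture_device pointer, so another backend could later be added by supplying its own ops table and create function, with no further changes to the filter. A hypothetical standalone sketch of that idea follows; the "dummy" backend and use_device() are invented names, not part of this commit.

```c
/* Hypothetical, standalone sketch: two backends behind one ops table.
 * Only the v4l naming echoes the real code; everything else is invented. */
#include <stdio.h>

struct video_capture_device;

struct video_capture_device_ops
{
    const char *(*name)(struct video_capture_device *device);
    void (*destroy)(struct video_capture_device *device);
};

struct video_capture_device
{
    const struct video_capture_device_ops *ops;
};

static const char *v4l_name(struct video_capture_device *device)   { return "v4l"; }
static const char *dummy_name(struct video_capture_device *device) { return "dummy"; }
static void no_destroy(struct video_capture_device *device) { /* nothing allocated in this sketch */ }

static const struct video_capture_device_ops v4l_ops   = { .name = v4l_name,   .destroy = no_destroy };
static const struct video_capture_device_ops dummy_ops = { .name = dummy_name, .destroy = no_destroy };

/* Stands in for the filter: it never needs to know which backend it got. */
static void use_device(struct video_capture_device *device)
{
    printf("capturing with the %s backend\n", device->ops->name(device));
    device->ops->destroy(device);
}

int main(void)
{
    struct video_capture_device v4l = { &v4l_ops }, dummy = { &dummy_ops };
    use_device(&v4l);
    use_device(&dummy);
    return 0;
}
```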


@@ -88,8 +88,10 @@ struct caps
VIDEO_STREAM_CONFIG_CAPS config;
};
struct _Capture
struct v4l_device
{
struct video_capture_device d;
const struct caps *current_caps;
struct caps *caps;
LONG caps_count;
@@ -101,6 +103,11 @@ struct _Capture
HANDLE thread, run_event;
};
static inline struct v4l_device *v4l_device(struct video_capture_device *iface)
{
return CONTAINING_RECORD(iface, struct v4l_device, d);
}
static int xioctl(int fd, int request, void * arg)
{
int r;
@@ -112,18 +119,18 @@ static int xioctl(int fd, int request, void * arg)
return r;
}
HRESULT qcap_driver_destroy(Capture *device)
static void v4l_device_destroy(struct video_capture_device *iface)
{
struct v4l_device *device = v4l_device(iface);
if (device->fd != -1)
video_close(device->fd);
if (device->caps_count)
heap_free(device->caps);
heap_free(device);
return S_OK;
}
static const struct caps *find_caps(Capture *device, const AM_MEDIA_TYPE *mt)
static const struct caps *find_caps(struct v4l_device *device, const AM_MEDIA_TYPE *mt)
{
const VIDEOINFOHEADER *video_info = (VIDEOINFOHEADER *)mt->pbFormat;
LONG index;
@@ -143,8 +150,10 @@ static const struct caps *find_caps(Capture *device, const AM_MEDIA_TYPE *mt)
return NULL;
}
HRESULT qcap_driver_check_format(Capture *device, const AM_MEDIA_TYPE *mt)
static HRESULT v4l_device_check_format(struct video_capture_device *iface, const AM_MEDIA_TYPE *mt)
{
struct v4l_device *device = v4l_device(iface);
TRACE("device %p, mt %p.\n", device, mt);
if (!mt)
@@ -159,7 +168,7 @@ HRESULT qcap_driver_check_format(Capture *device, const AM_MEDIA_TYPE *mt)
return E_FAIL;
}
static BOOL set_caps(Capture *device, const struct caps *caps)
static BOOL set_caps(struct v4l_device *device, const struct caps *caps)
{
struct v4l2_format format = {0};
LONG width, height;
@@ -185,8 +194,9 @@ static BOOL set_caps(Capture *device, const struct caps *caps)
return TRUE;
}
HRESULT qcap_driver_set_format(Capture *device, AM_MEDIA_TYPE *mt)
static HRESULT v4l_device_set_format(struct video_capture_device *iface, const AM_MEDIA_TYPE *mt)
{
struct v4l_device *device = v4l_device(iface);
const struct caps *caps;
caps = find_caps(device, mt);
@@ -202,8 +212,10 @@ HRESULT qcap_driver_set_format(Capture *device, AM_MEDIA_TYPE *mt)
return S_OK;
}
HRESULT qcap_driver_get_format(const Capture *device, AM_MEDIA_TYPE **mt)
static HRESULT v4l_device_get_format(struct video_capture_device *iface, AM_MEDIA_TYPE **mt)
{
struct v4l_device *device = v4l_device(iface);
*mt = CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE));
if (!*mt)
return E_OUTOFMEMORY;
@@ -229,9 +241,10 @@ static __u32 v4l2_cid_from_qcap_property(VideoProcAmpProperty property)
}
}
HRESULT qcap_driver_get_prop_range(Capture *device, VideoProcAmpProperty property,
static HRESULT v4l_device_get_prop_range(struct video_capture_device *iface, VideoProcAmpProperty property,
LONG *min, LONG *max, LONG *step, LONG *default_value, LONG *flags)
{
struct v4l_device *device = v4l_device(iface);
struct v4l2_queryctrl ctrl;
ctrl.id = v4l2_cid_from_qcap_property(property);
@@ -250,9 +263,10 @@ HRESULT qcap_driver_get_prop_range(Capture *device, VideoProcAmpProperty propert
return S_OK;
}
HRESULT qcap_driver_get_prop(Capture *device, VideoProcAmpProperty property,
LONG *value, LONG *flags)
static HRESULT v4l_device_get_prop(struct video_capture_device *iface,
VideoProcAmpProperty property, LONG *value, LONG *flags)
{
struct v4l_device *device = v4l_device(iface);
struct v4l2_control ctrl;
ctrl.id = v4l2_cid_from_qcap_property(property);
@@ -269,9 +283,10 @@ HRESULT qcap_driver_get_prop(Capture *device, VideoProcAmpProperty property,
return S_OK;
}
HRESULT qcap_driver_set_prop(Capture *device, VideoProcAmpProperty property,
LONG value, LONG flags)
static HRESULT v4l_device_set_prop(struct video_capture_device *iface,
VideoProcAmpProperty property, LONG value, LONG flags)
{
struct v4l_device *device = v4l_device(iface);
struct v4l2_control ctrl;
ctrl.id = v4l2_cid_from_qcap_property(property);
@@ -286,7 +301,7 @@ HRESULT qcap_driver_set_prop(Capture *device, VideoProcAmpProperty property,
return S_OK;
}
static void reverse_image(const Capture *device, LPBYTE output, const BYTE *input)
static void reverse_image(struct v4l_device *device, LPBYTE output, const BYTE *input)
{
int inoffset, outoffset, pitch;
UINT width, height, depth;
@@ -311,7 +326,7 @@ static void reverse_image(const Capture *device, LPBYTE output, const BYTE *inpu
static DWORD WINAPI ReadThread(LPVOID lParam)
{
Capture * capBox = lParam;
struct v4l_device *capBox = lParam;
HRESULT hr;
IMediaSample *pSample = NULL;
ULONG framecount = 0;
@@ -372,8 +387,9 @@ static DWORD WINAPI ReadThread(LPVOID lParam)
return 0;
}
void qcap_driver_init_stream(Capture *device)
static void v4l_device_init_stream(struct video_capture_device *iface)
{
struct v4l_device *device = v4l_device(iface);
ALLOCATOR_PROPERTIES req_props, ret_props;
HRESULT hr;
@@ -397,20 +413,23 @@ void qcap_driver_init_stream(Capture *device)
device->thread = CreateThread(NULL, 0, ReadThread, device, 0, NULL);
}
void qcap_driver_start_stream(Capture *device)
static void v4l_device_start_stream(struct video_capture_device *iface)
{
struct v4l_device *device = v4l_device(iface);
device->state = State_Running;
SetEvent(device->run_event);
}
void qcap_driver_stop_stream(Capture *device)
static void v4l_device_stop_stream(struct video_capture_device *iface)
{
struct v4l_device *device = v4l_device(iface);
device->state = State_Paused;
ResetEvent(device->run_event);
}
void qcap_driver_cleanup_stream(Capture *device)
static void v4l_device_cleanup_stream(struct video_capture_device *iface)
{
struct v4l_device *device = v4l_device(iface);
HRESULT hr;
device->state = State_Stopped;
@@ -460,12 +479,53 @@ static void fill_caps(__u32 pixelformat, __u32 width, __u32 height,
caps->pixelformat = pixelformat;
}
Capture *qcap_driver_init(struct strmbase_source *pin, USHORT card)
static HRESULT v4l_device_get_caps(struct video_capture_device *iface, LONG index,
AM_MEDIA_TYPE **type, VIDEO_STREAM_CONFIG_CAPS *vscc)
{
struct v4l_device *device = v4l_device(iface);
if (index >= device->caps_count)
return S_FALSE;
*type = CreateMediaType(&device->caps[index].media_type);
if (!*type)
return E_OUTOFMEMORY;
if (vscc)
memcpy(vscc, &device->caps[index].config, sizeof(VIDEO_STREAM_CONFIG_CAPS));
return S_OK;
}
static LONG v4l_device_get_caps_count(struct video_capture_device *iface)
{
struct v4l_device *device = v4l_device(iface);
return device->caps_count;
}
static const struct video_capture_device_ops v4l_device_ops =
{
.destroy = v4l_device_destroy,
.check_format = v4l_device_check_format,
.set_format = v4l_device_set_format,
.get_format = v4l_device_get_format,
.get_caps = v4l_device_get_caps,
.get_caps_count = v4l_device_get_caps_count,
.get_prop_range = v4l_device_get_prop_range,
.get_prop = v4l_device_get_prop,
.set_prop = v4l_device_set_prop,
.init_stream = v4l_device_init_stream,
.start_stream = v4l_device_start_stream,
.stop_stream = v4l_device_stop_stream,
.cleanup_stream = v4l_device_cleanup_stream,
};
struct video_capture_device *v4l_device_create(struct strmbase_source *pin, USHORT card)
{
struct v4l2_frmsizeenum frmsize = {0};
struct v4l2_capability caps = {{0}};
struct v4l2_format format = {0};
Capture *device = NULL;
struct v4l_device *device;
BOOL have_libv4l2;
char path[20];
int fd, i;
@@ -597,6 +657,7 @@ Capture *qcap_driver_init(struct strmbase_source *pin, USHORT card)
goto error;
}
device->d.ops = &v4l_device_ops;
device->pin = pin;
device->state = State_Stopped;
device->run_event = CreateEventW(NULL, TRUE, FALSE, NULL);
@@ -605,117 +666,19 @@ Capture *qcap_driver_init(struct strmbase_source *pin, USHORT card)
device->current_caps->video_info.bmiHeader.biWidth,
device->current_caps->video_info.bmiHeader.biHeight);
return device;
return &device->d;
error:
qcap_driver_destroy(device);
v4l_device_destroy(&device->d);
return NULL;
}
HRESULT qcap_driver_get_caps(Capture *device, LONG index, AM_MEDIA_TYPE **type,
VIDEO_STREAM_CONFIG_CAPS *vscc)
{
if (index >= device->caps_count)
return S_FALSE;
*type = CreateMediaType(&device->caps[index].media_type);
if (!*type)
return E_OUTOFMEMORY;
if (vscc)
memcpy(vscc, &device->caps[index].config, sizeof(VIDEO_STREAM_CONFIG_CAPS));
return S_OK;
}
LONG qcap_driver_get_caps_count(Capture *device)
{
return device->caps_count;
}
#else
Capture *qcap_driver_init(struct strmbase_source *pin, USHORT card)
struct video_capture_device *v4l_device_create(struct strmbase_source *pin, USHORT card)
{
static const char msg[] =
"The v4l headers were not available at compile time,\n"
"so video capture support is not available.\n";
MESSAGE(msg);
ERR("v4l2 was not present at compilation time.\n");
return NULL;
}
#define FAIL_WITH_ERR \
ERR("v4l absent: shouldn't be called\n"); \
return E_NOTIMPL
HRESULT qcap_driver_destroy(Capture *capBox)
{
FAIL_WITH_ERR;
}
HRESULT qcap_driver_check_format(Capture *device, const AM_MEDIA_TYPE *mt)
{
FAIL_WITH_ERR;
}
HRESULT qcap_driver_set_format(Capture *capBox, AM_MEDIA_TYPE * mT)
{
FAIL_WITH_ERR;
}
HRESULT qcap_driver_get_format(const Capture *capBox, AM_MEDIA_TYPE ** mT)
{
FAIL_WITH_ERR;
}
HRESULT qcap_driver_get_prop_range( Capture *capBox,
VideoProcAmpProperty Property, LONG *pMin, LONG *pMax,
LONG *pSteppingDelta, LONG *pDefault, LONG *pCapsFlags )
{
FAIL_WITH_ERR;
}
HRESULT qcap_driver_get_prop(Capture *capBox,
VideoProcAmpProperty Property, LONG *lValue, LONG *Flags)
{
FAIL_WITH_ERR;
}
HRESULT qcap_driver_set_prop(Capture *capBox, VideoProcAmpProperty Property,
LONG lValue, LONG Flags)
{
FAIL_WITH_ERR;
}
void qcap_driver_init_stream(Capture *device)
{
ERR("v4l absent: shouldn't be called\n");
}
void qcap_driver_start_stream(Capture *device)
{
ERR("v4l absent: shouldn't be called\n");
}
void qcap_driver_stop_stream(Capture *device)
{
ERR("v4l absent: shouldn't be called\n");
}
void qcap_driver_cleanup_stream(Capture *device)
{
ERR("v4l absent: shouldn't be called\n");
}
HRESULT qcap_driver_get_caps(Capture *device, LONG index, AM_MEDIA_TYPE **type,
VIDEO_STREAM_CONFIG_CAPS *vscc)
{
FAIL_WITH_ERR;
}
LONG qcap_driver_get_caps_count(Capture *device)
{
ERR("v4l absent: shouldn't be called\n");
return 0;
}
#endif /* defined(VIDIOCMCAPTURE) */
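The v4l_device() helper added above uses CONTAINING_RECORD (from the Windows headers Wine provides) to get from the embedded struct video_capture_device member back to the enclosing struct v4l_device. Below is a standalone sketch of that container-of idiom; the macro is a local stand-in for CONTAINING_RECORD, and the struct is a simplified stand-in for the real v4l_device.

```c
/* Standalone sketch of the CONTAINING_RECORD idiom used by v4l_device();
 * the macro here is a local stand-in for the one in the Windows headers. */
#include <assert.h>
#include <stddef.h>

#define CONTAINING_RECORD(address, type, field) \
    ((type *)((char *)(address) - offsetof(type, field)))

struct video_capture_device
{
    const void *ops;                /* stand-in for the real ops pointer */
};

struct v4l_device
{
    struct video_capture_device d;  /* embedded base; CONTAINING_RECORD works
                                       even if this is not the first member */
    int fd;                         /* simplified backend-specific state */
};

static struct v4l_device *v4l_device(struct video_capture_device *iface)
{
    return CONTAINING_RECORD(iface, struct v4l_device, d);
}

int main(void)
{
    struct v4l_device dev = { { NULL }, 42 };

    /* The filter only ever holds &dev.d; the backend recovers dev from it. */
    assert(v4l_device(&dev.d) == &dev);
    assert(v4l_device(&dev.d)->fd == 42);
    return 0;
}
```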


@@ -31,7 +31,8 @@ typedef struct VfwCapture
IAMFilterMiscFlags IAMFilterMiscFlags_iface;
IPersistPropertyBag IPersistPropertyBag_iface;
BOOL init;
Capture *driver_info;
struct video_capture_device *device;
struct strmbase_source source;
IKsPropertySet IKsPropertySet_iface;
@@ -84,8 +85,8 @@ static void vfw_capture_destroy(struct strmbase_filter *iface)
if (filter->init)
{
if (filter->filter.state != State_Stopped)
qcap_driver_stop_stream(filter->driver_info);
qcap_driver_destroy(filter->driver_info);
filter->device->ops->stop_stream(filter->device);
filter->device->ops->destroy(filter->device);
}
if (filter->source.pin.peer)
@@ -122,7 +123,7 @@ static HRESULT vfw_capture_init_stream(struct strmbase_filter *iface)
{
VfwCapture *filter = impl_from_strmbase_filter(iface);
qcap_driver_init_stream(filter->driver_info);
filter->device->ops->init_stream(filter->device);
return VFW_S_CANT_CUE;
}
@@ -130,7 +131,7 @@ static HRESULT vfw_capture_start_stream(struct strmbase_filter *iface, REFERENCE
{
VfwCapture *filter = impl_from_strmbase_filter(iface);
qcap_driver_start_stream(filter->driver_info);
filter->device->ops->start_stream(filter->device);
return S_OK;
}
@@ -138,7 +139,7 @@ static HRESULT vfw_capture_stop_stream(struct strmbase_filter *iface)
{
VfwCapture *filter = impl_from_strmbase_filter(iface);
qcap_driver_stop_stream(filter->driver_info);
filter->device->ops->stop_stream(filter->device);
return VFW_S_CANT_CUE;
}
@@ -146,7 +147,7 @@ static HRESULT vfw_capture_cleanup_stream(struct strmbase_filter *iface)
{
VfwCapture *filter = impl_from_strmbase_filter(iface);
qcap_driver_cleanup_stream(filter->driver_info);
filter->device->ops->cleanup_stream(filter->device);
return S_OK;
}
@@ -211,7 +212,7 @@ AMStreamConfig_SetFormat(IAMStreamConfig *iface, AM_MEDIA_TYPE *pmt)
return VFW_E_INVALIDMEDIATYPE;
}
hr = qcap_driver_set_format(This->driver_info, pmt);
hr = This->device->ops->set_format(This->device, pmt);
if (SUCCEEDED(hr) && This->filter.graph && This->source.pin.peer)
{
hr = IFilterGraph_Reconnect(This->filter.graph, &This->source.pin.IPin_iface);
@@ -222,14 +223,14 @@ AMStreamConfig_SetFormat(IAMStreamConfig *iface, AM_MEDIA_TYPE *pmt)
return hr;
}
static HRESULT WINAPI
AMStreamConfig_GetFormat( IAMStreamConfig *iface, AM_MEDIA_TYPE **pmt )
static HRESULT WINAPI AMStreamConfig_GetFormat(IAMStreamConfig *iface, AM_MEDIA_TYPE **pmt)
{
VfwCapture *This = impl_from_IAMStreamConfig(iface);
VfwCapture *filter = impl_from_IAMStreamConfig(iface);
HRESULT hr;
TRACE("%p -> (%p)\n", iface, pmt);
hr = qcap_driver_get_format(This->driver_info, pmt);
TRACE("filter %p, mt %p.\n", filter, pmt);
hr = filter->device->ops->get_format(filter->device, pmt);
if (SUCCEEDED(hr))
strmbase_dump_media_type(*pmt);
return hr;
@@ -245,7 +246,7 @@ static HRESULT WINAPI AMStreamConfig_GetNumberOfCapabilities(IAMStreamConfig *if
if (!count || !size)
return E_POINTER;
*count = qcap_driver_get_caps_count(filter->driver_info);
*count = filter->device->ops->get_caps_count(filter->device);
*size = sizeof(VIDEO_STREAM_CONFIG_CAPS);
return S_OK;
@@ -258,7 +259,7 @@ static HRESULT WINAPI AMStreamConfig_GetStreamCaps(IAMStreamConfig *iface,
TRACE("filter %p, index %d, pmt %p, vscc %p.\n", filter, index, pmt, vscc);
return qcap_driver_get_caps(filter->driver_info, index, pmt, (VIDEO_STREAM_CONFIG_CAPS *)vscc);
return filter->device->ops->get_caps(filter->device, index, pmt, (VIDEO_STREAM_CONFIG_CAPS *)vscc);
}
static const IAMStreamConfigVtbl IAMStreamConfig_VTable =
@@ -294,32 +295,36 @@ static ULONG WINAPI AMVideoProcAmp_Release(IAMVideoProcAmp * iface)
return IUnknown_Release(This->filter.outer_unk);
}
static HRESULT WINAPI
AMVideoProcAmp_GetRange( IAMVideoProcAmp * iface, LONG Property, LONG *pMin,
LONG *pMax, LONG *pSteppingDelta, LONG *pDefault, LONG *pCapsFlags )
static HRESULT WINAPI AMVideoProcAmp_GetRange(IAMVideoProcAmp *iface, LONG property,
LONG *min, LONG *max, LONG *step, LONG *default_value, LONG *flags)
{
VfwCapture *This = impl_from_IAMVideoProcAmp(iface);
VfwCapture *filter = impl_from_IAMVideoProcAmp(iface);
return qcap_driver_get_prop_range( This->driver_info, Property, pMin, pMax,
pSteppingDelta, pDefault, pCapsFlags );
TRACE("filter %p, property %#x, min %p, max %p, step %p, default_value %p, flags %p.\n",
filter, property, min, max, step, default_value, flags);
return filter->device->ops->get_prop_range(filter->device, property, min,
max, step, default_value, flags);
}
static HRESULT WINAPI
AMVideoProcAmp_Set( IAMVideoProcAmp * iface, LONG Property, LONG lValue,
LONG Flags )
static HRESULT WINAPI AMVideoProcAmp_Set(IAMVideoProcAmp *iface, LONG property,
LONG value, LONG flags)
{
VfwCapture *This = impl_from_IAMVideoProcAmp(iface);
VfwCapture *filter = impl_from_IAMVideoProcAmp(iface);
return qcap_driver_set_prop(This->driver_info, Property, lValue, Flags);
TRACE("filter %p, property %#x, value %d, flags %#x.\n", filter, property, value, flags);
return filter->device->ops->set_prop(filter->device, property, value, flags);
}
static HRESULT WINAPI
AMVideoProcAmp_Get( IAMVideoProcAmp * iface, LONG Property, LONG *lValue,
LONG *Flags )
static HRESULT WINAPI AMVideoProcAmp_Get(IAMVideoProcAmp *iface, LONG property,
LONG *value, LONG *flags)
{
VfwCapture *This = impl_from_IAMVideoProcAmp(iface);
VfwCapture *filter = impl_from_IAMVideoProcAmp(iface);
return qcap_driver_get_prop(This->driver_info, Property, lValue, Flags);
TRACE("filter %p, property %#x, value %p, flags %p.\n", filter, property, value, flags);
return filter->device->ops->get_prop(filter->device, property, value, flags);
}
static const IAMVideoProcAmpVtbl IAMVideoProcAmp_VTable =
@@ -388,8 +393,7 @@ PPB_Load( IPersistPropertyBag * iface, IPropertyBag *pPropBag,
if (SUCCEEDED(hr))
{
This->driver_info = qcap_driver_init(&This->source, V_I4(&var));
if (This->driver_info)
if ((This->device = v4l_device_create(&This->source, V_I4(&var))))
{
This->init = TRUE;
hr = S_OK;
@@ -505,7 +509,7 @@ static inline VfwCapture *impl_from_strmbase_pin(struct strmbase_pin *pin)
static HRESULT source_query_accept(struct strmbase_pin *pin, const AM_MEDIA_TYPE *mt)
{
VfwCapture *filter = impl_from_strmbase_pin(pin);
return qcap_driver_check_format(filter->driver_info, mt);
return filter->device->ops->check_format(filter->device, mt);
}
static HRESULT source_get_media_type(struct strmbase_pin *pin,
@@ -515,11 +519,11 @@ static HRESULT source_get_media_type(struct strmbase_pin *pin,
AM_MEDIA_TYPE *vfw_pmt;
HRESULT hr;
if (index >= qcap_driver_get_caps_count(filter->driver_info))
if (index >= filter->device->ops->get_caps_count(filter->device))
return VFW_S_NO_MORE_ITEMS;
hr = qcap_driver_get_caps(filter->driver_info, index, &vfw_pmt, NULL);
if (SUCCEEDED(hr)) {
if (SUCCEEDED(hr = filter->device->ops->get_caps(filter->device, index, &vfw_pmt, NULL)))
{
CopyMediaType(pmt, vfw_pmt);
DeleteMediaType(vfw_pmt);
}