Implemented the VfwCapture driver interface (qcap_driver_*) and added a Video4Linux v4l1 capture backend.
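
This replaces the function-pointer table that capture.h used to expose with a
flat set of qcap_driver_* entry points, implements them for Video4Linux 1 in
the new v4l.c, and falls back to stubs when the v4l headers are not available
at compile time. As a rough usage sketch (illustrative only, not part of the
patch), a caller such as the VfwCapture filter drives the new interface like
this:

    /* Illustrative sketch of the qcap_driver_* interface added below;
     * pOutputPin stands for the filter's output pin, and card 0 maps
     * to /dev/video0. */
    FILTER_STATE state = State_Stopped;
    Capture *driver = qcap_driver_init(pOutputPin, 0);
    if (driver)
    {
        qcap_driver_run(driver, &state);   /* commits the allocator, starts the capture thread */
        /* ... frames are delivered to the connected downstream pin ... */
        qcap_driver_stop(driver, &state);  /* stops the thread, decommits the allocator */
        qcap_driver_destroy(driver);
    }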
diff --git a/dlls/qcap/Makefile.in b/dlls/qcap/Makefile.in
index f22cca4..525384f 100644
--- a/dlls/qcap/Makefile.in
+++ b/dlls/qcap/Makefile.in
@@ -4,7 +4,7 @@
VPATH = @srcdir@
MODULE = qcap.dll
IMPORTLIB = libqcap.$(IMPLIBEXT)
-IMPORTS = ole32 oleaut32 user32 advapi32 kernel32
+IMPORTS = ole32 oleaut32 user32 gdi32 advapi32 kernel32
EXTRALIBS = -lstrmiids -luuid $(LIBUNICODE)
C_SRCS = \
@@ -14,6 +14,7 @@
enumpins.c \
pin.c \
qcap_main.c \
+ v4l.c \
vfwcapture.c \
yuv.c
diff --git a/dlls/qcap/capture.h b/dlls/qcap/capture.h
index 150c5f1..1c74933 100644
--- a/dlls/qcap/capture.h
+++ b/dlls/qcap/capture.h
@@ -23,29 +23,18 @@
#ifndef __QCAP_CAPTURE_H__
#define __QCAP_CAPTURE_H__
-typedef HRESULT (* Video_Destroy)(void *pBox);
-typedef HRESULT (* Video_SetMediaType)(void *pBox, AM_MEDIA_TYPE * mT);
-typedef HRESULT (* Video_GetMediaType)(void *pBox, AM_MEDIA_TYPE ** mT);
-typedef HRESULT (* Video_GetPropRange)(void *pBox, long Property, long *pMin, long *pMax, long *pSteppingDelta, long *pDefault, long *pCapsFlags);
-typedef HRESULT (* Video_GetProp)(void *pBox, long Property, long *lValue, long *Flags);
-typedef HRESULT (* Video_SetProp)(void *pBox, long Property, long lValue, long Flags);
-typedef HRESULT (* Video_Control)(void *pBox, FILTER_STATE *state);
+struct _Capture;
+typedef struct _Capture Capture;
-typedef struct capturefunctions {
- Video_Destroy Destroy;
- Video_SetMediaType SetFormat;
- Video_GetMediaType GetFormat;
- Video_GetPropRange GetPropRange;
- Video_GetProp Get_Prop;
- Video_SetProp Set_Prop;
- Video_Control Run, Pause, Stop;
- void *pMine;
-} Capture;
-
-typedef HRESULT (* Video_Init)(Capture *pBox, IPin *pOut, USHORT card);
-HRESULT V4l_Init(Capture *pBox, IPin *pOut, USHORT card);
-
-#define INVOKE(from, func, para...) from->func(from->pMine, para)
-#define INVOKENP(from, func) from->func(from->pMine)
+Capture *qcap_driver_init(IPin*,USHORT);
+HRESULT qcap_driver_destroy(Capture*);
+HRESULT qcap_driver_set_format(Capture*,AM_MEDIA_TYPE*);
+HRESULT qcap_driver_get_format(Capture*,AM_MEDIA_TYPE**);
+HRESULT qcap_driver_get_prop_range(Capture*,long,long*,long*,long*,long*,long*);
+HRESULT qcap_driver_get_prop(Capture*,long,long*,long*);
+HRESULT qcap_driver_set_prop(Capture*,long,long,long);
+HRESULT qcap_driver_run(Capture*,FILTER_STATE*);
+HRESULT qcap_driver_pause(Capture*,FILTER_STATE*);
+HRESULT qcap_driver_stop(Capture*,FILTER_STATE*);
#endif /* __QCAP_CAPTURE_H__ */
diff --git a/dlls/qcap/v4l.c b/dlls/qcap/v4l.c
new file mode 100644
index 0000000..3b16e31
--- /dev/null
+++ b/dlls/qcap/v4l.c
@@ -0,0 +1,963 @@
+/*
+ * DirectShow capture services (QCAP.DLL)
+ *
+ * Copyright 2005 Maarten Lankhorst
+ *
+ * This file contains the part of the vfw capture interface that
+ * does the actual Video4Linux (1/2) work required for capturing
+ * and getting/setting the media format.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+
+#include "config.h"
+
+#define NONAMELESSSTRUCT
+#define NONAMELESSUNION
+#define COBJMACROS
+
+#include <stdarg.h>
+#include "windef.h"
+#include "winbase.h"
+#include "wtypes.h"
+#include "wingdi.h"
+#include "winuser.h"
+#include "dshow.h"
+#include "vfwmsgs.h"
+#include "amvideo.h"
+#include "wine/debug.h"
+
+#include "capture.h"
+#include "qcap_main.h"
+#include "pin.h"
+
+#include <stdio.h>
+#include <fcntl.h>
+
+#ifdef HAVE_SYS_IOCTL_H
+#include <sys/ioctl.h>
+#endif
+#ifdef HAVE_SYS_MMAN_H
+#include <sys/mman.h>
+#endif
+#ifdef HAVE_SYS_ERRNO_H
+#include <sys/errno.h>
+#endif
+#ifdef HAVE_SYS_TIME_H
+#include <sys/time.h>
+#endif
+#ifdef HAVE_ASM_TYPES_H
+#include <asm/types.h>
+#endif
+#ifdef HAVE_LINUX_VIDEODEV_H
+#include <linux/videodev.h>
+#endif
+#ifdef HAVE_UNISTD_H
+#include <unistd.h>
+#endif
+
+WINE_DEFAULT_DEBUG_CHANNEL(qcap_v4l);
+
+#ifdef HAVE_LINUX_VIDEODEV_H
+
+typedef void (* Renderer)(Capture *, LPBYTE bufferin, LPBYTE stream);
+
+struct _Capture
+{
+ UINT width, height, bitDepth, fps, outputwidth, outputheight;
+ BOOL swresize;
+
+ CRITICAL_SECTION CritSect;
+
+ IPin *pOut;
+ int fd, mmap;
+ int iscommitted, stopped;
+ struct video_picture pict;
+ int dbrightness, dhue, dcolour, dcontrast;
+
+ /* mmap (V4l1) */
+ struct video_mmap *grab_buf;
+ struct video_mbuf gb_buffers;
+ unsigned char *pmap;
+ int buffers;
+
+ /* read (V4l1) */
+ int imagesize;
+ char * grab_data;
+
+ int curframe;
+
+ HANDLE thread;
+ Renderer renderer;
+};
+
+struct renderlist
+{
+ int depth;
+ const char* name;
+ Renderer renderer;
+};
+
+static void renderer_RGB(Capture *capBox, LPBYTE bufferin, LPBYTE stream);
+static void renderer_YUV(Capture *capBox, LPBYTE bufferin, LPBYTE stream);
+
+static const struct renderlist renderlist_V4l[] = {
+ { 0, "NULL renderer", NULL },
+ { 8, "Gray scales", NULL }, /* 1, Don't support */
+ { 0, "High 240 cube (BT848)", NULL }, /* 2, Don't support */
+ { 16, "16 bit RGB (565)", NULL }, /* 3, Don't support */
+ { 24, "24 bit RGB values", renderer_RGB }, /* 4, Supported, */
+ { 32, "32 bit RGB values", renderer_RGB }, /* 5, Supported */
+ { 16, "15 bit RGB (555)", NULL }, /* 6, Don't support */
+ { 16, "YUV 422 (Not P)", renderer_YUV }, /* 7, Supported */
+ { 16, "YUYV (Not P)", renderer_YUV }, /* 8, Supported */
+ { 16, "UYVY (Not P)", renderer_YUV }, /* 9, Supported */
+ { 16, "YUV 420 (Not P)", NULL }, /* 10, Not supported, if I had to guess it's YYUYYV */
+ { 12, "YUV 411 (Not P)", renderer_YUV }, /* 11, Supported */
+ { 0, "Raw capturing (BT848)", NULL }, /* 12, Don't support */
+ { 16, "YUV 422 (Planar)", renderer_YUV }, /* 13, Supported */
+ { 12, "YUV 411 (Planar)", renderer_YUV }, /* 14, Supported */
+ { 12, "YUV 420 (Planar)", renderer_YUV }, /* 15, Supported */
+ { 10, "YUV 410 (Planar)", renderer_YUV }, /* 16, Supported */
+ /* FIXME: add YUV420 support */
+ { 0, NULL, NULL },
+};
+
+const int fallback_V4l[] = { 4, 5, 7, 8, 9, 13, 15, 14, 16, 11, -1 };
+/* Fallback order: try the raw RGB formats first, then the YUV formats (perhaps YUV should be preferred?) */
+
+/* static const Capture defbox; */
+
+static int xioctl(int fd, int request, void * arg)
+{
+ int r;
+
+ do {
+ r = ioctl (fd, request, arg);
+ } while (-1 == r && EINTR == errno);
+
+ return r;
+}
+
+/* Prepare the capture buffers */
+static HRESULT V4l_Prepare(Capture *capBox)
+{
+ TRACE("%p: Preparing for %dx%d resolution\n", capBox, capBox->width, capBox->height);
+
+ /* Try mmap */
+ capBox->mmap = 0;
+ if (xioctl(capBox->fd, VIDIOCGMBUF, &capBox->gb_buffers) != -1 &&
+ capBox->gb_buffers.frames)
+ {
+ capBox->buffers = capBox->gb_buffers.frames;
+ if (capBox->gb_buffers.frames > 1)
+ capBox->buffers = 1;
+ TRACE("%p: Using %d/%d buffers\n", capBox,
+ capBox->buffers, capBox->gb_buffers.frames);
+
+ capBox->pmap = mmap( 0, capBox->gb_buffers.size, PROT_READ|PROT_WRITE,
+ MAP_SHARED, capBox->fd, 0 );
+ if (capBox->pmap != MAP_FAILED)
+ {
+ int i;
+
+ capBox->grab_buf = CoTaskMemAlloc(sizeof(struct video_mmap) * capBox->buffers);
+ if (!capBox->grab_buf)
+ {
+ munmap(capBox->pmap, capBox->gb_buffers.size);
+ return E_OUTOFMEMORY;
+ }
+
+ /* Setup mmap capture buffers. */
+ for (i = 0; i < capBox->buffers; i++)
+ {
+ capBox->grab_buf[i].format = capBox->pict.palette;
+ capBox->grab_buf[i].frame = i;
+ capBox->grab_buf[i].width = capBox->width;
+ capBox->grab_buf[i].height = capBox->height;
+ }
+ capBox->mmap = 1;
+ }
+ }
+ if (!capBox->mmap)
+ {
+ capBox->buffers = 1;
+ capBox->imagesize = renderlist_V4l[capBox->pict.palette].depth *
+ capBox->height * capBox->width / 8;
+ capBox->grab_data = CoTaskMemAlloc(capBox->imagesize);
+ if (!capBox->grab_data)
+ return E_OUTOFMEMORY;
+ }
+ TRACE("Using mmap: %d\n", capBox->mmap);
+ return S_OK;
+}
+
+static void V4l_Unprepare(Capture *capBox)
+{
+ if (capBox->mmap)
+ {
+ for (capBox->curframe = 0; capBox->curframe < capBox->buffers; capBox->curframe++)
+ xioctl(capBox->fd, VIDIOCSYNC, &capBox->grab_buf[capBox->curframe]);
+ munmap(capBox->pmap, capBox->gb_buffers.size);
+ CoTaskMemFree(capBox->grab_buf);
+ }
+ else
+ CoTaskMemFree(capBox->grab_data);
+}
+
+HRESULT qcap_driver_destroy(Capture *capBox)
+{
+ TRACE("%p\n", capBox);
+
+ if( capBox->fd != -1 )
+ close(capBox->fd);
+ DeleteCriticalSection(&capBox->CritSect);
+ CoTaskMemFree(capBox);
+ return S_OK;
+}
+
+HRESULT qcap_driver_set_format(Capture *capBox, AM_MEDIA_TYPE * mT)
+{
+ int newheight, newwidth;
+ struct video_window window;
+ VIDEOINFOHEADER *format;
+
+ TRACE("%p\n", capBox);
+
+ format = (VIDEOINFOHEADER *) mT->pbFormat;
+ if (format->bmiHeader.biBitCount != 24 ||
+ format->bmiHeader.biCompression != BI_RGB)
+ {
+ FIXME("unsupported media type %d %ld\n", format->bmiHeader.biBitCount,
+ format->bmiHeader.biCompression );
+ return VFW_E_INVALIDMEDIATYPE;
+ }
+
+ newwidth = format->bmiHeader.biWidth;
+ newheight = format->bmiHeader.biHeight;
+
+ TRACE("%p -> (%p) - %d %d\n", capBox, mT, newwidth, newheight);
+
+ if (capBox->height == newheight && capBox->width == newwidth)
+ return S_OK;
+
+ if(-1 == xioctl(capBox->fd, VIDIOCGWIN, &window))
+ {
+ ERR("ioctl(VIDIOCGWIN) failed (%d)\n", errno);
+ return E_FAIL;
+ }
+ window.width = newwidth;
+ window.height = newheight;
+ if (xioctl(capBox->fd, VIDIOCSWIN, &window) == -1)
+ {
+ TRACE("using software resize: %dx%d -> %dx%d\n",
+ window.width, window.height, capBox->width, capBox->height);
+ capBox->swresize = TRUE;
+ }
+ else
+ {
+ capBox->height = window.height;
+ capBox->width = window.width;
+ capBox->swresize = FALSE;
+ }
+ capBox->outputwidth = window.width;
+ capBox->outputheight = window.height;
+ return S_OK;
+}
+
+HRESULT qcap_driver_get_format(Capture *capBox, AM_MEDIA_TYPE ** mT)
+{
+ VIDEOINFOHEADER *vi;
+
+ mT[0] = CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE));
+ if (!mT[0])
+ return E_OUTOFMEMORY;
+ vi = CoTaskMemAlloc(sizeof(VIDEOINFOHEADER));
+ mT[0]->cbFormat = sizeof(VIDEOINFOHEADER);
+ if (!vi)
+ {
+ CoTaskMemFree(mT[0]);
+ return E_OUTOFMEMORY;
+ }
+ memcpy(&mT[0]->majortype, &MEDIATYPE_Video, sizeof(GUID));
+ memcpy(&mT[0]->subtype, &MEDIASUBTYPE_RGB24, sizeof(GUID));
+ memcpy(&mT[0]->formattype, &FORMAT_VideoInfo, sizeof(GUID));
+ mT[0]->bFixedSizeSamples = TRUE;
+ mT[0]->bTemporalCompression = FALSE;
+ mT[0]->pUnk = NULL;
+ mT[0]->lSampleSize = capBox->outputwidth * capBox->outputheight * capBox->bitDepth / 8;
+ TRACE("Output format: %dx%d - %d bits = %lu KB\n", capBox->outputwidth,
+ capBox->outputheight, capBox->bitDepth, mT[0]->lSampleSize/1024);
+ vi->rcSource.left = 0; vi->rcSource.top = 0;
+ vi->rcTarget.left = 0; vi->rcTarget.top = 0;
+ vi->rcSource.right = capBox->width; vi->rcSource.bottom = capBox->height;
+ vi->rcTarget.right = capBox->outputwidth; vi->rcTarget.bottom = capBox->outputheight;
+ vi->dwBitRate = capBox->fps * mT[0]->lSampleSize;
+ vi->dwBitErrorRate = 0;
+ vi->AvgTimePerFrame = (LONGLONG)10000000.0 / (LONGLONG)capBox->fps;
+ vi->bmiHeader.biSize = 40;
+ vi->bmiHeader.biWidth = capBox->outputwidth;
+ vi->bmiHeader.biHeight = capBox->outputheight;
+ vi->bmiHeader.biPlanes = 1;
+ vi->bmiHeader.biBitCount = 24;
+ vi->bmiHeader.biCompression = BI_RGB;
+ vi->bmiHeader.biSizeImage = mT[0]->lSampleSize;
+ vi->bmiHeader.biClrUsed = vi->bmiHeader.biClrImportant = 0;
+ vi->bmiHeader.biXPelsPerMeter = 100;
+ vi->bmiHeader.biYPelsPerMeter = 100;
+ mT[0]->pbFormat = (void *)vi;
+ dump_AM_MEDIA_TYPE(mT[0]);
+ return S_OK;
+}
+
+HRESULT qcap_driver_get_prop_range( Capture *capBox, long Property, long *pMin,
+ long *pMax, long *pSteppingDelta, long *pDefault, long *pCapsFlags )
+{
+ TRACE("%p -> %ld %p %p %p %p %p\n", capBox, Property,
+ pMin, pMax, pSteppingDelta, pDefault, pCapsFlags);
+
+ switch (Property)
+ {
+ case VideoProcAmp_Brightness:
+ *pDefault = capBox->dbrightness;
+ break;
+ case VideoProcAmp_Contrast:
+ *pDefault = capBox->dcontrast;
+ break;
+ case VideoProcAmp_Hue:
+ *pDefault = capBox->dhue;
+ break;
+ case VideoProcAmp_Saturation:
+ *pDefault = capBox->dcolour;
+ break;
+ default:
+ FIXME("Not implemented %ld\n", Property);
+ return E_NOTIMPL;
+ }
+ *pMin = 0;
+ *pMax = 65535;
+ *pSteppingDelta = 65536/256;
+ *pCapsFlags = VideoProcAmp_Flags_Manual;
+ return S_OK;
+}
+
+HRESULT qcap_driver_get_prop( Capture *capBox, long Property, long *lValue, long *Flags )
+{
+ TRACE("%p -> %ld %p %p\n", capBox, Property, lValue, Flags);
+
+ switch (Property)
+ {
+ case VideoProcAmp_Brightness:
+ *lValue = capBox->pict.brightness;
+ break;
+ case VideoProcAmp_Contrast:
+ *lValue = capBox->pict.contrast;
+ break;
+ case VideoProcAmp_Hue:
+ *lValue = capBox->pict.hue;
+ break;
+ case VideoProcAmp_Saturation:
+ *lValue = capBox->pict.colour;
+ break;
+ default:
+ FIXME("Not implemented %ld\n", Property);
+ return E_NOTIMPL;
+ }
+ *Flags = VideoProcAmp_Flags_Manual;
+ return S_OK;
+}
+
+HRESULT qcap_driver_set_prop(Capture *capBox, long Property, long lValue, long Flags)
+{
+ TRACE("%p -> %ld %ld %ld\n", capBox, Property, lValue, Flags);
+
+ switch (Property)
+ {
+ case VideoProcAmp_Brightness:
+ capBox->pict.brightness = lValue;
+ break;
+ case VideoProcAmp_Contrast:
+ capBox->pict.contrast = lValue;
+ break;
+ case VideoProcAmp_Hue:
+ capBox->pict.hue = lValue;
+ break;
+ case VideoProcAmp_Saturation:
+ capBox->pict.colour = lValue;
+ break;
+ default:
+ FIXME("Not implemented %ld\n", Property);
+ return E_NOTIMPL;
+ }
+
+ if (xioctl(capBox->fd, VIDIOCSPICT, &capBox->pict) == -1)
+ {
+ ERR("ioctl(VIDIOCSPICT) failed (%d)\n",errno);
+ return E_FAIL;
+ }
+ return S_OK;
+}
+
+static void renderer_RGB(Capture *capBox, LPBYTE bufferin, LPBYTE stream)
+{
+ int depth = renderlist_V4l[capBox->pict.palette].depth;
+ int size = capBox->height * capBox->width * depth / 8;
+ int pointer, offset;
+
+ switch (depth)
+ {
+ case 24:
+ memcpy(bufferin, stream, size);
+ break;
+ case 32:
+ pointer = 0;
+ offset = 1;
+ while (pointer + offset <= size)
+ {
+ bufferin[pointer] = stream[pointer + offset];
+ pointer++;
+ bufferin[pointer] = stream[pointer + offset];
+ pointer++;
+ bufferin[pointer] = stream[pointer + offset];
+ pointer++;
+ offset++;
+ }
+ break;
+ default:
+ ERR("Unknown bit depth %d\n", depth);
+ return;
+ }
+}
+
+static void renderer_YUV(Capture *capBox, LPBYTE bufferin, LPBYTE stream)
+{
+ enum YUV_Format format;
+
+ switch (capBox->pict.palette)
+ {
+ case 7: /* YUV422 - same as YUYV */
+ case 8: /* YUYV */
+ format = YUYV;
+ break;
+ case 9: /* UYVY */
+ format = UYVY;
+ break;
+ case 11: /* YUV411 */
+ format = UYYVYY;
+ break;
+ case 13: /* YUV422P */
+ format = YUVP_421;
+ break;
+ case 14: /* YUV411P */
+ format = YUVP_441;
+ break;
+ case 15: /* YUV420P */
+ format = YUVP_422;
+ break;
+ case 16: /* YUV410P */
+ format = YUVP_444;
+ break;
+ default:
+ ERR("Unknown palette %d\n", capBox->pict.palette);
+ return;
+ }
+ YUV_To_RGB24(format, bufferin, stream, capBox->width, capBox->height);
+}
+
+static void Resize(Capture * capBox, LPBYTE output, LPBYTE input)
+{
+    /* the image needs to be flipped vertically, because
+       Windows DIBs are stored bottom-up */
+ if (!capBox->swresize)
+ {
+ int depth = capBox->bitDepth / 8;
+ int inoffset = 0, outoffset = capBox->height * capBox->width * depth;
+ int ow = capBox->width * depth;
+ while (outoffset > 0)
+ {
+ int x;
+ outoffset -= ow;
+ for (x = 0; x < ow; x++)
+ output[outoffset + x] = input[inoffset + x];
+ inoffset += ow;
+ }
+ }
+ else
+ {
+ HDC dc_s, dc_d;
+ HBITMAP bmp_s, bmp_d;
+ int depth = capBox->bitDepth / 8;
+ int inoffset = 0, outoffset = (capBox->outputheight) * capBox->outputwidth * depth;
+ int ow = capBox->outputwidth * depth;
+ LPBYTE myarray;
+
+ /* FIXME: Improve software resizing: add error checks and optimize */
+
+ myarray = CoTaskMemAlloc(capBox->outputwidth * capBox->outputheight * depth);
+ dc_s = CreateCompatibleDC(NULL);
+ dc_d = CreateCompatibleDC(NULL);
+ bmp_s = CreateBitmap(capBox->width, capBox->height, 1, capBox->bitDepth, input);
+ bmp_d = CreateBitmap(capBox->outputwidth, capBox->outputheight, 1, capBox->bitDepth, NULL);
+ SelectObject(dc_s, bmp_s);
+ SelectObject(dc_d, bmp_d);
+ StretchBlt(dc_d, 0, 0, capBox->outputwidth, capBox->outputheight,
+ dc_s, 0, 0, capBox->width, capBox->height, SRCCOPY);
+ GetBitmapBits(bmp_d, capBox->outputwidth * capBox->outputheight * depth, myarray);
+ while (outoffset > 0)
+ {
+ int i;
+
+ outoffset -= ow;
+ for (i = 0; i < ow; i++)
+ output[outoffset + i] = myarray[inoffset + i];
+ inoffset += ow;
+ }
+ CoTaskMemFree(myarray);
+ DeleteObject(dc_s);
+ DeleteObject(dc_d);
+ DeleteObject(bmp_s);
+ DeleteObject(bmp_d);
+ }
+}
+
+static void V4l_GetFrame(Capture * capBox, unsigned char ** pInput)
+{
+    if (capBox->mmap)
+ {
+ if (xioctl(capBox->fd, VIDIOCSYNC, &capBox->grab_buf[capBox->curframe]) == -1)
+ WARN("Syncing ioctl failed: %d\n", errno);
+
+ *pInput = capBox->pmap + capBox->gb_buffers.offsets[capBox->curframe];
+ }
+ else
+ {
+ int retval;
+ while ((retval = read(capBox->fd, capBox->grab_data, capBox->imagesize)) == -1)
+ if (errno != EAGAIN) break;
+ if (retval == -1)
+ WARN("Error occured while reading from device: %s\n", strerror(errno));
+ *pInput = capBox->grab_data;
+ }
+}
+
+static void V4l_FreeFrame(Capture * capBox)
+{
+ TRACE("\n");
+ if (capBox->mmap)
+ {
+ if (xioctl(capBox->fd, VIDIOCMCAPTURE, &capBox->grab_buf[capBox->curframe]) == -1)
+ ERR("Freeing frame for capture failed: %s\n", strerror(errno));
+ }
+ if (++capBox->curframe == capBox->buffers)
+ capBox->curframe = 0;
+}
+
+static DWORD WINAPI ReadThread(LPVOID lParam)
+{
+ Capture * capBox = (Capture *)lParam;
+ HRESULT hr;
+ IMediaSample *pSample = NULL;
+ unsigned long framecount = 0;
+ unsigned char *pTarget, *pInput, *pOutput;
+
+ hr = V4l_Prepare(capBox);
+ if (FAILED(hr))
+ goto fail;
+
+ pOutput = CoTaskMemAlloc(capBox->width * capBox->height * capBox->bitDepth / 8);
+ capBox->curframe = 0;
+ do {
+ V4l_FreeFrame(capBox);
+ } while (capBox->curframe != 0);
+
+ while (1)
+ {
+ EnterCriticalSection(&capBox->CritSect);
+ if (capBox->stopped)
+ break;
+ hr = OutputPin_GetDeliveryBuffer((OutputPin *)capBox->pOut, &pSample, NULL, NULL, 0);
+ if (SUCCEEDED(hr))
+ {
+ int len;
+
+ if (!capBox->swresize)
+ len = capBox->height * capBox->width * capBox->bitDepth / 8;
+ else
+ len = capBox->outputheight * capBox->outputwidth * capBox->bitDepth / 8;
+ IMediaSample_SetActualDataLength(pSample, len);
+
+ len = IMediaSample_GetActualDataLength(pSample);
+ TRACE("Data length: %d KB\n", len / 1024);
+
+ IMediaSample_GetPointer(pSample, &pTarget);
+ /* FIXME: Check return values.. */
+ V4l_GetFrame(capBox, &pInput);
+ capBox->renderer(capBox, pOutput, pInput);
+ Resize(capBox, pTarget, pOutput);
+ hr = OutputPin_SendSample((OutputPin *)capBox->pOut, pSample);
+ TRACE("%p -> Frame %lu: %lx\n", capBox, ++framecount, hr);
+ IMediaSample_Release(pSample);
+ V4l_FreeFrame(capBox);
+ }
+ LeaveCriticalSection(&capBox->CritSect);
+ if (FAILED(hr) && hr != VFW_E_NOT_CONNECTED)
+ {
+ ERR("Received error: %lx\n", hr);
+ goto cfail;
+ }
+ }
+ LeaveCriticalSection(&capBox->CritSect);
+ CoTaskMemFree(pOutput);
+
+ return 0;
+
+cfail:
+ CoTaskMemFree(pOutput);
+ V4l_Unprepare(capBox);
+ LeaveCriticalSection(&capBox->CritSect);
+
+fail:
+ capBox->thread = 0; capBox->stopped = 1;
+ FIXME("Stop IFilterGraph\n");
+ return 0;
+}
+
+HRESULT qcap_driver_run(Capture *capBox, FILTER_STATE *state)
+{
+ HANDLE thread;
+ HRESULT hr;
+
+ TRACE("%p -> (%p)\n", capBox, state);
+
+ if (*state == State_Running) return S_OK;
+
+ EnterCriticalSection(&capBox->CritSect);
+
+ capBox->stopped = 0;
+
+ if (*state == State_Stopped)
+ {
+ *state = State_Running;
+ if (!capBox->iscommitted++)
+ {
+ IMemAllocator * pAlloc = NULL;
+ ALLOCATOR_PROPERTIES ap, actual;
+ OutputPin *out;
+
+ ap.cBuffers = 3;
+ if (!capBox->swresize)
+ ap.cbBuffer = capBox->width * capBox->height;
+ else
+ ap.cbBuffer = capBox->outputwidth * capBox->outputheight;
+ ap.cbBuffer = (ap.cbBuffer * capBox->bitDepth) / 8;
+ ap.cbAlign = 1;
+ ap.cbPrefix = 0;
+
+ out = (OutputPin *)capBox->pOut;
+ hr = IMemInputPin_GetAllocator(out->pMemInputPin, &pAlloc);
+
+ if (SUCCEEDED(hr))
+ hr = IMemAllocator_SetProperties(pAlloc, &ap, &actual);
+
+ if (SUCCEEDED(hr))
+ hr = IMemAllocator_Commit(pAlloc);
+
+ if (pAlloc)
+ IMemAllocator_Release(pAlloc);
+
+ TRACE("Committing allocator: %lx\n", hr);
+ }
+
+ thread = CreateThread(NULL, 0, ReadThread, capBox, 0, NULL);
+ if (thread)
+ {
+ capBox->thread = thread;
+ SetThreadPriority(thread, THREAD_PRIORITY_LOWEST);
+ LeaveCriticalSection(&capBox->CritSect);
+ return S_OK;
+ }
+ ERR("Creating thread failed.. %lx\n", GetLastError());
+ LeaveCriticalSection(&capBox->CritSect);
+ return E_FAIL;
+ }
+
+ ResumeThread(capBox->thread);
+ *state = State_Running;
+ LeaveCriticalSection(&capBox->CritSect);
+ return S_OK;
+}
+
+HRESULT qcap_driver_pause(Capture *capBox, FILTER_STATE *state)
+{
+ TRACE("%p -> (%p)\n", capBox, state);
+
+ if (*state == State_Paused)
+ return S_OK;
+ if (*state == State_Stopped)
+ qcap_driver_run(capBox, state);
+
+ EnterCriticalSection(&capBox->CritSect);
+ *state = State_Paused;
+ SuspendThread(capBox->thread);
+ LeaveCriticalSection(&capBox->CritSect);
+
+ return S_OK;
+}
+
+HRESULT qcap_driver_stop(Capture *capBox, FILTER_STATE *state)
+{
+ TRACE("%p -> (%p)\n", capBox, state);
+
+ if (*state == State_Stopped)
+ return S_OK;
+
+ EnterCriticalSection(&capBox->CritSect);
+
+ if (capBox->thread)
+ {
+ if (*state == State_Paused)
+ ResumeThread(capBox->thread);
+ capBox->stopped = 1;
+ capBox->thread = 0;
+ if (capBox->iscommitted)
+ {
+ IMemInputPin *pMem = NULL;
+ IMemAllocator * pAlloc = NULL;
+ IPin *pConnect = NULL;
+ HRESULT hr;
+
+ capBox->iscommitted = 0;
+
+ hr = IPin_ConnectedTo(capBox->pOut, &pConnect);
+
+ if (SUCCEEDED(hr))
+ hr = IPin_QueryInterface(pConnect, &IID_IMemInputPin, (void **) &pMem);
+
+ if (SUCCEEDED(hr))
+ hr = IMemInputPin_GetAllocator(pMem, &pAlloc);
+
+ if (SUCCEEDED(hr))
+ hr = IMemAllocator_Decommit(pAlloc);
+
+ if (pAlloc)
+ IMemAllocator_Release(pAlloc);
+
+ if (pMem)
+ IMemInputPin_Release(pMem);
+
+ if (pConnect)
+ IPin_Release(pConnect);
+
+ if (hr != S_OK && hr != VFW_E_NOT_COMMITTED)
+ WARN("Decommitting allocator: %lx\n", hr);
+ }
+ V4l_Unprepare(capBox);
+ }
+
+ *state = State_Stopped;
+ LeaveCriticalSection(&capBox->CritSect);
+ return S_OK;
+}
+
+Capture * qcap_driver_init( IPin *pOut, USHORT card )
+{
+ Capture * capBox = NULL;
+ char device[20];
+ struct video_capability capa;
+ struct video_picture pict;
+ struct video_window window;
+
+ YUV_Init();
+
+ capBox = CoTaskMemAlloc(sizeof(Capture));
+ if (!capBox)
+ goto error;
+
+ /* capBox->vtbl = &defboxVtbl; */
+
+ InitializeCriticalSection( &capBox->CritSect );
+
+ sprintf(device, "/dev/video%i", card);
+ TRACE("opening %s\n", device);
+ capBox->fd = open(device, O_RDWR | O_NONBLOCK);
+ if (capBox->fd == -1)
+ {
+ WARN("open failed (%d)\n", errno);
+ goto error;
+ }
+
+ memset(&capa, 0, sizeof(capa));
+
+ if (xioctl(capBox->fd, VIDIOCGCAP, &capa) == -1)
+ {
+ WARN("ioctl(VIDIOCGCAP) failed (%d)\n", errno);
+ goto error;
+ }
+
+ if (!(capa.type & VID_TYPE_CAPTURE))
+ {
+ WARN("not a video capture device\n");
+ goto error;
+ }
+
+ TRACE("%d inputs on %s\n", capa.channels, capa.name );
+
+ if (xioctl(capBox->fd, VIDIOCGPICT, &pict) == -1)
+ {
+ ERR("ioctl(VIDIOCGPICT) failed (%d)\n", errno );
+ goto error;
+ }
+
+ TRACE("depth %d palette %d (%s) hue %d color %d contrast %d\n",
+ pict.depth, pict.palette, renderlist_V4l[pict.palette].name,
+ pict.hue, pict.colour, pict.contrast );
+
+ capBox->dbrightness = pict.brightness;
+ capBox->dcolour = pict.colour;
+ capBox->dhue = pict.hue;
+ capBox->dcontrast = pict.contrast;
+
+ if (!renderlist_V4l[pict.palette].renderer)
+ {
+ int palet = pict.palette, i;
+
+ TRACE("No renderer available for %s, falling back to defaults\n",
+ renderlist_V4l[pict.palette].name);
+ capBox->renderer = NULL;
+ for (i = 0; fallback_V4l[i] >=0 ; i++)
+ {
+ int n = fallback_V4l[i];
+
+ if (renderlist_V4l[n].renderer == NULL)
+ continue;
+
+ pict.depth = renderlist_V4l[n].depth;
+ pict.palette = n;
+ if (xioctl(capBox->fd, VIDIOCSPICT, &pict) == -1)
+ {
+ TRACE("Could not render with %s (%d)\n",
+ renderlist_V4l[n].name, n);
+ continue;
+ }
+ TRACE("using renderer %s (%d)\n",
+ renderlist_V4l[n].name, n);
+ capBox->renderer = renderlist_V4l[n].renderer;
+ break;
+ }
+
+ if (!capBox->renderer)
+ {
+ ERR("video format %s isn't available\n",
+ renderlist_V4l[palet].name);
+ goto error;
+ }
+ }
+ else
+ {
+ TRACE("Using the suggested format\n");
+ capBox->renderer = renderlist_V4l[pict.palette].renderer;
+ }
+ memcpy(&capBox->pict, &pict, sizeof(struct video_picture));
+
+ memset(&window, 0, sizeof(window));
+ if (xioctl(capBox->fd, VIDIOCGWIN, &window) == -1)
+ {
+ WARN("VIDIOCGWIN failed (%d)\n", errno);
+ goto error;
+ }
+
+ capBox->height = capBox->outputheight = window.height;
+ capBox->width = capBox->outputwidth = window.width;
+ capBox->swresize = FALSE;
+ capBox->bitDepth = 24;
+ capBox->pOut = pOut;
+ capBox->fps = 3;
+ capBox->stopped = 0;
+ capBox->curframe = 0;
+ capBox->iscommitted = 0;
+
+ TRACE("format: %d bits - %d x %d\n", capBox->bitDepth, capBox->width, capBox->height);
+
+ return (Capture*) capBox;
+
+error:
+ if (capBox)
+ qcap_driver_destroy( (Capture*) capBox );
+
+ return NULL;
+}
+
+#else
+
+Capture * qcap_driver_init( IPin *pOut, USHORT card )
+{
+ const char msg[] =
+ "The v4l headers were not available at compile time,\n"
+ "so video capture support is not available.\n";
+ MESSAGE(msg);
+ return NULL;
+}
+
+#define FAIL_WITH_ERR \
+ ERR("v4l absent: shouldn't be called\n"); \
+ return E_NOTIMPL
+
+HRESULT qcap_driver_destroy(Capture *capBox)
+{
+ FAIL_WITH_ERR;
+}
+
+HRESULT qcap_driver_set_format(Capture *capBox, AM_MEDIA_TYPE * mT)
+{
+ FAIL_WITH_ERR;
+}
+
+HRESULT qcap_driver_get_format(Capture *capBox, AM_MEDIA_TYPE ** mT)
+{
+ FAIL_WITH_ERR;
+}
+
+HRESULT qcap_driver_get_prop_range( Capture *capBox, long Property, long *pMin,
+ long *pMax, long *pSteppingDelta, long *pDefault, long *pCapsFlags )
+{
+ FAIL_WITH_ERR;
+}
+
+HRESULT qcap_driver_get_prop(Capture *capBox, long Property, long *lValue, long *Flags)
+{
+ FAIL_WITH_ERR;
+}
+
+HRESULT qcap_driver_set_prop(Capture *capBox, long Property, long lValue, long Flags)
+{
+ FAIL_WITH_ERR;
+}
+
+HRESULT qcap_driver_run(Capture *capBox, FILTER_STATE *state)
+{
+ FAIL_WITH_ERR;
+}
+
+HRESULT qcap_driver_pause(Capture *capBox, FILTER_STATE *state)
+{
+ FAIL_WITH_ERR;
+}
+
+HRESULT qcap_driver_stop(Capture *capBox, FILTER_STATE *state)
+{
+ FAIL_WITH_ERR;
+}
+
+#endif /* HAVE_LINUX_VIDEODEV_H */
diff --git a/dlls/qcap/vfwcapture.c b/dlls/qcap/vfwcapture.c
index e0162d4..a0c71a0 100644
--- a/dlls/qcap/vfwcapture.c
+++ b/dlls/qcap/vfwcapture.c
@@ -67,7 +67,7 @@
const struct IPersistPropertyBagVtbl * IPersistPropertyBag_vtbl;
BOOL init;
- Capture *myCap;
+ Capture *driver_info;
ULONG refCount;
FILTER_INFO filterInfo;
FILTER_STATE state;
@@ -80,38 +80,10 @@
typedef struct VfwPinImpl
{
OutputPin pin;
- Capture *myCap;
+ Capture *driver_info;
IKsPropertySetVtbl * KSP_VT;
} VfwPinImpl;
-static const Video_Init Constructors[] =
-{
- /* V4l_Init, */
- NULL
-};
-
-static HRESULT Capture_Initialise(Capture **dimi, IPin *pOut, USHORT card)
-{
- HRESULT r = E_FAIL;
- Capture *driver;
- int i;
-
- TRACE("%p %p %hu\n", dimi, pOut, card);
-
- driver = CoTaskMemAlloc( sizeof(Capture) );
- if (!driver)
- return E_OUTOFMEMORY;
-
- for( i=0; FAILED(r) && Constructors[i]; i++ )
- r = Constructors[i]( driver, pOut, card );
-
- if( SUCCEEDED(r) )
- *dimi = driver;
- else
- CoTaskMemFree( driver );
-
- return r;
-}
IUnknown * WINAPI QCAP_createVFWCaptureFilter(IUnknown *pUnkOuter, HRESULT *phr)
{
@@ -181,7 +153,8 @@
{
FIXME("Capture system not initialised when looking for %s, "
"trying it on primary device now\n", debugstr_guid(riid));
- if (FAILED(Capture_Initialise(&This->myCap, (IPin *)This->pOutputPin, 0)))
+ This->driver_info = qcap_driver_init( This->pOutputPin, 0 );
+ if (!This->driver_info)
{
ERR("VfwCapture initialisation failed\n");
return E_UNEXPECTED;
@@ -225,8 +198,8 @@
if (This->init)
{
if (This->state != State_Stopped)
- INVOKE(This->myCap, Stop, &This->state);
- INVOKENP(This->myCap, Destroy);
+ qcap_driver_stop(This->driver_info, &This->state);
+ qcap_driver_destroy(This->driver_info);
}
pin = (IPinImpl*) This->pOutputPin;
if (pin->pConnectedTo != NULL)
@@ -259,7 +232,7 @@
VfwCapture *This = (VfwCapture *)iface;
TRACE("()\n");
- return INVOKE(This->myCap, Stop, &This->state);
+ return qcap_driver_stop(This->driver_info, &This->state);
}
static HRESULT WINAPI VfwCapture_Pause(IBaseFilter * iface)
@@ -267,14 +240,14 @@
VfwCapture *This = (VfwCapture *)iface;
TRACE("()\n");
- return INVOKE(This->myCap, Pause, &This->state);
+ return qcap_driver_pause(This->driver_info, &This->state);
}
static HRESULT WINAPI VfwCapture_Run(IBaseFilter * iface, REFERENCE_TIME tStart)
{
VfwCapture *This = (VfwCapture *)iface;
TRACE("(%lx%08lx)\n", (ULONG)(tStart >> 32), (ULONG)tStart);
- return INVOKE(This->myCap, Run, &This->state);
+ return qcap_driver_run(This->driver_info, &This->state);
}
static HRESULT WINAPI
@@ -444,7 +417,7 @@
return VFW_E_INVALIDMEDIATYPE;
}
- hr = INVOKE(This->myCap, SetFormat, pmt);
+ hr = qcap_driver_set_format(This->driver_info, pmt);
if (SUCCEEDED(hr) && This->filterInfo.pGraph && pin->pConnectedTo )
{
hr = IFilterGraph_Reconnect(This->filterInfo.pGraph, This->pOutputPin);
@@ -461,7 +434,7 @@
ICOM_THIS_MULTI(VfwCapture, IAMStreamConfig_vtbl, iface);
TRACE("%p -> (%p)\n", iface, pmt);
- return INVOKE(This->myCap, GetFormat, pmt);
+ return qcap_driver_get_format(This->driver_info, pmt);
}
static HRESULT WINAPI
@@ -527,7 +500,7 @@
{
ICOM_THIS_MULTI(VfwCapture, IAMVideoProcAmp_vtbl, iface);
- return INVOKE( This->myCap, GetPropRange, Property, pMin, pMax,
+ return qcap_driver_get_prop_range( This->driver_info, Property, pMin, pMax,
pSteppingDelta, pDefault, pCapsFlags );
}
@@ -537,7 +510,7 @@
{
ICOM_THIS_MULTI(VfwCapture, IAMVideoProcAmp_vtbl, iface);
- return INVOKE(This->myCap, Set_Prop, Property, lValue, Flags);
+ return qcap_driver_set_prop(This->driver_info, Property, lValue, Flags);
}
static HRESULT WINAPI
@@ -546,7 +519,7 @@
{
ICOM_THIS_MULTI(VfwCapture, IAMVideoProcAmp_vtbl, iface);
- return INVOKE(This->myCap, Get_Prop, Property, lValue, Flags);
+ return qcap_driver_get_prop(This->driver_info, Property, lValue, Flags);
}
static const IAMVideoProcAmpVtbl IAMVideoProcAmp_VTable =
@@ -637,12 +610,17 @@
{
VfwPinImpl *pin;
- hr = Capture_Initialise(&This->myCap, This->pOutputPin,
- (USHORT)var.__VARIANT_NAME_1.__VARIANT_NAME_2.__VARIANT_NAME_3.ulVal);
- pin = (VfwPinImpl *)This->pOutputPin;
- pin->myCap = This->myCap;
- if (SUCCEEDED(hr))
+ This->driver_info = qcap_driver_init( This->pOutputPin,
+ var.__VARIANT_NAME_1.__VARIANT_NAME_2.__VARIANT_NAME_3.ulVal );
+ if (This->driver_info)
+ {
+ pin = (VfwPinImpl *)This->pOutputPin;
+ pin->driver_info = This->driver_info;
This->init = TRUE;
+ hr = S_OK;
+ }
+ else
+ hr = E_FAIL;
}
return hr;
@@ -848,7 +826,7 @@
VfwPinImpl *This = (VfwPinImpl *)iface;
emd.cMediaTypes = 1;
- hr = INVOKE(This->myCap, GetFormat, &pmt);
+ hr = qcap_driver_get_format(This->driver_info, &pmt);
emd.pMediaTypes = pmt;
if (SUCCEEDED(hr))
hr = IEnumMediaTypesImpl_Construct(&emd, ppEnum);