[QCAP] V4l driver

Maarten Lankhorst m.b.lankhorst at gmail.com
Fri May 20 18:50:29 CDT 2005


m33p, I hope this is the last time I have to submit it :/

Added v4l driver for vfwcapture
-------------- next part --------------
diff -Nur qcap/Makefile.in qcap-old/Makefile.in
--- Makefile.in	2005-05-21 01:44:09.000000000 +0200
+++ Makefile.in	2005-05-21 01:43:48.000000000 +0200
@@ -4,7 +4,7 @@
 VPATH     = @srcdir@
 MODULE    = qcap.dll
 IMPORTLIB = libqcap.$(IMPLIBEXT)
-IMPORTS   = ole32 oleaut32 user32 advapi32 kernel32
+IMPORTS   = ole32 oleaut32 user32 advapi32 kernel32 gdi32
 EXTRALIBS = -lstrmiids -luuid $(LIBUNICODE)
 
 C_SRCS = \
@@ -15,6 +15,7 @@
 	pin.c \
 	qcap_main.c \
 	vfwcapture.c \
+	v4l.c \
 	yuv.c
 
 RC_SRCS = version.rc
diff -Nur qcap/v4l.c qcap-old/v4l.c
--- v4l.c	1970-01-01 01:00:00.000000000 +0100
+++ v4l.c	2005-05-21 01:29:19.000000000 +0200
@@ -0,0 +1,782 @@
+/* DirectShow capture services (QCAP.DLL)
+ *
+ * Copyright 2005 Maarten Lankhorst
+ *
+ * This file contains the part of the vfw capture interface that
+ * does the actual Video4Linux(1/2) stuff required for capturing
+ * and setting/getting the media format.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+ */
+
+#include "config.h"
+#ifdef HAVE_LINUX_VIDEODEV_H
+
+#define NONAMELESSSTRUCT
+#define NONAMELESSUNION
+#define COBJMACROS
+
+#include <stdarg.h>
+#include "windef.h"
+#include "winbase.h"
+#include "wtypes.h"
+#include "wingdi.h"
+#include "winuser.h"
+#include "dshow.h"
+#include "vfwmsgs.h"
+#include "amvideo.h"
+#include "wine/debug.h"
+
+#include "capture.h"
+#include "yuv.h"
+#include "qcap_main.h"
+#include "pin.h"
+
+#include <stdio.h>
+
+#ifdef HAVE_SYS_IOCTL_H
+#include <sys/ioctl.h>
+#endif
+
+#ifdef HAVE_SYS_MMAN_H
+#include <sys/mman.h>
+#endif
+
+#ifdef HAVE_SYS_ERRNO_H
+#include <sys/errno.h>
+#endif
+
+#ifdef HAVE_SYS_TIME_H
+#include <sys/time.h>
+#endif
+
+#ifdef HAVE_ASM_TYPES_H
+#include <asm/types.h>
+#endif
+
+#include <linux/videodev.h>
+#include <fcntl.h>
+
+#ifdef HAVE_UNISTD_H
+#include <unistd.h>
+#endif
+
+WINE_DEFAULT_DEBUG_CHANNEL(qcap_v4l);
+
+struct CaptureBox;
+typedef void (* Renderer)(struct CaptureBox *, LPBYTE bufferin, LPBYTE stream);
+
+typedef struct CaptureBox {
+/* State for one open V4L capture device */
+   UINT width, height, bitDepth, fps, outputwidth, outputheight;
+   BOOL swresize;
+
+   CRITICAL_SECTION CritSect;
+
+   IPin *pOut;
+   int fd, mmap;
+   int iscommitted, stopped;
+   struct video_picture pict;
+   int dbrightness, dhue, dcolour, dcontrast;
+
+/* mmap (V4l1) */
+   struct video_mmap *grab_buf;
+   struct video_mbuf gb_buffers;
+   void *pmap;
+   int buffers;
+
+/* read (V4l1) */
+   int imagesize;
+   char * grab_data;
+
+   int curframe;
+
+   HANDLE thread;
+   Renderer renderer;
+} CaptureBox;
+
+struct renderlist {
+   int depth;
+   char* name;
+   Renderer renderer;
+};
+
+static void renderer_RGB(CaptureBox *capBox, LPBYTE bufferin, LPBYTE stream);
+static void renderer_YUV(CaptureBox *capBox, LPBYTE bufferin, LPBYTE stream);
+
+static const struct renderlist renderlist_V4l[] = {
+   {  0, "NULL renderer",               NULL },
+   {  8, "Gray scales",                 NULL }, /* 1,  Don't support  */
+   {  0, "High 240 cube (BT848)",       NULL }, /* 2,  Don't support  */
+   { 16, "16 bit RGB (565)",            NULL }, /* 3,  Don't support  */
+   { 24, "24 bit RGB values",   renderer_RGB }, /* 4,  Supported,     */
+   { 32, "32 bit RGB values",   renderer_RGB }, /* 5,  Supported      */
+   { 16, "15 bit RGB (555)",            NULL }, /* 6,  Don't support  */
+   { 16, "YUV 422 (Not P)",     renderer_YUV }, /* 7,  Supported */
+   { 16, "YUYV (Not P)",        renderer_YUV }, /* 8,  Supported */
+   { 16, "UYVY (Not P)",        renderer_YUV }, /* 9,  Supported */
+   { 16, "YUV 420 (Not P)", NULL }, /* 10, Not supported, if I had to guess it's YYUYYV */
+   { 12, "YUV 411 (Not P)",     renderer_YUV }, /* 11, Supported */
+   {  0, "Raw capturing (BT848)",       NULL }, /* 12, Don't support  */
+   { 16, "YUV 422 (Planar)",    renderer_YUV }, /* 13, Supported */
+   { 12, "YUV 411 (Planar)",    renderer_YUV }, /* 14, Supported */
+   { 12, "YUV 420 (Planar)",    renderer_YUV }, /* 15, Supported */
+   { 10, "YUV 410 (Planar)",    renderer_YUV }, /* 16, Supported */
+   {  0, NULL,                          NULL },
+/* The only reason YUV420 isn't supported is that I have no idea what it is or how it works */
+};
+
+const int fallback_V4l[] = { 4, 5, 7, 8, 9, 13, 15, 14, 16, 11 };
+/* Fallback order: try the RGB formats first, then the YUV formats (perhaps YUV should come first?) */
+
+static const Capture defbox;
+
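+/* ioctl() wrapper that retries if the call is interrupted by a signal (EINTR) */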
+static int xioctl(int fd, int request, void * arg)
+{
+   int r;
+
+   do r = ioctl (fd, request, arg);
+   while (-1 == r && EINTR == errno);
+
+   return r;
+}
+
+/* Prepare the capture buffers */
+static HRESULT V4l_Prepare(CaptureBox *capBox)
+{
+   TRACE("%p: Preparing for %dx%d resolution\n", capBox, capBox->width, capBox->height);
+
+/* Try mmap */
+   capBox->mmap = 0;
+   if (xioctl(capBox->fd, VIDIOCGMBUF, &capBox->gb_buffers) != -1 && capBox->gb_buffers.frames) {
+      capBox->buffers = capBox->gb_buffers.frames;
+      if (capBox->gb_buffers.frames > 1) {
+         TRACE("%p: %d buffers granted, but only using 1 anyway\n", capBox, capBox->gb_buffers.frames);
+         capBox->buffers = 1;
+      } else {
+         TRACE("%p: Using %d buffers\n", capBox, capBox->gb_buffers.frames);
+      }
+      capBox->pmap = mmap(0, capBox->gb_buffers.size, PROT_READ|PROT_WRITE, MAP_SHARED, capBox->fd, 0);
+      if (capBox->pmap != MAP_FAILED) {
+         int i;
+         capBox->grab_buf = CoTaskMemAlloc(sizeof(struct video_mmap) * capBox->buffers);
+
+         if(!capBox->grab_buf) {
+             munmap(capBox->pmap, capBox->gb_buffers.size);
+             return E_OUTOFMEMORY;
+         }
+
+         /* Setup mmap capture buffers. */
+         for (i = 0; i < capBox->buffers; i++) {
+            capBox->grab_buf[i].format = capBox->pict.palette;
+            capBox->grab_buf[i].frame = i;
+            capBox->grab_buf[i].width = capBox->width;
+            capBox->grab_buf[i].height = capBox->height;
+         }
+         capBox->mmap = 1;
+      }
+   }
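+   /* No mmap support: fall back to reading frames with read() into a single buffer */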
+   if (!capBox->mmap) {
+      capBox->buffers = 1;
+      capBox->imagesize = renderlist_V4l[capBox->pict.palette].depth * capBox->height * capBox->width / 8;
+      capBox->grab_data = CoTaskMemAlloc(capBox->imagesize);
+      if (!capBox->grab_data) return E_OUTOFMEMORY;
+   }
+   TRACE("Using mmap: %d\n", capBox->mmap);
+   return S_OK;
+}
+
+void V4l_Unprepare(CaptureBox *capBox)
+{
+   if (capBox->mmap) {
+      for (capBox->curframe = 0; capBox->curframe < capBox->buffers; capBox->curframe++) 
+         xioctl(capBox->fd, VIDIOCSYNC, &capBox->grab_buf[capBox->curframe]);
+      munmap(capBox->pmap, capBox->gb_buffers.size);
+      CoTaskMemFree(capBox->grab_buf);
+   }
+   else CoTaskMemFree(capBox->grab_data);
+}
+
+HRESULT V4l_Init(Capture *pBox, IPin *pOut, USHORT card)
+{
+   CaptureBox * capBox = CoTaskMemAlloc(sizeof(CaptureBox));
+   char device[128];
+   struct video_capability capa;
+   struct video_picture pict;
+   struct video_window window;
+
+   YUV_Init();
+   if (!capBox) return E_OUTOFMEMORY;
+   sprintf(device, "/dev/video%i", card);
+
+   capBox->fd = open(device, O_RDWR | O_NONBLOCK);
+   if (capBox->fd == -1) {
+      WARN("%s: Failed to open: %s\n", device, strerror(errno));
+      CoTaskMemFree(capBox);
+      return E_FAIL;
+   }
+
+   memset(&capa, 0, sizeof(capa));
+
+   if (xioctl(capBox->fd, VIDIOCGCAP, &capa) == -1) {
+      if (errno != EINVAL && errno != 515 /* ENOIOCTLCMD */) WARN("%s: Querying failed: %s\n", device, strerror(errno));
+      else WARN("%s: Querying failed: Not a V4L compatible device\n", device);
+      close(capBox->fd);
+      CoTaskMemFree(capBox);
+      return E_FAIL;
+   }
+
+   if (!(capa.type & VID_TYPE_CAPTURE)) {
+      WARN("%s: This is not a video capture device\n", device);
+      close(capBox->fd);
+      CoTaskMemFree(capBox);
+      return E_FAIL;
+   }
+   TRACE("Amount of inputs on %s: %d\n", capa.name, capa.channels);
+
+   if (xioctl(capBox->fd, VIDIOCGPICT, &pict) == -1) {
+      WARN("%s: Acquiring picture properties failed, giving up\n", device);
+      close(capBox->fd);
+      CoTaskMemFree(capBox);
+      return E_FAIL;
+   }
+
+   TRACE("%s: Suggested picture depth: %d, suggested picture palette: %d\n", device, pict.depth, pict.palette);
+   TRACE("%s: Hue %d, Color %d, Contrast %d\n", device, pict.hue,pict.colour,pict.contrast);
+   capBox->dbrightness = pict.brightness;
+   capBox->dcolour = pict.colour;
+   capBox->dhue = pict.hue;
+   capBox->dcontrast = pict.contrast;
+   TRACE("%s: Suggested format: \"%s\"\n", device, renderlist_V4l[pict.palette].name);
+   if (!renderlist_V4l[pict.palette].renderer)
+   {
+      int palet = pict.palette, formatenum;
+      WARN("No renderer available for \"%s\", trying to fall back to defaults\n", renderlist_V4l[pict.palette].name);
+      capBox->renderer = NULL;
+      for (formatenum = 0; formatenum < (sizeof(fallback_V4l) / sizeof(int)); formatenum++) {
+         int currentrender = fallback_V4l[formatenum];
+         if (renderlist_V4l[currentrender].renderer == NULL) continue;
+         pict.depth = renderlist_V4l[currentrender].depth;
+         pict.palette = currentrender;
+         if (xioctl(capBox->fd, VIDIOCSPICT, &pict) == -1) {
+            TRACE("%s: Could not render with \"%s\"\n (%d)", device, renderlist_V4l[currentrender].name, currentrender);
+            continue;
+         }
+         TRACE("%s: Found a suitable renderer: \"%s\" (%d)\n", device, renderlist_V4l[currentrender].name, currentrender);
+         capBox->renderer = renderlist_V4l[currentrender].renderer;
+         break;
+      }
+      if (!capBox->renderer) {
+         close(capBox->fd);
+         CoTaskMemFree(capBox);
+         FIXME("%s: This device wants to use \"%s\", this format isn't available, other formats didn't work either, GIVING UP!\n\n", device, renderlist_V4l[palet].name);
+         return E_FAIL;
+      }
+   } else {
+      TRACE("Using the suggested format\n");
+      capBox->renderer = renderlist_V4l[pict.palette].renderer;
+   }
+   memcpy(&capBox->pict, &pict, sizeof(struct video_picture));
+
+   memset(&window, 0, sizeof(window));
+   if (xioctl(capBox->fd, VIDIOCGWIN, &window) == -1) {
+      WARN("%s: Getting resolution failed.. (%s), giving up\n", device, strerror(errno));
+      close(capBox->fd);
+      CoTaskMemFree(capBox);
+      return E_FAIL;
+   }
+
+   capBox->height = capBox->outputheight = window.height;
+   capBox->width = capBox->outputwidth = window.width;
+   capBox->swresize = FALSE;
+   capBox->bitDepth = 24;
+   capBox->pOut = pOut;
+   capBox->fps = 3;
+   capBox->stopped = 0;
+   capBox->curframe = 0;
+   capBox->iscommitted = 0;
+   memcpy(pBox, &defbox, sizeof(Capture));
+   pBox->pMine = (void *)capBox;
+   InitializeCriticalSection(&capBox->CritSect);
+   TRACE("Using format: %d bits - %d x %d\n", capBox->bitDepth, capBox->width, capBox->height);
+   return S_OK;
+}
+
+static HRESULT V4l_Destroy(void *pBox)
+{
+   CaptureBox *capBox = (CaptureBox *)pBox;
+   TRACE("%p\n", capBox);
+   close(capBox->fd);
+   DeleteCriticalSection(&capBox->CritSect);
+   CoTaskMemFree(capBox);
+   return S_OK;
+}
+
+static HRESULT V4l_SetMediaType(void *pBox, AM_MEDIA_TYPE * mT)
+{
+   CaptureBox *capBox = (CaptureBox *)pBox;
+   int newheight, newwidth;
+   struct video_window window;
+   TRACE("%p\n", capBox);
+
+   if (((VIDEOINFOHEADER *)mT->pbFormat)->bmiHeader.biBitCount != 24 ||
+       ((VIDEOINFOHEADER *)mT->pbFormat)->bmiHeader.biCompression != BI_RGB)
+   {
+      FIXME("Media type suggested we can't yet process\n");
+      return VFW_E_INVALIDMEDIATYPE;
+   }
+
+   newwidth = ((VIDEOINFOHEADER *)mT->pbFormat)->bmiHeader.biWidth;
+   newheight = ((VIDEOINFOHEADER *)mT->pbFormat)->bmiHeader.biHeight;
+
+   TRACE("%p -> (%p) - %d %d\n", capBox, mT, newwidth, newheight);
+   if (capBox->height == newheight && capBox->width == newwidth)
+      return S_OK; /* Why on EARTH should we resize? */
+
+   xioctl(capBox->fd, VIDIOCGWIN, &window);
+   window.width = newwidth;
+   window.height = newheight;
+   if (xioctl(capBox->fd, VIDIOCSWIN, &window) == -1) {
+      TRACE("Can not use the format %d x %d, using software resize, old size: %d x %d\n", window.width, window.height, capBox->width, capBox->height);
+      capBox->swresize = TRUE;
+   } else {
+      capBox->height = window.height;
+      capBox->width = window.width;
+      capBox->swresize = FALSE;
+   }
+   capBox->outputwidth = window.width;
+   capBox->outputheight = window.height;
+   return S_OK;
+}
+
+static HRESULT V4l_GetMediaType(void *pBox, AM_MEDIA_TYPE ** mT)
+{
+   CaptureBox *capBox = (CaptureBox *)pBox;
+   VIDEOINFOHEADER *vi;
+
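+   /* Offer a single fixed-size RGB24 format at the configured output resolution */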
+   mT[0] = CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE));
+   if (!mT[0]) return E_OUTOFMEMORY;
+   vi = CoTaskMemAlloc(sizeof(VIDEOINFOHEADER));
+   mT[0]->cbFormat = sizeof(VIDEOINFOHEADER);
+   if (!vi)
+   {
+      CoTaskMemFree(mT[0]);
+      return E_OUTOFMEMORY;
+   }
+   memcpy(&mT[0]->majortype, &MEDIATYPE_Video, sizeof(GUID));
+   memcpy(&mT[0]->subtype, &MEDIASUBTYPE_RGB24, sizeof(GUID));
+   memcpy(&mT[0]->formattype, &FORMAT_VideoInfo, sizeof(GUID));
+   mT[0]->bFixedSizeSamples = TRUE;
+   mT[0]->bTemporalCompression = FALSE;
+   mT[0]->pUnk = NULL;
+   mT[0]->lSampleSize = capBox->outputwidth * capBox->outputheight * capBox->bitDepth / 8;
+   TRACE("Output format: %dx%d - %d bits = %lu KB\n", capBox->outputwidth, capBox->outputheight, capBox->bitDepth, mT[0]->lSampleSize/1024);
+   vi->rcSource.left = 0; vi->rcSource.top = 0;
+   vi->rcTarget.left = 0; vi->rcTarget.top = 0;
+   vi->rcSource.right = capBox->width; vi->rcSource.bottom = capBox->height;
+   vi->rcTarget.right = capBox->outputwidth; vi->rcTarget.bottom = capBox->outputheight;
+   vi->dwBitRate = capBox->fps * mT[0]->lSampleSize;
+   vi->dwBitErrorRate = 0;
+   vi->AvgTimePerFrame = (LONGLONG)10000000.0 / (LONGLONG)capBox->fps;
+   vi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
+   vi->bmiHeader.biWidth = capBox->outputwidth;
+   vi->bmiHeader.biHeight = capBox->outputheight;
+   vi->bmiHeader.biPlanes = 1;
+   vi->bmiHeader.biBitCount = 24;
+   vi->bmiHeader.biCompression = BI_RGB;
+   vi->bmiHeader.biSizeImage = mT[0]->lSampleSize;
+   vi->bmiHeader.biClrUsed = vi->bmiHeader.biClrImportant = 0;
+   vi->bmiHeader.biXPelsPerMeter = 100;
+   vi->bmiHeader.biYPelsPerMeter = 100;
+   mT[0]->pbFormat = (void *)vi;
+   dump_AM_MEDIA_TYPE(mT[0]);
+   return S_OK;
+}
+
+static HRESULT V4l_GetPropRange(void *pBox, long Property, long *pMin, long *pMax, long *pSteppingDelta, long *pDefault, long *pCapsFlags)
+{
+   CaptureBox *capBox = (CaptureBox *)pBox;
+   TRACE("%p -> %ld %p %p %p %p %p\n", pBox, Property, pMin, pMax, pSteppingDelta, pDefault, pCapsFlags);
+
+   switch (Property) {
+      case VideoProcAmp_Brightness: *pDefault = capBox->dbrightness; break;
+      case VideoProcAmp_Contrast: *pDefault = capBox->dcontrast; break;
+      case VideoProcAmp_Hue: *pDefault = capBox->dhue; break;
+      case VideoProcAmp_Saturation: *pDefault = capBox->dcolour; break;
+      default: FIXME("Not implemented %ld\n", Property); return E_NOTIMPL;
+   }
+   *pMin = 0;
+   *pMax = 65535;
+   *pSteppingDelta = 65536/256;
+   *pCapsFlags = VideoProcAmp_Flags_Manual;
+   return S_OK;
+}
+
+static HRESULT V4l_GetProp(void *pBox, long Property, long *lValue, long *Flags)
+{
+   CaptureBox *capBox = (CaptureBox *)pBox;
+   TRACE("%p -> %ld %p %p\n", pBox, Property, lValue, Flags);
+
+   switch (Property) {
+      case VideoProcAmp_Brightness: *lValue = capBox->pict.brightness; break;
+      case VideoProcAmp_Contrast: *lValue = capBox->pict.contrast; break;
+      case VideoProcAmp_Hue: *lValue = capBox->pict.hue; break;
+      case VideoProcAmp_Saturation: *lValue = capBox->pict.colour; break;
+      default: FIXME("Not implemented %ld\n", Property); return E_NOTIMPL;
+   }
+   *Flags = VideoProcAmp_Flags_Manual;
+   return S_OK;
+}
+
+static HRESULT V4l_SetProp(void *pBox, long Property, long lValue, long Flags)
+{
+   CaptureBox *capBox = (CaptureBox *)pBox;
+   TRACE("%p -> %ld %ld %ld\n", pBox, Property, lValue, Flags);
+
+   switch (Property) {
+      case VideoProcAmp_Brightness: capBox->pict.brightness = lValue; break;
+      case VideoProcAmp_Contrast: capBox->pict.contrast = lValue; break;
+      case VideoProcAmp_Hue: capBox->pict.hue = lValue; break;
+      case VideoProcAmp_Saturation: capBox->pict.colour = lValue; break;
+      default: FIXME("Not implemented %ld\n", Property); return E_NOTIMPL;
+   }
+
+   if (xioctl(capBox->fd, VIDIOCSPICT, &capBox->pict) == -1) {
+      WARN("Adjusting picture properties failed..\n");
+      return E_FAIL;
+   }
+   return S_OK;
+}
+
+static void renderer_RGB(CaptureBox *capBox, LPBYTE bufferin, LPBYTE stream)
+{
+   int depth = renderlist_V4l[capBox->pict.palette].depth;
+   int size = capBox->height * capBox->width * depth / 8;
+   switch (depth) {
+      case 24: memcpy(bufferin, stream, size); break;
+      case 32: {
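+         /* Pack 32-bit pixels into 24-bit RGB by skipping one byte per pixel */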
+         int pointer = 0, offset = 1;
+         while (pointer + offset <= size) {
+            bufferin[pointer] = stream[pointer + offset];
+            pointer++;
+            bufferin[pointer] = stream[pointer + offset];
+            pointer++;
+            bufferin[pointer] = stream[pointer + offset];
+            pointer++;
+            offset++;
+         }
+         break;
+      }
+      default: ERR("Unknown bit depth \"%d\"\n", depth); return;
+/* I used ERR here because there's no WAY we can get to this point */
+   }
+}
+
+static void renderer_YUV(CaptureBox *capBox, LPBYTE bufferin, LPBYTE stream)
+{
+   enum YUV_Format format;
+   switch (capBox->pict.palette) {
+      case  7 /* YUV422  */: /* EXACTLY the same as YUYV */
+      case  8 /* YUYV    */: format = YUYV; break;
+      case  9 /* UYVY    */: format = UYVY; break;
+      case 11 /* YUV411  */: format = UYYVYY; break;
+      case 13 /* YUV422P */: format = YUVP_421; break;
+      case 14 /* YUV411P */: format = YUVP_441; break;
+      case 15 /* YUV420P */: format = YUVP_422; break;
+      case 16 /* YUV410P */: format = YUVP_444; break;
+      default: ERR("Unknown palette \"%d\"\n", capBox->pict.palette); return;
+/* Same here.. */
+   }
+   YUV_To_RGB24(format, bufferin, stream, capBox->width, capBox->height);
+}
+
+static void Resize(CaptureBox * capBox, LPBYTE output, LPBYTE input)
+{
+/* The whole image has to be flipped vertically, because Windows DIBs are stored bottom-up */
+   if (!capBox->swresize) {
+      int depth = capBox->bitDepth / 8;
+      int inoffset = 0, outoffset = capBox->height * capBox->width * depth;
+      int ow = capBox->width * depth;
+      while (outoffset > 0) {
+         int x;
+         outoffset -= ow;
+         for (x = 0; x < ow; x++)
+            output[outoffset + x] = input[inoffset + x];
+         inoffset += ow;
+      }
+   } else {
+      static int shouldfixme = 1;
+      HDC dc_s, dc_d;
+      HBITMAP bmp_s, bmp_d;
+      int depth = capBox->bitDepth / 8;
+#ifndef STRETCHBLT_WORKS_PROPERLY /* StretchBlt doesn't accept a negative height yet; remove this workaround when it does */
+      int inoffset = 0, outoffset = (capBox->outputheight) * capBox->outputwidth * depth;
+      int ow = capBox->outputwidth * depth;
+      LPBYTE myarray = CoTaskMemAlloc(capBox->outputwidth * capBox->outputheight * depth);
+#endif
+      if (shouldfixme) {
+         shouldfixme = 0;
+         FIXME("Improve software resizing: add error checks and optimize..\n");
+      }
+      dc_s = CreateCompatibleDC(NULL);
+      dc_d = CreateCompatibleDC(NULL);
+      bmp_s = CreateBitmap(capBox->width, capBox->height, 1, capBox->bitDepth, input);
+      bmp_d = CreateBitmap(capBox->outputwidth, capBox->outputheight, 1, capBox->bitDepth, NULL);
+      SelectObject(dc_s, bmp_s);
+      SelectObject(dc_d, bmp_d);
+#ifdef STRETCHBLT_WORKS_PROPERLY /* StretchBlt doesn't accept a negative height yet; remove this workaround when it does */
+      StretchBlt(dc_d, 0, 0, capBox->outputwidth, -capBox->outputheight, dc_s, 0, 0, capBox->width, capBox->height, SRCCOPY);
+      GetBitmapBits(bmp_d, capBox->outputwidth * capBox->outputheight * depth, output);
+#else
+      StretchBlt(dc_d, 0, 0, capBox->outputwidth, capBox->outputheight, dc_s, 0, 0, capBox->width, capBox->height, SRCCOPY);
+      GetBitmapBits(bmp_d, capBox->outputwidth * capBox->outputheight * depth, myarray);
+      while (outoffset > 0) {
+         int x;
+         outoffset -= ow;
+         for (x = 0; x < ow; x++)
+            output[outoffset + x] = myarray[inoffset + x];
+         inoffset += ow;
+      }
+      CoTaskMemFree(myarray);
+#endif
+      DeleteDC(dc_s);
+      DeleteDC(dc_d);
+      DeleteObject(bmp_s);
+      DeleteObject(bmp_d);
+   }
+}
+
+static void V4l_GetFrame(CaptureBox * capBox, unsigned char ** pInput)
+{
+   if (capBox->mmap) {
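+      /* Wait for the driver to finish capturing into the current buffer */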
+      if (xioctl(capBox->fd, VIDIOCSYNC, &capBox->grab_buf[capBox->curframe]) == -1)
+         WARN("Syncing ioctl failed: %s\n", strerror(errno));
+
+      *pInput = ((unsigned char *)capBox->pmap) + capBox->gb_buffers.offsets[capBox->curframe];
+   } else {
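+      /* The fd is non-blocking, so keep retrying the read while it returns EAGAIN */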
+      int retval;
+      while ((retval = read(capBox->fd, capBox->grab_data, capBox->imagesize)) == -1)
+         if (errno != EAGAIN) break;
+      if (retval == -1)
+         WARN("Error occured while reading from device: %s\n", strerror(errno));
+      *pInput = capBox->grab_data;
+   }
+}
+
+static void V4l_FreeFrame(CaptureBox * capBox)
+{
+   TRACE("\n");
+   if (capBox->mmap) {
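+      /* Hand the buffer back to the driver so the next frame can be captured into it */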
+      if (xioctl(capBox->fd, VIDIOCMCAPTURE, &capBox->grab_buf[capBox->curframe]) == -1)
+/* ERR */WARN("Freeing frame for capture failed: %s\n", strerror(errno));
+/* Ah well, does it matter that we can't capture into this frame again? The user doesn't have to know */
+   }
+   if (++capBox->curframe == capBox->buffers) capBox->curframe = 0;
+}
+
+static DWORD WINAPI ReadThread(LPVOID lParam)
+{
+   CaptureBox * capBox = (CaptureBox *)lParam;
+   HRESULT hr;
+   IMediaSample *pSample = NULL;
+   unsigned long framecount = 0;
+   unsigned char *pTarget, *pInput, *pOutput;
+
+   hr = V4l_Prepare(capBox);
+   if (FAILED(hr)) goto fail;
+
+   pOutput = CoTaskMemAlloc(capBox->width * capBox->height * capBox->bitDepth / 8);
+   capBox->curframe = 0;
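+   /* Queue all capture buffers once (a no-op for the read() path) before entering the capture loop */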
+   do V4l_FreeFrame(capBox); while (capBox->curframe != 0);
+
+   while (1) {
+      EnterCriticalSection(&capBox->CritSect);
+      if (capBox->stopped) break;
+      hr = OutputPin_GetDeliveryBuffer((OutputPin *)capBox->pOut, &pSample, NULL, NULL, 0);
+      if (SUCCEEDED(hr)) {
+         if (!capBox->swresize)
+            IMediaSample_SetActualDataLength(pSample, capBox->height * capBox->width * capBox->bitDepth / 8);
+         else
+            IMediaSample_SetActualDataLength(pSample, capBox->outputheight * capBox->outputwidth * capBox->bitDepth / 8);
+         TRACE("Data length: %ld KB\n", IMediaSample_GetActualDataLength(pSample) / 1024);
+         IMediaSample_GetPointer(pSample, &pTarget);
+         /* TODO: Check return values.. */
+         V4l_GetFrame(capBox, &pInput);
+         capBox->renderer(capBox, pOutput, pInput);
+         Resize(capBox, pTarget, pOutput);
+         hr = OutputPin_SendSample((OutputPin *)capBox->pOut, pSample);
+         TRACE("%p -> Frame %lu: %lx\n", capBox, ++framecount, hr);
+         IMediaSample_Release(pSample);
+         V4l_FreeFrame(capBox);
+      }
+      LeaveCriticalSection(&capBox->CritSect);
+      if (FAILED(hr) && hr != VFW_E_NOT_CONNECTED) {
+         WARN("Received error: %lx\n", hr);
+         goto cfail;
+      }
+   }
+   LeaveCriticalSection(&capBox->CritSect);
+   CoTaskMemFree(pOutput);
+
+   return 0x0;
+cfail:
+   CoTaskMemFree(pOutput);
+   V4l_Unprepare(capBox);
+   LeaveCriticalSection(&capBox->CritSect);
+fail:
+   capBox->thread = 0; capBox->stopped = 1;
+   FIXME("Stop IFilterGraph\n");
+   return 0x0;
+}
+
+static HRESULT V4l_Run(void *pBox, FILTER_STATE *state)
+{
+   CaptureBox *capBox = (CaptureBox *)pBox;
+   HANDLE thread;
+   HRESULT hr;
+
+   TRACE("%p -> (%p)\n", capBox, state); 
+
+   if (*state == State_Running) return S_OK;
+
+   EnterCriticalSection(&capBox->CritSect);
+
+   capBox->stopped = 0;
+
+   if (*state == State_Stopped)
+   {
+      *state = State_Running;
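+      /* On the first run, negotiate buffer size/count with the downstream allocator and commit it */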
+      if (!capBox->iscommitted++) {
+         IMemAllocator * pAlloc = NULL;
+         ALLOCATOR_PROPERTIES ap, actual;
+         ap.cBuffers = 3;
+         if (!capBox->swresize)
+            ap.cbBuffer = capBox->width * capBox->height * capBox->bitDepth / 8;
+         else ap.cbBuffer = capBox->outputwidth * capBox->outputheight * capBox->bitDepth / 8;
+         ap.cbAlign = 1;
+         ap.cbPrefix = 0;
+
+         hr = IMemInputPin_GetAllocator(((OutputPin *)capBox->pOut)->pMemInputPin, &pAlloc);
+
+         if (SUCCEEDED(hr))
+            hr = IMemAllocator_SetProperties(pAlloc, &ap, &actual);
+
+         if (SUCCEEDED(hr))
+            hr = IMemAllocator_Commit(pAlloc);
+
+         if (pAlloc)
+            IMemAllocator_Release(pAlloc);
+
+         TRACE("Committing allocator: %lx\n", hr);
+      }
+
+      thread = CreateThread(NULL, 0, ReadThread, capBox, 0, NULL);
+      if (thread) {
+         capBox->thread = thread;
+         SetThreadPriority(thread, THREAD_PRIORITY_LOWEST);
+         LeaveCriticalSection(&capBox->CritSect);
+         return S_OK;
+      }
+/* ERR */WARN("Creating thread failed.. %lx\n", GetLastError());
+/* Capturing didn't start, but do we have to know? */
+      LeaveCriticalSection(&capBox->CritSect);
+      return E_FAIL;
+   }
+
+   ResumeThread(capBox->thread);
+   *state = State_Running;
+   LeaveCriticalSection(&capBox->CritSect);
+   return S_OK;
+}
+
+static HRESULT V4l_Pause(void *pBox, FILTER_STATE *state)
+{
+   CaptureBox *capBox = (CaptureBox *)pBox;
+   TRACE("%p -> (%p)\n", capBox, state);     
+   if (*state == State_Paused) return S_OK;
+   if (*state == State_Stopped) V4l_Run(pBox, state);
+   EnterCriticalSection(&capBox->CritSect);
+   *state = State_Paused;
+   SuspendThread(capBox->thread);
+   LeaveCriticalSection(&capBox->CritSect);
+   return S_OK;
+}
+
+static HRESULT V4l_Stop(void *pBox, FILTER_STATE *state)
+{
+   CaptureBox *capBox = (CaptureBox *)pBox;
+   TRACE("%p -> (%p)\n", capBox, state);
+
+   if (*state == State_Stopped) return S_OK;
+
+   EnterCriticalSection(&capBox->CritSect);
+
+   if (capBox->thread) {
+      if (*state == State_Paused)
+         ResumeThread(capBox->thread);
+      capBox->stopped = 1;
+      capBox->thread = 0;
+      if (capBox->iscommitted) {
+         HRESULT hr;
+         IMemInputPin *pMem = NULL;
+         IMemAllocator * pAlloc = NULL;
+         IPin *pConnect = NULL;
+
+         capBox->iscommitted = 0;
+
+         hr = IPin_ConnectedTo(capBox->pOut, &pConnect);
+
+         if (SUCCEEDED(hr))
+            hr = IPin_QueryInterface(pConnect, &IID_IMemInputPin, (void **) &pMem);
+
+         if (SUCCEEDED(hr))
+            hr = IMemInputPin_GetAllocator(pMem, &pAlloc);
+
+         if (SUCCEEDED(hr))
+            hr = IMemAllocator_Decommit(pAlloc);
+
+         if (pAlloc)
+            IMemAllocator_Release(pAlloc);
+
+         if (pMem)
+            IMemInputPin_Release(pMem);
+
+         if (pConnect)
+            IPin_Release(pConnect);
+
+         if (hr != S_OK && hr != VFW_E_NOT_COMMITTED)
+            WARN("Decommitting allocator: %lx\n", hr);
+      }
+      V4l_Unprepare(capBox);
+   }
+
+   *state = State_Stopped;
+   LeaveCriticalSection(&capBox->CritSect);
+   return S_OK;
+}
+
+static const Capture defbox = {
+   V4l_Destroy,
+   V4l_SetMediaType,
+   V4l_GetMediaType,
+   V4l_GetPropRange,
+   V4l_GetProp,
+   V4l_SetProp,
+   V4l_Run,
+   V4l_Pause,
+   V4l_Stop
+};
+
+#endif /* HAVE_LINUX_VIDEODEV_H */
+
diff -Nur qcap/vfwcapture.c qcap-old/vfwcapture.c
--- vfwcapture.c	2005-05-21 01:35:12.000000000 +0200
+++ vfwcapture.c	2005-05-21 01:16:01.000000000 +0200
@@ -54,7 +54,7 @@
 #undef HAVE_V4L2
 
 static const Video_Init Constructors[] = {
-#if 0/*def HAVE_LINUX_VIDEODEV_H*/
+#ifdef HAVE_LINUX_VIDEODEV_H
 #ifdef HAVE_V4L2
 /* There are 5 reasons I don't add V4l2 support
  * 1. Webcams don't use it

