author     Rob Clark <rob@ti.com>                                2009-08-04 09:14:20 +0200
committer  Sebastian Dröge <sebastian.droege@collabora.co.uk>    2009-08-04 09:16:56 +0200
commit     f19cfbda96d098362cc2a2565197cef347878549 (patch)
tree       598fdc98292aa14467da24fd6d711590588c1f68 /sys
parent     56850099a6a47b8c3f8606f8107d42cb86dfe601 (diff)
v4l2: Add v4l2sink element
This also does the following changes:

(1) pull the bufferpool code out into gstv4l2bufferpool.c, and make it a bit
    more generic so it can be used both for v4l2src and v4l2sink

(2) move some of the device probing/configuration/caps stuff into
    gstv4l2object.c so it does not have to be duplicated between v4l2src
    and v4l2sink

Fixes bug #590280.
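For context, a minimal sketch of how the new v4l2sink element could be driven from application code (GStreamer 0.10 API; the videotestsrc source, the caps string and the /dev/video1 device path are illustrative assumptions, not part of this commit):

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  GError *error = NULL;

  gst_init (&argc, &argv);

  /* videotestsrc and /dev/video1 are placeholders; any I420 producer and any
   * V4L2 output device supported by the driver would do */
  pipeline = gst_parse_launch ("videotestsrc ! "
      "video/x-raw-yuv,format=(fourcc)I420,width=320,height=240 ! "
      "v4l2sink device=/dev/video1", &error);
  if (error != NULL) {
    g_printerr ("failed to build pipeline: %s\n", error->message);
    g_error_free (error);
    return -1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_usleep (5 * G_USEC_PER_SEC);        /* render a few seconds of test video */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}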
Diffstat (limited to 'sys')
-rw-r--r--  sys/v4l2/Makefile.am             3
-rw-r--r--  sys/v4l2/gstv4l2.c              18
-rw-r--r--  sys/v4l2/gstv4l2bufferpool.c   630
-rw-r--r--  sys/v4l2/gstv4l2bufferpool.h    97
-rw-r--r--  sys/v4l2/gstv4l2object.c      1343
-rw-r--r--  sys/v4l2/gstv4l2object.h        39
-rw-r--r--  sys/v4l2/gstv4l2sink.c         711
-rw-r--r--  sys/v4l2/gstv4l2sink.h          88
-rw-r--r--  sys/v4l2/gstv4l2src.c          510
-rw-r--r--  sys/v4l2/gstv4l2src.h           35
-rw-r--r--  sys/v4l2/v4l2_calls.c           17
-rw-r--r--  sys/v4l2/v4l2_calls.h           59
-rw-r--r--  sys/v4l2/v4l2src_calls.c      1320
-rw-r--r--  sys/v4l2/v4l2src_calls.h         5
14 files changed, 3051 insertions, 1824 deletions
diff --git a/sys/v4l2/Makefile.am b/sys/v4l2/Makefile.am
index 20ec5486..cf972cc2 100644
--- a/sys/v4l2/Makefile.am
+++ b/sys/v4l2/Makefile.am
@@ -12,7 +12,9 @@ plugin_LTLIBRARIES = libgstvideo4linux2.la
libgstvideo4linux2_la_SOURCES = gstv4l2.c \
gstv4l2colorbalance.c \
gstv4l2object.c \
+ gstv4l2bufferpool.c \
gstv4l2src.c \
+ gstv4l2sink.c \
gstv4l2tuner.c \
gstv4l2vidorient.c \
v4l2_calls.c \
@@ -31,6 +33,7 @@ libgstvideo4linux2_la_LIBTOOLFLAGS = --tag=disable-static
libgstvideo4linux2_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
$(GST_BASE_LIBS) \
+ $(GST_PLUGINS_BASE_LIBS) -lgstvideo-$(GST_MAJORMINOR) \
-lgstinterfaces-$(GST_MAJORMINOR) \
$(GST_LIBS) \
$(xv_libs) \
diff --git a/sys/v4l2/gstv4l2.c b/sys/v4l2/gstv4l2.c
index 8ba9e340..409526cb 100644
--- a/sys/v4l2/gstv4l2.c
+++ b/sys/v4l2/gstv4l2.c
@@ -31,6 +31,7 @@
#include "gstv4l2object.h"
#include "gstv4l2src.h"
+#include "gstv4l2sink.h"
/* #include "gstv4l2jpegsrc.h" */
/* #include "gstv4l2mjpegsrc.h" */
/* #include "gstv4l2mjpegsink.h" */
@@ -44,13 +45,16 @@ plugin_init (GstPlugin * plugin)
GST_DEBUG_CATEGORY_INIT (v4l2_debug, "v4l2", 0, "V4L2 API calls");
if (!gst_element_register (plugin, "v4l2src", GST_RANK_PRIMARY,
- GST_TYPE_V4L2SRC))
- /* !gst_element_register (plugin, "v4l2jpegsrc", */
- /* GST_RANK_NONE, GST_TYPE_V4L2JPEGSRC) || */
- /* !gst_element_register (plugin, "v4l2mjpegsrc", */
- /* GST_RANK_NONE, GST_TYPE_V4L2MJPEGSRC) || */
- /* !gst_element_register (plugin, "v4l2mjpegsink", */
- /* GST_RANK_NONE, GST_TYPE_V4L2MJPEGSINK)) */
+ GST_TYPE_V4L2SRC) ||
+ !gst_element_register (plugin, "v4l2sink", GST_RANK_PRIMARY,
+ GST_TYPE_V4L2SINK) ||
+ /* !gst_element_register (plugin, "v4l2jpegsrc", */
+ /* GST_RANK_NONE, GST_TYPE_V4L2JPEGSRC) || */
+ /* !gst_element_register (plugin, "v4l2mjpegsrc", */
+ /* GST_RANK_NONE, GST_TYPE_V4L2MJPEGSRC) || */
+ /* !gst_element_register (plugin, "v4l2mjpegsink", */
+ /* GST_RANK_NONE, GST_TYPE_V4L2MJPEGSINK)) */
+ FALSE)
return FALSE;
#ifdef ENABLE_NLS
diff --git a/sys/v4l2/gstv4l2bufferpool.c b/sys/v4l2/gstv4l2bufferpool.c
new file mode 100644
index 00000000..bde4991b
--- /dev/null
+++ b/sys/v4l2/gstv4l2bufferpool.c
@@ -0,0 +1,630 @@
+/* GStreamer
+ *
+ * Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * 2006 Edgard Lima <edgard.lima@indt.org.br>
+ * 2009 Texas Instruments, Inc - http://www.ti.com/
+ *
+ * gstv4l2bufferpool.c: V4L2 buffer pool (shared by v4l2src and v4l2sink)
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include <config.h>
+#endif
+
+#include <sys/mman.h>
+#include <string.h>
+#include <unistd.h>
+
+#include <gstv4l2bufferpool.h>
+#include "gstv4l2src.h"
+#include "gstv4l2sink.h"
+#include "v4l2_calls.h"
+#include "gst/gst-i18n-plugin.h"
+
+
+GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
+#define GST_CAT_DEFAULT v4l2_debug
+
+
+/*
+ * GstV4l2Buffer:
+ */
+
+static GstBufferClass *v4l2buffer_parent_class = NULL;
+
+static void
+gst_v4l2_buffer_finalize (GstV4l2Buffer * buffer)
+{
+ GstV4l2BufferPool *pool;
+ gboolean resuscitated = FALSE;
+ gint index;
+
+ pool = buffer->pool;
+
+ index = buffer->vbuffer.index;
+
+ GST_LOG_OBJECT (pool->v4l2elem, "finalizing buffer %p %d", buffer, index);
+
+ GST_V4L2_BUFFER_POOL_LOCK (pool);
+ if (GST_BUFFER_SIZE (buffer) != 0) {
+ /* BUFFER_SIZE is only set if the frame was dequeued */
+ pool->num_live_buffers--;
+ GST_DEBUG_OBJECT (pool->v4l2elem, "num_live_buffers--: %d",
+ pool->num_live_buffers);
+ }
+
+ if (pool->running) {
+ if (pool->requeuebuf) {
+ if (!gst_v4l2_buffer_pool_qbuf (pool, buffer)) {
+ GST_WARNING ("could not requeue buffer %p %d", buffer, index);
+ } else {
+ resuscitated = TRUE;
+ }
+ } else {
+ resuscitated = TRUE;
+ /* XXX double check this... I think it is ok to not synchronize this
+ * w.r.t. destruction of the pool, since the buffer is still live and
+ * the buffer holds a ref to the pool..
+ */
+ g_async_queue_push (pool->avail_buffers, buffer);
+ }
+ } else {
+ GST_LOG_OBJECT (pool->v4l2elem, "the pool is shutting down");
+ }
+
+ if (resuscitated) {
+ /* FIXME: check that the caps didn't change */
+ GST_LOG_OBJECT (pool->v4l2elem, "reviving buffer %p, %d", buffer, index);
+ gst_buffer_ref (GST_BUFFER (buffer));
+ GST_BUFFER_SIZE (buffer) = 0;
+ pool->buffers[index] = buffer;
+ }
+
+ GST_V4L2_BUFFER_POOL_UNLOCK (pool);
+
+ if (!resuscitated) {
+ GST_LOG_OBJECT (pool->v4l2elem, "buffer %p not recovered, unmapping",
+ buffer);
+ gst_mini_object_unref (GST_MINI_OBJECT (pool));
+ v4l2_munmap ((void *) GST_BUFFER_DATA (buffer), buffer->vbuffer.length);
+
+ GST_MINI_OBJECT_CLASS (v4l2buffer_parent_class)->finalize (GST_MINI_OBJECT
+ (buffer));
+ }
+}
+
+static void
+gst_v4l2_buffer_class_init (gpointer g_class, gpointer class_data)
+{
+ GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
+
+ v4l2buffer_parent_class = g_type_class_peek_parent (g_class);
+
+ mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
+ gst_v4l2_buffer_finalize;
+}
+
+GType
+gst_v4l2_buffer_get_type (void)
+{
+ static GType _gst_v4l2_buffer_type;
+
+ if (G_UNLIKELY (_gst_v4l2_buffer_type == 0)) {
+ static const GTypeInfo v4l2_buffer_info = {
+ sizeof (GstBufferClass),
+ NULL,
+ NULL,
+ gst_v4l2_buffer_class_init,
+ NULL,
+ NULL,
+ sizeof (GstV4l2Buffer),
+ 0,
+ NULL,
+ NULL
+ };
+ _gst_v4l2_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
+ "GstV4l2Buffer", &v4l2_buffer_info, 0);
+ }
+ return _gst_v4l2_buffer_type;
+}
+
+static GstV4l2Buffer *
+gst_v4l2_buffer_new (GstV4l2BufferPool * pool, guint index, GstCaps * caps)
+{
+ GstV4l2Buffer *ret;
+ guint8 *data;
+
+ ret = (GstV4l2Buffer *) gst_mini_object_new (GST_TYPE_V4L2_BUFFER);
+
+ GST_LOG_OBJECT (pool->v4l2elem, "creating buffer %u, %p in pool %p", index,
+ ret, pool);
+
+ ret->pool =
+ (GstV4l2BufferPool *) gst_mini_object_ref (GST_MINI_OBJECT (pool));
+
+ ret->vbuffer.index = index;
+ ret->vbuffer.type = pool->type;
+ ret->vbuffer.memory = V4L2_MEMORY_MMAP;
+
+ if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &ret->vbuffer) < 0)
+ goto querybuf_failed;
+
+ GST_LOG_OBJECT (pool->v4l2elem, " index: %u", ret->vbuffer.index);
+ GST_LOG_OBJECT (pool->v4l2elem, " type: %d", ret->vbuffer.type);
+ GST_LOG_OBJECT (pool->v4l2elem, " bytesused: %u", ret->vbuffer.bytesused);
+ GST_LOG_OBJECT (pool->v4l2elem, " flags: %08x", ret->vbuffer.flags);
+ GST_LOG_OBJECT (pool->v4l2elem, " field: %d", ret->vbuffer.field);
+ GST_LOG_OBJECT (pool->v4l2elem, " memory: %d", ret->vbuffer.memory);
+ if (ret->vbuffer.memory == V4L2_MEMORY_MMAP)
+ GST_LOG_OBJECT (pool->v4l2elem, " MMAP offset: %u",
+ ret->vbuffer.m.offset);
+ GST_LOG_OBJECT (pool->v4l2elem, " length: %u", ret->vbuffer.length);
+ GST_LOG_OBJECT (pool->v4l2elem, " input: %u", ret->vbuffer.input);
+
+ data = (guint8 *) v4l2_mmap (0, ret->vbuffer.length,
+ PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
+ ret->vbuffer.m.offset);
+
+ if (data == MAP_FAILED)
+ goto mmap_failed;
+
+ GST_BUFFER_DATA (ret) = data;
+ GST_BUFFER_SIZE (ret) = ret->vbuffer.length;
+
+ GST_BUFFER_FLAG_SET (ret, GST_BUFFER_FLAG_READONLY);
+
+ gst_buffer_set_caps (GST_BUFFER (ret), caps);
+
+ return ret;
+
+ /* ERRORS */
+querybuf_failed:
+ {
+ gint errnosave = errno;
+
+ GST_WARNING ("Failed QUERYBUF: %s", g_strerror (errnosave));
+ gst_buffer_unref (GST_BUFFER (ret));
+ errno = errnosave;
+ return NULL;
+ }
+mmap_failed:
+ {
+ gint errnosave = errno;
+
+ GST_WARNING ("Failed to mmap: %s", g_strerror (errnosave));
+ gst_buffer_unref (GST_BUFFER (ret));
+ errno = errnosave;
+ return NULL;
+ }
+}
+
+
+/*
+ * GstV4l2BufferPool:
+ */
+
+static GstMiniObjectClass *buffer_pool_parent_class = NULL;
+
+static void
+gst_v4l2_buffer_pool_finalize (GstV4l2BufferPool * pool)
+{
+ g_mutex_free (pool->lock);
+ pool->lock = NULL;
+
+ g_async_queue_unref (pool->avail_buffers);
+ pool->avail_buffers = NULL;
+
+ if (pool->video_fd >= 0)
+ v4l2_close (pool->video_fd);
+
+ if (pool->buffers) {
+ g_free (pool->buffers);
+ pool->buffers = NULL;
+ }
+
+ GST_MINI_OBJECT_CLASS (buffer_pool_parent_class)->finalize (GST_MINI_OBJECT
+ (pool));
+}
+
+static void
+gst_v4l2_buffer_pool_init (GstV4l2BufferPool * pool, gpointer g_class)
+{
+ pool->lock = g_mutex_new ();
+ pool->running = FALSE;
+ pool->num_live_buffers = 0;
+}
+
+static void
+gst_v4l2_buffer_pool_class_init (gpointer g_class, gpointer class_data)
+{
+ GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
+
+ buffer_pool_parent_class = g_type_class_peek_parent (g_class);
+
+ mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
+ gst_v4l2_buffer_pool_finalize;
+}
+
+GType
+gst_v4l2_buffer_pool_get_type (void)
+{
+ static GType _gst_v4l2_buffer_pool_type;
+
+ if (G_UNLIKELY (_gst_v4l2_buffer_pool_type == 0)) {
+ static const GTypeInfo v4l2_buffer_pool_info = {
+ sizeof (GstBufferClass),
+ NULL,
+ NULL,
+ gst_v4l2_buffer_pool_class_init,
+ NULL,
+ NULL,
+ sizeof (GstV4l2BufferPool),
+ 0,
+ (GInstanceInitFunc) gst_v4l2_buffer_pool_init,
+ NULL
+ };
+ _gst_v4l2_buffer_pool_type = g_type_register_static (GST_TYPE_MINI_OBJECT,
+ "GstV4l2BufferPool", &v4l2_buffer_pool_info, 0);
+ }
+ return _gst_v4l2_buffer_pool_type;
+}
+
+
+/* this is somewhat of a hack.. but better to keep the hack in
+ * one place than copy/pasting it around..
+ */
+static GstV4l2Object *
+get_v4l2_object (GstElement * v4l2elem)
+{
+ GstV4l2Object *v4l2object = NULL;
+ if (GST_IS_V4L2SRC (v4l2elem)) {
+ v4l2object = (GST_V4L2SRC (v4l2elem))->v4l2object;
+ } else if (GST_IS_V4L2SINK (v4l2elem)) {
+ v4l2object = (GST_V4L2SINK (v4l2elem))->v4l2object;
+ } else {
+ GST_ERROR_OBJECT (v4l2elem, "unknown v4l2 element");
+ }
+ return v4l2object;
+}
+
+
+
+/**
+ * Construct a new buffer pool
+ *
+ * @v4l2elem the v4l2 element (src or sink) that owns this pool
+ * @fd the video device file descriptor
+ * @num_buffers the requested number of buffers in the pool
+ * @caps the caps to set on the buffer
+ * @requeuebuf if <code>TRUE</code>, and if the pool is still in the
+ * <code>running</code> state, a buffer with no remaining references
+ * is immediately passed back to v4l2 (<code>VIDIOC_QBUF</code>),
+ * otherwise it is returned to the pool of available buffers
+ * (which can be accessed via <code>gst_v4l2_buffer_pool_get()</code>).
+ */
+GstV4l2BufferPool *
+gst_v4l2_buffer_pool_new (GstElement * v4l2elem, gint fd, gint num_buffers,
+ GstCaps * caps, gboolean requeuebuf, enum v4l2_buf_type type)
+{
+ GstV4l2BufferPool *pool;
+ gint n;
+ struct v4l2_requestbuffers breq;
+
+ pool = (GstV4l2BufferPool *) gst_mini_object_new (GST_TYPE_V4L2_BUFFER_POOL);
+
+ pool->video_fd = v4l2_dup (fd);
+ if (pool->video_fd < 0)
+ goto dup_failed;
+
+
+ /* first, lets request buffers, and see how many we can get: */
+ GST_DEBUG_OBJECT (v4l2elem, "STREAMING, requesting %d MMAP buffers",
+ num_buffers);
+
+ memset (&breq, 0, sizeof (struct v4l2_requestbuffers));
+ breq.type = type;
+ breq.count = num_buffers;
+ breq.memory = V4L2_MEMORY_MMAP;
+
+ if (v4l2_ioctl (fd, VIDIOC_REQBUFS, &breq) < 0)
+ goto reqbufs_failed;
+
+ GST_LOG_OBJECT (v4l2elem, " count: %u", breq.count);
+ GST_LOG_OBJECT (v4l2elem, " type: %d", breq.type);
+ GST_LOG_OBJECT (v4l2elem, " memory: %d", breq.memory);
+
+ if (breq.count < GST_V4L2_MIN_BUFFERS)
+ goto no_buffers;
+
+ if (num_buffers != breq.count) {
+ GST_WARNING_OBJECT (v4l2elem, "using %u buffers instead", breq.count);
+ num_buffers = breq.count;
+ }
+
+ pool->v4l2elem = v4l2elem;
+ pool->requeuebuf = requeuebuf;
+ pool->type = type;
+ pool->buffer_count = num_buffers;
+ pool->buffers = g_new0 (GstV4l2Buffer *, num_buffers);
+ pool->avail_buffers = g_async_queue_new ();
+
+ /* now, map the buffers: */
+ for (n = 0; n < num_buffers; n++) {
+ pool->buffers[n] = gst_v4l2_buffer_new (pool, n, caps);
+ if (!pool->buffers[n])
+ goto buffer_new_failed;
+ g_async_queue_push (pool->avail_buffers, pool->buffers[n]);
+ }
+
+ return pool;
+
+ /* ERRORS */
+dup_failed:
+ {
+ gint errnosave = errno;
+
+ gst_mini_object_unref (GST_MINI_OBJECT (pool));
+
+ errno = errnosave;
+
+ return NULL;
+ }
+reqbufs_failed:
+ {
+ GstV4l2Object *v4l2object = get_v4l2_object (v4l2elem);
+ GST_ELEMENT_ERROR (v4l2elem, RESOURCE, READ,
+ (_("Could not get buffers from device '%s'."),
+ v4l2object->videodev),
+ ("error requesting %d buffers: %s", num_buffers, g_strerror (errno)));
+ return NULL;
+ }
+no_buffers:
+ {
+ GstV4l2Object *v4l2object = get_v4l2_object (v4l2elem);
+ GST_ELEMENT_ERROR (v4l2elem, RESOURCE, READ,
+ (_("Could not get enough buffers from device '%s'."),
+ v4l2object->videodev),
+ ("we received %d from device '%s', we want at least %d",
+ breq.count, v4l2object->videodev, GST_V4L2_MIN_BUFFERS));
+ return NULL;
+ }
+buffer_new_failed:
+ {
+ gint errnosave = errno;
+
+ gst_v4l2_buffer_pool_destroy (pool);
+
+ errno = errnosave;
+
+ return NULL;
+ }
+}
+
+
+void
+gst_v4l2_buffer_pool_destroy (GstV4l2BufferPool * pool)
+{
+ gint n;
+
+ GST_V4L2_BUFFER_POOL_LOCK (pool);
+ pool->running = FALSE;
+ GST_V4L2_BUFFER_POOL_UNLOCK (pool);
+
+ GST_DEBUG_OBJECT (pool->v4l2elem, "destroy pool");
+
+ /* after this point, no more buffers will be queued or dequeued; no buffer
+ * from pool->buffers that is NULL will be set to a buffer, and no buffer that
+ * is not NULL will be pushed out. */
+
+ /* miniobjects have no dispose, so they can't break ref-cycles, as buffers ref
+ * the pool, we need to unref the buffer to properly finalize the pool */
+ for (n = 0; n < pool->buffer_count; n++) {
+ GstBuffer *buf;
+
+ GST_V4L2_BUFFER_POOL_LOCK (pool);
+ buf = GST_BUFFER (pool->buffers[n]);
+ GST_V4L2_BUFFER_POOL_UNLOCK (pool);
+
+ if (buf)
+ /* we own the ref if the buffer is in pool->buffers; drop it. */
+ gst_buffer_unref (buf);
+ }
+
+ gst_mini_object_unref (GST_MINI_OBJECT (pool));
+}
+
+/**
+ * Get an available buffer in the pool
+ *
+ * @pool the "this" object
+ * @blocking if <code>TRUE</code>, then suspend until a buffer is available
+ */
+GstV4l2Buffer *
+gst_v4l2_buffer_pool_get (GstV4l2BufferPool * pool, gboolean blocking)
+{
+ GstV4l2Buffer *buf = NULL;
+
+ do {
+ buf = g_async_queue_try_pop (pool->avail_buffers);
+
+ /* if there isn't a buffer avail, let's try to dequeue one:
+ */
+ if (blocking && !buf) {
+ GST_DEBUG_OBJECT (pool->v4l2elem, "No buffers available.. need to dqbuf");
+ buf = gst_v4l2_buffer_pool_dqbuf (pool);
+
+ /* note: if we get a buf, we don't want to use it directly (because
+ * someone else could still hold a ref).. but instead we release our
+ * reference to it, and if no one else holds a ref it will be returned
+ * to the pool of available buffers.. and if not, we keep looping.
+ */
+ if (buf) {
+ gst_buffer_unref (GST_BUFFER (buf));
+ buf = NULL;
+ }
+ } else {
+ break;
+ }
+ } while (1);
+
+ if (buf) {
+ pool->num_live_buffers++;
+ GST_DEBUG_OBJECT (pool->v4l2elem, "num_live_buffers++: %d",
+ pool->num_live_buffers);
+ GST_BUFFER_SIZE (buf) = buf->vbuffer.length;
+ }
+
+ pool->running = TRUE;
+
+
+ return buf;
+}
+
+
+/**
+ * Queue a buffer to the driver
+ *
+ * @pool the "this" object
+ * @buf the buffer to queue
+ */
+gboolean
+gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool * pool, GstV4l2Buffer * buf)
+{
+ GST_LOG_OBJECT (pool->v4l2elem, "enqueue pool buffer %d", buf->vbuffer.index);
+
+ if (v4l2_ioctl (pool->video_fd, VIDIOC_QBUF, &buf->vbuffer) < 0)
+ return FALSE;
+
+ pool->num_live_buffers--;
+ GST_DEBUG_OBJECT (pool->v4l2elem, "num_live_buffers--: %d",
+ pool->num_live_buffers);
+
+ return TRUE;
+}
+
+/**
+ * Dequeue a buffer from the driver. Some generic error handling is done in
+ * this function, but any error handling specific to v4l2src (capture) or
+ * v4l2sink (output) can be done outside this function by checking 'errno'
+ *
+ * @pool the "this" object
+ */
+GstV4l2Buffer *
+gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool)
+{
+ GstV4l2Object *v4l2object = get_v4l2_object (pool->v4l2elem);
+ GstV4l2Buffer *pool_buffer;
+ struct v4l2_buffer buffer;
+
+ memset (&buffer, 0x00, sizeof (buffer));
+ buffer.type = pool->type;
+ buffer.memory = V4L2_MEMORY_MMAP;
+
+
+ if (v4l2_ioctl (pool->video_fd, VIDIOC_DQBUF, &buffer) >= 0) {
+
+ GST_V4L2_BUFFER_POOL_LOCK (pool);
+
+ /* get our GstBuffer with that index from the pool, if the buffer was
+ * outstanding we have a serious problem.
+ */
+ pool_buffer = pool->buffers[buffer.index];
+
+ if (pool_buffer == NULL) {
+ GST_ELEMENT_ERROR (pool->v4l2elem, RESOURCE, FAILED,
+ (_("Failed trying to get video frames from device '%s'."),
+ v4l2object->videodev),
+ (_("No free buffers found in the pool at index %d."), buffer.index));
+ GST_V4L2_BUFFER_POOL_UNLOCK (pool);
+ return NULL;
+ }
+
+ GST_LOG_OBJECT (pool->v4l2elem,
+ "grabbed frame %d (ix=%d), flags %08x, pool-ct=%d, buffer=%p",
+ buffer.sequence, buffer.index, buffer.flags, pool->num_live_buffers,
+ pool_buffer);
+
+ pool->num_live_buffers++;
+ GST_DEBUG_OBJECT (pool->v4l2elem, "num_live_buffers++: %d",
+ pool->num_live_buffers);
+
+ GST_V4L2_BUFFER_POOL_UNLOCK (pool);
+
+ /* this can change at every frame, esp. with jpeg */
+ GST_BUFFER_SIZE (pool_buffer) = buffer.bytesused;
+
+ return pool_buffer;
+ }
+
+
+ GST_WARNING_OBJECT (pool->v4l2elem,
+ "problem grabbing frame %d (ix=%d), pool-ct=%d, buf.flags=%d",
+ buffer.sequence, buffer.index,
+ GST_MINI_OBJECT_REFCOUNT (pool), buffer.flags);
+
+ switch (errno) {
+ case EAGAIN:
+ GST_WARNING_OBJECT (pool->v4l2elem,
+ "Non-blocking I/O has been selected using O_NONBLOCK and"
+ " no buffer was in the outgoing queue. device %s",
+ v4l2object->videodev);
+ break;
+ case EINVAL:
+ GST_ELEMENT_ERROR (pool->v4l2elem, RESOURCE, FAILED,
+ (_("Failed trying to get video frames from device '%s'."),
+ v4l2object->videodev),
+ (_("The buffer type is not supported, or the index is out of bounds,"
+ " or no buffers have been allocated yet, or the userptr"
+ " or length are invalid. device %s"), v4l2object->videodev));
+ break;
+ case ENOMEM:
+ GST_ELEMENT_ERROR (pool->v4l2elem, RESOURCE, FAILED,
+ (_("Failed trying to get video frames from device '%s'. Not enough memory."), v4l2object->videodev), (_("insufficient memory to enqueue a user pointer buffer. device %s."), v4l2object->videodev));
+ break;
+ case EIO:
+ GST_INFO_OBJECT (pool->v4l2elem,
+ "VIDIOC_DQBUF failed due to an internal error."
+ " Can also indicate temporary problems like signal loss."
+ " Note the driver might dequeue an (empty) buffer despite"
+ " returning an error, or even stop capturing."
+ " device %s", v4l2object->videodev);
+ /* have we de-queued a buffer ? */
+ if (!(buffer.flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))) {
+ GST_DEBUG_OBJECT (pool->v4l2elem, "reenqueing buffer");
+ /* FIXME ... should we do something here? */
+ }
+ break;
+ case EINTR:
+ GST_WARNING_OBJECT (pool->v4l2elem,
+ "could not sync on a buffer on device %s", v4l2object->videodev);
+ break;
+ default:
+ GST_WARNING_OBJECT (pool->v4l2elem,
+ "Grabbing frame got interrupted on %s unexpectedly. %d: %s.",
+ v4l2object->videodev, errno, g_strerror (errno));
+ break;
+ }
+
+ return NULL;
+}
+
+gint
+gst_v4l2_buffer_pool_available_buffers (GstV4l2BufferPool * pool)
+{
+ return pool->buffer_count - pool->num_live_buffers;
+}
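To make the pool API above a bit more concrete, here is a hedged sketch of a capture-style user, roughly the direction v4l2src takes it; the file descriptor, caps and buffer count are assumed to be prepared elsewhere, and VIDIOC_STREAMON/STREAMOFF handling is left out:

#include <gst/gst.h>
#include "gstv4l2bufferpool.h"

/* Illustrative only: dequeue a handful of frames from an already-configured
 * capture device. 'elem' is the owning element, 'fd' the open video fd and
 * 'caps' the negotiated caps -- all assumptions made for this sketch. */
static gboolean
capture_some_frames (GstElement * elem, gint fd, GstCaps * caps)
{
  GstV4l2BufferPool *pool;
  GstV4l2Buffer *buf;
  gint i;

  /* four buffers is an arbitrary choice for this sketch; requeuebuf=TRUE means
   * a buffer whose last reference is dropped goes straight back to the driver */
  pool = gst_v4l2_buffer_pool_new (elem, fd, 4, caps, TRUE,
      V4L2_BUF_TYPE_VIDEO_CAPTURE);
  if (pool == NULL)
    return FALSE;

  /* hand every buffer to the driver before streaming starts */
  while ((buf = gst_v4l2_buffer_pool_get (pool, FALSE)) != NULL)
    gst_v4l2_buffer_pool_qbuf (pool, buf);

  /* ... VIDIOC_STREAMON would be issued here ... */

  for (i = 0; i < 10; i++) {
    buf = gst_v4l2_buffer_pool_dqbuf (pool);
    if (buf == NULL)
      break;
    /* normally the buffer is pushed downstream; dropping the last ref
     * re-queues it because requeuebuf is TRUE */
    gst_buffer_unref (GST_BUFFER (buf));
  }

  /* ... VIDIOC_STREAMOFF would be issued here ... */
  gst_v4l2_buffer_pool_destroy (pool);
  return TRUE;
}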
diff --git a/sys/v4l2/gstv4l2bufferpool.h b/sys/v4l2/gstv4l2bufferpool.h
new file mode 100644
index 00000000..31746079
--- /dev/null
+++ b/sys/v4l2/gstv4l2bufferpool.h
@@ -0,0 +1,97 @@
+/* GStreamer
+ *
+ * Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * 2006 Edgard Lima <edgard.lima@indt.org.br>
+ * 2009 Texas Instruments, Inc - http://www.ti.com/
+ *
+ * gstv4l2bufferpool.h: V4L2 buffer pool (shared by v4l2src and v4l2sink)
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GSTV4L2BUFFER_H__
+#define __GSTV4L2BUFFER_H__
+
+#include <gst/gst.h>
+#include "v4l2_calls.h"
+
+GST_DEBUG_CATEGORY_EXTERN (v4l2buffer_debug);
+
+G_BEGIN_DECLS
+
+
+GType gst_v4l2_buffer_get_type (void);
+#define GST_TYPE_V4L2_BUFFER (gst_v4l2_buffer_get_type())
+#define GST_IS_V4L2_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L2_BUFFER))
+#define GST_V4L2_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L2_BUFFER, GstV4l2Buffer))
+
+GType gst_v4l2_buffer_pool_get_type (void);
+#define GST_TYPE_V4L2_BUFFER_POOL (gst_v4l2_buffer_pool_get_type())
+#define GST_IS_V4L2_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L2_BUFFER_POOL))
+#define GST_V4L2_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L2_BUFFER_POOL, GstV4l2BufferPool))
+
+
+
+typedef struct _GstV4l2BufferPool GstV4l2BufferPool;
+typedef struct _GstV4l2Buffer GstV4l2Buffer;
+
+
+struct _GstV4l2BufferPool
+{
+ GstMiniObject parent;
+
+ GstElement *v4l2elem; /* the v4l2 src/sink that owns us.. maybe we should be owned by v4l2object? */
+ gboolean requeuebuf; /* if true, unused buffers are automatically re-QBUF'd */
+ enum v4l2_buf_type type; /* V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_BUF_TYPE_VIDEO_OUTPUT */
+
+ GMutex *lock;
+ gboolean running; /* with lock */
+ gint num_live_buffers; /* number of buffers not with driver (capture) or not in avail buffer pool (display) */
+ GAsyncQueue* avail_buffers;/* pool of available buffers, not with the driver and which aren't held outside the bufferpool */
+ gint video_fd; /* a dup(2) of the v4l2object's video_fd */
+ guint buffer_count;
+ GstV4l2Buffer **buffers;
+};
+
+struct _GstV4l2Buffer {
+ GstBuffer buffer;
+
+ struct v4l2_buffer vbuffer;
+
+ /* FIXME: have GstV4l2Src* instead, as this has GstV4l2BufferPool* */
+ /* FIXME: do we really want to fix this if GstV4l2Buffer/Pool is shared
+ * between v4l2src and v4l2sink??
+ */
+ GstV4l2BufferPool *pool;
+};
+
+G_END_DECLS
+
+void gst_v4l2_buffer_pool_destroy (GstV4l2BufferPool * pool);
+GstV4l2BufferPool *gst_v4l2_buffer_pool_new (GstElement *v4l2elem, gint fd, gint num_buffers, GstCaps * caps, gboolean requeuebuf, enum v4l2_buf_type type);
+
+
+GstV4l2Buffer *gst_v4l2_buffer_pool_get (GstV4l2BufferPool *pool, gboolean blocking);
+gboolean gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool *pool, GstV4l2Buffer *buf);
+GstV4l2Buffer *gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool *pool);
+
+gint gst_v4l2_buffer_pool_available_buffers (GstV4l2BufferPool *pool);
+
+
+#define GST_V4L2_BUFFER_POOL_LOCK(pool) g_mutex_lock ((pool)->lock)
+#define GST_V4L2_BUFFER_POOL_UNLOCK(pool) g_mutex_unlock ((pool)->lock)
+
+#endif /* __GSTV4L2BUFFER_H__ */
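And the complementary output-side pattern, roughly how a sink would use the same pool; again only a sketch, under the assumption that the pool was created with requeuebuf=FALSE and V4L2_BUF_TYPE_VIDEO_OUTPUT, so released buffers come back through avail_buffers rather than being re-QBUF'd automatically:

#include <string.h>
#include <gst/gst.h>
#include "gstv4l2bufferpool.h"

/* Illustrative only: copy one incoming GstBuffer into a pool buffer and
 * queue it to the driver for display. */
static gboolean
show_frame (GstV4l2BufferPool * pool, GstBuffer * frame)
{
  GstV4l2Buffer *buf;

  /* block until the driver hands a buffer back if none is available */
  buf = gst_v4l2_buffer_pool_get (pool, TRUE);
  if (buf == NULL)
    return FALSE;

  memcpy (GST_BUFFER_DATA (buf), GST_BUFFER_DATA (frame),
      MIN (GST_BUFFER_SIZE (frame), buf->vbuffer.length));

  if (!gst_v4l2_buffer_pool_qbuf (pool, buf))
    return FALSE;

  /* the driver owns the memory until it is dequeued again;
   * gst_v4l2_buffer_pool_available_buffers() reports how many remain free */
  return TRUE;
}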
diff --git a/sys/v4l2/gstv4l2object.c b/sys/v4l2/gstv4l2object.c
index b9a18cc0..021e49db 100644
--- a/sys/v4l2/gstv4l2object.c
+++ b/sys/v4l2/gstv4l2object.c
@@ -15,7 +15,7 @@
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307,
- * USA.
+ * USA.
*/
#ifdef HAVE_CONFIG_H
@@ -39,7 +39,13 @@
#endif
#include "gstv4l2colorbalance.h"
-#define DEFAULT_PROP_DEVICE "/dev/video0"
+#include "gst/gst-i18n-plugin.h"
+
+
+GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
+#define GST_CAT_DEFAULT v4l2_debug
+
+
#define DEFAULT_PROP_DEVICE_NAME NULL
#define DEFAULT_PROP_DEVICE_FD -1
#define DEFAULT_PROP_FLAGS 0
@@ -53,9 +59,6 @@ enum
V4L2_STD_OBJECT_PROPS,
};
-GST_DEBUG_CATEGORY_EXTERN (v4l2src_debug);
-#define GST_CAT_DEFAULT v4l2src_debug
-
const GList *
gst_v4l2_probe_get_properties (GstPropertyProbe * probe)
{
@@ -151,7 +154,7 @@ gst_v4l2_class_probe_devices (GstElementClass * klass, gboolean check,
}
/*
- * detect /dev entries
+ * detect /dev entries
*/
for (n = 0; n < 64; n++) {
for (base = 0; dev_base[base] != NULL; base++) {
@@ -161,11 +164,11 @@ gst_v4l2_class_probe_devices (GstElementClass * klass, gboolean check,
n);
/*
- * does the /dev/ entry exist at all?
+ * does the /dev/ entry exist at all?
*/
if (stat (device, &s) == 0) {
/*
- * yes: is a device attached?
+ * yes: is a device attached?
*/
if (S_ISCHR (s.st_mode)) {
@@ -307,11 +310,12 @@ gst_v4l2_device_get_type (void)
}
void
-gst_v4l2_object_install_properties_helper (GObjectClass * gobject_class)
+gst_v4l2_object_install_properties_helper (GObjectClass * gobject_class,
+ const char *default_device)
{
g_object_class_install_property (gobject_class, PROP_DEVICE,
g_param_spec_string ("device", "Device", "Device location",
- DEFAULT_PROP_DEVICE, G_PARAM_READWRITE));
+ default_device, G_PARAM_READWRITE));
g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
g_param_spec_string ("device-name", "Device name",
"Name of the device", DEFAULT_PROP_DEVICE_NAME, G_PARAM_READABLE));
@@ -326,6 +330,8 @@ gst_v4l2_object_install_properties_helper (GObjectClass * gobject_class)
GstV4l2Object *
gst_v4l2_object_new (GstElement * element,
+ enum v4l2_buf_type type,
+ char *default_device,
GstV4l2GetInOutFunction get_in_out_func,
GstV4l2SetInOutFunction set_in_out_func,
GstV4l2UpdateFpsFunction update_fps_func)
@@ -333,10 +339,13 @@ gst_v4l2_object_new (GstElement * element,
GstV4l2Object *v4l2object;
/*
- * some default values
+ * some default values
*/
v4l2object = g_new0 (GstV4l2Object, 1);
+ v4l2object->type = type;
+ v4l2object->formats = NULL;
+
v4l2object->element = element;
v4l2object->get_in_out_func = get_in_out_func;
v4l2object->set_in_out_func = set_in_out_func;
@@ -345,7 +354,7 @@ gst_v4l2_object_new (GstElement * element,
v4l2object->video_fd = -1;
v4l2object->poll = gst_poll_new (TRUE);
v4l2object->buffer = NULL;
- v4l2object->videodev = g_strdup (DEFAULT_PROP_DEVICE);
+ v4l2object->videodev = g_strdup (default_device);
v4l2object->norms = NULL;
v4l2object->channels = NULL;
@@ -356,6 +365,9 @@ gst_v4l2_object_new (GstElement * element,
return v4l2object;
}
+static gboolean gst_v4l2_object_clear_format_list (GstV4l2Object * v4l2object);
+
+
void
gst_v4l2_object_destroy (GstV4l2Object * v4l2object)
{
@@ -373,9 +385,25 @@ gst_v4l2_object_destroy (GstV4l2Object * v4l2object)
if (v4l2object->norm)
g_free (v4l2object->norm);
+ if (v4l2object->formats) {
+ gst_v4l2_object_clear_format_list (v4l2object);
+ }
+
g_free (v4l2object);
}
+
+static gboolean
+gst_v4l2_object_clear_format_list (GstV4l2Object * v4l2object)
+{
+ g_slist_foreach (v4l2object->formats, (GFunc) g_free, NULL);
+ g_slist_free (v4l2object->formats);
+ v4l2object->formats = NULL;
+
+ return TRUE;
+}
+
+
gboolean
gst_v4l2_object_set_property_helper (GstV4l2Object * v4l2object,
guint prop_id, const GValue * value, GParamSpec * pspec)
@@ -500,7 +528,12 @@ gst_v4l2_set_defaults (GstV4l2Object * v4l2object)
{
GstTunerNorm *norm = NULL;
GstTunerChannel *channel = NULL;
- GstTuner *tuner = GST_TUNER (v4l2object->element);
+ GstTuner *tuner;
+
+ if (!GST_IS_TUNER (v4l2object->element))
+ return;
+
+ tuner = GST_TUNER (v4l2object->element);
if (v4l2object->norm)
norm = gst_tuner_find_norm_by_name (tuner, v4l2object->norm);
@@ -573,3 +606,1287 @@ gst_v4l2_object_stop (GstV4l2Object * v4l2object)
return TRUE;
}
+
+
+/*
+ * common format / caps utilities:
+ */
+
+
+static const guint32 gst_v4l2_formats[] = {
+ /* from Linux 2.6.15 videodev2.h */
+ V4L2_PIX_FMT_RGB332,
+ V4L2_PIX_FMT_RGB555,
+ V4L2_PIX_FMT_RGB565,
+ V4L2_PIX_FMT_RGB555X,
+ V4L2_PIX_FMT_RGB565X,
+ V4L2_PIX_FMT_BGR24,
+ V4L2_PIX_FMT_RGB24,
+ V4L2_PIX_FMT_BGR32,
+ V4L2_PIX_FMT_RGB32,
+ V4L2_PIX_FMT_GREY,
+ V4L2_PIX_FMT_YVU410,
+ V4L2_PIX_FMT_YVU420,
+ V4L2_PIX_FMT_YUYV,
+ V4L2_PIX_FMT_UYVY,
+ V4L2_PIX_FMT_YUV422P,
+ V4L2_PIX_FMT_YUV411P,
+ V4L2_PIX_FMT_Y41P,
+
+ /* two planes -- one Y, one Cr + Cb interleaved */
+ V4L2_PIX_FMT_NV12,
+ V4L2_PIX_FMT_NV21,
+
+ /* The following formats are not defined in the V4L2 specification */
+ V4L2_PIX_FMT_YUV410,
+ V4L2_PIX_FMT_YUV420,
+ V4L2_PIX_FMT_YYUV,
+ V4L2_PIX_FMT_HI240,
+
+ /* see http://www.siliconimaging.com/RGB%20Bayer.htm */
+#ifdef V4L2_PIX_FMT_SBGGR8
+ V4L2_PIX_FMT_SBGGR8,
+#endif
+
+ /* compressed formats */
+ V4L2_PIX_FMT_MJPEG,
+ V4L2_PIX_FMT_JPEG,
+ V4L2_PIX_FMT_DV,
+ V4L2_PIX_FMT_MPEG,
+
+ /* Vendor-specific formats */
+ V4L2_PIX_FMT_WNVA,
+
+#ifdef V4L2_PIX_FMT_SN9C10X
+ V4L2_PIX_FMT_SN9C10X,
+#endif
+#ifdef V4L2_PIX_FMT_PWC1
+ V4L2_PIX_FMT_PWC1,
+#endif
+#ifdef V4L2_PIX_FMT_PWC2
+ V4L2_PIX_FMT_PWC2,
+#endif
+#ifdef V4L2_PIX_FMT_YVYU
+ V4L2_PIX_FMT_YVYU,
+#endif
+};
+
+#define GST_V4L2_FORMAT_COUNT (G_N_ELEMENTS (gst_v4l2_formats))
+
+
+static struct v4l2_fmtdesc *
+gst_v4l2_object_get_format_from_fourcc (GstV4l2Object * v4l2object,
+ guint32 fourcc)
+{
+ struct v4l2_fmtdesc *fmt;
+ GSList *walk;
+
+ if (fourcc == 0)
+ return NULL;
+
+ walk = gst_v4l2_object_get_format_list (v4l2object);
+ while (walk) {
+ fmt = (struct v4l2_fmtdesc *) walk->data;
+ if (fmt->pixelformat == fourcc)
+ return fmt;
+ /* special case for jpeg */
+ if ((fmt->pixelformat == V4L2_PIX_FMT_MJPEG && fourcc == V4L2_PIX_FMT_JPEG)
+ || (fmt->pixelformat == V4L2_PIX_FMT_JPEG
+ && fourcc == V4L2_PIX_FMT_MJPEG)) {
+ return fmt;
+ }
+ walk = g_slist_next (walk);
+ }
+
+ return NULL;
+}
+
+
+
+/* complete made up ranking, the values themselves are meaningless */
+#define YUV_BASE_RANK 1000
+#define JPEG_BASE_RANK 500
+#define DV_BASE_RANK 200
+#define RGB_BASE_RANK 100
+#define YUV_ODD_BASE_RANK 50
+#define RGB_ODD_BASE_RANK 25
+#define BAYER_BASE_RANK 15
+#define S910_BASE_RANK 10
+#define GREY_BASE_RANK 5
+#define PWC_BASE_RANK 1
+
+static gint
+gst_v4l2_object_format_get_rank (guint32 fourcc)
+{
+ switch (fourcc) {
+ case V4L2_PIX_FMT_MJPEG:
+ return JPEG_BASE_RANK;
+ case V4L2_PIX_FMT_JPEG:
+ return JPEG_BASE_RANK + 1;
+
+ case V4L2_PIX_FMT_RGB332:
+ case V4L2_PIX_FMT_RGB555:
+ case V4L2_PIX_FMT_RGB555X:
+ case V4L2_PIX_FMT_RGB565:
+ case V4L2_PIX_FMT_RGB565X:
+ return RGB_ODD_BASE_RANK;
+
+ case V4L2_PIX_FMT_RGB24:
+ case V4L2_PIX_FMT_BGR24:
+ return RGB_BASE_RANK - 1;
+
+ case V4L2_PIX_FMT_RGB32:
+ case V4L2_PIX_FMT_BGR32:
+ return RGB_BASE_RANK;
+
+ case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
+ return GREY_BASE_RANK;
+
+ case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
+ case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
+ case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
+ case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
+ return YUV_ODD_BASE_RANK;
+
+ case V4L2_PIX_FMT_YVU410: /* YVU9, 9 bits per pixel */
+ return YUV_BASE_RANK + 3;
+ case V4L2_PIX_FMT_YUV410: /* YUV9, 9 bits per pixel */
+ return YUV_BASE_RANK + 2;
+ case V4L2_PIX_FMT_YUV420: /* I420, 12 bits per pixel */
+ return YUV_BASE_RANK + 7;
+ case V4L2_PIX_FMT_YUYV: /* YUY2, 16 bits per pixel */
+ return YUV_BASE_RANK + 10;
+ case V4L2_PIX_FMT_YVU420: /* YV12, 12 bits per pixel */
+ return YUV_BASE_RANK + 6;
+ case V4L2_PIX_FMT_UYVY: /* UYVY, 16 bits per pixel */
+ return YUV_BASE_RANK + 9;
+ case V4L2_PIX_FMT_Y41P: /* Y41P, 12 bits per pixel */
+ return YUV_BASE_RANK + 5;
+ case V4L2_PIX_FMT_YUV411P: /* Y41B, 12 bits per pixel */
+ return YUV_BASE_RANK + 4;
+ case V4L2_PIX_FMT_YUV422P: /* Y42B, 16 bits per pixel */
+ return YUV_BASE_RANK + 8;
+
+ case V4L2_PIX_FMT_DV:
+ return DV_BASE_RANK;
+
+ case V4L2_PIX_FMT_MPEG: /* MPEG */
+ case V4L2_PIX_FMT_WNVA: /* Winnov hw compres */
+ return 0;
+
+#ifdef V4L2_PIX_FMT_SBGGR8
+ case V4L2_PIX_FMT_SBGGR8:
+ return BAYER_BASE_RANK;
+#endif
+
+#ifdef V4L2_PIX_FMT_SN9C10X
+ case V4L2_PIX_FMT_SN9C10X:
+ return S910_BASE_RANK;
+#endif
+
+#ifdef V4L2_PIX_FMT_PWC1
+ case V4L2_PIX_FMT_PWC1:
+ return PWC_BASE_RANK;
+#endif
+#ifdef V4L2_PIX_FMT_PWC2
+ case V4L2_PIX_FMT_PWC2:
+ return PWC_BASE_RANK;
+#endif
+
+ default:
+ break;
+ }
+
+ return 0;
+}
+
+
+
+static gint
+format_cmp_func (gconstpointer a, gconstpointer b)
+{
+ guint32 pf1 = ((struct v4l2_fmtdesc *) a)->pixelformat;
+ guint32 pf2 = ((struct v4l2_fmtdesc *) b)->pixelformat;
+
+ if (pf1 == pf2)
+ return 0;
+
+ return gst_v4l2_object_format_get_rank (pf2) -
+ gst_v4l2_object_format_get_rank (pf1);
+}
+
+/******************************************************
+ * gst_v4l2_object_fill_format_list():
+ * create list of supported capture formats
+ * return value: TRUE on success, FALSE on error
+ ******************************************************/
+static gboolean
+gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object)
+{
+ gint n;
+ struct v4l2_fmtdesc *format;
+
+ GST_DEBUG_OBJECT (v4l2object->element, "getting src format enumerations");
+
+ /* format enumeration */
+ for (n = 0;; n++) {
+ format = g_new0 (struct v4l2_fmtdesc, 1);
+
+ format->index = n;
+ format->type = v4l2object->type;
+
+ if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0) {
+ if (errno == EINVAL) {
+ g_free (format);
+ break; /* end of enumeration */
+ } else {
+ goto failed;
+ }
+ }
+
+ GST_LOG_OBJECT (v4l2object->element, "index: %u", format->index);
+ GST_LOG_OBJECT (v4l2object->element, "type: %d", format->type);
+ GST_LOG_OBJECT (v4l2object->element, "flags: %08x", format->flags);
+ GST_LOG_OBJECT (v4l2object->element, "description: '%s'",
+ format->description);
+ GST_LOG_OBJECT (v4l2object->element, "pixelformat: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (format->pixelformat));
+
+ /* sort formats according to our preference; we do this, because caps
+ * are probed in the order the formats are in the list, and the order of
+ * formats in the final probed caps matters for things like fixation */
+ v4l2object->formats = g_slist_insert_sorted (v4l2object->formats, format,
+ (GCompareFunc) format_cmp_func);
+ }
+
+ GST_DEBUG_OBJECT (v4l2object->element, "got %d format(s)", n);
+
+ return TRUE;
+
+ /* ERRORS */
+failed:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Failed to enumerate possible video formats device '%s' can work with"), v4l2object->videodev), ("Failed to get number %d in pixelformat enumeration for %s. (%d - %s)", n, v4l2object->videodev, errno, g_strerror (errno)));
+ g_free (format);
+ return FALSE;
+ }
+}
+
+/**
+ * Get the list of supported capture formats, a list of
+ * <code>struct v4l2_fmtdesc</code>.
+ */
+GSList *
+gst_v4l2_object_get_format_list (GstV4l2Object * v4l2object)
+{
+ if (!v4l2object->formats)
+ gst_v4l2_object_fill_format_list (v4l2object);
+ return v4l2object->formats;
+}
+
+
+GstStructure *
+gst_v4l2_object_v4l2fourcc_to_structure (guint32 fourcc)
+{
+ GstStructure *structure = NULL;
+
+ switch (fourcc) {
+ case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
+ case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
+ structure = gst_structure_new ("image/jpeg", NULL);
+ break;
+ case V4L2_PIX_FMT_RGB332:
+ case V4L2_PIX_FMT_RGB555:
+ case V4L2_PIX_FMT_RGB555X:
+ case V4L2_PIX_FMT_RGB565:
+ case V4L2_PIX_FMT_RGB565X:
+ case V4L2_PIX_FMT_RGB24:
+ case V4L2_PIX_FMT_BGR24:
+ case V4L2_PIX_FMT_RGB32:
+ case V4L2_PIX_FMT_BGR32:{
+ guint depth = 0, bpp = 0;
+
+ gint endianness = 0;
+
+ guint32 r_mask = 0, b_mask = 0, g_mask = 0;
+
+ switch (fourcc) {
+ case V4L2_PIX_FMT_RGB332:
+ bpp = depth = 8;
+ endianness = G_BYTE_ORDER; /* 'like, whatever' */
+ r_mask = 0xe0;
+ g_mask = 0x1c;
+ b_mask = 0x03;
+ break;
+ case V4L2_PIX_FMT_RGB555:
+ case V4L2_PIX_FMT_RGB555X:
+ bpp = 16;
+ depth = 15;
+ endianness =
+ fourcc == V4L2_PIX_FMT_RGB555X ? G_BIG_ENDIAN : G_LITTLE_ENDIAN;
+ r_mask = 0x7c00;
+ g_mask = 0x03e0;
+ b_mask = 0x001f;
+ break;
+ case V4L2_PIX_FMT_RGB565:
+ case V4L2_PIX_FMT_RGB565X:
+ bpp = depth = 16;
+ endianness =
+ fourcc == V4L2_PIX_FMT_RGB565X ? G_BIG_ENDIAN : G_LITTLE_ENDIAN;
+ r_mask = 0xf800;
+ g_mask = 0x07e0;
+ b_mask = 0x001f;
+ break;
+ case V4L2_PIX_FMT_RGB24:
+ bpp = depth = 24;
+ endianness = G_BIG_ENDIAN;
+ r_mask = 0xff0000;
+ g_mask = 0x00ff00;
+ b_mask = 0x0000ff;
+ break;
+ case V4L2_PIX_FMT_BGR24:
+ bpp = depth = 24;
+ endianness = G_BIG_ENDIAN;
+ r_mask = 0x0000ff;
+ g_mask = 0x00ff00;
+ b_mask = 0xff0000;
+ break;
+ case V4L2_PIX_FMT_RGB32:
+ bpp = depth = 32;
+ endianness = G_BIG_ENDIAN;
+ r_mask = 0xff000000;
+ g_mask = 0x00ff0000;
+ b_mask = 0x0000ff00;
+ break;
+ case V4L2_PIX_FMT_BGR32:
+ bpp = depth = 32;
+ endianness = G_BIG_ENDIAN;
+ r_mask = 0x000000ff;
+ g_mask = 0x0000ff00;
+ b_mask = 0x00ff0000;
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ structure = gst_structure_new ("video/x-raw-rgb",
+ "bpp", G_TYPE_INT, bpp,
+ "depth", G_TYPE_INT, depth,
+ "red_mask", G_TYPE_INT, r_mask,
+ "green_mask", G_TYPE_INT, g_mask,
+ "blue_mask", G_TYPE_INT, b_mask,
+ "endianness", G_TYPE_INT, endianness, NULL);
+ break;
+ }
+ case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
+ structure = gst_structure_new ("video/x-raw-gray",
+ "bpp", G_TYPE_INT, 8, NULL);
+ break;
+ case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
+ case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
+ /* FIXME: get correct fourccs here */
+ break;
+ case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
+ case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
+ case V4L2_PIX_FMT_YVU410:
+ case V4L2_PIX_FMT_YUV410:
+ case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
+ case V4L2_PIX_FMT_YUYV:
+ case V4L2_PIX_FMT_YVU420:
+ case V4L2_PIX_FMT_UYVY:
+ case V4L2_PIX_FMT_Y41P:
+ case V4L2_PIX_FMT_YUV422P:
+#ifdef V4L2_PIX_FMT_YVYU
+ case V4L2_PIX_FMT_YVYU:
+#endif
+ case V4L2_PIX_FMT_YUV411P:{
+ guint32 fcc = 0;
+
+ switch (fourcc) {
+ case V4L2_PIX_FMT_NV12:
+ fcc = GST_MAKE_FOURCC ('N', 'V', '1', '2');
+ break;
+ case V4L2_PIX_FMT_NV21:
+ fcc = GST_MAKE_FOURCC ('N', 'V', '2', '1');
+ break;
+ case V4L2_PIX_FMT_YVU410:
+ fcc = GST_MAKE_FOURCC ('Y', 'V', 'U', '9');
+ break;
+ case V4L2_PIX_FMT_YUV410:
+ fcc = GST_MAKE_FOURCC ('Y', 'U', 'V', '9');
+ break;
+ case V4L2_PIX_FMT_YUV420:
+ fcc = GST_MAKE_FOURCC ('I', '4', '2', '0');
+ break;
+ case V4L2_PIX_FMT_YUYV:
+ fcc = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
+ break;
+ case V4L2_PIX_FMT_YVU420:
+ fcc = GST_MAKE_FOURCC ('Y', 'V', '1', '2');
+ break;
+ case V4L2_PIX_FMT_UYVY:
+ fcc = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
+ break;
+ case V4L2_PIX_FMT_Y41P:
+ fcc = GST_MAKE_FOURCC ('Y', '4', '1', 'P');
+ break;
+ case V4L2_PIX_FMT_YUV411P:
+ fcc = GST_MAKE_FOURCC ('Y', '4', '1', 'B');
+ break;
+ case V4L2_PIX_FMT_YUV422P:
+ fcc = GST_MAKE_FOURCC ('Y', '4', '2', 'B');
+ break;
+#ifdef V4L2_PIX_FMT_YVYU
+ case V4L2_PIX_FMT_YVYU:
+ fcc = GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U');
+ break;
+#endif
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ structure = gst_structure_new ("video/x-raw-yuv",
+ "format", GST_TYPE_FOURCC, fcc, NULL);
+ break;
+ }
+ case V4L2_PIX_FMT_DV:
+ structure =
+ gst_structure_new ("video/x-dv", "systemstream", G_TYPE_BOOLEAN, TRUE,
+ NULL);
+ break;
+ case V4L2_PIX_FMT_MPEG: /* MPEG */
+ /* someone figure out the MPEG format used... */
+ break;
+ case V4L2_PIX_FMT_WNVA: /* Winnov hw compres */
+ break;
+#ifdef V4L2_PIX_FMT_SBGGR8
+ case V4L2_PIX_FMT_SBGGR8:
+ structure = gst_structure_new ("video/x-raw-bayer", NULL);
+ break;
+#endif
+#ifdef V4L2_PIX_FMT_SN9C10X
+ case V4L2_PIX_FMT_SN9C10X:
+ structure = gst_structure_new ("video/x-sonix", NULL);
+ break;
+#endif
+#ifdef V4L2_PIX_FMT_PWC1
+ case V4L2_PIX_FMT_PWC1:
+ structure = gst_structure_new ("video/x-pwc1", NULL);
+ break;
+#endif
+#ifdef V4L2_PIX_FMT_PWC2
+ case V4L2_PIX_FMT_PWC2:
+ structure = gst_structure_new ("video/x-pwc2", NULL);
+ break;
+#endif
+ default:
+ GST_DEBUG ("Unknown fourcc 0x%08x %" GST_FOURCC_FORMAT,
+ fourcc, GST_FOURCC_ARGS (fourcc));
+ break;
+ }
+
+ return structure;
+}
+
+
+
+GstCaps *
+gst_v4l2_object_get_all_caps (void)
+{
+ static GstCaps *caps = NULL;
+
+ if (caps == NULL) {
+ GstStructure *structure;
+
+ guint i;
+
+ caps = gst_caps_new_empty ();
+ for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
+ structure = gst_v4l2_object_v4l2fourcc_to_structure (gst_v4l2_formats[i]);
+ if (structure) {
+ gst_structure_set (structure,
+ "width", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
+ "height", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
+ "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL);
+ gst_caps_append_structure (caps, structure);
+ }
+ }
+ }
+
+ return gst_caps_ref (caps);
+}
+
+
+/* collect data for the given caps
+ * @caps: given input caps
+ * @format: location for the v4l format
+ * @w/@h: location for width and height
+ * @fps_n/@fps_d: location for framerate
+ * @size: location for expected size of the frame or 0 if unknown
+ */
+gboolean
+gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps,
+ struct v4l2_fmtdesc ** format, gint * w, gint * h, guint * fps_n,
+ guint * fps_d, guint * size)
+{
+ GstStructure *structure;
+ const GValue *framerate;
+ guint32 fourcc;
+ const gchar *mimetype;
+ guint outsize;
+
+ /* default unknown values */
+ fourcc = 0;
+ outsize = 0;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ if (!gst_structure_get_int (structure, "width", w))
+ return FALSE;
+
+ if (!gst_structure_get_int (structure, "height", h))
+ return FALSE;
+
+ framerate = gst_structure_get_value (structure, "framerate");
+ if (!framerate)
+ return FALSE;
+
+ *fps_n = gst_value_get_fraction_numerator (framerate);
+ *fps_d = gst_value_get_fraction_denominator (framerate);
+
+ mimetype = gst_structure_get_name (structure);
+
+ if (!strcmp (mimetype, "video/x-raw-yuv")) {
+ gst_structure_get_fourcc (structure, "format", &fourcc);
+
+ switch (fourcc) {
+ case GST_MAKE_FOURCC ('I', '4', '2', '0'):
+ case GST_MAKE_FOURCC ('I', 'Y', 'U', 'V'):
+ fourcc = V4L2_PIX_FMT_YUV420;
+ outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h);
+ outsize += 2 * ((GST_ROUND_UP_8 (*w) / 2) * (GST_ROUND_UP_2 (*h) / 2));
+ break;
+ case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
+ fourcc = V4L2_PIX_FMT_YUYV;
+ outsize = (GST_ROUND_UP_2 (*w) * 2) * *h;
+ break;
+ case GST_MAKE_FOURCC ('Y', '4', '1', 'P'):
+ fourcc = V4L2_PIX_FMT_Y41P;
+ outsize = (GST_ROUND_UP_2 (*w) * 2) * *h;
+ break;
+ case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
+ fourcc = V4L2_PIX_FMT_UYVY;
+ outsize = (GST_ROUND_UP_2 (*w) * 2) * *h;
+ break;
+ case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
+ fourcc = V4L2_PIX_FMT_YVU420;
+ outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h);
+ outsize += 2 * ((GST_ROUND_UP_8 (*w) / 2) * (GST_ROUND_UP_2 (*h) / 2));
+ break;
+ case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
+ fourcc = V4L2_PIX_FMT_YUV411P;
+ outsize = GST_ROUND_UP_4 (*w) * *h;
+ outsize += 2 * ((GST_ROUND_UP_8 (*w) / 4) * *h);
+ break;
+ case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
+ fourcc = V4L2_PIX_FMT_YUV422P;
+ outsize = GST_ROUND_UP_4 (*w) * *h;
+ outsize += 2 * ((GST_ROUND_UP_8 (*w) / 2) * *h);
+ break;
+ case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
+ fourcc = V4L2_PIX_FMT_NV12;
+ outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h);
+ outsize += (GST_ROUND_UP_4 (*w) * *h) / 2;
+ break;
+ case GST_MAKE_FOURCC ('N', 'V', '2', '1'):
+ fourcc = V4L2_PIX_FMT_NV21;
+ outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h);
+ outsize += (GST_ROUND_UP_4 (*w) * *h) / 2;
+ break;
+#ifdef V4L2_PIX_FMT_YVYU
+ case GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'):
+ fourcc = V4L2_PIX_FMT_YVYU;
+ outsize = (GST_ROUND_UP_2 (*w) * 2) * *h;
+ break;
+#endif
+ }
+ } else if (!strcmp (mimetype, "video/x-raw-rgb")) {
+ gint depth, endianness, r_mask;
+
+ gst_structure_get_int (structure, "depth", &depth);
+ gst_structure_get_int (structure, "endianness", &endianness);
+ gst_structure_get_int (structure, "red_mask", &r_mask);
+
+ switch (depth) {
+ case 8:
+ fourcc = V4L2_PIX_FMT_RGB332;
+ break;
+ case 15:
+ fourcc = (endianness == G_LITTLE_ENDIAN) ?
+ V4L2_PIX_FMT_RGB555 : V4L2_PIX_FMT_RGB555X;
+ break;
+ case 16:
+ fourcc = (endianness == G_LITTLE_ENDIAN) ?
+ V4L2_PIX_FMT_RGB565 : V4L2_PIX_FMT_RGB565X;
+ break;
+ case 24:
+ fourcc = (r_mask == 0xFF) ? V4L2_PIX_FMT_BGR24 : V4L2_PIX_FMT_RGB24;
+ break;
+ case 32:
+ fourcc = (r_mask == 0xFF) ? V4L2_PIX_FMT_BGR32 : V4L2_PIX_FMT_RGB32;
+ break;
+ }
+ } else if (strcmp (mimetype, "video/x-dv") == 0) {
+ fourcc = V4L2_PIX_FMT_DV;
+ } else if (strcmp (mimetype, "image/jpeg") == 0) {
+ fourcc = V4L2_PIX_FMT_JPEG;
+#ifdef V4L2_PIX_FMT_SBGGR8
+ } else if (strcmp (mimetype, "video/x-raw-bayer") == 0) {
+ fourcc = V4L2_PIX_FMT_SBGGR8;
+#endif
+#ifdef V4L2_PIX_FMT_SN9C10X
+ } else if (strcmp (mimetype, "video/x-sonix") == 0) {
+ fourcc = V4L2_PIX_FMT_SN9C10X;
+#endif
+#ifdef V4L2_PIX_FMT_PWC1
+ } else if (strcmp (mimetype, "video/x-pwc1") == 0) {
+ fourcc = V4L2_PIX_FMT_PWC1;
+#endif
+#ifdef V4L2_PIX_FMT_PWC2
+ } else if (strcmp (mimetype, "video/x-pwc2") == 0) {
+ fourcc = V4L2_PIX_FMT_PWC2;
+#endif
+ } else if (strcmp (mimetype, "video/x-raw-gray") == 0) {
+ fourcc = V4L2_PIX_FMT_GREY;
+ }
+
+ if (fourcc == 0)
+ return FALSE;
+
+ *format = gst_v4l2_object_get_format_from_fourcc (v4l2object, fourcc);
+ *size = outsize;
+
+ return TRUE;
+}
+
+
+
+/* The frame interval enumeration code first appeared in Linux 2.6.19. */
+#ifdef VIDIOC_ENUM_FRAMEINTERVALS
+static GstStructure *
+gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object,
+ guint32 pixelformat,
+ guint32 width, guint32 height, const GstStructure * template)
+{
+ gint fd = v4l2object->video_fd;
+ struct v4l2_frmivalenum ival;
+ guint32 num, denom;
+ GstStructure *s;
+ GValue rates = { 0, };
+
+ memset (&ival, 0, sizeof (struct v4l2_frmivalenum));
+ ival.index = 0;
+ ival.pixel_format = pixelformat;
+ ival.width = width;
+ ival.height = height;
+
+ GST_LOG_OBJECT (v4l2object->element,
+ "get frame interval for %ux%u, %" GST_FOURCC_FORMAT, width, height,
+ GST_FOURCC_ARGS (pixelformat));
+
+ /* keep in mind that v4l2 gives us frame intervals (durations); we invert the
+ * fraction to get framerate */
+ if (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0)
+ goto enum_frameintervals_failed;
+
+ if (ival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
+ GValue rate = { 0, };
+
+ g_value_init (&rates, GST_TYPE_LIST);
+ g_value_init (&rate, GST_TYPE_FRACTION);
+
+ do {
+ num = ival.discrete.numerator;
+ denom = ival.discrete.denominator;
+
+ if (num > G_MAXINT || denom > G_MAXINT) {
+ /* let us hope we don't get here... */
+ num >>= 1;
+ denom >>= 1;
+ }
+
+ GST_LOG_OBJECT (v4l2object->element, "adding discrete framerate: %d/%d",
+ denom, num);
+
+ /* swap to get the framerate */
+ gst_value_set_fraction (&rate, denom, num);
+ gst_value_list_append_value (&rates, &rate);
+
+ ival.index++;
+ } while (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0);
+ } else if (ival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
+ GValue min = { 0, };
+ GValue step = { 0, };
+ GValue max = { 0, };
+ gboolean added = FALSE;
+ guint32 minnum, mindenom;
+ guint32 maxnum, maxdenom;
+
+ g_value_init (&rates, GST_TYPE_LIST);
+
+ g_value_init (&min, GST_TYPE_FRACTION);
+ g_value_init (&step, GST_TYPE_FRACTION);
+ g_value_init (&max, GST_TYPE_FRACTION);
+
+ /* get the min */
+ minnum = ival.stepwise.min.numerator;
+ mindenom = ival.stepwise.min.denominator;
+ if (minnum > G_MAXINT || mindenom > G_MAXINT) {
+ minnum >>= 1;
+ mindenom >>= 1;
+ }
+ GST_LOG_OBJECT (v4l2object->element, "stepwise min frame interval: %d/%d",
+ minnum, mindenom);
+ gst_value_set_fraction (&min, minnum, mindenom);
+
+ /* get the max */
+ maxnum = ival.stepwise.max.numerator;
+ maxdenom = ival.stepwise.max.denominator;
+ if (maxnum > G_MAXINT || maxdenom > G_MAXINT) {
+ maxnum >>= 1;
+ maxdenom >>= 1;
+ }
+
+ GST_LOG_OBJECT (v4l2object->element, "stepwise max frame interval: %d/%d",
+ maxnum, maxdenom);
+ gst_value_set_fraction (&max, maxnum, maxdenom);
+
+ /* get the step */
+ num = ival.stepwise.step.numerator;
+ denom = ival.stepwise.step.denominator;
+ if (num > G_MAXINT || denom > G_MAXINT) {
+ num >>= 1;
+ denom >>= 1;
+ }
+
+ if (num == 0 || denom == 0) {
+ /* in this case we have a wrong fraction or no step, set the step to max
+ * so that we only add the min value in the loop below */
+ num = maxnum;
+ denom = maxdenom;
+ }
+
+ /* since we only have gst_value_fraction_subtract and not add, negate the
+ * numerator */
+ GST_LOG_OBJECT (v4l2object->element, "stepwise step frame interval: %d/%d",
+ num, denom);
+ gst_value_set_fraction (&step, -num, denom);
+
+ while (gst_value_compare (&min, &max) <= 0) {
+ GValue rate = { 0, };
+
+ num = gst_value_get_fraction_numerator (&min);
+ denom = gst_value_get_fraction_denominator (&min);
+ GST_LOG_OBJECT (v4l2object->element, "adding stepwise framerate: %d/%d",
+ denom, num);
+
+ /* invert to get the framerate */
+ g_value_init (&rate, GST_TYPE_FRACTION);
+ gst_value_set_fraction (&rate, denom, num);
+ gst_value_list_append_value (&rates, &rate);
+ added = TRUE;
+
+ /* we're actually adding because step was negated above. This is because
+ * there is no _add function... */
+ if (!gst_value_fraction_subtract (&min, &min, &step)) {
+ GST_WARNING_OBJECT (v4l2object->element, "could not step fraction!");
+ break;
+ }
+ }
+ if (!added) {
+ /* no range was added, leave the default range from the template */
+ GST_WARNING_OBJECT (v4l2object->element,
+ "no range added, leaving default");
+ g_value_unset (&rates);
+ }
+ } else if (ival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
+ guint32 maxnum, maxdenom;
+
+ g_value_init (&rates, GST_TYPE_FRACTION_RANGE);
+
+ num = ival.stepwise.min.numerator;
+ denom = ival.stepwise.min.denominator;
+ if (num > G_MAXINT || denom > G_MAXINT) {
+ num >>= 1;
+ denom >>= 1;
+ }
+
+ maxnum = ival.stepwise.max.numerator;
+ maxdenom = ival.stepwise.max.denominator;
+ if (maxnum > G_MAXINT || maxdenom > G_MAXINT) {
+ maxnum >>= 1;
+ maxdenom >>= 1;
+ }
+
+ GST_LOG_OBJECT (v4l2object->element,
+ "continuous frame interval %d/%d to %d/%d", maxdenom, maxnum, denom,
+ num);
+
+ gst_value_set_fraction_range_full (&rates, maxdenom, maxnum, denom, num);
+ } else {
+ goto unknown_type;
+ }
+
+return_data:
+ s = gst_structure_copy (template);
+ gst_structure_set (s, "width", G_TYPE_INT, (gint) width,
+ "height", G_TYPE_INT, (gint) height, NULL);
+
+ if (G_IS_VALUE (&rates)) {
+ /* only change the framerate on the template when we have a valid probed new
+ * value */
+ gst_structure_set_value (s, "framerate", &rates);
+ g_value_unset (&rates);
+ } else {
+ gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1,
+ NULL);
+ }
+ return s;
+
+ /* ERRORS */
+enum_frameintervals_failed:
+ {
+ GST_DEBUG_OBJECT (v4l2object->element,
+ "Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u",
+ GST_FOURCC_ARGS (pixelformat), width, height);
+ goto return_data;
+ }
+unknown_type:
+ {
+ /* I don't see how this is actually an error, we ignore the format then */
+ GST_WARNING_OBJECT (v4l2object->element,
+ "Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u",
+ GST_FOURCC_ARGS (pixelformat), width, height, ival.type);
+ return NULL;
+ }
+}
+#endif /* defined VIDIOC_ENUM_FRAMEINTERVALS */
+
+#ifdef VIDIOC_ENUM_FRAMESIZES
+static gint
+sort_by_frame_size (GstStructure * s1, GstStructure * s2)
+{
+ int w1, h1, w2, h2;
+
+ gst_structure_get_int (s1, "width", &w1);
+ gst_structure_get_int (s1, "height", &h1);
+ gst_structure_get_int (s2, "width", &w2);
+ gst_structure_get_int (s2, "height", &h2);
+
+ /* I think it's safe to assume that this won't overflow for a while */
+ return ((w2 * h2) - (w1 * h1));
+}
+#endif
+
+static gboolean
+gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object,
+ guint32 pixelformat, gint * width, gint * height);
+
+GstCaps *
+gst_v4l2_object_probe_caps_for_format (GstV4l2Object * v4l2object,
+ guint32 pixelformat, const GstStructure * template)
+{
+ GstCaps *ret = gst_caps_new_empty ();
+ GstStructure *tmp;
+
+#ifdef VIDIOC_ENUM_FRAMESIZES
+ gint fd = v4l2object->video_fd;
+ struct v4l2_frmsizeenum size;
+ GList *results = NULL;
+ guint32 w, h;
+
+ memset (&size, 0, sizeof (struct v4l2_frmsizeenum));
+ size.index = 0;
+ size.pixel_format = pixelformat;
+
+ GST_DEBUG_OBJECT (v4l2object->element, "Enumerating frame sizes");
+
+ if (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0)
+ goto enum_framesizes_failed;
+
+ if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
+ do {
+ GST_LOG_OBJECT (v4l2object->element, "got discrete frame size %dx%d",
+ size.discrete.width, size.discrete.height);
+
+ w = MIN (size.discrete.width, G_MAXINT);
+ h = MIN (size.discrete.height, G_MAXINT);
+
+ if (w && h) {
+ tmp =
+ gst_v4l2_object_probe_caps_for_format_and_size (v4l2object,
+ pixelformat, w, h, template);
+
+ if (tmp)
+ results = g_list_prepend (results, tmp);
+ }
+
+ size.index++;
+ } while (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
+ GST_DEBUG_OBJECT (v4l2object->element,
+ "done iterating discrete frame sizes");
+ } else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
+ GST_DEBUG_OBJECT (v4l2object->element, "we have stepwise frame sizes:");
+ GST_DEBUG_OBJECT (v4l2object->element, "min width: %d",
+ size.stepwise.min_width);
+ GST_DEBUG_OBJECT (v4l2object->element, "min height: %d",
+ size.stepwise.min_height);
+ GST_DEBUG_OBJECT (v4l2object->element, "max width: %d",
+ size.stepwise.max_width);
+ GST_DEBUG_OBJECT (v4l2object->element, "max height: %d",
+ size.stepwise.max_height);
+ GST_DEBUG_OBJECT (v4l2object->element, "step width: %d",
+ size.stepwise.step_width);
+ GST_DEBUG_OBJECT (v4l2object->element, "step height: %d",
+ size.stepwise.step_height);
+
+ for (w = size.stepwise.min_width, h = size.stepwise.min_height;
+ w < size.stepwise.max_width && h < size.stepwise.max_height;
+ w += size.stepwise.step_width, h += size.stepwise.step_height) {
+ if (w == 0 || h == 0)
+ continue;
+
+ tmp =
+ gst_v4l2_object_probe_caps_for_format_and_size (v4l2object,
+ pixelformat, w, h, template);
+
+ if (tmp)
+ results = g_list_prepend (results, tmp);
+ }
+ GST_DEBUG_OBJECT (v4l2object->element,
+ "done iterating stepwise frame sizes");
+ } else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
+ guint32 maxw, maxh;
+
+ GST_DEBUG_OBJECT (v4l2object->element, "we have continuous frame sizes:");
+ GST_DEBUG_OBJECT (v4l2object->element, "min width: %d",
+ size.stepwise.min_width);
+ GST_DEBUG_OBJECT (v4l2object->element, "min height: %d",
+ size.stepwise.min_height);
+ GST_DEBUG_OBJECT (v4l2object->element, "max width: %d",
+ size.stepwise.max_width);
+ GST_DEBUG_OBJECT (v4l2object->element, "max height: %d",
+ size.stepwise.max_height);
+
+ w = MAX (size.stepwise.min_width, 1);
+ h = MAX (size.stepwise.min_height, 1);
+ maxw = MIN (size.stepwise.max_width, G_MAXINT);
+ maxh = MIN (size.stepwise.max_height, G_MAXINT);
+
+ tmp =
+ gst_v4l2_object_probe_caps_for_format_and_size (v4l2object, pixelformat,
+ w, h, template);
+ if (tmp) {
+ gst_structure_set (tmp, "width", GST_TYPE_INT_RANGE, (gint) w,
+ (gint) maxw, "height", GST_TYPE_INT_RANGE, (gint) h, (gint) maxh,
+ NULL);
+
+ /* no point using the results list here, since there's only one struct */
+ gst_caps_append_structure (ret, tmp);
+ }
+ } else {
+ goto unknown_type;
+ }
+
+ /* we use an intermediary list to store and then sort the results of the
+ * probing because we can't make any assumptions about the order in which
+ * the driver will give us the sizes, but we want the final caps to contain
+ * the results starting with the highest resolution and having the lowest
+ * resolution last, since order in caps matters for things like fixation. */
+ results = g_list_sort (results, (GCompareFunc) sort_by_frame_size);
+ while (results != NULL) {
+ gst_caps_append_structure (ret, GST_STRUCTURE (results->data));
+ results = g_list_delete_link (results, results);
+ }
+
+ if (gst_caps_is_empty (ret))
+ goto enum_framesizes_no_results;
+
+ return ret;
+
+ /* ERRORS */
+enum_framesizes_failed:
+ {
+ /* I don't see how this is actually an error */
+ GST_DEBUG_OBJECT (v4l2object->element,
+ "Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT
+ " (%s)", GST_FOURCC_ARGS (pixelformat), g_strerror (errno));
+ goto default_frame_sizes;
+ }
+enum_framesizes_no_results:
+ {
+ /* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in
+ * question doesn't actually support it yet */
+ GST_DEBUG_OBJECT (v4l2object->element,
+ "No results for pixelformat %" GST_FOURCC_FORMAT
+ " enumerating frame sizes, trying fallback",
+ GST_FOURCC_ARGS (pixelformat));
+ goto default_frame_sizes;
+ }
+unknown_type:
+ {
+    GST_WARNING_OBJECT (v4l2object->element,
+        "Unknown frame size enum type for pixelformat %" GST_FOURCC_FORMAT
+        ": %u", GST_FOURCC_ARGS (pixelformat), size.type);
+ goto default_frame_sizes;
+ }
+default_frame_sizes:
+#endif /* defined VIDIOC_ENUM_FRAMESIZES */
+ {
+ gint min_w, max_w, min_h, max_h, fix_num = 0, fix_denom = 0;
+
+ /* This code is for Linux < 2.6.19 */
+ min_w = min_h = 1;
+ max_w = max_h = GST_V4L2_MAX_SIZE;
+ if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &min_w,
+ &min_h)) {
+ GST_WARNING_OBJECT (v4l2object->element,
+ "Could not probe minimum capture size for pixelformat %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
+ }
+ if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &max_w,
+ &max_h)) {
+ GST_WARNING_OBJECT (v4l2object->element,
+ "Could not probe maximum capture size for pixelformat %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
+ }
+
+ /* Since we can't get framerate directly, try to use the current norm */
+ if (v4l2object->norm && v4l2object->norms) {
+ GList *norms;
+ GstTunerNorm *norm;
+
+ for (norms = v4l2object->norms; norms != NULL; norms = norms->next) {
+ norm = (GstTunerNorm *) norms->data;
+ if (!strcmp (norm->label, v4l2object->norm))
+ break;
+ }
+ /* If it's possible, set framerate to that (discrete) value */
+ if (norm) {
+ fix_num = gst_value_get_fraction_numerator (&norm->framerate);
+ fix_denom = gst_value_get_fraction_denominator (&norm->framerate);
+ }
+ }
+
+ tmp = gst_structure_copy (template);
+ if (fix_num) {
+ gst_structure_set (tmp,
+ "width", GST_TYPE_INT_RANGE, min_w, max_w,
+ "height", GST_TYPE_INT_RANGE, min_h, max_h,
+ "framerate", GST_TYPE_FRACTION, fix_num, fix_denom, NULL);
+ } else {
+ /* if norm can't be used, copy the template framerate */
+ gst_structure_set (tmp,
+ "width", GST_TYPE_INT_RANGE, min_w, max_w,
+ "height", GST_TYPE_INT_RANGE, min_h, max_h,
+ "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL);
+ }
+ gst_caps_append_structure (ret, tmp);
+
+ return ret;
+ }
+}
+
+static gboolean
+gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object,
+ guint32 pixelformat, gint * width, gint * height)
+{
+ struct v4l2_format fmt;
+ int fd;
+ int r;
+
+ g_return_val_if_fail (width != NULL, FALSE);
+ g_return_val_if_fail (height != NULL, FALSE);
+
+ GST_LOG_OBJECT (v4l2object->element,
+ "getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT,
+ *width, *height, GST_FOURCC_ARGS (pixelformat));
+
+ fd = v4l2object->video_fd;
+
+ /* get size delimiters */
+ memset (&fmt, 0, sizeof (fmt));
+ fmt.type = v4l2object->type;
+ fmt.fmt.pix.width = *width;
+ fmt.fmt.pix.height = *height;
+ fmt.fmt.pix.pixelformat = pixelformat;
+ fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
+
+ r = v4l2_ioctl (fd, VIDIOC_TRY_FMT, &fmt);
+ if (r < 0 && errno == EINVAL) {
+ /* try again with progressive video */
+ fmt.fmt.pix.width = *width;
+ fmt.fmt.pix.height = *height;
+ fmt.fmt.pix.pixelformat = pixelformat;
+ fmt.fmt.pix.field = V4L2_FIELD_NONE;
+ r = v4l2_ioctl (fd, VIDIOC_TRY_FMT, &fmt);
+ }
+
+ if (r < 0) {
+ /* The driver might not implement TRY_FMT, in which case we will try
+ S_FMT to probe */
+ if (errno != ENOTTY)
+ return FALSE;
+
+ /* Only try S_FMT if we're not actively capturing yet, which we shouldn't
+ be, because we're still probing */
+ if (GST_V4L2_IS_ACTIVE (v4l2object))
+ return FALSE;
+
+ GST_LOG_OBJECT (v4l2object->element,
+ "Failed to probe size limit with VIDIOC_TRY_FMT, trying VIDIOC_S_FMT");
+
+ fmt.fmt.pix.width = *width;
+ fmt.fmt.pix.height = *height;
+
+ r = v4l2_ioctl (fd, VIDIOC_S_FMT, &fmt);
+ if (r < 0 && errno == EINVAL) {
+ /* try again with progressive video */
+ fmt.fmt.pix.width = *width;
+ fmt.fmt.pix.height = *height;
+ fmt.fmt.pix.pixelformat = pixelformat;
+ fmt.fmt.pix.field = V4L2_FIELD_NONE;
+ r = v4l2_ioctl (fd, VIDIOC_S_FMT, &fmt);
+ }
+
+ if (r < 0)
+ return FALSE;
+ }
+
+ GST_LOG_OBJECT (v4l2object->element,
+ "got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
+
+ *width = fmt.fmt.pix.width;
+ *height = fmt.fmt.pix.height;
+
+ return TRUE;
+}
+
+
+gboolean
+gst_v4l2_object_set_format (GstV4l2Object * v4l2object, guint32 pixelformat,
+ guint32 width, guint32 height)
+{
+ gint fd = v4l2object->video_fd;
+ struct v4l2_format format;
+
+ GST_DEBUG_OBJECT (v4l2object->element, "Setting format to %dx%d, format "
+ "%" GST_FOURCC_FORMAT, width, height, GST_FOURCC_ARGS (pixelformat));
+
+ GST_V4L2_CHECK_OPEN (v4l2object);
+ GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
+
+ memset (&format, 0x00, sizeof (struct v4l2_format));
+ format.type = v4l2object->type;
+
+ if (v4l2_ioctl (fd, VIDIOC_G_FMT, &format) < 0)
+ goto get_fmt_failed;
+
+ format.type = v4l2object->type;
+ format.fmt.pix.width = width;
+ format.fmt.pix.height = height;
+ format.fmt.pix.pixelformat = pixelformat;
+ /* request whole frames; change when gstreamer supports interlaced video
+ * (INTERLACED mode returns frames where the fields have already been
+ * combined, there are other modes for requesting fields individually) */
+ format.fmt.pix.field = V4L2_FIELD_INTERLACED;
+
+ if (v4l2_ioctl (fd, VIDIOC_S_FMT, &format) < 0) {
+ if (errno != EINVAL)
+ goto set_fmt_failed;
+
+ GST_DEBUG_OBJECT (v4l2object->element, "trying again...");
+
+ /* try again with progressive video */
+ format.fmt.pix.width = width;
+ format.fmt.pix.height = height;
+ format.fmt.pix.pixelformat = pixelformat;
+ format.fmt.pix.field = V4L2_FIELD_NONE;
+ if (v4l2_ioctl (fd, VIDIOC_S_FMT, &format) < 0)
+ goto set_fmt_failed;
+ }
+
+ if (format.fmt.pix.width != width || format.fmt.pix.height != height)
+ goto invalid_dimensions;
+
+ if (format.fmt.pix.pixelformat != pixelformat)
+ goto invalid_pixelformat;
+
+ return TRUE;
+
+ /* ERRORS */
+get_fmt_failed:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Device '%s' does not support video capture"),
+ v4l2object->videodev),
+ ("Call to G_FMT failed: (%s)", g_strerror (errno)));
+ return FALSE;
+ }
+set_fmt_failed:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Device '%s' cannot capture at %dx%d"),
+ v4l2object->videodev, width, height),
+ ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
+ GST_FOURCC_ARGS (pixelformat), width, height, g_strerror (errno)));
+ return FALSE;
+ }
+invalid_dimensions:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Device '%s' cannot capture at %dx%d"),
+ v4l2object->videodev, width, height),
+ ("Tried to capture at %dx%d, but device returned size %dx%d",
+ width, height, format.fmt.pix.width, format.fmt.pix.height));
+ return FALSE;
+ }
+invalid_pixelformat:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Device '%s' cannot capture in the specified format"),
+ v4l2object->videodev),
+ ("Tried to capture in %" GST_FOURCC_FORMAT
+ ", but device returned format" " %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (pixelformat),
+ GST_FOURCC_ARGS (format.fmt.pix.pixelformat)));
+ return FALSE;
+ }
+}
+
+gboolean
+gst_v4l2_object_start_streaming (GstV4l2Object * v4l2object)
+{
+ if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_STREAMON,
+ &(v4l2object->type)) < 0) {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, OPEN_READ,
+ (_("Error starting streaming on device '%s'."), v4l2object->videodev),
+ GST_ERROR_SYSTEM);
+ return FALSE;
+ }
+ return TRUE;
+}
+
+gboolean
+gst_v4l2_object_stop_streaming (GstV4l2Object * v4l2object)
+{
+ if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_STREAMOFF,
+ &(v4l2object->type)) < 0) {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, OPEN_READ,
+ (_("Error stopping streaming on device '%s'."), v4l2object->videodev),
+ GST_ERROR_SYSTEM);
+ return FALSE;
+ }
+ return TRUE;
+}
diff --git a/sys/v4l2/gstv4l2object.h b/sys/v4l2/gstv4l2object.h
index 88d4eeeb..4aac3a51 100644
--- a/sys/v4l2/gstv4l2object.h
+++ b/sys/v4l2/gstv4l2object.h
@@ -51,6 +51,16 @@
#include <gst/interfaces/propertyprobe.h>
+
+/* size of v4l2 buffer pool in streaming case */
+#define GST_V4L2_MAX_BUFFERS 16
+#define GST_V4L2_MIN_BUFFERS 1
+
+/* max frame width/height */
+#define GST_V4L2_MAX_SIZE (1<<15) /* 2^15 == 32768 */
+
+
+
G_BEGIN_DECLS
#define GST_V4L2_OBJECT(obj) (GstV4l2Object *)(obj)
@@ -77,6 +87,8 @@ struct _GstV4l2Object {
/* the video buffer (mmap()'ed) */
guint8 **buffer;
+ enum v4l2_buf_type type; /* V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_BUF_TYPE_VIDEO_OUTPUT */
+
/* the video device's capabilities */
struct v4l2_capability vcap;
@@ -87,6 +99,8 @@ struct _GstV4l2Object {
struct v4l2_input vinput;
/* lists... */
+ GSList *formats; /* list of available capture formats */
+
GList *colors;
GList *norms;
GList *channels;
@@ -121,13 +135,16 @@ GType gst_v4l2_object_get_type (void);
/* create/destroy */
GstV4l2Object * gst_v4l2_object_new (GstElement * element,
+ enum v4l2_buf_type type,
+ char *default_device,
GstV4l2GetInOutFunction get_in_out_func,
GstV4l2SetInOutFunction set_in_out_func,
GstV4l2UpdateFpsFunction update_fps_func);
void gst_v4l2_object_destroy (GstV4l2Object * v4l2object);
/* properties */
-void gst_v4l2_object_install_properties_helper (GObjectClass *gobject_class);
+
+void gst_v4l2_object_install_properties_helper (GObjectClass *gobject_class, const char *default_device);
gboolean gst_v4l2_object_set_property_helper (GstV4l2Object *v4l2object,
guint prop_id, const GValue * value,
@@ -152,6 +169,26 @@ GValueArray* gst_v4l2_probe_get_values (GstPropertyProbe * probe, guint pro
const GParamSpec * pspec,
GList ** klass_devices);
+GstCaps* gst_v4l2_object_probe_caps_for_format (GstV4l2Object *v4l2object, guint32 pixelformat,
+ const GstStructure * template);
+
+gboolean gst_v4l2_object_get_caps_info (GstV4l2Object *v4l2object, GstCaps *caps,
+ struct v4l2_fmtdesc **format, gint *w, gint *h,
+ guint *fps_n, guint *fps_d, guint *size);
+
+
+GSList* gst_v4l2_object_get_format_list (GstV4l2Object *v4l2object);
+
+GstCaps* gst_v4l2_object_get_all_caps (void);
+
+GstStructure* gst_v4l2_object_v4l2fourcc_to_structure (guint32 fourcc);
+
+gboolean gst_v4l2_object_set_format (GstV4l2Object *v4l2object, guint32 pixelformat, guint32 width, guint32 height);
+
+gboolean gst_v4l2_object_start_streaming (GstV4l2Object *v4l2object);
+gboolean gst_v4l2_object_stop_streaming (GstV4l2Object *v4l2object);
+
+
#define GST_IMPLEMENT_V4L2_PROBE_METHODS(Type_Class, interface_as_function) \
\
static void \
diff --git a/sys/v4l2/gstv4l2sink.c b/sys/v4l2/gstv4l2sink.c
new file mode 100644
index 00000000..42b4260d
--- /dev/null
+++ b/sys/v4l2/gstv4l2sink.c
@@ -0,0 +1,711 @@
+/* GStreamer
+ *
+ * Copyright (C) 2009 Texas Instruments, Inc - http://www.ti.com/
+ *
+ * Description: V4L2 sink element
+ * Created on: Jul 2, 2009
+ * Author: Rob Clark <rob@ti.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:element-v4l2sink
+ *
+ * v4l2sink can be used to display video to v4l2 devices (screen overlays
+ * provided by the graphics hardware, tv-out, etc)
+ *
+ * <refsect2>
+ * <title>Example launch lines</title>
+ * |[
+ * gst-launch videotestsrc ! v4l2sink device=/dev/video1
+ * ]| This pipeline displays a test pattern on /dev/video1
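+ * |[
+ * gst-launch videotestsrc ! v4l2sink device=/dev/video1 overlay-top=100 overlay-left=100
+ * ]| Hypothetical pipeline that positions the overlay window 100 pixels from
+ * the top-left corner of the screen, assuming the driver supports a
+ * configurable overlay window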
+ * </refsect2>
+ */
+
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+
+#include "gstv4l2colorbalance.h"
+#if 0 /* overlay is still not implemented #ifdef HAVE_XVIDEO */
+#include "gstv4l2xoverlay.h"
+#endif
+#include "gstv4l2vidorient.h"
+
+#include "gstv4l2sink.h"
+#include "gst/gst-i18n-plugin.h"
+
+#include <string.h>
+
+
+static const GstElementDetails gst_v4l2sink_details =
+GST_ELEMENT_DETAILS ("Video (video4linux2) Sink",
+    "Sink/Video",
+    "Displays frames on a video4linux2 device",
+    "Rob Clark <rob@ti.com>");
+
+GST_DEBUG_CATEGORY (v4l2sink_debug);
+#define GST_CAT_DEFAULT v4l2sink_debug
+
+#define PROP_DEF_QUEUE_SIZE 8
+#define DEFAULT_PROP_DEVICE "/dev/video1"
+
+enum
+{
+ PROP_0,
+ V4L2_STD_OBJECT_PROPS,
+ PROP_QUEUE_SIZE,
+ PROP_OVERLAY_TOP,
+ PROP_OVERLAY_LEFT,
+ PROP_OVERLAY_WIDTH,
+ PROP_OVERLAY_HEIGHT,
+};
+
+
+GST_IMPLEMENT_V4L2_PROBE_METHODS (GstV4l2SinkClass, gst_v4l2sink);
+GST_IMPLEMENT_V4L2_COLOR_BALANCE_METHODS (GstV4l2Sink, gst_v4l2sink);
+#if 0 /* overlay is still not implemented #ifdef HAVE_XVIDEO */
+GST_IMPLEMENT_V4L2_XOVERLAY_METHODS (GstV4l2Sink, gst_v4l2sink);
+#endif
+GST_IMPLEMENT_V4L2_VIDORIENT_METHODS (GstV4l2Sink, gst_v4l2sink);
+
+static gboolean
+gst_v4l2sink_iface_supported (GstImplementsInterface * iface, GType iface_type)
+{
+ GstV4l2Object *v4l2object = GST_V4L2SINK (iface)->v4l2object;
+
+#if 0 /* overlay is still not implemented #ifdef HAVE_XVIDEO */
+ g_assert (iface_type == GST_TYPE_X_OVERLAY ||
+ iface_type == GST_TYPE_COLOR_BALANCE ||
+ iface_type == GST_TYPE_VIDEO_ORIENTATION);
+#else
+ g_assert (iface_type == GST_TYPE_COLOR_BALANCE ||
+ iface_type == GST_TYPE_VIDEO_ORIENTATION);
+#endif
+
+ if (v4l2object->video_fd == -1)
+ return FALSE;
+
+#if 0 /* overlay is still not implemented #ifdef HAVE_XVIDEO */
+ if (iface_type == GST_TYPE_X_OVERLAY && !GST_V4L2_IS_OVERLAY (v4l2object))
+ return FALSE;
+#endif
+
+ return TRUE;
+}
+
+static void
+gst_v4l2sink_interface_init (GstImplementsInterfaceClass * klass)
+{
+ /*
+ * default virtual functions
+ */
+ klass->supported = gst_v4l2sink_iface_supported;
+}
+
+void
+gst_v4l2sink_init_interfaces (GType type)
+{
+ static const GInterfaceInfo v4l2iface_info = {
+ (GInterfaceInitFunc) gst_v4l2sink_interface_init,
+ NULL,
+ NULL,
+ };
+#if 0 /* overlay is still not implemented #ifdef HAVE_XVIDEO */
+ static const GInterfaceInfo v4l2_xoverlay_info = {
+ (GInterfaceInitFunc) gst_v4l2sink_xoverlay_interface_init,
+ NULL,
+ NULL,
+ };
+#endif
+ static const GInterfaceInfo v4l2_colorbalance_info = {
+ (GInterfaceInitFunc) gst_v4l2sink_color_balance_interface_init,
+ NULL,
+ NULL,
+ };
+ static const GInterfaceInfo v4l2_videoorientation_info = {
+ (GInterfaceInitFunc) gst_v4l2sink_video_orientation_interface_init,
+ NULL,
+ NULL,
+ };
+ static const GInterfaceInfo v4l2_propertyprobe_info = {
+ (GInterfaceInitFunc) gst_v4l2sink_property_probe_interface_init,
+ NULL,
+ NULL,
+ };
+
+ g_type_add_interface_static (type,
+ GST_TYPE_IMPLEMENTS_INTERFACE, &v4l2iface_info);
+#if 0 /* overlay is still not implemented #ifdef HAVE_XVIDEO */
+ g_type_add_interface_static (type, GST_TYPE_X_OVERLAY, &v4l2_xoverlay_info);
+#endif
+ g_type_add_interface_static (type,
+ GST_TYPE_COLOR_BALANCE, &v4l2_colorbalance_info);
+ g_type_add_interface_static (type,
+ GST_TYPE_VIDEO_ORIENTATION, &v4l2_videoorientation_info);
+ g_type_add_interface_static (type, GST_TYPE_PROPERTY_PROBE,
+ &v4l2_propertyprobe_info);
+}
+
+
+GST_BOILERPLATE_FULL (GstV4l2Sink, gst_v4l2sink, GstVideoSink,
+ GST_TYPE_VIDEO_SINK, gst_v4l2sink_init_interfaces);
+
+
+static void gst_v4l2sink_dispose (GObject * object);
+static void gst_v4l2sink_finalize (GstV4l2Sink * v4l2sink);
+
+/* GObject methods: */
+static void gst_v4l2sink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_v4l2sink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+
+/* GstElement methods: */
+static GstStateChangeReturn gst_v4l2sink_change_state (GstElement * element,
+ GstStateChange transition);
+
+/* GstBaseSink methods: */
+static GstCaps *gst_v4l2sink_get_caps (GstBaseSink * bsink);
+static gboolean gst_v4l2sink_set_caps (GstBaseSink * bsink, GstCaps * caps);
+static GstFlowReturn gst_v4l2sink_buffer_alloc (GstBaseSink * bsink,
+ guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf);
+static GstFlowReturn gst_v4l2sink_show_frame (GstBaseSink * bsink,
+ GstBuffer * buf);
+
+
+static void
+gst_v4l2sink_base_init (gpointer g_class)
+{
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
+ GstV4l2SinkClass *gstv4l2sink_class = GST_V4L2SINK_CLASS (g_class);
+
+ gstv4l2sink_class->v4l2_class_devices = NULL;
+
+ GST_DEBUG_CATEGORY_INIT (v4l2sink_debug, "v4l2sink", 0, "V4L2 sink element");
+
+ gst_element_class_set_details (gstelement_class, &gst_v4l2sink_details);
+
+ gst_element_class_add_pad_template
+ (gstelement_class,
+ gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
+ gst_v4l2_object_get_all_caps ()));
+}
+
+static void
+gst_v4l2sink_class_init (GstV4l2SinkClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstBaseSinkClass *basesink_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ element_class = GST_ELEMENT_CLASS (klass);
+ basesink_class = GST_BASE_SINK_CLASS (klass);
+
+ gobject_class->dispose = gst_v4l2sink_dispose;
+ gobject_class->finalize = (GObjectFinalizeFunc) gst_v4l2sink_finalize;
+ gobject_class->set_property = gst_v4l2sink_set_property;
+ gobject_class->get_property = gst_v4l2sink_get_property;
+
+ element_class->change_state = gst_v4l2sink_change_state;
+
+ gst_v4l2_object_install_properties_helper (gobject_class,
+ DEFAULT_PROP_DEVICE);
+ g_object_class_install_property (gobject_class, PROP_QUEUE_SIZE,
+      g_param_spec_uint ("queue-size", "Queue size",
+          "Number of buffers to be enqueued in the driver in streaming mode",
+ GST_V4L2_MIN_BUFFERS, GST_V4L2_MAX_BUFFERS, PROP_DEF_QUEUE_SIZE,
+ G_PARAM_READWRITE));
+ g_object_class_install_property (gobject_class, PROP_OVERLAY_TOP,
+ g_param_spec_int ("overlay-top", "Overlay top",
+ "The topmost (y) coordinate of the video overlay; top left corner of screen is 0,0",
+ 0x80000000, 0x7fffffff, 0, G_PARAM_READWRITE));
+ g_object_class_install_property (gobject_class, PROP_OVERLAY_LEFT,
+ g_param_spec_int ("overlay-left", "Overlay left",
+ "The leftmost (x) coordinate of the video overlay; top left corner of screen is 0,0",
+ 0x80000000, 0x7fffffff, 0, G_PARAM_READWRITE));
+ g_object_class_install_property (gobject_class, PROP_OVERLAY_WIDTH,
+ g_param_spec_uint ("overlay-width", "Overlay width",
+ "The width of the video overlay; default is equal to negotiated image width",
+ 0, 0xffffffff, 0, G_PARAM_READWRITE));
+ g_object_class_install_property (gobject_class, PROP_OVERLAY_HEIGHT,
+ g_param_spec_uint ("overlay-height", "Overlay height",
+ "The height of the video overlay; default is equal to negotiated image height",
+ 0, 0xffffffff, 0, G_PARAM_READWRITE));
+
+ basesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_v4l2sink_get_caps);
+ basesink_class->set_caps = GST_DEBUG_FUNCPTR (gst_v4l2sink_set_caps);
+ basesink_class->buffer_alloc = GST_DEBUG_FUNCPTR (gst_v4l2sink_buffer_alloc);
+ basesink_class->preroll = GST_DEBUG_FUNCPTR (gst_v4l2sink_show_frame);
+ basesink_class->render = GST_DEBUG_FUNCPTR (gst_v4l2sink_show_frame);
+}
+
+static void
+gst_v4l2sink_init (GstV4l2Sink * v4l2sink, GstV4l2SinkClass * klass)
+{
+ v4l2sink->v4l2object = gst_v4l2_object_new (GST_ELEMENT (v4l2sink),
+ V4L2_BUF_TYPE_VIDEO_OUTPUT, DEFAULT_PROP_DEVICE,
+ gst_v4l2_get_input, gst_v4l2_set_input, NULL);
+
+  /* the default device used for v4l2src/capture is not a sensible default
+   * for a video output device, so set a saner one here (it can still be
+   * overridden by whoever creates the v4l2sink after the constructor
+   * returns)
+   */
+ g_object_set (v4l2sink, "device", "/dev/video1", NULL);
+
+ /* number of buffers requested */
+ v4l2sink->num_buffers = PROP_DEF_QUEUE_SIZE;
+
+ v4l2sink->probed_caps = NULL;
+ v4l2sink->current_caps = NULL;
+
+ v4l2sink->overlay_fields_set = 0;
+ v4l2sink->state = 0;
+}
+
+
+static void
+gst_v4l2sink_dispose (GObject * object)
+{
+ GstV4l2Sink *v4l2sink = GST_V4L2SINK (object);
+
+ if (v4l2sink->probed_caps) {
+ gst_caps_unref (v4l2sink->probed_caps);
+ }
+
+ if (v4l2sink->current_caps) {
+ gst_caps_unref (v4l2sink->current_caps);
+ }
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+
+static void
+gst_v4l2sink_finalize (GstV4l2Sink * v4l2sink)
+{
+ gst_v4l2_object_destroy (v4l2sink->v4l2object);
+
+ G_OBJECT_CLASS (parent_class)->finalize ((GObject *) (v4l2sink));
+}
+
+
+/**
+ * State values
+ */
+enum
+{
+ STATE_OFF = 0,
+ STATE_PENDING_STREAMON,
+ STATE_STREAMING
+};
+
+/**
+ * flags to indicate which overlay properties the user has set (and therefore
+ * which ones should override the defaults from the driver)
+ */
+enum
+{
+ OVERLAY_TOP_SET = 0x01,
+ OVERLAY_LEFT_SET = 0x02,
+ OVERLAY_WIDTH_SET = 0x04,
+ OVERLAY_HEIGHT_SET = 0x08
+};
+
+static void
+gst_v4l2sink_sync_overlay_fields (GstV4l2Sink * v4l2sink)
+{
+ if (GST_V4L2_IS_OPEN (v4l2sink->v4l2object)) {
+
+ gint fd = v4l2sink->v4l2object->video_fd;
+ struct v4l2_format format;
+
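+    /* read the current overlay window from the driver, override only the
+     * fields the user has set through properties, write the result back and
+     * cache the (possibly adjusted) window in v4l2sink->overlay */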
+ memset (&format, 0x00, sizeof (struct v4l2_format));
+ format.type = V4L2_BUF_TYPE_VIDEO_OVERLAY;
+
+ g_return_if_fail (v4l2_ioctl (fd, VIDIOC_G_FMT, &format) >= 0);
+
+ if (v4l2sink->overlay_fields_set) {
+ if (v4l2sink->overlay_fields_set & OVERLAY_TOP_SET)
+ format.fmt.win.w.top = v4l2sink->overlay.top;
+ if (v4l2sink->overlay_fields_set & OVERLAY_LEFT_SET)
+ format.fmt.win.w.left = v4l2sink->overlay.left;
+ if (v4l2sink->overlay_fields_set & OVERLAY_WIDTH_SET)
+ format.fmt.win.w.width = v4l2sink->overlay.width;
+ if (v4l2sink->overlay_fields_set & OVERLAY_HEIGHT_SET)
+ format.fmt.win.w.height = v4l2sink->overlay.height;
+
+ g_return_if_fail (v4l2_ioctl (fd, VIDIOC_S_FMT, &format) >= 0);
+ v4l2sink->overlay_fields_set = 0;
+ }
+
+ v4l2sink->overlay = format.fmt.win.w;
+ }
+}
+
+
+static void
+gst_v4l2sink_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstV4l2Sink *v4l2sink = GST_V4L2SINK (object);
+
+ if (!gst_v4l2_object_set_property_helper (v4l2sink->v4l2object,
+ prop_id, value, pspec)) {
+ switch (prop_id) {
+ case PROP_QUEUE_SIZE:
+ v4l2sink->num_buffers = g_value_get_uint (value);
+ break;
+ case PROP_OVERLAY_TOP:
+ v4l2sink->overlay.top = g_value_get_int (value);
+ v4l2sink->overlay_fields_set |= OVERLAY_TOP_SET;
+ gst_v4l2sink_sync_overlay_fields (v4l2sink);
+ break;
+ case PROP_OVERLAY_LEFT:
+ v4l2sink->overlay.left = g_value_get_int (value);
+ v4l2sink->overlay_fields_set |= OVERLAY_LEFT_SET;
+ gst_v4l2sink_sync_overlay_fields (v4l2sink);
+ break;
+ case PROP_OVERLAY_WIDTH:
+ v4l2sink->overlay.width = g_value_get_uint (value);
+ v4l2sink->overlay_fields_set |= OVERLAY_WIDTH_SET;
+ gst_v4l2sink_sync_overlay_fields (v4l2sink);
+ break;
+ case PROP_OVERLAY_HEIGHT:
+ v4l2sink->overlay.height = g_value_get_uint (value);
+ v4l2sink->overlay_fields_set |= OVERLAY_HEIGHT_SET;
+ gst_v4l2sink_sync_overlay_fields (v4l2sink);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+}
+
+
+static void
+gst_v4l2sink_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstV4l2Sink *v4l2sink = GST_V4L2SINK (object);
+
+ if (!gst_v4l2_object_get_property_helper (v4l2sink->v4l2object,
+ prop_id, value, pspec)) {
+ switch (prop_id) {
+ case PROP_QUEUE_SIZE:
+ g_value_set_uint (value, v4l2sink->num_buffers);
+ break;
+ case PROP_OVERLAY_TOP:
+ g_value_set_int (value, v4l2sink->overlay.top);
+ break;
+ case PROP_OVERLAY_LEFT:
+ g_value_set_int (value, v4l2sink->overlay.left);
+ break;
+ case PROP_OVERLAY_WIDTH:
+ g_value_set_uint (value, v4l2sink->overlay.width);
+ break;
+ case PROP_OVERLAY_HEIGHT:
+ g_value_set_uint (value, v4l2sink->overlay.height);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+}
+
+static GstStateChangeReturn
+gst_v4l2sink_change_state (GstElement * element, GstStateChange transition)
+{
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+ GstV4l2Sink *v4l2sink = GST_V4L2SINK (element);
+
+ GST_DEBUG_OBJECT (v4l2sink, "%d -> %d",
+ GST_STATE_TRANSITION_CURRENT (transition),
+ GST_STATE_TRANSITION_NEXT (transition));
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ /* open the device */
+ if (!gst_v4l2_object_start (v4l2sink->v4l2object))
+ return GST_STATE_CHANGE_FAILURE;
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ if (v4l2sink->state == STATE_STREAMING) {
+ if (!gst_v4l2_object_stop_streaming (v4l2sink->v4l2object)) {
+ return GST_STATE_CHANGE_FAILURE;
+ }
+ v4l2sink->state = STATE_OFF;
+ }
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ /* close the device */
+ if (!gst_v4l2_object_stop (v4l2sink->v4l2object))
+ return GST_STATE_CHANGE_FAILURE;
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+
+static GstCaps *
+gst_v4l2sink_get_caps (GstBaseSink * bsink)
+{
+ GstV4l2Sink *v4l2sink = GST_V4L2SINK (bsink);
+ GstCaps *ret;
+ GSList *walk;
+ GSList *formats;
+
+ if (!GST_V4L2_IS_OPEN (v4l2sink->v4l2object)) {
+ /* FIXME: copy? */
+ GST_DEBUG_OBJECT (v4l2sink, "device is not open");
+ return
+ gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SINK_PAD
+ (v4l2sink)));
+ }
+
+ if (v4l2sink->probed_caps) {
+ LOG_CAPS (v4l2sink, v4l2sink->probed_caps);
+ return gst_caps_ref (v4l2sink->probed_caps);
+ }
+
+ formats = gst_v4l2_object_get_format_list (v4l2sink->v4l2object);
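+  /* this also fills v4l2object->formats, which is walked below to build the
+   * probed caps */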
+
+ ret = gst_caps_new_empty ();
+
+ for (walk = v4l2sink->v4l2object->formats; walk; walk = walk->next) {
+ struct v4l2_fmtdesc *format;
+
+ GstStructure *template;
+
+ format = (struct v4l2_fmtdesc *) walk->data;
+
+ template = gst_v4l2_object_v4l2fourcc_to_structure (format->pixelformat);
+
+ if (template) {
+ GstCaps *tmp;
+
+ tmp =
+ gst_v4l2_object_probe_caps_for_format (v4l2sink->v4l2object,
+ format->pixelformat, template);
+ if (tmp)
+ gst_caps_append (ret, tmp);
+
+ gst_structure_free (template);
+ } else {
+ GST_DEBUG_OBJECT (v4l2sink, "unknown format %u", format->pixelformat);
+ }
+ }
+
+ v4l2sink->probed_caps = gst_caps_ref (ret);
+
+ GST_INFO_OBJECT (v4l2sink, "probed caps: %p", ret);
+ LOG_CAPS (v4l2sink, ret);
+
+ return ret;
+}
+
+static gboolean
+gst_v4l2sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
+{
+ GstV4l2Sink *v4l2sink = GST_V4L2SINK (bsink);
+ gint w = 0, h = 0;
+ struct v4l2_fmtdesc *format;
+ guint fps_n, fps_d;
+ guint size;
+
+ LOG_CAPS (v4l2sink, caps);
+
+ if (!GST_V4L2_IS_OPEN (v4l2sink->v4l2object)) {
+ GST_DEBUG_OBJECT (v4l2sink, "device is not open");
+ return FALSE;
+ }
+
+ if (v4l2sink->current_caps) {
+ GST_DEBUG_OBJECT (v4l2sink, "already have caps set.. are they equal?");
+ LOG_CAPS (v4l2sink, v4l2sink->current_caps);
+ if (gst_caps_is_equal (v4l2sink->current_caps, caps)) {
+ GST_DEBUG_OBJECT (v4l2sink, "yes they are!");
+ return TRUE;
+ }
+ GST_DEBUG_OBJECT (v4l2sink, "no they aren't!");
+ }
+
+ if (v4l2sink->pool) {
+ /* TODO: if we've already allocated buffers, we probably need to
+ * do something here to free and reallocate....
+ *
+ * gst_v4l2_object_stop_streaming()
+ * gst_v4l2_buffer_pool_destroy()
+ *
+ */
+ GST_DEBUG_OBJECT (v4l2sink, "warning, changing caps not supported yet");
+ return FALSE;
+ }
+
+ /* we want our own v4l2 type of fourcc codes */
+ if (!gst_v4l2_object_get_caps_info (v4l2sink->v4l2object, caps,
+ &format, &w, &h, &fps_n, &fps_d, &size)) {
+ GST_DEBUG_OBJECT (v4l2sink, "can't get capture format from caps %p", caps);
+ return FALSE;
+ }
+
+ if (!format) {
+ GST_DEBUG_OBJECT (v4l2sink, "unrecognized caps!!");
+ return FALSE;
+ }
+
+ if (!gst_v4l2_object_set_format (v4l2sink->v4l2object, format->pixelformat, w,
+ h)) {
+ /* error already posted */
+ return FALSE;
+ }
+
+ gst_v4l2sink_sync_overlay_fields (v4l2sink);
+
+ v4l2sink->current_caps = gst_caps_ref (caps);
+
+ return TRUE;
+}
+
+/** buffer alloc function to implement pad_alloc for upstream element */
+static GstFlowReturn
+gst_v4l2sink_buffer_alloc (GstBaseSink * bsink, guint64 offset, guint size,
+ GstCaps * caps, GstBuffer ** buf)
+{
+ GstV4l2Sink *v4l2sink = GST_V4L2SINK (bsink);
+ GstV4l2Buffer *v4l2buf;
+
+ if (v4l2sink->v4l2object->vcap.capabilities & V4L2_CAP_STREAMING) {
+
+ /* initialize the buffer pool if not initialized yet (first buffer): */
+ if (G_UNLIKELY (!v4l2sink->pool)) {
+
+ /* set_caps() might not be called yet.. so just to make sure: */
+ if (!gst_v4l2sink_set_caps (bsink, caps)) {
+ return GST_FLOW_ERROR;
+ }
+
+ GST_V4L2_CHECK_OPEN (v4l2sink->v4l2object);
+
+ if (!(v4l2sink->pool = gst_v4l2_buffer_pool_new (GST_ELEMENT (v4l2sink),
+ v4l2sink->v4l2object->video_fd,
+ v4l2sink->num_buffers, caps, FALSE,
+ V4L2_BUF_TYPE_VIDEO_OUTPUT))) {
+ return GST_FLOW_ERROR;
+ }
+#ifdef OMAPZOOM
+ if (!gst_v4l2_object_start_streaming (v4l2sink->v4l2object)) {
+ return GST_FLOW_ERROR;
+ }
+ v4l2sink->state = STATE_STREAMING;
+#else
+ v4l2sink->state = STATE_PENDING_STREAMON;
+#endif
+
+ GST_INFO_OBJECT (v4l2sink, "outputting buffers via mmap()");
+
+ if (v4l2sink->num_buffers != v4l2sink->pool->buffer_count) {
+ v4l2sink->num_buffers = v4l2sink->pool->buffer_count;
+ g_object_notify (G_OBJECT (v4l2sink), "queue-size");
+ }
+ }
+
+ v4l2buf = gst_v4l2_buffer_pool_get (v4l2sink->pool, TRUE);
+
+  GST_DEBUG_OBJECT (v4l2sink, "allocated buffer: %p", v4l2buf);
+
+ if (G_UNLIKELY (!v4l2buf)) {
+ return GST_FLOW_ERROR;
+ }
+
+ *buf = GST_BUFFER (v4l2buf);
+
+ return GST_FLOW_OK;
+
+ } else {
+ GST_ERROR_OBJECT (v4l2sink, "only supporting streaming mode for now...");
+ return GST_FLOW_ERROR;
+ }
+}
+
+/** called after A/V sync to render frame */
+static GstFlowReturn
+gst_v4l2sink_show_frame (GstBaseSink * bsink, GstBuffer * buf)
+{
+ GstV4l2Sink *v4l2sink = GST_V4L2SINK (bsink);
+ GstBuffer *newbuf = NULL;
+
+  GST_DEBUG_OBJECT (v4l2sink, "render buffer: %p", buf);
+
+ if (!GST_IS_V4L2_BUFFER (buf)) {
+ GstFlowReturn ret;
+
+ GST_DEBUG_OBJECT (v4l2sink, "slow-path.. I got a %s so I need to memcpy",
+ g_type_name (G_OBJECT_TYPE (buf)));
+
+ ret = gst_v4l2sink_buffer_alloc (bsink,
+ GST_BUFFER_OFFSET (buf), GST_BUFFER_SIZE (buf), GST_BUFFER_CAPS (buf),
+ &newbuf);
+
+ if (GST_FLOW_OK != ret) {
+ return ret;
+ }
+
+ memcpy (GST_BUFFER_DATA (newbuf),
+ GST_BUFFER_DATA (buf),
+ MIN (GST_BUFFER_SIZE (newbuf), GST_BUFFER_SIZE (buf)));
+
+    GST_DEBUG_OBJECT (v4l2sink, "render copied buffer: %p", newbuf);
+
+ buf = newbuf;
+ }
+
+ if (!gst_v4l2_buffer_pool_qbuf (v4l2sink->pool, GST_V4L2_BUFFER (buf))) {
+ return GST_FLOW_ERROR;
+ }
+#ifndef OMAPZOOM
+ if (v4l2sink->state == STATE_PENDING_STREAMON) {
+ if (!gst_v4l2_object_start_streaming (v4l2sink->v4l2object)) {
+ return GST_FLOW_ERROR;
+ }
+ v4l2sink->state = STATE_STREAMING;
+ }
+#endif
+
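+  /* buffers queued straight from our pool get an extra ref here, since
+   * basesink drops its reference after render; a copied buffer already
+   * carries the ref obtained from buffer_alloc above */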
+ if (!newbuf) {
+ gst_buffer_ref (buf);
+ }
+
+ return GST_FLOW_OK;
+}
diff --git a/sys/v4l2/gstv4l2sink.h b/sys/v4l2/gstv4l2sink.h
new file mode 100644
index 00000000..2d5cadb8
--- /dev/null
+++ b/sys/v4l2/gstv4l2sink.h
@@ -0,0 +1,88 @@
+/* GStreamer
+ *
+ * Copyright (C) 2009 Texas Instruments, Inc - http://www.ti.com/
+ *
+ * Description: V4L2 sink element
+ * Created on: Jul 2, 2009
+ * Author: Rob Clark <rob@ti.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GSTV4L2SINK_H__
+#define __GSTV4L2SINK_H__
+
+#include <gst/video/gstvideosink.h>
+#include <gstv4l2object.h>
+#include <gstv4l2bufferpool.h>
+
+GST_DEBUG_CATEGORY_EXTERN (v4l2sink_debug);
+
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_V4L2SINK \
+ (gst_v4l2sink_get_type())
+#define GST_V4L2SINK(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_V4L2SINK, GstV4l2Sink))
+#define GST_V4L2SINK_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_V4L2SINK, GstV4l2SinkClass))
+#define GST_IS_V4L2SINK(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_V4L2SINK))
+#define GST_IS_V4L2SINK_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_V4L2SINK))
+
+typedef struct _GstV4l2Sink GstV4l2Sink;
+typedef struct _GstV4l2SinkClass GstV4l2SinkClass;
+
+
+struct _GstV4l2Sink {
+ GstVideoSink videosink;
+
+ /*< private >*/
+ GstV4l2Object * v4l2object;
+ GstCaps *probed_caps; /* all supported caps of underlying v4l2 device */
+ GstCaps *current_caps; /* the current negotiated caps */
+ GstV4l2BufferPool *pool;
+ guint32 num_buffers;
+
+ /**
+ * field to store requested overlay-top/left/width/height props:
+ * note, could maybe be combined with 'vwin' field in GstV4l2Object?
+ */
+ struct v4l2_rect overlay;
+
+ /**
+ * bitmask to track which 'overlay' fields user has requested by
+ * setting properties:
+ */
+ guint8 overlay_fields_set;
+
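+  /* STATE_OFF / STATE_PENDING_STREAMON / STATE_STREAMING (see gstv4l2sink.c) */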
+ guint8 state;
+};
+
+struct _GstV4l2SinkClass {
+ GstVideoSinkClass parent_class;
+
+ GList *v4l2_class_devices;
+};
+
+GType gst_v4l2sink_get_type(void);
+
+G_END_DECLS
+
+
+#endif /* __GSTV4L2SINK_H__ */
diff --git a/sys/v4l2/gstv4l2src.c b/sys/v4l2/gstv4l2src.c
index 329ced5a..f0400518 100644
--- a/sys/v4l2/gstv4l2src.c
+++ b/sys/v4l2/gstv4l2src.c
@@ -56,6 +56,8 @@
#endif
#include "gstv4l2vidorient.h"
+#include "gst/gst-i18n-plugin.h"
+
static const GstElementDetails gst_v4l2src_details =
GST_ELEMENT_DETAILS ("Video (video4linux2) Source",
"Source/Video",
@@ -70,6 +72,8 @@ GST_DEBUG_CATEGORY (v4l2src_debug);
#define PROP_DEF_QUEUE_SIZE 2
#define PROP_DEF_ALWAYS_COPY TRUE
+#define DEFAULT_PROP_DEVICE "/dev/video0"
+
enum
{
PROP_0,
@@ -78,66 +82,6 @@ enum
PROP_ALWAYS_COPY
};
-static const guint32 gst_v4l2_formats[] = {
- /* from Linux 2.6.15 videodev2.h */
- V4L2_PIX_FMT_RGB332,
- V4L2_PIX_FMT_RGB555,
- V4L2_PIX_FMT_RGB565,
- V4L2_PIX_FMT_RGB555X,
- V4L2_PIX_FMT_RGB565X,
- V4L2_PIX_FMT_BGR24,
- V4L2_PIX_FMT_RGB24,
- V4L2_PIX_FMT_BGR32,
- V4L2_PIX_FMT_RGB32,
- V4L2_PIX_FMT_GREY,
- V4L2_PIX_FMT_YVU410,
- V4L2_PIX_FMT_YVU420,
- V4L2_PIX_FMT_YUYV,
- V4L2_PIX_FMT_UYVY,
- V4L2_PIX_FMT_YUV422P,
- V4L2_PIX_FMT_YUV411P,
- V4L2_PIX_FMT_Y41P,
-
- /* two planes -- one Y, one Cr + Cb interleaved */
- V4L2_PIX_FMT_NV12,
- V4L2_PIX_FMT_NV21,
-
- /* The following formats are not defined in the V4L2 specification */
- V4L2_PIX_FMT_YUV410,
- V4L2_PIX_FMT_YUV420,
- V4L2_PIX_FMT_YYUV,
- V4L2_PIX_FMT_HI240,
-
- /* see http://www.siliconimaging.com/RGB%20Bayer.htm */
-#ifdef V4L2_PIX_FMT_SBGGR8
- V4L2_PIX_FMT_SBGGR8,
-#endif
-
- /* compressed formats */
- V4L2_PIX_FMT_MJPEG,
- V4L2_PIX_FMT_JPEG,
- V4L2_PIX_FMT_DV,
- V4L2_PIX_FMT_MPEG,
-
- /* Vendor-specific formats */
- V4L2_PIX_FMT_WNVA,
-
-#ifdef V4L2_PIX_FMT_SN9C10X
- V4L2_PIX_FMT_SN9C10X,
-#endif
-#ifdef V4L2_PIX_FMT_PWC1
- V4L2_PIX_FMT_PWC1,
-#endif
-#ifdef V4L2_PIX_FMT_PWC2
- V4L2_PIX_FMT_PWC2,
-#endif
-#ifdef V4L2_PIX_FMT_YVYU
- V4L2_PIX_FMT_YVYU,
-#endif
-};
-
-#define GST_V4L2_FORMAT_COUNT (G_N_ELEMENTS (gst_v4l2_formats))
-
GST_IMPLEMENT_V4L2_PROBE_METHODS (GstV4l2SrcClass, gst_v4l2src);
GST_IMPLEMENT_V4L2_COLOR_BALANCE_METHODS (GstV4l2Src, gst_v4l2src);
GST_IMPLEMENT_V4L2_TUNER_METHODS (GstV4l2Src, gst_v4l2src);
@@ -259,8 +203,6 @@ static void gst_v4l2src_set_property (GObject * object, guint prop_id,
static void gst_v4l2src_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstCaps *gst_v4l2src_get_all_caps (void);
-
static void
gst_v4l2src_base_init (gpointer g_class)
{
@@ -276,7 +218,7 @@ gst_v4l2src_base_init (gpointer g_class)
gst_element_class_add_pad_template
(gstelement_class,
gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
- gst_v4l2src_get_all_caps ()));
+ gst_v4l2_object_get_all_caps ()));
}
static void
@@ -299,7 +241,8 @@ gst_v4l2src_class_init (GstV4l2SrcClass * klass)
element_class->change_state = gst_v4l2src_change_state;
- gst_v4l2_object_install_properties_helper (gobject_class);
+ gst_v4l2_object_install_properties_helper (gobject_class,
+ DEFAULT_PROP_DEVICE);
g_object_class_install_property (gobject_class, PROP_QUEUE_SIZE,
g_param_spec_uint ("queue-size", "Queue size",
"Number of buffers to be enqueud in the driver in streaming mode",
@@ -328,6 +271,7 @@ gst_v4l2src_init (GstV4l2Src * v4l2src, GstV4l2SrcClass * klass)
{
/* fixme: give an update_fps_function */
v4l2src->v4l2object = gst_v4l2_object_new (GST_ELEMENT (v4l2src),
+ V4L2_BUF_TYPE_VIDEO_CAPTURE, DEFAULT_PROP_DEVICE,
gst_v4l2_get_input, gst_v4l2_set_input, NULL);
/* number of buffers requested */
@@ -335,8 +279,6 @@ gst_v4l2src_init (GstV4l2Src * v4l2src, GstV4l2SrcClass * klass)
v4l2src->always_copy = PROP_DEF_ALWAYS_COPY;
- v4l2src->formats = NULL;
-
v4l2src->is_capturing = FALSE;
gst_base_src_set_format (GST_BASE_SRC (v4l2src), GST_FORMAT_TIME);
@@ -352,10 +294,6 @@ gst_v4l2src_dispose (GObject * object)
{
GstV4l2Src *v4l2src = GST_V4L2SRC (object);
- if (v4l2src->formats) {
- gst_v4l2src_clear_format_list (v4l2src);
- }
-
if (v4l2src->probed_caps) {
gst_caps_unref (v4l2src->probed_caps);
}
@@ -470,6 +408,8 @@ gst_v4l2src_negotiate (GstBaseSrc * basesrc)
/* first see what is possible on our source pad */
thiscaps = gst_pad_get_caps (GST_BASE_SRC_PAD (basesrc));
GST_DEBUG_OBJECT (basesrc, "caps of src: %" GST_PTR_FORMAT, thiscaps);
+ LOG_CAPS (basesrc, thiscaps);
+
/* nothing or anything is allowed, we're done */
if (thiscaps == NULL || gst_caps_is_any (thiscaps))
goto no_nego_needed;
@@ -477,6 +417,7 @@ gst_v4l2src_negotiate (GstBaseSrc * basesrc)
/* get the peer caps */
peercaps = gst_pad_peer_get_caps (GST_BASE_SRC_PAD (basesrc));
GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps);
+ LOG_CAPS (basesrc, peercaps);
if (peercaps && !gst_caps_is_any (peercaps)) {
GstCaps *icaps = NULL;
int i;
@@ -487,6 +428,7 @@ gst_v4l2src_negotiate (GstBaseSrc * basesrc)
GstCaps *ipcaps = gst_caps_copy_nth (peercaps, i);
GST_DEBUG_OBJECT (basesrc, "peer: %" GST_PTR_FORMAT, ipcaps);
+ LOG_CAPS (basesrc, ipcaps);
icaps = gst_caps_intersect (thiscaps, ipcaps);
gst_caps_unref (ipcaps);
@@ -499,6 +441,7 @@ gst_v4l2src_negotiate (GstBaseSrc * basesrc)
}
GST_DEBUG_OBJECT (basesrc, "intersect: %" GST_PTR_FORMAT, icaps);
+ LOG_CAPS (basesrc, icaps);
if (icaps) {
/* If there are multiple intersections pick the one with the smallest
* resolution strictly bigger then the first peer caps */
@@ -553,6 +496,7 @@ gst_v4l2src_negotiate (GstBaseSrc * basesrc)
if (!gst_caps_is_empty (caps)) {
gst_pad_fixate_caps (GST_BASE_SRC_PAD (basesrc), caps);
GST_DEBUG_OBJECT (basesrc, "fixated to: %" GST_PTR_FORMAT, caps);
+ LOG_CAPS (basesrc, caps);
if (gst_caps_is_any (caps)) {
/* hmm, still anything, so element can do anything and
@@ -577,268 +521,13 @@ no_nego_needed:
}
}
-
-static GstStructure *
-gst_v4l2src_v4l2fourcc_to_structure (guint32 fourcc)
-{
- GstStructure *structure = NULL;
-
- switch (fourcc) {
- case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
- case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
- structure = gst_structure_new ("image/jpeg", NULL);
- break;
- case V4L2_PIX_FMT_RGB332:
- case V4L2_PIX_FMT_RGB555:
- case V4L2_PIX_FMT_RGB555X:
- case V4L2_PIX_FMT_RGB565:
- case V4L2_PIX_FMT_RGB565X:
- case V4L2_PIX_FMT_RGB24:
- case V4L2_PIX_FMT_BGR24:
- case V4L2_PIX_FMT_RGB32:
- case V4L2_PIX_FMT_BGR32:{
- guint depth = 0, bpp = 0;
-
- gint endianness = 0;
-
- guint32 r_mask = 0, b_mask = 0, g_mask = 0;
-
- switch (fourcc) {
- case V4L2_PIX_FMT_RGB332:
- bpp = depth = 8;
- endianness = G_BYTE_ORDER; /* 'like, whatever' */
- r_mask = 0xe0;
- g_mask = 0x1c;
- b_mask = 0x03;
- break;
- case V4L2_PIX_FMT_RGB555:
- case V4L2_PIX_FMT_RGB555X:
- bpp = 16;
- depth = 15;
- endianness =
- fourcc == V4L2_PIX_FMT_RGB555X ? G_BIG_ENDIAN : G_LITTLE_ENDIAN;
- r_mask = 0x7c00;
- g_mask = 0x03e0;
- b_mask = 0x001f;
- break;
- case V4L2_PIX_FMT_RGB565:
- case V4L2_PIX_FMT_RGB565X:
- bpp = depth = 16;
- endianness =
- fourcc == V4L2_PIX_FMT_RGB565X ? G_BIG_ENDIAN : G_LITTLE_ENDIAN;
- r_mask = 0xf800;
- g_mask = 0x07e0;
- b_mask = 0x001f;
- break;
- case V4L2_PIX_FMT_RGB24:
- bpp = depth = 24;
- endianness = G_BIG_ENDIAN;
- r_mask = 0xff0000;
- g_mask = 0x00ff00;
- b_mask = 0x0000ff;
- break;
- case V4L2_PIX_FMT_BGR24:
- bpp = depth = 24;
- endianness = G_BIG_ENDIAN;
- r_mask = 0x0000ff;
- g_mask = 0x00ff00;
- b_mask = 0xff0000;
- break;
- case V4L2_PIX_FMT_RGB32:
- bpp = depth = 32;
- endianness = G_BIG_ENDIAN;
- r_mask = 0xff000000;
- g_mask = 0x00ff0000;
- b_mask = 0x0000ff00;
- break;
- case V4L2_PIX_FMT_BGR32:
- bpp = depth = 32;
- endianness = G_BIG_ENDIAN;
- r_mask = 0x000000ff;
- g_mask = 0x0000ff00;
- b_mask = 0x00ff0000;
- break;
- default:
- g_assert_not_reached ();
- break;
- }
- structure = gst_structure_new ("video/x-raw-rgb",
- "bpp", G_TYPE_INT, bpp,
- "depth", G_TYPE_INT, depth,
- "red_mask", G_TYPE_INT, r_mask,
- "green_mask", G_TYPE_INT, g_mask,
- "blue_mask", G_TYPE_INT, b_mask,
- "endianness", G_TYPE_INT, endianness, NULL);
- break;
- }
- case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
- structure = gst_structure_new ("video/x-raw-gray",
- "bpp", G_TYPE_INT, 8, NULL);
- break;
- case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
- case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
- /* FIXME: get correct fourccs here */
- break;
- case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
- case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
- case V4L2_PIX_FMT_YVU410:
- case V4L2_PIX_FMT_YUV410:
- case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
- case V4L2_PIX_FMT_YUYV:
- case V4L2_PIX_FMT_YVU420:
- case V4L2_PIX_FMT_UYVY:
- case V4L2_PIX_FMT_Y41P:
- case V4L2_PIX_FMT_YUV422P:
-#ifdef V4L2_PIX_FMT_YVYU
- case V4L2_PIX_FMT_YVYU:
-#endif
- case V4L2_PIX_FMT_YUV411P:{
- guint32 fcc = 0;
-
- switch (fourcc) {
- case V4L2_PIX_FMT_NV12:
- fcc = GST_MAKE_FOURCC ('N', 'V', '1', '2');
- break;
- case V4L2_PIX_FMT_NV21:
- fcc = GST_MAKE_FOURCC ('N', 'V', '2', '1');
- break;
- case V4L2_PIX_FMT_YVU410:
- fcc = GST_MAKE_FOURCC ('Y', 'V', 'U', '9');
- break;
- case V4L2_PIX_FMT_YUV410:
- fcc = GST_MAKE_FOURCC ('Y', 'U', 'V', '9');
- break;
- case V4L2_PIX_FMT_YUV420:
- fcc = GST_MAKE_FOURCC ('I', '4', '2', '0');
- break;
- case V4L2_PIX_FMT_YUYV:
- fcc = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
- break;
- case V4L2_PIX_FMT_YVU420:
- fcc = GST_MAKE_FOURCC ('Y', 'V', '1', '2');
- break;
- case V4L2_PIX_FMT_UYVY:
- fcc = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
- break;
- case V4L2_PIX_FMT_Y41P:
- fcc = GST_MAKE_FOURCC ('Y', '4', '1', 'P');
- break;
- case V4L2_PIX_FMT_YUV411P:
- fcc = GST_MAKE_FOURCC ('Y', '4', '1', 'B');
- break;
- case V4L2_PIX_FMT_YUV422P:
- fcc = GST_MAKE_FOURCC ('Y', '4', '2', 'B');
- break;
-#ifdef V4L2_PIX_FMT_YVYU
- case V4L2_PIX_FMT_YVYU:
- fcc = GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U');
- break;
-#endif
- default:
- g_assert_not_reached ();
- break;
- }
- structure = gst_structure_new ("video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, fcc, NULL);
- break;
- }
- case V4L2_PIX_FMT_DV:
- structure =
- gst_structure_new ("video/x-dv", "systemstream", G_TYPE_BOOLEAN, TRUE,
- NULL);
- break;
- case V4L2_PIX_FMT_MPEG: /* MPEG */
- /* someone figure out the MPEG format used... */
- break;
- case V4L2_PIX_FMT_WNVA: /* Winnov hw compres */
- break;
-#ifdef V4L2_PIX_FMT_SBGGR8
- case V4L2_PIX_FMT_SBGGR8:
- structure = gst_structure_new ("video/x-raw-bayer", NULL);
- break;
-#endif
-#ifdef V4L2_PIX_FMT_SN9C10X
- case V4L2_PIX_FMT_SN9C10X:
- structure = gst_structure_new ("video/x-sonix", NULL);
- break;
-#endif
-#ifdef V4L2_PIX_FMT_PWC1
- case V4L2_PIX_FMT_PWC1:
- structure = gst_structure_new ("video/x-pwc1", NULL);
- break;
-#endif
-#ifdef V4L2_PIX_FMT_PWC2
- case V4L2_PIX_FMT_PWC2:
- structure = gst_structure_new ("video/x-pwc2", NULL);
- break;
-#endif
- default:
- GST_DEBUG ("Unknown fourcc 0x%08x %" GST_FOURCC_FORMAT,
- fourcc, GST_FOURCC_ARGS (fourcc));
- break;
- }
-
- return structure;
-}
-
-static struct v4l2_fmtdesc *
-gst_v4l2src_get_format_from_fourcc (GstV4l2Src * v4l2src, guint32 fourcc)
-{
- struct v4l2_fmtdesc *fmt;
- GSList *walk;
-
- if (fourcc == 0)
- return NULL;
-
- walk = v4l2src->formats;
- while (walk) {
- fmt = (struct v4l2_fmtdesc *) walk->data;
- if (fmt->pixelformat == fourcc)
- return fmt;
- /* special case for jpeg */
- if ((fmt->pixelformat == V4L2_PIX_FMT_MJPEG && fourcc == V4L2_PIX_FMT_JPEG)
- || (fmt->pixelformat == V4L2_PIX_FMT_JPEG
- && fourcc == V4L2_PIX_FMT_MJPEG)) {
- return fmt;
- }
- walk = g_slist_next (walk);
- }
-
- return NULL;
-}
-
-static GstCaps *
-gst_v4l2src_get_all_caps (void)
-{
- static GstCaps *caps = NULL;
-
- if (caps == NULL) {
- GstStructure *structure;
-
- guint i;
-
- caps = gst_caps_new_empty ();
- for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
- structure = gst_v4l2src_v4l2fourcc_to_structure (gst_v4l2_formats[i]);
- if (structure) {
- gst_structure_set (structure,
- "width", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
- "height", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
- "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL);
- gst_caps_append_structure (caps, structure);
- }
- }
- }
-
- return caps;
-}
-
static GstCaps *
gst_v4l2src_get_caps (GstBaseSrc * src)
{
GstV4l2Src *v4l2src = GST_V4L2SRC (src);
GstCaps *ret;
GSList *walk;
+ GSList *formats;
if (!GST_V4L2_IS_OPEN (v4l2src->v4l2object)) {
/* FIXME: copy? */
@@ -850,25 +539,25 @@ gst_v4l2src_get_caps (GstBaseSrc * src)
if (v4l2src->probed_caps)
return gst_caps_ref (v4l2src->probed_caps);
- if (!v4l2src->formats)
- gst_v4l2src_fill_format_list (v4l2src);
+ formats = gst_v4l2_object_get_format_list (v4l2src->v4l2object);
ret = gst_caps_new_empty ();
- for (walk = v4l2src->formats; walk; walk = walk->next) {
+ for (walk = v4l2src->v4l2object->formats; walk; walk = walk->next) {
struct v4l2_fmtdesc *format;
GstStructure *template;
format = (struct v4l2_fmtdesc *) walk->data;
- template = gst_v4l2src_v4l2fourcc_to_structure (format->pixelformat);
+ template = gst_v4l2_object_v4l2fourcc_to_structure (format->pixelformat);
if (template) {
GstCaps *tmp;
- tmp = gst_v4l2src_probe_caps_for_format (v4l2src, format->pixelformat,
- template);
+ tmp =
+ gst_v4l2_object_probe_caps_for_format (v4l2src->v4l2object,
+ format->pixelformat, template);
if (tmp)
gst_caps_append (ret, tmp);
@@ -885,158 +574,6 @@ gst_v4l2src_get_caps (GstBaseSrc * src)
return ret;
}
-/* collect data for the given caps
- * @caps: given input caps
- * @format: location for the v4l format
- * @w/@h: location for width and height
- * @fps_n/@fps_d: location for framerate
- * @size: location for expected size of the frame or 0 if unknown
- */
-static gboolean
-gst_v4l2_get_caps_info (GstV4l2Src * v4l2src, GstCaps * caps,
- struct v4l2_fmtdesc **format, gint * w, gint * h, guint * fps_n,
- guint * fps_d, guint * size)
-{
- GstStructure *structure;
- const GValue *framerate;
- guint32 fourcc;
- const gchar *mimetype;
- guint outsize;
-
- /* default unknown values */
- fourcc = 0;
- outsize = 0;
-
- structure = gst_caps_get_structure (caps, 0);
-
- if (!gst_structure_get_int (structure, "width", w))
- return FALSE;
-
- if (!gst_structure_get_int (structure, "height", h))
- return FALSE;
-
- framerate = gst_structure_get_value (structure, "framerate");
- if (!framerate)
- return FALSE;
-
- *fps_n = gst_value_get_fraction_numerator (framerate);
- *fps_d = gst_value_get_fraction_denominator (framerate);
-
- mimetype = gst_structure_get_name (structure);
-
- if (!strcmp (mimetype, "video/x-raw-yuv")) {
- gst_structure_get_fourcc (structure, "format", &fourcc);
-
- switch (fourcc) {
- case GST_MAKE_FOURCC ('I', '4', '2', '0'):
- case GST_MAKE_FOURCC ('I', 'Y', 'U', 'V'):
- fourcc = V4L2_PIX_FMT_YUV420;
- outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h);
- outsize += 2 * ((GST_ROUND_UP_8 (*w) / 2) * (GST_ROUND_UP_2 (*h) / 2));
- break;
- case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
- fourcc = V4L2_PIX_FMT_YUYV;
- outsize = (GST_ROUND_UP_2 (*w) * 2) * *h;
- break;
- case GST_MAKE_FOURCC ('Y', '4', '1', 'P'):
- fourcc = V4L2_PIX_FMT_Y41P;
- outsize = (GST_ROUND_UP_2 (*w) * 2) * *h;
- break;
- case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
- fourcc = V4L2_PIX_FMT_UYVY;
- outsize = (GST_ROUND_UP_2 (*w) * 2) * *h;
- break;
- case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
- fourcc = V4L2_PIX_FMT_YVU420;
- outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h);
- outsize += 2 * ((GST_ROUND_UP_8 (*w) / 2) * (GST_ROUND_UP_2 (*h) / 2));
- break;
- case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
- fourcc = V4L2_PIX_FMT_YUV411P;
- outsize = GST_ROUND_UP_4 (*w) * *h;
- outsize += 2 * ((GST_ROUND_UP_8 (*w) / 4) * *h);
- break;
- case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
- fourcc = V4L2_PIX_FMT_YUV422P;
- outsize = GST_ROUND_UP_4 (*w) * *h;
- outsize += 2 * ((GST_ROUND_UP_8 (*w) / 2) * *h);
- break;
- case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
- fourcc = V4L2_PIX_FMT_NV12;
- outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h);
- outsize += (GST_ROUND_UP_4 (*w) * *h) / 2;
- break;
- case GST_MAKE_FOURCC ('N', 'V', '2', '1'):
- fourcc = V4L2_PIX_FMT_NV21;
- outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h);
- outsize += (GST_ROUND_UP_4 (*w) * *h) / 2;
- break;
-#ifdef V4L2_PIX_FMT_YVYU
- case GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'):
- fourcc = V4L2_PIX_FMT_YVYU;
- outsize = (GST_ROUND_UP_2 (*w) * 2) * *h;
- break;
-#endif
- }
- } else if (!strcmp (mimetype, "video/x-raw-rgb")) {
- gint depth, endianness, r_mask;
-
- gst_structure_get_int (structure, "depth", &depth);
- gst_structure_get_int (structure, "endianness", &endianness);
- gst_structure_get_int (structure, "red_mask", &r_mask);
-
- switch (depth) {
- case 8:
- fourcc = V4L2_PIX_FMT_RGB332;
- break;
- case 15:
- fourcc = (endianness == G_LITTLE_ENDIAN) ?
- V4L2_PIX_FMT_RGB555 : V4L2_PIX_FMT_RGB555X;
- break;
- case 16:
- fourcc = (endianness == G_LITTLE_ENDIAN) ?
- V4L2_PIX_FMT_RGB565 : V4L2_PIX_FMT_RGB565X;
- break;
- case 24:
- fourcc = (r_mask == 0xFF) ? V4L2_PIX_FMT_BGR24 : V4L2_PIX_FMT_RGB24;
- break;
- case 32:
- fourcc = (r_mask == 0xFF) ? V4L2_PIX_FMT_BGR32 : V4L2_PIX_FMT_RGB32;
- break;
- }
- } else if (strcmp (mimetype, "video/x-dv") == 0) {
- fourcc = V4L2_PIX_FMT_DV;
- } else if (strcmp (mimetype, "image/jpeg") == 0) {
- fourcc = V4L2_PIX_FMT_JPEG;
-#ifdef V4L2_PIX_FMT_SBGGR8
- } else if (strcmp (mimetype, "video/x-raw-bayer") == 0) {
- fourcc = V4L2_PIX_FMT_SBGGR8;
-#endif
-#ifdef V4L2_PIX_FMT_SN9C10X
- } else if (strcmp (mimetype, "video/x-sonix") == 0) {
- fourcc = V4L2_PIX_FMT_SN9C10X;
-#endif
-#ifdef V4L2_PIX_FMT_PWC1
- } else if (strcmp (mimetype, "video/x-pwc1") == 0) {
- fourcc = V4L2_PIX_FMT_PWC1;
-#endif
-#ifdef V4L2_PIX_FMT_PWC2
- } else if (strcmp (mimetype, "video/x-pwc2") == 0) {
- fourcc = V4L2_PIX_FMT_PWC2;
-#endif
- } else if (strcmp (mimetype, "video/x-raw-gray") == 0) {
- fourcc = V4L2_PIX_FMT_GREY;
- }
-
- if (fourcc == 0)
- return FALSE;
-
- *format = gst_v4l2src_get_format_from_fourcc (v4l2src, fourcc);
- *size = outsize;
-
- return TRUE;
-}
-
static gboolean
gst_v4l2src_set_caps (GstBaseSrc * src, GstCaps * caps)
{
@@ -1062,8 +599,8 @@ gst_v4l2src_set_caps (GstBaseSrc * src, GstCaps * caps)
}
/* we want our own v4l2 type of fourcc codes */
- if (!gst_v4l2_get_caps_info (v4l2src, caps, &format, &w, &h, &fps_n, &fps_d,
- &size)) {
+ if (!gst_v4l2_object_get_caps_info (v4l2src->v4l2object, caps, &format, &w,
+ &h, &fps_n, &fps_d, &size)) {
GST_DEBUG_OBJECT (v4l2src,
"can't get capture format from caps %" GST_PTR_FORMAT, caps);
return FALSE;
@@ -1346,7 +883,6 @@ static GstFlowReturn
gst_v4l2src_create (GstPushSrc * src, GstBuffer ** buf)
{
GstV4l2Src *v4l2src = GST_V4L2SRC (src);
-
GstFlowReturn ret;
if (v4l2src->use_mmap) {
diff --git a/sys/v4l2/gstv4l2src.h b/sys/v4l2/gstv4l2src.h
index bedae8a7..fef3f108 100644
--- a/sys/v4l2/gstv4l2src.h
+++ b/sys/v4l2/gstv4l2src.h
@@ -25,16 +25,10 @@
#define __GST_V4L2SRC_H__
#include <gstv4l2object.h>
+#include <gstv4l2bufferpool.h>
GST_DEBUG_CATEGORY_EXTERN (v4l2src_debug);
-/* size of v4l2 buffer pool in streaming case */
-#define GST_V4L2_MAX_BUFFERS 16
-#define GST_V4L2_MIN_BUFFERS 1
-
-/* max frame width/height */
-#define GST_V4L2_MAX_SIZE (1<<15) /* 2^15 == 32768 */
-
G_BEGIN_DECLS
#define GST_TYPE_V4L2SRC \
@@ -48,34 +42,10 @@ G_BEGIN_DECLS
#define GST_IS_V4L2SRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2SRC))
-typedef struct _GstV4l2BufferPool GstV4l2BufferPool;
-typedef struct _GstV4l2Buffer GstV4l2Buffer;
typedef struct _GstV4l2Src GstV4l2Src;
typedef struct _GstV4l2SrcClass GstV4l2SrcClass;
-/* global info */
-struct _GstV4l2BufferPool
-{
- GstMiniObject parent;
-
- GMutex *lock;
- gboolean running; /* with lock */
- gint num_live_buffers; /* with lock */
- gint video_fd; /* a dup(2) of the v4l2object's video_fd */
- guint buffer_count;
- GstV4l2Buffer **buffers; /* with lock; buffers[n] is NULL that buffer has been
- * dequeued and pushed out */
-};
-
-struct _GstV4l2Buffer {
- GstBuffer buffer;
-
- struct v4l2_buffer vbuffer;
-
- /* FIXME: have GstV4l2Src* instead, as this has GstV4l2BufferPool* */
- GstV4l2BufferPool *pool;
-};
/**
* GstV4l2Src:
@@ -93,9 +63,6 @@ struct _GstV4l2Src
/* pads */
GstCaps *probed_caps;
- /* internal lists */
- GSList *formats; /* list of available capture formats */
-
/* buffer handling */
GstV4l2BufferPool *pool;
diff --git a/sys/v4l2/v4l2_calls.c b/sys/v4l2/v4l2_calls.c
index 4fb1fad8..5f6261ea 100644
--- a/sys/v4l2/v4l2_calls.c
+++ b/sys/v4l2/v4l2_calls.c
@@ -46,6 +46,9 @@
#include "gstv4l2colorbalance.h"
#include "gstv4l2src.h"
+#include "gstv4l2sink.h"
+
+#include "gst/gst-i18n-plugin.h"
/* Those are ioctl calls */
#ifndef V4L2_CID_HCENTER
@@ -449,10 +452,14 @@ gst_v4l2_open (GstV4l2Object * v4l2object)
goto error;
/* do we need to be a capture device? */
- if (GST_IS_V4L2SRC (v4l2object) &&
+ if (GST_IS_V4L2SRC (v4l2object->element) &&
!(v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
goto not_capture;
+ if (GST_IS_V4L2SINK (v4l2object->element) &&
+ !(v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_OUTPUT))
+ goto not_output;
+
/* create enumerations, posts errors. */
if (!gst_v4l2_fill_lists (v4l2object))
goto error;
@@ -497,6 +504,14 @@ not_capture:
("Capabilities: 0x%x", v4l2object->vcap.capabilities));
goto error;
}
+not_output:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, NOT_FOUND,
+        (_("Device '%s' is not an output device."),
+ v4l2object->videodev),
+ ("Capabilities: 0x%x", v4l2object->vcap.capabilities));
+ goto error;
+ }
error:
{
if (GST_V4L2_IS_OPEN (v4l2object)) {
diff --git a/sys/v4l2/v4l2_calls.h b/sys/v4l2/v4l2_calls.h
index 8bf7bcea..1f24c1f4 100644
--- a/sys/v4l2/v4l2_calls.h
+++ b/sys/v4l2/v4l2_calls.h
@@ -25,18 +25,20 @@
#define __V4L2_CALLS_H__
#include "gstv4l2object.h"
-#include "gst/gst-i18n-plugin.h"
#ifdef HAVE_LIBV4L2
-#include <libv4l2.h>
+# include <libv4l2.h>
#else
-#define v4l2_fd_open(fd, flags) (fd)
-#define v4l2_close close
-#define v4l2_dup dup
-#define v4l2_ioctl ioctl
-#define v4l2_read read
-#define v4l2_mmap mmap
-#define v4l2_munmap munmap
+# include <sys/ioctl.h>
+# include <linux/videodev.h>
+# include <linux/videodev2.h>
+# define v4l2_fd_open(fd, flags) (fd)
+# define v4l2_close close
+# define v4l2_dup dup
+# define v4l2_ioctl ioctl
+# define v4l2_read read
+# define v4l2_mmap mmap
+# define v4l2_munmap munmap
#endif
/* simple check whether the device is open */
@@ -137,4 +139,43 @@ gboolean gst_v4l2_set_attribute (GstV4l2Object *v4l2object,
gboolean gst_v4l2_get_capabilities (GstV4l2Object * v4l2object);
+
+/* note: in a build with TTIF logging we can optimize slightly and avoid
+ * the gst_caps_to_string() when logging isn't enabled, by using the
+ * TTIF_TRACE_ARG_PROCESSOR feature of ttif_trace_fprintf():
+ */
+#ifdef GST_LOG_OVER_TTIF
+# define LOG_CAPS(obj, caps) G_STMT_START { \
+ if (caps) { \
+ static TTIF_TRACE_ARG_PROCESSOR proc = { \
+ .convert = (char (*)(void *))gst_caps_to_string, \
+ .free = (void (*)(char *))g_free \
+ }; \
+ GST_DEBUG_OBJECT (obj, "%s: %qs", #caps, &proc, (caps)); \
+ } else { \
+ GST_DEBUG_OBJECT (obj, "null"); \
+ } \
+ } G_STMT_END
+#else
+# define LOG_CAPS(obj, caps) G_STMT_START { \
+ if (caps) { \
+ gchar *capstr = gst_caps_to_string (caps); \
+ GST_DEBUG_OBJECT (obj, "%s: %s", #caps, capstr); \
+ g_free (capstr); \
+ } else { \
+ GST_DEBUG_OBJECT (obj, "null"); \
+ } \
+ } G_STMT_END
+#endif
+
+/* note: the omapzoom kernel v4l2 display driver deviates from the v4l2 API
+ * spec in a few areas. For example, we must always leave one buffer with
+ * the driver from before STREAMON until after STREAMOFF. And some
+ * interfaces, such as rotation (and mirroring?), are different.
+ *
+ * this is only a temporary hack, as we should switch to the new driver soon
+ */
+#define OMAPZOOM
+
+
#endif /* __V4L2_CALLS_H__ */
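(As an illustration only, not part of this patch: a minimal sketch of how the LOG_CAPS helper added above might be called from an element. The element type and function name below are hypothetical; only the LOG_CAPS macro itself comes from this commit.)

static gboolean
gst_myelement_setcaps (GstBaseSrc * bsrc, GstCaps * caps)
{
  /* with GST_LOG_OVER_TTIF the gst_caps_to_string() is deferred to the TTIF
   * argument processor; otherwise the caps are stringified and logged
   * through GST_DEBUG_OBJECT directly */
  LOG_CAPS (bsrc, caps);
  return TRUE;
}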
diff --git a/sys/v4l2/v4l2src_calls.c b/sys/v4l2/v4l2src_calls.c
index 3dd6e0e6..75ed1dba 100644
--- a/sys/v4l2/v4l2src_calls.c
+++ b/sys/v4l2/v4l2src_calls.c
@@ -43,6 +43,9 @@
#endif
#include "gstv4l2tuner.h"
+#include "gstv4l2bufferpool.h"
+
+#include "gst/gst-i18n-plugin.h"
GST_DEBUG_CATEGORY_EXTERN (v4l2src_debug);
#define GST_CAT_DEFAULT v4l2src_debug
@@ -57,299 +60,17 @@ GST_DEBUG_CATEGORY_EXTERN (v4l2src_debug);
#endif
-#define GST_TYPE_V4L2_BUFFER (gst_v4l2_buffer_get_type())
-#define GST_IS_V4L2_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L2_BUFFER))
-#define GST_V4L2_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L2_BUFFER, GstV4l2Buffer))
-
-static GstBufferClass *v4l2buffer_parent_class = NULL;
-
/* Local functions */
-static gboolean
-gst_v4l2src_get_nearest_size (GstV4l2Src * v4l2src, guint32 pixelformat,
- gint * width, gint * height);
-static void gst_v4l2_buffer_pool_destroy (GstV4l2BufferPool * pool);
-
-static void
-gst_v4l2_buffer_finalize (GstV4l2Buffer * buffer)
-{
- GstV4l2BufferPool *pool;
- gboolean resuscitated = FALSE;
- gint index;
-
- pool = buffer->pool;
-
- index = buffer->vbuffer.index;
-
- GST_LOG ("finalizing buffer %p %d", buffer, index);
-
- g_mutex_lock (pool->lock);
- if (GST_BUFFER_SIZE (buffer) != 0)
- /* BUFFER_SIZE is only set if the frame was dequeued */
- pool->num_live_buffers--;
-
- if (pool->running) {
- if (v4l2_ioctl (pool->video_fd, VIDIOC_QBUF, &buffer->vbuffer) < 0) {
- GST_WARNING ("could not requeue buffer %p %d", buffer, index);
- } else {
- /* FIXME: check that the caps didn't change */
- GST_LOG ("reviving buffer %p, %d", buffer, index);
- gst_buffer_ref (GST_BUFFER (buffer));
- GST_BUFFER_SIZE (buffer) = 0;
- pool->buffers[index] = buffer;
- resuscitated = TRUE;
- }
- } else {
- GST_LOG ("the pool is shutting down");
- }
- g_mutex_unlock (pool->lock);
-
- if (!resuscitated) {
- GST_LOG ("buffer %p not recovered, unmapping", buffer);
- gst_mini_object_unref (GST_MINI_OBJECT (pool));
- v4l2_munmap ((void *) GST_BUFFER_DATA (buffer), buffer->vbuffer.length);
-
- GST_MINI_OBJECT_CLASS (v4l2buffer_parent_class)->finalize (GST_MINI_OBJECT
- (buffer));
- }
-}
-
-static void
-gst_v4l2_buffer_class_init (gpointer g_class, gpointer class_data)
-{
- GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
-
- v4l2buffer_parent_class = g_type_class_peek_parent (g_class);
-
- mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
- gst_v4l2_buffer_finalize;
-}
-
-static GType
-gst_v4l2_buffer_get_type (void)
-{
- static GType _gst_v4l2_buffer_type;
-
- if (G_UNLIKELY (_gst_v4l2_buffer_type == 0)) {
- static const GTypeInfo v4l2_buffer_info = {
- sizeof (GstBufferClass),
- NULL,
- NULL,
- gst_v4l2_buffer_class_init,
- NULL,
- NULL,
- sizeof (GstV4l2Buffer),
- 0,
- NULL,
- NULL
- };
- _gst_v4l2_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
- "GstV4l2Buffer", &v4l2_buffer_info, 0);
- }
- return _gst_v4l2_buffer_type;
-}
-
-static GstV4l2Buffer *
-gst_v4l2_buffer_new (GstV4l2BufferPool * pool, guint index, GstCaps * caps)
-{
- GstV4l2Buffer *ret;
- guint8 *data;
-
- ret = (GstV4l2Buffer *) gst_mini_object_new (GST_TYPE_V4L2_BUFFER);
-
- GST_LOG ("creating buffer %u, %p in pool %p", index, ret, pool);
-
- ret->pool =
- (GstV4l2BufferPool *) gst_mini_object_ref (GST_MINI_OBJECT (pool));
-
- ret->vbuffer.index = index;
- ret->vbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- ret->vbuffer.memory = V4L2_MEMORY_MMAP;
-
- if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &ret->vbuffer) < 0)
- goto querybuf_failed;
-
- GST_LOG (" index: %u", ret->vbuffer.index);
- GST_LOG (" type: %d", ret->vbuffer.type);
- GST_LOG (" bytesused: %u", ret->vbuffer.bytesused);
- GST_LOG (" flags: %08x", ret->vbuffer.flags);
- GST_LOG (" field: %d", ret->vbuffer.field);
- GST_LOG (" memory: %d", ret->vbuffer.memory);
- if (ret->vbuffer.memory == V4L2_MEMORY_MMAP)
- GST_LOG (" MMAP offset: %u", ret->vbuffer.m.offset);
- GST_LOG (" length: %u", ret->vbuffer.length);
- GST_LOG (" input: %u", ret->vbuffer.input);
-
- data = (guint8 *) v4l2_mmap (0, ret->vbuffer.length,
- PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
- ret->vbuffer.m.offset);
-
- if (data == MAP_FAILED)
- goto mmap_failed;
-
- GST_BUFFER_DATA (ret) = data;
- GST_BUFFER_SIZE (ret) = ret->vbuffer.length;
-
- GST_BUFFER_FLAG_SET (ret, GST_BUFFER_FLAG_READONLY);
-
- gst_buffer_set_caps (GST_BUFFER (ret), caps);
-
- return ret;
-
- /* ERRORS */
-querybuf_failed:
- {
- gint errnosave = errno;
-
- GST_WARNING ("Failed QUERYBUF: %s", g_strerror (errnosave));
- gst_buffer_unref (GST_BUFFER (ret));
- errno = errnosave;
- return NULL;
- }
-mmap_failed:
- {
- gint errnosave = errno;
-
- GST_WARNING ("Failed to mmap: %s", g_strerror (errnosave));
- gst_buffer_unref (GST_BUFFER (ret));
- errno = errnosave;
- return NULL;
- }
-}
-
-#define GST_TYPE_V4L2_BUFFER_POOL (gst_v4l2_buffer_pool_get_type())
-#define GST_IS_V4L2_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L2_BUFFER_POOL))
-#define GST_V4L2_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L2_BUFFER_POOL, GstV4l2BufferPool))
-
-static GstMiniObjectClass *buffer_pool_parent_class = NULL;
-
-static void
-gst_v4l2_buffer_pool_finalize (GstV4l2BufferPool * pool)
-{
- g_mutex_free (pool->lock);
- pool->lock = NULL;
-
- if (pool->video_fd >= 0)
- v4l2_close (pool->video_fd);
-
- if (pool->buffers) {
- g_free (pool->buffers);
- pool->buffers = NULL;
- }
-
- GST_MINI_OBJECT_CLASS (buffer_pool_parent_class)->finalize (GST_MINI_OBJECT
- (pool));
-}
-
-static void
-gst_v4l2_buffer_pool_init (GstV4l2BufferPool * pool, gpointer g_class)
-{
- pool->lock = g_mutex_new ();
- pool->running = FALSE;
- pool->num_live_buffers = 0;
-}
-
-static void
-gst_v4l2_buffer_pool_class_init (gpointer g_class, gpointer class_data)
-{
- GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
-
- buffer_pool_parent_class = g_type_class_peek_parent (g_class);
-
- mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
- gst_v4l2_buffer_pool_finalize;
-}
-
-static GType
-gst_v4l2_buffer_pool_get_type (void)
-{
- static GType _gst_v4l2_buffer_pool_type;
-
- if (G_UNLIKELY (_gst_v4l2_buffer_pool_type == 0)) {
- static const GTypeInfo v4l2_buffer_pool_info = {
- sizeof (GstBufferClass),
- NULL,
- NULL,
- gst_v4l2_buffer_pool_class_init,
- NULL,
- NULL,
- sizeof (GstV4l2BufferPool),
- 0,
- (GInstanceInitFunc) gst_v4l2_buffer_pool_init,
- NULL
- };
- _gst_v4l2_buffer_pool_type = g_type_register_static (GST_TYPE_MINI_OBJECT,
- "GstV4l2BufferPool", &v4l2_buffer_pool_info, 0);
- }
- return _gst_v4l2_buffer_pool_type;
-}
-
-static GstV4l2BufferPool *
-gst_v4l2_buffer_pool_new (GstV4l2Src * v4l2src, gint fd, gint num_buffers,
- GstCaps * caps)
-{
- GstV4l2BufferPool *pool;
- gint n;
-
- pool = (GstV4l2BufferPool *) gst_mini_object_new (GST_TYPE_V4L2_BUFFER_POOL);
-
- pool->video_fd = v4l2_dup (fd);
- if (pool->video_fd < 0)
- goto dup_failed;
-
- pool->buffer_count = num_buffers;
- pool->buffers = g_new0 (GstV4l2Buffer *, num_buffers);
-
- for (n = 0; n < num_buffers; n++) {
- pool->buffers[n] = gst_v4l2_buffer_new (pool, n, caps);
- if (!pool->buffers[n])
- goto buffer_new_failed;
- }
-
- return pool;
-
- /* ERRORS */
-dup_failed:
- {
- gint errnosave = errno;
-
- gst_mini_object_unref (GST_MINI_OBJECT (pool));
-
- errno = errnosave;
-
- return NULL;
- }
-buffer_new_failed:
- {
- gint errnosave = errno;
-
- gst_v4l2_buffer_pool_destroy (pool);
-
- errno = errnosave;
-
- return NULL;
- }
-}
static gboolean
-gst_v4l2_buffer_pool_activate (GstV4l2BufferPool * pool, GstV4l2Src * v4l2src)
+gst_v4l2src_buffer_pool_activate (GstV4l2BufferPool * pool,
+ GstV4l2Src * v4l2src)
{
- gint n;
+ GstV4l2Buffer *buf;
- g_mutex_lock (pool->lock);
-
- for (n = 0; n < pool->buffer_count; n++) {
- struct v4l2_buffer *buf;
-
- buf = &pool->buffers[n]->vbuffer;
-
- GST_LOG ("enqueue pool buffer %d", n);
-
- if (v4l2_ioctl (pool->video_fd, VIDIOC_QBUF, buf) < 0)
+ while ((buf = gst_v4l2_buffer_pool_get (pool, FALSE)) != NULL)
+ if (!gst_v4l2_buffer_pool_qbuf (pool, buf))
goto queue_failed;
- }
- pool->running = TRUE;
-
- g_mutex_unlock (pool->lock);
return TRUE;
@@ -360,624 +81,11 @@ queue_failed:
(_("Could not enqueue buffers in device '%s'."),
v4l2src->v4l2object->videodev),
("enqueing buffer %d/%d failed: %s",
- n, v4l2src->num_buffers, g_strerror (errno)));
- g_mutex_unlock (pool->lock);
+ buf->vbuffer.index, v4l2src->num_buffers, g_strerror (errno)));
return FALSE;
}
}
-static void
-gst_v4l2_buffer_pool_destroy (GstV4l2BufferPool * pool)
-{
- gint n;
-
- g_mutex_lock (pool->lock);
- pool->running = FALSE;
- g_mutex_unlock (pool->lock);
-
- GST_DEBUG ("destroy pool");
-
- /* after this point, no more buffers will be queued or dequeued; no buffer
- * from pool->buffers that is NULL will be set to a buffer, and no buffer that
- * is not NULL will be pushed out. */
-
- /* miniobjects have no dispose, so they can't break ref-cycles, as buffers ref
- * the pool, we need to unref the buffer to properly finalize te pool */
- for (n = 0; n < pool->buffer_count; n++) {
- GstBuffer *buf;
-
- g_mutex_lock (pool->lock);
- buf = GST_BUFFER (pool->buffers[n]);
- g_mutex_unlock (pool->lock);
-
- if (buf)
- /* we own the ref if the buffer is in pool->buffers; drop it. */
- gst_buffer_unref (buf);
- }
-
- gst_mini_object_unref (GST_MINI_OBJECT (pool));
-}
-
-/* complete made up ranking, the values themselves are meaningless */
-#define YUV_BASE_RANK 1000
-#define JPEG_BASE_RANK 500
-#define DV_BASE_RANK 200
-#define RGB_BASE_RANK 100
-#define YUV_ODD_BASE_RANK 50
-#define RGB_ODD_BASE_RANK 25
-#define BAYER_BASE_RANK 15
-#define S910_BASE_RANK 10
-#define GREY_BASE_RANK 5
-#define PWC_BASE_RANK 1
-
-static gint
-gst_v4l2src_format_get_rank (guint32 fourcc)
-{
- switch (fourcc) {
- case V4L2_PIX_FMT_MJPEG:
- return JPEG_BASE_RANK;
- case V4L2_PIX_FMT_JPEG:
- return JPEG_BASE_RANK + 1;
-
- case V4L2_PIX_FMT_RGB332:
- case V4L2_PIX_FMT_RGB555:
- case V4L2_PIX_FMT_RGB555X:
- case V4L2_PIX_FMT_RGB565:
- case V4L2_PIX_FMT_RGB565X:
- return RGB_ODD_BASE_RANK;
-
- case V4L2_PIX_FMT_RGB24:
- case V4L2_PIX_FMT_BGR24:
- return RGB_BASE_RANK - 1;
-
- case V4L2_PIX_FMT_RGB32:
- case V4L2_PIX_FMT_BGR32:
- return RGB_BASE_RANK;
-
- case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
- return GREY_BASE_RANK;
-
- case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
- case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
- case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
- case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
- return YUV_ODD_BASE_RANK;
-
- case V4L2_PIX_FMT_YVU410: /* YVU9, 9 bits per pixel */
- return YUV_BASE_RANK + 3;
- case V4L2_PIX_FMT_YUV410: /* YUV9, 9 bits per pixel */
- return YUV_BASE_RANK + 2;
- case V4L2_PIX_FMT_YUV420: /* I420, 12 bits per pixel */
- return YUV_BASE_RANK + 7;
- case V4L2_PIX_FMT_YUYV: /* YUY2, 16 bits per pixel */
- return YUV_BASE_RANK + 10;
- case V4L2_PIX_FMT_YVU420: /* YV12, 12 bits per pixel */
- return YUV_BASE_RANK + 6;
- case V4L2_PIX_FMT_UYVY: /* UYVY, 16 bits per pixel */
- return YUV_BASE_RANK + 9;
- case V4L2_PIX_FMT_Y41P: /* Y41P, 12 bits per pixel */
- return YUV_BASE_RANK + 5;
- case V4L2_PIX_FMT_YUV411P: /* Y41B, 12 bits per pixel */
- return YUV_BASE_RANK + 4;
- case V4L2_PIX_FMT_YUV422P: /* Y42B, 16 bits per pixel */
- return YUV_BASE_RANK + 8;
-
- case V4L2_PIX_FMT_DV:
- return DV_BASE_RANK;
-
- case V4L2_PIX_FMT_MPEG: /* MPEG */
- case V4L2_PIX_FMT_WNVA: /* Winnov hw compres */
- return 0;
-
-#ifdef V4L2_PIX_FMT_SBGGR8
- case V4L2_PIX_FMT_SBGGR8:
- return BAYER_BASE_RANK;
-#endif
-
-#ifdef V4L2_PIX_FMT_SN9C10X
- case V4L2_PIX_FMT_SN9C10X:
- return S910_BASE_RANK;
-#endif
-
-#ifdef V4L2_PIX_FMT_PWC1
- case V4L2_PIX_FMT_PWC1:
- return PWC_BASE_RANK;
-#endif
-#ifdef V4L2_PIX_FMT_PWC2
- case V4L2_PIX_FMT_PWC2:
- return PWC_BASE_RANK;
-#endif
-
- default:
- break;
- }
-
- return 0;
-}
-
-static gint
-gst_v4l2src_format_cmp_func (gconstpointer a, gconstpointer b)
-{
- guint32 pf1 = ((struct v4l2_fmtdesc *) a)->pixelformat;
- guint32 pf2 = ((struct v4l2_fmtdesc *) b)->pixelformat;
-
- if (pf1 == pf2)
- return 0;
-
- return gst_v4l2src_format_get_rank (pf2) - gst_v4l2src_format_get_rank (pf1);
-}
-
-/******************************************************
- * gst_v4l2src_fill_format_list():
- * create list of supported capture formats
- * return value: TRUE on success, FALSE on error
- ******************************************************/
-gboolean
-gst_v4l2src_fill_format_list (GstV4l2Src * v4l2src)
-{
- gint n;
- struct v4l2_fmtdesc *format;
-
- GST_DEBUG_OBJECT (v4l2src, "getting src format enumerations");
-
- /* format enumeration */
- for (n = 0;; n++) {
- format = g_new0 (struct v4l2_fmtdesc, 1);
-
- format->index = n;
- format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-
- if (v4l2_ioctl (v4l2src->v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0) {
- if (errno == EINVAL) {
- g_free (format);
- break; /* end of enumeration */
- } else {
- goto failed;
- }
- }
-
- GST_LOG_OBJECT (v4l2src, "index: %u", format->index);
- GST_LOG_OBJECT (v4l2src, "type: %d", format->type);
- GST_LOG_OBJECT (v4l2src, "flags: %08x", format->flags);
- GST_LOG_OBJECT (v4l2src, "description: '%s'", format->description);
- GST_LOG_OBJECT (v4l2src, "pixelformat: %" GST_FOURCC_FORMAT,
- GST_FOURCC_ARGS (format->pixelformat));
-
- /* sort formats according to our preference; we do this, because caps
- * are probed in the order the formats are in the list, and the order of
- * formats in the final probed caps matters for things like fixation */
- v4l2src->formats = g_slist_insert_sorted (v4l2src->formats, format,
- (GCompareFunc) gst_v4l2src_format_cmp_func);
- }
-
- GST_DEBUG_OBJECT (v4l2src, "got %d format(s)", n);
-
- return TRUE;
-
- /* ERRORS */
-failed:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, SETTINGS,
- (_("Failed to enumerate possible video formats device '%s' can work with"), v4l2src->v4l2object->videodev), ("Failed to get number %d in pixelformat enumeration for %s. (%d - %s)", n, v4l2src->v4l2object->videodev, errno, g_strerror (errno)));
- g_free (format);
- return FALSE;
- }
-}
-
-/******************************************************
- * gst_v4l2src_clear_format_list():
- * free list of supported capture formats
- * return value: TRUE on success, FALSE on error
- ******************************************************/
-gboolean
-gst_v4l2src_clear_format_list (GstV4l2Src * v4l2src)
-{
- g_slist_foreach (v4l2src->formats, (GFunc) g_free, NULL);
- g_slist_free (v4l2src->formats);
- v4l2src->formats = NULL;
-
- return TRUE;
-}
-
-/* The frame interval enumeration code first appeared in Linux 2.6.19. */
-#ifdef VIDIOC_ENUM_FRAMEINTERVALS
-static GstStructure *
-gst_v4l2src_probe_caps_for_format_and_size (GstV4l2Src * v4l2src,
- guint32 pixelformat,
- guint32 width, guint32 height, const GstStructure * template)
-{
- gint fd = v4l2src->v4l2object->video_fd;
- struct v4l2_frmivalenum ival;
- guint32 num, denom;
- GstStructure *s;
- GValue rates = { 0, };
-
- memset (&ival, 0, sizeof (struct v4l2_frmivalenum));
- ival.index = 0;
- ival.pixel_format = pixelformat;
- ival.width = width;
- ival.height = height;
-
- GST_LOG_OBJECT (v4l2src, "get frame interval for %ux%u, %" GST_FOURCC_FORMAT,
- width, height, GST_FOURCC_ARGS (pixelformat));
-
- /* keep in mind that v4l2 gives us frame intervals (durations); we invert the
- * fraction to get framerate */
- if (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0)
- goto enum_frameintervals_failed;
-
- if (ival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
- GValue rate = { 0, };
-
- g_value_init (&rates, GST_TYPE_LIST);
- g_value_init (&rate, GST_TYPE_FRACTION);
-
- do {
- num = ival.discrete.numerator;
- denom = ival.discrete.denominator;
-
- if (num > G_MAXINT || denom > G_MAXINT) {
- /* let us hope we don't get here... */
- num >>= 1;
- denom >>= 1;
- }
-
- GST_LOG_OBJECT (v4l2src, "adding discrete framerate: %d/%d", denom, num);
-
- /* swap to get the framerate */
- gst_value_set_fraction (&rate, denom, num);
- gst_value_list_append_value (&rates, &rate);
-
- ival.index++;
- } while (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0);
- } else if (ival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
- GValue min = { 0, };
- GValue step = { 0, };
- GValue max = { 0, };
- gboolean added = FALSE;
- guint32 minnum, mindenom;
- guint32 maxnum, maxdenom;
-
- g_value_init (&rates, GST_TYPE_LIST);
-
- g_value_init (&min, GST_TYPE_FRACTION);
- g_value_init (&step, GST_TYPE_FRACTION);
- g_value_init (&max, GST_TYPE_FRACTION);
-
- /* get the min */
- minnum = ival.stepwise.min.numerator;
- mindenom = ival.stepwise.min.denominator;
- if (minnum > G_MAXINT || mindenom > G_MAXINT) {
- minnum >>= 1;
- mindenom >>= 1;
- }
- GST_LOG_OBJECT (v4l2src, "stepwise min frame interval: %d/%d", minnum,
- mindenom);
- gst_value_set_fraction (&min, minnum, mindenom);
-
- /* get the max */
- maxnum = ival.stepwise.max.numerator;
- maxdenom = ival.stepwise.max.denominator;
- if (maxnum > G_MAXINT || maxdenom > G_MAXINT) {
- maxnum >>= 1;
- maxdenom >>= 1;
- }
-
- GST_LOG_OBJECT (v4l2src, "stepwise max frame interval: %d/%d", maxnum,
- maxdenom);
- gst_value_set_fraction (&max, maxnum, maxdenom);
-
- /* get the step */
- num = ival.stepwise.step.numerator;
- denom = ival.stepwise.step.denominator;
- if (num > G_MAXINT || denom > G_MAXINT) {
- num >>= 1;
- denom >>= 1;
- }
-
- if (num == 0 || denom == 0) {
- /* in this case we have a wrong fraction or no step, set the step to max
- * so that we only add the min value in the loop below */
- num = maxnum;
- denom = maxdenom;
- }
-
- /* since we only have gst_value_fraction_subtract and not add, negate the
- * numerator */
- GST_LOG_OBJECT (v4l2src, "stepwise step frame interval: %d/%d", num, denom);
- gst_value_set_fraction (&step, -num, denom);
-
- while (gst_value_compare (&min, &max) <= 0) {
- GValue rate = { 0, };
-
- num = gst_value_get_fraction_numerator (&min);
- denom = gst_value_get_fraction_denominator (&min);
- GST_LOG_OBJECT (v4l2src, "adding stepwise framerate: %d/%d", denom, num);
-
- /* invert to get the framerate */
- g_value_init (&rate, GST_TYPE_FRACTION);
- gst_value_set_fraction (&rate, denom, num);
- gst_value_list_append_value (&rates, &rate);
- added = TRUE;
-
- /* we're actually adding because step was negated above. This is because
- * there is no _add function... */
- if (!gst_value_fraction_subtract (&min, &min, &step)) {
- GST_WARNING_OBJECT (v4l2src, "could not step fraction!");
- break;
- }
- }
- if (!added) {
- /* no range was added, leave the default range from the template */
- GST_WARNING_OBJECT (v4l2src, "no range added, leaving default");
- g_value_unset (&rates);
- }
- } else if (ival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
- guint32 maxnum, maxdenom;
-
- g_value_init (&rates, GST_TYPE_FRACTION_RANGE);
-
- num = ival.stepwise.min.numerator;
- denom = ival.stepwise.min.denominator;
- if (num > G_MAXINT || denom > G_MAXINT) {
- num >>= 1;
- denom >>= 1;
- }
-
- maxnum = ival.stepwise.max.numerator;
- maxdenom = ival.stepwise.max.denominator;
- if (maxnum > G_MAXINT || maxdenom > G_MAXINT) {
- maxnum >>= 1;
- maxdenom >>= 1;
- }
-
- GST_LOG_OBJECT (v4l2src, "continuous frame interval %d/%d to %d/%d",
- maxdenom, maxnum, denom, num);
-
- gst_value_set_fraction_range_full (&rates, maxdenom, maxnum, denom, num);
- } else {
- goto unknown_type;
- }
-
-return_data:
- s = gst_structure_copy (template);
- gst_structure_set (s, "width", G_TYPE_INT, (gint) width,
- "height", G_TYPE_INT, (gint) height, NULL);
-
- if (G_IS_VALUE (&rates)) {
- /* only change the framerate on the template when we have a valid probed new
- * value */
- gst_structure_set_value (s, "framerate", &rates);
- g_value_unset (&rates);
- } else {
- gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1,
- NULL);
- }
- return s;
-
- /* ERRORS */
-enum_frameintervals_failed:
- {
- GST_DEBUG_OBJECT (v4l2src,
- "Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u",
- GST_FOURCC_ARGS (pixelformat), width, height);
- goto return_data;
- }
-unknown_type:
- {
- /* I don't see how this is actually an error, we ignore the format then */
- GST_WARNING_OBJECT (v4l2src,
- "Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u",
- GST_FOURCC_ARGS (pixelformat), width, height, ival.type);
- return NULL;
- }
-}
-#endif /* defined VIDIOC_ENUM_FRAMEINTERVALS */
-
-#ifdef VIDIOC_ENUM_FRAMESIZES
-static gint
-sort_by_frame_size (GstStructure * s1, GstStructure * s2)
-{
- int w1, h1, w2, h2;
-
- gst_structure_get_int (s1, "width", &w1);
- gst_structure_get_int (s1, "height", &h1);
- gst_structure_get_int (s2, "width", &w2);
- gst_structure_get_int (s2, "height", &h2);
-
- /* I think it's safe to assume that this won't overflow for a while */
- return ((w2 * h2) - (w1 * h1));
-}
-#endif
-
-GstCaps *
-gst_v4l2src_probe_caps_for_format (GstV4l2Src * v4l2src, guint32 pixelformat,
- const GstStructure * template)
-{
- GstCaps *ret = gst_caps_new_empty ();
- GstStructure *tmp;
-
-#ifdef VIDIOC_ENUM_FRAMESIZES
- gint fd = v4l2src->v4l2object->video_fd;
- struct v4l2_frmsizeenum size;
- GList *results = NULL;
- guint32 w, h;
-
- memset (&size, 0, sizeof (struct v4l2_frmsizeenum));
- size.index = 0;
- size.pixel_format = pixelformat;
-
- GST_DEBUG_OBJECT (v4l2src, "Enumerating frame sizes");
-
- if (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0)
- goto enum_framesizes_failed;
-
- if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
- do {
- GST_LOG_OBJECT (v4l2src, "got discrete frame size %dx%d",
- size.discrete.width, size.discrete.height);
-
- w = MIN (size.discrete.width, G_MAXINT);
- h = MIN (size.discrete.height, G_MAXINT);
-
- if (w && h) {
- tmp = gst_v4l2src_probe_caps_for_format_and_size (v4l2src, pixelformat,
- w, h, template);
-
- if (tmp)
- results = g_list_prepend (results, tmp);
- }
-
- size.index++;
- } while (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
- GST_DEBUG_OBJECT (v4l2src, "done iterating discrete frame sizes");
- } else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
- GST_DEBUG_OBJECT (v4l2src, "we have stepwise frame sizes:");
- GST_DEBUG_OBJECT (v4l2src, "min width: %d", size.stepwise.min_width);
- GST_DEBUG_OBJECT (v4l2src, "min height: %d", size.stepwise.min_height);
- GST_DEBUG_OBJECT (v4l2src, "max width: %d", size.stepwise.max_width);
- GST_DEBUG_OBJECT (v4l2src, "min height: %d", size.stepwise.max_height);
- GST_DEBUG_OBJECT (v4l2src, "step width: %d", size.stepwise.step_width);
- GST_DEBUG_OBJECT (v4l2src, "step height: %d", size.stepwise.step_height);
-
- for (w = size.stepwise.min_width, h = size.stepwise.min_height;
- w < size.stepwise.max_width && h < size.stepwise.max_height;
- w += size.stepwise.step_width, h += size.stepwise.step_height) {
- if (w == 0 || h == 0)
- continue;
-
- tmp = gst_v4l2src_probe_caps_for_format_and_size (v4l2src, pixelformat,
- w, h, template);
-
- if (tmp)
- results = g_list_prepend (results, tmp);
- }
- GST_DEBUG_OBJECT (v4l2src, "done iterating stepwise frame sizes");
- } else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
- guint32 maxw, maxh;
-
- GST_DEBUG_OBJECT (v4l2src, "we have continuous frame sizes:");
- GST_DEBUG_OBJECT (v4l2src, "min width: %d", size.stepwise.min_width);
- GST_DEBUG_OBJECT (v4l2src, "min height: %d", size.stepwise.min_height);
- GST_DEBUG_OBJECT (v4l2src, "max width: %d", size.stepwise.max_width);
- GST_DEBUG_OBJECT (v4l2src, "min height: %d", size.stepwise.max_height);
-
- w = MAX (size.stepwise.min_width, 1);
- h = MAX (size.stepwise.min_height, 1);
- maxw = MIN (size.stepwise.max_width, G_MAXINT);
- maxh = MIN (size.stepwise.max_height, G_MAXINT);
-
- tmp = gst_v4l2src_probe_caps_for_format_and_size (v4l2src, pixelformat,
- w, h, template);
- if (tmp) {
- gst_structure_set (tmp, "width", GST_TYPE_INT_RANGE, (gint) w,
- (gint) maxw, "height", GST_TYPE_INT_RANGE, (gint) h, (gint) maxh,
- NULL);
-
- /* no point using the results list here, since there's only one struct */
- gst_caps_append_structure (ret, tmp);
- }
- } else {
- goto unknown_type;
- }
-
- /* we use an intermediary list to store and then sort the results of the
- * probing because we can't make any assumptions about the order in which
- * the driver will give us the sizes, but we want the final caps to contain
- * the results starting with the highest resolution and having the lowest
- * resolution last, since order in caps matters for things like fixation. */
- results = g_list_sort (results, (GCompareFunc) sort_by_frame_size);
- while (results != NULL) {
- gst_caps_append_structure (ret, GST_STRUCTURE (results->data));
- results = g_list_delete_link (results, results);
- }
-
- if (gst_caps_is_empty (ret))
- goto enum_framesizes_no_results;
-
- return ret;
-
- /* ERRORS */
-enum_framesizes_failed:
- {
- /* I don't see how this is actually an error */
- GST_DEBUG_OBJECT (v4l2src,
- "Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT
- " (%s)", GST_FOURCC_ARGS (pixelformat), g_strerror (errno));
- goto default_frame_sizes;
- }
-enum_framesizes_no_results:
- {
- /* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in
- * question doesn't actually support it yet */
- GST_DEBUG_OBJECT (v4l2src, "No results for pixelformat %" GST_FOURCC_FORMAT
- " enumerating frame sizes, trying fallback",
- GST_FOURCC_ARGS (pixelformat));
- goto default_frame_sizes;
- }
-unknown_type:
- {
- GST_WARNING_OBJECT (v4l2src,
- "Unknown frame sizeenum type for pixelformat %" GST_FOURCC_FORMAT
- ": %u", GST_FOURCC_ARGS (pixelformat), size.type);
- goto default_frame_sizes;
- }
-default_frame_sizes:
-#endif /* defined VIDIOC_ENUM_FRAMESIZES */
- {
- gint min_w, max_w, min_h, max_h, fix_num = 0, fix_denom = 0;
-
- /* This code is for Linux < 2.6.19 */
- min_w = min_h = 1;
- max_w = max_h = GST_V4L2_MAX_SIZE;
- if (!gst_v4l2src_get_nearest_size (v4l2src, pixelformat, &min_w, &min_h)) {
- GST_WARNING_OBJECT (v4l2src,
- "Could not probe minimum capture size for pixelformat %"
- GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
- }
- if (!gst_v4l2src_get_nearest_size (v4l2src, pixelformat, &max_w, &max_h)) {
- GST_WARNING_OBJECT (v4l2src,
- "Could not probe maximum capture size for pixelformat %"
- GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
- }
-
- /* Since we can't get framerate directly, try to use the current norm */
- if (v4l2src->v4l2object->norm && v4l2src->v4l2object->norms) {
- GList *norms;
- GstTunerNorm *norm;
-
- for (norms = v4l2src->v4l2object->norms; norms != NULL;
- norms = norms->next) {
- norm = (GstTunerNorm *) norms->data;
- if (!strcmp (norm->label, v4l2src->v4l2object->norm))
- break;
- }
- /* If it's possible, set framerate to that (discrete) value */
- if (norm) {
- fix_num = gst_value_get_fraction_numerator (&norm->framerate);
- fix_denom = gst_value_get_fraction_denominator (&norm->framerate);
- }
- }
-
- tmp = gst_structure_copy (template);
- if (fix_num) {
- gst_structure_set (tmp,
- "width", GST_TYPE_INT_RANGE, min_w, max_w,
- "height", GST_TYPE_INT_RANGE, min_h, max_h,
- "framerate", GST_TYPE_FRACTION, fix_num, fix_denom, NULL);
- } else {
- /* if norm can't be used, copy the template framerate */
- gst_structure_set (tmp,
- "width", GST_TYPE_INT_RANGE, min_w, max_w,
- "height", GST_TYPE_INT_RANGE, min_h, max_h,
- "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL);
- }
- gst_caps_append_structure (ret, tmp);
-
- return ret;
- }
-}
-
/******************************************************
* gst_v4l2src_grab_frame ():
* grab a frame for capturing
@@ -987,27 +95,28 @@ GstFlowReturn
gst_v4l2src_grab_frame (GstV4l2Src * v4l2src, GstBuffer ** buf)
{
#define NUM_TRIALS 50
- struct v4l2_buffer buffer;
+ GstV4l2Object *v4l2object;
+ GstV4l2BufferPool *pool;
gint32 trials = NUM_TRIALS;
GstBuffer *pool_buffer;
gboolean need_copy;
- gint index;
gint ret;
- memset (&buffer, 0x00, sizeof (buffer));
- buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- buffer.memory = V4L2_MEMORY_MMAP;
+ v4l2object = v4l2src->v4l2object;
+ pool = v4l2src->pool;
+
+ GST_DEBUG_OBJECT (v4l2src, "grab frame");
for (;;) {
- if (v4l2src->v4l2object->can_poll_device) {
- ret = gst_poll_wait (v4l2src->v4l2object->poll, GST_CLOCK_TIME_NONE);
+ if (v4l2object->can_poll_device) {
+ ret = gst_poll_wait (v4l2object->poll, GST_CLOCK_TIME_NONE);
if (G_UNLIKELY (ret < 0)) {
if (errno == EBUSY)
goto stopped;
if (errno == ENXIO) {
GST_DEBUG_OBJECT (v4l2src,
"v4l2 device doesn't support polling. Disabling");
- v4l2src->v4l2object->can_poll_device = FALSE;
+ v4l2object->can_poll_device = FALSE;
} else {
if (errno != EAGAIN && errno != EINTR)
goto select_error;
@@ -1015,104 +124,37 @@ gst_v4l2src_grab_frame (GstV4l2Src * v4l2src, GstBuffer ** buf)
}
}
- if (v4l2_ioctl (v4l2src->v4l2object->video_fd, VIDIOC_DQBUF, &buffer) >= 0)
+ pool_buffer = GST_BUFFER (gst_v4l2_buffer_pool_dqbuf (pool));
+ if (pool_buffer)
break;
- GST_WARNING_OBJECT (v4l2src,
- "problem grabbing frame %d (ix=%d), trials=%d, pool-ct=%d, buf.flags=%d",
- buffer.sequence, buffer.index, trials,
- GST_MINI_OBJECT_REFCOUNT (v4l2src->pool), buffer.flags);
+ GST_WARNING_OBJECT (pool->v4l2elem, "trials=%d", trials);
/* if the sync() got interrupted, we can retry */
switch (errno) {
- case EAGAIN:
- GST_WARNING_OBJECT (v4l2src,
- "Non-blocking I/O has been selected using O_NONBLOCK and"
- " no buffer was in the outgoing queue. device %s",
- v4l2src->v4l2object->videodev);
- break;
case EINVAL:
- goto einval;
case ENOMEM:
- goto enomem;
+ /* fatal */
+ return GST_FLOW_ERROR;
+
+ case EAGAIN:
case EIO:
- GST_INFO_OBJECT (v4l2src,
- "VIDIOC_DQBUF failed due to an internal error."
- " Can also indicate temporary problems like signal loss."
- " Note the driver might dequeue an (empty) buffer despite"
- " returning an error, or even stop capturing."
- " device %s", v4l2src->v4l2object->videodev);
- /* have we de-queued a buffer ? */
- if (!(buffer.flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))) {
- /* this fails
- if ((buffer.index >= 0) && (buffer.index < v4l2src->breq.count)) {
- GST_DEBUG_OBJECT (v4l2src, "reenqueing buffer (ix=%ld)", buffer.index);
- gst_v4l2src_queue_frame (v4l2src, buffer.index);
- }
- else {
- */
- GST_DEBUG_OBJECT (v4l2src, "reenqueing buffer");
- /* FIXME: this is not a good idea, as drivers usualy return the buffer
- * with index-number set to 0, thus the re-enque will fail unless it
- * was incidentialy 0.
- * We could try to re-enque all buffers without handling the ioctl
- * return.
- */
- /*
- if (ioctl (v4l2src->v4l2object->video_fd, VIDIOC_QBUF, &buffer) < 0) {
- goto qbuf_failed;
- }
- */
- /*} */
- }
- break;
case EINTR:
- GST_WARNING_OBJECT (v4l2src,
- "could not sync on a buffer on device %s",
- v4l2src->v4l2object->videodev);
- break;
default:
- GST_WARNING_OBJECT (v4l2src,
- "Grabbing frame got interrupted on %s unexpectedly. %d: %s.",
- v4l2src->v4l2object->videodev, errno, g_strerror (errno));
+ /* try again, until too many trials */
break;
}
/* check nr. of attempts to capture */
if (--trials == -1) {
goto too_many_trials;
- } else {
- memset (&buffer, 0x00, sizeof (buffer));
- buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- buffer.memory = V4L2_MEMORY_MMAP;
}
}
- g_mutex_lock (v4l2src->pool->lock);
-
- index = buffer.index;
-
- /* get our GstBuffer with that index from the pool, if the buffer was
- * outstanding we have a serious problem. */
- pool_buffer = GST_BUFFER (v4l2src->pool->buffers[index]);
-
- if (pool_buffer == NULL)
- goto no_buffer;
-
- GST_LOG_OBJECT (v4l2src, "grabbed buffer %p at index %d", pool_buffer, index);
-
- /* we have the buffer now, mark the spot in the pool empty */
- v4l2src->pool->buffers[index] = NULL;
- v4l2src->pool->num_live_buffers++;
/* if we are handing out the last buffer in the pool, we need to make a
* copy and bring the buffer back in the pool. */
need_copy = v4l2src->always_copy
- || (v4l2src->pool->num_live_buffers == v4l2src->pool->buffer_count);
-
- g_mutex_unlock (v4l2src->pool->lock);
-
- /* this can change at every frame, esp. with jpeg */
- GST_BUFFER_SIZE (pool_buffer) = buffer.bytesused;
+ || !gst_v4l2_buffer_pool_available_buffers (pool);
if (G_UNLIKELY (need_copy)) {
*buf = gst_buffer_copy (pool_buffer);
@@ -1124,16 +166,12 @@ gst_v4l2src_grab_frame (GstV4l2Src * v4l2src, GstBuffer ** buf)
}
/* we set the buffer metadata in gst_v4l2src_create() */
- GST_LOG_OBJECT (v4l2src, "grabbed frame %d (ix=%d), flags %08x, pool-ct=%d",
- buffer.sequence, buffer.index, buffer.flags,
- v4l2src->pool->num_live_buffers);
-
return GST_FLOW_OK;
/* ERRORS */
select_error:
{
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, READ, (NULL),
+ GST_ELEMENT_ERROR (pool->v4l2elem, RESOURCE, READ, (NULL),
("select error %d: %s (%d)", ret, g_strerror (errno), errno));
return GST_FLOW_ERROR;
}
@@ -1142,52 +180,15 @@ stopped:
GST_DEBUG ("stop called");
return GST_FLOW_WRONG_STATE;
}
-einval:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, FAILED,
- (_("Failed trying to get video frames from device '%s'."),
- v4l2src->v4l2object->videodev),
- (_("The buffer type is not supported, or the index is out of bounds,"
- " or no buffers have been allocated yet, or the userptr"
- " or length are invalid. device %s"),
- v4l2src->v4l2object->videodev));
- return GST_FLOW_ERROR;
- }
-enomem:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, FAILED,
- (_("Failed trying to get video frames from device '%s'. Not enough memory."), v4l2src->v4l2object->videodev), (_("insufficient memory to enqueue a user pointer buffer. device %s."), v4l2src->v4l2object->videodev));
- return GST_FLOW_ERROR;
- }
too_many_trials:
{
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, FAILED,
+ GST_ELEMENT_ERROR (pool->v4l2elem, RESOURCE, FAILED,
(_("Failed trying to get video frames from device '%s'."),
- v4l2src->v4l2object->videodev),
+ v4l2object->videodev),
(_("Failed after %d tries. device %s. system error: %s"),
- NUM_TRIALS, v4l2src->v4l2object->videodev, g_strerror (errno)));
- return GST_FLOW_ERROR;
- }
-no_buffer:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, FAILED,
- (_("Failed trying to get video frames from device '%s'."),
- v4l2src->v4l2object->videodev),
- (_("No free buffers found in the pool at index %d."), index));
- g_mutex_unlock (v4l2src->pool->lock);
- return GST_FLOW_ERROR;
- }
-/*
-qbuf_failed:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, WRITE,
- (_("Could not exchange data with device '%s'."),
- v4l2src->v4l2object->videodev),
- ("Error queueing buffer on device %s. system error: %s",
- v4l2src->v4l2object->videodev, g_strerror (errno)));
+ NUM_TRIALS, v4l2object->videodev, g_strerror (errno)));
return GST_FLOW_ERROR;
}
-*/
}
/* Note about fraction simplification
@@ -1205,49 +206,14 @@ gst_v4l2src_set_capture (GstV4l2Src * v4l2src, guint32 pixelformat,
guint32 width, guint32 height, guint fps_n, guint fps_d)
{
gint fd = v4l2src->v4l2object->video_fd;
- struct v4l2_format format;
struct v4l2_streamparm stream;
- GST_DEBUG_OBJECT (v4l2src, "Setting capture format to %dx%d, format "
- "%" GST_FOURCC_FORMAT, width, height, GST_FOURCC_ARGS (pixelformat));
-
- GST_V4L2_CHECK_OPEN (v4l2src->v4l2object);
- GST_V4L2_CHECK_NOT_ACTIVE (v4l2src->v4l2object);
-
- memset (&format, 0x00, sizeof (struct v4l2_format));
- format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-
- if (v4l2_ioctl (fd, VIDIOC_G_FMT, &format) < 0)
- goto get_fmt_failed;
-
- format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- format.fmt.pix.width = width;
- format.fmt.pix.height = height;
- format.fmt.pix.pixelformat = pixelformat;
- /* request whole frames; change when gstreamer supports interlaced video
- * (INTERLACED mode returns frames where the fields have already been
- * combined, there are other modes for requesting fields individually) */
- format.fmt.pix.field = V4L2_FIELD_INTERLACED;
-
- if (v4l2_ioctl (fd, VIDIOC_S_FMT, &format) < 0) {
- if (errno != EINVAL)
- goto set_fmt_failed;
-
- /* try again with progressive video */
- format.fmt.pix.width = width;
- format.fmt.pix.height = height;
- format.fmt.pix.pixelformat = pixelformat;
- format.fmt.pix.field = V4L2_FIELD_NONE;
- if (v4l2_ioctl (fd, VIDIOC_S_FMT, &format) < 0)
- goto set_fmt_failed;
+ if (!gst_v4l2_object_set_format (v4l2src->v4l2object, pixelformat, width,
+ height)) {
+ /* error already reported */
+ return FALSE;
}
- if (format.fmt.pix.width != width || format.fmt.pix.height != height)
- goto invalid_dimensions;
-
- if (format.fmt.pix.pixelformat != pixelformat)
- goto invalid_pixelformat;
-
/* Is there a reason we require the caller to always specify a framerate? */
GST_LOG_OBJECT (v4l2src, "Desired framerate: %u/%u", fps_n, fps_d);
@@ -1297,45 +263,6 @@ gst_v4l2src_set_capture (GstV4l2Src * v4l2src, guint32 pixelformat,
done:
return TRUE;
-
- /* ERRORS */
-get_fmt_failed:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, SETTINGS,
- (_("Device '%s' does not support video capture"),
- v4l2src->v4l2object->videodev),
- ("Call to G_FMT failed: (%s)", g_strerror (errno)));
- return FALSE;
- }
-set_fmt_failed:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, SETTINGS,
- (_("Device '%s' cannot capture at %dx%d"),
- v4l2src->v4l2object->videodev, width, height),
- ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
- GST_FOURCC_ARGS (pixelformat), width, height, g_strerror (errno)));
- return FALSE;
- }
-invalid_dimensions:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, SETTINGS,
- (_("Device '%s' cannot capture at %dx%d"),
- v4l2src->v4l2object->videodev, width, height),
- ("Tried to capture at %dx%d, but device returned size %dx%d",
- width, height, format.fmt.pix.width, format.fmt.pix.height));
- return FALSE;
- }
-invalid_pixelformat:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, SETTINGS,
- (_("Device '%s' cannot capture in the specified format"),
- v4l2src->v4l2object->videodev),
- ("Tried to capture in %" GST_FOURCC_FORMAT
- ", but device returned format" " %" GST_FOURCC_FORMAT,
- GST_FOURCC_ARGS (pixelformat),
- GST_FOURCC_ARGS (format.fmt.pix.pixelformat)));
- return FALSE;
- }
}
/******************************************************
@@ -1346,11 +273,6 @@ invalid_pixelformat:
gboolean
gst_v4l2src_capture_init (GstV4l2Src * v4l2src, GstCaps * caps)
{
- gint fd = v4l2src->v4l2object->video_fd;
- struct v4l2_requestbuffers breq;
-
- memset (&breq, 0, sizeof (struct v4l2_requestbuffers));
-
GST_DEBUG_OBJECT (v4l2src, "initializing the capture system");
GST_V4L2_CHECK_OPEN (v4l2src->v4l2object);
@@ -1358,38 +280,22 @@ gst_v4l2src_capture_init (GstV4l2Src * v4l2src, GstCaps * caps)
if (v4l2src->v4l2object->vcap.capabilities & V4L2_CAP_STREAMING) {
- GST_DEBUG_OBJECT (v4l2src, "STREAMING, requesting %d MMAP CAPTURE buffers",
- v4l2src->num_buffers);
-
- breq.count = v4l2src->num_buffers;
- breq.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- breq.memory = V4L2_MEMORY_MMAP;
-
- if (v4l2_ioctl (fd, VIDIOC_REQBUFS, &breq) < 0)
- goto reqbufs_failed;
-
- GST_LOG_OBJECT (v4l2src, " count: %u", breq.count);
- GST_LOG_OBJECT (v4l2src, " type: %d", breq.type);
- GST_LOG_OBJECT (v4l2src, " memory: %d", breq.memory);
-
- if (breq.count < GST_V4L2_MIN_BUFFERS)
- goto no_buffers;
-
- if (v4l2src->num_buffers != breq.count) {
- GST_WARNING_OBJECT (v4l2src, "using %u buffers instead", breq.count);
- v4l2src->num_buffers = breq.count;
- g_object_notify (G_OBJECT (v4l2src), "queue-size");
- }
-
/* Map the buffers */
GST_LOG_OBJECT (v4l2src, "initiating buffer pool");
- if (!(v4l2src->pool = gst_v4l2_buffer_pool_new (v4l2src, fd,
- v4l2src->num_buffers, caps)))
+ if (!(v4l2src->pool = gst_v4l2_buffer_pool_new (GST_ELEMENT (v4l2src),
+ v4l2src->v4l2object->video_fd,
+ v4l2src->num_buffers, caps, TRUE, V4L2_BUF_TYPE_VIDEO_CAPTURE)))
goto buffer_pool_new_failed;
GST_INFO_OBJECT (v4l2src, "capturing buffers via mmap()");
v4l2src->use_mmap = TRUE;
+
+ if (v4l2src->num_buffers != v4l2src->pool->buffer_count) {
+ v4l2src->num_buffers = v4l2src->pool->buffer_count;
+ g_object_notify (G_OBJECT (v4l2src), "queue-size");
+ }
+
} else if (v4l2src->v4l2object->vcap.capabilities & V4L2_CAP_READWRITE) {
GST_INFO_OBJECT (v4l2src, "capturing buffers via read()");
v4l2src->use_mmap = FALSE;
@@ -1403,24 +309,6 @@ gst_v4l2src_capture_init (GstV4l2Src * v4l2src, GstCaps * caps)
return TRUE;
/* ERRORS */
-reqbufs_failed:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, READ,
- (_("Could not get buffers from device '%s'."),
- v4l2src->v4l2object->videodev),
- ("error requesting %d buffers: %s",
- v4l2src->num_buffers, g_strerror (errno)));
- return FALSE;
- }
-no_buffers:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, READ,
- (_("Could not get enough buffers from device '%s'."),
- v4l2src->v4l2object->videodev),
- ("we received %d from device '%s', we want at least %d",
- breq.count, v4l2src->v4l2object->videodev, GST_V4L2_MIN_BUFFERS));
- return FALSE;
- }
buffer_pool_new_failed:
{
GST_ELEMENT_ERROR (v4l2src, RESOURCE, READ,
@@ -1447,9 +335,6 @@ no_supported_capture_method:
gboolean
gst_v4l2src_capture_start (GstV4l2Src * v4l2src)
{
- gint type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- gint fd = v4l2src->v4l2object->video_fd;
-
GST_DEBUG_OBJECT (v4l2src, "starting the capturing");
//GST_V4L2_CHECK_OPEN (v4l2src->v4l2object);
GST_V4L2_CHECK_ACTIVE (v4l2src->v4l2object);
@@ -1457,30 +342,18 @@ gst_v4l2src_capture_start (GstV4l2Src * v4l2src)
v4l2src->quit = FALSE;
if (v4l2src->use_mmap) {
- if (!gst_v4l2_buffer_pool_activate (v4l2src->pool, v4l2src))
- goto pool_activate_failed;
+ if (!gst_v4l2src_buffer_pool_activate (v4l2src->pool, v4l2src)) {
+ return FALSE;
+ }
- if (v4l2_ioctl (fd, VIDIOC_STREAMON, &type) < 0)
- goto streamon_failed;
+ if (!gst_v4l2_object_start_streaming (v4l2src->v4l2object)) {
+ return FALSE;
+ }
}
v4l2src->is_capturing = TRUE;
return TRUE;
-
- /* ERRORS */
-pool_activate_failed:
- {
- /* already errored */
- return FALSE;
- }
-streamon_failed:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, OPEN_READ,
- (_("Error starting streaming capture from device '%s'."),
- v4l2src->v4l2object->videodev), GST_ERROR_SYSTEM);
- return FALSE;
- }
}
/******************************************************
@@ -1491,8 +364,6 @@ streamon_failed:
gboolean
gst_v4l2src_capture_stop (GstV4l2Src * v4l2src)
{
- gint type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-
GST_DEBUG_OBJECT (v4l2src, "stopping capturing");
if (!GST_V4L2_IS_OPEN (v4l2src->v4l2object)) {
@@ -1505,8 +376,9 @@ gst_v4l2src_capture_stop (GstV4l2Src * v4l2src)
if (v4l2src->use_mmap) {
/* we actually need to sync on all queued buffers but not
* on the non-queued ones */
- if (v4l2_ioctl (v4l2src->v4l2object->video_fd, VIDIOC_STREAMOFF, &type) < 0)
- goto streamoff_failed;
+ if (!gst_v4l2_object_stop_streaming (v4l2src->v4l2object)) {
+ return FALSE;
+ }
}
done:
@@ -1516,15 +388,6 @@ done:
v4l2src->is_capturing = FALSE;
return TRUE;
-
- /* ERRORS */
-streamoff_failed:
- {
- GST_ELEMENT_ERROR (v4l2src, RESOURCE, CLOSE,
- (_("Error stopping streaming capture from device '%s'."),
- v4l2src->v4l2object->videodev), GST_ERROR_SYSTEM);
- return FALSE;
- }
}
/******************************************************
@@ -1553,80 +416,3 @@ gst_v4l2src_capture_deinit (GstV4l2Src * v4l2src)
return TRUE;
}
-
-/*
- */
-static gboolean
-gst_v4l2src_get_nearest_size (GstV4l2Src * v4l2src, guint32 pixelformat,
- gint * width, gint * height)
-{
- struct v4l2_format fmt;
- int fd;
- int r;
-
- g_return_val_if_fail (width != NULL, FALSE);
- g_return_val_if_fail (height != NULL, FALSE);
-
- GST_LOG_OBJECT (v4l2src,
- "getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT,
- *width, *height, GST_FOURCC_ARGS (pixelformat));
-
- fd = v4l2src->v4l2object->video_fd;
-
- /* get size delimiters */
- memset (&fmt, 0, sizeof (fmt));
- fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- fmt.fmt.pix.width = *width;
- fmt.fmt.pix.height = *height;
- fmt.fmt.pix.pixelformat = pixelformat;
- fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
-
- r = v4l2_ioctl (fd, VIDIOC_TRY_FMT, &fmt);
- if (r < 0 && errno == EINVAL) {
- /* try again with progressive video */
- fmt.fmt.pix.width = *width;
- fmt.fmt.pix.height = *height;
- fmt.fmt.pix.pixelformat = pixelformat;
- fmt.fmt.pix.field = V4L2_FIELD_NONE;
- r = v4l2_ioctl (fd, VIDIOC_TRY_FMT, &fmt);
- }
-
- if (r < 0) {
- /* The driver might not implement TRY_FMT, in which case we will try
- S_FMT to probe */
- if (errno != ENOTTY)
- return FALSE;
-
- /* Only try S_FMT if we're not actively capturing yet, which we shouldn't
- be, because we're still probing */
- if (GST_V4L2_IS_ACTIVE (v4l2src->v4l2object))
- return FALSE;
-
- GST_LOG_OBJECT (v4l2src,
- "Failed to probe size limit with VIDIOC_TRY_FMT, trying VIDIOC_S_FMT");
-
- fmt.fmt.pix.width = *width;
- fmt.fmt.pix.height = *height;
-
- r = v4l2_ioctl (fd, VIDIOC_S_FMT, &fmt);
- if (r < 0 && errno == EINVAL) {
- /* try again with progressive video */
- fmt.fmt.pix.width = *width;
- fmt.fmt.pix.height = *height;
- fmt.fmt.pix.pixelformat = pixelformat;
- fmt.fmt.pix.field = V4L2_FIELD_NONE;
- r = v4l2_ioctl (fd, VIDIOC_S_FMT, &fmt);
- }
-
- if (r < 0)
- return FALSE;
- }
-
- GST_LOG_OBJECT (v4l2src,
- "got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
-
- *width = fmt.fmt.pix.width;
- *height = fmt.fmt.pix.height;
-
- return TRUE;
-}
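(For orientation only, not part of this patch: a rough sketch of the streaming-capture flow after this change, using the pool helpers referenced in the hunks above. Signatures and return types are assumed from this commit, and error handling is omitted.)

/* init: create the pool (gst_v4l2src_capture_init) */
v4l2src->pool = gst_v4l2_buffer_pool_new (GST_ELEMENT (v4l2src),
    v4l2src->v4l2object->video_fd, v4l2src->num_buffers, caps,
    TRUE, V4L2_BUF_TYPE_VIDEO_CAPTURE);

/* start: queue all free buffers, then STREAMON (gst_v4l2src_capture_start) */
gst_v4l2src_buffer_pool_activate (v4l2src->pool, v4l2src);
gst_v4l2_object_start_streaming (v4l2src->v4l2object);

/* per frame: dequeue a filled buffer; copy it if handing it out would leave
 * the pool with no free buffers (gst_v4l2src_grab_frame) */
pool_buffer = GST_BUFFER (gst_v4l2_buffer_pool_dqbuf (v4l2src->pool));
if (v4l2src->always_copy
    || !gst_v4l2_buffer_pool_available_buffers (v4l2src->pool))
  *buf = gst_buffer_copy (pool_buffer);
else
  *buf = pool_buffer;

/* stop: STREAMOFF (gst_v4l2src_capture_stop) */
gst_v4l2_object_stop_streaming (v4l2src->v4l2object);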
diff --git a/sys/v4l2/v4l2src_calls.h b/sys/v4l2/v4l2src_calls.h
index 038aad08..1fc7411f 100644
--- a/sys/v4l2/v4l2src_calls.h
+++ b/sys/v4l2/v4l2src_calls.h
@@ -41,10 +41,5 @@ GstFlowReturn gst_v4l2src_grab_frame (GstV4l2Src * v4l2src, GstBuffer **buf)
gboolean gst_v4l2src_capture_stop (GstV4l2Src * v4l2src);
gboolean gst_v4l2src_capture_deinit (GstV4l2Src * v4l2src);
-gboolean gst_v4l2src_fill_format_list (GstV4l2Src * v4l2src);
-gboolean gst_v4l2src_clear_format_list (GstV4l2Src * v4l2src);
-
-GstCaps* gst_v4l2src_probe_caps_for_format (GstV4l2Src * v4l2src, guint32 pixelformat,
- const GstStructure *template);
#endif /* __V4L2SRC_CALLS_H__ */