summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorSebastian Dröge <sebastian@centricular.com>2013-12-19 11:47:32 +0100
committerSebastian Dröge <sebastian@centricular.com>2013-12-19 13:12:27 +0100
commit4f5de7e8dadbc4b45625898e3b9314cb9a94e916 (patch)
tree3f43cf80dabf411d47d9931d4a590411df6b2a9e
parente087461632926bce75d38d6a8dee7495b59c3bb2 (diff)
Initial port of emotion to GStreamer 1.0
Samsung hardware specific code is removed. This has to be implemented properly for GStreamer 1.0 later. GStreamer 1.0 has features that allow implementing this without the application knowing anything about the specific hardware.
-rw-r--r--configure.ac33
-rw-r--r--m4/emotion_module.m413
-rw-r--r--src/Makefile_Emotion.am35
-rw-r--r--src/modules/emotion/gstreamer1/emotion_alloc.c6
-rw-r--r--src/modules/emotion/gstreamer1/emotion_convert.c95
-rw-r--r--src/modules/emotion/gstreamer1/emotion_fakeeos.c17
-rw-r--r--src/modules/emotion/gstreamer1/emotion_gstreamer.c187
-rw-r--r--src/modules/emotion/gstreamer1/emotion_gstreamer.h78
-rw-r--r--src/modules/emotion/gstreamer1/emotion_sink.c512
9 files changed, 237 insertions, 739 deletions
diff --git a/configure.ac b/configure.ac
index 082ffb534..a22c396c9 100644
--- a/configure.ac
+++ b/configure.ac
@@ -1796,8 +1796,8 @@ AC_ARG_ENABLE([g-main-loop],
[want_g_main_loop="no"])
AC_ARG_ENABLE([gstreamer],
- [AC_HELP_STRING([--disable-gstreamer],
- [disable gstreamer support. @<:@default=enabled@:>@])],
+ [AC_HELP_STRING([--enable-gstreamer],
+ [enable gstreamer 0.10 support. @<:@default=disabled@:>@])],
[
if test "x${enableval}" = "xyes" ; then
want_gstreamer="yes"
@@ -1805,7 +1805,19 @@ AC_ARG_ENABLE([gstreamer],
want_gstreamer="no"
fi
],
- [want_gstreamer="yes"])
+ [want_gstreamer="no"])
+
+AC_ARG_ENABLE([gstreamer1],
+ [AC_HELP_STRING([--disable-gstreamer1],
+ [disable gstreamer 1.0 support. @<:@default=enabled@:>@])],
+ [
+ if test "x${enableval}" = "xyes" ; then
+ want_gstreamer1="yes"
+ else
+ want_gstreamer1="no"
+ fi
+ ],
+ [want_gstreamer1="yes"])
AC_ARG_ENABLE([tizen],
[AC_HELP_STRING([--enable-tizen],
@@ -1882,12 +1894,17 @@ if test "x${want_g_main_loop}" = "xyes" ; then
fi
# not EFL_OPTIONAL_DEPEND_PKG() because it's only used for ecore examples
+if test "${want_gstreamer1}" = "yes" -a "${want_gstreamer}" = "yes"; then
+ AC_MSG_ERROR([You can only enable either GStreamer 1.0 or GStreamer 0.10 support])
+fi
+
+if test "${want_gstreamer1}" = "yes"; then
+ PKG_CHECK_MODULES([GSTREAMER], [gstreamer-1.0])
+fi
if test "${want_gstreamer}" = "yes"; then
- have_gstreamer_1="no"
- PKG_CHECK_MODULES([GSTREAMER], [gstreamer-1.0], [have_gstreamer_1="yes"], [PKG_CHECK_MODULES([GSTREAMER], [gstreamer-0.10])])
+ PKG_CHECK_MODULES([GSTREAMER], [gstreamer-0.10])
fi
-AM_CONDITIONAL([HAVE_GSTREAMER], [test "${want_gstreamer}" = "yes"])
-AM_CONDITIONAL([HAVE_GSTREAMER_1], [test "${have_gstreamer_1}" = "yes"])
+AM_CONDITIONAL([HAVE_GSTREAMER], [test "${want_gstreamer}" = "yes" -o "${want_gstreamer1}" = "yes"])
EFL_EVAL_PKGS([ECORE])
@@ -3662,10 +3679,12 @@ have_gst_xoverlay="no"
EMOTION_MODULE([xine], [${want_xine}])
EMOTION_MODULE([gstreamer], [${want_gstreamer}])
+EMOTION_MODULE([gstreamer1], [${want_gstreamer1}])
EMOTION_MODULE([generic], [${want_emotion_generic}])
EFL_ADD_FEATURE([EMOTION], [xine])
EFL_ADD_FEATURE([EMOTION], [gstreamer])
+EFL_ADD_FEATURE([EMOTION], [gstreamer1])
EFL_ADD_FEATURE([EMOTION], [generic], [${want_emotion_generic}])
EFL_EVAL_PKGS([EMOTION])
diff --git a/m4/emotion_module.m4 b/m4/emotion_module.m4
index 7685f992c..75884e78d 100644
--- a/m4/emotion_module.m4
+++ b/m4/emotion_module.m4
@@ -45,6 +45,19 @@ AC_DEFUN([EMOTION_MODULE_DEP_CHECK_GSTREAMER],
fi
])
+dnl use: EMOTION_MODULE_DEP_CHECK_GSTREAMER1(want_static)
+dnl where want_static = yes or static
+AC_DEFUN([EMOTION_MODULE_DEP_CHECK_GSTREAMER1],
+[dnl
+ GST_VER=1.0
+ requirements="gstreamer-1.0 >= ${GST_VER} gstreamer-plugins-base-1.0 >= ${GST_VER} gstreamer-video-1.0 >= ${GST_VER} gstreamer-audio-1.0 >= ${GST_VER} gstreamer-tag-1.0 >= ${GST_VER}"
+ if test "$1" = "static"; then
+ EFL_DEPEND_PKG([EMOTION], [EMOTION_MODULE_GSTREAMER1], [${requirements}])
+ else
+ PKG_CHECK_MODULES([EMOTION_MODULE_GSTREAMER1], [${requirements}])
+ fi
+])
+
dnl use: EMOTION_MODULE_DEP_CHECK_GENERIC(want_static)
dnl where want_engine = yes or static
AC_DEFUN([EMOTION_MODULE_DEP_CHECK_GENERIC],
diff --git a/src/Makefile_Emotion.am b/src/Makefile_Emotion.am
index 4689f7d13..479601811 100644
--- a/src/Makefile_Emotion.am
+++ b/src/Makefile_Emotion.am
@@ -52,7 +52,7 @@ modules_emotion_xine_module_la_LIBTOOLFLAGS = --tag=disable-static
endif
endif
-# Gstreamer
+# Gstreamer 0.10
EMOTION_GSTREAMER_SOURCES = \
modules/emotion/gstreamer/emotion_gstreamer.h \
modules/emotion/gstreamer/emotion_gstreamer.c \
@@ -85,6 +85,39 @@ endif
endif
endif
+# Gstreamer 1.0
+EMOTION_GSTREAMER1_SOURCES = \
+modules/emotion/gstreamer1/emotion_gstreamer.h \
+modules/emotion/gstreamer1/emotion_gstreamer.c \
+modules/emotion/gstreamer1/emotion_alloc.c \
+modules/emotion/gstreamer1/emotion_convert.c \
+modules/emotion/gstreamer1/emotion_fakeeos.c \
+modules/emotion/gstreamer1/emotion_sink.c
+
+if EMOTION_STATIC_BUILD_GSTREAMER1
+lib_emotion_libemotion_la_SOURCES += $(EMOTION_GSTREAMER1_SOURCES)
+else
+if EMOTION_BUILD_GSTREAMER1
+emotionmodulegstreamer1dir = $(libdir)/emotion/modules/gstreamer1/$(MODULE_ARCH)
+emotionmodulegstreamer1_LTLIBRARIES = modules/emotion/gstreamer1/module.la
+modules_emotion_gstreamer1_module_la_SOURCES = $(EMOTION_GSTREAMER1_SOURCES)
+modules_emotion_gstreamer1_module_la_CPPFLAGS = -I$(top_builddir)/src/lib/efl \
+@EMOTION_CFLAGS@ \
+@EMOTION_MODULE_GSTREAMER1_CFLAGS@
+modules_emotion_gstreamer1_module_la_LIBADD = \
+@USE_EMOTION_LIBS@ \
+@EMOTION_MODULE_GSTREAMER1_LIBS@
+modules_emotion_gstreamer1_module_la_DEPENDENCIES = @USE_EMOTION_INTERNAL_LIBS@
+modules_emotion_gstreamer1_module_la_LDFLAGS = -module @EFL_LTMODULE_FLAGS@
+modules_emotion_gstreamer1_module_la_LIBTOOLFLAGS = --tag=disable-static
+if HAVE_ECORE_X
+modules_emotion_gstreamer1_module_la_CPPFLAGS += @ECORE_X_CFLAGS@ @ECORE_EVAS_CFLAGS@
+modules_emotion_gstreamer1_module_la_LIBADD += @USE_ECORE_X_LIBS@ @USE_ECORE_EVAS_LIBS@
+modules_emotion_gstreamer1_module_la_DEPENDENCIES += @USE_ECORE_X_INTERNAL_LIBS@ @USE_ECORE_EVAS_INTERNAL_LIBS@
+endif
+endif
+endif
+
# Generic
EMOTION_GENERIC_SOURCES = \
modules/emotion/generic/emotion_generic.h \
diff --git a/src/modules/emotion/gstreamer1/emotion_alloc.c b/src/modules/emotion/gstreamer1/emotion_alloc.c
index c4aae047b..a49a42662 100644
--- a/src/modules/emotion/gstreamer1/emotion_alloc.c
+++ b/src/modules/emotion/gstreamer1/emotion_alloc.c
@@ -8,14 +8,12 @@
#include <glib.h>
#include <gst/gst.h>
-#include <gst/video/video.h>
#include <gst/video/gstvideosink.h>
+#include <gst/video/video.h>
#ifdef HAVE_ECORE_X
# include <Ecore_X.h>
-# ifdef HAVE_XOVERLAY_H
-# include <gst/interfaces/xoverlay.h>
-# endif
+# include <gst/video/videooverlay.h>
#endif
#include "Emotion.h"
diff --git a/src/modules/emotion/gstreamer1/emotion_convert.c b/src/modules/emotion/gstreamer1/emotion_convert.c
index 2664d28be..2a2a327e8 100644
--- a/src/modules/emotion/gstreamer1/emotion_convert.c
+++ b/src/modules/emotion/gstreamer1/emotion_convert.c
@@ -12,9 +12,7 @@
#ifdef HAVE_ECORE_X
# include <Ecore_X.h>
-# ifdef HAVE_XOVERLAY_H
-# include <gst/interfaces/xoverlay.h>
-# endif
+# include <gst/video/videooverlay.h>
#endif
#include "Emotion.h"
@@ -163,89 +161,14 @@ _evas_video_nv12(unsigned char *evas_data, const unsigned char *gst_data, unsign
rows[i] = &gst_data[rh * w + j * w];
}
-static void
-_evas_video_mt12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height EINA_UNUSED)
-{
- const unsigned char **rows;
- unsigned int i;
- unsigned int j;
-
- rows = (const unsigned char **)evas_data;
-
- for (i = 0; i < (h / 32) / 2; i++)
- rows[i] = &gst_data[i * w * 2 * 32];
-
- if ((h / 32) % 2)
- {
- rows[i] = &gst_data[i * w * 2 * 32];
- i++;
- }
-
- for (j = 0; j < ((h / 2) / 32) / 2; ++j, ++i)
- rows[i] = &gst_data[h * w + j * (w / 2) * 2 * 16];
-}
-
-void
-_evas_video_st12_multiplane(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height EINA_UNUSED)
-{
- const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) gst_data;
- const unsigned char **rows;
- unsigned int i;
- unsigned int j;
-
- rows = (const unsigned char **)evas_data;
-
- for (i = 0; i < (h / 32) / 2; i++)
- rows[i] = mp_buf->uaddr[0] + i * w * 2 * 32;
- if ((h / 32) % 2)
- {
- rows[i] = mp_buf->uaddr[0] + i * w * 2 * 32;
- i++;
- }
-
- for (j = 0; j < ((h / 2) / 16) / 2; j++, i++)
- {
- rows[i] = mp_buf->uaddr[1] + j * w * 2 * 16 * 2;
- }
- if (((h / 2) / 16) % 2)
- rows[i] = mp_buf->uaddr[1] + j * w * 2 * 16 * 2;
-}
-
-void
-_evas_video_st12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w EINA_UNUSED, unsigned int h, unsigned int output_height EINA_UNUSED)
-{
- const SCMN_IMGB *imgb = (const SCMN_IMGB *) gst_data;
- const unsigned char **rows;
- unsigned int i, j;
-
- rows = (const unsigned char **)evas_data;
-
- for (i = 0; i < (h / 32) / 2; i++)
- rows[i] = imgb->uaddr[0] + i * imgb->stride[0] * 2 * 32;
- if ((h / 32) % 2)
- {
- rows[i] = imgb->uaddr[0] + i * imgb->stride[0] * 2 * 32;
- i++;
- }
-
- for (j = 0; j < (unsigned int) imgb->elevation[1] / 32 / 2; j++, i++)
- rows[i] = imgb->uaddr[1] + j * imgb->stride[1] * 32 * 2;
- if ((imgb->elevation[1] / 32) % 2)
- rows[i++] = imgb->uaddr[1] + j * imgb->stride[1] * 32 * 2;
-}
-
-const ColorSpace_FourCC_Convertion colorspace_fourcc_convertion[] = {
- { "I420", GST_MAKE_FOURCC('I', '4', '2', '0'), EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_i420, EINA_TRUE },
- { "YV12", GST_MAKE_FOURCC('Y', 'V', '1', '2'), EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_yv12, EINA_TRUE },
- { "YUY2", GST_MAKE_FOURCC('Y', 'U', 'Y', '2'), EVAS_COLORSPACE_YCBCR422601_PL, _evas_video_yuy2, EINA_FALSE },
- { "NV12", GST_MAKE_FOURCC('N', 'V', '1', '2'), EVAS_COLORSPACE_YCBCR420NV12601_PL, _evas_video_nv12, EINA_TRUE },
- { "TM12", GST_MAKE_FOURCC('T', 'M', '1', '2'), EVAS_COLORSPACE_YCBCR420TM12601_PL, _evas_video_mt12, EINA_TRUE },
+const ColorSpace_Format_Convertion colorspace_format_convertion[] = {
+ { "I420", GST_VIDEO_FORMAT_I420, EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_i420, EINA_TRUE },
+ { "YV12", GST_VIDEO_FORMAT_YV12, EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_yv12, EINA_TRUE },
+ { "YUY2", GST_VIDEO_FORMAT_YUY2, EVAS_COLORSPACE_YCBCR422601_PL, _evas_video_yuy2, EINA_FALSE },
+ { "NV12", GST_VIDEO_FORMAT_NV12, EVAS_COLORSPACE_YCBCR420NV12601_PL, _evas_video_nv12, EINA_TRUE },
+ { "BGR", GST_VIDEO_FORMAT_BGR, EVAS_COLORSPACE_ARGB8888, _evas_video_bgr, EINA_FALSE },
+ { "BGRx", GST_VIDEO_FORMAT_BGRx, EVAS_COLORSPACE_ARGB8888, _evas_video_bgrx, EINA_FALSE },
+ { "BGRA", GST_VIDEO_FORMAT_BGRA, EVAS_COLORSPACE_ARGB8888, _evas_video_bgra, EINA_FALSE },
{ NULL, 0, 0, NULL, 0 }
};
-const ColorSpace_Format_Convertion colorspace_format_convertion[] = {
- { "BGR", GST_VIDEO_FORMAT_BGR, EVAS_COLORSPACE_ARGB8888, _evas_video_bgr },
- { "BGRx", GST_VIDEO_FORMAT_BGRx, EVAS_COLORSPACE_ARGB8888, _evas_video_bgrx },
- { "BGRA", GST_VIDEO_FORMAT_BGRA, EVAS_COLORSPACE_ARGB8888, _evas_video_bgra },
- { NULL, 0, 0, NULL }
-};
diff --git a/src/modules/emotion/gstreamer1/emotion_fakeeos.c b/src/modules/emotion/gstreamer1/emotion_fakeeos.c
index fc6dc0f98..25cdb8d40 100644
--- a/src/modules/emotion/gstreamer1/emotion_fakeeos.c
+++ b/src/modules/emotion/gstreamer1/emotion_fakeeos.c
@@ -12,9 +12,7 @@
#ifdef HAVE_ECORE_X
# include <Ecore_X.h>
-# ifdef HAVE_XOVERLAY_H
-# include <gst/interfaces/xoverlay.h>
-# endif
+# include <gst/video/videooverlay.h>
#endif
#include "Emotion.h"
@@ -30,8 +28,7 @@ typedef struct _FakeEOSBinClass
GstBinClass parent;
} FakeEOSBinClass;
-GST_BOILERPLATE(FakeEOSBin, fakeeos_bin, GstBin,
- GST_TYPE_BIN);
+G_DEFINE_TYPE (FakeEOSBin, fakeeos_bin, GST_TYPE_BIN);
static void
fakeeos_bin_handle_message(GstBin * bin, GstMessage * message)
@@ -46,12 +43,7 @@ fakeeos_bin_handle_message(GstBin * bin, GstMessage * message)
break;
}
- GST_BIN_CLASS(parent_class)->handle_message(bin, message);
-}
-
-static void
-fakeeos_bin_base_init(gpointer g_class EINA_UNUSED)
-{
+ GST_BIN_CLASS(fakeeos_bin_parent_class)->handle_message(bin, message);
}
static void
@@ -64,7 +56,6 @@ fakeeos_bin_class_init(FakeEOSBinClass * klass)
}
static void
-fakeeos_bin_init(FakeEOSBin *src EINA_UNUSED,
- FakeEOSBinClass *klass EINA_UNUSED)
+fakeeos_bin_init(FakeEOSBin *src EINA_UNUSED)
{
}
diff --git a/src/modules/emotion/gstreamer1/emotion_gstreamer.c b/src/modules/emotion/gstreamer1/emotion_gstreamer.c
index cf40c7f07..6de9a0976 100644
--- a/src/modules/emotion/gstreamer1/emotion_gstreamer.c
+++ b/src/modules/emotion/gstreamer1/emotion_gstreamer.c
@@ -16,15 +16,15 @@
#include <glib-object.h>
#include <gst/video/gstvideosink.h>
#include <gst/video/video.h>
+#include <gst/audio/audio.h>
+#include <gst/tag/tag.h>
// forcibly disable x overlay window.. broken badly.
#undef HAVE_ECORE_X
#ifdef HAVE_ECORE_X
# include <Ecore_X.h>
-# ifdef HAVE_XOVERLAY_H
-# include <gst/interfaces/xoverlay.h>
-# endif
+# include <gst/video/videooverlay.h>
#endif
#include "emotion_modules.h"
@@ -269,7 +269,7 @@ em_file_open(void *video,
return EINA_FALSE;
}
- gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
+ gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev, NULL);
ev->position = 0.0;
@@ -386,26 +386,17 @@ em_len_get(void *video)
Emotion_Video_Stream *vstream;
Emotion_Audio_Stream *astream;
Eina_List *l;
- GstFormat fmt;
gint64 val;
gboolean ret;
ev = video;
- fmt = GST_FORMAT_TIME;
if (!ev->pipeline) return 0.0;
- ret = gst_element_query_duration(ev->pipeline, &fmt, &val);
+ ret = gst_element_query_duration(ev->pipeline, GST_FORMAT_TIME, &val);
if (!ret)
goto fallback;
- if (fmt != GST_FORMAT_TIME)
- {
- DBG("requrested duration in time, but got %s instead.",
- gst_format_get_name(fmt));
- goto fallback;
- }
-
if (val <= 0.0)
goto fallback;
@@ -507,26 +498,17 @@ static double
em_pos_get(void *video)
{
Emotion_Gstreamer_Video *ev;
- GstFormat fmt;
gint64 val;
gboolean ret;
ev = video;
- fmt = GST_FORMAT_TIME;
if (!ev->pipeline) return 0.0;
- ret = gst_element_query_position(ev->pipeline, &fmt, &val);
+ ret = gst_element_query_position(ev->pipeline, GST_FORMAT_TIME, &val);
if (!ret)
return ev->position;
- if (fmt != GST_FORMAT_TIME)
- {
- ERR("requrested position in time, but got %s instead.",
- gst_format_get_name(fmt));
- return ev->position;
- }
-
ev->position = val / 1000000000.0;
return ev->position;
}
@@ -638,15 +620,16 @@ em_format_get(void *video)
vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
if (vstream)
{
- switch (vstream->fourcc)
+ switch (vstream->format)
{
- case GST_MAKE_FOURCC('I', '4', '2', '0'):
+ case GST_VIDEO_FORMAT_I420:
return EMOTION_FORMAT_I420;
- case GST_MAKE_FOURCC('Y', 'V', '1', '2'):
+ case GST_VIDEO_FORMAT_YV12:
return EMOTION_FORMAT_YV12;
- case GST_MAKE_FOURCC('Y', 'U', 'Y', '2'):
+ case GST_VIDEO_FORMAT_YUY2:
return EMOTION_FORMAT_YUY2;
- case GST_MAKE_FOURCC('A', 'R', 'G', 'B'):
+ case GST_VIDEO_FORMAT_ARGB:
+ /* FIXME: This will be wrong for big endian archs */
return EMOTION_FORMAT_BGRA;
default:
return EMOTION_FORMAT_NONE;
@@ -1144,7 +1127,7 @@ static const Emotion_Engine em_engine =
{
EMOTION_ENGINE_API_VERSION,
EMOTION_ENGINE_PRIORITY_DEFAULT,
- "gstreamer",
+ "gstreamer1",
em_add, /* add */
em_del, /* del */
em_file_open, /* file_open */
@@ -1341,6 +1324,7 @@ _for_each_tag(GstTagList const* list,
if (!ev || !ev->metadata) return;
+ /* FIXME: Should use the GStreamer tag merging functions */
count = gst_tag_list_get_tag_size(list, tag);
for (i = 0; i < count; i++)
@@ -1423,7 +1407,6 @@ _for_each_tag(GstTagList const* list,
break;
}
-#ifdef GST_TAG_CDDA_CDDB_DISCID
if (!strcmp(tag, GST_TAG_CDDA_CDDB_DISCID))
{
gchar *str;
@@ -1437,7 +1420,6 @@ _for_each_tag(GstTagList const* list,
ev->metadata->disc_id = str;
break;
}
-#endif
}
}
@@ -1477,7 +1459,7 @@ _em_restart_stream(void *data)
return EINA_FALSE;
}
- gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
+ gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev, NULL);
}
restart_idler = NULL;
@@ -1490,23 +1472,42 @@ _video_size_get(GstElement *elem, int *width, int *height)
{
GstIterator *itr = NULL;
GstCaps *caps;
- GstStructure *str;
- gpointer pad;
+ GValue v = G_VALUE_INIT;
+ GstPad *pad;
Eina_Bool ret = EINA_FALSE;
+ Eina_Bool done = EINA_FALSE;
+ GstVideoInfo info;
itr = gst_element_iterate_src_pads(elem);
- while(gst_iterator_next(itr, &pad) && !ret)
+
+ while (!done && !ret)
{
- caps = gst_pad_get_caps(GST_PAD(pad));
- str = gst_caps_get_structure(caps, 0);
- if (g_strrstr(gst_structure_get_name(str), "video"))
+ switch(gst_iterator_next(itr, &v))
{
- if (gst_structure_get_int(str, "width", width) && gst_structure_get_int(str, "height", height))
- ret = EINA_TRUE;
+ case GST_ITERATOR_OK:
+ pad = GST_PAD(g_value_get_object(&v));
+ caps = gst_pad_get_current_caps(pad);
+ if (gst_video_info_from_caps(&info, caps))
+ {
+ *width = info.width;
+ *height = info.height;
+ ret = EINA_TRUE;
+ done = EINA_TRUE;
+ }
+ gst_caps_unref(caps);
+ g_value_reset(&v);
+ break;
+ case GST_ITERATOR_RESYNC:
+ gst_iterator_resync(itr);
+ done = ret = EINA_FALSE;
+ break;
+ case GST_ITERATOR_ERROR:
+ case GST_ITERATOR_DONE:
+ done = TRUE;
+ break;
}
- gst_caps_unref(caps);
- gst_object_unref(pad);
}
+ g_value_unset(&v);
gst_iterator_free(itr);
return ret;
@@ -1527,21 +1528,40 @@ static void
_no_more_pads(GstElement *decodebin, gpointer data)
{
GstIterator *itr = NULL;
- gpointer elem;
Emotion_Gstreamer_Video *ev = data;
+ GValue v = G_VALUE_INIT;
+ GstElement *elem;
+ Eina_Bool done = EINA_FALSE;
itr = gst_bin_iterate_elements(GST_BIN(decodebin));
- while(gst_iterator_next(itr, &elem))
+
+ while (!done)
{
- if(_video_size_get(GST_ELEMENT(elem), &ev->src_width, &ev->src_height))
+ switch(gst_iterator_next(itr, &v))
{
- _emotion_pending_ecore_begin();
- ecore_main_loop_thread_safe_call_async(_main_frame_resize, ev);
- gst_object_unref(elem);
- break;
+ case GST_ITERATOR_OK:
+ elem = GST_ELEMENT(g_value_get_object(&v));
+ if(_video_size_get(elem, &ev->src_width, &ev->src_height))
+ {
+ _emotion_pending_ecore_begin();
+ ecore_main_loop_thread_safe_call_async(_main_frame_resize, ev);
+ g_value_reset(&v);
+ done = EINA_TRUE;
+ break;
+ }
+ g_value_reset(&v);
+ break;
+ case GST_ITERATOR_RESYNC:
+ gst_iterator_resync(itr);
+ done = EINA_FALSE;
+ break;
+ case GST_ITERATOR_ERROR:
+ case GST_ITERATOR_DONE:
+ done = TRUE;
+ break;
}
- gst_object_unref(elem);
}
+ g_value_unset(&v);
gst_iterator_free(itr);
}
@@ -1594,6 +1614,7 @@ _eos_main_fct(void *data)
case GST_MESSAGE_STATE_CHANGED:
if (!ev->delete_me)
{
+ /* FIXME: This is conceptionally broken */
if (!g_signal_handlers_disconnect_by_func(msg->src, _no_more_pads, ev))
g_signal_connect(msg->src, "no-more-pads", G_CALLBACK(_no_more_pads), ev);
}
@@ -1658,7 +1679,8 @@ _eos_sync_fct(GstBus *bus EINA_UNUSED, GstMessage *msg, gpointer data)
gst_element_state_get_name(old_state),
gst_element_state_get_name(new_state));
- if (!strncmp(GST_OBJECT_NAME(msg->src), "decodebin", 9) && !strcmp(gst_element_state_get_name(new_state), "READY"))
+ /* FIXME: This is broken */
+ if (!strncmp(GST_OBJECT_NAME(msg->src), "decodebin", 9) && new_state == GST_STATE_READY)
{
send = emotion_gstreamer_message_alloc(ev, msg);
@@ -1681,6 +1703,7 @@ _eos_sync_fct(GstBus *bus EINA_UNUSED, GstMessage *msg, gpointer data)
g_error_free(error);
g_free(debug);
+ /* FIXME: This is broken */
if (strncmp(GST_OBJECT_NAME(msg->src), "xvimagesink", 11) == 0)
{
send = emotion_gstreamer_message_alloc(ev, msg);
@@ -1778,43 +1801,20 @@ _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
Emotion_Video_Stream *vstream;
GstPad *pad = NULL;
GstCaps *caps;
- GstStructure *structure;
GstQuery *query;
- const GValue *val;
- gchar *str;
gdouble length_time = 0.0;
- gint width;
- gint height;
- gint fps_num;
- gint fps_den;
- guint32 fourcc = 0;
+ GstVideoInfo info;
g_signal_emit_by_name(ev->pipeline, "get-video-pad", i, &pad);
if (!pad)
continue;
- caps = gst_pad_get_negotiated_caps(pad);
+ caps = gst_pad_get_current_caps(pad);
if (!caps)
goto unref_pad_v;
- structure = gst_caps_get_structure(caps, 0);
- str = gst_caps_to_string(caps);
-
- if (!gst_structure_get_int(structure, "width", &width))
- goto unref_caps_v;
- if (!gst_structure_get_int(structure, "height", &height))
- goto unref_caps_v;
- if (!gst_structure_get_fraction(structure, "framerate", &fps_num, &fps_den))
- goto unref_caps_v;
- if (g_str_has_prefix(str, "video/x-raw-yuv"))
- {
- val = gst_structure_get_value(structure, "format");
- fourcc = gst_value_get_fourcc(val);
- }
- else if (g_str_has_prefix(str, "video/x-raw-rgb"))
- fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
- else
+ if (!gst_video_info_from_caps(&info, caps))
goto unref_caps_v;
query = gst_query_new_duration(GST_FORMAT_TIME);
@@ -1832,11 +1832,11 @@ _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
if (!vstream) goto unref_query_v;
vstream->length_time = length_time;
- vstream->width = width;
- vstream->height = height;
- vstream->fps_num = fps_num;
- vstream->fps_den = fps_den;
- vstream->fourcc = fourcc;
+ vstream->width = info.width;
+ vstream->height = info.height;
+ vstream->fps_num = info.fps_n;
+ vstream->fps_den = info.fps_d;
+ vstream->format = info.finfo->format;
vstream->index = i;
unref_query_v:
@@ -1853,25 +1853,20 @@ _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
Emotion_Audio_Stream *astream;
GstPad *pad;
GstCaps *caps;
- GstStructure *structure;
+ GstAudioInfo info;
GstQuery *query;
gdouble length_time = 0.0;
- gint channels;
- gint samplerate;
g_signal_emit_by_name(ev->pipeline, "get-audio-pad", i, &pad);
if (!pad)
continue;
- caps = gst_pad_get_negotiated_caps(pad);
+ caps = gst_pad_get_current_caps(pad);
if (!caps)
goto unref_pad_a;
- structure = gst_caps_get_structure(caps, 0);
- if (!gst_structure_get_int(structure, "channels", &channels))
- goto unref_caps_a;
- if (!gst_structure_get_int(structure, "rate", &samplerate))
+ if (!gst_audio_info_from_caps(&info, caps))
goto unref_caps_a;
query = gst_query_new_duration(GST_FORMAT_TIME);
@@ -1890,8 +1885,8 @@ _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
ev->audio_streams = eina_list_append(ev->audio_streams, astream);
astream->length_time = length_time;
- astream->channels = channels;
- astream->samplerate = samplerate;
+ astream->channels = info.channels;
+ astream->samplerate = info.rate;
unref_query_a:
gst_query_unref(query);
@@ -1930,7 +1925,7 @@ _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
vstream->height = 200;
vstream->fps_num = 25;
vstream->fps_den = 1;
- vstream->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
+ vstream->format = GST_VIDEO_FORMAT_ARGB;
g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL);
g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL);
@@ -1961,9 +1956,9 @@ _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
if (vstream)
{
DBG("video size=%dx%d, fps=%d/%d, "
- "fourcc=%"GST_FOURCC_FORMAT", length=%"GST_TIME_FORMAT,
+ "format=%s, length=%"GST_TIME_FORMAT,
vstream->width, vstream->height, vstream->fps_num, vstream->fps_den,
- GST_FOURCC_ARGS(vstream->fourcc),
+ gst_video_format_to_string(vstream->format),
GST_TIME_ARGS((guint64)(vstream->length_time * GST_SECOND)));
}
diff --git a/src/modules/emotion/gstreamer1/emotion_gstreamer.h b/src/modules/emotion/gstreamer1/emotion_gstreamer.h
index a6af9bc4d..d61456429 100644
--- a/src/modules/emotion/gstreamer1/emotion_gstreamer.h
+++ b/src/modules/emotion/gstreamer1/emotion_gstreamer.h
@@ -26,7 +26,7 @@ struct _Emotion_Video_Stream
gint height;
gint fps_num;
gint fps_den;
- guint32 fourcc;
+ GstVideoFormat format;
int index;
};
@@ -124,7 +124,6 @@ struct _Emotion_Gstreamer_Video
Eina_Bool audio_mute : 1;
Eina_Bool pipeline_parsed : 1;
Eina_Bool delete_me : 1;
- Eina_Bool samsung : 1;
Eina_Bool kill_buffer : 1;
Eina_Bool stream : 1;
Eina_Bool priority : 1;
@@ -170,7 +169,6 @@ struct _EvasVideoSinkPrivate {
//
// Protected by the buffer mutex
Eina_Bool unlocked : 1;
- Eina_Bool samsung : 1; /** ST12 will only define a Samsung specific GstBuffer */
};
struct _Emotion_Gstreamer_Buffer
@@ -264,89 +262,17 @@ void emotion_gstreamer_message_free(Emotion_Gstreamer_Message *send);
Eina_Bool _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
Eina_Bool force);
-typedef struct _ColorSpace_FourCC_Convertion ColorSpace_FourCC_Convertion;
typedef struct _ColorSpace_Format_Convertion ColorSpace_Format_Convertion;
-struct _ColorSpace_FourCC_Convertion
-{
- const char *name;
- guint32 fourcc;
- Evas_Colorspace eformat;
- Evas_Video_Convert_Cb func;
- Eina_Bool force_height;
-};
-
struct _ColorSpace_Format_Convertion
{
const char *name;
GstVideoFormat format;
Evas_Colorspace eformat;
Evas_Video_Convert_Cb func;
+ Eina_Bool force_height;
};
-extern const ColorSpace_FourCC_Convertion colorspace_fourcc_convertion[];
extern const ColorSpace_Format_Convertion colorspace_format_convertion[];
-/** Samsung specific infrastructure - do not touch, do not modify */
-#define MPLANE_IMGB_MAX_COUNT 4
-#define SCMN_IMGB_MAX_PLANE 4
-
-typedef struct _GstMultiPlaneImageBuffer GstMultiPlaneImageBuffer;
-typedef struct _SCMN_IMGB SCMN_IMGB;
-
-struct _GstMultiPlaneImageBuffer
-{
- GstBuffer buffer;
-
- /* width of each image plane */
- gint width[MPLANE_IMGB_MAX_COUNT];
- /* height of each image plane */
- gint height[MPLANE_IMGB_MAX_COUNT];
- /* stride of each image plane */
- gint stride[MPLANE_IMGB_MAX_COUNT];
- /* elevation of each image plane */
- gint elevation[MPLANE_IMGB_MAX_COUNT];
- /* user space address of each image plane */
- guchar *uaddr[MPLANE_IMGB_MAX_COUNT];
- /* Index of real address of each image plane, if needs */
- guchar *index[MPLANE_IMGB_MAX_COUNT];
- /* left postion, if needs */
- gint x;
- /* top position, if needs */
- gint y;
- /* to align memory */
- gint __dummy2;
- /* arbitrary data */
- gint data[16];
-};
-
-struct _SCMN_IMGB
-{
- /* width of each image plane */
- int width[SCMN_IMGB_MAX_PLANE];
- /* height of each image plane */
- int height[SCMN_IMGB_MAX_PLANE];
- /* stride of each image plane */
- int stride[SCMN_IMGB_MAX_PLANE];
- /* elevation of each image plane */
- int elevation[SCMN_IMGB_MAX_PLANE];
- /* user space address of each image plane */
- guchar *uaddr[SCMN_IMGB_MAX_PLANE];
- /* physical address of each image plane, if needs */
- guchar *p[SCMN_IMGB_MAX_PLANE];
- /* color space type of image */
- int cs;
- /* left postion, if needs */
- int x;
- /* top position, if needs */
- int y;
- /* to align memory */
- int __dummy2;
- /* arbitrary data */
- int data[16];
-};
-
-void _evas_video_st12_multiplane(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height EINA_UNUSED);
-void _evas_video_st12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w EINA_UNUSED, unsigned int h, unsigned int output_height EINA_UNUSED);
-
#endif /* __EMOTION_GSTREAMER_H__ */
diff --git a/src/modules/emotion/gstreamer1/emotion_sink.c b/src/modules/emotion/gstreamer1/emotion_sink.c
index f97ab3e0d..7aa5fdbaf 100644
--- a/src/modules/emotion/gstreamer1/emotion_sink.c
+++ b/src/modules/emotion/gstreamer1/emotion_sink.c
@@ -20,9 +20,7 @@
#ifdef HAVE_ECORE_X
# include <Ecore_X.h>
# include <Ecore_Evas.h>
-# ifdef HAVE_XOVERLAY_H
-# include <gst/interfaces/xoverlay.h>
-# endif
+# include <gst/video/videooverlay.h>
#endif
#include "emotion_modules.h"
@@ -30,8 +28,7 @@
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
GST_PAD_SINK, GST_PAD_ALWAYS,
- GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2, NV12, ST12, TM12 }") ";"
- GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));
+ GST_STATIC_CAPS(GST_VIDEO_CAPS_MAKE("{ I420, YV12, YUY2, NV12, BGRx, BGR, BGRA }")));
GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
#define GST_CAT_DEFAULT evas_video_sink_debug
@@ -52,37 +49,24 @@ enum {
static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };
-#define _do_init(bla) \
+#define _do_init \
GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug, \
"emotion-sink", \
0, \
"emotion video sink")
-GST_BOILERPLATE_FULL(EvasVideoSink,
+#define parent_class evas_video_sink_parent_class
+G_DEFINE_TYPE_WITH_CODE(EvasVideoSink,
evas_video_sink,
- GstVideoSink,
GST_TYPE_VIDEO_SINK,
_do_init);
static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
static void evas_video_sink_main_render(void *data);
-static void evas_video_sink_samsung_main_render(void *data);
static void
-evas_video_sink_base_init(gpointer g_class)
-{
- GstElementClass* element_class;
-
- element_class = GST_ELEMENT_CLASS(g_class);
- gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
- gst_element_class_set_details_simple(element_class, "Evas video sink",
- "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
- "Vincent Torri <vtorri@univ-evry.fr>");
-}
-
-static void
-evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass EINA_UNUSED)
+evas_video_sink_init(EvasVideoSink* sink)
{
EvasVideoSinkPrivate* priv;
@@ -93,7 +77,6 @@ evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass EINA_UNUSED)
priv->height = 0;
priv->func = NULL;
priv->eformat = EVAS_COLORSPACE_ARGB8888;
- priv->samsung = EINA_FALSE;
eina_lock_new(&priv->m);
eina_condition_new(&priv->c, &priv->m);
priv->unlocked = EINA_FALSE;
@@ -138,8 +121,6 @@ evas_video_sink_set_property(GObject * object, guint prop_id,
INF("sink set ev.");
eina_lock_take(&priv->m);
priv->ev = g_value_get_pointer (value);
- if (priv->ev)
- priv->ev->samsung = EINA_TRUE;
eina_lock_release(&priv->m);
break;
default:
@@ -215,73 +196,38 @@ gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
{
EvasVideoSink* sink;
EvasVideoSinkPrivate* priv;
- GstStructure *structure;
- GstVideoFormat format;
- guint32 fourcc;
+ GstVideoInfo info;
unsigned int i;
sink = EVAS_VIDEO_SINK(bsink);
priv = sink->priv;
- structure = gst_caps_get_structure(caps, 0);
-
- if (gst_structure_get_int(structure, "width", (int*) &priv->width)
- && gst_structure_get_int(structure, "height", (int*) &priv->height)
- && gst_structure_get_fourcc(structure, "format", &fourcc))
- {
- priv->source_height = priv->height;
-
- for (i = 0; colorspace_fourcc_convertion[i].name != NULL; ++i)
- if (fourcc == colorspace_fourcc_convertion[i].fourcc)
- {
- DBG("Found '%s'", colorspace_fourcc_convertion[i].name);
- priv->eformat = colorspace_fourcc_convertion[i].eformat;
- priv->func = colorspace_fourcc_convertion[i].func;
- if (colorspace_fourcc_convertion[i].force_height)
- {
- priv->height = (priv->height >> 1) << 1;
- }
- if (priv->ev)
- priv->ev->kill_buffer = EINA_TRUE;
- return TRUE;
- }
-
- if (fourcc == GST_MAKE_FOURCC('S', 'T', '1', '2'))
- {
- DBG("Found '%s'", "ST12");
- priv->eformat = EVAS_COLORSPACE_YCBCR420TM12601_PL;
- priv->samsung = EINA_TRUE;
- priv->func = NULL;
- if (priv->ev)
- {
- priv->ev->samsung = EINA_TRUE;
- priv->ev->kill_buffer = EINA_TRUE;
- }
- return TRUE;
- }
- }
-
- INF("fallback code !");
- if (!gst_video_format_parse_caps(caps, &format, (int*) &priv->width, (int*) &priv->height))
+ if (!gst_video_info_from_caps(&info, caps))
{
ERR("Unable to parse caps.");
return FALSE;
}
+ priv->width = info.width;
+ priv->height = info.height;
priv->source_height = priv->height;
for (i = 0; colorspace_format_convertion[i].name != NULL; ++i)
- if (format == colorspace_format_convertion[i].format)
+ if (info.finfo->format == colorspace_format_convertion[i].format)
{
DBG("Found '%s'", colorspace_format_convertion[i].name);
priv->eformat = colorspace_format_convertion[i].eformat;
priv->func = colorspace_format_convertion[i].func;
+ if (colorspace_format_convertion[i].force_height)
+ {
+ priv->height = (priv->height >> 1) << 1;
+ }
if (priv->ev)
- priv->ev->kill_buffer = EINA_FALSE;
+ priv->ev->kill_buffer = EINA_TRUE;
return TRUE;
}
- ERR("unsupported : %d\n", format);
+ ERR("unsupported : %s\n", gst_video_format_to_string(info.finfo->format));
return FALSE;
}
@@ -355,12 +301,12 @@ evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
EvasVideoSinkPrivate *priv;
EvasVideoSink *sink;
- INF("sink preroll %p [%i]", GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
+ INF("sink preroll %p [%" G_GSIZE_FORMAT "]", buffer, gst_buffer_get_size(buffer));
sink = EVAS_VIDEO_SINK(bsink);
priv = sink->priv;
- if (GST_BUFFER_SIZE(buffer) <= 0 && !priv->samsung)
+ if (gst_buffer_get_size(buffer) <= 0)
{
WRN("empty buffer");
return GST_FLOW_OK;
@@ -370,32 +316,8 @@ evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
if (send)
{
- if (priv->samsung)
- {
- if (!priv->func)
- {
- GstStructure *structure;
- GstCaps *caps;
- gboolean is_multiplane = FALSE;
-
- caps = GST_BUFFER_CAPS(buffer);
- structure = gst_caps_get_structure (caps, 0);
- gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
- gst_caps_unref(caps);
-
- if (is_multiplane)
- priv->func = _evas_video_st12_multiplane;
- else
- priv->func = _evas_video_st12;
- }
- _emotion_pending_ecore_begin();
- ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
- }
- else
- {
- _emotion_pending_ecore_begin();
- ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
- }
+ _emotion_pending_ecore_begin();
+ ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
}
return GST_FLOW_OK;
@@ -427,32 +349,8 @@ evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
return GST_FLOW_ERROR;
}
- if (priv->samsung)
- {
- if (!priv->func)
- {
- GstStructure *structure;
- GstCaps *caps;
- gboolean is_multiplane = FALSE;
-
- caps = GST_BUFFER_CAPS(buffer);
- structure = gst_caps_get_structure (caps, 0);
- gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
- gst_caps_unref(caps);
-
- if (is_multiplane)
- priv->func = _evas_video_st12_multiplane;
- else
- priv->func = _evas_video_st12;
- }
- _emotion_pending_ecore_begin();
- ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
- }
- else
- {
- _emotion_pending_ecore_begin();
- ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
- }
+ _emotion_pending_ecore_begin();
+ ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
eina_condition_wait(&priv->c);
eina_lock_release(&priv->m);
@@ -486,151 +384,6 @@ _update_emotion_fps(Emotion_Gstreamer_Video *ev)
}
static void
-evas_video_sink_samsung_main_render(void *data)
-{
- Emotion_Gstreamer_Buffer *send;
- Emotion_Video_Stream *vstream;
- EvasVideoSinkPrivate *priv = NULL;
- GstBuffer* buffer;
- unsigned char *evas_data;
- const guint8 *gst_data;
- GstFormat fmt = GST_FORMAT_TIME;
- gint64 pos;
- Eina_Bool preroll = EINA_FALSE;
- int stride, elevation;
- Evas_Coord w, h;
-
- send = data;
-
- if (!send) goto exit_point;
-
- priv = send->sink;
- buffer = send->frame;
- preroll = send->preroll;
-
- /* frame after cleanup */
- if (!preroll && !send->ev->last_buffer)
- {
- priv = NULL;
- goto exit_point;
- }
-
- if (!priv || !priv->o || priv->unlocked)
- goto exit_point;
-
- if (send->ev->send)
- {
- emotion_gstreamer_buffer_free(send->ev->send);
- send->ev->send = NULL;
- }
-
- if (!send->ev->stream && !send->force)
- {
- send->ev->send = send;
- _emotion_frame_new(send->ev->obj);
- goto exit_stream;
- }
-
- _emotion_gstreamer_video_pipeline_parse(send->ev, EINA_TRUE);
-
- /* Getting stride to compute the right size and then fill the object properly */
- /* Y => [0] and UV in [1] */
- if (priv->func == _evas_video_st12_multiplane)
- {
- const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) buffer;
-
- stride = mp_buf->stride[0];
- elevation = mp_buf->elevation[0];
- priv->width = mp_buf->width[0];
- priv->height = mp_buf->height[0];
-
- gst_data = (const guint8 *) mp_buf;
- }
- else
- {
- const SCMN_IMGB *imgb = (const SCMN_IMGB *) GST_BUFFER_MALLOCDATA(buffer);
-
- stride = imgb->stride[0];
- elevation = imgb->elevation[0];
- priv->width = imgb->width[0];
- priv->height = imgb->height[0];
-
- gst_data = (const guint8 *) imgb;
- }
-
- evas_object_geometry_get(priv->o, NULL, NULL, &w, &h);
-
- send->ev->fill.width = (double) stride / priv->width;
- send->ev->fill.height = (double) elevation / priv->height;
-
- evas_object_image_alpha_set(priv->o, 0);
- evas_object_image_colorspace_set(priv->o, priv->eformat);
- evas_object_image_size_set(priv->o, stride, elevation);
-
- _update_emotion_fps(send->ev);
-
- evas_data = evas_object_image_data_get(priv->o, 1);
-
- if (priv->func)
- priv->func(evas_data, gst_data, stride, elevation, elevation);
- else
- WRN("No way to decode %x colorspace !", priv->eformat);
-
- evas_object_image_data_set(priv->o, evas_data);
- evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
- evas_object_image_pixels_dirty_set(priv->o, 0);
-
- if (!preroll && send->ev->play_started)
- {
- _emotion_playback_started(send->ev->obj);
- send->ev->play_started = 0;
- }
-
- if (!send->force)
- {
- _emotion_frame_new(send->ev->obj);
- }
-
- vstream = eina_list_nth(send->ev->video_streams, send->ev->video_stream_nbr - 1);
-
- gst_element_query_position(send->ev->pipeline, &fmt, &pos);
- send->ev->position = (double)pos / (double)GST_SECOND;
-
- if (vstream)
- {
- vstream->width = priv->width;
- vstream->height = priv->height;
-
- _emotion_video_pos_update(send->ev->obj, send->ev->position, vstream->length_time);
- }
-
- send->ev->ratio = (double) priv->width / (double) priv->height;
- _emotion_frame_refill(send->ev->obj, send->ev->fill.width, send->ev->fill.height);
- _emotion_frame_resize(send->ev->obj, priv->width, priv->height, send->ev->ratio);
-
- buffer = gst_buffer_ref(buffer);
- if (send->ev->last_buffer) gst_buffer_unref(send->ev->last_buffer);
- send->ev->last_buffer = buffer;
-
- exit_point:
- if (send) emotion_gstreamer_buffer_free(send);
-
- exit_stream:
- if (priv)
- {
- if (preroll || !priv->o)
- {
- _emotion_pending_ecore_end();
- return;
- }
-
- if (!priv->unlocked)
- eina_condition_signal(&priv->c);
- }
- _emotion_pending_ecore_end();
-}
-
-static void
evas_video_sink_main_render(void *data)
{
Emotion_Gstreamer_Buffer *send;
@@ -638,8 +391,8 @@ evas_video_sink_main_render(void *data)
Emotion_Video_Stream *vstream;
EvasVideoSinkPrivate *priv = NULL;
GstBuffer *buffer;
+ GstMapInfo map;
unsigned char *evas_data;
- GstFormat fmt = GST_FORMAT_TIME;
gint64 pos;
Eina_Bool preroll = EINA_FALSE;
@@ -676,6 +429,9 @@ evas_video_sink_main_render(void *data)
goto exit_stream;
}
+ if (!gst_buffer_map(buffer, &map, GST_MAP_READ))
+ goto exit_stream;
+
_emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
INF("sink main render [%i, %i] (source height: %i)", priv->width, priv->height, priv->source_height);
@@ -687,10 +443,12 @@ evas_video_sink_main_render(void *data)
evas_data = evas_object_image_data_get(priv->o, 1);
if (priv->func)
- priv->func(evas_data, GST_BUFFER_DATA(buffer), priv->width, priv->source_height, priv->height);
+ priv->func(evas_data, map.data, priv->width, priv->source_height, priv->height);
else
WRN("No way to decode %x colorspace !", priv->eformat);
+ gst_buffer_unmap(buffer, &map);
+
evas_object_image_data_set(priv->o, evas_data);
evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
evas_object_image_pixels_dirty_set(priv->o, 0);
@@ -708,7 +466,7 @@ evas_video_sink_main_render(void *data)
_emotion_frame_new(ev->obj);
}
- gst_element_query_position(ev->pipeline, &fmt, &pos);
+ gst_element_query_position(ev->pipeline, GST_FORMAT_TIME, &pos);
ev->position = (double)pos / (double)GST_SECOND;
vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
@@ -755,38 +513,14 @@ unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
}
static void
-marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value EINA_UNUSED,
- guint n_param_values, const GValue * param_values,
- gpointer invocation_hint EINA_UNUSED, gpointer marshal_data)
-{
- typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
- marshalfunc_VOID__MINIOBJECT callback;
- GCClosure *cc;
- gpointer data1, data2;
-
- cc = (GCClosure *) closure;
-
- g_return_if_fail(n_param_values == 2);
-
- if (G_CCLOSURE_SWAP_DATA(closure)) {
- data1 = closure->data;
- data2 = g_value_peek_pointer(param_values + 0);
- } else {
- data1 = g_value_peek_pointer(param_values + 0);
- data2 = closure->data;
- }
- callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);
-
- callback(data1, gst_value_get_mini_object(param_values + 1), data2);
-}
-
-static void
evas_video_sink_class_init(EvasVideoSinkClass* klass)
{
GObjectClass* gobject_class;
+ GstElementClass* gstelement_class;
GstBaseSinkClass* gstbase_sink_class;
gobject_class = G_OBJECT_CLASS(klass);
+ gstelement_class = GST_ELEMENT_CLASS(klass);
gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
@@ -810,11 +544,16 @@ evas_video_sink_class_init(EvasVideoSinkClass* klass)
0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_EV,
g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
- "THe internal data of the emotion object",
+ "The internal data of the emotion object",
G_PARAM_READWRITE));
gobject_class->dispose = evas_video_sink_dispose;
+ gst_element_class_add_pad_template(gstelement_class, gst_static_pad_template_get(&sinktemplate));
+ gst_element_class_set_static_metadata(gstelement_class, "Evas video sink",
+ "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
+ "Vincent Torri <vtorri@univ-evry.fr>");
+
gstbase_sink_class->set_caps = evas_video_sink_set_caps;
gstbase_sink_class->stop = evas_video_sink_stop;
gstbase_sink_class->start = evas_video_sink_start;
@@ -829,7 +568,7 @@ evas_video_sink_class_init(EvasVideoSinkClass* klass)
0,
0,
0,
- marshal_VOID__MINIOBJECT,
+ NULL,
G_TYPE_NONE, 1, GST_TYPE_BUFFER);
}
@@ -1011,150 +750,14 @@ _video_update_pixels(void *data, Evas_Object *obj EINA_UNUSED, const Evas_Video_
send->force = EINA_TRUE;
ev->send = NULL;
- if (priv->samsung)
- {
- _emotion_pending_ecore_begin();
- evas_video_sink_samsung_main_render(send);
- }
- else
- {
- _emotion_pending_ecore_begin();
- evas_video_sink_main_render(send);
- }
+ _emotion_pending_ecore_begin();
+ evas_video_sink_main_render(send);
}
+
static void
-_image_resize(void *data, Evas *e EINA_UNUSED, Evas_Object *obj, void *event_info EINA_UNUSED)
+_image_resize(void *data EINA_UNUSED, Evas *e EINA_UNUSED, Evas_Object *obj EINA_UNUSED, void *event_info EINA_UNUSED)
{
- Emotion_Gstreamer_Video *ev = data;
- Evas_Coord width, height;
- int image_area, src_area;
- double ratio;
-
- GstElementFactory *cfactory = NULL;
- GstElement *convert = NULL, *filter = NULL, *queue = NULL;
- GstPad *pad = NULL, *teepad = NULL;
- GstCaps *caps = NULL;
- Eina_List *l, *engines;
- const char *ename, *engine = NULL;
-
- evas_object_geometry_get(obj, NULL, NULL, &width, &height);
- image_area = width * height;
- src_area = ev->src_width * ev->src_height;
- ratio = (double)image_area / (double)src_area;
-
- // when an image is much smaller than original video size,
- // add fimcconvert element to the pipeline
- if (ratio < 0.8 && ev->stream && !ev->convert)
- {
- cfactory = gst_element_factory_find("fimcconvert");
- if (!cfactory) return;
-
- convert = gst_element_factory_create(cfactory, NULL);
- if (!convert) return;
-
- // add capsfilter to limit size and formats based on the backend
- filter = gst_element_factory_make("capsfilter", "fimccapsfilter");
- if (!filter)
- {
- gst_object_unref(convert);
- return;
- }
-
- engines = evas_render_method_list();
- EINA_LIST_FOREACH(engines, l, ename)
- {
- if (evas_render_method_lookup(ename) ==
- evas_output_method_get(evas_object_evas_get(obj)))
- {
- engine = ename;
- break;
- }
- }
-
- if (!engine) return;
-
- if (strstr(engine, "software") != NULL)
- {
- caps = gst_caps_new_simple("video/x-raw-rgb",
- "width", G_TYPE_INT, width,
- "height", G_TYPE_INT, height,
- NULL);
- }
- else if (strstr(engine, "gl") != NULL)
- {
- caps = gst_caps_new_simple("video/x-raw-yuv",
- "width", G_TYPE_INT, width,
- "height", G_TYPE_INT, height,
- NULL);
- }
- g_object_set(G_OBJECT(filter), "caps", caps, NULL);
- gst_caps_unref(caps);
-
- // add new elements to the pipeline
- queue = gst_bin_get_by_name(GST_BIN(ev->sink), "equeue");
- gst_element_unlink(ev->tee, queue);
- gst_element_release_request_pad(ev->tee, ev->eteepad);
- gst_object_unref(ev->eteepad);
-
- gst_bin_add_many(GST_BIN(ev->sink), convert, filter, NULL);
- gst_element_link_many(ev->tee, convert, filter, queue, NULL);
-
- pad = gst_element_get_pad(convert, "sink");
- teepad = gst_element_get_request_pad(ev->tee, "src%d");
- gst_pad_link(teepad, pad);
- gst_object_unref(pad);
-
- gst_element_sync_state_with_parent(convert);
- gst_element_sync_state_with_parent(filter);
-
- ev->eteepad = teepad;
- ev->convert = convert;
- evas_render_method_list_free(engines);
-
- INF("add fimcconvert element. video size: %dx%d. emotion object size: %dx%d",
- ev->src_width, ev->src_height, width, height);
- }
- // set size again to the capsfilter when the image is resized
- else if (ev->convert)
- {
- filter = gst_bin_get_by_name(GST_BIN(ev->sink), "fimccapsfilter");
-
- engines = evas_render_method_list();
- EINA_LIST_FOREACH(engines, l, ename)
- {
- if (evas_render_method_lookup(ename) ==
- evas_output_method_get(evas_object_evas_get(obj)))
- {
- engine = ename;
- break;
- }
- }
-
- if (!engine) return;
-
- if (strstr(engine, "software") != NULL)
- {
- caps = gst_caps_new_simple("video/x-raw-rgb",
- "width", G_TYPE_INT, width,
- "height", G_TYPE_INT, height,
- NULL);
- }
- else if (strstr(engine, "gl") != NULL)
- {
- caps = gst_caps_new_simple("video/x-raw-yuv",
- "width", G_TYPE_INT, width,
- "height", G_TYPE_INT, height,
- NULL);
- }
-
- g_object_set(G_OBJECT(filter), "caps", caps, NULL);
- gst_caps_unref(caps);
- evas_render_method_list_free(engines);
-
- INF("set capsfilter size again:. video size: %dx%d. emotion object size: %dx%d",
- ev->src_width, ev->src_height, width, height);
- }
}
GstElement *
@@ -1173,7 +776,7 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
GstPad *teepad;
int flags;
const char *launch;
-#if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
+#if defined HAVE_ECORE_X
const char *engine = NULL;
Eina_List *engines;
#endif
@@ -1208,7 +811,7 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
}
else
{
- playbin = gst_element_factory_make("playbin2", "playbin");
+ playbin = gst_element_factory_make("playbin", "playbin");
if (!playbin)
{
ERR("Unable to create 'playbin' GstElement.");
@@ -1230,7 +833,7 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
goto unref_pipeline;
}
-#if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
+#if defined HAVE_ECORE_X
if (window_manager_video)
{
Eina_List *l;
@@ -1279,11 +882,7 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
{
unsigned int pos[2];
-#ifdef HAVE_X_OVERLAY_SET
- gst_x_overlay_set_window_handle(GST_X_OVERLAY(xvsink), win);
-#else
- gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(xvsink), win);
-#endif
+ gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(xvsink), win);
ev->win = win;
ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_PARENT, &parent, 1);
@@ -1301,7 +900,7 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
evas_render_method_list_free(engines);
}
#else
-//# warning "missing: ecore_x OR xoverlay"
+//# warning "missing: ecore_x"
#endif
esink = gst_element_factory_make("emotion-sink", "sink");
@@ -1329,13 +928,14 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
gst_element_link_many(queue, esink, NULL);
/* link both sink to GstTee */
- pad = gst_element_get_pad(queue, "sink");
- teepad = gst_element_get_request_pad(tee, "src%d");
+ pad = gst_element_get_static_pad(queue, "sink");
+ teepad = gst_element_get_request_pad(tee, "src_%u");
gst_pad_link(teepad, pad);
gst_object_unref(pad);
ev->eteepad = teepad;
+ /* FIXME: Why a bin that drops the EOS message?! */
if (xvsink)
{
GstElement *fakeeos;
@@ -1350,11 +950,11 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
gst_bin_add_many(GST_BIN(fakeeos), queue, xvsink, NULL);
gst_element_link_many(queue, xvsink, NULL);
- queue_pad = gst_element_get_pad(queue, "sink");
+ queue_pad = gst_element_get_static_pad(queue, "sink");
gst_element_add_pad(fakeeos, gst_ghost_pad_new("sink", queue_pad));
- pad = gst_element_get_pad(fakeeos, "sink");
- teepad = gst_element_get_request_pad(tee, "src%d");
+ pad = gst_element_get_static_pad(fakeeos, "sink");
+ teepad = gst_element_get_request_pad(tee, "src_%u");
gst_pad_link(teepad, pad);
xvsink = fakeeos;
@@ -1371,7 +971,7 @@ gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
}
}
- teepad = gst_element_get_pad(tee, "sink");
+ teepad = gst_element_get_static_pad(tee, "sink");
gst_element_add_pad(bin, gst_ghost_pad_new("sink", teepad));
gst_object_unref(teepad);