author     Julien Moutte <julien@moutte.net>    2005-11-23 15:50:51 +0000
committer  Julien Moutte <julien@moutte.net>    2005-11-23 15:50:51 +0000
commit     2ea4f5b3c92819681f2d9e6f5f8d014b4d179344 (patch)
tree       db8696ff4b83e3f0ea882402e3435d8dacdbb488 /gst/effectv
parent     48520a455d828b4a6885fb37be48ebb720ae259b (diff)
VideoFilter inherits from BaseTransform
Original commit message from CVS:
2005-11-23 Julien MOUTTE <julien@moutte.net>
* ext/cairo/gsttimeoverlay.c:
(gst_timeoverlay_update_font_height),
(gst_timeoverlay_set_caps), (gst_timeoverlay_get_unit_size),
(gst_timeoverlay_transform), (gst_timeoverlay_base_init),
(gst_timeoverlay_class_init), (gst_timeoverlay_init),
(gst_timeoverlay_get_type):
* ext/cairo/gsttimeoverlay.h:
* gst/debug/Makefile.am:
* gst/debug/gstnavigationtest.c:
(gst_navigationtest_handle_src_event),
(gst_navigationtest_get_unit_size),
(gst_navigationtest_set_caps),
(gst_navigationtest_transform),
(gst_navigationtest_change_state),
(gst_navigationtest_base_init), (gst_navigationtest_class_init),
(gst_navigationtest_init), (gst_navigationtest_get_type),
(plugin_init):
* gst/debug/gstnavigationtest.h:
* gst/effectv/Makefile.am:
* gst/effectv/gstaging.c: (gst_agingtv_set_caps),
(gst_agingtv_get_unit_size), (gst_agingtv_transform),
(gst_agingtv_base_init), (gst_agingtv_class_init),
(gst_agingtv_init), (gst_agingtv_get_type):
* gst/effectv/gstdice.c: (gst_dicetv_set_caps),
(gst_dicetv_get_unit_size), (gst_dicetv_transform),
(gst_dicetv_base_init), (gst_dicetv_class_init),
(gst_dicetv_init),
(gst_dicetv_get_type):
* gst/effectv/gstedge.c: (gst_edgetv_set_caps),
(gst_edgetv_get_unit_size), (gst_edgetv_transform),
(gst_edgetv_base_init), (gst_edgetv_class_init),
(gst_edgetv_init),
(gst_edgetv_get_type):
* gst/effectv/gsteffectv.c:
* gst/effectv/gsteffectv.h:
* gst/effectv/gstquark.c: (gst_quarktv_set_caps),
(gst_quarktv_get_unit_size), (fastrand),
(gst_quarktv_transform),
(gst_quarktv_change_state), (gst_quarktv_base_init),
(gst_quarktv_class_init), (gst_quarktv_init),
(gst_quarktv_get_type):
* gst/effectv/gstrev.c: (gst_revtv_set_caps),
(gst_revtv_get_unit_size), (gst_revtv_transform),
(gst_revtv_base_init), (gst_revtv_class_init), (gst_revtv_init),
(gst_revtv_get_type):
* gst/effectv/gstshagadelic.c: (gst_shagadelictv_set_caps),
(gst_shagadelictv_get_unit_size), (gst_shagadelictv_transform),
(gst_shagadelictv_base_init), (gst_shagadelictv_class_init),
(gst_shagadelictv_init), (gst_shagadelictv_get_type):
* gst/effectv/gstvertigo.c: (gst_vertigotv_set_caps),
(gst_vertigotv_get_unit_size), (gst_vertigotv_transform),
(gst_vertigotv_base_init), (gst_vertigotv_class_init),
(gst_vertigotv_init), (gst_vertigotv_get_type):
* gst/effectv/gstwarp.c: (gst_warptv_set_caps),
(gst_warptv_get_unit_size), (gst_warptv_transform),
(gst_warptv_base_init), (gst_warptv_class_init),
(gst_warptv_init),
(gst_warptv_get_type):
* gst/videofilter/Makefile.am:
* gst/videofilter/gstvideobalance.c:
* gst/videofilter/gstvideobalance.h:
* gst/videofilter/gstvideofilter.c: (gst_videofilter_get_type),
(gst_videofilter_class_init), (gst_videofilter_init):
* gst/videofilter/gstvideofilter.h:
* gst/videofilter/gstvideoflip.c: (gst_videoflip_set_caps),
(gst_videoflip_transform_caps), (gst_videoflip_get_unit_size),
(gst_videoflip_flip), (gst_videoflip_transform),
(gst_videoflip_handle_src_event), (gst_videoflip_set_property),
(gst_videoflip_base_init), (gst_videoflip_class_init),
(gst_videoflip_init), (plugin_init), (gst_videoflip_get_type):
* gst/videofilter/gstvideoflip.h: VideoFilter now inherits from
BaseTransform; it is just a placeholder for now, and every video
effect plugin has been ported to use BaseTransform features
directly. QuarkTV was fixed too (it was broken), navigationtest
works, and, best for the end, videoflip converts navigation events
depending on the flip method! Fixes #320953.
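
The port follows the same pattern in every element: the per-format GstVideofilterFormat tables and the setup vfunc are dropped, and each effect instead declares its own static pad templates in base_init and points the GstBaseTransformClass vfuncs at its set_caps, get_unit_size and transform functions. A condensed sketch of that wiring, taken from the AgingTV hunks in the diff below (GStreamer 0.9/0.10 API; the other elements differ only in names and caps, and agingtv_details / parent_class are declared earlier in the file as in the patch):

/* Condensed from gst/effectv/gstaging.c in this patch -- illustrative only,
 * see the full diff below for the real code. */
static GstStaticPadTemplate gst_agingtv_src_template =
GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx));

static GstStaticPadTemplate gst_agingtv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx));

static void
gst_agingtv_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);

  gst_element_class_set_details (element_class, &agingtv_details);

  /* pad templates are now per element instead of coming from
   * gst_videofilter_class_add_pad_templates() */
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_agingtv_sink_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_agingtv_src_template));
}

static void
gst_agingtv_class_init (gpointer klass, gpointer class_data)
{
  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;

  parent_class = g_type_class_peek_parent (klass);

  /* BaseTransform vfuncs replace the old GstVideofilter setup/format hooks */
  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_agingtv_set_caps);
  trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_agingtv_get_unit_size);
  trans_class->transform = GST_DEBUG_FUNCPTR (gst_agingtv_transform);
}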
Diffstat (limited to 'gst/effectv')
-rw-r--r--   gst/effectv/Makefile.am       |   2
-rw-r--r--   gst/effectv/gstaging.c        | 253
-rw-r--r--   gst/effectv/gstdice.c         | 307
-rw-r--r--   gst/effectv/gstedge.c         | 263
-rw-r--r--   gst/effectv/gsteffectv.c      |  34
-rw-r--r--   gst/effectv/gsteffectv.h      |   3
-rw-r--r--   gst/effectv/gstquark.c        | 328
-rw-r--r--   gst/effectv/gstrev.c          | 251
-rw-r--r--   gst/effectv/gstshagadelic.c   | 258
-rw-r--r--   gst/effectv/gstvertigo.c      | 304
-rw-r--r--   gst/effectv/gstwarp.c         | 315
11 files changed, 1101 insertions, 1217 deletions
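
Every file in the diffstat gets the same two per-element pieces: a get_unit_size that derives the frame size from the negotiated caps, and a transform that works directly on the input and output buffers. A condensed sketch of that pattern, again taken from the AgingTV hunks below (illustrative only; 0.9/0.10 buffer API, and the 32 / 8 factor assumes the 32 bits-per-pixel RGB caps these elements negotiate):

/* Condensed from the per-element hunks below -- not the full code. */
static gboolean
gst_agingtv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
    guint * size)
{
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  gint width, height;

  if (gst_structure_get_int (structure, "width", &width) &&
      gst_structure_get_int (structure, "height", &height)) {
    /* 32 bpp RGB variants: one frame is width * height * 4 bytes */
    *size = width * height * 32 / 8;
    return TRUE;
  }
  return FALSE;
}

static GstFlowReturn
gst_agingtv_transform (GstBaseTransform * trans, GstBuffer * in,
    GstBuffer * out)
{
  GstAgingTV *agingtv = GST_AGINGTV (trans);
  guint32 *src = (guint32 *) GST_BUFFER_DATA (in);
  guint32 *dest = (guint32 *) GST_BUFFER_DATA (out);

  /* copy timestamp and duration from the input to the output buffer */
  gst_buffer_stamp (out, in);

  /* ... per-effect pixel processing from src to dest ... */

  return GST_FLOW_OK;
}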
diff --git a/gst/effectv/Makefile.am b/gst/effectv/Makefile.am index 1413526e..2bdf7b77 100644 --- a/gst/effectv/Makefile.am +++ b/gst/effectv/Makefile.am @@ -5,10 +5,12 @@ libgsteffectv_la_SOURCES = \ gstshagadelic.c gstvertigo.c gstrev.c gstquark.c libgsteffectv_la_CFLAGS = \ $(GST_PLUGINS_BASE_CFLAGS) \ + $(GST_BASE_CFLAGS) \ $(GST_CFLAGS) \ -I$(top_srcdir)/gst/videofilter libgsteffectv_la_LIBADD = \ $(GST_PLUGINS_BASE_LIBS) \ + $(GST_BASE_LIBS) \ $(GST_LIBS) \ $(top_builddir)/gst/videofilter/libgstvideofilter-@GST_MAJORMINOR@.la libgsteffectv_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) diff --git a/gst/effectv/gstaging.c b/gst/effectv/gstaging.c index 97bef900..3d70fbb3 100644 --- a/gst/effectv/gstaging.c +++ b/gst/effectv/gstaging.c @@ -37,11 +37,13 @@ #include "config.h" #endif -#include <gst/gst.h> #include <gstvideofilter.h> + #include <string.h> #include <math.h> +#include <gst/video/video.h> + #define GST_TYPE_AGINGTV \ (gst_agingtv_get_type()) #define GST_AGINGTV(obj) \ @@ -89,128 +91,69 @@ struct _GstAgingTVClass GstVideofilterClass parent_class; }; -/* GstAgingTV signals and args */ -enum -{ - /* FILL ME */ - LAST_SIGNAL -}; - -enum -{ - ARG_0 - /* FILL ME */ -}; +GType gst_agingtv_get_type (void); -static void gst_agingtv_base_init (gpointer g_class); -static void gst_agingtv_class_init (gpointer g_class, gpointer class_data); -static void gst_agingtv_init (GTypeInstance * instance, gpointer g_class); -static void gst_agingtv_setup (GstVideofilter * videofilter); +static GstElementDetails agingtv_details = GST_ELEMENT_DETAILS ("AgingTV", + "Filter/Effect/Video", + "AgingTV adds age to video input using scratches and dust", + "Sam Lantinga <slouken@devolution.com>"); -static void gst_agingtv_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec); -static void gst_agingtv_get_property (GObject * object, guint prop_id, - GValue * value, GParamSpec * pspec); -static void gst_agingtv_rgb32 (GstVideofilter * videofilter, void *d, void *s); +static GstStaticPadTemplate gst_agingtv_src_template = +GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx) + ); -GType -gst_agingtv_get_type (void) -{ - static GType agingtv_type = 0; +static GstStaticPadTemplate gst_agingtv_sink_template = +GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx) + ); - if (!agingtv_type) { - static const GTypeInfo agingtv_info = { - sizeof (GstAgingTVClass), - gst_agingtv_base_init, - NULL, - gst_agingtv_class_init, - NULL, - NULL, - sizeof (GstAgingTV), - 0, - gst_agingtv_init, - }; +static GstVideofilterClass *parent_class = NULL; - agingtv_type = g_type_register_static (GST_TYPE_VIDEOFILTER, - "GstAgingTV", &agingtv_info, 0); - } - return agingtv_type; -} - -static GstVideofilterFormat gst_agingtv_formats[] = { - {"RGB ", 32, gst_agingtv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000, - 0xff000000} -}; - -static void -gst_agingtv_base_init (gpointer g_class) +static gboolean +gst_agingtv_set_caps (GstBaseTransform * btrans, GstCaps * incaps, + GstCaps * outcaps) { - static GstElementDetails agingtv_details = GST_ELEMENT_DETAILS ("AgingTV", - "Filter/Effect/Video", - "AgingTV adds age to video input using scratches and dust", - "Sam Lantinga <slouken@devolution.com>"); - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class); - int i; + GstAgingTV *filter = GST_AGINGTV (btrans); + GstStructure 
*structure; + gboolean ret = FALSE; - gst_element_class_set_details (element_class, &agingtv_details); + structure = gst_caps_get_structure (incaps, 0); - for (i = 0; i < G_N_ELEMENTS (gst_agingtv_formats); i++) { - gst_videofilter_class_add_format (videofilter_class, - gst_agingtv_formats + i); + if (gst_structure_get_int (structure, "width", &filter->width) && + gst_structure_get_int (structure, "height", &filter->height)) { + ret = TRUE; } - gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class)); + return ret; } -static void -gst_agingtv_class_init (gpointer g_class, gpointer class_data) -{ - GObjectClass *gobject_class; - GstVideofilterClass *videofilter_class; - - gobject_class = G_OBJECT_CLASS (g_class); - videofilter_class = GST_VIDEOFILTER_CLASS (g_class); - - gobject_class->set_property = gst_agingtv_set_property; - gobject_class->get_property = gst_agingtv_get_property; - -#if 0 - g_object_class_install_property (gobject_class, ARG_METHOD, - g_param_spec_enum ("method", "method", "method", - GST_TYPE_AGINGTV_METHOD, GST_AGINGTV_METHOD_1, G_PARAM_READWRITE)); -#endif - - videofilter_class->setup = gst_agingtv_setup; -} - -static void -gst_agingtv_init (GTypeInstance * instance, gpointer g_class) +static gboolean +gst_agingtv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps, + guint * size) { - GstAgingTV *agingtv = GST_AGINGTV (instance); - GstVideofilter *videofilter; - - GST_DEBUG ("gst_agingtv_init"); - - videofilter = GST_VIDEOFILTER (agingtv); - - /* do stuff */ -} + GstAgingTV *filter; + GstStructure *structure; + gboolean ret = FALSE; + gint width, height; -static void -gst_agingtv_setup (GstVideofilter * videofilter) -{ - GstAgingTV *agingtv; - int width = gst_videofilter_get_input_width (videofilter); - int height = gst_videofilter_get_input_height (videofilter); + filter = GST_AGINGTV (btrans); - g_return_if_fail (GST_IS_AGINGTV (videofilter)); - agingtv = GST_AGINGTV (videofilter); + structure = gst_caps_get_structure (caps, 0); - /* if any setup needs to be done, do it here */ + if (gst_structure_get_int (structure, "width", &width) && + gst_structure_get_int (structure, "height", &height)) { + *size = width * height * 32 / 8; + ret = TRUE; + GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size, + width, height); + } - agingtv->width = width; - agingtv->height = height; + return ret; } static unsigned int @@ -359,69 +302,89 @@ pits (guint32 * dest, gint width, gint height, gint area_scale, } } -static void -gst_agingtv_rgb32 (GstVideofilter * videofilter, void *d, void *s) +static GstFlowReturn +gst_agingtv_transform (GstBaseTransform * trans, GstBuffer * in, + GstBuffer * out) { - GstAgingTV *agingtv; - int width = gst_videofilter_get_input_width (videofilter); - int height = gst_videofilter_get_input_height (videofilter); + GstAgingTV *agingtv = GST_AGINGTV (trans); + gint width = agingtv->width; + gint height = agingtv->height; int video_size = width * height; - guint32 *src = s; - guint32 *dest = d; + guint32 *src = (guint32 *) GST_BUFFER_DATA (in); + guint32 *dest = (guint32 *) GST_BUFFER_DATA (out); gint area_scale = width * height / 64 / 480; + GstFlowReturn ret = GST_FLOW_OK; + + gst_buffer_stamp (out, in); if (area_scale <= 0) area_scale = 1; - g_return_if_fail (GST_IS_AGINGTV (videofilter)); - agingtv = GST_AGINGTV (videofilter); - coloraging (src, dest, video_size); scratching (agingtv->scratches, agingtv->scratch_lines, dest, width, height); pits (dest, width, height, area_scale, agingtv->pits_interval); if 
(area_scale > 1) dusts (dest, width, height, agingtv->dust_interval, area_scale); + return ret; } static void -gst_agingtv_set_property (GObject * object, guint prop_id, const GValue * value, - GParamSpec * pspec) +gst_agingtv_base_init (gpointer g_class) { - GstAgingTV *src; + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - g_return_if_fail (GST_IS_AGINGTV (object)); - src = GST_AGINGTV (object); + gst_element_class_set_details (element_class, &agingtv_details); - GST_DEBUG ("gst_agingtv_set_property"); - switch (prop_id) { -#if 0 - case ARG_METHOD: - src->method = g_value_get_enum (value); - break; -#endif - default: - break; - } + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_agingtv_sink_template)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_agingtv_src_template)); } static void -gst_agingtv_get_property (GObject * object, guint prop_id, GValue * value, - GParamSpec * pspec) +gst_agingtv_class_init (gpointer klass, gpointer class_data) { - GstAgingTV *src; + GObjectClass *gobject_class; + GstElementClass *element_class; + GstBaseTransformClass *trans_class; - g_return_if_fail (GST_IS_AGINGTV (object)); - src = GST_AGINGTV (object); + gobject_class = (GObjectClass *) klass; + element_class = (GstElementClass *) klass; + trans_class = (GstBaseTransformClass *) klass; - switch (prop_id) { -#if 0 - case ARG_METHOD: - g_value_set_enum (value, src->method); - break; -#endif - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); - break; + parent_class = g_type_class_peek_parent (klass); + + trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_agingtv_set_caps); + trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_agingtv_get_unit_size); + trans_class->transform = GST_DEBUG_FUNCPTR (gst_agingtv_transform); +} + +static void +gst_agingtv_init (GTypeInstance * instance, gpointer g_class) +{ +} + +GType +gst_agingtv_get_type (void) +{ + static GType agingtv_type = 0; + + if (!agingtv_type) { + static const GTypeInfo agingtv_info = { + sizeof (GstAgingTVClass), + gst_agingtv_base_init, + NULL, + gst_agingtv_class_init, + NULL, + NULL, + sizeof (GstAgingTV), + 0, + gst_agingtv_init, + }; + + agingtv_type = g_type_register_static (GST_TYPE_VIDEOFILTER, + "GstAgingTV", &agingtv_info, 0); } + return agingtv_type; } diff --git a/gst/effectv/gstdice.c b/gst/effectv/gstdice.c index e3506e73..3d2376c5 100644 --- a/gst/effectv/gstdice.c +++ b/gst/effectv/gstdice.c @@ -13,9 +13,13 @@ #ifdef HAVE_CONFIG_H #include "config.h" #endif + +#include <gstvideofilter.h> + #include <string.h> #include <gst/gst.h> -#include <gstvideofilter.h> + +#include <gst/video/video.h> #define GST_TYPE_DICETV \ (gst_dicetv_get_type()) @@ -60,164 +64,85 @@ struct _GstDiceTV struct _GstDiceTVClass { GstVideofilterClass parent_class; - - void (*reset) (GstElement * element); -}; - -/* Filter signals and args */ -enum -{ - /* FILL ME */ - RESET_SIGNAL, - LAST_SIGNAL }; -enum -{ - ARG_0, - ARG_CUBE_BITS -}; +GType gst_dicetv_get_type (void); -static void gst_dicetv_base_init (gpointer g_class); -static void gst_dicetv_class_init (gpointer g_class, gpointer class_data); -static void gst_dicetv_init (GTypeInstance * instance, gpointer g_class); - -static void gst_dicetv_reset_handler (GstElement * elem); static void gst_dicetv_create_map (GstDiceTV * filter); -static void gst_dicetv_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec); -static void gst_dicetv_get_property (GObject * 
object, guint prop_id, - GValue * value, GParamSpec * pspec); -static void gst_dicetv_setup (GstVideofilter * videofilter); -static void gst_dicetv_draw (GstVideofilter * videofilter, void *d, void *s); - -static guint gst_dicetv_signals[LAST_SIGNAL] = { 0 }; +static GstElementDetails gst_dicetv_details = GST_ELEMENT_DETAILS ("DiceTV", + "Filter/Effect/Video", + "'Dices' the screen up into many small squares", + "Wim Taymans <wim.taymans@chello.be>"); + +static GstStaticPadTemplate gst_dicetv_src_template = + GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";" + GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR) + ); + +static GstStaticPadTemplate gst_dicetv_sink_template = + GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";" + GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR) + ); + +static GstVideofilterClass *parent_class = NULL; -GType -gst_dicetv_get_type (void) +enum { - static GType dicetv_type = 0; - - if (!dicetv_type) { - static const GTypeInfo dicetv_info = { - sizeof (GstDiceTVClass), - gst_dicetv_base_init, - NULL, - (GClassInitFunc) gst_dicetv_class_init, - NULL, - NULL, - sizeof (GstDiceTV), - 0, - (GInstanceInitFunc) gst_dicetv_init, - }; - - dicetv_type = - g_type_register_static (GST_TYPE_VIDEOFILTER, "GstDiceTV", &dicetv_info, - 0); - } - return dicetv_type; -} - -static GstVideofilterFormat gst_dicetv_formats[] = { - {"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0x00ff0000, 0x0000ff00, - 0x000000ff}, - {"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0xff000000, 0x00ff0000, - 0x0000ff00}, - {"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0x000000ff, 0x0000ff00, - 0x00ff0000}, - {"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000, - 0xff000000}, + ARG_0, + ARG_CUBE_BITS }; -static void -gst_dicetv_base_init (gpointer g_class) +static gboolean +gst_dicetv_set_caps (GstBaseTransform * btrans, GstCaps * incaps, + GstCaps * outcaps) { - /* elementfactory information */ - static GstElementDetails gst_dicetv_details = GST_ELEMENT_DETAILS ("DiceTV", - "Filter/Effect/Video", - "'Dices' the screen up into many small squares", - "Wim Taymans <wim.taymans@chello.be>"); - - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class); - int i; - - gst_element_class_set_details (element_class, &gst_dicetv_details); - - for (i = 0; i < G_N_ELEMENTS (gst_dicetv_formats); i++) { - gst_videofilter_class_add_format (videofilter_class, - gst_dicetv_formats + i); + GstDiceTV *filter = GST_DICETV (btrans); + GstStructure *structure; + gboolean ret = FALSE; + + structure = gst_caps_get_structure (incaps, 0); + + if (gst_structure_get_int (structure, "width", &filter->width) && + gst_structure_get_int (structure, "height", &filter->height)) { + g_free (filter->dicemap); + filter->dicemap = + (gchar *) g_malloc (filter->height * filter->width * sizeof (char)); + gst_dicetv_create_map (filter); + ret = TRUE; } - gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class)); -} - -static void -gst_dicetv_class_init (gpointer g_class, gpointer class_data) -{ - GObjectClass *gobject_class; - GstVideofilterClass *videofilter_class; - GstDiceTVClass *dicetv_class; - - gobject_class = G_OBJECT_CLASS (g_class); - videofilter_class = GST_VIDEOFILTER_CLASS (g_class); - dicetv_class = GST_DICETV_CLASS (g_class); - - gst_dicetv_signals[RESET_SIGNAL] = 
- g_signal_new ("reset", - G_TYPE_FROM_CLASS (g_class), - G_SIGNAL_RUN_LAST, - G_STRUCT_OFFSET (GstDiceTVClass, reset), - NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0); - - dicetv_class->reset = gst_dicetv_reset_handler; - - gobject_class->set_property = gst_dicetv_set_property; - gobject_class->get_property = gst_dicetv_get_property; - - g_object_class_install_property (gobject_class, ARG_CUBE_BITS, - g_param_spec_int ("square_bits", "Square Bits", "The size of the Squares", - MIN_CUBE_BITS, MAX_CUBE_BITS, DEFAULT_CUBE_BITS, G_PARAM_READWRITE)); - - videofilter_class->setup = gst_dicetv_setup; + return ret; } -static void -gst_dicetv_setup (GstVideofilter * videofilter) +static gboolean +gst_dicetv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps, + guint * size) { - GstDiceTV *dicetv; - - g_return_if_fail (GST_IS_DICETV (videofilter)); - dicetv = GST_DICETV (videofilter); - - dicetv->width = gst_videofilter_get_input_width (videofilter); - dicetv->height = gst_videofilter_get_input_height (videofilter); + GstDiceTV *filter; + GstStructure *structure; + gboolean ret = FALSE; + gint width, height; - g_free (dicetv->dicemap); - dicetv->dicemap = - (gchar *) g_malloc (dicetv->height * dicetv->width * sizeof (char)); - gst_dicetv_create_map (dicetv); -} + filter = GST_DICETV (btrans); -static void -gst_dicetv_init (GTypeInstance * instance, gpointer g_class) -{ - GstDiceTV *filter = GST_DICETV (instance); + structure = gst_caps_get_structure (caps, 0); - filter->dicemap = NULL; - filter->g_cube_bits = DEFAULT_CUBE_BITS; - filter->g_cube_size = 0; - filter->g_map_height = 0; - filter->g_map_width = 0; -} - -static void -gst_dicetv_reset_handler (GstElement * element) -{ - GstDiceTV *filter = GST_DICETV (element); + if (gst_structure_get_int (structure, "width", &width) && + gst_structure_get_int (structure, "height", &height)) { + *size = width * height * 32 / 8; + ret = TRUE; + GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size, + width, height); + } - gst_dicetv_create_map (filter); + return ret; } static unsigned int @@ -228,23 +153,20 @@ fastrand (void) return (fastrand_val = fastrand_val * 1103515245 + 12345); } -static void -gst_dicetv_draw (GstVideofilter * videofilter, void *d, void *s) +static GstFlowReturn +gst_dicetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out) { GstDiceTV *filter; - guint32 *src; - guint32 *dest; - gint i; - gint map_x, map_y, map_i; - gint base; - gint dx, dy, di; - gint video_width; - gint g_cube_bits; - gint g_cube_size; + guint32 *src, *dest; + gint i, map_x, map_y, map_i, base, dx, dy, di; + gint video_width, g_cube_bits, g_cube_size; + GstFlowReturn ret = GST_FLOW_OK; - filter = GST_DICETV (videofilter); - src = (guint32 *) s; - dest = (guint32 *) d; + filter = GST_DICETV (trans); + src = (guint32 *) GST_BUFFER_DATA (in); + dest = (guint32 *) GST_BUFFER_DATA (out); + + gst_buffer_stamp (out, in); video_width = filter->width; g_cube_bits = filter->g_cube_bits; @@ -304,6 +226,8 @@ gst_dicetv_draw (GstVideofilter * videofilter, void *d, void *s) map_i++; } } + + return ret; } static void @@ -364,3 +288,78 @@ gst_dicetv_get_property (GObject * object, guint prop_id, GValue * value, break; } } + +static void +gst_dicetv_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_set_details (element_class, &gst_dicetv_details); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_dicetv_sink_template)); + 
gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_dicetv_src_template)); +} + +static void +gst_dicetv_class_init (gpointer klass, gpointer class_data) +{ + GObjectClass *gobject_class; + GstElementClass *element_class; + GstBaseTransformClass *trans_class; + + gobject_class = (GObjectClass *) klass; + element_class = (GstElementClass *) klass; + trans_class = (GstBaseTransformClass *) klass; + + parent_class = g_type_class_peek_parent (klass); + + gobject_class->set_property = gst_dicetv_set_property; + gobject_class->get_property = gst_dicetv_get_property; + + g_object_class_install_property (gobject_class, ARG_CUBE_BITS, + g_param_spec_int ("square_bits", "Square Bits", "The size of the Squares", + MIN_CUBE_BITS, MAX_CUBE_BITS, DEFAULT_CUBE_BITS, G_PARAM_READWRITE)); + + trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_dicetv_set_caps); + trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_dicetv_get_unit_size); + trans_class->transform = GST_DEBUG_FUNCPTR (gst_dicetv_transform); +} + +static void +gst_dicetv_init (GTypeInstance * instance, gpointer g_class) +{ + GstDiceTV *filter = GST_DICETV (instance); + + filter->dicemap = NULL; + filter->g_cube_bits = DEFAULT_CUBE_BITS; + filter->g_cube_size = 0; + filter->g_map_height = 0; + filter->g_map_width = 0; +} + +GType +gst_dicetv_get_type (void) +{ + static GType dicetv_type = 0; + + if (!dicetv_type) { + static const GTypeInfo dicetv_info = { + sizeof (GstDiceTVClass), + gst_dicetv_base_init, + NULL, + (GClassInitFunc) gst_dicetv_class_init, + NULL, + NULL, + sizeof (GstDiceTV), + 0, + (GInstanceInitFunc) gst_dicetv_init, + }; + + dicetv_type = + g_type_register_static (GST_TYPE_VIDEOFILTER, "GstDiceTV", &dicetv_info, + 0); + } + return dicetv_type; +} diff --git a/gst/effectv/gstedge.c b/gst/effectv/gstedge.c index 257b1046..2ee53dbe 100644 --- a/gst/effectv/gstedge.c +++ b/gst/effectv/gstedge.c @@ -24,10 +24,13 @@ #ifdef HAVE_CONFIG_H #include "config.h" #endif -#include <string.h> -#include <gst/gst.h> + #include <gstvideofilter.h> +#include <string.h> + +#include <gst/video/video.h> + #define GST_TYPE_EDGETV \ (gst_edgetv_get_type()) #define GST_EDGETV(obj) \ @@ -57,146 +60,97 @@ struct _GstEdgeTVClass GstVideofilterClass parent_class; }; -/* Filter signals and args */ -enum -{ - /* FILL ME */ - LAST_SIGNAL -}; +GType gst_edgetv_get_type (void); -enum -{ - ARG_0 -}; - -static void gst_edgetv_base_init (gpointer g_class); -static void gst_edgetv_class_init (gpointer g_class, gpointer class_data); -static void gst_edgetv_init (GTypeInstance * instance, gpointer g_class); +static GstElementDetails gst_edgetv_details = GST_ELEMENT_DETAILS ("EdgeTV", + "Filter/Effect/Video", + "Apply edge detect on video", + "Wim Taymans <wim.taymans@chello.be>"); -static void gst_edgetv_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec); -static void gst_edgetv_get_property (GObject * object, guint prop_id, - GValue * value, GParamSpec * pspec); +static GstStaticPadTemplate gst_edgetv_src_template = +GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx) + ); -static void gst_edgetv_setup (GstVideofilter * videofilter); -static void gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s); +static GstStaticPadTemplate gst_edgetv_sink_template = +GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx) + ); -/*static guint gst_edgetv_signals[LAST_SIGNAL] = { 0 }; */ +static 
GstVideofilterClass *parent_class = NULL; -GType -gst_edgetv_get_type (void) +static gboolean +gst_edgetv_set_caps (GstBaseTransform * btrans, GstCaps * incaps, + GstCaps * outcaps) { - static GType edgetv_type = 0; - - if (!edgetv_type) { - static const GTypeInfo edgetv_info = { - sizeof (GstEdgeTVClass), - gst_edgetv_base_init, - NULL, - (GClassInitFunc) gst_edgetv_class_init, - NULL, - NULL, - sizeof (GstEdgeTV), - 0, - (GInstanceInitFunc) gst_edgetv_init, - }; - - edgetv_type = - g_type_register_static (GST_TYPE_VIDEOFILTER, "GstEdgeTV", &edgetv_info, - 0); + GstEdgeTV *edgetv = GST_EDGETV (btrans); + GstStructure *structure; + gboolean ret = FALSE; + + structure = gst_caps_get_structure (incaps, 0); + + if (gst_structure_get_int (structure, "width", &edgetv->width) && + gst_structure_get_int (structure, "height", &edgetv->height)) { + edgetv->map_width = edgetv->width / 4; + edgetv->map_height = edgetv->height / 4; + edgetv->video_width_margin = edgetv->width % 4; + + g_free (edgetv->map); + edgetv->map = + (guint32 *) g_malloc (edgetv->map_width * edgetv->map_height * + sizeof (guint32) * 2); + memset (edgetv->map, 0, + edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2); + ret = TRUE; } - return edgetv_type; -} -static GstVideofilterFormat gst_edgetv_formats[] = { - {"RGB ", 32, gst_edgetv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000, - 0xff000000} -}; - -static void -gst_edgetv_base_init (gpointer g_class) -{ - /* elementfactory information */ - static GstElementDetails gst_edgetv_details = GST_ELEMENT_DETAILS ("EdgeTV", - "Filter/Effect/Video", - "Apply edge detect on video", - "Wim Taymans <wim.taymans@chello.be>"); - - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class); - int i; - - gst_element_class_set_details (element_class, &gst_edgetv_details); - - for (i = 0; i < G_N_ELEMENTS (gst_edgetv_formats); i++) { - gst_videofilter_class_add_format (videofilter_class, - gst_edgetv_formats + i); - } - - gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class)); + return ret; } -static void -gst_edgetv_class_init (gpointer g_class, gpointer class_data) +static gboolean +gst_edgetv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps, + guint * size) { - GObjectClass *gobject_class; - GstVideofilterClass *videofilter_class; - - gobject_class = G_OBJECT_CLASS (g_class); - videofilter_class = GST_VIDEOFILTER_CLASS (g_class); + GstEdgeTV *filter; + GstStructure *structure; + gboolean ret = FALSE; + gint width, height; - gobject_class->set_property = gst_edgetv_set_property; - gobject_class->get_property = gst_edgetv_get_property; + filter = GST_EDGETV (btrans); - videofilter_class->setup = gst_edgetv_setup; -} + structure = gst_caps_get_structure (caps, 0); -static void -gst_edgetv_init (GTypeInstance * instance, gpointer g_class) -{ - GstEdgeTV *edgetv = GST_EDGETV (instance); + if (gst_structure_get_int (structure, "width", &width) && + gst_structure_get_int (structure, "height", &height)) { + *size = width * height * 32 / 8; + ret = TRUE; + GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size, + width, height); + } - edgetv->map = NULL; + return ret; } -static void -gst_edgetv_setup (GstVideofilter * videofilter) -{ - GstEdgeTV *edgetv; - int width = gst_videofilter_get_input_width (videofilter); - int height = gst_videofilter_get_input_height (videofilter); - - g_return_if_fail (GST_IS_EDGETV (videofilter)); - edgetv = GST_EDGETV (videofilter); - - 
edgetv->width = width; - edgetv->height = height; - edgetv->map_width = width / 4; - edgetv->map_height = height / 4; - edgetv->video_width_margin = width % 4; - - g_free (edgetv->map); - edgetv->map = - (guint32 *) g_malloc (edgetv->map_width * edgetv->map_height * - sizeof (guint32) * 2); - memset (edgetv->map, 0, - edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2); -} - -static void -gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s) +static GstFlowReturn +gst_edgetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out) { GstEdgeTV *filter; - int x, y; - int r, g, b; + gint x, y, r, g, b; guint32 *src, *dest; guint32 p, q; guint32 v0, v1, v2, v3; + GstFlowReturn ret = GST_FLOW_OK; - filter = GST_EDGETV (videofilter); + filter = GST_EDGETV (trans); - src = (guint32 *) s; - dest = (guint32 *) d; + gst_buffer_stamp (out, in); + + src = (guint32 *) GST_BUFFER_DATA (in); + dest = (guint32 *) GST_BUFFER_DATA (out); src += filter->width * 4 + 4; dest += filter->width * 4 + 4; @@ -207,7 +161,7 @@ gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s) p = *src; q = *(src - 4); -/* difference between the current pixel and right neighbor. */ + /* difference between the current pixel and right neighbor. */ r = ((p & 0xff0000) - (q & 0xff0000)) >> 16; g = ((p & 0xff00) - (q & 0xff00)) >> 8; b = (p & 0xff) - (q & 0xff); @@ -225,7 +179,7 @@ gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s) b = 255; v2 = (r << 17) | (g << 9) | b; -/* difference between the current pixel and upper neighbor. */ + /* difference between the current pixel and upper neighbor. */ q = *(src - filter->width * 4); r = ((p & 0xff0000) - (q & 0xff0000)) >> 16; g = ((p & 0xff00) - (q & 0xff00)) >> 8; @@ -275,37 +229,70 @@ gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s) src += filter->width * 3 + 8 + filter->video_width_margin; dest += filter->width * 3 + 8 + filter->video_width_margin; } + + return ret; } static void -gst_edgetv_set_property (GObject * object, guint prop_id, const GValue * value, - GParamSpec * pspec) +gst_edgetv_base_init (gpointer g_class) { - GstEdgeTV *filter; + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - g_return_if_fail (GST_IS_EDGETV (object)); + gst_element_class_set_details (element_class, &gst_edgetv_details); - filter = GST_EDGETV (object); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_edgetv_sink_template)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_edgetv_src_template)); +} - switch (prop_id) { - default: - break; - } +static void +gst_edgetv_class_init (gpointer klass, gpointer class_data) +{ + GObjectClass *gobject_class; + GstElementClass *element_class; + GstBaseTransformClass *trans_class; + + gobject_class = (GObjectClass *) klass; + element_class = (GstElementClass *) klass; + trans_class = (GstBaseTransformClass *) klass; + + parent_class = g_type_class_peek_parent (klass); + + trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_edgetv_set_caps); + trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_edgetv_get_unit_size); + trans_class->transform = GST_DEBUG_FUNCPTR (gst_edgetv_transform); } static void -gst_edgetv_get_property (GObject * object, guint prop_id, GValue * value, - GParamSpec * pspec) +gst_edgetv_init (GTypeInstance * instance, gpointer g_class) { - GstEdgeTV *filter; + GstEdgeTV *edgetv = GST_EDGETV (instance); - g_return_if_fail (GST_IS_EDGETV (object)); + edgetv->map = NULL; +} - filter 
= GST_EDGETV (object); +GType +gst_edgetv_get_type (void) +{ + static GType edgetv_type = 0; - switch (prop_id) { - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); - break; + if (!edgetv_type) { + static const GTypeInfo edgetv_info = { + sizeof (GstEdgeTVClass), + gst_edgetv_base_init, + NULL, + (GClassInitFunc) gst_edgetv_class_init, + NULL, + NULL, + sizeof (GstEdgeTV), + 0, + (GInstanceInitFunc) gst_edgetv_init, + }; + + edgetv_type = + g_type_register_static (GST_TYPE_VIDEOFILTER, "GstEdgeTV", &edgetv_info, + 0); } + return edgetv_type; } diff --git a/gst/effectv/gsteffectv.c b/gst/effectv/gsteffectv.c index 5a051b63..bd8b6a04 100644 --- a/gst/effectv/gsteffectv.c +++ b/gst/effectv/gsteffectv.c @@ -25,12 +25,8 @@ #include "config.h" #endif -#include <string.h> -#include <gst/gst.h> -#include <gst/video/video.h> #include "gsteffectv.h" - struct _elements_entry { gchar *name; @@ -38,31 +34,17 @@ struct _elements_entry }; static struct _elements_entry _elements[] = { - {"edgeTV", gst_edgetv_get_type}, - {"agingTV", gst_agingtv_get_type}, - {"diceTV", gst_dicetv_get_type}, - {"warpTV", gst_warptv_get_type}, - {"shagadelicTV", gst_shagadelictv_get_type}, - {"vertigoTV", gst_vertigotv_get_type}, - {"revTV", gst_revtv_get_type}, - {"quarkTV", gst_quarktv_get_type}, + {"edgetv", gst_edgetv_get_type}, + {"agingtv", gst_agingtv_get_type}, + {"dicetv", gst_dicetv_get_type}, + {"warptv", gst_warptv_get_type}, + {"shagadelictv", gst_shagadelictv_get_type}, + {"vertigotv", gst_vertigotv_get_type}, + {"revtv", gst_revtv_get_type}, + {"quarktv", gst_quarktv_get_type}, {NULL, 0}, }; - -GstStaticPadTemplate gst_effectv_src_template = GST_STATIC_PAD_TEMPLATE ("src", - GST_PAD_SRC, - GST_PAD_ALWAYS, - GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx) - ); - -GstStaticPadTemplate gst_effectv_sink_template = - GST_STATIC_PAD_TEMPLATE ("sink", - GST_PAD_SINK, - GST_PAD_ALWAYS, - GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx) - ); - static gboolean plugin_init (GstPlugin * plugin) { diff --git a/gst/effectv/gsteffectv.h b/gst/effectv/gsteffectv.h index 4cf7f3e3..ce032335 100644 --- a/gst/effectv/gsteffectv.h +++ b/gst/effectv/gsteffectv.h @@ -31,6 +31,3 @@ GType gst_shagadelictv_get_type (void); GType gst_vertigotv_get_type (void); GType gst_revtv_get_type (void); GType gst_quarktv_get_type (void); - -extern GstStaticPadTemplate gst_effectv_sink_template; -extern GstStaticPadTemplate gst_effectv_src_template; diff --git a/gst/effectv/gstquark.c b/gst/effectv/gstquark.c index 48641e6e..d0cc1971 100644 --- a/gst/effectv/gstquark.c +++ b/gst/effectv/gstquark.c @@ -24,10 +24,13 @@ #ifdef HAVE_CONFIG_H #include "config.h" #endif + +#include <gstvideofilter.h> + #include <math.h> #include <string.h> -#include <gst/gst.h> -#include "gsteffectv.h" + +#include <gst/video/video.h> #define GST_TYPE_QUARKTV \ (gst_quarktv_get_type()) @@ -40,7 +43,7 @@ #define GST_IS_QUARKTV_CLASS(obj) \ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_QUARKTV)) -/* number of frames of time-buffer. It should be as a configurable paramter */ +/* number of frames of time-buffer. It should be as a configurable paramater */ /* This number also must be 2^n just for the speed. 
*/ #define PLANES 16 @@ -49,9 +52,7 @@ typedef struct _GstQuarkTVClass GstQuarkTVClass; struct _GstQuarkTV { - GstElement element; - - GstPad *sinkpad, *srcpad; + GstVideofilter element; gint width, height; gint area; @@ -62,20 +63,7 @@ struct _GstQuarkTV struct _GstQuarkTVClass { - GstElementClass parent_class; -}; - -/* elementfactory information */ -static GstElementDetails gst_quarktv_details = GST_ELEMENT_DETAILS ("QuarkTV", - "Filter/Effect/Video", - "Motion dissolver", - "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>"); - -/* Filter signals and args */ -enum -{ - /* FILL ME */ - LAST_SIGNAL + GstVideofilterClass parent_class; }; enum @@ -84,179 +72,103 @@ enum ARG_PLANES }; -static void gst_quarktv_base_init (gpointer g_class); -static void gst_quarktv_class_init (GstQuarkTVClass * klass); -static void gst_quarktv_init (GstQuarkTV * filter); - -static GstStateChangeReturn gst_quarktv_change_state (GstElement * element, - GstStateChange transition); - -static void gst_quarktv_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec); -static void gst_quarktv_get_property (GObject * object, guint prop_id, - GValue * value, GParamSpec * pspec); - -static GstFlowReturn gst_quarktv_chain (GstPad * pad, GstBuffer * buffer); - -static GstElementClass *parent_class = NULL; - -/* static guint gst_quarktv_signals[LAST_SIGNAL] = { 0 }; */ - -static inline guint32 -fastrand (void) -{ - static unsigned int fastrand_val; +GType gst_quarktv_get_type (void); - return (fastrand_val = fastrand_val * 1103515245 + 12345); -} +static GstElementDetails quarktv_details = GST_ELEMENT_DETAILS ("QuarkTV", + "Filter/Effect/Video", + "Motion dissolver", + "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>"); -GType -gst_quarktv_get_type (void) +static GstStaticPadTemplate gst_quarktv_src_template = + GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx) + ); + +static GstStaticPadTemplate gst_quarktv_sink_template = + GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx) + ); + +static GstVideofilterClass *parent_class = NULL; + +static gboolean +gst_quarktv_set_caps (GstBaseTransform * btrans, GstCaps * incaps, + GstCaps * outcaps) { - static GType quarktv_type = 0; + GstQuarkTV *filter = GST_QUARKTV (btrans); + GstStructure *structure; + gboolean ret = FALSE; - if (!quarktv_type) { - static const GTypeInfo quarktv_info = { - sizeof (GstQuarkTVClass), - gst_quarktv_base_init, - NULL, - (GClassInitFunc) gst_quarktv_class_init, - NULL, - NULL, - sizeof (GstQuarkTV), - 0, - (GInstanceInitFunc) gst_quarktv_init, - }; + structure = gst_caps_get_structure (incaps, 0); - quarktv_type = - g_type_register_static (GST_TYPE_ELEMENT, "GstQuarkTV", &quarktv_info, - 0); + if (gst_structure_get_int (structure, "width", &filter->width) && + gst_structure_get_int (structure, "height", &filter->height)) { + filter->area = filter->width * filter->height; + ret = TRUE; } - return quarktv_type; -} - -static void -gst_quarktv_base_init (gpointer g_class) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - - gst_element_class_add_pad_template (element_class, - gst_static_pad_template_get (&gst_effectv_src_template)); - gst_element_class_add_pad_template (element_class, - gst_static_pad_template_get (&gst_effectv_sink_template)); - - gst_element_class_set_details (element_class, &gst_quarktv_details); -} - -static void 
-gst_quarktv_class_init (GstQuarkTVClass * klass) -{ - GObjectClass *gobject_class; - GstElementClass *gstelement_class; - - gobject_class = (GObjectClass *) klass; - gstelement_class = (GstElementClass *) klass; - - parent_class = g_type_class_ref (GST_TYPE_ELEMENT); - - gobject_class->set_property = gst_quarktv_set_property; - gobject_class->get_property = gst_quarktv_get_property; - g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_PLANES, - g_param_spec_int ("planes", "Planes", "Number of frames in the buffer", - 1, 32, PLANES, G_PARAM_READWRITE)); - - gstelement_class->change_state = gst_quarktv_change_state; + return ret; } -static GstPadLinkReturn -gst_quarktv_link (GstPad * pad, GstPad * peer) +static gboolean +gst_quarktv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps, + guint * size) { GstQuarkTV *filter; - GstPad *otherpad; - - //gint i; - //GstStructure *structure; - //GstPadLinkReturn res; - - filter = GST_QUARKTV (gst_pad_get_parent (pad)); - g_return_val_if_fail (GST_IS_QUARKTV (filter), GST_PAD_LINK_REFUSED); - - otherpad = (pad == filter->srcpad ? filter->sinkpad : filter->srcpad); + GstStructure *structure; + gboolean ret = FALSE; + gint width, height; -#if 0 - res = gst_pad_try_set_caps (otherpad, caps); - if (GST_PAD_LINK_FAILED (res)) - return res; + filter = GST_QUARKTV (btrans); structure = gst_caps_get_structure (caps, 0); - if (!gst_structure_get_int (structure, "width", &filter->width) || - !gst_structure_get_int (structure, "height", &filter->height)) - return GST_PAD_LINK_REFUSED; - - filter->area = filter->width * filter->height; - for (i = 0; i < filter->planes; i++) { - if (filter->planetable[i]) - gst_buffer_unref (filter->planetable[i]); - filter->planetable[i] = NULL; + if (gst_structure_get_int (structure, "width", &width) && + gst_structure_get_int (structure, "height", &height)) { + *size = width * height * 32 / 8; + ret = TRUE; + GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size, + width, height); } -#endif - return GST_PAD_LINK_OK; + return ret; } -static void -gst_quarktv_init (GstQuarkTV * filter) +static inline guint32 +fastrand (void) { - filter->sinkpad = - gst_pad_new_from_template (gst_static_pad_template_get - (&gst_effectv_sink_template), "sink"); - //gst_pad_set_getcaps_function (filter->sinkpad, gst_pad_proxy_getcaps); - gst_pad_set_chain_function (filter->sinkpad, gst_quarktv_chain); - gst_pad_set_link_function (filter->sinkpad, gst_quarktv_link); - gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad); - - filter->srcpad = - gst_pad_new_from_template (gst_static_pad_template_get - (&gst_effectv_src_template), "src"); - //gst_pad_set_getcaps_function (filter->srcpad, gst_pad_proxy_getcaps); - gst_pad_set_link_function (filter->srcpad, gst_quarktv_link); - gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad); + static unsigned int fastrand_val; - filter->planes = PLANES; - filter->current_plane = filter->planes - 1; + return (fastrand_val = fastrand_val * 1103515245 + 12345); } static GstFlowReturn -gst_quarktv_chain (GstPad * pad, GstBuffer * buf) +gst_quarktv_transform (GstBaseTransform * trans, GstBuffer * in, + GstBuffer * out) { GstQuarkTV *filter; - guint32 *src, *dest; - GstBuffer *outbuf; gint area; - GstFlowReturn ret; + guint32 *src, *dest; + GstFlowReturn ret = GST_FLOW_OK; - filter = GST_QUARKTV (gst_pad_get_parent (pad)); + filter = GST_QUARKTV (trans); - src = (guint32 *) GST_BUFFER_DATA (buf); + gst_buffer_stamp (out, in); area = filter->area; - - ret = - 
gst_pad_alloc_buffer (filter->srcpad, 0, area, GST_PAD_CAPS (pad), - &outbuf); - if (ret != GST_FLOW_OK) - goto no_buffer; - - dest = (guint32 *) GST_BUFFER_DATA (outbuf); - GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf); + src = (guint32 *) GST_BUFFER_DATA (in); + dest = (guint32 *) GST_BUFFER_DATA (out); if (filter->planetable[filter->current_plane]) gst_buffer_unref (filter->planetable[filter->current_plane]); - filter->planetable[filter->current_plane] = buf; + filter->planetable[filter->current_plane] = gst_buffer_ref (in); + /* For each pixel */ while (--area) { GstBuffer *rand; @@ -265,27 +177,37 @@ gst_quarktv_chain (GstPad * pad, GstBuffer * buf) filter->planetable[(filter->current_plane + (fastrand () >> 24)) & (filter->planes - 1)]; + /* Copy the pixel from the random buffer to dest */ dest[area] = (rand ? ((guint32 *) GST_BUFFER_DATA (rand))[area] : 0); } - ret = gst_pad_push (filter->srcpad, outbuf); - filter->current_plane--; if (filter->current_plane < 0) filter->current_plane = filter->planes - 1; return ret; - -no_buffer: - { - return ret; - } } static GstStateChangeReturn gst_quarktv_change_state (GstElement * element, GstStateChange transition) { GstQuarkTV *filter = GST_QUARKTV (element); + GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS; + + switch (transition) { + case GST_STATE_CHANGE_READY_TO_PAUSED: + { + filter->planetable = + (GstBuffer **) g_malloc (filter->planes * sizeof (GstBuffer *)); + memset (filter->planetable, 0, filter->planes * sizeof (GstBuffer *)); + break; + } + default: + break; + } + + if (GST_ELEMENT_CLASS (parent_class)->change_state) + ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); switch (transition) { case GST_STATE_CHANGE_PAUSED_TO_READY: @@ -301,18 +223,11 @@ gst_quarktv_change_state (GstElement * element, GstStateChange transition) filter->planetable = NULL; break; } - case GST_STATE_CHANGE_READY_TO_PAUSED: - { - filter->planetable = - (GstBuffer **) g_malloc (filter->planes * sizeof (GstBuffer *)); - memset (filter->planetable, 0, filter->planes * sizeof (GstBuffer *)); - break; - } default: break; } - return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); + return ret; } @@ -377,3 +292,72 @@ gst_quarktv_get_property (GObject * object, guint prop_id, GValue * value, break; } } + +static void +gst_quarktv_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_set_details (element_class, &quarktv_details); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_quarktv_sink_template)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_quarktv_src_template)); +} + +static void +gst_quarktv_class_init (gpointer klass, gpointer class_data) +{ + GObjectClass *gobject_class; + GstElementClass *element_class; + GstBaseTransformClass *trans_class; + + gobject_class = (GObjectClass *) klass; + element_class = (GstElementClass *) klass; + trans_class = (GstBaseTransformClass *) klass; + + parent_class = g_type_class_peek_parent (klass); + + gobject_class->set_property = GST_DEBUG_FUNCPTR (gst_quarktv_set_property); + gobject_class->get_property = GST_DEBUG_FUNCPTR (gst_quarktv_get_property); + + element_class->change_state = GST_DEBUG_FUNCPTR (gst_quarktv_change_state); + + trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_quarktv_set_caps); + trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_quarktv_get_unit_size); + trans_class->transform = 
GST_DEBUG_FUNCPTR (gst_quarktv_transform); +} + +static void +gst_quarktv_init (GTypeInstance * instance, gpointer g_class) +{ + GstQuarkTV *filter = GST_QUARKTV (instance); + + filter->planes = PLANES; + filter->current_plane = filter->planes - 1; +} + +GType +gst_quarktv_get_type (void) +{ + static GType quarktv_type = 0; + + if (!quarktv_type) { + static const GTypeInfo quarktv_info = { + sizeof (GstQuarkTVClass), + gst_quarktv_base_init, + NULL, + gst_quarktv_class_init, + NULL, + NULL, + sizeof (GstQuarkTV), + 0, + gst_quarktv_init, + }; + + quarktv_type = g_type_register_static (GST_TYPE_VIDEOFILTER, + "GstQuarkTV", &quarktv_info, 0); + } + return quarktv_type; +} diff --git a/gst/effectv/gstrev.c b/gst/effectv/gstrev.c index 429a9b1c..331d6b37 100644 --- a/gst/effectv/gstrev.c +++ b/gst/effectv/gstrev.c @@ -43,10 +43,13 @@ #ifdef HAVE_CONFIG_H #include "config.h" #endif + +#include <gstvideofilter.h> + #include <math.h> #include <string.h> -#include <gst/gst.h> -#include <gstvideofilter.h> + +#include <gst/video/video.h> #define GST_TYPE_REVTV \ (gst_revtv_get_type()) @@ -78,15 +81,6 @@ struct _GstRevTV struct _GstRevTVClass { GstVideofilterClass parent_class; - - void (*reset) (GstElement * element); -}; - -/* Filter signals and args */ -enum -{ - /* FILL ME */ - LAST_SIGNAL }; enum @@ -97,132 +91,87 @@ enum ARG_GAIN }; -static void gst_revtv_base_init (gpointer g_class); -static void gst_revtv_class_init (gpointer g_class, gpointer class_data); -static void gst_revtv_init (GTypeInstance * instance, gpointer g_class); - -static void gst_revtv_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec); -static void gst_revtv_get_property (GObject * object, guint prop_id, - GValue * value, GParamSpec * pspec); -static void gst_revtv_setup (GstVideofilter * videofilter); -static void gst_revtv_rgb32 (GstVideofilter * videofilter, void *d, void *s); +GType gst_revtv_get_type (void); -/* static guint gst_revtv_signals[LAST_SIGNAL] = { 0 }; */ +static GstElementDetails gst_revtv_details = GST_ELEMENT_DETAILS ("RevTV", + "Filter/Effect/Video", + "A video waveform monitor for each line of video processed", + "Wim Taymans <wim.taymans@chello.be>"); -GType -gst_revtv_get_type (void) -{ - static GType revtv_type = 0; - - if (!revtv_type) { - static const GTypeInfo revtv_info = { - sizeof (GstRevTVClass), - gst_revtv_base_init, - NULL, - (GClassInitFunc) gst_revtv_class_init, - NULL, - NULL, - sizeof (GstRevTV), - 0, - (GInstanceInitFunc) gst_revtv_init, - }; +static GstStaticPadTemplate gst_revtv_src_template = +GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx) + ); - revtv_type = - g_type_register_static (GST_TYPE_VIDEOFILTER, "GstRevTV", &revtv_info, - 0); - } - return revtv_type; -} +static GstStaticPadTemplate gst_revtv_sink_template = +GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx) + ); -static GstVideofilterFormat gst_revtv_formats[] = { - {"RGB ", 32, gst_revtv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000, - 0xff000000} -}; +static GstVideofilterClass *parent_class = NULL; -static void -gst_revtv_base_init (gpointer g_class) +static gboolean +gst_revtv_set_caps (GstBaseTransform * btrans, GstCaps * incaps, + GstCaps * outcaps) { - /* elementfactory information */ - static GstElementDetails gst_revtv_details = GST_ELEMENT_DETAILS ("RevTV", - "Filter/Effect/Video", - "A video waveform monitor for each line of video processed", - "Wim 
Taymans <wim.taymans@chello.be>"); + GstRevTV *filter = GST_REVTV (btrans); + GstStructure *structure; + gboolean ret = FALSE; - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class); - int i; + structure = gst_caps_get_structure (incaps, 0); - gst_element_class_set_details (element_class, &gst_revtv_details); - - for (i = 0; i < G_N_ELEMENTS (gst_revtv_formats); i++) { - gst_videofilter_class_add_format (videofilter_class, gst_revtv_formats + i); + if (gst_structure_get_int (structure, "width", &filter->width) && + gst_structure_get_int (structure, "height", &filter->height)) { + ret = TRUE; } - gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class)); -} - -static void -gst_revtv_class_init (gpointer klass, gpointer class_data) -{ - GObjectClass *gobject_class; - GstVideofilterClass *videofilter_class; - - gobject_class = G_OBJECT_CLASS (klass); - videofilter_class = GST_VIDEOFILTER_CLASS (klass); - - gobject_class->set_property = gst_revtv_set_property; - gobject_class->get_property = gst_revtv_get_property; - - g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_DELAY, - g_param_spec_int ("delay", "Delay", "Delay in frames between updates", - 1, 100, 1, G_PARAM_READWRITE)); - g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_LINESPACE, - g_param_spec_int ("linespace", "Linespace", "Control line spacing", - 1, 100, 6, G_PARAM_READWRITE)); - g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_GAIN, - g_param_spec_int ("gain", "Gain", "Control gain", - 1, 200, 50, G_PARAM_READWRITE)); - - videofilter_class->setup = gst_revtv_setup; + return ret; } -static void -gst_revtv_init (GTypeInstance * instance, gpointer g_class) +static gboolean +gst_revtv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps, + guint * size) { - GstRevTV *restv = GST_REVTV (instance); + GstRevTV *filter; + GstStructure *structure; + gboolean ret = FALSE; + gint width, height; - restv->vgrabtime = 1; - restv->vgrab = 0; - restv->linespace = 6; - restv->vscale = 50; -} + filter = GST_REVTV (btrans); -static void -gst_revtv_setup (GstVideofilter * videofilter) -{ - GstRevTV *revtv; + structure = gst_caps_get_structure (caps, 0); - g_return_if_fail (GST_IS_REVTV (videofilter)); - revtv = GST_REVTV (videofilter); + if (gst_structure_get_int (structure, "width", &width) && + gst_structure_get_int (structure, "height", &height)) { + *size = width * height * 32 / 8; + ret = TRUE; + GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size, + width, height); + } - revtv->width = gst_videofilter_get_input_width (videofilter); - revtv->height = gst_videofilter_get_input_height (videofilter); + return ret; } -static void -gst_revtv_rgb32 (GstVideofilter * videofilter, void *d, void *s) +static GstFlowReturn +gst_revtv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out) { GstRevTV *filter; guint32 *src, *dest; gint width, height; guint32 *nsrc; gint y, x, R, G, B, yval; + GstFlowReturn ret = GST_FLOW_OK; - filter = GST_REVTV (videofilter); + filter = GST_REVTV (trans); - src = (guint32 *) s; - dest = (guint32 *) d; + gst_buffer_stamp (out, in); + + src = (guint32 *) GST_BUFFER_DATA (in); + dest = (guint32 *) GST_BUFFER_DATA (out); width = filter->width; height = filter->height; @@ -247,6 +196,8 @@ gst_revtv_rgb32 (GstVideofilter * videofilter, void *d, void *s) } } } + + return ret; } static void @@ -299,3 +250,83 @@ gst_revtv_get_property (GObject * object, guint 
prop_id, GValue * value, break; } } + +static void +gst_revtv_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_set_details (element_class, &gst_revtv_details); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_revtv_sink_template)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_revtv_src_template)); +} + +static void +gst_revtv_class_init (gpointer klass, gpointer class_data) +{ + GObjectClass *gobject_class; + GstElementClass *element_class; + GstBaseTransformClass *trans_class; + + gobject_class = (GObjectClass *) klass; + element_class = (GstElementClass *) klass; + trans_class = (GstBaseTransformClass *) klass; + + parent_class = g_type_class_peek_parent (klass); + + gobject_class->set_property = gst_revtv_set_property; + gobject_class->get_property = gst_revtv_get_property; + + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_DELAY, + g_param_spec_int ("delay", "Delay", "Delay in frames between updates", + 1, 100, 1, G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_LINESPACE, + g_param_spec_int ("linespace", "Linespace", "Control line spacing", + 1, 100, 6, G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_GAIN, + g_param_spec_int ("gain", "Gain", "Control gain", + 1, 200, 50, G_PARAM_READWRITE)); + + trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_revtv_set_caps); + trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_revtv_get_unit_size); + trans_class->transform = GST_DEBUG_FUNCPTR (gst_revtv_transform); +} + +static void +gst_revtv_init (GTypeInstance * instance, gpointer g_class) +{ + GstRevTV *restv = GST_REVTV (instance); + + restv->vgrabtime = 1; + restv->vgrab = 0; + restv->linespace = 6; + restv->vscale = 50; +} + +GType +gst_revtv_get_type (void) +{ + static GType revtv_type = 0; + + if (!revtv_type) { + static const GTypeInfo revtv_info = { + sizeof (GstRevTVClass), + gst_revtv_base_init, + NULL, + (GClassInitFunc) gst_revtv_class_init, + NULL, + NULL, + sizeof (GstRevTV), + 0, + (GInstanceInitFunc) gst_revtv_init, + }; + + revtv_type = + g_type_register_static (GST_TYPE_VIDEOFILTER, "GstRevTV", &revtv_info, + 0); + } + return revtv_type; +} diff --git a/gst/effectv/gstshagadelic.c b/gst/effectv/gstshagadelic.c index e3c50d23..cedf9c92 100644 --- a/gst/effectv/gstshagadelic.c +++ b/gst/effectv/gstshagadelic.c @@ -25,10 +25,13 @@ #ifdef HAVE_CONFIG_H #include "config.h" #endif + +#include <gstvideofilter.h> + #include <math.h> #include <string.h> -#include <gst/gst.h> -#include <gstvideofilter.h> + +#include <gst/video/video.h> #define GST_TYPE_SHAGADELICTV \ (gst_shagadelictv_get_type()) @@ -64,135 +67,81 @@ struct _GstShagadelicTVClass GstVideofilterClass parent_class; }; -/* Filter signals and args */ -enum -{ - /* FILL ME */ - LAST_SIGNAL -}; - -enum -{ - ARG_0 -}; - -static void gst_shagadelictv_base_init (gpointer g_class); -static void gst_shagadelictv_class_init (gpointer g_class, gpointer class_data); -static void gst_shagadelictv_init (GTypeInstance * instance, gpointer g_class); +GType gst_shagadelictv_get_type (void); static void gst_shagadelic_initialize (GstShagadelicTV * filter); -static void gst_shagadelictv_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec); -static void gst_shagadelictv_get_property (GObject * object, guint prop_id, - GValue * value, GParamSpec * pspec); -static void 
gst_shagadelictv_setup (GstVideofilter * videofilter); -static void gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d, - void *s); - -/*static guint gst_shagadelictv_signals[LAST_SIGNAL] = { 0 }; */ - -GType -gst_shagadelictv_get_type (void) +static GstElementDetails shagadelictv_details = +GST_ELEMENT_DETAILS ("ShagadelicTV", + "Filter/Effect/Video", + "Oh behave, ShagedelicTV makes images shagadelic!", + "Wim Taymans <wim.taymans@chello.be>"); + +static GstStaticPadTemplate gst_shagadelictv_src_template = +GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx) + ); + +static GstStaticPadTemplate gst_shagadelictv_sink_template = +GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx) + ); + +static GstVideofilterClass *parent_class = NULL; + +static gboolean +gst_shagadelictv_set_caps (GstBaseTransform * btrans, GstCaps * incaps, + GstCaps * outcaps) { - static GType shagadelictv_type = 0; - - if (!shagadelictv_type) { - static const GTypeInfo shagadelictv_info = { - sizeof (GstShagadelicTVClass), - gst_shagadelictv_base_init, - NULL, - (GClassInitFunc) gst_shagadelictv_class_init, - NULL, - NULL, - sizeof (GstShagadelicTV), - 0, - (GInstanceInitFunc) gst_shagadelictv_init, - }; + GstShagadelicTV *filter = GST_SHAGADELICTV (btrans); + GstStructure *structure; + gboolean ret = FALSE; - shagadelictv_type = - g_type_register_static (GST_TYPE_VIDEOFILTER, "GstShagadelicTV", - &shagadelictv_info, 0); - } - return shagadelictv_type; -} - -static GstVideofilterFormat gst_shagadelictv_formats[] = { - {"RGB ", 32, gst_shagadelictv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000, - 0xff000000} -}; + structure = gst_caps_get_structure (incaps, 0); -static void -gst_shagadelictv_base_init (gpointer g_class) -{ - /* elementfactory information */ - static GstElementDetails gst_shagadelictv_details = - GST_ELEMENT_DETAILS ("ShagadelicTV", - "Filter/Effect/Video", - "Oh behave, ShagedelicTV makes images shagadelic!", - "Wim Taymans <wim.taymans@chello.be>"); + if (gst_structure_get_int (structure, "width", &filter->width) && + gst_structure_get_int (structure, "height", &filter->height)) { + gint area = filter->width * filter->height; - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class); - int i; + g_free (filter->ripple); + g_free (filter->spiral); - gst_element_class_set_details (element_class, &gst_shagadelictv_details); + filter->ripple = (gchar *) g_malloc (area * 4); + filter->spiral = (gchar *) g_malloc (area); - for (i = 0; i < G_N_ELEMENTS (gst_shagadelictv_formats); i++) { - gst_videofilter_class_add_format (videofilter_class, - gst_shagadelictv_formats + i); + gst_shagadelic_initialize (filter); + ret = TRUE; } - gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class)); -} - -static void -gst_shagadelictv_class_init (gpointer g_class, gpointer class_data) -{ - GObjectClass *gobject_class; - GstVideofilterClass *videofilter_class; - - gobject_class = G_OBJECT_CLASS (g_class); - videofilter_class = GST_VIDEOFILTER_CLASS (g_class); - - gobject_class->set_property = gst_shagadelictv_set_property; - gobject_class->get_property = gst_shagadelictv_get_property; - - videofilter_class->setup = gst_shagadelictv_setup; -} - -static void -gst_shagadelictv_init (GTypeInstance * instance, gpointer g_class) -{ - GstShagadelicTV *filter = GST_SHAGADELICTV (instance); - - filter->ripple = NULL; - 
filter->spiral = NULL; + return ret; } -static void -gst_shagadelictv_setup (GstVideofilter * videofilter) +static gboolean +gst_shagadelictv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps, + guint * size) { GstShagadelicTV *filter; - int width = gst_videofilter_get_input_width (videofilter); - int height = gst_videofilter_get_input_height (videofilter); - int area; - - g_return_if_fail (GST_IS_SHAGADELICTV (videofilter)); - filter = GST_SHAGADELICTV (videofilter); - - filter->width = width; - filter->height = height; + GstStructure *structure; + gboolean ret = FALSE; + gint width, height; - area = filter->width * filter->height; + filter = GST_SHAGADELICTV (btrans); - g_free (filter->ripple); - g_free (filter->spiral); + structure = gst_caps_get_structure (caps, 0); - filter->ripple = (gchar *) g_malloc (area * 4); - filter->spiral = (gchar *) g_malloc (area); + if (gst_structure_get_int (structure, "width", &width) && + gst_structure_get_int (structure, "height", &height)) { + *size = width * height * 32 / 8; + ret = TRUE; + GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size, + width, height); + } - gst_shagadelic_initialize (filter); + return ret; } static unsigned int @@ -261,8 +210,9 @@ gst_shagadelic_initialize (GstShagadelicTV * filter) filter->phase = 0; } -static void -gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d, void *s) +static GstFlowReturn +gst_shagadelictv_transform (GstBaseTransform * trans, GstBuffer * in, + GstBuffer * out) { GstShagadelicTV *filter; guint32 *src, *dest; @@ -270,11 +220,14 @@ gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d, void *s) guint32 v; guchar r, g, b; gint width, height; + GstFlowReturn ret = GST_FLOW_OK; + + filter = GST_SHAGADELICTV (trans); - filter = GST_SHAGADELICTV (videofilter); + gst_buffer_stamp (out, in); - src = (guint32 *) s; - dest = (guint32 *) d; + src = (guint32 *) GST_BUFFER_DATA (in); + dest = (guint32 *) GST_BUFFER_DATA (out); width = filter->width; height = filter->height; @@ -310,37 +263,72 @@ gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d, void *s) filter->ry += filter->rvy; filter->bx += filter->bvx; filter->by += filter->bvy; + + return ret; } static void -gst_shagadelictv_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec) +gst_shagadelictv_base_init (gpointer g_class) { - GstShagadelicTV *filter; + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - g_return_if_fail (GST_IS_SHAGADELICTV (object)); + gst_element_class_set_details (element_class, &shagadelictv_details); - filter = GST_SHAGADELICTV (object); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_shagadelictv_sink_template)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_shagadelictv_src_template)); +} - switch (prop_id) { - default: - break; - } +static void +gst_shagadelictv_class_init (gpointer klass, gpointer class_data) +{ + GObjectClass *gobject_class; + GstElementClass *element_class; + GstBaseTransformClass *trans_class; + + gobject_class = (GObjectClass *) klass; + element_class = (GstElementClass *) klass; + trans_class = (GstBaseTransformClass *) klass; + + parent_class = g_type_class_peek_parent (klass); + + trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_shagadelictv_set_caps); + trans_class->get_unit_size = + GST_DEBUG_FUNCPTR (gst_shagadelictv_get_unit_size); + trans_class->transform = GST_DEBUG_FUNCPTR (gst_shagadelictv_transform); } static void 
-gst_shagadelictv_get_property (GObject * object, guint prop_id, GValue * value, - GParamSpec * pspec) +gst_shagadelictv_init (GTypeInstance * instance, gpointer g_class) { - GstShagadelicTV *filter; + GstShagadelicTV *filter = GST_SHAGADELICTV (instance); - g_return_if_fail (GST_IS_SHAGADELICTV (object)); + filter->ripple = NULL; + filter->spiral = NULL; +} - filter = GST_SHAGADELICTV (object); +GType +gst_shagadelictv_get_type (void) +{ + static GType shagadelictv_type = 0; + + if (!shagadelictv_type) { + static const GTypeInfo shagadelictv_info = { + sizeof (GstShagadelicTVClass), + gst_shagadelictv_base_init, + NULL, + (GClassInitFunc) gst_shagadelictv_class_init, + NULL, + NULL, + sizeof (GstShagadelicTV), + 0, + (GInstanceInitFunc) gst_shagadelictv_init, + }; - switch (prop_id) { - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); - break; + shagadelictv_type = + g_type_register_static (GST_TYPE_VIDEOFILTER, "GstShagadelicTV", + &shagadelictv_info, 0); } + return shagadelictv_type; } diff --git a/gst/effectv/gstvertigo.c b/gst/effectv/gstvertigo.c index d44a5a0b..abe92a52 100644 --- a/gst/effectv/gstvertigo.c +++ b/gst/effectv/gstvertigo.c @@ -25,10 +25,13 @@ #ifdef HAVE_CONFIG_H #include "config.h" #endif + +#include <gstvideofilter.h> + #include <math.h> #include <string.h> -#include <gst/gst.h> -#include <gstvideofilter.h> + +#include <gst/video/video.h> #define GST_TYPE_VERTIGOTV \ (gst_vertigotv_get_type()) @@ -61,18 +64,11 @@ struct _GstVertigoTV struct _GstVertigoTVClass { GstVideofilterClass parent_class; - - void (*reset) (GstElement * element); }; -/* Filter signals and args */ -enum -{ - /* FILL ME */ - RESET_SIGNAL, - LAST_SIGNAL -}; +GType gst_vertigotv_get_type (void); +/* Filter signals and args */ enum { ARG_0, @@ -80,152 +76,77 @@ enum ARG_ZOOM_SPEED }; -static void gst_vertigotv_base_init (gpointer g_class); -static void gst_vertigotv_class_init (GstVertigoTVClass * klass, - gpointer class_data); -static void gst_vertigotv_init (GTypeInstance * instance, gpointer g_class); -static void gst_vertigotv_setup (GstVideofilter * videofilter); - -static void gst_vertigotv_reset_handler (GstElement * element); - -static void gst_vertigotv_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec); -static void gst_vertigotv_get_property (GObject * object, guint prop_id, - GValue * value, GParamSpec * pspec); -static void gst_vertigotv_rgb32 (GstVideofilter * videofilter, void *d, - void *s); - -static guint gst_vertigotv_signals[LAST_SIGNAL] = { 0 }; - -GType -gst_vertigotv_get_type (void) +static GstElementDetails vertigotv_details = GST_ELEMENT_DETAILS ("VertigoTV", + "Filter/Effect/Video", + "A loopback alpha blending effector with rotating and scaling", + "Wim Taymans <wim.taymans@chello.be>"); + +static GstStaticPadTemplate gst_vertigotv_src_template = +GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx) + ); + +static GstStaticPadTemplate gst_vertigotv_sink_template = +GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx) + ); + +static GstVideofilterClass *parent_class = NULL; + +static gboolean +gst_vertigotv_set_caps (GstBaseTransform * btrans, GstCaps * incaps, + GstCaps * outcaps) { - static GType vertigotv_type = 0; + GstVertigoTV *filter = GST_VERTIGOTV (btrans); + GstStructure *structure; + gboolean ret = FALSE; - if (!vertigotv_type) { - static const GTypeInfo vertigotv_info = { - sizeof 
(GstVertigoTVClass), - gst_vertigotv_base_init, - NULL, - (GClassInitFunc) gst_vertigotv_class_init, - NULL, - NULL, - sizeof (GstVertigoTV), - 0, - (GInstanceInitFunc) gst_vertigotv_init, - }; + structure = gst_caps_get_structure (incaps, 0); - vertigotv_type = - g_type_register_static (GST_TYPE_VIDEOFILTER, "GstVertigoTV", - &vertigotv_info, 0); - } - return vertigotv_type; -} - -static GstVideofilterFormat gst_vertigotv_formats[] = { - {"RGB ", 32, gst_vertigotv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000, - 0xff000000} -}; + if (gst_structure_get_int (structure, "width", &filter->width) && + gst_structure_get_int (structure, "height", &filter->height)) { + gint area = filter->width * filter->height; -static void -gst_vertigotv_base_init (gpointer g_class) -{ - /* elementfactory information */ - static GstElementDetails vertigotv_details = GST_ELEMENT_DETAILS ("VertigoTV", - "Filter/Effect/Video", - "A loopback alpha blending effector with rotating and scaling", - "Wim Taymans <wim.taymans@chello.be>"); - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class); - int i; + g_free (filter->buffer); + filter->buffer = (guint32 *) g_malloc (area * 2 * sizeof (guint32)); - gst_element_class_set_details (element_class, &vertigotv_details); + memset (filter->buffer, 0, area * 2 * sizeof (guint32)); + filter->current_buffer = filter->buffer; + filter->alt_buffer = filter->buffer + area; + filter->phase = 0; - for (i = 0; i < G_N_ELEMENTS (gst_vertigotv_formats); i++) { - gst_videofilter_class_add_format (videofilter_class, - gst_vertigotv_formats + i); + ret = TRUE; } - gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class)); + return ret; } -static void -gst_vertigotv_class_init (GstVertigoTVClass * klass, gpointer class_data) -{ - GObjectClass *gobject_class; - GstElementClass *gstelement_class; - GstVideofilterClass *videofilter_class; - - gobject_class = (GObjectClass *) klass; - gstelement_class = (GstElementClass *) klass; - videofilter_class = GST_VIDEOFILTER_CLASS (klass); - - gst_vertigotv_signals[RESET_SIGNAL] = - g_signal_new ("reset-parms", - G_TYPE_FROM_CLASS (klass), - G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, - G_STRUCT_OFFSET (GstVertigoTVClass, reset), - NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0); - - klass->reset = gst_vertigotv_reset_handler; - - gobject_class->set_property = gst_vertigotv_set_property; - gobject_class->get_property = gst_vertigotv_get_property; - - g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_SPEED, - g_param_spec_float ("speed", "Speed", "Control the speed of movement", - 0.01, 100.0, 0.02, G_PARAM_READWRITE)); - g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_ZOOM_SPEED, - g_param_spec_float ("zoom_speed", "Zoom Speed", - "Control the rate of zooming", 1.01, 1.1, 1.01, G_PARAM_READWRITE)); - - videofilter_class->setup = gst_vertigotv_setup; -} - -static void -gst_vertigotv_reset_handler (GstElement * element) -{ - GstVertigoTV *filter = GST_VERTIGOTV (element); - - filter->phase = 0.0; - filter->phase_increment = 0.02; - filter->zoomrate = 1.01; -} - -static void -gst_vertigotv_setup (GstVideofilter * videofilter) +static gboolean +gst_vertigotv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps, + guint * size) { GstVertigoTV *filter; - gint area; - int width = gst_videofilter_get_input_width (videofilter); - int height = gst_videofilter_get_input_height (videofilter); - - g_return_if_fail (GST_IS_VERTIGOTV 
(videofilter)); - filter = GST_VERTIGOTV (videofilter); - - filter->width = width; - filter->height = height; + GstStructure *structure; + gboolean ret = FALSE; + gint width, height; - area = width * height; + filter = GST_VERTIGOTV (btrans); - g_free (filter->buffer); - filter->buffer = (guint32 *) g_malloc (area * 2 * sizeof (guint32)); + structure = gst_caps_get_structure (caps, 0); - memset (filter->buffer, 0, area * 2 * sizeof (guint32)); - filter->current_buffer = filter->buffer; - filter->alt_buffer = filter->buffer + area; - filter->phase = 0; -} - -static void -gst_vertigotv_init (GTypeInstance * instance, gpointer g_class) -{ - GstVertigoTV *filter = GST_VERTIGOTV (instance); + if (gst_structure_get_int (structure, "width", &width) && + gst_structure_get_int (structure, "height", &height)) { + *size = width * height * 32 / 8; + ret = TRUE; + GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size, + width, height); + } - filter->buffer = NULL; - filter->phase = 0.0; - filter->phase_increment = 0.02; - filter->zoomrate = 1.01; + return ret; } static void @@ -276,22 +197,22 @@ gst_vertigotv_set_parms (GstVertigoTV * filter) filter->phase = 0; } -static void -gst_vertigotv_rgb32 (GstVideofilter * videofilter, void *d, void *s) +static GstFlowReturn +gst_vertigotv_transform (GstBaseTransform * trans, GstBuffer * in, + GstBuffer * out) { GstVertigoTV *filter; - guint32 *src, *dest; - guint32 *p; + guint32 *src, *dest, *p; guint32 v; - gint x, y; - gint ox, oy; - gint i; - gint width, height, area; + gint x, y, ox, oy, i, width, height, area; + GstFlowReturn ret = GST_FLOW_OK; + + filter = GST_VERTIGOTV (trans); - filter = GST_VERTIGOTV (videofilter); + gst_buffer_stamp (out, in); - src = (guint32 *) s; - dest = (guint32 *) d; + src = (guint32 *) GST_BUFFER_DATA (in); + dest = (guint32 *) GST_BUFFER_DATA (out); width = filter->width; height = filter->height; @@ -327,6 +248,8 @@ gst_vertigotv_rgb32 (GstVideofilter * videofilter, void *d, void *s) p = filter->current_buffer; filter->current_buffer = filter->alt_buffer; filter->alt_buffer = p; + + return ret; } static void @@ -373,3 +296,80 @@ gst_vertigotv_get_property (GObject * object, guint prop_id, GValue * value, break; } } + +static void +gst_vertigotv_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_set_details (element_class, &vertigotv_details); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_vertigotv_sink_template)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_vertigotv_src_template)); +} + +static void +gst_vertigotv_class_init (gpointer klass, gpointer class_data) +{ + GObjectClass *gobject_class; + GstElementClass *element_class; + GstBaseTransformClass *trans_class; + + gobject_class = (GObjectClass *) klass; + element_class = (GstElementClass *) klass; + trans_class = (GstBaseTransformClass *) klass; + + parent_class = g_type_class_peek_parent (klass); + + gobject_class->set_property = gst_vertigotv_set_property; + gobject_class->get_property = gst_vertigotv_get_property; + + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_SPEED, + g_param_spec_float ("speed", "Speed", "Control the speed of movement", + 0.01, 100.0, 0.02, G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_ZOOM_SPEED, + g_param_spec_float ("zoom_speed", "Zoom Speed", + "Control the rate of zooming", 1.01, 1.1, 1.01, G_PARAM_READWRITE)); + + 
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_vertigotv_set_caps); + trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_vertigotv_get_unit_size); + trans_class->transform = GST_DEBUG_FUNCPTR (gst_vertigotv_transform); +} + +static void +gst_vertigotv_init (GTypeInstance * instance, gpointer g_class) +{ + GstVertigoTV *filter = GST_VERTIGOTV (instance); + + filter->buffer = NULL; + filter->phase = 0.0; + filter->phase_increment = 0.02; + filter->zoomrate = 1.01; +} + +GType +gst_vertigotv_get_type (void) +{ + static GType vertigotv_type = 0; + + if (!vertigotv_type) { + static const GTypeInfo vertigotv_info = { + sizeof (GstVertigoTVClass), + gst_vertigotv_base_init, + NULL, + (GClassInitFunc) gst_vertigotv_class_init, + NULL, + NULL, + sizeof (GstVertigoTV), + 0, + (GInstanceInitFunc) gst_vertigotv_init, + }; + + vertigotv_type = + g_type_register_static (GST_TYPE_VIDEOFILTER, "GstVertigoTV", + &vertigotv_info, 0); + } + return vertigotv_type; +} diff --git a/gst/effectv/gstwarp.c b/gst/effectv/gstwarp.c index 4030613d..4cf4b548 100644 --- a/gst/effectv/gstwarp.c +++ b/gst/effectv/gstwarp.c @@ -37,17 +37,17 @@ #include "config.h" #endif -#include <gst/gst.h> #include <gstvideofilter.h> + #include <string.h> #include <math.h> -#include "gsteffectv.h" + +#include <gst/video/video.h> #ifndef M_PI #define M_PI 3.14159265358979323846 #endif - #define GST_TYPE_WARPTV \ (gst_warptv_get_type()) #define GST_WARPTV(obj) \ @@ -79,196 +79,85 @@ struct _GstWarpTVClass GstVideofilterClass parent_class; }; +GType gst_warptv_get_type (void); -/* GstWarpTV signals and args */ -enum -{ - /* FILL ME */ - LAST_SIGNAL -}; - -enum -{ - ARG_0 - /* FILL ME */ -}; - -static void gst_warptv_base_init (gpointer g_class); -static void gst_warptv_class_init (gpointer g_class, gpointer class_data); -static void gst_warptv_init (GTypeInstance * instance, gpointer g_class); - -static void gst_warptv_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec); -static void gst_warptv_get_property (GObject * object, guint prop_id, - GValue * value, GParamSpec * pspec); - -static void gst_warptv_setup (GstVideofilter * videofilter); static void initSinTable (GstWarpTV * filter); static void initOffsTable (GstWarpTV * filter); static void initDistTable (GstWarpTV * filter); -static void gst_warptv_rgb32 (GstVideofilter * videofilter, void *d, void *s); -GType -gst_warptv_get_type (void) +static GstElementDetails warptv_details = GST_ELEMENT_DETAILS ("WarpTV", + "Filter/Effect/Video", + "WarpTV does realtime goo'ing of the video input", + "Sam Lantinga <slouken@devolution.com>"); + +static GstStaticPadTemplate gst_warptv_src_template = + GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";" + GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR) + ); + +static GstStaticPadTemplate gst_warptv_sink_template = + GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";" + GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR) + ); + +static GstVideofilterClass *parent_class = NULL; + +static gboolean +gst_warptv_set_caps (GstBaseTransform * btrans, GstCaps * incaps, + GstCaps * outcaps) { - static GType warptv_type = 0; + GstWarpTV *filter = GST_WARPTV (btrans); + GstStructure *structure; + gboolean ret = FALSE; - if (!warptv_type) { - static const GTypeInfo warptv_info = { - sizeof (GstWarpTVClass), - gst_warptv_base_init, - NULL, - gst_warptv_class_init, - NULL, 
- NULL, - sizeof (GstWarpTV), - 0, - gst_warptv_init, - }; + structure = gst_caps_get_structure (incaps, 0); - warptv_type = g_type_register_static (GST_TYPE_VIDEOFILTER, - "GstWarpTV", &warptv_info, 0); - } - return warptv_type; -} + if (gst_structure_get_int (structure, "width", &filter->width) && + gst_structure_get_int (structure, "height", &filter->height)) { + g_free (filter->disttable); + g_free (filter->offstable); -static GstVideofilterFormat gst_warptv_formats[] = { - {"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0x00ff0000, 0x0000ff00, - 0x000000ff}, - {"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0xff000000, 0x00ff0000, - 0x0000ff00}, - {"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0x000000ff, 0x0000ff00, - 0x00ff0000}, - {"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000, - 0xff000000}, -}; + filter->offstable = g_malloc (filter->height * sizeof (guint32)); + filter->disttable = + g_malloc (filter->width * filter->height * sizeof (guint32)); -static void -gst_warptv_base_init (gpointer g_class) -{ - static GstElementDetails warptv_details = GST_ELEMENT_DETAILS ("WarpTV", - "Filter/Effect/Video", - "WarpTV does realtime goo'ing of the video input", - "Sam Lantinga <slouken@devolution.com>"); - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class); - int i; - - gst_element_class_set_details (element_class, &warptv_details); - - for (i = 0; i < G_N_ELEMENTS (gst_warptv_formats); i++) { - gst_videofilter_class_add_format (videofilter_class, - gst_warptv_formats + i); + initSinTable (filter); + initOffsTable (filter); + initDistTable (filter); + ret = TRUE; } - gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class)); + return ret; } -static void -gst_warptv_class_init (gpointer g_class, gpointer class_data) +static gboolean +gst_warptv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps, + guint * size) { - GObjectClass *gobject_class; - GstVideofilterClass *videofilter_class; - - gobject_class = G_OBJECT_CLASS (g_class); - videofilter_class = GST_VIDEOFILTER_CLASS (g_class); - - gobject_class->set_property = gst_warptv_set_property; - gobject_class->get_property = gst_warptv_get_property; - -#if 0 - g_object_class_install_property (gobject_class, ARG_METHOD, - g_param_spec_enum ("method", "method", "method", - GST_TYPE_WARPTV_METHOD, GST_WARPTV_METHOD_1, G_PARAM_READWRITE)); -#endif - - videofilter_class->setup = gst_warptv_setup; -} - -static void -gst_warptv_init (GTypeInstance * instance, gpointer g_class) -{ - GstWarpTV *warptv = GST_WARPTV (instance); - GstVideofilter *videofilter; - - GST_DEBUG ("gst_warptv_init"); - - videofilter = GST_VIDEOFILTER (warptv); - - /* do stuff */ -} - -static void -gst_warptv_set_property (GObject * object, guint prop_id, const GValue * value, - GParamSpec * pspec) -{ - GstWarpTV *src; - - g_return_if_fail (GST_IS_WARPTV (object)); - src = GST_WARPTV (object); - - GST_DEBUG ("gst_warptv_set_property"); - switch (prop_id) { -#if 0 - case ARG_METHOD: - src->method = g_value_get_enum (value); - break; -#endif - default: - break; - } -} + GstWarpTV *filter; + GstStructure *structure; + gboolean ret = FALSE; + gint width, height; -static void -gst_warptv_get_property (GObject * object, guint prop_id, GValue * value, - GParamSpec * pspec) -{ - GstWarpTV *src; + filter = GST_WARPTV (btrans); - g_return_if_fail (GST_IS_WARPTV (object)); - src = GST_WARPTV (object); + structure = gst_caps_get_structure (caps, 0); - 
switch (prop_id) { -#if 0 - case ARG_METHOD: - g_value_set_enum (value, src->method); - break; -#endif - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); - break; + if (gst_structure_get_int (structure, "width", &width) && + gst_structure_get_int (structure, "height", &height)) { + *size = width * height * 32 / 8; + ret = TRUE; + GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size, + width, height); } -} - - -static void -gst_warptv_setup (GstVideofilter * videofilter) -{ - GstWarpTV *warptv; - int width = gst_videofilter_get_input_width (videofilter); - int height = gst_videofilter_get_input_height (videofilter); - - g_return_if_fail (GST_IS_WARPTV (videofilter)); - warptv = GST_WARPTV (videofilter); - - /* if any setup needs to be done, do it here */ - - warptv->width = width; - warptv->height = height; -#if 0 - /* FIXME this should be reset in PAUSE->READY, not here */ - warptv->tval = 0; -#endif - - g_free (warptv->disttable); - g_free (warptv->offstable); - warptv->offstable = g_malloc (height * sizeof (guint32)); - warptv->disttable = g_malloc (width * height * sizeof (guint32)); - - initSinTable (warptv); - initOffsTable (warptv); - initDistTable (warptv); + return ret; } static void @@ -323,21 +212,21 @@ initDistTable (GstWarpTV * filter) #endif } -static void -gst_warptv_rgb32 (GstVideofilter * videofilter, void *d, void *s) +static GstFlowReturn +gst_warptv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out) { - GstWarpTV *warptv; - int width = gst_videofilter_get_input_width (videofilter); - int height = gst_videofilter_get_input_height (videofilter); - guint32 *src = s; - guint32 *dest = d; + GstWarpTV *warptv = GST_WARPTV (trans); + int width = warptv->width; + int height = warptv->height; + guint32 *src = (guint32 *) GST_BUFFER_DATA (in); + guint32 *dest = (guint32 *) GST_BUFFER_DATA (out); gint xw, yw, cw; gint32 c, i, x, y, dx, dy, maxx, maxy; gint32 skip, *ctptr, *distptr; gint32 *sintable, *ctable; + GstFlowReturn ret = GST_FLOW_OK; - g_return_if_fail (GST_IS_WARPTV (videofilter)); - warptv = GST_WARPTV (videofilter); + gst_buffer_stamp (out, in); xw = (gint) (sin ((warptv->tval + 100) * M_PI / 128) * 30); yw = (gint) (sin ((warptv->tval) * M_PI / 256) * -35); @@ -383,4 +272,66 @@ gst_warptv_rgb32 (GstVideofilter * videofilter, void *d, void *s) } warptv->tval = (warptv->tval + 1) & 511; + + return ret; +} + +static void +gst_warptv_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_set_details (element_class, &warptv_details); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_warptv_sink_template)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_warptv_src_template)); +} + +static void +gst_warptv_class_init (gpointer klass, gpointer class_data) +{ + GObjectClass *gobject_class; + GstElementClass *element_class; + GstBaseTransformClass *trans_class; + + gobject_class = (GObjectClass *) klass; + element_class = (GstElementClass *) klass; + trans_class = (GstBaseTransformClass *) klass; + + parent_class = g_type_class_peek_parent (klass); + + trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_warptv_set_caps); + trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_warptv_get_unit_size); + trans_class->transform = GST_DEBUG_FUNCPTR (gst_warptv_transform); +} + +static void +gst_warptv_init (GTypeInstance * instance, gpointer g_class) +{ +} + +GType +gst_warptv_get_type 
(void)
+{
+  static GType warptv_type = 0;
+
+  if (!warptv_type) {
+    static const GTypeInfo warptv_info = {
+      sizeof (GstWarpTVClass),
+      gst_warptv_base_init,
+      NULL,
+      gst_warptv_class_init,
+      NULL,
+      NULL,
+      sizeof (GstWarpTV),
+      0,
+      gst_warptv_init,
+    };
+
+    warptv_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
+        "GstWarpTV", &warptv_info, 0);
+  }
+  return warptv_type;
+}
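
The hunks above repeat one mechanical port across gstrev.c, gstshagadelic.c, gstvertigo.c and gstwarp.c: the per-format rgb32 callbacks, the setup vfunc and the gst_videofilter_class_add_format () tables are removed, and each element instead fills in the GstBaseTransform vfuncs directly, with set_caps caching width/height (and reallocating ripple/spiral or distance tables where needed), get_unit_size reporting one full frame, and transform doing the pixel work on GST_BUFFER_DATA. The condensed skeleton below restates that pattern; it is a sketch only, the GstExampleTV / gst_exampletv_* names are invented for illustration and do not exist in the tree, and it assumes the same gstvideofilter.h and gst/video/video.h headers the ported files include.

/* Hypothetical skeleton of the pattern applied in this commit; names are
 * invented for illustration only.  Assumes the 0.9-era headers used above. */
#include <gstvideofilter.h>
#include <gst/video/video.h>

typedef struct _GstExampleTV
{
  GstVideofilter videofilter;

  gint width, height;           /* cached from the negotiated caps */
} GstExampleTV;

typedef struct _GstExampleTVClass
{
  GstVideofilterClass parent_class;
} GstExampleTVClass;

/* set_caps replaces the old videofilter_class->setup hook: remember the
 * negotiated geometry and (re)allocate any per-instance tables here. */
static gboolean
gst_exampletv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
    GstCaps * outcaps)
{
  GstExampleTV *filter = (GstExampleTV *) btrans;
  GstStructure *structure = gst_caps_get_structure (incaps, 0);

  return gst_structure_get_int (structure, "width", &filter->width) &&
      gst_structure_get_int (structure, "height", &filter->height);
}

/* One unit is a whole 32-bit RGB frame, so basetransform can allocate
 * correctly sized output buffers for the non-in-place transform. */
static gboolean
gst_exampletv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
    guint * size)
{
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  gint width, height;

  if (!gst_structure_get_int (structure, "width", &width) ||
      !gst_structure_get_int (structure, "height", &height))
    return FALSE;

  *size = width * height * 32 / 8;
  return TRUE;
}

/* transform replaces the old rgb32 process callback: stamp the output
 * buffer with the input timestamps and work on the raw buffer data. */
static GstFlowReturn
gst_exampletv_transform (GstBaseTransform * trans, GstBuffer * in,
    GstBuffer * out)
{
  GstExampleTV *filter = (GstExampleTV *) trans;
  guint32 *src = (guint32 *) GST_BUFFER_DATA (in);
  guint32 *dest = (guint32 *) GST_BUFFER_DATA (out);
  gint i, area = filter->width * filter->height;

  gst_buffer_stamp (out, in);

  for (i = 0; i < area; i++)
    dest[i] = src[i];           /* a real element applies its effect here */

  return GST_FLOW_OK;
}

static void
gst_exampletv_class_init (gpointer klass, gpointer class_data)
{
  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;

  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_exampletv_set_caps);
  trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_exampletv_get_unit_size);
  trans_class->transform = GST_DEBUG_FUNCPTR (gst_exampletv_transform);
}

Each ported file additionally trades the old format table for static BGRx (or RGBx/xRGB/xBGR in gstwarp.c) pad templates registered from base_init, and its gst_*_get_type () registration still names GST_TYPE_VIDEOFILTER as the parent type, which is where the GstBaseTransform behaviour now comes from per the commit message.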