author     Wim Taymans <wim.taymans@collabora.co.uk>   2011-08-04 09:36:07 +0200
committer  Wim Taymans <wim.taymans@collabora.co.uk>   2011-08-04 09:36:07 +0200
commit     01b9b5002fb057604855dffc9faac4903df4b8d7 (patch)
tree       e6fc518a8dadb3d755e4ee409becba0cd783fee8 /ext
parent     2988a4ccb224f0be59e0b67ca84e88a01bcee8ec (diff)
parent     cc9e4903138894963e52d52dfc22ffb23d763b9c (diff)
Merge branch 'master' into 0.11
Conflicts:
	common
	configure.ac
	gst/colorspace/colorspace.c
	gst/colorspace/colorspace.h
	gst/colorspace/gstcolorspace.c
Diffstat (limited to 'ext')
-rw-r--r--  ext/Makefile.am                      |    8
-rw-r--r--  ext/assrender/gstassrender.c         |    7
-rw-r--r--  ext/cog/cogvirtframe.c               |    9
-rw-r--r--  ext/dirac/gstdiracenc.cc             |   15
-rw-r--r--  ext/directfb/dfbvideosink.c          |    4
-rw-r--r--  ext/jp2k/gstjasperdec.c              |   40
-rw-r--r--  ext/jp2k/gstjasperenc.c              |   40
-rw-r--r--  ext/lv2/gstlv2.c                     |    3
-rw-r--r--  ext/modplug/gstmodplug.cc            |    5
-rw-r--r--  ext/neon/gstneonhttpsrc.c            |   24
-rw-r--r--  ext/opencv/Makefile.am               |   12
-rw-r--r--  ext/opencv/MotionCells.cpp           |  593
-rw-r--r--  ext/opencv/MotionCells.h             |  259
-rw-r--r--  ext/opencv/gstmotioncells.c          | 1109
-rw-r--r--  ext/opencv/gstmotioncells.h          |  124
-rw-r--r--  ext/opencv/gstopencv.c               |    4
-rw-r--r--  ext/opencv/motioncells_wrapper.cpp   |  213
-rw-r--r--  ext/opencv/motioncells_wrapper.h     |   89
-rw-r--r--  ext/opus/Makefile.am                 |   16
-rw-r--r--  ext/opus/gstopus.c                   |   50
-rw-r--r--  ext/opus/gstopusdec.c                |  865
-rw-r--r--  ext/opus/gstopusdec.h                |   77
-rw-r--r--  ext/opus/gstopusenc.c                | 1198
-rw-r--r--  ext/opus/gstopusenc.h                |  105
-rw-r--r--  ext/resindvd/gstpesfilter.c          |    3
-rw-r--r--  ext/resindvd/rsnstreamselector.c     |   15
-rw-r--r--  ext/rtmp/Makefile.am                 |    4
-rw-r--r--  ext/rtmp/gstrtmp.c                   |   54
-rw-r--r--  ext/rtmp/gstrtmpsink.c               |  347
-rw-r--r--  ext/rtmp/gstrtmpsink.h               |   68
-rw-r--r--  ext/rtmp/gstrtmpsrc.c                |   17
-rw-r--r--  ext/schroedinger/gstschrodec.c       |   56
-rw-r--r--  ext/schroedinger/gstschroenc.c       |   11
-rw-r--r--  ext/sndfile/gstsfsrc.c               |    5
-rw-r--r--  ext/timidity/gsttimidity.c           |    3
-rw-r--r--  ext/timidity/gstwildmidi.c           |    7
-rw-r--r--  ext/vp8/gstvp8dec.c                  |    6
-rw-r--r--  ext/vp8/gstvp8enc.c                  |   62
-rw-r--r--  ext/zbar/gstzbar.c                   |    3
39 files changed, 5298 insertions(+), 232 deletions(-)
diff --git a/ext/Makefile.am b/ext/Makefile.am
index 70d4c69c3..2a6f8ec76 100644
--- a/ext/Makefile.am
+++ b/ext/Makefile.am
@@ -262,6 +262,12 @@ else
OPENCV_DIR=
endif
+if USE_OPUS
+OPUS_DIR=opus
+else
+OPUS_DIR=
+endif
+
if USE_RSVG
RSVG_DIR=rsvg
else
@@ -419,6 +425,7 @@ SUBDIRS=\
$(OFA_DIR) \
$(OPENAL_DIR) \
$(OPENCV_DIR) \
+ $(OPUS_DIR) \
$(RSVG_DIR) \
$(SCHRO_DIR) \
$(SDL_DIR) \
@@ -471,6 +478,7 @@ DIST_SUBDIRS = \
ofa \
openal \
opencv \
+ opus \
rsvg \
resindvd \
schroedinger \
diff --git a/ext/assrender/gstassrender.c b/ext/assrender/gstassrender.c
index c5a8e1bcd..2727365db 100644
--- a/ext/assrender/gstassrender.c
+++ b/ext/assrender/gstassrender.c
@@ -570,10 +570,13 @@ blit_i420 (GstAssRender * render, ASS_Image * ass_image, GstBuffer * buffer)
const guint8 *src;
guint8 *dst_y, *dst_u, *dst_v;
gint x, y, w, h;
+/* FIXME ignoring source image stride might be wrong here */
+#if 0
gint w2;
+ gint src_stride;
+#endif
gint width = render->width;
gint height = render->height;
- gint src_stride;
gint y_offset, y_stride;
gint u_offset, u_stride;
gint v_offset, v_stride;
@@ -609,9 +612,11 @@ blit_i420 (GstAssRender * render, ASS_Image * ass_image, GstBuffer * buffer)
w = MIN (ass_image->w, width - ass_image->dst_x);
h = MIN (ass_image->h, height - ass_image->dst_y);
+#if 0
w2 = (w + 1) / 2;
src_stride = ass_image->stride;
+#endif
src = ass_image->bitmap;
dst_y =
diff --git a/ext/cog/cogvirtframe.c b/ext/cog/cogvirtframe.c
index 0fc0ad129..e6d08e551 100644
--- a/ext/cog/cogvirtframe.c
+++ b/ext/cog/cogvirtframe.c
@@ -520,12 +520,11 @@ cog_virt_frame_render_resample_vert_1tap (CogFrame * frame, void *_dest,
int n_src;
int scale = frame->param1;
int acc;
- int x;
int src_i;
acc = scale * i;
src_i = acc >> 8;
- x = acc & 0xff;
+ /* x = acc & 0xff; */
n_src = frame->virt_frame1->components[component].height;
src1 = cog_virt_frame_get_line (frame->virt_frame1, component,
@@ -634,10 +633,9 @@ cog_virt_frame_render_resample_horiz_1tap (CogFrame * frame, void *_dest,
{
uint8_t *dest = _dest;
uint8_t *src;
- int n_src;
int scale = frame->param1;
- n_src = frame->virt_frame1->components[component].width;
+ /* n_src = frame->virt_frame1->components[component].width; */
src = cog_virt_frame_get_line (frame->virt_frame1, component, i);
cogorc_resample_horiz_1tap (dest, src, 0, scale,
@@ -650,10 +648,9 @@ cog_virt_frame_render_resample_horiz_2tap (CogFrame * frame, void *_dest,
{
uint8_t *dest = _dest;
uint8_t *src;
- int n_src;
int scale = frame->param1;
- n_src = frame->virt_frame1->components[component].width;
+ /* n_src = frame->virt_frame1->components[component].width; */
src = cog_virt_frame_get_line (frame->virt_frame1, component, i);
cogorc_resample_horiz_2tap (dest, src, 0, scale,
diff --git a/ext/dirac/gstdiracenc.cc b/ext/dirac/gstdiracenc.cc
index 1c499d71f..c42fc17eb 100644
--- a/ext/dirac/gstdiracenc.cc
+++ b/ext/dirac/gstdiracenc.cc
@@ -24,6 +24,7 @@
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstbasevideoencoder.h>
+#include <gst/video/gstbasevideoutils.h>
#include <string.h>
#include <libdirac_encoder/dirac_encoder.h>
#include <math.h>
@@ -149,7 +150,7 @@ static gboolean gst_dirac_enc_set_format (GstBaseVideoEncoder *
base_video_encoder, GstVideoState * state);
static gboolean gst_dirac_enc_start (GstBaseVideoEncoder * base_video_encoder);
static gboolean gst_dirac_enc_stop (GstBaseVideoEncoder * base_video_encoder);
-static gboolean gst_dirac_enc_finish (GstBaseVideoEncoder * base_video_encoder);
+static GstFlowReturn gst_dirac_enc_finish (GstBaseVideoEncoder * base_video_encoder);
static GstFlowReturn gst_dirac_enc_handle_frame (GstBaseVideoEncoder *
base_video_encoder, GstVideoFrame * frame);
static GstFlowReturn gst_dirac_enc_shape_output (GstBaseVideoEncoder *
@@ -223,13 +224,11 @@ static void
gst_dirac_enc_class_init (GstDiracEncClass * klass)
{
GObjectClass *gobject_class;
- GstElementClass *gstelement_class;
GstBaseVideoEncoderClass *basevideoencoder_class;
//int i;
gobject_class = G_OBJECT_CLASS (klass);
- gstelement_class = GST_ELEMENT_CLASS (klass);
basevideoencoder_class = GST_BASE_VIDEO_ENCODER_CLASS (klass);
gobject_class->set_property = gst_dirac_enc_set_property;
@@ -843,7 +842,7 @@ gst_dirac_enc_stop (GstBaseVideoEncoder * base_video_encoder)
return TRUE;
}
-static gboolean
+static GstFlowReturn
gst_dirac_enc_finish (GstBaseVideoEncoder * base_video_encoder)
{
GstDiracEnc *dirac_enc = GST_DIRAC_ENC (base_video_encoder);
@@ -852,7 +851,7 @@ gst_dirac_enc_finish (GstBaseVideoEncoder * base_video_encoder)
gst_dirac_enc_process (dirac_enc, TRUE);
- return TRUE;
+ return GST_FLOW_OK;
}
static GstFlowReturn
@@ -1136,7 +1135,6 @@ gst_dirac_enc_process (GstDiracEnc * dirac_enc, gboolean end_sequence)
{
GstBuffer *outbuf;
GstFlowReturn ret;
- int presentation_frame;
int parse_code;
int state;
GstVideoFrame *frame;
@@ -1192,8 +1190,6 @@ gst_dirac_enc_process (GstDiracEnc * dirac_enc, gboolean end_sequence)
dirac_enc->pull_frame_num++;
parse_code = ((guint8 *) GST_BUFFER_DATA (outbuf))[4];
- /* FIXME */
- presentation_frame = 0;
if (DIRAC_PARSE_CODE_IS_SEQ_HEADER (parse_code)) {
frame->is_sync_point = TRUE;
@@ -1230,7 +1226,6 @@ gst_dirac_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
{
GstDiracEnc *dirac_enc;
- int dpn;
int delay;
int dist;
int pt;
@@ -1241,8 +1236,6 @@ gst_dirac_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder,
dirac_enc = GST_DIRAC_ENC (base_video_encoder);
- dpn = frame->decode_frame_number;
-
pt = frame->presentation_frame_number * 2 + dirac_enc->granule_offset;
dt = frame->decode_frame_number * 2 + dirac_enc->granule_offset;
delay = pt - dt;
diff --git a/ext/directfb/dfbvideosink.c b/ext/directfb/dfbvideosink.c
index a6db91073..c32d27af0 100644
--- a/ext/directfb/dfbvideosink.c
+++ b/ext/directfb/dfbvideosink.c
@@ -2223,8 +2223,8 @@ gst_dfbvideosink_init (GstDfbVideoSink * dfbvideosink)
{
dfbvideosink->pool_lock = g_mutex_new ();
dfbvideosink->buffer_pool = NULL;
- dfbvideosink->video_height = dfbvideosink->out_width = 0;
- dfbvideosink->video_width = dfbvideosink->out_height = 0;
+ dfbvideosink->video_height = dfbvideosink->out_height = 0;
+ dfbvideosink->video_width = dfbvideosink->out_width = 0;
dfbvideosink->fps_d = 0;
dfbvideosink->fps_n = 0;
diff --git a/ext/jp2k/gstjasperdec.c b/ext/jp2k/gstjasperdec.c
index 159eb95c8..e896142bd 100644
--- a/ext/jp2k/gstjasperdec.c
+++ b/ext/jp2k/gstjasperdec.c
@@ -68,11 +68,6 @@ static GstStaticPadTemplate gst_jasper_dec_src_template =
GST_VIDEO_CAPS_YUV ("{ I420, YV12, YUY2, UYVY, Y41B, Y42B, v308 }"))
);
-static void gst_jasper_dec_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_jasper_dec_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-
static void gst_jasper_dec_reset (GstJasperDec * dec);
static GstStateChangeReturn gst_jasper_dec_change_state (GstElement * element,
GstStateChange transition);
@@ -114,18 +109,13 @@ gst_jasper_dec_base_init (gpointer g_class)
static void
gst_jasper_dec_class_init (GstJasperDecClass * klass)
{
- GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
GST_DEBUG_CATEGORY_INIT (gst_jasper_dec_debug, "jp2kdec", 0,
"Jasper JPEG2000 decoder");
- gobject_class->set_property = gst_jasper_dec_set_property;
- gobject_class->get_property = gst_jasper_dec_get_property;
-
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_jasper_dec_change_state);
}
@@ -819,36 +809,6 @@ invalid_bytes_segment:
}
}
-static void
-gst_jasper_dec_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec)
-{
- GstJasperDec *filter;
-
- filter = GST_JASPER_DEC (object);
-
- switch (prop_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
-
-static void
-gst_jasper_dec_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec)
-{
- GstJasperDec *filter;
-
- filter = GST_JASPER_DEC (object);
-
- switch (prop_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
-
static GstStateChangeReturn
gst_jasper_dec_change_state (GstElement * element, GstStateChange transition)
{
diff --git a/ext/jp2k/gstjasperenc.c b/ext/jp2k/gstjasperenc.c
index 90fca5159..ae7b92567 100644
--- a/ext/jp2k/gstjasperenc.c
+++ b/ext/jp2k/gstjasperenc.c
@@ -65,11 +65,6 @@ static GstStaticPadTemplate gst_jasper_enc_src_template =
"image/jp2")
);
-static void gst_jasper_enc_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_jasper_enc_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-
static void gst_jasper_enc_reset (GstJasperEnc * enc);
static GstStateChangeReturn gst_jasper_enc_change_state (GstElement * element,
GstStateChange transition);
@@ -118,18 +113,13 @@ gst_jasper_enc_base_init (gpointer g_class)
static void
gst_jasper_enc_class_init (GstJasperEncClass * klass)
{
- GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
GST_DEBUG_CATEGORY_INIT (gst_jasper_enc_debug, "jp2kenc", 0,
"Jasper JPEG2000 encoder");
- gobject_class->set_property = gst_jasper_enc_set_property;
- gobject_class->get_property = gst_jasper_enc_get_property;
-
/* FIXME add some encoder properties */
gstelement_class->change_state =
@@ -535,36 +525,6 @@ not_negotiated:
}
}
-static void
-gst_jasper_enc_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec)
-{
- GstJasperEnc *filter;
-
- filter = GST_JASPER_ENC (object);
-
- switch (prop_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
-
-static void
-gst_jasper_enc_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec)
-{
- GstJasperEnc *filter;
-
- filter = GST_JASPER_ENC (object);
-
- switch (prop_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
-
static GstStateChangeReturn
gst_jasper_enc_change_state (GstElement * element, GstStateChange transition)
{
diff --git a/ext/lv2/gstlv2.c b/ext/lv2/gstlv2.c
index dd1df3df1..f31050fc6 100644
--- a/ext/lv2/gstlv2.c
+++ b/ext/lv2/gstlv2.c
@@ -622,7 +622,6 @@ gst_lv2_setup (GstSignalProcessor * gsp, GstCaps * caps)
GstLV2Group *group = NULL;
GstAudioChannelPosition *positions = NULL;
GstPad *pad;
- GstCaps *pad_caps;
gsp_class = GST_SIGNAL_PROCESSOR_GET_CLASS (gsp);
lv2 = (GstLV2 *) gsp;
@@ -655,7 +654,6 @@ gst_lv2_setup (GstSignalProcessor * gsp, GstCaps * caps)
slv2_value_as_string (group->symbol)))) {
GST_INFO_OBJECT (lv2, "set audio channel positions on sink pad %s",
slv2_value_as_string (group->symbol));
- pad_caps = GST_PAD_CAPS (pad);
s = gst_caps_get_structure (caps, 0);
gst_audio_set_channel_positions (s, positions);
gst_object_unref (pad);
@@ -674,7 +672,6 @@ gst_lv2_setup (GstSignalProcessor * gsp, GstCaps * caps)
slv2_value_as_string (group->symbol)))) {
GST_INFO_OBJECT (lv2, "set audio channel positions on src pad %s",
slv2_value_as_string (group->symbol));
- pad_caps = GST_PAD_CAPS (pad);
s = gst_caps_get_structure (caps, 0);
gst_audio_set_channel_positions (s, positions);
gst_object_unref (pad);
diff --git a/ext/modplug/gstmodplug.cc b/ext/modplug/gstmodplug.cc
index b6b59eb12..6faaa0c83 100644
--- a/ext/modplug/gstmodplug.cc
+++ b/ext/modplug/gstmodplug.cc
@@ -370,15 +370,20 @@ gst_modplug_src_event (GstPad * pad, GstEvent * event)
GstSeekType cur_type, stop_type;
gboolean flush;
gint64 cur, stop;
+/* FIXME timestamp is set but not used */
+#if 0
guint64 timestamp;
+#endif
if (modplug->frequency == 0) {
GST_DEBUG_OBJECT (modplug, "no song loaded yet");
break;
}
+#if 0
timestamp = gst_util_uint64_scale_int (modplug->offset, GST_SECOND,
modplug->frequency);
+#endif
gst_event_parse_seek (event, &rate, &format, &flags,
&cur_type, &cur, &stop_type, &stop);
diff --git a/ext/neon/gstneonhttpsrc.c b/ext/neon/gstneonhttpsrc.c
index 2844dcd64..b295f0837 100644
--- a/ext/neon/gstneonhttpsrc.c
+++ b/ext/neon/gstneonhttpsrc.c
@@ -98,6 +98,7 @@ static gboolean gst_neonhttp_src_get_size (GstBaseSrc * bsrc, guint64 * size);
static gboolean gst_neonhttp_src_is_seekable (GstBaseSrc * bsrc);
static gboolean gst_neonhttp_src_do_seek (GstBaseSrc * bsrc,
GstSegment * segment);
+static gboolean gst_neonhttp_src_query (GstBaseSrc * bsrc, GstQuery * query);
static gboolean gst_neonhttp_src_set_proxy (GstNeonhttpSrc * src,
const gchar * uri);
@@ -268,6 +269,7 @@ gst_neonhttp_src_class_init (GstNeonhttpSrcClass * klass)
gstbasesrc_class->is_seekable =
GST_DEBUG_FUNCPTR (gst_neonhttp_src_is_seekable);
gstbasesrc_class->do_seek = GST_DEBUG_FUNCPTR (gst_neonhttp_src_do_seek);
+ gstbasesrc_class->query = GST_DEBUG_FUNCPTR (gst_neonhttp_src_query);
gstpushsrc_class->create = GST_DEBUG_FUNCPTR (gst_neonhttp_src_create);
@@ -778,6 +780,28 @@ gst_neonhttp_src_do_seek (GstBaseSrc * bsrc, GstSegment * segment)
}
static gboolean
+gst_neonhttp_src_query (GstBaseSrc * bsrc, GstQuery * query)
+{
+ GstNeonhttpSrc *src = GST_NEONHTTP_SRC (bsrc);
+ gboolean ret;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_URI:
+ gst_query_set_uri (query, src->location);
+ ret = TRUE;
+ break;
+ default:
+ ret = FALSE;
+ break;
+ }
+
+ if (!ret)
+ ret = GST_BASE_SRC_CLASS (parent_class)->query (bsrc, query);
+
+ return ret;
+}
+
+static gboolean
gst_neonhttp_src_set_location (GstNeonhttpSrc * src, const gchar * uri)
{
ne_uri_free (&src->uri);
diff --git a/ext/opencv/Makefile.am b/ext/opencv/Makefile.am
index d5a70edad..a32e16cfa 100644
--- a/ext/opencv/Makefile.am
+++ b/ext/opencv/Makefile.am
@@ -16,7 +16,12 @@ libgstopencv_la_SOURCES = gstopencv.c \
gstfacedetect.c \
gstpyramidsegment.c \
gsttemplatematch.c \
- gsttextoverlay.c
+ gsttextoverlay.c \
+ gstmotioncells.c \
+ motioncells_wrapper.cpp \
+ MotionCells.cpp
+
+libgstopencv_la_CXXFLAGS = $(GST_CXXFLAGS) $(OPENCV_CFLAGS)
# flags used to compile this facedetect
# add other _CFLAGS and _LIBS as needed
@@ -46,4 +51,7 @@ noinst_HEADERS = gstopencvvideofilter.h gstopencvutils.h \
gstfacedetect.h \
gstpyramidsegment.h \
gsttemplatematch.h \
- gsttextoverlay.h
+ gsttextoverlay.h \
+ gstmotioncells.h \
+ motioncells_wrapper.h \
+ MotionCells.h
diff --git a/ext/opencv/MotionCells.cpp b/ext/opencv/MotionCells.cpp
new file mode 100644
index 000000000..2b81b305d
--- /dev/null
+++ b/ext/opencv/MotionCells.cpp
@@ -0,0 +1,593 @@
+/*
+ * GStreamer
+ * Copyright (C) 2011 Robert Jobbagy <jobbagy.robert@gmail.com>
+ * Copyright (C) 2011 Nicola Murino <nicola.murino@gmail.com>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ *
+ * Alternatively, the contents of this file may be used under the
+ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
+ * which case the following provisions apply instead of the ones
+ * mentioned above:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include <cstdlib>
+#include <errno.h>
+#include <math.h>
+#include <gst/gst.h>
+#include <arpa/inet.h>
+#include "MotionCells.h"
+
+uint64_t ntohl64 (uint64_t val);
+uint64_t htonl64 (uint64_t val);
+
+uint64_t
+ntohl64 (uint64_t val)
+{
+ uint64_t res64;
+ uint32_t low = (uint32_t) (val & 0x00000000FFFFFFFFLL);
+ uint32_t high = (uint32_t) ((val & 0xFFFFFFFF00000000LL) >> 32);
+ low = ntohl (low);
+ high = ntohl (high);
+ res64 = (uint64_t) high + (((uint64_t) low) << 32);
+ return res64;
+}
+
+
+uint64_t
+htonl64 (uint64_t val)
+{
+ uint64_t res64;
+ uint32_t low = (uint32_t) (val & 0x00000000FFFFFFFFLL);
+ uint32_t high = (uint32_t) ((val & 0xFFFFFFFF00000000LL) >> 32);
+ low = htonl (low);
+ high = htonl (high);
+ res64 = (uint64_t) high + (((uint64_t) low) << 32);
+ return res64;
+}
+
+MotionCells::MotionCells ()
+{
+ m_framecnt = 0;
+ m_motioncells_idx_count = 0;
+ m_motioncellsidxcstr = NULL;
+ m_saveInDatafile = false;
+ mc_savefile = NULL;
+ m_pcurFrame = NULL;
+ m_pprevFrame = NULL;
+ transparencyimg = NULL;
+ m_pdifferenceImage = NULL;
+ m_pbwImage = NULL;
+ m_initdatafilefailed = new char[BUSMSGLEN];
+ m_savedatafilefailed = new char[BUSMSGLEN];
+ m_initerrorcode = 0;
+ m_saveerrorcode = 0;
+ m_alpha = 0.5;
+ m_beta = 0.5;
+
+}
+
+MotionCells::~MotionCells ()
+{
+ if (mc_savefile) {
+ fclose (mc_savefile);
+ mc_savefile = NULL;
+ }
+ delete[]m_initdatafilefailed;
+ delete[]m_savedatafilefailed;
+ if (m_motioncellsidxcstr)
+ delete[]m_motioncellsidxcstr;
+ if (m_pcurFrame)
+ cvReleaseImage (&m_pcurFrame);
+ if (m_pprevFrame)
+ cvReleaseImage (&m_pprevFrame);
+ if (transparencyimg)
+ cvReleaseImage (&transparencyimg);
+ if (m_pdifferenceImage)
+ cvReleaseImage (&m_pdifferenceImage);
+ if (m_pbwImage)
+ cvReleaseImage (&m_pbwImage);
+}
+
+int
+MotionCells::performDetectionMotionCells (IplImage * p_frame,
+ double p_sensitivity, double p_framerate, int p_gridx, int p_gridy,
+ gint64 timestamp_millisec, bool p_isVisible, bool p_useAlpha,
+ int motionmaskcoord_count, motionmaskcoordrect * motionmaskcoords,
+ int motionmaskcells_count, motioncellidx * motionmaskcellsidx,
+ cellscolor motioncellscolor, int motioncells_count,
+ motioncellidx * motioncellsidx, gint64 starttime, char *p_datafile,
+ bool p_changed_datafile, int p_thickness)
+{
+
+ int sumframecnt = 0;
+ int ret = 0;
+ p_framerate >= 1 ? p_framerate <= 5 ? sumframecnt = 1
+ : p_framerate <= 10 ? sumframecnt = 2
+ : p_framerate <= 15 ? sumframecnt = 3
+ : p_framerate <= 20 ? sumframecnt = 4
+ : p_framerate <= 25 ? sumframecnt = 5 : sumframecnt = 0 : sumframecnt = 0;
+
+ m_framecnt++;
+ m_changed_datafile = p_changed_datafile;
+ if (m_framecnt >= sumframecnt) {
+ m_useAlpha = p_useAlpha;
+ m_gridx = p_gridx;
+ m_gridy = p_gridy;
+ if (m_changed_datafile) {
+ ret = initDataFile (p_datafile, starttime);
+ if (ret != 0)
+ return ret;
+ }
+
+ m_frameSize = cvGetSize (p_frame);
+ m_frameSize.width /= 2;
+ m_frameSize.height /= 2;
+ setMotionCells (m_frameSize.width, m_frameSize.height);
+ m_sensitivity = 1 - p_sensitivity;
+ m_isVisible = p_isVisible;
+ m_pcurFrame = cvCloneImage (p_frame);
+ IplImage *m_pcurgreyImage = cvCreateImage (m_frameSize, IPL_DEPTH_8U, 1);
+ IplImage *m_pprevgreyImage = cvCreateImage (m_frameSize, IPL_DEPTH_8U, 1);
+ IplImage *m_pgreyImage = cvCreateImage (m_frameSize, IPL_DEPTH_8U, 1);
+ IplImage *m_pcurDown =
+ cvCreateImage (m_frameSize, m_pcurFrame->depth, m_pcurFrame->nChannels);
+ IplImage *m_pprevDown = cvCreateImage (m_frameSize, m_pprevFrame->depth,
+ m_pprevFrame->nChannels);
+ m_pbwImage = cvCreateImage (m_frameSize, IPL_DEPTH_8U, 1);
+ cvPyrDown (m_pprevFrame, m_pprevDown);
+ cvCvtColor (m_pprevDown, m_pprevgreyImage, CV_RGB2GRAY);
+ if (m_pprevFrame)
+ cvReleaseImage (&m_pprevFrame);
+ cvPyrDown (m_pcurFrame, m_pcurDown);
+ cvCvtColor (m_pcurDown, m_pcurgreyImage, CV_RGB2GRAY);
+ m_pdifferenceImage = cvCloneImage (m_pcurgreyImage);
+ //cvSmooth(m_pcurgreyImage, m_pcurgreyImage, CV_GAUSSIAN, 3, 0);//TODO camera noise reduce,something smoothing, and rethink runningavg weights
+
+ //Minus the current gray frame from the 8U moving average.
+ cvAbsDiff (m_pprevgreyImage, m_pcurgreyImage, m_pdifferenceImage);
+
+ //Convert the image to black and white.
+ cvAdaptiveThreshold (m_pdifferenceImage, m_pbwImage, 255,
+ CV_ADAPTIVE_THRESH_GAUSSIAN_C, CV_THRESH_BINARY_INV, 7);
+
+ // Dilate and erode to get object blobs
+ cvDilate (m_pbwImage, m_pbwImage, NULL, 2);
+ cvErode (m_pbwImage, m_pbwImage, NULL, 2);
+
+ //mask-out the overlay on difference image
+ if (motionmaskcoord_count > 0)
+ performMotionMaskCoords (motionmaskcoords, motionmaskcoord_count);
+ if (motionmaskcells_count > 0)
+ performMotionMask (motionmaskcellsidx, motionmaskcells_count);
+ if (getIsNonZero (m_pbwImage)) { //detect Motion
+ GST_DEBUG ("DETECT MOTION \n");
+ if (m_MotionCells.size () > 0) //it contains previous motioncells what we used when frames dropped
+ m_MotionCells.clear ();
+ if (transparencyimg)
+ cvReleaseImage (&transparencyimg);
+ (motioncells_count > 0) ?
+ calculateMotionPercentInMotionCells (motioncellsidx,
+ motioncells_count)
+ : calculateMotionPercentInMotionCells (motionmaskcellsidx, 0);
+
+ transparencyimg = cvCreateImage (cvGetSize (p_frame), p_frame->depth, 3);
+ cvSetZero (transparencyimg);
+ if (m_motioncellsidxcstr)
+ delete[]m_motioncellsidxcstr;
+ m_motioncells_idx_count = m_MotionCells.size () * MSGLEN; //one motion cell idx: (lin idx : col idx,) it's 4 character except last motion cell idx
+ m_motioncellsidxcstr = new char[m_motioncells_idx_count];
+ char *tmpstr = new char[MSGLEN];
+ for (int i = 0; i < MSGLEN; i++)
+ tmpstr[i] = ' ';
+ for (unsigned int i = 0; i < m_MotionCells.size (); i++) {
+ CvPoint pt1, pt2;
+ pt1.x = m_MotionCells.at (i).cell_pt1.x * 2;
+ pt1.y = m_MotionCells.at (i).cell_pt1.y * 2;
+ pt2.x = m_MotionCells.at (i).cell_pt2.x * 2;
+ pt2.y = m_MotionCells.at (i).cell_pt2.y * 2;
+ if (m_useAlpha && m_isVisible) {
+ cvRectangle (transparencyimg,
+ pt1,
+ pt2,
+ CV_RGB (motioncellscolor.B_channel_value,
+ motioncellscolor.G_channel_value,
+ motioncellscolor.R_channel_value), CV_FILLED);
+ } else if (m_isVisible) {
+ cvRectangle (p_frame,
+ pt1,
+ pt2,
+ CV_RGB (motioncellscolor.B_channel_value,
+ motioncellscolor.G_channel_value,
+ motioncellscolor.R_channel_value), p_thickness);
+ }
+
+ if (i < m_MotionCells.size () - 1) {
+ snprintf (tmpstr, MSGLEN, "%d:%d,", m_MotionCells.at (i).lineidx,
+ m_MotionCells.at (i).colidx);
+ } else {
+ snprintf (tmpstr, MSGLEN, "%d:%d", m_MotionCells.at (i).lineidx,
+ m_MotionCells.at (i).colidx);
+ }
+ if (i == 0)
+ strncpy (m_motioncellsidxcstr, tmpstr, m_motioncells_idx_count);
+ else
+ strcat (m_motioncellsidxcstr, tmpstr);
+ }
+ if (m_MotionCells.size () == 0)
+ strncpy (m_motioncellsidxcstr, " ", m_motioncells_idx_count);
+
+ if (m_useAlpha && m_isVisible) {
+ if (m_MotionCells.size () > 0)
+ blendImages (p_frame, transparencyimg, m_alpha, m_beta);
+ }
+
+ delete[]tmpstr;
+
+ if (mc_savefile && m_saveInDatafile) {
+ ret = saveMotionCells (timestamp_millisec);
+ if (ret != 0)
+ return ret;
+ }
+ } else {
+ m_motioncells_idx_count = 0;
+ if (m_MotionCells.size () > 0)
+ m_MotionCells.clear ();
+ if (transparencyimg)
+ cvReleaseImage (&transparencyimg);
+ }
+
+ m_pprevFrame = cvCloneImage (m_pcurFrame);
+ m_framecnt = 0;
+ if (m_pcurFrame)
+ cvReleaseImage (&m_pcurFrame);
+ if (m_pdifferenceImage)
+ cvReleaseImage (&m_pdifferenceImage);
+ if (m_pcurgreyImage)
+ cvReleaseImage (&m_pcurgreyImage);
+ if (m_pprevgreyImage)
+ cvReleaseImage (&m_pprevgreyImage);
+ if (m_pgreyImage)
+ cvReleaseImage (&m_pgreyImage);
+ if (m_pbwImage)
+ cvReleaseImage (&m_pbwImage);
+ if (m_pprevDown)
+ cvReleaseImage (&m_pprevDown);
+ if (m_pcurDown)
+ cvReleaseImage (&m_pcurDown);
+ if (m_pCells) {
+ for (int i = 0; i < m_gridy; ++i) {
+ delete[]m_pCells[i];
+ }
+ delete[]m_pCells;
+ }
+
+ if (p_framerate <= 5) {
+ if (m_MotionCells.size () > 0)
+ m_MotionCells.clear ();
+ if (transparencyimg)
+ cvReleaseImage (&transparencyimg);
+ }
+ } else { //we do frame drop
+ m_motioncells_idx_count = 0;
+ ret = -2;
+ for (unsigned int i = 0; i < m_MotionCells.size (); i++) {
+ CvPoint pt1, pt2;
+ pt1.x = m_MotionCells.at (i).cell_pt1.x * 2;
+ pt1.y = m_MotionCells.at (i).cell_pt1.y * 2;
+ pt2.x = m_MotionCells.at (i).cell_pt2.x * 2;
+ pt2.y = m_MotionCells.at (i).cell_pt2.y * 2;
+ if (m_useAlpha && m_isVisible) {
+ cvRectangle (transparencyimg,
+ pt1,
+ pt2,
+ CV_RGB (motioncellscolor.B_channel_value,
+ motioncellscolor.G_channel_value,
+ motioncellscolor.R_channel_value), CV_FILLED);
+ } else if (m_isVisible) {
+ cvRectangle (p_frame,
+ pt1,
+ pt2,
+ CV_RGB (motioncellscolor.B_channel_value,
+ motioncellscolor.G_channel_value,
+ motioncellscolor.R_channel_value), p_thickness);
+ }
+
+ }
+ if (m_useAlpha && m_isVisible) {
+ if (m_MotionCells.size () > 0)
+ blendImages (p_frame, transparencyimg, m_alpha, m_beta);
+ }
+ }
+ return ret;
+}
+
+int
+MotionCells::initDataFile (char *p_datafile, gint64 starttime) //p_date is increased with difference between current and previous buffer ts
+{
+ MotionCellData mcd;
+ if (strncmp (p_datafile, " ", 1)) {
+ mc_savefile = fopen (p_datafile, "w");
+ if (mc_savefile == NULL) {
+ //fprintf(stderr, "%s %d:initDataFile:fopen:%d (%s)\n", __FILE__, __LINE__, errno,
+ //strerror(errno));
+ strncpy (m_initdatafilefailed, strerror (errno), BUSMSGLEN - 1);
+ m_initerrorcode = errno;
+ return 1;
+ } else {
+ m_saveInDatafile = true;
+ }
+ } else
+ mc_savefile = NULL;
+ bzero (&m_header, sizeof (MotionCellHeader));
+ m_header.headersize = htonl (MC_HEADER);
+ m_header.type = htonl (MC_TYPE);
+ m_header.version = htonl (MC_VERSION);
+ //it needs these bytes
+ m_header.itemsize =
+ htonl ((int) ceil (ceil (m_gridx * m_gridy / 8.0) / 4.0) * 4 +
+ sizeof (mcd.timestamp));
+ m_header.gridx = htonl (m_gridx);
+ m_header.gridy = htonl (m_gridy);
+ m_header.starttime = htonl64 (starttime);
+
+ snprintf (m_header.name, sizeof (m_header.name), "%s %dx%d", MC_VERSIONTEXT,
+ ntohl (m_header.gridx), ntohl (m_header.gridy));
+ m_changed_datafile = false;
+ return 0;
+}
+
+int
+MotionCells::saveMotionCells (gint64 timestamp_millisec)
+{
+
+ MotionCellData mc_data;
+ mc_data.timestamp = htonl (timestamp_millisec);
+ mc_data.data = NULL;
+ //There is no datafile
+ if (mc_savefile == NULL)
+ return 0;
+
+ if (ftello (mc_savefile) == 0) {
+ //cerr << "Writing out file header"<< m_header.headersize <<":" << sizeof(MotionCellHeader) << " itemsize:"
+ //<< m_header.itemsize << endl;
+ if (fwrite (&m_header, sizeof (MotionCellHeader), 1, mc_savefile) != 1) {
+ //fprintf(stderr, "%s %d:saveMotionCells:fwrite:%d (%s)\n", __FILE__, __LINE__, errno,
+ //strerror(errno));
+ strncpy (m_savedatafilefailed, strerror (errno), BUSMSGLEN - 1);
+ m_saveerrorcode = errno;
+ return -1;
+ }
+ }
+
+ mc_data.data =
+ (char *) calloc (1,
+ ntohl (m_header.itemsize) - sizeof (mc_data.timestamp));
+ if (mc_data.data == NULL) {
+ //fprintf(stderr, "%s %d:saveMotionCells:calloc:%d (%s)\n", __FILE__, __LINE__, errno,
+ //strerror(errno));
+ strncpy (m_savedatafilefailed, strerror (errno), BUSMSGLEN - 1);
+ m_saveerrorcode = errno;
+ return -1;
+ }
+
+ for (unsigned int i = 0; i < m_MotionCells.size (); i++) {
+ int bitnum =
+ m_MotionCells.at (i).lineidx * ntohl (m_header.gridx) +
+ m_MotionCells.at (i).colidx;
+ int bytenum = (int) floor (bitnum / 8.0);
+ int shift = bitnum - bytenum * 8;
+ mc_data.data[bytenum] = mc_data.data[bytenum] | (1 << shift);
+ //cerr << "Motion Detected " << "line:" << m_MotionCells.at(i).lineidx << " col:" << m_MotionCells.at(i).colidx;
+ //cerr << " bitnum " << bitnum << " bytenum " << bytenum << " shift " << shift << " value " << (int)mc_data.data[bytenum] << endl;
+ }
+
+ if (fwrite (&mc_data.timestamp, sizeof (mc_data.timestamp), 1,
+ mc_savefile) != 1) {
+ //fprintf(stderr, "%s %d:saveMotionCells:fwrite:%d (%s)\n", __FILE__, __LINE__, errno,
+ //strerror(errno));
+ strncpy (m_savedatafilefailed, strerror (errno), BUSMSGLEN - 1);
+ m_saveerrorcode = errno;
+ return -1;
+ }
+
+ if (fwrite (mc_data.data,
+ ntohl (m_header.itemsize) - sizeof (mc_data.timestamp), 1,
+ mc_savefile) != 1) {
+ //fprintf(stderr, "%s %d:saveMotionCells:fwrite:%d (%s)\n", __FILE__, __LINE__, errno,
+ //strerror(errno));
+ strncpy (m_savedatafilefailed, strerror (errno), BUSMSGLEN - 1);
+ m_saveerrorcode = errno;
+ return -1;
+ }
+
+ free (mc_data.data);
+ return 0;
+}
+
+double
+MotionCells::calculateMotionPercentInCell (int p_row, int p_col,
+ double *p_cellarea, double *p_motionarea)
+{
+ double cntpixelsnum = 0;
+ double cntmotionpixelnum = 0;
+
+ int ybegin = floor ((double) p_row * m_cellheight);
+ int yend = floor ((double) (p_row + 1) * m_cellheight);
+ int xbegin = floor ((double) (p_col) * m_cellwidth);
+ int xend = floor ((double) (p_col + 1) * m_cellwidth);
+ int cellw = xend - xbegin;
+ int cellh = yend - ybegin;
+ int cellarea = cellw * cellh;
+ *p_cellarea = cellarea;
+ int thresholdmotionpixelnum = floor ((double) cellarea * m_sensitivity);
+
+ for (int i = ybegin; i < yend; i++) {
+ for (int j = xbegin; j < xend; j++) {
+ cntpixelsnum++;
+ if ((((uchar *) (m_pbwImage->imageData + m_pbwImage->widthStep * i))[j]) >
+ 0) {
+ cntmotionpixelnum++;
+ if (cntmotionpixelnum >= thresholdmotionpixelnum) { //we dont needs calculate anymore
+ *p_motionarea = cntmotionpixelnum;
+ return (cntmotionpixelnum / cntpixelsnum);
+ }
+ }
+ int remainingpixelsnum = cellarea - cntpixelsnum;
+ if ((cntmotionpixelnum + remainingpixelsnum) < thresholdmotionpixelnum) { //moving pixels number will be less than threshold
+ *p_motionarea = 0;
+ return 0;
+ }
+ }
+ }
+
+ return (cntmotionpixelnum / cntpixelsnum);
+}
+
+void
+MotionCells::calculateMotionPercentInMotionCells (motioncellidx *
+ p_motioncellsidx, int p_motioncells_count)
+{
+ if (p_motioncells_count == 0) {
+ for (int i = 0; i < m_gridy; i++) {
+ for (int j = 0; j < m_gridx; j++) {
+ m_pCells[i][j].MotionPercent = calculateMotionPercentInCell (i, j,
+ &m_pCells[i][j].CellArea, &m_pCells[i][j].MotionArea);
+ m_pCells[i][j].hasMotion =
+ m_sensitivity < m_pCells[i][j].MotionPercent ? true : false;
+ if (m_pCells[i][j].hasMotion) {
+ MotionCellsIdx mci;
+ mci.lineidx = i;
+ mci.colidx = j;
+ mci.cell_pt1.x = floor ((double) j * m_cellwidth);
+ mci.cell_pt1.y = floor ((double) i * m_cellheight);
+ mci.cell_pt2.x = floor ((double) (j + 1) * m_cellwidth);
+ mci.cell_pt2.y = floor ((double) (i + 1) * m_cellheight);
+ int w = mci.cell_pt2.x - mci.cell_pt1.x;
+ int h = mci.cell_pt2.y - mci.cell_pt1.y;
+ mci.motioncell = cvRect (mci.cell_pt1.x, mci.cell_pt1.y, w, h);
+ m_MotionCells.push_back (mci);
+ }
+ }
+ }
+ } else {
+ for (int k = 0; k < p_motioncells_count; ++k) {
+
+ int i = p_motioncellsidx[k].lineidx;
+ int j = p_motioncellsidx[k].columnidx;
+ m_pCells[i][j].MotionPercent =
+ calculateMotionPercentInCell (i, j,
+ &m_pCells[i][j].CellArea, &m_pCells[i][j].MotionArea);
+ m_pCells[i][j].hasMotion =
+ m_pCells[i][j].MotionPercent > m_sensitivity ? true : false;
+ if (m_pCells[i][j].hasMotion) {
+ MotionCellsIdx mci;
+ mci.lineidx = p_motioncellsidx[k].lineidx;
+ mci.colidx = p_motioncellsidx[k].columnidx;
+ mci.cell_pt1.x = floor ((double) j * m_cellwidth);
+ mci.cell_pt1.y = floor ((double) i * m_cellheight);
+ mci.cell_pt2.x = floor ((double) (j + 1) * m_cellwidth);
+ mci.cell_pt2.y = floor ((double) (i + 1) * m_cellheight);
+ int w = mci.cell_pt2.x - mci.cell_pt1.x;
+ int h = mci.cell_pt2.y - mci.cell_pt1.y;
+ mci.motioncell = cvRect (mci.cell_pt1.x, mci.cell_pt1.y, w, h);
+ m_MotionCells.push_back (mci);
+ }
+ }
+ }
+}
+
+void
+MotionCells::performMotionMaskCoords (motionmaskcoordrect * p_motionmaskcoords,
+ int p_motionmaskcoords_count)
+{
+ CvPoint upperleft;
+ upperleft.x = 0;
+ upperleft.y = 0;
+ CvPoint lowerright;
+ lowerright.x = 0;
+ lowerright.y = 0;
+ for (int i = 0; i < p_motionmaskcoords_count; i++) {
+ upperleft.x = p_motionmaskcoords[i].upper_left_x;
+ upperleft.y = p_motionmaskcoords[i].upper_left_y;
+ lowerright.x = p_motionmaskcoords[i].lower_right_x;
+ lowerright.y = p_motionmaskcoords[i].lower_right_y;
+ cvRectangle (m_pbwImage, upperleft, lowerright, CV_RGB (0, 0, 0),
+ CV_FILLED);
+ }
+}
+
+void
+MotionCells::performMotionMask (motioncellidx * p_motionmaskcellsidx,
+ int p_motionmaskcells_count)
+{
+ for (int k = 0; k < p_motionmaskcells_count; k++) {
+ int beginy = p_motionmaskcellsidx[k].lineidx * m_cellheight;
+ int beginx = p_motionmaskcellsidx[k].columnidx * m_cellwidth;
+ int endx =
+ (double) p_motionmaskcellsidx[k].columnidx * m_cellwidth + m_cellwidth;
+ int endy =
+ (double) p_motionmaskcellsidx[k].lineidx * m_cellheight + m_cellheight;
+ for (int i = beginy; i < endy; i++)
+ for (int j = beginx; j < endx; j++) {
+ ((uchar *) (m_pbwImage->imageData + m_pbwImage->widthStep * i))[j] = 0;
+ }
+ }
+}
+
+///BGR if we use only OpenCV
+//RGB if we use gst+OpenCV
+void
+MotionCells::blendImages (IplImage * p_actFrame, IplImage * p_cellsFrame,
+ float p_alpha, float p_beta)
+{
+
+ int height = p_actFrame->height;
+ int width = p_actFrame->width;
+ int step = p_actFrame->widthStep / sizeof (uchar);
+ int channels = p_actFrame->nChannels;
+ int cellstep = p_cellsFrame->widthStep / sizeof (uchar);
+ uchar *curImageData = (uchar *) p_actFrame->imageData;
+ uchar *cellImageData = (uchar *) p_cellsFrame->imageData;
+
+ for (int i = 0; i < height; i++)
+ for (int j = 0; j < width; j++)
+ for (int k = 0; k < channels; k++)
+ if (cellImageData[i * cellstep + j * channels + k] > 0) {
+ curImageData[i * step + j * channels + k] =
+ round ((double) curImageData[i * step + j * channels +
+ k] * p_alpha + ((double) cellImageData[i * cellstep +
+ j * channels + k] * p_beta));
+ }
+}
diff --git a/ext/opencv/MotionCells.h b/ext/opencv/MotionCells.h
new file mode 100644
index 000000000..ee84fd6b5
--- /dev/null
+++ b/ext/opencv/MotionCells.h
@@ -0,0 +1,259 @@
+/*
+ * GStreamer
+ * Copyright (C) 2011 Robert Jobbagy <jobbagy.robert@gmail.com>
+ * Copyright (C) 2011 Nicola Murino <nicola.murino@gmail.com>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ *
+ * Alternatively, the contents of this file may be used under the
+ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
+ * which case the following provisions apply instead of the ones
+ * mentioned above:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef MOTIONCELLS_H_
+#define MOTIONCELLS_H_
+
+#include <cv.h> // includes OpenCV definitions
+#include <highgui.h> // includes highGUI definitions
+#include <iostream>
+#include <fstream>
+#include <vector>
+#include <cstdio>
+#include <cmath>
+#include <glib.h>
+
+//MotionCells defines
+#define MC_HEADER 64
+#define MC_TYPE 1
+#define MC_VERSION 1
+#define MC_VERSIONTEXT "MotionCells-1"
+#define MSGLEN 6
+#define BUSMSGLEN 20
+
+using namespace std;
+
+struct MotionCellHeader{
+ gint32 headersize;
+ gint32 type;
+ gint32 version;
+ gint32 itemsize;
+ gint32 gridx;
+ gint32 gridy;
+ gint64 starttime;
+ char name[MC_HEADER - 32];
+};
+
+struct MotionCellData{
+ gint32 timestamp;
+ char *data;
+};
+
+typedef struct {
+ int upper_left_x;
+ int upper_left_y;
+ int lower_right_x;
+ int lower_right_y;
+} motionmaskcoordrect;
+
+typedef struct {
+ int R_channel_value;
+ int G_channel_value;
+ int B_channel_value;
+} cellscolor;
+
+typedef struct {
+ int lineidx;
+ int columnidx;
+} motioncellidx;
+
+struct Cell
+{
+ double MotionArea;
+ double CellArea;
+ double MotionPercent;
+ bool hasMotion;
+};
+
+struct MotionCellsIdx
+{
+ CvRect motioncell;
+ //Points for the edges of the rectangle.
+ CvPoint cell_pt1;
+ CvPoint cell_pt2;
+ int lineidx;
+ int colidx;
+};
+
+struct OverlayRegions
+{
+ CvPoint upperleft;
+ CvPoint lowerright;
+};
+
+class MotionCells
+{
+public:
+
+ MotionCells ();
+ virtual ~ MotionCells ();
+
+ int performDetectionMotionCells (IplImage * p_frame, double p_sensitivity,
+ double p_framerate, int p_gridx, int p_gridy, gint64 timestamp_millisec,
+ bool p_isVisble, bool p_useAlpha, int motionmaskcoord_count,
+ motionmaskcoordrect * motionmaskcoords, int motionmaskcells_count,
+ motioncellidx * motionmaskcellsidx, cellscolor motioncellscolor,
+ int motioncells_count, motioncellidx * motioncellsidx, gint64 starttime,
+ char *datafile, bool p_changed_datafile, int p_thickness);
+
+ void setPrevFrame (IplImage * p_prevframe)
+ {
+ m_pprevFrame = cvCloneImage (p_prevframe);
+ }
+ char *getMotionCellsIdx ()
+ {
+ return m_motioncellsidxcstr;
+ }
+
+ int getMotionCellsIdxCount ()
+ {
+ return m_motioncells_idx_count;
+ }
+
+ bool getChangedDataFile ()
+ {
+ return m_changed_datafile;
+ }
+
+ char *getDatafileInitFailed ()
+ {
+ return m_initdatafilefailed;
+ }
+
+ char *getDatafileSaveFailed ()
+ {
+ return m_savedatafilefailed;
+ }
+
+ int getInitErrorCode ()
+ {
+ return m_initerrorcode;
+ }
+
+ int getSaveErrorCode ()
+ {
+ return m_saveerrorcode;
+ }
+
+ void freeDataFile ()
+ {
+ if (mc_savefile) {
+ fclose (mc_savefile);
+ mc_savefile = NULL;
+ m_saveInDatafile = false;
+ }
+ }
+
+private:
+
+ double calculateMotionPercentInCell (int p_row, int p_col, double *p_cellarea,
+ double *p_motionarea);
+ void performMotionMaskCoords (motionmaskcoordrect * p_motionmaskcoords,
+ int p_motionmaskcoords_count);
+ void performMotionMask (motioncellidx * p_motionmaskcellsidx,
+ int p_motionmaskcells_count);
+ void calculateMotionPercentInMotionCells (motioncellidx *
+ p_motionmaskcellsidx, int p_motionmaskcells_count = 0);
+ int saveMotionCells (gint64 timestamp_millisec);
+ int initDataFile (char *p_datafile, gint64 starttime);
+ void blendImages (IplImage * p_actFrame, IplImage * p_cellsFrame,
+ float p_alpha, float p_beta);
+
+ void setData (IplImage * img, int lin, int col, uchar valor)
+ {
+ ((uchar *) (img->imageData + img->widthStep * lin))[col] = valor;
+ }
+
+ uchar getData (IplImage * img, int lin, int col)
+ {
+ return ((uchar *) (img->imageData + img->widthStep * lin))[col];
+ }
+
+ bool getIsNonZero (IplImage * img)
+ {
+ for (int lin = 0; lin < img->height; lin++)
+ for (int col = 0; col < img->width; col++) {
+ if ((((uchar *) (img->imageData + img->widthStep * lin))[col]) > 0)
+ return true;
+ }
+ return false;
+ }
+
+ void setMotionCells (int p_frameWidth, int p_frameHeight)
+ {
+ m_cellwidth = (double) p_frameWidth / (double) m_gridx;
+ m_cellheight = (double) p_frameHeight / (double) m_gridy;
+ m_pCells = new Cell *[m_gridy];
+ for (int i = 0; i < m_gridy; i++)
+ m_pCells[i] = new Cell[m_gridx];
+
+ //init cells
+ for (int i = 0; i < m_gridy; i++)
+ for (int j = 0; j < m_gridx; j++) {
+ m_pCells[i][j].MotionArea = 0;
+ m_pCells[i][j].CellArea = 0;
+ m_pCells[i][j].MotionPercent = 0;
+ m_pCells[i][j].hasMotion = false;
+ }
+ }
+
+ IplImage *m_pcurFrame, *m_pprevFrame, *m_pdifferenceImage,
+ *m_pbwImage,*transparencyimg;
+ CvSize m_frameSize;
+ bool m_isVisible, m_changed_datafile, m_useAlpha, m_saveInDatafile;
+ Cell **m_pCells;
+ vector < MotionCellsIdx > m_MotionCells;
+ vector < OverlayRegions > m_OverlayRegions;
+ int m_gridx, m_gridy;
+ double m_cellwidth, m_cellheight;
+ double m_alpha, m_beta;
+ double m_thresholdBoundingboxArea, m_cellArea, m_sensitivity;
+ int m_framecnt, m_motioncells_idx_count, m_initerrorcode, m_saveerrorcode;
+ char *m_motioncellsidxcstr, *m_initdatafilefailed, *m_savedatafilefailed;
+ FILE *mc_savefile;
+ MotionCellHeader m_header;
+
+};
+
+#endif /* MOTIONCELLS_H_ */
diff --git a/ext/opencv/gstmotioncells.c b/ext/opencv/gstmotioncells.c
new file mode 100644
index 000000000..a349bcac1
--- /dev/null
+++ b/ext/opencv/gstmotioncells.c
@@ -0,0 +1,1109 @@
+/*
+ * GStreamer MotioCells detect areas of motion
+ * Copyright (C) 2011 Robert Jobbagy <jobbagy.robert@gmail.com>
+ * Copyright (C) 2011 Nicola Murino <nicola.murino@gmail.com>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ *
+ * Alternatively, the contents of this file may be used under the
+ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
+ * which case the following provisions apply instead of the ones
+ * mentioned above:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:element-motioncells
+ *
+ * Performs motion detection on videos.
+ *
+ * <refsect2>
+ * <title>Example launch line</title>
+ * |[
+ * gst-launch-0.10 videotestsrc pattern=18 ! videorate ! videoscale ! video/x-raw-yuv,width=320,height=240,framerate=5/1 ! ffmpegcolorspace ! motioncells ! ffmpegcolorspace ! xvimagesink
+ * ]|
+ * </refsect2>
+ */
+
+#ifdef HAVE_CONFIG_H
+# include <config.h>
+#endif
+
+#include <stdio.h>
+#include <string.h>
+#include <glib.h>
+#include "gstmotioncells.h"
+#include "motioncells_wrapper.h"
+#include <sys/time.h>
+#include <time.h>
+#include <limits.h>
+
+GST_DEBUG_CATEGORY_STATIC (gst_motion_cells_debug);
+#define GST_CAT_DEFAULT gst_motion_cells_debug
+
+#define GRID_DEF 10
+#define GRID_MIN 8
+#define GRID_MAX 32
+#define SENSITIVITY_DEFAULT 0.5
+#define SENSITIVITY_MIN 0
+#define SENSITIVITY_MAX 1
+#define THRESHOLD_MIN 0
+#define THRESHOLD_DEFAULT 0.01
+#define THRESHOLD_MAX 1.0
+#define GAP_MIN 1
+#define GAP_DEF 5
+#define GAP_MAX 60
+#define POST_NO_MOTION_MIN 0
+#define POST_NO_MOTION_DEF 0
+#define POST_NO_MOTION_MAX 180
+#define MINIMUM_MOTION_FRAMES_MIN 1
+#define MINIMUM_MOTION_FRAMES_DEF 1
+#define MINIMUM_MOTION_FRAMES_MAX 60
+#define THICKNESS_MIN -1
+#define THICKNESS_DEF 1
+#define THICKNESS_MAX 5
+#define DATE_MIN 0
+#define DATE_DEF 1
+#define DATE_MAX LONG_MAX
+#define DEF_DATAFILEEXT "vamc"
+#define MSGLEN 6
+#define BUSMSGLEN 20
+
+#define GFREE(POINTER)\
+ {\
+ g_free(POINTER);\
+ POINTER = NULL;\
+ }
+
+int instanceCounter = 0;
+gboolean element_id_was_max = false;
+
+/* Filter signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_GRID_X,
+ PROP_GRID_Y,
+ PROP_SENSITIVITY,
+ PROP_THRESHOLD,
+ PROP_DISPLAY,
+ PROP_DATE,
+ PROP_DATAFILE,
+ PROP_DATAFILE_EXT,
+ PROP_MOTIONMASKCOORD,
+ PROP_MOTIONMASKCELLSPOS,
+ PROP_CELLSCOLOR,
+ PROP_MOTIONCELLSIDX,
+ PROP_GAP,
+ PROP_POSTNOMOTION,
+ PROP_MINIMUNMOTIONFRAMES,
+ PROP_CALCULATEMOTION,
+ PROP_POSTALLMOTION,
+ PROP_USEALPHA,
+ PROP_MOTIONCELLTHICKNESS
+};
+
+/* the capabilities of the inputs and outputs.
+ */
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS ("video/x-raw-rgb"));
+
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS ("video/x-raw-rgb"));
+
+GST_BOILERPLATE (GstMotioncells, gst_motion_cells, GstElement,
+ GST_TYPE_ELEMENT);
+
+static void gst_motion_cells_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_motion_cells_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_motion_cells_set_caps (GstPad * pad, GstCaps * caps);
+static GstFlowReturn gst_motion_cells_chain (GstPad * pad, GstBuffer * buf);
+
+static void gst_motioncells_update_motion_cells (GstMotioncells * filter);
+static void gst_motioncells_update_motion_masks (GstMotioncells * filter);
+
+/* Clean up */
+static void
+gst_motion_cells_finalize (GObject * obj)
+{
+ GstMotioncells *filter = gst_motion_cells (obj);
+
+ motion_cells_free (filter->id);
+
+ //freeing previously allocated dynamic array
+ if (filter->motionmaskcoord_count > 0) {
+ GFREE (filter->motionmaskcoords);
+ }
+
+ if (filter->motionmaskcells_count > 0) {
+ GFREE (filter->motionmaskcellsidx);
+ }
+ if (filter->motioncells_count > 0) {
+ GFREE (filter->motioncellsidx);
+ }
+
+ if (filter->cvImage) {
+ cvReleaseImage (&filter->cvImage);
+ }
+
+ GFREE (filter->motioncellscolor);
+ GFREE (filter->prev_datafile);
+ GFREE (filter->cur_datafile);
+ GFREE (filter->basename_datafile);
+ GFREE (filter->datafile_extension);
+
+ g_mutex_free (filter->propset_mutex);
+
+ G_OBJECT_CLASS (parent_class)->finalize (obj);
+}
+
+/* GObject vmethod implementations */
+static void
+gst_motion_cells_base_init (gpointer gclass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
+
+ gst_element_class_set_details_simple (element_class,
+ "motioncells",
+ "Filter/Effect/Video",
+ "Performs motion detection on videos and images, providing detected motion cells index via bus messages",
+ "Robert Jobbagy <jobbagy dot robert at gmail dot com>, Nicola Murino <nicola dot murino at gmail.com>");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
+}
+
+/* initialize the motioncells's class */
+static void
+gst_motion_cells_class_init (GstMotioncellsClass * klass)
+{
+ GObjectClass *gobject_class;
+
+ gobject_class = (GObjectClass *) klass;
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_motion_cells_finalize);
+ gobject_class->set_property = gst_motion_cells_set_property;
+ gobject_class->get_property = gst_motion_cells_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_GRID_X,
+ g_param_spec_int ("gridx", "Number of Horizontal Grids",
+ "You can give number of horizontal grid cells.", GRID_MIN, GRID_MAX,
+ GRID_DEF, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_GRID_Y,
+ g_param_spec_int ("gridy", "Number of Vertical Grids",
+ "You can give number of vertical grid cells.", GRID_MIN, GRID_MAX,
+ GRID_DEF, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_SENSITIVITY,
+ g_param_spec_double ("sensitivity", "Motion Sensitivity",
+ "You can tunning the element motion sensitivity.", SENSITIVITY_MIN,
+ SENSITIVITY_MAX, SENSITIVITY_DEFAULT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_THRESHOLD,
+ g_param_spec_double ("threshold", "Lower bound of motion cells number",
+ "Threshold value for motion, when motion cells number greater sum cells * threshold, we show motion.",
+ THRESHOLD_MIN, THRESHOLD_MAX, THRESHOLD_DEFAULT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_GAP,
+ g_param_spec_int ("gap",
+ "Gap is time in second, elapsed time from last motion timestamp. ",
+ "If elapsed time minus form last motion timestamp is greater or equal than gap then we post motion finished bus message. ",
+ GAP_MIN, GAP_MAX, GAP_DEF,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_POSTNOMOTION,
+ g_param_spec_int ("postnomotion", "POSTNOMOTION",
+ "If non 0 post a no_motion event is posted on the bus if no motion is detected for N seconds",
+ POST_NO_MOTION_MIN, POST_NO_MOTION_MAX, POST_NO_MOTION_DEF,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MINIMUNMOTIONFRAMES,
+ g_param_spec_int ("minimummotionframes", "MINIMUN MOTION FRAMES",
+ "Define the minimum number of motion frames that trigger a motion event",
+ MINIMUM_MOTION_FRAMES_MIN, MINIMUM_MOTION_FRAMES_MAX,
+ MINIMUM_MOTION_FRAMES_DEF,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_DISPLAY,
+ g_param_spec_boolean ("display", "Display",
+ "Motion Cells visible or not on Current Frame", FALSE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_POSTALLMOTION,
+ g_param_spec_boolean ("postallmotion", "Post All Motion",
+ "Element post bus msg for every motion frame or just motion start and motion stop",
+ FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_USEALPHA,
+ g_param_spec_boolean ("usealpha", "Use alpha",
+ "Use or not alpha blending on frames with motion cells", TRUE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_DATE,
+ g_param_spec_long ("date", "Motion Cell Date",
+ "Current Date in milliseconds", DATE_MIN, DATE_MAX, DATE_DEF,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_DATAFILE,
+ g_param_spec_string ("datafile", "DataFile",
+ "Location of motioncells data file (empty string means no saving)",
+ NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_DATAFILE_EXT,
+ g_param_spec_string ("datafileextension", "DataFile Extension",
+ "Extension of datafile", DEF_DATAFILEEXT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MOTIONMASKCOORD,
+ g_param_spec_string ("motionmaskcoords", "Motion Mask with Coordinates",
+ "The upper left x, y and lower right x, y coordinates separated with \":\", "
+ "describe a region. Regions separated with \",\"", NULL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MOTIONMASKCELLSPOS,
+ g_param_spec_string ("motionmaskcellspos",
+ "Motion Mask with Cells Position",
+ "The line and column idx separated with \":\" what cells want we mask-out, "
+ "describe a cell. Cells separated with \",\"", NULL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_CELLSCOLOR,
+ g_param_spec_string ("cellscolor", "Color of Motion Cells",
+ "The color of motion cells separated with \",\"", "255,255,0",
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MOTIONCELLSIDX,
+ g_param_spec_string ("motioncellsidx", "Motion Cells Of Interest(MOCI)",
+ "The line and column idx separated with \":\", "
+ "describe a cell. Cells separated with \",\"", NULL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_CALCULATEMOTION,
+ g_param_spec_boolean ("calculatemotion", "Calculate Motion",
+ "If needs calculate motion on frame you need this property setting true otherwise false",
+ TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MOTIONCELLTHICKNESS,
+ g_param_spec_int ("motioncellthickness", "Motion Cell Thickness",
+ "Motion Cell Border Thickness, if it's -1 then motion cell will be fill",
+ THICKNESS_MIN, THICKNESS_MAX, THICKNESS_DEF,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+}
+
+/* initialize the new element
+ * instantiate pads and add them to element
+ * set pad callback functions
+ * initialize instance structure
+ */
+static void
+gst_motion_cells_init (GstMotioncells * filter, GstMotioncellsClass * gclass)
+{
+ filter->propset_mutex = g_mutex_new ();
+ filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
+ gst_pad_set_setcaps_function (filter->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_motion_cells_set_caps));
+ gst_pad_set_getcaps_function (filter->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
+ gst_pad_set_chain_function (filter->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_motion_cells_chain));
+
+ filter->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
+ gst_pad_set_getcaps_function (filter->srcpad,
+ GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps));
+
+ gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
+ gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
+
+ filter->display = TRUE;
+ filter->calculate_motion = TRUE;
+
+ filter->prevgridx = 0;
+ filter->prevgridy = 0;
+ filter->gridx = GRID_DEF;
+ filter->gridy = GRID_DEF;
+ filter->gap = GAP_DEF;
+ filter->postnomotion = POST_NO_MOTION_DEF;
+ filter->minimum_motion_frames = MINIMUM_MOTION_FRAMES_DEF;
+
+ filter->prev_datafile = g_strdup (NULL);
+ filter->cur_datafile = g_strdup (NULL);
+ filter->basename_datafile = g_strdup (NULL);
+ filter->datafile_extension = g_strdup (DEF_DATAFILEEXT);
+ filter->sensitivity = SENSITIVITY_DEFAULT;
+ filter->threshold = THRESHOLD_DEFAULT;
+
+ filter->motionmaskcoord_count = 0;
+ filter->motionmaskcoords = NULL;
+ filter->motionmaskcells_count = 0;
+ filter->motionmaskcellsidx = NULL;
+ filter->motioncellscolor = g_new0 (cellscolor, 1);
+ filter->motioncellscolor->R_channel_value = 255;
+ filter->motioncellscolor->G_channel_value = 255;
+ filter->motioncellscolor->B_channel_value = 0;
+ filter->motioncellsidx = NULL;
+ filter->motioncells_count = 0;
+ filter->motion_begin_timestamp = 0;
+ filter->last_motion_timestamp = 0;
+ filter->last_nomotion_notified = 0;
+ filter->consecutive_motion = 0;
+ filter->motion_timestamp = 0;
+ filter->prev_buff_timestamp = 0;
+ filter->cur_buff_timestamp = 0;
+ filter->diff_timestamp = -1;
+ gettimeofday (&filter->tv, NULL);
+ filter->starttime = 1000 * filter->tv.tv_sec;
+ filter->previous_motion = false;
+ filter->changed_datafile = false;
+ filter->postallmotion = false;
+ filter->usealpha = true;
+ filter->firstdatafile = false;
+ filter->firstgridx = true;
+ filter->firstgridy = true;
+ filter->changed_gridx = false;
+ filter->changed_gridy = false;
+ filter->firstframe = true;
+ filter->changed_startime = false;
+ filter->sent_init_error_msg = false;
+ filter->sent_save_error_msg = false;
+ filter->thickness = THICKNESS_DEF;
+
+ filter->datafileidx = 0;
+ g_mutex_lock (filter->propset_mutex);
+ filter->id = instanceCounter;
+ motion_cells_init ();
+ g_mutex_unlock (filter->propset_mutex);
+
+}
+
+static void
+gst_motion_cells_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstMotioncells *filter = gst_motion_cells (object);
+ //variables for overlay regions setup
+ gchar **strs, **colorstr, **motioncellsstr, **motionmaskcellsstr;
+ int i, ux, uy, lx, ly;
+ int r, g, b;
+ int cellscolorscnt = 0;
+ int linidx, colidx, masklinidx, maskcolidx;
+ int tmpux = -1;
+ int tmpuy = -1;
+ int tmplx = -1;
+ int tmply = -1;
+ GstStateChangeReturn ret;
+
+ g_mutex_lock (filter->propset_mutex);
+ switch (prop_id) {
+ case PROP_GRID_X:
+ ret = gst_element_get_state (GST_ELEMENT (filter),
+ &filter->state, NULL, 250 * GST_NSECOND);
+ filter->gridx = g_value_get_int (value);
+ if (filter->prevgridx != filter->gridx
+ && ret == GST_STATE_CHANGE_SUCCESS
+ && filter->state == GST_STATE_PLAYING) {
+ filter->changed_gridx = true;
+ }
+ filter->prevgridx = filter->gridx;
+ break;
+ case PROP_GRID_Y:
+ ret = gst_element_get_state (GST_ELEMENT (filter),
+ &filter->state, NULL, 250 * GST_NSECOND);
+ filter->gridy = g_value_get_int (value);
+ if (filter->prevgridy != filter->gridy
+ && ret == GST_STATE_CHANGE_SUCCESS
+ && filter->state == GST_STATE_PLAYING) {
+ filter->changed_gridy = true;
+ }
+ filter->prevgridy = filter->gridy;
+ break;
+ case PROP_GAP:
+ filter->gap = g_value_get_int (value);
+ break;
+ case PROP_POSTNOMOTION:
+ filter->postnomotion = g_value_get_int (value);
+ break;
+ case PROP_MINIMUNMOTIONFRAMES:
+ filter->minimum_motion_frames = g_value_get_int (value);
+ break;
+ case PROP_SENSITIVITY:
+ filter->sensitivity = g_value_get_double (value);
+ break;
+ case PROP_THRESHOLD:
+ filter->threshold = g_value_get_double (value);
+ break;
+ case PROP_DISPLAY:
+ filter->display = g_value_get_boolean (value);
+ break;
+ case PROP_POSTALLMOTION:
+ filter->postallmotion = g_value_get_boolean (value);
+ break;
+ case PROP_USEALPHA:
+ filter->usealpha = g_value_get_boolean (value);
+ break;
+ case PROP_CALCULATEMOTION:
+ filter->calculate_motion = g_value_get_boolean (value);
+ break;
+ case PROP_DATE:
+ ret = gst_element_get_state (GST_ELEMENT (filter),
+ &filter->state, NULL, 250 * GST_NSECOND);
+ if (ret == GST_STATE_CHANGE_SUCCESS && filter->state == GST_STATE_PLAYING) {
+ filter->changed_startime = true;
+ }
+ filter->starttime = g_value_get_long (value);
+ break;
+ case PROP_DATAFILE:
+ GFREE (filter->cur_datafile);
+ GFREE (filter->basename_datafile);
+ filter->basename_datafile = g_value_dup_string (value);
+
+ if (strlen (filter->basename_datafile) == 0) {
+ filter->cur_datafile = g_strdup (NULL);
+ break;
+ }
+ filter->cur_datafile =
+ g_strdup_printf ("%s-0.%s", filter->basename_datafile,
+ filter->datafile_extension);
+ if (g_strcmp0 (filter->prev_datafile, filter->basename_datafile) != 0) {
+ filter->changed_datafile = TRUE;
+ filter->sent_init_error_msg = FALSE;
+ filter->sent_save_error_msg = FALSE;
+ filter->datafileidx = 0;
+ motion_cells_free_resources (filter->id);
+ } else {
+ filter->changed_datafile = FALSE;
+ }
+
+ GFREE (filter->prev_datafile);
+ filter->prev_datafile = g_strdup (filter->basename_datafile);
+ break;
+ case PROP_DATAFILE_EXT:
+ GFREE (filter->datafile_extension);
+ filter->datafile_extension = g_value_dup_string (value);
+ break;
+ case PROP_MOTIONMASKCOORD:
+ strs = g_strsplit (g_value_get_string (value), ",", 255);
+ GFREE (filter->motionmaskcoords);
+ //count the number of regions
+ for (filter->motionmaskcoord_count = 0;
+ strs[filter->motionmaskcoord_count] != NULL;
+ ++filter->motionmaskcoord_count);
+ if (filter->motionmaskcoord_count > 0) {
+ sscanf (strs[0], "%d:%d:%d:%d", &tmpux, &tmpuy, &tmplx, &tmply);
+ if (tmpux > -1 && tmpuy > -1 && tmplx > -1 && tmply > -1) {
+ filter->motionmaskcoords =
+ g_new0 (motionmaskcoordrect, filter->motionmaskcoord_count);
+
+ for (i = 0; i < filter->motionmaskcoord_count; ++i) {
+ sscanf (strs[i], "%d:%d:%d:%d", &ux, &uy, &lx, &ly);
+ ux = CLAMP (ux, 0, filter->width - 1);
+ uy = CLAMP (uy, 0, filter->height - 1);
+ lx = CLAMP (lx, 0, filter->width - 1);
+ ly = CLAMP (ly, 0, filter->height - 1);
+ filter->motionmaskcoords[i].upper_left_x = ux;
+ filter->motionmaskcoords[i].upper_left_y = uy;
+ filter->motionmaskcoords[i].lower_right_x = lx;
+ filter->motionmaskcoords[i].lower_right_y = ly;
+ }
+ } else {
+ filter->motionmaskcoord_count = 0;
+ }
+ }
+ if (strs)
+ g_strfreev (strs);
+ tmpux = -1;
+ tmpuy = -1;
+ tmplx = -1;
+ tmply = -1;
+ break;
+ case PROP_MOTIONMASKCELLSPOS:
+ motionmaskcellsstr = g_strsplit (g_value_get_string (value), ",", 255);
+ GFREE (filter->motionmaskcellsidx);
+ //count the number of mask cells
+ for (filter->motionmaskcells_count = 0;
+ motionmaskcellsstr[filter->motionmaskcells_count] != NULL;
+ ++filter->motionmaskcells_count);
+ if (filter->motionmaskcells_count > 0) {
+ sscanf (motionmaskcellsstr[0], "%d:%d", &tmpux, &tmpuy);
+ if (tmpux > -1 && tmpuy > -1) {
+ filter->motionmaskcellsidx =
+ g_new0 (motioncellidx, filter->motionmaskcells_count);
+ for (i = 0; i < filter->motionmaskcells_count; ++i) {
+ sscanf (motionmaskcellsstr[i], "%d:%d", &masklinidx, &maskcolidx);
+ filter->motionmaskcellsidx[i].lineidx = masklinidx;
+ filter->motionmaskcellsidx[i].columnidx = maskcolidx;
+ }
+ } else {
+ filter->motionmaskcells_count = 0;
+ }
+ }
+ if (motionmaskcellsstr)
+ g_strfreev (motionmaskcellsstr);
+ tmpux = -1;
+ tmpuy = -1;
+ tmplx = -1;
+ tmply = -1;
+ break;
+ case PROP_CELLSCOLOR:
+ colorstr = g_strsplit (g_value_get_string (value), ",", 255);
+ for (cellscolorscnt = 0; colorstr[cellscolorscnt] != NULL;
+ ++cellscolorscnt);
+ if (cellscolorscnt == 3) {
+ sscanf (colorstr[0], "%d", &r);
+ sscanf (colorstr[1], "%d", &g);
+ sscanf (colorstr[2], "%d", &b);
+ //clamp to a valid RGB color range
+ r = CLAMP (r, 1, 255);
+ g = CLAMP (g, 1, 255);
+ b = CLAMP (b, 1, 255);
+ filter->motioncellscolor->R_channel_value = r;
+ filter->motioncellscolor->G_channel_value = g;
+ filter->motioncellscolor->B_channel_value = b;
+ }
+ if (colorstr)
+ g_strfreev (colorstr);
+ break;
+ case PROP_MOTIONCELLSIDX:
+ motioncellsstr = g_strsplit (g_value_get_string (value), ",", 255);
+
+ //count the number of cells of interest
+ for (filter->motioncells_count = 0;
+ motioncellsstr[filter->motioncells_count] != NULL;
+ ++filter->motioncells_count);
+ if (filter->motioncells_count > 0) {
+ sscanf (motioncellsstr[0], "%d:%d", &tmpux, &tmpuy);
+ if (tmpux > -1 && tmpuy > -1) {
+ GFREE (filter->motioncellsidx);
+
+ filter->motioncellsidx =
+ g_new0 (motioncellidx, filter->motioncells_count);
+
+ for (i = 0; i < filter->motioncells_count; ++i) {
+ sscanf (motioncellsstr[i], "%d:%d", &linidx, &colidx);
+ filter->motioncellsidx[i].lineidx = linidx;
+ filter->motioncellsidx[i].columnidx = colidx;
+ }
+ } else {
+ filter->motioncells_count = 0;
+ }
+ }
+ if (motioncellsstr)
+ g_strfreev (motioncellsstr);
+ tmpux = -1;
+ tmpuy = -1;
+ tmplx = -1;
+ tmply = -1;
+ break;
+ case PROP_MOTIONCELLTHICKNESS:
+ filter->thickness = g_value_get_int (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ g_mutex_unlock (filter->propset_mutex);
+}
+
+static void
+gst_motion_cells_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstMotioncells *filter = gst_motion_cells (object);
+ GString *str;
+ int i;
+
+ switch (prop_id) {
+ case PROP_GRID_X:
+ g_value_set_int (value, filter->gridx);
+ break;
+ case PROP_GRID_Y:
+ g_value_set_int (value, filter->gridy);
+ break;
+ case PROP_GAP:
+ g_value_set_int (value, filter->gap);
+ break;
+ case PROP_POSTNOMOTION:
+ g_value_set_int (value, filter->postnomotion);
+ break;
+ case PROP_MINIMUNMOTIONFRAMES:
+ g_value_set_int (value, filter->minimum_motion_frames);
+ break;
+ case PROP_SENSITIVITY:
+ g_value_set_double (value, filter->sensitivity);
+ break;
+ case PROP_THRESHOLD:
+ g_value_set_double (value, filter->threshold);
+ break;
+ case PROP_DISPLAY:
+ g_value_set_boolean (value, filter->display);
+ break;
+ case PROP_POSTALLMOTION:
+ g_value_set_boolean (value, filter->postallmotion);
+ break;
+ case PROP_USEALPHA:
+ g_value_set_boolean (value, filter->usealpha);
+ break;
+ case PROP_CALCULATEMOTION:
+ g_value_set_boolean (value, filter->calculate_motion);
+ break;
+ case PROP_DATE:
+ g_value_set_long (value, filter->starttime);
+ break;
+ case PROP_DATAFILE:
+ g_value_set_string (value, filter->basename_datafile);
+ break;
+ case PROP_DATAFILE_EXT:
+ g_value_set_string (value, filter->datafile_extension);
+ break;
+ case PROP_MOTIONMASKCOORD:
+ str = g_string_new ("");
+ for (i = 0; i < filter->motionmaskcoord_count; ++i) {
+ if (i < filter->motionmaskcoord_count - 1)
+ g_string_append_printf (str, "%d:%d:%d:%d,",
+ filter->motionmaskcoords[i].upper_left_x,
+ filter->motionmaskcoords[i].upper_left_y,
+ filter->motionmaskcoords[i].lower_right_x,
+ filter->motionmaskcoords[i].lower_right_y);
+ else
+ g_string_append_printf (str, "%d:%d:%d:%d",
+ filter->motionmaskcoords[i].upper_left_x,
+ filter->motionmaskcoords[i].upper_left_y,
+ filter->motionmaskcoords[i].lower_right_x,
+ filter->motionmaskcoords[i].lower_right_y);
+
+ }
+ g_value_set_string (value, str->str);
+ g_string_free (str, TRUE);
+ break;
+ case PROP_MOTIONMASKCELLSPOS:
+ str = g_string_new ("");
+ for (i = 0; i < filter->motionmaskcells_count; ++i) {
+ if (i < filter->motionmaskcells_count - 1)
+ g_string_append_printf (str, "%d:%d,",
+ filter->motionmaskcellsidx[i].lineidx,
+ filter->motionmaskcellsidx[i].columnidx);
+ else
+ g_string_append_printf (str, "%d:%d",
+ filter->motionmaskcellsidx[i].lineidx,
+ filter->motionmaskcellsidx[i].columnidx);
+ }
+ g_value_set_string (value, str->str);
+ g_string_free (str, TRUE);
+ break;
+ case PROP_CELLSCOLOR:
+ str = g_string_new ("");
+
+ g_string_printf (str, "%d,%d,%d",
+ filter->motioncellscolor->R_channel_value,
+ filter->motioncellscolor->G_channel_value,
+ filter->motioncellscolor->B_channel_value);
+
+ g_value_set_string (value, str->str);
+ g_string_free (str, TRUE);
+ break;
+ case PROP_MOTIONCELLSIDX:
+ str = g_string_new ("");
+ for (i = 0; i < filter->motioncells_count; ++i) {
+ if (i < filter->motioncells_count - 1)
+ g_string_append_printf (str, "%d:%d,",
+ filter->motioncellsidx[i].lineidx,
+ filter->motioncellsidx[i].columnidx);
+ else
+ g_string_append_printf (str, "%d:%d",
+ filter->motioncellsidx[i].lineidx,
+ filter->motioncellsidx[i].columnidx);
+ }
+ g_value_set_string (value, str->str);
+ g_string_free (str, TRUE);
+ break;
+ case PROP_MOTIONCELLTHICKNESS:
+ g_value_set_int (value, filter->thickness);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_motioncells_update_motion_cells (GstMotioncells * filter)
+{
+ int i = 0;
+ int cellscnt = 0;
+ int j = 0;
+ int newcellscnt;
+ motioncellidx *motioncellsidx;
+ for (i = 0; i < filter->motioncells_count; i++) {
+ if ((filter->gridx <= filter->motioncellsidx[i].columnidx) ||
+ (filter->gridy <= filter->motioncellsidx[i].lineidx)) {
+ cellscnt++;
+ }
+ }
+ newcellscnt = filter->motioncells_count - cellscnt;
+ motioncellsidx = g_new0 (motioncellidx, newcellscnt);
+ for (i = 0; i < filter->motioncells_count; i++) {
+ if ((filter->motioncellsidx[i].lineidx < filter->gridy) &&
+ (filter->motioncellsidx[i].columnidx < filter->gridx)) {
+ motioncellsidx[j].lineidx = filter->motioncellsidx[i].lineidx;
+ motioncellsidx[j].columnidx = filter->motioncellsidx[i].columnidx;
+ j++;
+ }
+ }
+ GFREE (filter->motioncellsidx);
+ filter->motioncells_count = newcellscnt;
+ filter->motioncellsidx = g_new0 (motioncellidx, filter->motioncells_count);
+ j = 0;
+ for (i = 0; i < filter->motioncells_count; i++) {
+ filter->motioncellsidx[i].lineidx = motioncellsidx[j].lineidx;
+ filter->motioncellsidx[i].columnidx = motioncellsidx[j].columnidx;
+ j++;
+ }
+ GFREE (motioncellsidx);
+}
+
+static void
+gst_motioncells_update_motion_masks (GstMotioncells * filter)
+{
+
+ int i = 0;
+ int maskcnt = 0;
+ int j = 0;
+ int newmaskcnt;
+ motioncellidx *motionmaskcellsidx;
+ for (i = 0; i < filter->motionmaskcells_count; i++) {
+ if ((filter->gridx <= filter->motionmaskcellsidx[i].columnidx) ||
+ (filter->gridy <= filter->motionmaskcellsidx[i].lineidx)) {
+ maskcnt++;
+ }
+ }
+ newmaskcnt = filter->motionmaskcells_count - maskcnt;
+ motionmaskcellsidx = g_new0 (motioncellidx, newmaskcnt);
+ for (i = 0; i < filter->motionmaskcells_count; i++) {
+ if ((filter->motionmaskcellsidx[i].lineidx < filter->gridy) &&
+ (filter->motionmaskcellsidx[i].columnidx < filter->gridx)) {
+ motionmaskcellsidx[j].lineidx = filter->motionmaskcellsidx[i].lineidx;
+ motionmaskcellsidx[j].columnidx = filter->motionmaskcellsidx[i].columnidx;
+ j++;
+ }
+ }
+ GFREE (filter->motionmaskcellsidx);
+ filter->motionmaskcells_count = newmaskcnt;
+ filter->motionmaskcellsidx =
+ g_new0 (motioncellidx, filter->motionmaskcells_count);
+ j = 0;
+ for (i = 0; i < filter->motionmaskcells_count; i++) {
+ filter->motionmaskcellsidx[i].lineidx = motionmaskcellsidx[j].lineidx;
+ filter->motionmaskcellsidx[i].columnidx = motionmaskcellsidx[j].columnidx;
+ j++;
+ }
+ GFREE (motionmaskcellsidx);
+}
+
+/* GstElement vmethod implementations */
+
+/* this function handles the link with other elements */
+static gboolean
+gst_motion_cells_set_caps (GstPad * pad, GstCaps * caps)
+{
+ GstMotioncells *filter;
+ GstPad *otherpad;
+ GstStructure *structure;
+ int numerator, denominator;
+
+ filter = gst_motion_cells (gst_pad_get_parent (pad));
+ structure = gst_caps_get_structure (caps, 0);
+ gst_structure_get_int (structure, "width", &filter->width);
+ gst_structure_get_int (structure, "height", &filter->height);
+ gst_structure_get_fraction (structure, "framerate", &numerator, &denominator);
+ filter->framerate = (double) numerator / (double) denominator;
+ if (filter->cvImage)
+ cvReleaseImage (&filter->cvImage);
+ filter->cvImage =
+ cvCreateImage (cvSize (filter->width, filter->height), IPL_DEPTH_8U, 3);
+
+ otherpad = (pad == filter->srcpad) ? filter->sinkpad : filter->srcpad;
+ gst_object_unref (filter);
+
+ return gst_pad_set_caps (otherpad, caps);
+}
+
+/* chain function
+ * this function does the actual processing
+ */
+static GstFlowReturn
+gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
+{
+
+ GstMotioncells *filter;
+
+ filter = gst_motion_cells (GST_OBJECT_PARENT (pad));
+ if (filter->calculate_motion) {
+ double sensitivity;
+ int framerate, gridx, gridy, motionmaskcells_count, motionmaskcoord_count,
+ motioncells_count, i;
+ int thickness, success, motioncellsidxcnt, numberOfCells,
+ motioncellsnumber, cellsOfInterestNumber;
+ int mincellsOfInterestNumber, motiondetect;
+ char *datafile;
+ bool display, changed_datafile, useAlpha;
+ gint64 starttime;
+ motionmaskcoordrect *motionmaskcoords;
+ motioncellidx *motionmaskcellsidx;
+ cellscolor motioncellscolor;
+ motioncellidx *motioncellsidx;
+ g_mutex_lock (filter->propset_mutex);
+ buf = gst_buffer_make_writable (buf);
+ filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);
+ if (filter->firstframe) {
+ setPrevFrame (filter->cvImage, filter->id);
+ filter->firstframe = FALSE;
+ }
+
+ sensitivity = filter->sensitivity;
+ framerate = filter->framerate;
+ gridx = filter->gridx;
+ gridy = filter->gridy;
+ display = filter->display;
+ motionmaskcoord_count = filter->motionmaskcoord_count;
+ motionmaskcoords =
+ g_new0 (motionmaskcoordrect, filter->motionmaskcoord_count);
+ for (i = 0; i < filter->motionmaskcoord_count; i++) { //we need divide 2 because we use gauss pyramid in C++ side
+ motionmaskcoords[i].upper_left_x =
+ filter->motionmaskcoords[i].upper_left_x / 2;
+ motionmaskcoords[i].upper_left_y =
+ filter->motionmaskcoords[i].upper_left_y / 2;
+ motionmaskcoords[i].lower_right_x =
+ filter->motionmaskcoords[i].lower_right_x / 2;
+ motionmaskcoords[i].lower_right_y =
+ filter->motionmaskcoords[i].lower_right_y / 2;
+ }
+
+ motioncellscolor.R_channel_value =
+ filter->motioncellscolor->R_channel_value;
+ motioncellscolor.G_channel_value =
+ filter->motioncellscolor->G_channel_value;
+ motioncellscolor.B_channel_value =
+ filter->motioncellscolor->B_channel_value;
+
+ if ((filter->changed_gridx || filter->changed_gridy
+ || filter->changed_startime)) {
+ if ((g_strcmp0 (filter->cur_datafile, NULL) != 0)) {
+ GFREE (filter->cur_datafile);
+ filter->datafileidx++;
+ filter->cur_datafile =
+ g_strdup_printf ("%s-%d.%s", filter->basename_datafile,
+ filter->datafileidx, filter->datafile_extension);
+ filter->changed_datafile = TRUE;
+ motion_cells_free_resources (filter->id);
+ }
+ if (filter->motioncells_count > 0)
+ gst_motioncells_update_motion_cells (filter);
+ if (filter->motionmaskcells_count > 0)
+ gst_motioncells_update_motion_masks (filter);
+ filter->changed_gridx = FALSE;
+ filter->changed_gridy = FALSE;
+ filter->changed_startime = FALSE;
+ }
+ datafile = g_strdup (filter->cur_datafile);
+ filter->cur_buff_timestamp = (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND);
+ filter->starttime +=
+ (filter->cur_buff_timestamp - filter->prev_buff_timestamp);
+ starttime = filter->starttime;
+ if (filter->changed_datafile || filter->diff_timestamp < 0)
+ filter->diff_timestamp =
+ (gint64) (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND);
+ changed_datafile = filter->changed_datafile;
+ motionmaskcells_count = filter->motionmaskcells_count;
+ motionmaskcellsidx = g_new0 (motioncellidx, filter->motionmaskcells_count);
+ for (i = 0; i < filter->motionmaskcells_count; i++) {
+ motionmaskcellsidx[i].lineidx = filter->motionmaskcellsidx[i].lineidx;
+ motionmaskcellsidx[i].columnidx = filter->motionmaskcellsidx[i].columnidx;
+ }
+ motioncells_count = filter->motioncells_count;
+ motioncellsidx = g_new0 (motioncellidx, filter->motioncells_count);
+ for (i = 0; i < filter->motioncells_count; i++) {
+ motioncellsidx[i].lineidx = filter->motioncellsidx[i].lineidx;
+ motioncellsidx[i].columnidx = filter->motioncellsidx[i].columnidx;
+ }
+ useAlpha = filter->usealpha;
+ thickness = filter->thickness;
+ success =
+ perform_detection_motion_cells (filter->cvImage, sensitivity, framerate,
+ gridx, gridy,
+ (gint64) (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND) -
+ filter->diff_timestamp, display, useAlpha, motionmaskcoord_count,
+ motionmaskcoords, motionmaskcells_count, motionmaskcellsidx,
+ motioncellscolor, motioncells_count, motioncellsidx, starttime,
+ datafile, changed_datafile, thickness, filter->id);
+ if ((success == 1) && (filter->sent_init_error_msg == false)) {
+ char *initfailedreason;
+ int initerrorcode;
+ GstStructure *s;
+ GstMessage *m;
+ initfailedreason = getInitDataFileFailed (filter->id);
+ initerrorcode = getInitErrorCode (filter->id);
+ s = gst_structure_new ("motion", "init_error_code", G_TYPE_INT,
+ initerrorcode, "details", G_TYPE_STRING, initfailedreason, NULL);
+ m = gst_message_new_element (GST_OBJECT (filter), s);
+ gst_element_post_message (GST_ELEMENT (filter), m);
+ filter->sent_init_error_msg = TRUE;
+ }
+ if ((success == -1) && (filter->sent_save_error_msg == false)) {
+ char *savefailedreason;
+ int saveerrorcode;
+ GstStructure *s;
+ GstMessage *m;
+ savefailedreason = getSaveDataFileFailed (filter->id);
+ saveerrorcode = getSaveErrorCode (filter->id);
+ s = gst_structure_new ("motion", "save_error_code", G_TYPE_INT,
+ saveerrorcode, "details", G_TYPE_STRING, savefailedreason, NULL);
+ m = gst_message_new_element (GST_OBJECT (filter), s);
+ gst_element_post_message (GST_ELEMENT (filter), m);
+ filter->sent_save_error_msg = TRUE;
+ }
+ if (success == -2) { //frame dropped
+ filter->prev_buff_timestamp = filter->cur_buff_timestamp;
+ //free
+ GFREE (datafile);
+ GFREE (motionmaskcoords);
+ GFREE (motionmaskcellsidx);
+ GFREE (motioncellsidx);
+ g_mutex_unlock (filter->propset_mutex);
+ return gst_pad_push (filter->srcpad, buf);
+ }
+ filter->changed_datafile = getChangedDataFile (filter->id);
+ motioncellsidxcnt = getMotionCellsIdxCnt (filter->id);
+ numberOfCells = filter->gridx * filter->gridy;
+ motioncellsnumber = motioncellsidxcnt / MSGLEN;
+ cellsOfInterestNumber = (filter->motioncells_count > 0) ? //how many cells interest for us
+ (filter->motioncells_count) : (numberOfCells);
+ mincellsOfInterestNumber =
+ floor ((double) cellsOfInterestNumber * filter->threshold);
+ motiondetect = (motioncellsnumber >= mincellsOfInterestNumber) ? 1 : 0;
+ if ((motioncellsidxcnt > 0) && (motiondetect == 1)) {
+ char *detectedmotioncells;
+ filter->last_motion_timestamp = GST_BUFFER_TIMESTAMP (buf);
+ detectedmotioncells = getMotionCellsIdx (filter->id);
+ if (detectedmotioncells) {
+ filter->consecutive_motion++;
+ if ((filter->previous_motion == false)
+ && (filter->consecutive_motion >= filter->minimum_motion_frames)) {
+ GstStructure *s;
+ GstMessage *m;
+ filter->previous_motion = true;
+ filter->motion_begin_timestamp = GST_BUFFER_TIMESTAMP (buf);
+ s = gst_structure_new ("motion", "motion_cells_indices",
+ G_TYPE_STRING, detectedmotioncells, "motion_begin", G_TYPE_UINT64,
+ filter->motion_begin_timestamp, NULL);
+ m = gst_message_new_element (GST_OBJECT (filter), s);
+ gst_element_post_message (GST_ELEMENT (filter), m);
+ } else if (filter->postallmotion) {
+ GstStructure *s;
+ GstMessage *m;
+ filter->motion_timestamp = GST_BUFFER_TIMESTAMP (buf);
+ s = gst_structure_new ("motion", "motion_cells_indices",
+ G_TYPE_STRING, detectedmotioncells, "motion", G_TYPE_UINT64,
+ filter->motion_timestamp, NULL);
+ m = gst_message_new_element (GST_OBJECT (filter), s);
+ gst_element_post_message (GST_ELEMENT (filter), m);
+ }
+ } else {
+ GstStructure *s;
+ GstMessage *m;
+ s = gst_structure_new ("motion", "motion_cells_indices", G_TYPE_STRING,
+ "error", NULL);
+ m = gst_message_new_element (GST_OBJECT (filter), s);
+ gst_element_post_message (GST_ELEMENT (filter), m);
+ }
+ } else {
+ filter->consecutive_motion = 0;
+ if ((((GST_BUFFER_TIMESTAMP (buf) -
+ filter->last_motion_timestamp) / 1000000000l) >=
+ filter->gap)
+ && (filter->last_motion_timestamp > 0)) {
+ GST_DEBUG ("POST MOTION FINISHED MSG\n");
+ if (filter->previous_motion) {
+ GstStructure *s;
+ GstMessage *m;
+ filter->previous_motion = false;
+ s = gst_structure_new ("motion", "motion_finished", G_TYPE_UINT64,
+ filter->last_motion_timestamp, NULL);
+ m = gst_message_new_element (GST_OBJECT (filter), s);
+ gst_element_post_message (GST_ELEMENT (filter), m);
+ }
+ }
+ }
+ if (filter->postnomotion > 0) {
+ guint64 last_buf_timestamp = GST_BUFFER_TIMESTAMP (buf) / 1000000000l;
+ if ((last_buf_timestamp -
+ (filter->last_motion_timestamp / 1000000000l)) >=
+ filter->postnomotion) {
+ GST_DEBUG ("POST NO MOTION MSG\n");
+ if ((last_buf_timestamp -
+ (filter->last_nomotion_notified / 1000000000l)) >=
+ filter->postnomotion) {
+ GstStructure *s;
+ GstMessage *m;
+ filter->last_nomotion_notified = GST_BUFFER_TIMESTAMP (buf);
+ s = gst_structure_new ("motion", "no_motion", G_TYPE_UINT64,
+ filter->last_motion_timestamp, NULL);
+ m = gst_message_new_element (GST_OBJECT (filter), s);
+ gst_element_post_message (GST_ELEMENT (filter), m);
+ }
+ }
+ }
+ filter->prev_buff_timestamp = filter->cur_buff_timestamp;
+ //free
+ GFREE (datafile);
+ GFREE (motionmaskcoords);
+ GFREE (motionmaskcellsidx);
+ GFREE (motioncellsidx);
+
+ g_mutex_unlock (filter->propset_mutex);
+ }
+
+ return gst_pad_push (filter->srcpad, buf);
+}
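
The chain function above reports its results as element messages on the bus, all using a GstStructure named "motion" with the fields posted in the code ("motion_begin", "motion", "motion_finished", "no_motion" and "motion_cells_indices"). A minimal sketch of an application-side bus watch follows; only the structure and field names come from this patch, the callback and its wiring are illustrative assumptions.

  /* illustrative only: react to motioncells element messages */
  static gboolean
  bus_watch_cb (GstBus * bus, GstMessage * msg, gpointer user_data)
  {
    if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
      const GstStructure *s = gst_message_get_structure (msg);

      if (s && gst_structure_has_name (s, "motion")) {
        if (gst_structure_has_field (s, "motion_begin"))
          g_print ("motion started in cells %s\n",
              gst_structure_get_string (s, "motion_cells_indices"));
        else if (gst_structure_has_field (s, "motion_finished"))
          g_print ("motion finished\n");
        else if (gst_structure_has_field (s, "no_motion"))
          g_print ("no motion for the configured interval\n");
      }
    }
    return TRUE;
  }
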
+
+/* entry point to initialize the plug-in
+ * initialize the plug-in itself
+ * register the element factories and other features
+ */
+gboolean
+gst_motioncells_plugin_init (GstPlugin * plugin)
+{
+ /* debug category for filtering log messages */
+ GST_DEBUG_CATEGORY_INIT (gst_motion_cells_debug,
+ "motioncells",
+ 0,
+ "Performs motion detection on videos, providing detected positions via bus messages");
+
+ return gst_element_register (plugin, "motioncells", GST_RANK_NONE,
+ GST_TYPE_MOTIONCELLS);
+}
diff --git a/ext/opencv/gstmotioncells.h b/ext/opencv/gstmotioncells.h
new file mode 100644
index 000000000..d26a2d6dd
--- /dev/null
+++ b/ext/opencv/gstmotioncells.h
@@ -0,0 +1,124 @@
+/*
+ * GStreamer
+ * Copyright (C) 2011 Robert Jobbagy <jobbagy.robert@gmail.com>
+ * Copyright (C) 2011 Nicola Murino <nicola.murino@gmail.com>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ *
+ * Alternatively, the contents of this file may be used under the
+ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
+ * which case the following provisions apply instead of the ones
+ * mentioned above:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_MOTIONCELLS_H__
+#define __GST_MOTIONCELLS_H__
+
+#include <gst/gst.h>
+#include <cv.h>
+
+G_BEGIN_DECLS
+/* #defines don't like whitespacey bits */
+#define GST_TYPE_MOTIONCELLS \
+ (gst_motion_cells_get_type())
+#define gst_motion_cells(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MOTIONCELLS,GstMotioncells))
+#define gst_motion_cells_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MOTIONCELLS,GstMotioncellsClass))
+#define GST_IS_MOTIONCELLS(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MOTIONCELLS))
+#define GST_IS_MOTIONCELLS_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MOTIONCELLS))
+typedef struct _GstMotioncells GstMotioncells;
+typedef struct _GstMotioncellsClass GstMotioncellsClass;
+
+typedef struct {
+ int upper_left_x;
+ int upper_left_y;
+ int lower_right_x;
+ int lower_right_y;
+} motionmaskcoordrect;
+
+typedef struct {
+ int R_channel_value;
+ int G_channel_value;
+ int B_channel_value;
+} cellscolor;
+
+typedef struct {
+ int lineidx;
+ int columnidx;
+} motioncellidx;
+
+struct _GstMotioncells
+{
+ GstElement element;
+ GstPad *sinkpad, *srcpad;
+ GstState state;
+ gboolean display, calculate_motion, firstgridx, firstgridy, changed_gridx,
+ changed_gridy, changed_startime;
+ gboolean previous_motion, changed_datafile, postallmotion, usealpha,
+ firstdatafile, firstframe;
+ gboolean sent_init_error_msg, sent_save_error_msg;
+ gchar *prev_datafile, *cur_datafile, *basename_datafile, *datafile_extension;
+ gint prevgridx, gridx, prevgridy, gridy, id;
+ gdouble sensitivity, threshold;
+ IplImage *cvImage;
+ motionmaskcoordrect *motionmaskcoords;
+ cellscolor *motioncellscolor;
+ motioncellidx *motioncellsidx, *motionmaskcellsidx;
+ int motionmaskcoord_count, motioncells_count, motionmaskcells_count;
+ int gap, thickness, datafileidx, postnomotion, minimum_motion_frames;
+ guint64 motion_begin_timestamp, last_motion_timestamp, motion_timestamp,
+ last_nomotion_notified, prev_buff_timestamp, cur_buff_timestamp;
+ gint64 diff_timestamp, starttime;
+ guint64 consecutive_motion;
+ gint width, height;
+ //time stuff
+ struct timeval tv;
+ GMutex *propset_mutex;
+ double framerate;
+};
+
+struct _GstMotioncellsClass
+{
+ GstElementClass parent_class;
+};
+
+GType gst_motion_cells_get_type (void);
+
+gboolean gst_motioncells_plugin_init (GstPlugin * plugin);
+
+G_END_DECLS
+#endif /* __GST_MOTION_CELLS_H__ */
diff --git a/ext/opencv/gstopencv.c b/ext/opencv/gstopencv.c
index e12ca7802..8d9def2b2 100644
--- a/ext/opencv/gstopencv.c
+++ b/ext/opencv/gstopencv.c
@@ -32,6 +32,7 @@
#include "gstedgedetect.h"
#include "gstfaceblur.h"
#include "gstfacedetect.h"
+#include "gstmotioncells.h"
#include "gstpyramidsegment.h"
#include "gsttemplatematch.h"
#include "gsttextoverlay.h"
@@ -66,6 +67,9 @@ plugin_init (GstPlugin * plugin)
if (!gst_facedetect_plugin_init (plugin))
return FALSE;
+ if (!gst_motioncells_plugin_init (plugin))
+ return FALSE;
+
if (!gst_pyramidsegment_plugin_init (plugin))
return FALSE;
diff --git a/ext/opencv/motioncells_wrapper.cpp b/ext/opencv/motioncells_wrapper.cpp
new file mode 100644
index 000000000..b768f9ece
--- /dev/null
+++ b/ext/opencv/motioncells_wrapper.cpp
@@ -0,0 +1,213 @@
+/*
+ * GStreamer
+ * Copyright (C) 2011 Robert Jobbagy <jobbagy.robert@gmail.com>
+ * Copyright (C) 2011 Nicola Murino <nicola.murino@gmail.com>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ *
+ * Alternatively, the contents of this file may be used under the
+ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
+ * which case the following provisions apply instead of the ones
+ * mentioned above:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include <stdio.h>
+#include <limits.h>
+#include "motioncells_wrapper.h"
+
+extern int instanceCounter;
+extern bool element_id_was_max;
+MotionCells *mc;
+char p_str[] = "idx failed";
+
+void
+motion_cells_init ()
+{
+ mc = new MotionCells ();
+ instanceOfMC tmpmc;
+ tmpmc.id = instanceCounter;
+ tmpmc.mc = mc;
+ motioncellsvector.push_back (tmpmc);
+ if ((instanceCounter < INT_MAX) && !element_id_was_max) {
+ instanceCounter++;
+ element_id_was_max = false;
+ } else {
+ element_id_was_max = true;
+ instanceCounter = motioncellsfreeids.back ();
+ motioncellsfreeids.pop_back ();
+ }
+}
+
+int
+perform_detection_motion_cells (IplImage * p_image, double p_sensitivity,
+ double p_framerate, int p_gridx, int p_gridy, long int p_timestamp_millisec,
+ bool p_isVisible, bool p_useAlpha, int motionmaskcoord_count,
+ motionmaskcoordrect * motionmaskcoords, int motionmaskcells_count,
+ motioncellidx * motionmaskcellsidx, cellscolor motioncellscolor,
+ int motioncells_count, motioncellidx * motioncellsidx, gint64 starttime,
+ char *p_datafile, bool p_changed_datafile, int p_thickness, int p_id)
+{
+ int idx = 0;
+ idx = searchIdx (p_id);
+ return motioncellsvector.at (idx).mc->performDetectionMotionCells (p_image,
+ p_sensitivity, p_framerate, p_gridx, p_gridy, p_timestamp_millisec,
+ p_isVisible, p_useAlpha, motionmaskcoord_count, motionmaskcoords,
+ motionmaskcells_count, motionmaskcellsidx, motioncellscolor,
+ motioncells_count, motioncellsidx, starttime, p_datafile,
+ p_changed_datafile, p_thickness);
+}
+
+
+void
+setPrevFrame (IplImage * p_prevFrame, int p_id)
+{
+ int idx = 0;
+ idx = searchIdx (p_id);
+ motioncellsvector.at (idx).mc->setPrevFrame (p_prevFrame);
+}
+
+char *
+getMotionCellsIdx (int p_id)
+{
+ int idx = 0;
+ idx = searchIdx (p_id);
+ if (idx > -1)
+ return motioncellsvector.at (idx).mc->getMotionCellsIdx ();
+ else {
+ return p_str;
+ }
+
+}
+
+int
+getMotionCellsIdxCnt (int p_id)
+{
+ int idx = 0;
+ idx = searchIdx (p_id);
+ if (idx > -1)
+ return motioncellsvector.at (idx).mc->getMotionCellsIdxCount ();
+ else
+ return 0;
+}
+
+bool
+getChangedDataFile (int p_id)
+{
+ int idx = 0;
+ idx = searchIdx (p_id);
+ if (idx > -1)
+ return motioncellsvector.at (idx).mc->getChangedDataFile ();
+ else
+ return false;
+}
+
+int
+searchIdx (int p_id)
+{
+ for (unsigned int i = 0; i < motioncellsvector.size (); i++) {
+ instanceOfMC tmpmc;
+ tmpmc = motioncellsvector.at (i);
+ if (tmpmc.id == p_id) {
+ return i;
+ }
+ }
+ return -1;
+}
+
+char *
+getInitDataFileFailed (int p_id)
+{
+ int idx = 0;
+ idx = searchIdx (p_id);
+ if (idx > -1)
+ return motioncellsvector.at (idx).mc->getDatafileInitFailed ();
+ else {
+ return p_str;
+ }
+}
+
+char *
+getSaveDataFileFailed (int p_id)
+{
+ int idx = 0;
+ idx = searchIdx (p_id);
+ if (idx > -1)
+ return motioncellsvector.at (idx).mc->getDatafileSaveFailed ();
+ else {
+ return p_str;
+ }
+}
+
+int
+getInitErrorCode (int p_id)
+{
+ int idx = 0;
+ idx = searchIdx (p_id);
+ if (idx > -1)
+ return motioncellsvector.at (idx).mc->getInitErrorCode ();
+ else
+ return -1;
+}
+
+int
+getSaveErrorCode (int p_id)
+{
+ int idx = 0;
+ idx = searchIdx (p_id);
+ if (idx > -1)
+ return motioncellsvector.at (idx).mc->getSaveErrorCode ();
+ else
+ return -1;
+}
+
+void
+motion_cells_free (int p_id)
+{
+ int idx = 0;
+ idx = searchIdx (p_id);
+ if (idx > -1) {
+ delete motioncellsvector.at (idx).mc;
+ motioncellsvector.erase (motioncellsvector.begin () + idx);
+ motioncellsfreeids.push_back (p_id);
+ }
+}
+
+void
+motion_cells_free_resources (int p_id)
+{
+ int idx = 0;
+ idx = searchIdx (p_id);
+ if (idx > -1)
+ motioncellsvector.at (idx).mc->freeDataFile ();
+}
diff --git a/ext/opencv/motioncells_wrapper.h b/ext/opencv/motioncells_wrapper.h
new file mode 100644
index 000000000..0feaafa8b
--- /dev/null
+++ b/ext/opencv/motioncells_wrapper.h
@@ -0,0 +1,89 @@
+/*
+ * GStreamer
+ * Copyright (C) 2011 Robert Jobbagy <jobbagy.robert@gmail.com>
+ * Copyright (C) 2011 Nicola Murino <nicola.murino@gmail.com>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ *
+ * Alternatively, the contents of this file may be used under the
+ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
+ * which case the following provisions apply instead of the ones
+ * mentioned above:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef MOTIONCELLS_WRAPPER_H
+#define MOTIONCELLS_WRAPPER_H
+
+#include <stdbool.h>
+
+#ifdef __cplusplus
+#include "MotionCells.h"
+struct instanceOfMC
+{
+ int id;
+ MotionCells *mc;
+};
+vector < instanceOfMC > motioncellsvector;
+vector < int >motioncellsfreeids;
+
+int searchIdx (int p_id);
+extern "C"
+{
+#endif
+
+ void motion_cells_init ();
+ int perform_detection_motion_cells (IplImage * p_image, double p_sensitivity,
+ double p_framerate, int p_gridx, int p_gridy,
+ long int p_timestamp_millisec, bool p_isVisible, bool p_useAlpha,
+ int motionmaskcoord_count, motionmaskcoordrect * motionmaskcoords,
+ int motionmaskcells_count, motioncellidx * motionmaskcellsidx,
+ cellscolor motioncellscolor, int motioncells_count,
+ motioncellidx * motioncellsidx, gint64 starttime, char *datafile,
+ bool p_changed_datafile, int p_thickness, int p_id);
+ void setPrevFrame (IplImage * p_prevFrame, int p_id);
+ void motion_cells_free (int p_id);
+ void motion_cells_free_resources (int p_id);
+ char *getMotionCellsIdx (int p_id);
+ int getMotionCellsIdxCnt (int p_id);
+ bool getChangedDataFile (int p_id);
+ char *getInitDataFileFailed (int p_id);
+ char *getSaveDataFileFailed (int p_id);
+ int getInitErrorCode (int p_id);
+ int getSaveErrorCode (int p_id);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* MOTIONCELLS_WRAPPER_H */
diff --git a/ext/opus/Makefile.am b/ext/opus/Makefile.am
new file mode 100644
index 000000000..aa50ba96e
--- /dev/null
+++ b/ext/opus/Makefile.am
@@ -0,0 +1,16 @@
+plugin_LTLIBRARIES = libgstopus.la
+
+libgstopus_la_SOURCES = gstopus.c gstopusdec.c gstopusenc.c
+libgstopus_la_CFLAGS = \
+ $(GST_PLUGINS_BASE_CFLAGS) \
+ $(GST_CFLAGS) \
+ $(OPUS_CFLAGS)
+libgstopus_la_LIBADD = \
+ $(GST_PLUGINS_BASE_LIBS) -lgsttag-$(GST_MAJORMINOR) \
+ $(GST_BASE_LIBS) \
+ $(GST_LIBS) \
+ $(OPUS_LIBS)
+libgstopus_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) $(LIBM)
+libgstopus_la_LIBTOOLFLAGS = --tag=disable-static
+
+noinst_HEADERS = gstopusenc.h gstopusdec.h
diff --git a/ext/opus/gstopus.c b/ext/opus/gstopus.c
new file mode 100644
index 000000000..65e9dcdc5
--- /dev/null
+++ b/ext/opus/gstopus.c
@@ -0,0 +1,50 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2008> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include "gstopusdec.h"
+#include "gstopusenc.h"
+
+#include <gst/tag/tag.h>
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+
+ if (!gst_element_register (plugin, "opusenc", GST_RANK_NONE,
+ GST_TYPE_OPUS_ENC))
+ return FALSE;
+
+ if (!gst_element_register (plugin, "opusdec", GST_RANK_PRIMARY,
+ GST_TYPE_OPUS_DEC))
+ return FALSE;
+
+ gst_tag_register_musicbrainz_tags ();
+
+ return TRUE;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "opus",
+ "OPUS plugin library",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/ext/opus/gstopusdec.c b/ext/opus/gstopusdec.c
new file mode 100644
index 000000000..47c06cec0
--- /dev/null
+++ b/ext/opus/gstopusdec.c
@@ -0,0 +1,865 @@
+/* GStreamer
+ * Copyright (C) 2004 Wim Taymans <wim@fluendo.com>
+ * Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2008 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/*
+ * Based on the speexdec element.
+ */
+
+/**
+ * SECTION:element-opusdec
+ * @see_also: opusenc, oggdemux
+ *
+ * This element decodes an Opus stream to raw integer audio.
+ *
+ * <refsect2>
+ * <title>Example pipelines</title>
+ * |[
+ * gst-launch -v filesrc location=opus.ogg ! oggdemux ! opusdec ! audioconvert ! audioresample ! alsasink
+ * ]| Decode an Ogg/Opus file. To create an Ogg/Opus file refer to the documentation of opusenc.
+ * </refsect2>
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include "gstopusdec.h"
+#include <string.h>
+#include <gst/tag/tag.h>
+
+GST_DEBUG_CATEGORY_STATIC (opusdec_debug);
+#define GST_CAT_DEFAULT opusdec_debug
+
+#define DEC_MAX_FRAME_SIZE 2000
+
+static GstStaticPadTemplate opus_dec_src_factory =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw-int, "
+ "rate = (int) [ 32000, 64000 ], "
+ "channels = (int) [ 1, 2 ], "
+ "endianness = (int) BYTE_ORDER, "
+ "signed = (boolean) true, " "width = (int) 16, " "depth = (int) 16")
+ );
+
+static GstStaticPadTemplate opus_dec_sink_factory =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-opus")
+ );
+
+GST_BOILERPLATE (GstOpusDec, gst_opus_dec, GstElement, GST_TYPE_ELEMENT);
+
+static gboolean opus_dec_sink_event (GstPad * pad, GstEvent * event);
+static GstFlowReturn opus_dec_chain (GstPad * pad, GstBuffer * buf);
+static gboolean opus_dec_sink_setcaps (GstPad * pad, GstCaps * caps);
+static GstStateChangeReturn opus_dec_change_state (GstElement * element,
+ GstStateChange transition);
+
+static gboolean opus_dec_src_event (GstPad * pad, GstEvent * event);
+static gboolean opus_dec_src_query (GstPad * pad, GstQuery * query);
+static gboolean opus_dec_sink_query (GstPad * pad, GstQuery * query);
+static const GstQueryType *opus_get_src_query_types (GstPad * pad);
+static const GstQueryType *opus_get_sink_query_types (GstPad * pad);
+static gboolean opus_dec_convert (GstPad * pad,
+ GstFormat src_format, gint64 src_value,
+ GstFormat * dest_format, gint64 * dest_value);
+
+static GstFlowReturn opus_dec_chain_parse_data (GstOpusDec * dec,
+ GstBuffer * buf, GstClockTime timestamp, GstClockTime duration);
+static GstFlowReturn opus_dec_chain_parse_header (GstOpusDec * dec,
+ GstBuffer * buf);
+#if 0
+static GstFlowReturn opus_dec_chain_parse_comments (GstOpusDec * dec,
+ GstBuffer * buf);
+#endif
+
+static void
+gst_opus_dec_base_init (gpointer g_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&opus_dec_src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&opus_dec_sink_factory));
+ gst_element_class_set_details_simple (element_class, "Opus audio decoder",
+ "Codec/Decoder/Audio",
+ "decode opus streams to audio",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+}
+
+static void
+gst_opus_dec_class_init (GstOpusDecClass * klass)
+{
+ GstElementClass *gstelement_class;
+
+ gstelement_class = (GstElementClass *) klass;
+
+ gstelement_class->change_state = GST_DEBUG_FUNCPTR (opus_dec_change_state);
+
+ GST_DEBUG_CATEGORY_INIT (opusdec_debug, "opusdec", 0,
+ "opus decoding element");
+}
+
+static void
+gst_opus_dec_reset (GstOpusDec * dec)
+{
+ gst_segment_init (&dec->segment, GST_FORMAT_UNDEFINED);
+ dec->granulepos = -1;
+ dec->packetno = 0;
+ dec->frame_size = 0;
+ dec->frame_samples = 960;
+ dec->frame_duration = 0;
+ if (dec->state) {
+ opus_decoder_destroy (dec->state);
+ dec->state = NULL;
+ }
+#if 0
+ if (dec->mode) {
+ opus_mode_destroy (dec->mode);
+ dec->mode = NULL;
+ }
+#endif
+
+ gst_buffer_replace (&dec->streamheader, NULL);
+ gst_buffer_replace (&dec->vorbiscomment, NULL);
+ g_list_foreach (dec->extra_headers, (GFunc) gst_mini_object_unref, NULL);
+ g_list_free (dec->extra_headers);
+ dec->extra_headers = NULL;
+
+#if 0
+ memset (&dec->header, 0, sizeof (dec->header));
+#endif
+}
+
+static void
+gst_opus_dec_init (GstOpusDec * dec, GstOpusDecClass * g_class)
+{
+ dec->sinkpad =
+ gst_pad_new_from_static_template (&opus_dec_sink_factory, "sink");
+ gst_pad_set_chain_function (dec->sinkpad, GST_DEBUG_FUNCPTR (opus_dec_chain));
+ gst_pad_set_event_function (dec->sinkpad,
+ GST_DEBUG_FUNCPTR (opus_dec_sink_event));
+ gst_pad_set_query_type_function (dec->sinkpad,
+ GST_DEBUG_FUNCPTR (opus_get_sink_query_types));
+ gst_pad_set_query_function (dec->sinkpad,
+ GST_DEBUG_FUNCPTR (opus_dec_sink_query));
+ gst_pad_set_setcaps_function (dec->sinkpad,
+ GST_DEBUG_FUNCPTR (opus_dec_sink_setcaps));
+ gst_element_add_pad (GST_ELEMENT (dec), dec->sinkpad);
+
+ dec->srcpad = gst_pad_new_from_static_template (&opus_dec_src_factory, "src");
+ gst_pad_use_fixed_caps (dec->srcpad);
+ gst_pad_set_event_function (dec->srcpad,
+ GST_DEBUG_FUNCPTR (opus_dec_src_event));
+ gst_pad_set_query_type_function (dec->srcpad,
+ GST_DEBUG_FUNCPTR (opus_get_src_query_types));
+ gst_pad_set_query_function (dec->srcpad,
+ GST_DEBUG_FUNCPTR (opus_dec_src_query));
+ gst_element_add_pad (GST_ELEMENT (dec), dec->srcpad);
+
+ dec->sample_rate = 48000;
+ dec->n_channels = 2;
+
+ gst_opus_dec_reset (dec);
+}
+
+static gboolean
+opus_dec_sink_setcaps (GstPad * pad, GstCaps * caps)
+{
+ GstOpusDec *dec = GST_OPUS_DEC (gst_pad_get_parent (pad));
+ gboolean ret = TRUE;
+ GstStructure *s;
+ const GValue *streamheader;
+
+ s = gst_caps_get_structure (caps, 0);
+ if ((streamheader = gst_structure_get_value (s, "streamheader")) &&
+ G_VALUE_HOLDS (streamheader, GST_TYPE_ARRAY) &&
+ gst_value_array_get_size (streamheader) >= 2) {
+ const GValue *header;
+ GstBuffer *buf;
+ GstFlowReturn res = GST_FLOW_OK;
+
+ header = gst_value_array_get_value (streamheader, 0);
+ if (header && G_VALUE_HOLDS (header, GST_TYPE_BUFFER)) {
+ buf = gst_value_get_buffer (header);
+ res = opus_dec_chain_parse_header (dec, buf);
+ if (res != GST_FLOW_OK)
+ goto done;
+ gst_buffer_replace (&dec->streamheader, buf);
+ }
+#if 0
+ vorbiscomment = gst_value_array_get_value (streamheader, 1);
+ if (vorbiscomment && G_VALUE_HOLDS (vorbiscomment, GST_TYPE_BUFFER)) {
+ buf = gst_value_get_buffer (vorbiscomment);
+ res = opus_dec_chain_parse_comments (dec, buf);
+ if (res != GST_FLOW_OK)
+ goto done;
+ gst_buffer_replace (&dec->vorbiscomment, buf);
+ }
+#endif
+
+ g_list_foreach (dec->extra_headers, (GFunc) gst_mini_object_unref, NULL);
+ g_list_free (dec->extra_headers);
+ dec->extra_headers = NULL;
+
+ if (gst_value_array_get_size (streamheader) > 2) {
+ gint i, n;
+
+ n = gst_value_array_get_size (streamheader);
+ for (i = 2; i < n; i++) {
+ header = gst_value_array_get_value (streamheader, i);
+ buf = gst_value_get_buffer (header);
+ dec->extra_headers =
+ g_list_prepend (dec->extra_headers, gst_buffer_ref (buf));
+ }
+ }
+ }
+
+done:
+ gst_object_unref (dec);
+ return ret;
+}
+
+static gboolean
+opus_dec_convert (GstPad * pad,
+ GstFormat src_format, gint64 src_value,
+ GstFormat * dest_format, gint64 * dest_value)
+{
+ gboolean res = TRUE;
+ GstOpusDec *dec;
+ guint64 scale = 1;
+
+ dec = GST_OPUS_DEC (gst_pad_get_parent (pad));
+
+ if (dec->packetno < 1) {
+ res = FALSE;
+ goto cleanup;
+ }
+
+ if (src_format == *dest_format) {
+ *dest_value = src_value;
+ res = TRUE;
+ goto cleanup;
+ }
+
+ if (pad == dec->sinkpad &&
+ (src_format == GST_FORMAT_BYTES || *dest_format == GST_FORMAT_BYTES)) {
+ res = FALSE;
+ goto cleanup;
+ }
+
+ switch (src_format) {
+ case GST_FORMAT_TIME:
+ switch (*dest_format) {
+ case GST_FORMAT_BYTES:
+ scale = sizeof (gint16) * dec->n_channels;
+ case GST_FORMAT_DEFAULT:
+ *dest_value =
+ gst_util_uint64_scale_int (scale * src_value,
+ dec->sample_rate, GST_SECOND);
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ case GST_FORMAT_DEFAULT:
+ switch (*dest_format) {
+ case GST_FORMAT_BYTES:
+ *dest_value = src_value * sizeof (gint16) * dec->n_channels;
+ break;
+ case GST_FORMAT_TIME:
+ *dest_value =
+ gst_util_uint64_scale_int (src_value, GST_SECOND,
+ dec->sample_rate);
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ case GST_FORMAT_BYTES:
+ switch (*dest_format) {
+ case GST_FORMAT_DEFAULT:
+ *dest_value = src_value / (sizeof (gint16) * dec->n_channels);
+ break;
+ case GST_FORMAT_TIME:
+ *dest_value = gst_util_uint64_scale_int (src_value, GST_SECOND,
+ dec->sample_rate * sizeof (gint16) * dec->n_channels);
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+
+cleanup:
+ gst_object_unref (dec);
+ return res;
+}
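
The conversion helper above only needs the configured sample rate and the 16-bit, n_channels sample layout. A small worked example, assuming the defaults set in gst_opus_dec_init (48000 Hz, 2 channels); the concrete figures are illustrative only:

  /* illustrative numbers only */
  /* TIME  -> DEFAULT: 1 second    -> 48000 samples                        */
  /* TIME  -> BYTES:   1 second    -> 48000 * 2 * sizeof (gint16)
   *                                = 192000 bytes                         */
  /* BYTES -> TIME:    96000 bytes -> 96000 / 4 samples = 24000 samples
   *                                -> 0.5 second                          */
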
+
+static const GstQueryType *
+opus_get_sink_query_types (GstPad * pad)
+{
+ static const GstQueryType opus_dec_sink_query_types[] = {
+ GST_QUERY_CONVERT,
+ 0
+ };
+
+ return opus_dec_sink_query_types;
+}
+
+static gboolean
+opus_dec_sink_query (GstPad * pad, GstQuery * query)
+{
+ GstOpusDec *dec;
+ gboolean res;
+
+ dec = GST_OPUS_DEC (gst_pad_get_parent (pad));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CONVERT:
+ {
+ GstFormat src_fmt, dest_fmt;
+ gint64 src_val, dest_val;
+
+ gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
+ res = opus_dec_convert (pad, src_fmt, src_val, &dest_fmt, &dest_val);
+ if (res) {
+ gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+ }
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, query);
+ break;
+ }
+
+ gst_object_unref (dec);
+ return res;
+}
+
+static const GstQueryType *
+opus_get_src_query_types (GstPad * pad)
+{
+ static const GstQueryType opus_dec_src_query_types[] = {
+ GST_QUERY_POSITION,
+ GST_QUERY_DURATION,
+ 0
+ };
+
+ return opus_dec_src_query_types;
+}
+
+static gboolean
+opus_dec_src_query (GstPad * pad, GstQuery * query)
+{
+ GstOpusDec *dec;
+ gboolean res = FALSE;
+
+ dec = GST_OPUS_DEC (gst_pad_get_parent (pad));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_POSITION:{
+ GstSegment segment;
+ GstFormat format;
+ gint64 cur;
+
+ gst_query_parse_position (query, &format, NULL);
+
+ GST_PAD_STREAM_LOCK (dec->sinkpad);
+ segment = dec->segment;
+ GST_PAD_STREAM_UNLOCK (dec->sinkpad);
+
+ if (segment.format != GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (dec, "segment not initialised yet");
+ break;
+ }
+
+ if ((res = opus_dec_convert (dec->srcpad, GST_FORMAT_TIME,
+ segment.last_stop, &format, &cur))) {
+ gst_query_set_position (query, format, cur);
+ }
+ break;
+ }
+ case GST_QUERY_DURATION:{
+ GstFormat format = GST_FORMAT_TIME;
+ gint64 dur;
+
+ /* get duration from demuxer */
+ if (!gst_pad_query_peer_duration (dec->sinkpad, &format, &dur))
+ break;
+
+ gst_query_parse_duration (query, &format, NULL);
+
+ /* and convert it into the requested format */
+ if ((res = opus_dec_convert (dec->srcpad, GST_FORMAT_TIME,
+ dur, &format, &dur))) {
+ gst_query_set_duration (query, format, dur);
+ }
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, query);
+ break;
+ }
+
+ gst_object_unref (dec);
+ return res;
+}
+
+static gboolean
+opus_dec_src_event (GstPad * pad, GstEvent * event)
+{
+ gboolean res = FALSE;
+ GstOpusDec *dec = GST_OPUS_DEC (gst_pad_get_parent (pad));
+
+ GST_LOG_OBJECT (dec, "handling %s event", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:{
+ GstFormat format, tformat;
+ gdouble rate;
+ GstEvent *real_seek;
+ GstSeekFlags flags;
+ GstSeekType cur_type, stop_type;
+ gint64 cur, stop;
+ gint64 tcur, tstop;
+
+ gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
+ &stop_type, &stop);
+
+ /* we have to ask our peer to seek to time here as we know
+ * nothing about how to generate a granulepos from the src
+ * formats or anything.
+ *
+ * First bring the requested format to time
+ */
+ tformat = GST_FORMAT_TIME;
+ if (!(res = opus_dec_convert (pad, format, cur, &tformat, &tcur)))
+ break;
+ if (!(res = opus_dec_convert (pad, format, stop, &tformat, &tstop)))
+ break;
+
+ /* then seek with time on the peer */
+ real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
+ flags, cur_type, tcur, stop_type, tstop);
+
+ GST_LOG_OBJECT (dec, "seek to %" GST_TIME_FORMAT, GST_TIME_ARGS (tcur));
+
+ res = gst_pad_push_event (dec->sinkpad, real_seek);
+ gst_event_unref (event);
+ break;
+ }
+ default:
+ res = gst_pad_event_default (pad, event);
+ break;
+ }
+
+ gst_object_unref (dec);
+ return res;
+}
+
+static gboolean
+opus_dec_sink_event (GstPad * pad, GstEvent * event)
+{
+ GstOpusDec *dec;
+ gboolean ret = FALSE;
+
+ dec = GST_OPUS_DEC (gst_pad_get_parent (pad));
+
+ GST_LOG_OBJECT (dec, "handling %s event", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_NEWSEGMENT:{
+ GstFormat format;
+ gdouble rate, arate;
+ gint64 start, stop, time;
+ gboolean update;
+
+ gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
+ &start, &stop, &time);
+
+ if (format != GST_FORMAT_TIME)
+ goto newseg_wrong_format;
+
+ if (rate <= 0.0)
+ goto newseg_wrong_rate;
+
+ if (update) {
+ /* time progressed without data, see if we can fill the gap with
+ * some concealment data */
+ if (dec->segment.last_stop < start) {
+ GstClockTime duration;
+
+ duration = start - dec->segment.last_stop;
+ opus_dec_chain_parse_data (dec, NULL, dec->segment.last_stop,
+ duration);
+ }
+ }
+
+ /* now configure the values */
+ gst_segment_set_newsegment_full (&dec->segment, update,
+ rate, arate, GST_FORMAT_TIME, start, stop, time);
+
+ dec->granulepos = -1;
+
+ GST_DEBUG_OBJECT (dec, "segment now: cur = %" GST_TIME_FORMAT " [%"
+ GST_TIME_FORMAT " - %" GST_TIME_FORMAT "]",
+ GST_TIME_ARGS (dec->segment.last_stop),
+ GST_TIME_ARGS (dec->segment.start),
+ GST_TIME_ARGS (dec->segment.stop));
+
+ ret = gst_pad_push_event (dec->srcpad, event);
+ break;
+ }
+ default:
+ ret = gst_pad_event_default (pad, event);
+ break;
+ }
+
+ gst_object_unref (dec);
+ return ret;
+
+ /* ERRORS */
+newseg_wrong_format:
+ {
+ GST_DEBUG_OBJECT (dec, "received non TIME newsegment");
+ gst_object_unref (dec);
+ return FALSE;
+ }
+newseg_wrong_rate:
+ {
+ GST_DEBUG_OBJECT (dec, "negative rates not supported yet");
+ gst_object_unref (dec);
+ return FALSE;
+ }
+}
+
+static GstFlowReturn
+opus_dec_chain_parse_header (GstOpusDec * dec, GstBuffer * buf)
+{
+ GstCaps *caps;
+ //gint error = OPUS_OK;
+
+#if 0
+ dec->samples_per_frame = opus_packet_get_samples_per_frame (
+ (const unsigned char *) GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
+#endif
+
+#if 0
+ if (memcmp (dec->header.codec_id, "OPUS ", 8) != 0)
+ goto invalid_header;
+#endif
+
+#if 0
+#ifdef HAVE_OPUS_0_7
+ dec->mode =
+ opus_mode_create (dec->sample_rate, dec->header.frame_size, &error);
+#else
+ dec->mode =
+ opus_mode_create (dec->sample_rate, dec->header.nb_channels,
+ dec->header.frame_size, &error);
+#endif
+ if (!dec->mode)
+ goto mode_init_failed;
+
+ /* initialize the decoder */
+#ifdef HAVE_OPUS_0_11
+ dec->state =
+ opus_decoder_create_custom (dec->mode, dec->header.nb_channels, &error);
+#else
+#ifdef HAVE_OPUS_0_7
+ dec->state = opus_decoder_create (dec->mode, dec->header.nb_channels, &error);
+#else
+ dec->state = opus_decoder_create (dec->mode);
+#endif
+#endif
+#endif
+ dec->state = opus_decoder_create (dec->sample_rate, dec->n_channels);
+ if (!dec->state)
+ goto init_failed;
+
+#if 0
+#ifdef HAVE_OPUS_0_8
+ dec->frame_size = dec->header.frame_size;
+#else
+ opus_mode_info (dec->mode, OPUS_GET_FRAME_SIZE, &dec->frame_size);
+#endif
+#endif
+
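+  /* Duration of one decoded frame, assuming frame_size holds the number of
+   * samples per frame: e.g. 960 samples at 48000 Hz give a 20 ms duration. */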
+ dec->frame_duration = gst_util_uint64_scale_int (dec->frame_size,
+ GST_SECOND, dec->sample_rate);
+
+ /* set caps */
+ caps = gst_caps_new_simple ("audio/x-raw-int",
+ "rate", G_TYPE_INT, dec->sample_rate,
+ "channels", G_TYPE_INT, dec->n_channels,
+ "signed", G_TYPE_BOOLEAN, TRUE,
+ "endianness", G_TYPE_INT, G_BYTE_ORDER,
+ "width", G_TYPE_INT, 16, "depth", G_TYPE_INT, 16, NULL);
+
+ GST_DEBUG_OBJECT (dec, "rate=%d channels=%d frame-size=%d",
+ dec->sample_rate, dec->n_channels, dec->frame_size);
+
+ if (!gst_pad_set_caps (dec->srcpad, caps))
+ goto nego_failed;
+
+ gst_caps_unref (caps);
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+#if 0
+invalid_header:
+ {
+ GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
+ (NULL), ("Invalid header"));
+ return GST_FLOW_ERROR;
+ }
+mode_init_failed:
+ {
+ GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
+ (NULL), ("Mode initialization failed: %d", error));
+ return GST_FLOW_ERROR;
+ }
+#endif
+init_failed:
+ {
+ GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
+ (NULL), ("couldn't initialize decoder"));
+ return GST_FLOW_ERROR;
+ }
+nego_failed:
+ {
+ GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
+ (NULL), ("couldn't negotiate format"));
+ gst_caps_unref (caps);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+}
+
+#if 0
+static GstFlowReturn
+opus_dec_chain_parse_comments (GstOpusDec * dec, GstBuffer * buf)
+{
+ GstTagList *list;
+ gchar *encoder = NULL;
+
+ list = gst_tag_list_from_vorbiscomment_buffer (buf, NULL, 0, &encoder);
+
+ if (!list) {
+ GST_WARNING_OBJECT (dec, "couldn't decode comments");
+ list = gst_tag_list_new ();
+ }
+
+ if (encoder) {
+ gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
+ GST_TAG_ENCODER, encoder, NULL);
+ }
+
+ gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
+ GST_TAG_AUDIO_CODEC, "Opus", NULL);
+
+ if (dec->header.bytes_per_packet > 0) {
+ gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
+ GST_TAG_BITRATE, (guint) dec->header.bytes_per_packet * 8, NULL);
+ }
+
+ GST_INFO_OBJECT (dec, "tags: %" GST_PTR_FORMAT, list);
+
+ gst_element_found_tags_for_pad (GST_ELEMENT (dec), dec->srcpad, list);
+
+  g_free (encoder);
+
+ return GST_FLOW_OK;
+}
+#endif
+
+static GstFlowReturn
+opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buf,
+ GstClockTime timestamp, GstClockTime duration)
+{
+ GstFlowReturn res = GST_FLOW_OK;
+ gint size;
+ guint8 *data;
+ GstBuffer *outbuf;
+ gint16 *out_data;
+ int n;
+
+ if (timestamp != -1) {
+ dec->segment.last_stop = timestamp;
+ dec->granulepos = -1;
+ }
+
+ if (dec->state == NULL) {
+ GstCaps *caps;
+
+ dec->state = opus_decoder_create (dec->sample_rate, dec->n_channels);
+
+ /* set caps */
+ caps = gst_caps_new_simple ("audio/x-raw-int",
+ "rate", G_TYPE_INT, dec->sample_rate,
+ "channels", G_TYPE_INT, dec->n_channels,
+ "signed", G_TYPE_BOOLEAN, TRUE,
+ "endianness", G_TYPE_INT, G_BYTE_ORDER,
+ "width", G_TYPE_INT, 16, "depth", G_TYPE_INT, 16, NULL);
+
+ GST_DEBUG_OBJECT (dec, "rate=%d channels=%d frame-size=%d",
+ dec->sample_rate, dec->n_channels, dec->frame_size);
+
+ if (!gst_pad_set_caps (dec->srcpad, caps))
+ GST_ERROR ("nego failure");
+
+ gst_caps_unref (caps);
+ }
+
+ if (buf) {
+ data = GST_BUFFER_DATA (buf);
+ size = GST_BUFFER_SIZE (buf);
+
+ GST_DEBUG_OBJECT (dec, "received buffer of size %u", size);
+
+ /* copy timestamp */
+ } else {
+ /* concealment data, pass NULL as the bits parameters */
+ GST_DEBUG_OBJECT (dec, "creating concealment data");
+ data = NULL;
+ size = 0;
+ }
+
+ GST_DEBUG ("bandwidth %d", opus_packet_get_bandwidth (data));
+ GST_DEBUG ("samples_per_frame %d", opus_packet_get_samples_per_frame (data,
+ 48000));
+ GST_DEBUG ("channels %d", opus_packet_get_nb_channels (data));
+
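+  /* Room for one frame of 16-bit interleaved PCM: frame_samples * n_channels
+   * * 2 bytes, e.g. 960 * 2 * 2 = 3840 bytes for a 20 ms stereo frame at
+   * 48000 Hz. */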
+ res = gst_pad_alloc_buffer_and_set_caps (dec->srcpad,
+ GST_BUFFER_OFFSET_NONE, dec->frame_samples * dec->n_channels * 2,
+ GST_PAD_CAPS (dec->srcpad), &outbuf);
+
+ if (res != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (dec, "buf alloc flow: %s", gst_flow_get_name (res));
+ return res;
+ }
+
+ out_data = (gint16 *) GST_BUFFER_DATA (outbuf);
+
+ GST_LOG_OBJECT (dec, "decoding frame");
+
+ n = opus_decode (dec->state, data, size, out_data, dec->frame_samples, TRUE);
+ if (n < 0) {
+ GST_ELEMENT_ERROR (dec, STREAM, DECODE, ("Decoding error: %d", n), (NULL));
+ return GST_FLOW_ERROR;
+ }
+
+ if (!GST_CLOCK_TIME_IS_VALID (timestamp)) {
+ timestamp = gst_util_uint64_scale_int (dec->granulepos - dec->frame_size,
+ GST_SECOND, dec->sample_rate);
+ }
+
+ GST_DEBUG_OBJECT (dec, "timestamp=%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp));
+
+  /* buf is NULL for concealment data, so use the computed timestamp and the
+   * per-frame duration instead of dereferencing it */
+  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+  GST_BUFFER_DURATION (outbuf) = dec->frame_duration;
+ if (dec->discont) {
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
+ dec->discont = 0;
+ }
+
+ dec->segment.last_stop += dec->frame_duration;
+
+ GST_LOG_OBJECT (dec, "pushing buffer with ts=%" GST_TIME_FORMAT ", dur=%"
+ GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+ GST_TIME_ARGS (dec->frame_duration));
+
+ res = gst_pad_push (dec->srcpad, outbuf);
+
+ if (res != GST_FLOW_OK)
+ GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));
+
+ return res;
+}
+
+static GstFlowReturn
+opus_dec_chain (GstPad * pad, GstBuffer * buf)
+{
+ GstFlowReturn res;
+ GstOpusDec *dec;
+
+ dec = GST_OPUS_DEC (gst_pad_get_parent (pad));
+
+ if (GST_BUFFER_IS_DISCONT (buf)) {
+ dec->discont = TRUE;
+ }
+
+ res = opus_dec_chain_parse_data (dec, buf, GST_BUFFER_TIMESTAMP (buf),
+ GST_BUFFER_DURATION (buf));
+
+//done:
+ dec->packetno++;
+
+ gst_buffer_unref (buf);
+ gst_object_unref (dec);
+
+ return res;
+}
+
+static GstStateChangeReturn
+opus_dec_change_state (GstElement * element, GstStateChange transition)
+{
+ GstStateChangeReturn ret;
+ GstOpusDec *dec = GST_OPUS_DEC (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ default:
+ break;
+ }
+
+  ret = parent_class->change_state (element, transition);
+  if (ret == GST_STATE_CHANGE_FAILURE)
+    return ret;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_opus_dec_reset (dec);
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+}
diff --git a/ext/opus/gstopusdec.h b/ext/opus/gstopusdec.h
new file mode 100644
index 000000000..886a90753
--- /dev/null
+++ b/ext/opus/gstopusdec.h
@@ -0,0 +1,77 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2008> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_OPUS_DEC_H__
+#define __GST_OPUS_DEC_H__
+
+#include <gst/gst.h>
+#include <opus/opus.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_OPUS_DEC \
+ (gst_opus_dec_get_type())
+#define GST_OPUS_DEC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_OPUS_DEC,GstOpusDec))
+#define GST_OPUS_DEC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_OPUS_DEC,GstOpusDecClass))
+#define GST_IS_OPUS_DEC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_OPUS_DEC))
+#define GST_IS_OPUS_DEC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_OPUS_DEC))
+
+typedef struct _GstOpusDec GstOpusDec;
+typedef struct _GstOpusDecClass GstOpusDecClass;
+
+struct _GstOpusDec {
+ GstElement element;
+
+ /* pads */
+ GstPad *sinkpad;
+ GstPad *srcpad;
+
+ OpusDecoder *state;
+ int frame_samples;
+
+ gint frame_size;
+ GstClockTime frame_duration;
+ guint64 packetno;
+
+ GstSegment segment; /* STREAM LOCK */
+ gint64 granulepos; /* -1 = needs to be set from current time */
+ gboolean discont;
+
+ GstBuffer *streamheader;
+ GstBuffer *vorbiscomment;
+ GList *extra_headers;
+
+ int sample_rate;
+ int n_channels;
+};
+
+struct _GstOpusDecClass {
+ GstElementClass parent_class;
+};
+
+GType gst_opus_dec_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_OPUS_DEC_H__ */
diff --git a/ext/opus/gstopusenc.c b/ext/opus/gstopusenc.c
new file mode 100644
index 000000000..db57ff75d
--- /dev/null
+++ b/ext/opus/gstopusenc.c
@@ -0,0 +1,1198 @@
+/* GStreamer Opus Encoder
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2008> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/*
+ * Based on the speexenc element
+ */
+
+/**
+ * SECTION:element-opusenc
+ * @see_also: opusdec, oggmux
+ *
+ * This element encodes raw audio into the Opus format.
+ *
+ * <refsect2>
+ * <title>Example pipelines</title>
+ * |[
+ * gst-launch -v audiotestsrc wave=sine num-buffers=100 ! audioconvert ! opusenc ! oggmux ! filesink location=sine.ogg
+ * ]| Encode a test sine signal to Ogg/Opus.
+ * </refsect2>
+ */
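+/* A matching playback pipeline (a sketch, assuming the opusdec element from
+ * this plugin and the usual Ogg/audio elements are available) could be:
+ * gst-launch -v filesrc location=sine.ogg ! oggdemux ! opusdec ! audioconvert ! autoaudiosink
+ */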
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+#include <math.h>
+#include <opus/opus.h>
+
+#include <gst/gsttagsetter.h>
+#include <gst/tag/tag.h>
+#include <gst/audio/audio.h>
+#include "gstopusenc.h"
+
+GST_DEBUG_CATEGORY_STATIC (opusenc_debug);
+#define GST_CAT_DEFAULT opusenc_debug
+
+#define GST_OPUS_ENC_TYPE_BANDWIDTH (gst_opus_enc_bandwidth_get_type())
+static GType
+gst_opus_enc_bandwidth_get_type (void)
+{
+ static const GEnumValue values[] = {
+ {OPUS_BANDWIDTH_NARROWBAND, "Narrow band", "narrowband"},
+ {OPUS_BANDWIDTH_MEDIUMBAND, "Medium band", "mediumband"},
+ {OPUS_BANDWIDTH_WIDEBAND, "Wide band", "wideband"},
+ {OPUS_BANDWIDTH_SUPERWIDEBAND, "Super wide band", "superwideband"},
+ {OPUS_BANDWIDTH_FULLBAND, "Full band", "fullband"},
+ {OPUS_BANDWIDTH_AUTO, "Auto", "auto"},
+ {0, NULL, NULL}
+ };
+ static volatile GType id = 0;
+
+ if (g_once_init_enter ((gsize *) & id)) {
+ GType _id;
+
+ _id = g_enum_register_static ("GstOpusEncBandwidth", values);
+
+ g_once_init_leave ((gsize *) & id, _id);
+ }
+
+ return id;
+}
+
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw-int, "
+ "rate = (int) { 8000, 12000, 16000, 24000, 48000 }, "
+ "channels = (int) [ 1, 2 ], "
+ "endianness = (int) BYTE_ORDER, "
+ "signed = (boolean) TRUE, " "width = (int) 16, " "depth = (int) 16")
+ );
+
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-opus, "
+ "rate = (int) { 8000, 12000, 16000, 24000, 48000 }, "
+ "channels = (int) [ 1, 2 ], " "frame-size = (int) [ 2, 60 ]")
+ );
+
+#define DEFAULT_AUDIO TRUE
+#define DEFAULT_BITRATE 64000
+#define DEFAULT_BANDWIDTH OPUS_BANDWIDTH_FULLBAND
+#define DEFAULT_FRAMESIZE 20
+#define DEFAULT_CBR TRUE
+#define DEFAULT_CONSTRAINED_VBR TRUE
+#define DEFAULT_COMPLEXITY 10
+#define DEFAULT_INBAND_FEC FALSE
+#define DEFAULT_DTX FALSE
+#define DEFAULT_PACKET_LOSS_PERCENT 0
+
+enum
+{
+ PROP_0,
+ PROP_AUDIO,
+ PROP_BITRATE,
+ PROP_BANDWIDTH,
+ PROP_FRAME_SIZE,
+ PROP_CBR,
+ PROP_CONSTRAINED_VBR,
+ PROP_COMPLEXITY,
+ PROP_INBAND_FEC,
+ PROP_DTX,
+ PROP_PACKET_LOSS_PERCENT
+};
+
+static void gst_opus_enc_finalize (GObject * object);
+
+static gboolean gst_opus_enc_sinkevent (GstPad * pad, GstEvent * event);
+static GstFlowReturn gst_opus_enc_chain (GstPad * pad, GstBuffer * buf);
+static gboolean gst_opus_enc_setup (GstOpusEnc * enc);
+
+static void gst_opus_enc_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_opus_enc_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static GstStateChangeReturn gst_opus_enc_change_state (GstElement * element,
+ GstStateChange transition);
+
+static GstFlowReturn gst_opus_enc_encode (GstOpusEnc * enc, gboolean flush);
+
+static void
+gst_opus_enc_setup_interfaces (GType opusenc_type)
+{
+ static const GInterfaceInfo tag_setter_info = { NULL, NULL, NULL };
+ const GInterfaceInfo preset_interface_info = {
+ NULL, /* interface_init */
+ NULL, /* interface_finalize */
+ NULL /* interface_data */
+ };
+
+ g_type_add_interface_static (opusenc_type, GST_TYPE_TAG_SETTER,
+ &tag_setter_info);
+ g_type_add_interface_static (opusenc_type, GST_TYPE_PRESET,
+ &preset_interface_info);
+
+ GST_DEBUG_CATEGORY_INIT (opusenc_debug, "opusenc", 0, "Opus encoder");
+}
+
+GST_BOILERPLATE_FULL (GstOpusEnc, gst_opus_enc, GstElement, GST_TYPE_ELEMENT,
+ gst_opus_enc_setup_interfaces);
+
+static void
+gst_opus_enc_base_init (gpointer g_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
+ gst_element_class_set_details_simple (element_class, "Opus audio encoder",
+ "Codec/Encoder/Audio",
+ "Encodes audio in Opus format",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+}
+
+static void
+gst_opus_enc_class_init (GstOpusEncClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->set_property = gst_opus_enc_set_property;
+ gobject_class->get_property = gst_opus_enc_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_AUDIO,
+ g_param_spec_boolean ("audio", "Audio or voice",
+ "Audio or voice", DEFAULT_AUDIO,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BITRATE,
+ g_param_spec_int ("bitrate", "Encoding Bit-rate",
+ "Specify an encoding bit-rate (in bps).",
+ 1, 320000, DEFAULT_BITRATE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_BANDWIDTH,
+ g_param_spec_enum ("bandwidth", "Band Width",
+ "Audio Band Width", GST_OPUS_ENC_TYPE_BANDWIDTH, DEFAULT_BANDWIDTH,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_FRAME_SIZE,
+ g_param_spec_int ("frame-size", "Frame Size",
+ "The duration of an audio frame, in ms", 2, 60, DEFAULT_FRAMESIZE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_CBR,
+ g_param_spec_boolean ("cbr", "Constant bit rate",
+ "Constant bit rate", DEFAULT_CBR,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_CONSTRAINED_VBR,
+ g_param_spec_boolean ("constrained-cbr", "Constrained VBR",
+ "Constrained VBR", DEFAULT_CONSTRAINED_VBR,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_COMPLEXITY,
+ g_param_spec_int ("complexity", "Complexity",
+ "Complexity", 0, 10, DEFAULT_COMPLEXITY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_INBAND_FEC,
+ g_param_spec_boolean ("inband-fec", "In-band FEC",
+ "Enable forward error correction", DEFAULT_INBAND_FEC,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_DTX,
+ g_param_spec_boolean ("dtx", "DTX",
+ "DTX", DEFAULT_DTX, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass),
+ PROP_PACKET_LOSS_PERCENT, g_param_spec_int ("packet-loss-percentage",
+ "Loss percentage", "Packet loss percentage", 0, 100,
+ DEFAULT_PACKET_LOSS_PERCENT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_opus_enc_finalize);
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_opus_enc_change_state);
+}
+
+static void
+gst_opus_enc_finalize (GObject * object)
+{
+ GstOpusEnc *enc;
+
+ enc = GST_OPUS_ENC (object);
+
+ g_object_unref (enc->adapter);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static gboolean
+gst_opus_enc_sink_setcaps (GstPad * pad, GstCaps * caps)
+{
+ GstOpusEnc *enc;
+ GstStructure *structure;
+ GstCaps *otherpadcaps;
+
+ enc = GST_OPUS_ENC (GST_PAD_PARENT (pad));
+ enc->setup = FALSE;
+ enc->frame_size = DEFAULT_FRAMESIZE;
+ otherpadcaps = gst_pad_get_allowed_caps (pad);
+
+ structure = gst_caps_get_structure (caps, 0);
+ gst_structure_get_int (structure, "channels", &enc->n_channels);
+ gst_structure_get_int (structure, "rate", &enc->sample_rate);
+
+ if (otherpadcaps) {
+ if (!gst_caps_is_empty (otherpadcaps)) {
+ GstStructure *ps = gst_caps_get_structure (otherpadcaps, 0);
+ gst_structure_get_int (ps, "frame-size", &enc->frame_size);
+ }
+ gst_caps_unref (otherpadcaps);
+ }
+
+  GST_DEBUG_OBJECT (pad, "channels=%d rate=%d frame-size=%d",
+      enc->n_channels, enc->sample_rate, enc->frame_size);
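+  /* Translate the frame duration in milliseconds into a per-channel sample
+   * count.  For example, at 48000 Hz a 20 ms frame is 48000 / 50 = 960
+   * samples, which matches what opus_encode() expects for that rate. */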
+ switch (enc->frame_size) {
+ case 2:
+ enc->frame_samples = enc->sample_rate / 400;
+ break;
+ case 5:
+ enc->frame_samples = enc->sample_rate / 200;
+ break;
+ case 10:
+ enc->frame_samples = enc->sample_rate / 100;
+ break;
+ case 20:
+ enc->frame_samples = enc->sample_rate / 50;
+ break;
+ case 40:
+ enc->frame_samples = enc->sample_rate / 20;
+ break;
+ case 60:
+ enc->frame_samples = 3 * enc->sample_rate / 50;
+ break;
+ default:
+ return FALSE;
+ break;
+ }
+ GST_ERROR ("frame_samples %d", enc->frame_samples);
+
+ gst_opus_enc_setup (enc);
+
+ return TRUE;
+}
+
+
+static GstCaps *
+gst_opus_enc_sink_getcaps (GstPad * pad)
+{
+ GstCaps *caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
+ GstCaps *peercaps = NULL;
+ GstOpusEnc *enc = GST_OPUS_ENC (gst_pad_get_parent_element (pad));
+
+ peercaps = gst_pad_peer_get_caps (enc->srcpad);
+
+ if (peercaps) {
+ if (!gst_caps_is_empty (peercaps) && !gst_caps_is_any (peercaps)) {
+ GstStructure *ps = gst_caps_get_structure (peercaps, 0);
+ GstStructure *s = gst_caps_get_structure (caps, 0);
+ gint rate, channels;
+
+ if (gst_structure_get_int (ps, "rate", &rate)) {
+ gst_structure_fixate_field_nearest_int (s, "rate", rate);
+ }
+
+ if (gst_structure_get_int (ps, "channels", &channels)) {
+ gst_structure_fixate_field_nearest_int (s, "channels", channels);
+ }
+ }
+ gst_caps_unref (peercaps);
+ }
+
+ gst_object_unref (enc);
+
+ return caps;
+}
+
+
+static gboolean
+gst_opus_enc_convert_src (GstPad * pad, GstFormat src_format, gint64 src_value,
+ GstFormat * dest_format, gint64 * dest_value)
+{
+ gboolean res = TRUE;
+ GstOpusEnc *enc;
+ gint64 avg;
+
+ enc = GST_OPUS_ENC (GST_PAD_PARENT (pad));
+
+ if (enc->samples_in == 0 || enc->bytes_out == 0 || enc->sample_rate == 0)
+ return FALSE;
+
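+  /* Average output byte rate seen so far: bytes_out * sample_rate /
+   * samples_in.  For instance, 160-byte packets every 960 samples at
+   * 48000 Hz give 160 * 48000 / 960 = 8000 bytes/s, which drives the
+   * BYTES <-> TIME mapping below. */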
+ avg = (enc->bytes_out * enc->sample_rate) / (enc->samples_in);
+
+ switch (src_format) {
+ case GST_FORMAT_BYTES:
+ switch (*dest_format) {
+ case GST_FORMAT_TIME:
+ *dest_value = src_value * GST_SECOND / avg;
+ break;
+ default:
+ res = FALSE;
+ }
+ break;
+ case GST_FORMAT_TIME:
+ switch (*dest_format) {
+ case GST_FORMAT_BYTES:
+ *dest_value = src_value * avg / GST_SECOND;
+ break;
+ default:
+ res = FALSE;
+ }
+ break;
+ default:
+ res = FALSE;
+ }
+ return res;
+}
+
+static gboolean
+gst_opus_enc_convert_sink (GstPad * pad, GstFormat src_format,
+ gint64 src_value, GstFormat * dest_format, gint64 * dest_value)
+{
+ gboolean res = TRUE;
+ guint scale = 1;
+ gint bytes_per_sample;
+ GstOpusEnc *enc;
+
+ enc = GST_OPUS_ENC (GST_PAD_PARENT (pad));
+
+ bytes_per_sample = enc->n_channels * 2;
+
+ switch (src_format) {
+ case GST_FORMAT_BYTES:
+ switch (*dest_format) {
+ case GST_FORMAT_DEFAULT:
+ if (bytes_per_sample == 0)
+ return FALSE;
+ *dest_value = src_value / bytes_per_sample;
+ break;
+ case GST_FORMAT_TIME:
+ {
+ gint byterate = bytes_per_sample * enc->sample_rate;
+
+ if (byterate == 0)
+ return FALSE;
+ *dest_value = src_value * GST_SECOND / byterate;
+ break;
+ }
+ default:
+ res = FALSE;
+ }
+ break;
+ case GST_FORMAT_DEFAULT:
+ switch (*dest_format) {
+ case GST_FORMAT_BYTES:
+ *dest_value = src_value * bytes_per_sample;
+ break;
+ case GST_FORMAT_TIME:
+ if (enc->sample_rate == 0)
+ return FALSE;
+ *dest_value = src_value * GST_SECOND / enc->sample_rate;
+ break;
+ default:
+ res = FALSE;
+ }
+ break;
+ case GST_FORMAT_TIME:
+ switch (*dest_format) {
+ case GST_FORMAT_BYTES:
+ scale = bytes_per_sample;
+ /* fallthrough */
+ case GST_FORMAT_DEFAULT:
+ *dest_value = src_value * scale * enc->sample_rate / GST_SECOND;
+ break;
+ default:
+ res = FALSE;
+ }
+ break;
+ default:
+ res = FALSE;
+ }
+ return res;
+}
+
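+/* The encoder queues one frame of audio before it can emit a packet, so the
+ * latency it adds is frame_samples / sample_rate, e.g. 960 / 48000 = 20 ms
+ * with the default 20 ms frame size. */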
+static gint64
+gst_opus_enc_get_latency (GstOpusEnc * enc)
+{
+ return gst_util_uint64_scale (enc->frame_samples, GST_SECOND,
+ enc->sample_rate);
+}
+
+static const GstQueryType *
+gst_opus_enc_get_query_types (GstPad * pad)
+{
+ static const GstQueryType gst_opus_enc_src_query_types[] = {
+ GST_QUERY_POSITION,
+ GST_QUERY_DURATION,
+ GST_QUERY_CONVERT,
+ GST_QUERY_LATENCY,
+ 0
+ };
+
+ return gst_opus_enc_src_query_types;
+}
+
+static gboolean
+gst_opus_enc_src_query (GstPad * pad, GstQuery * query)
+{
+ gboolean res = TRUE;
+ GstOpusEnc *enc;
+
+ enc = GST_OPUS_ENC (gst_pad_get_parent (pad));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_POSITION:
+ {
+ GstFormat fmt, req_fmt;
+ gint64 pos, val;
+
+ gst_query_parse_position (query, &req_fmt, NULL);
+ if ((res = gst_pad_query_peer_position (enc->sinkpad, &req_fmt, &val))) {
+ gst_query_set_position (query, req_fmt, val);
+ break;
+ }
+
+ fmt = GST_FORMAT_TIME;
+ if (!(res = gst_pad_query_peer_position (enc->sinkpad, &fmt, &pos)))
+ break;
+
+ if ((res =
+ gst_pad_query_peer_convert (enc->sinkpad, fmt, pos, &req_fmt,
+ &val)))
+ gst_query_set_position (query, req_fmt, val);
+
+ break;
+ }
+ case GST_QUERY_DURATION:
+ {
+ GstFormat fmt, req_fmt;
+ gint64 dur, val;
+
+ gst_query_parse_duration (query, &req_fmt, NULL);
+ if ((res = gst_pad_query_peer_duration (enc->sinkpad, &req_fmt, &val))) {
+ gst_query_set_duration (query, req_fmt, val);
+ break;
+ }
+
+ fmt = GST_FORMAT_TIME;
+ if (!(res = gst_pad_query_peer_duration (enc->sinkpad, &fmt, &dur)))
+ break;
+
+ if ((res =
+ gst_pad_query_peer_convert (enc->sinkpad, fmt, dur, &req_fmt,
+ &val))) {
+ gst_query_set_duration (query, req_fmt, val);
+ }
+ break;
+ }
+ case GST_QUERY_CONVERT:
+ {
+ GstFormat src_fmt, dest_fmt;
+ gint64 src_val, dest_val;
+
+ gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
+ if (!(res = gst_opus_enc_convert_src (pad, src_fmt, src_val, &dest_fmt,
+ &dest_val)))
+ goto error;
+ gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+ break;
+ }
+ case GST_QUERY_LATENCY:
+ {
+ gboolean live;
+ GstClockTime min_latency, max_latency;
+ gint64 latency;
+
+ if ((res = gst_pad_peer_query (pad, query))) {
+ gst_query_parse_latency (query, &live, &min_latency, &max_latency);
+
+ latency = gst_opus_enc_get_latency (enc);
+
+ /* add our latency */
+ min_latency += latency;
+ if (max_latency != -1)
+ max_latency += latency;
+
+ gst_query_set_latency (query, live, min_latency, max_latency);
+ }
+ break;
+ }
+ default:
+ res = gst_pad_peer_query (pad, query);
+ break;
+ }
+
+error:
+
+ gst_object_unref (enc);
+
+ return res;
+}
+
+static gboolean
+gst_opus_enc_sink_query (GstPad * pad, GstQuery * query)
+{
+ gboolean res = TRUE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CONVERT:
+ {
+ GstFormat src_fmt, dest_fmt;
+ gint64 src_val, dest_val;
+
+ gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
+ if (!(res =
+ gst_opus_enc_convert_sink (pad, src_fmt, src_val, &dest_fmt,
+ &dest_val)))
+ goto error;
+ gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, query);
+ break;
+ }
+
+error:
+ return res;
+}
+
+static void
+gst_opus_enc_init (GstOpusEnc * enc, GstOpusEncClass * klass)
+{
+ enc->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
+ gst_element_add_pad (GST_ELEMENT (enc), enc->sinkpad);
+ gst_pad_set_event_function (enc->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_opus_enc_sinkevent));
+ gst_pad_set_chain_function (enc->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_opus_enc_chain));
+ gst_pad_set_setcaps_function (enc->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_opus_enc_sink_setcaps));
+ gst_pad_set_getcaps_function (enc->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_opus_enc_sink_getcaps));
+ gst_pad_set_query_function (enc->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_opus_enc_sink_query));
+
+ enc->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
+ gst_pad_set_query_function (enc->srcpad,
+ GST_DEBUG_FUNCPTR (gst_opus_enc_src_query));
+ gst_pad_set_query_type_function (enc->srcpad,
+ GST_DEBUG_FUNCPTR (gst_opus_enc_get_query_types));
+ gst_element_add_pad (GST_ELEMENT (enc), enc->srcpad);
+
+ enc->n_channels = -1;
+ enc->sample_rate = -1;
+ enc->frame_samples = 0;
+
+ enc->bitrate = DEFAULT_BITRATE;
+ enc->bandwidth = DEFAULT_BANDWIDTH;
+ enc->frame_size = DEFAULT_FRAMESIZE;
+ enc->cbr = DEFAULT_CBR;
+ enc->constrained_vbr = DEFAULT_CONSTRAINED_VBR;
+ enc->complexity = DEFAULT_COMPLEXITY;
+ enc->inband_fec = DEFAULT_INBAND_FEC;
+ enc->dtx = DEFAULT_DTX;
+ enc->packet_loss_percentage = DEFAULT_PACKET_LOSS_PERCENT;
+
+ enc->setup = FALSE;
+ enc->header_sent = FALSE;
+
+ enc->adapter = gst_adapter_new ();
+}
+
+#if 0
+static GstBuffer *
+gst_opus_enc_create_metadata_buffer (GstOpusEnc * enc)
+{
+ const GstTagList *tags;
+ GstTagList *empty_tags = NULL;
+ GstBuffer *comments = NULL;
+
+ tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (enc));
+
+ GST_DEBUG_OBJECT (enc, "tags = %" GST_PTR_FORMAT, tags);
+
+ if (tags == NULL) {
+ /* FIXME: better fix chain of callers to not write metadata at all,
+ * if there is none */
+ empty_tags = gst_tag_list_new ();
+ tags = empty_tags;
+ }
+ comments = gst_tag_list_to_vorbiscomment_buffer (tags, NULL,
+ 0, "Encoded with GStreamer Opusenc");
+
+ GST_BUFFER_OFFSET (comments) = enc->bytes_out;
+ GST_BUFFER_OFFSET_END (comments) = 0;
+
+ if (empty_tags)
+ gst_tag_list_free (empty_tags);
+
+ return comments;
+}
+#endif
+
+static gboolean
+gst_opus_enc_setup (GstOpusEnc * enc)
+{
+ //gint error = OPUS_OK;
+
+ enc->setup = FALSE;
+
+#if 0
+#ifdef HAVE_OPUS_0_7
+ enc->mode = opus_mode_create (enc->rate, enc->frame_size, &error);
+#else
+ enc->mode =
+ opus_mode_create (enc->rate, enc->n_channels, enc->frame_size, &error);
+#endif
+ if (!enc->mode)
+ goto mode_initialization_failed;
+
+#ifdef HAVE_OPUS_0_11
+ opus_header_init (&enc->header, enc->mode, enc->frame_size, enc->n_channels);
+#else
+#ifdef HAVE_OPUS_0_7
+ opus_header_init (&enc->header, enc->mode, enc->n_channels);
+#else
+ opus_header_init (&enc->header, enc->mode);
+#endif
+#endif
+ enc->header.nb_channels = enc->n_channels;
+
+#ifdef HAVE_OPUS_0_8
+ enc->frame_size = enc->header.frame_size;
+#else
+ opus_mode_info (enc->mode, OPUS_GET_FRAME_SIZE, &enc->frame_size);
+#endif
+#endif
+
+#if 0
+#ifdef HAVE_OPUS_0_11
+ enc->state = opus_encoder_create_custom (enc->mode, enc->n_channels, &error);
+#else
+#ifdef HAVE_OPUS_0_7
+ enc->state = opus_encoder_create (enc->mode, enc->n_channels, &error);
+#else
+ enc->state = opus_encoder_create (enc->mode);
+#endif
+#endif
+#endif
+ enc->state = opus_encoder_create (enc->sample_rate, enc->n_channels,
+ enc->audio_or_voip ? OPUS_APPLICATION_AUDIO : OPUS_APPLICATION_VOIP);
+ if (!enc->state)
+ goto encoder_creation_failed;
+
+ opus_encoder_ctl (enc->state, OPUS_SET_BITRATE (enc->bitrate), 0);
+ opus_encoder_ctl (enc->state, OPUS_SET_BANDWIDTH (enc->bandwidth), 0);
+ opus_encoder_ctl (enc->state, OPUS_SET_VBR_FLAG (!enc->cbr), 0);
+ opus_encoder_ctl (enc->state, OPUS_SET_VBR_CONSTRAINT (enc->constrained_vbr),
+ 0);
+ opus_encoder_ctl (enc->state, OPUS_SET_COMPLEXITY (enc->complexity), 0);
+ opus_encoder_ctl (enc->state, OPUS_SET_INBAND_FEC_FLAG (enc->inband_fec), 0);
+ opus_encoder_ctl (enc->state, OPUS_SET_DTX_FLAG (enc->dtx), 0);
+ opus_encoder_ctl (enc->state,
+ OPUS_SET_PACKET_LOSS_PERC (enc->packet_loss_percentage), 0);
+
+ GST_LOG_OBJECT (enc, "we have frame size %d", enc->frame_size);
+
+ enc->setup = TRUE;
+
+ return TRUE;
+
+#if 0
+mode_initialization_failed:
+ GST_ERROR_OBJECT (enc, "Mode initialization failed: %d", error);
+ return FALSE;
+#endif
+
+encoder_creation_failed:
+ GST_ERROR_OBJECT (enc, "Encoder creation failed");
+ return FALSE;
+}
+
+
+/* push out the buffer and do internal bookkeeping */
+static GstFlowReturn
+gst_opus_enc_push_buffer (GstOpusEnc * enc, GstBuffer * buffer)
+{
+ guint size;
+
+ size = GST_BUFFER_SIZE (buffer);
+
+ enc->bytes_out += size;
+
+ GST_DEBUG_OBJECT (enc, "pushing output buffer of size %u", size);
+
+ return gst_pad_push (enc->srcpad, buffer);
+}
+
+#if 0
+static GstCaps *
+gst_opus_enc_set_header_on_caps (GstCaps * caps, GstBuffer * buf1,
+ GstBuffer * buf2)
+{
+ GstStructure *structure = NULL;
+ GstBuffer *buf;
+ GValue array = { 0 };
+ GValue value = { 0 };
+
+ caps = gst_caps_make_writable (caps);
+ structure = gst_caps_get_structure (caps, 0);
+
+ g_assert (gst_buffer_is_metadata_writable (buf1));
+ g_assert (gst_buffer_is_metadata_writable (buf2));
+
+ /* mark buffers */
+ GST_BUFFER_FLAG_SET (buf1, GST_BUFFER_FLAG_IN_CAPS);
+ GST_BUFFER_FLAG_SET (buf2, GST_BUFFER_FLAG_IN_CAPS);
+
+ /* put buffers in a fixed list */
+ g_value_init (&array, GST_TYPE_ARRAY);
+ g_value_init (&value, GST_TYPE_BUFFER);
+ buf = gst_buffer_copy (buf1);
+ gst_value_set_buffer (&value, buf);
+ gst_buffer_unref (buf);
+ gst_value_array_append_value (&array, &value);
+ g_value_unset (&value);
+ g_value_init (&value, GST_TYPE_BUFFER);
+ buf = gst_buffer_copy (buf2);
+ gst_value_set_buffer (&value, buf);
+ gst_buffer_unref (buf);
+ gst_value_array_append_value (&array, &value);
+ gst_structure_set_value (structure, "streamheader", &array);
+ g_value_unset (&value);
+ g_value_unset (&array);
+
+ return caps;
+}
+#endif
+
+
+static gboolean
+gst_opus_enc_sinkevent (GstPad * pad, GstEvent * event)
+{
+ gboolean res = TRUE;
+ GstOpusEnc *enc;
+
+ enc = GST_OPUS_ENC (gst_pad_get_parent (pad));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_EOS:
+ gst_opus_enc_encode (enc, TRUE);
+ res = gst_pad_event_default (pad, event);
+ break;
+ case GST_EVENT_TAG:
+ {
+ GstTagList *list;
+ GstTagSetter *setter = GST_TAG_SETTER (enc);
+ const GstTagMergeMode mode = gst_tag_setter_get_tag_merge_mode (setter);
+
+ gst_event_parse_tag (event, &list);
+ gst_tag_setter_merge_tags (setter, list, mode);
+ res = gst_pad_event_default (pad, event);
+ break;
+ }
+ default:
+ res = gst_pad_event_default (pad, event);
+ break;
+ }
+
+ gst_object_unref (enc);
+
+ return res;
+}
+
+static GstFlowReturn
+gst_opus_enc_encode (GstOpusEnc * enc, gboolean flush)
+{
+
+ GstFlowReturn ret = GST_FLOW_OK;
+ gint bytes = enc->frame_samples * 2 * enc->n_channels;
+ gint bytes_per_packet;
+
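+  /* Rough per-packet byte budget derived from the target bitrate.  As an
+   * example, 64000 bps with 960-sample frames at 48000 Hz gives
+   * (64000 * 960 / 48000 + 4) / 8 = 160 bytes per packet. */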
+ bytes_per_packet =
+ (enc->bitrate * enc->frame_samples / enc->sample_rate + 4) / 8;
+
+  if (flush && gst_adapter_available (enc->adapter) % bytes != 0) {
+    /* pad the tail with silence up to a whole frame */
+    guint diff = bytes - gst_adapter_available (enc->adapter) % bytes;
+    GstBuffer *buf = gst_buffer_new_and_alloc (diff);
+
+    memset (GST_BUFFER_DATA (buf), 0, diff);
+    gst_adapter_push (enc->adapter, buf);
+  }
+
+
+ while (gst_adapter_available (enc->adapter) >= bytes) {
+ gint16 *data;
+ gint outsize;
+ GstBuffer *outbuf;
+
+ ret = gst_pad_alloc_buffer_and_set_caps (enc->srcpad,
+ GST_BUFFER_OFFSET_NONE, bytes_per_packet, GST_PAD_CAPS (enc->srcpad),
+ &outbuf);
+
+ if (GST_FLOW_OK != ret)
+ goto done;
+
+ data = (gint16 *) gst_adapter_take (enc->adapter, bytes);
+ enc->samples_in += enc->frame_samples;
+
+ GST_DEBUG_OBJECT (enc, "encoding %d samples (%d bytes)",
+ enc->frame_samples, bytes);
+
+ outsize = opus_encode (enc->state, data, enc->frame_samples,
+ GST_BUFFER_DATA (outbuf), bytes_per_packet);
+
+ g_free (data);
+
+ if (outsize < 0) {
+ GST_ERROR_OBJECT (enc, "Encoding failed: %d", outsize);
+ ret = GST_FLOW_ERROR;
+ goto done;
+ }
+
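+    /* Output timestamps are derived from the running frame count: frame N
+     * starts at start_ts + N * frame_samples / sample_rate, e.g. the third
+     * 960-sample frame at 48000 Hz starts 40 ms after start_ts. */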
+ GST_BUFFER_TIMESTAMP (outbuf) = enc->start_ts +
+ gst_util_uint64_scale_int (enc->frameno_out * enc->frame_samples,
+ GST_SECOND, enc->sample_rate);
+ GST_BUFFER_DURATION (outbuf) =
+ gst_util_uint64_scale_int (enc->frame_samples, GST_SECOND,
+ enc->sample_rate);
+ GST_BUFFER_OFFSET (outbuf) =
+ gst_util_uint64_scale_int (GST_BUFFER_OFFSET_END (outbuf), GST_SECOND,
+ enc->sample_rate);
+
+ enc->frameno++;
+ enc->frameno_out++;
+
+ ret = gst_opus_enc_push_buffer (enc, outbuf);
+
+ if ((GST_FLOW_OK != ret) && (GST_FLOW_NOT_LINKED != ret))
+ goto done;
+ }
+
+done:
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_opus_enc_chain (GstPad * pad, GstBuffer * buf)
+{
+ GstOpusEnc *enc;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ enc = GST_OPUS_ENC (GST_PAD_PARENT (pad));
+
+ if (!enc->setup)
+ goto not_setup;
+
+#if 0
+ if (!enc->header_sent) {
+ /* Opus streams begin with two headers; the initial header (with
+ most of the codec setup parameters) which is mandated by the Ogg
+ bitstream spec. The second header holds any comment fields.
+ We merely need to make the headers, then pass them to libopus
+ one at a time; libopus handles the additional Ogg bitstream
+ constraints */
+ GstBuffer *buf1, *buf2;
+ GstCaps *caps;
+ guchar data[100];
+
+ /* create header buffer */
+ opus_header_to_packet (&enc->header, data, 100);
+ buf1 = gst_opus_enc_buffer_from_data (enc, data, 100, 0);
+
+ /* create comment buffer */
+ buf2 = gst_opus_enc_create_metadata_buffer (enc);
+
+ /* mark and put on caps */
+ caps = gst_pad_get_caps (enc->srcpad);
+ caps = gst_opus_enc_set_header_on_caps (caps, buf1, buf2);
+
+ gst_caps_set_simple (caps,
+ "rate", G_TYPE_INT, enc->sample_rate,
+ "channels", G_TYPE_INT, enc->n_channels,
+ "frame-size", G_TYPE_INT, enc->frame_size, NULL);
+
+ /* negotiate with these caps */
+ GST_DEBUG_OBJECT (enc, "here are the caps: %" GST_PTR_FORMAT, caps);
+ GST_LOG_OBJECT (enc, "rate=%d channels=%d frame-size=%d",
+ enc->sample_rate, enc->n_channels, enc->frame_size);
+ gst_pad_set_caps (enc->srcpad, caps);
+
+ gst_buffer_set_caps (buf1, caps);
+ gst_buffer_set_caps (buf2, caps);
+ gst_caps_unref (caps);
+
+ /* push out buffers */
+ ret = gst_opus_enc_push_buffer (enc, buf1);
+
+ if (ret != GST_FLOW_OK) {
+ gst_buffer_unref (buf2);
+ goto done;
+ }
+
+ ret = gst_opus_enc_push_buffer (enc, buf2);
+
+ if (ret != GST_FLOW_OK)
+ goto done;
+
+ enc->header_sent = TRUE;
+ }
+#endif
+
+ GST_DEBUG_OBJECT (enc, "received buffer of %u bytes", GST_BUFFER_SIZE (buf));
+
+ /* Save the timestamp of the first buffer. This will be later
+ * used as offset for all following buffers */
+ if (enc->start_ts == GST_CLOCK_TIME_NONE) {
+ if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
+ enc->start_ts = GST_BUFFER_TIMESTAMP (buf);
+ } else {
+ enc->start_ts = 0;
+ }
+ }
+
+
+  /* Check if we have a continuous stream; if not, drop some samples or the
+   * whole buffer, or insert some silence samples */
+ if (enc->next_ts != GST_CLOCK_TIME_NONE &&
+ GST_BUFFER_TIMESTAMP (buf) < enc->next_ts) {
+ guint64 diff = enc->next_ts - GST_BUFFER_TIMESTAMP (buf);
+ guint64 diff_bytes;
+
+ GST_WARNING_OBJECT (enc, "Buffer is older than previous "
+ "timestamp + duration (%" GST_TIME_FORMAT "< %" GST_TIME_FORMAT
+ "), cannot handle. Clipping buffer.",
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
+ GST_TIME_ARGS (enc->next_ts));
+
+ diff_bytes =
+ GST_CLOCK_TIME_TO_FRAMES (diff, enc->sample_rate) * enc->n_channels * 2;
+ if (diff_bytes >= GST_BUFFER_SIZE (buf)) {
+ gst_buffer_unref (buf);
+ return GST_FLOW_OK;
+ }
+ buf = gst_buffer_make_metadata_writable (buf);
+ GST_BUFFER_DATA (buf) += diff_bytes;
+ GST_BUFFER_SIZE (buf) -= diff_bytes;
+
+ GST_BUFFER_TIMESTAMP (buf) += diff;
+ if (GST_BUFFER_DURATION_IS_VALID (buf))
+ GST_BUFFER_DURATION (buf) -= diff;
+ }
+
+ if (enc->next_ts != GST_CLOCK_TIME_NONE
+ && GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
+    /* allow up to one frame of jitter before flagging a discontinuity */
+    guint64 max_diff =
+        gst_util_uint64_scale (enc->frame_samples, GST_SECOND, enc->sample_rate);
+
+ if (GST_BUFFER_TIMESTAMP (buf) != enc->next_ts &&
+ GST_BUFFER_TIMESTAMP (buf) - enc->next_ts > max_diff) {
+ GST_WARNING_OBJECT (enc,
+ "Discontinuity detected: %" G_GUINT64_FORMAT " > %" G_GUINT64_FORMAT,
+ GST_BUFFER_TIMESTAMP (buf) - enc->next_ts, max_diff);
+
+ gst_opus_enc_encode (enc, TRUE);
+
+ enc->frameno_out = 0;
+ enc->start_ts = GST_BUFFER_TIMESTAMP (buf);
+ }
+ }
+
+ if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)
+ && GST_BUFFER_DURATION_IS_VALID (buf))
+ enc->next_ts = GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf);
+ else
+ enc->next_ts = GST_CLOCK_TIME_NONE;
+
+ /* push buffer to adapter */
+ gst_adapter_push (enc->adapter, buf);
+ buf = NULL;
+
+ ret = gst_opus_enc_encode (enc, FALSE);
+
+done:
+
+ if (buf)
+ gst_buffer_unref (buf);
+
+ return ret;
+
+ /* ERRORS */
+not_setup:
+ {
+ GST_ELEMENT_ERROR (enc, CORE, NEGOTIATION, (NULL),
+ ("encoder not initialized (input is not audio?)"));
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ goto done;
+ }
+
+}
+
+
+static void
+gst_opus_enc_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstOpusEnc *enc;
+
+ enc = GST_OPUS_ENC (object);
+
+ switch (prop_id) {
+ case PROP_AUDIO:
+ g_value_set_boolean (value, enc->audio_or_voip);
+ break;
+ case PROP_BITRATE:
+ g_value_set_int (value, enc->bitrate);
+ break;
+ case PROP_BANDWIDTH:
+ g_value_set_int (value, enc->bandwidth);
+ break;
+ case PROP_FRAME_SIZE:
+ g_value_set_int (value, enc->frame_size);
+ break;
+ case PROP_CBR:
+ g_value_set_boolean (value, enc->cbr);
+ break;
+ case PROP_CONSTRAINED_VBR:
+ g_value_set_boolean (value, enc->constrained_vbr);
+ break;
+ case PROP_COMPLEXITY:
+ g_value_set_int (value, enc->complexity);
+ break;
+ case PROP_INBAND_FEC:
+ g_value_set_boolean (value, enc->inband_fec);
+ break;
+ case PROP_DTX:
+ g_value_set_boolean (value, enc->dtx);
+ break;
+ case PROP_PACKET_LOSS_PERCENT:
+ g_value_set_int (value, enc->packet_loss_percentage);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_opus_enc_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstOpusEnc *enc;
+
+ enc = GST_OPUS_ENC (object);
+
+ switch (prop_id) {
+ case PROP_AUDIO:
+ enc->audio_or_voip = g_value_get_boolean (value);
+ break;
+ case PROP_BITRATE:
+ enc->bitrate = g_value_get_int (value);
+ break;
+ case PROP_BANDWIDTH:
+ enc->bandwidth = g_value_get_int (value);
+ break;
+ case PROP_FRAME_SIZE:
+ enc->frame_size = g_value_get_int (value);
+ break;
+ case PROP_CBR:
+ enc->cbr = g_value_get_boolean (value);
+ break;
+ case PROP_CONSTRAINED_VBR:
+ enc->constrained_vbr = g_value_get_boolean (value);
+ break;
+ case PROP_COMPLEXITY:
+ enc->complexity = g_value_get_int (value);
+ break;
+ case PROP_INBAND_FEC:
+ enc->inband_fec = g_value_get_boolean (value);
+ break;
+ case PROP_DTX:
+ enc->dtx = g_value_get_boolean (value);
+ break;
+ case PROP_PACKET_LOSS_PERCENT:
+ enc->packet_loss_percentage = g_value_get_int (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static GstStateChangeReturn
+gst_opus_enc_change_state (GstElement * element, GstStateChange transition)
+{
+ GstOpusEnc *enc = GST_OPUS_ENC (element);
+ GstStateChangeReturn res;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ enc->frameno = 0;
+ enc->samples_in = 0;
+ enc->frameno_out = 0;
+ enc->start_ts = GST_CLOCK_TIME_NONE;
+ enc->next_ts = GST_CLOCK_TIME_NONE;
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ /* fall through */
+ default:
+ break;
+ }
+
+ res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (res == GST_STATE_CHANGE_FAILURE)
+ return res;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ enc->setup = FALSE;
+ enc->header_sent = FALSE;
+ if (enc->state) {
+ opus_encoder_destroy (enc->state);
+ enc->state = NULL;
+ }
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ gst_tag_setter_reset_tags (GST_TAG_SETTER (enc));
+ default:
+ break;
+ }
+
+ return res;
+}
diff --git a/ext/opus/gstopusenc.h b/ext/opus/gstopusenc.h
new file mode 100644
index 000000000..5cb54598a
--- /dev/null
+++ b/ext/opus/gstopusenc.h
@@ -0,0 +1,105 @@
+/* GStreamer Opus Encoder
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2008> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+#ifndef __GST_OPUS_ENC_H__
+#define __GST_OPUS_ENC_H__
+
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+
+#include <opus/opus.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_OPUS_ENC \
+ (gst_opus_enc_get_type())
+#define GST_OPUS_ENC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_OPUS_ENC,GstOpusEnc))
+#define GST_OPUS_ENC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_OPUS_ENC,GstOpusEncClass))
+#define GST_IS_OPUS_ENC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_OPUS_ENC))
+#define GST_IS_OPUS_ENC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_OPUS_ENC))
+
+#define MAX_FRAME_SIZE 2000*2
+#define MAX_FRAME_BYTES 2000
+
+typedef struct _GstOpusEnc GstOpusEnc;
+typedef struct _GstOpusEncClass GstOpusEncClass;
+
+struct _GstOpusEnc {
+ GstElement element;
+
+ /* pads */
+ GstPad *sinkpad;
+ GstPad *srcpad;
+
+ //OpusHeader header;
+ //OpusMode *mode;
+ OpusEncoder *state;
+ GstAdapter *adapter;
+
+ /* properties */
+ gboolean audio_or_voip;
+ gint bitrate;
+ gint bandwidth;
+ gint frame_size;
+ gboolean cbr;
+ gboolean constrained_vbr;
+ gint complexity;
+ gboolean inband_fec;
+ gboolean dtx;
+ gint packet_loss_percentage;
+
+ int frame_samples;
+
+ gint n_channels;
+ gint sample_rate;
+
+ gboolean setup;
+ gboolean header_sent;
+ gboolean eos;
+
+ guint64 samples_in;
+ guint64 bytes_out;
+
+ guint64 frameno;
+ guint64 frameno_out;
+
+ GstClockTime start_ts;
+ GstClockTime next_ts;
+ guint64 granulepos_offset;
+};
+
+struct _GstOpusEncClass {
+ GstElementClass parent_class;
+
+ /* signals */
+ void (*frame_encoded) (GstElement *element);
+};
+
+GType gst_opus_enc_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_OPUS_ENC_H__ */
diff --git a/ext/resindvd/gstpesfilter.c b/ext/resindvd/gstpesfilter.c
index f0a8cb549..f2ccc7849 100644
--- a/ext/resindvd/gstpesfilter.c
+++ b/ext/resindvd/gstpesfilter.c
@@ -101,7 +101,6 @@ gst_pes_filter_parse (GstPESFilter * filter)
GstFlowReturn ret;
guint32 start_code;
- gboolean STD_buffer_bound_scale;
guint16 STD_buffer_size_bound;
const guint8 *data;
gint avail, datalen;
@@ -213,7 +212,7 @@ gst_pes_filter_parse (GstPESFilter * filter)
if (datalen < 3)
goto need_more_data;
- STD_buffer_bound_scale = *data & 0x20;
+ /* STD_buffer_bound_scale = *data & 0x20; */
STD_buffer_size_bound = ((guint16) (*data++ & 0x1F)) << 8;
STD_buffer_size_bound |= *data++;
diff --git a/ext/resindvd/rsnstreamselector.c b/ext/resindvd/rsnstreamselector.c
index 3bb607157..eaae4f61c 100644
--- a/ext/resindvd/rsnstreamselector.c
+++ b/ext/resindvd/rsnstreamselector.c
@@ -424,7 +424,6 @@ ignore:
}
static void rsn_stream_selector_dispose (GObject * object);
-static void rsn_stream_selector_finalize (GObject * object);
static void rsn_stream_selector_init (RsnStreamSelector * sel);
static void rsn_stream_selector_base_init (RsnStreamSelectorClass * klass);
@@ -497,7 +496,6 @@ rsn_stream_selector_class_init (RsnStreamSelectorClass * klass)
parent_class = g_type_class_peek_parent (klass);
gobject_class->dispose = rsn_stream_selector_dispose;
- gobject_class->finalize = rsn_stream_selector_finalize;
gobject_class->set_property =
GST_DEBUG_FUNCPTR (rsn_stream_selector_set_property);
@@ -546,16 +544,6 @@ rsn_stream_selector_dispose (GObject * object)
}
static void
-rsn_stream_selector_finalize (GObject * object)
-{
- RsnStreamSelector *sel;
-
- sel = RSN_STREAM_SELECTOR (object);
-
- G_OBJECT_CLASS (parent_class)->finalize (object);
-}
-
-static void
rsn_stream_selector_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
@@ -653,11 +641,8 @@ rsn_stream_selector_getcaps (GstPad * pad)
static gboolean
rsn_stream_selector_is_active_sinkpad (RsnStreamSelector * sel, GstPad * pad)
{
- RsnSelectorPad *selpad;
gboolean res;
- selpad = GST_SELECTOR_PAD_CAST (pad);
-
GST_OBJECT_LOCK (sel);
res = (pad == sel->active_sinkpad);
GST_OBJECT_UNLOCK (sel);
diff --git a/ext/rtmp/Makefile.am b/ext/rtmp/Makefile.am
index e97c7a758..bd2398cab 100644
--- a/ext/rtmp/Makefile.am
+++ b/ext/rtmp/Makefile.am
@@ -1,8 +1,8 @@
plugin_LTLIBRARIES = libgstrtmp.la
-libgstrtmp_la_SOURCES = gstrtmpsrc.c
+libgstrtmp_la_SOURCES = gstrtmpsrc.c gstrtmpsink.c gstrtmp.c
-noinst_HEADERS = gstrtmpsrc.h
+noinst_HEADERS = gstrtmpsrc.h gstrtmpsink.h
libgstrtmp_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(RTMP_CFLAGS)
libgstrtmp_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS) $(RTMP_LIBS)
libgstrtmp_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
diff --git a/ext/rtmp/gstrtmp.c b/ext/rtmp/gstrtmp.c
new file mode 100644
index 000000000..7acbea4a9
--- /dev/null
+++ b/ext/rtmp/gstrtmp.c
@@ -0,0 +1,54 @@
+/* GStreamer
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000 Wim Taymans <wtay@chello.be>
+ * 2002 Kristian Rietveld <kris@gtk.org>
+ * 2002,2003 Colin Walters <walters@gnu.org>
+ * 2001,2010 Bastien Nocera <hadess@hadess.net>
+ * 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * 2010 Jan Schmidt <thaytan@noraisin.net>
+ *
+ * rtmpsrc.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+#include "gstrtmpsrc.h"
+#include "gstrtmpsink.h"
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ gboolean ret;
+
+ ret = gst_element_register (plugin, "rtmpsrc", GST_RANK_PRIMARY,
+ GST_TYPE_RTMP_SRC);
+ ret &= gst_element_register (plugin, "rtmpsink", GST_RANK_PRIMARY,
+ GST_TYPE_RTMP_SINK);
+
+ return ret;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "rtmp",
+ "RTMP source and sink",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
diff --git a/ext/rtmp/gstrtmpsink.c b/ext/rtmp/gstrtmpsink.c
new file mode 100644
index 000000000..e3933b150
--- /dev/null
+++ b/ext/rtmp/gstrtmpsink.c
@@ -0,0 +1,347 @@
+/*
+ * GStreamer
+ * Copyright (C) 2010 Jan Schmidt <thaytan@noraisin.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:element-rtmpsink
+ *
+ * This element delivers data to a streaming server via RTMP. It uses
+ * librtmp, and supports any protocols/urls that librtmp supports.
+ * The URL/location can contain extra connection or session parameters
+ * for librtmp, such as 'flashver=version'. See the librtmp documentation
+ * for more details.
+ *
+ * <refsect2>
+ * <title>Example launch line</title>
+ * |[
+ * gst-launch -v videotestsrc ! ffenc_flv ! flvmux ! rtmpsink location='rtmp://localhost/path/to/stream live=1'
+ * ]| Encode a test video stream to FLV video format and stream it via RTMP.
+ * </refsect2>
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+#include "gstrtmpsink.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtmp_sink_debug);
+#define GST_CAT_DEFAULT gst_rtmp_sink_debug
+
+/* Filter signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_LOCATION
+};
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-flv")
+ );
+
+static void gst_rtmp_sink_uri_handler_init (gpointer g_iface,
+ gpointer iface_data);
+static void gst_rtmp_sink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtmp_sink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static gboolean gst_rtmp_sink_stop (GstBaseSink * sink);
+static gboolean gst_rtmp_sink_start (GstBaseSink * sink);
+static GstFlowReturn gst_rtmp_sink_render (GstBaseSink * sink, GstBuffer * buf);
+
+static void
+_do_init (GType gtype)
+{
+ static const GInterfaceInfo urihandler_info = {
+ gst_rtmp_sink_uri_handler_init,
+ NULL,
+ NULL
+ };
+
+ g_type_add_interface_static (gtype, GST_TYPE_URI_HANDLER, &urihandler_info);
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtmp_sink_debug, "rtmpsink", 0,
+ "RTMP server element");
+}
+
+GST_BOILERPLATE_FULL (GstRTMPSink, gst_rtmp_sink, GstBaseSink,
+ GST_TYPE_BASE_SINK, _do_init);
+
+
+static void
+gst_rtmp_sink_base_init (gpointer klass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ gst_element_class_set_details_simple (element_class,
+ "RTMP output sink",
+ "Sink/Network", "Sends FLV content to a server via RTMP",
+ "Jan Schmidt <thaytan@noraisin.net>");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+}
+
+/* initialize the plugin's class */
+static void
+gst_rtmp_sink_class_init (GstRTMPSinkClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstBaseSinkClass *gstbasesink_class = (GstBaseSinkClass *) klass;
+
+ gobject_class = (GObjectClass *) klass;
+ gobject_class->set_property = gst_rtmp_sink_set_property;
+ gobject_class->get_property = gst_rtmp_sink_get_property;
+
+ gstbasesink_class->start = GST_DEBUG_FUNCPTR (gst_rtmp_sink_start);
+ gstbasesink_class->stop = GST_DEBUG_FUNCPTR (gst_rtmp_sink_stop);
+ gstbasesink_class->render = GST_DEBUG_FUNCPTR (gst_rtmp_sink_render);
+
+ gst_element_class_install_std_props (GST_ELEMENT_CLASS (klass),
+ "location", PROP_LOCATION, G_PARAM_READWRITE, NULL);
+}
+
+/* initialize the new element
+ * initialize instance structure
+ */
+static void
+gst_rtmp_sink_init (GstRTMPSink * sink, GstRTMPSinkClass * klass)
+{
+}
+
+static gboolean
+gst_rtmp_sink_start (GstBaseSink * basesink)
+{
+ GstRTMPSink *sink = GST_RTMP_SINK (basesink);
+
+ if (!sink->uri) {
+ GST_ELEMENT_ERROR (sink, RESOURCE, OPEN_WRITE,
+ ("Please set URI for RTMP output"), ("No URI set before starting"));
+ return FALSE;
+ }
+
+ sink->rtmp_uri = g_strdup (sink->uri);
+ sink->rtmp = RTMP_Alloc ();
+ RTMP_Init (sink->rtmp);
+ if (!RTMP_SetupURL (sink->rtmp, sink->rtmp_uri)) {
+ GST_ELEMENT_ERROR (sink, RESOURCE, OPEN_WRITE, (NULL),
+ ("Failed to setup URL '%s'", sink->uri));
+ RTMP_Free (sink->rtmp);
+ sink->rtmp = NULL;
+ g_free (sink->rtmp_uri);
+ sink->rtmp_uri = NULL;
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (sink, "Created RTMP object");
+
+ /* Mark this as an output connection */
+ RTMP_EnableWrite (sink->rtmp);
+
+ /* open the connection */
+ if (!RTMP_IsConnected (sink->rtmp)) {
+ if (!RTMP_Connect (sink->rtmp, NULL) || !RTMP_ConnectStream (sink->rtmp, 0)) {
+ GST_ELEMENT_ERROR (sink, RESOURCE, OPEN_WRITE, (NULL),
+ ("Could not connect to RTMP stream \"%s\" for writing", sink->uri));
+ RTMP_Free (sink->rtmp);
+ sink->rtmp = NULL;
+ g_free (sink->rtmp_uri);
+ sink->rtmp_uri = NULL;
+ return FALSE;
+ }
+ GST_DEBUG_OBJECT (sink, "Opened connection to %s", sink->rtmp_uri);
+ }
+
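+  /* The next buffer rendered is treated as the FLV stream header and is
+   * cached for concatenation, see gst_rtmp_sink_render() */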
+ sink->first = TRUE;
+
+ return TRUE;
+}
+
+static gboolean
+gst_rtmp_sink_stop (GstBaseSink * basesink)
+{
+ GstRTMPSink *sink = GST_RTMP_SINK (basesink);
+
+ gst_buffer_replace (&sink->cache, NULL);
+
+ if (sink->rtmp) {
+ RTMP_Close (sink->rtmp);
+ RTMP_Free (sink->rtmp);
+ sink->rtmp = NULL;
+ }
+ if (sink->rtmp_uri) {
+ g_free (sink->rtmp_uri);
+ sink->rtmp_uri = NULL;
+ }
+
+ return TRUE;
+}
+
+static GstFlowReturn
+gst_rtmp_sink_render (GstBaseSink * bsink, GstBuffer * buf)
+{
+ GstRTMPSink *sink = GST_RTMP_SINK (bsink);
+ GstBuffer *reffed_buf = NULL;
+
+ if (sink->first) {
+ /* FIXME: Parse the first buffer and see if it contains a header plus a packet instead
+ * of just assuming it's only the header */
+ GST_LOG_OBJECT (sink, "Caching first buffer of size %d for concatenation",
+ GST_BUFFER_SIZE (buf));
+ gst_buffer_replace (&sink->cache, buf);
+ sink->first = FALSE;
+ return GST_FLOW_OK;
+ }
+
+ if (sink->cache) {
+ GST_LOG_OBJECT (sink, "Joining 2nd buffer of size %d to cached buf",
+ GST_BUFFER_SIZE (buf));
+ gst_buffer_ref (buf);
+ reffed_buf = buf = gst_buffer_join (sink->cache, buf);
+ sink->cache = NULL;
+ }
+
+ GST_LOG_OBJECT (sink, "Sending %d bytes to RTMP server",
+ GST_BUFFER_SIZE (buf));
+
+ if (!RTMP_Write (sink->rtmp,
+ (char *) GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf))) {
+ GST_ELEMENT_ERROR (sink, RESOURCE, WRITE, (NULL), ("Failed to write data"));
+ if (reffed_buf)
+ gst_buffer_unref (reffed_buf);
+ return GST_FLOW_ERROR;
+ }
+
+ if (reffed_buf)
+ gst_buffer_unref (reffed_buf);
+
+ return GST_FLOW_OK;
+}
+
+/*
+ * URI interface support.
+ */
+static GstURIType
+gst_rtmp_sink_uri_get_type (void)
+{
+ return GST_URI_SINK;
+}
+
+static gchar **
+gst_rtmp_sink_uri_get_protocols (void)
+{
+ static gchar *protocols[] =
+ { (char *) "rtmp", (char *) "rtmpt", (char *) "rtmps", (char *) "rtmpe",
+ (char *) "rtmfp", (char *) "rtmpte", (char *) "rtmpts", NULL
+ };
+ return protocols;
+}
+
+static const gchar *
+gst_rtmp_sink_uri_get_uri (GstURIHandler * handler)
+{
+ GstRTMPSink *sink = GST_RTMP_SINK (handler);
+
+ return sink->uri;
+}
+
+static gboolean
+gst_rtmp_sink_uri_set_uri (GstURIHandler * handler, const gchar * uri)
+{
+ GstRTMPSink *sink = GST_RTMP_SINK (handler);
+
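+  /* Refuse to change the URI once the sink has started (PAUSED or PLAYING) */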
+ if (GST_STATE (sink) >= GST_STATE_PAUSED)
+ return FALSE;
+
+ g_free (sink->uri);
+ sink->uri = NULL;
+
+ if (uri != NULL) {
+ int protocol;
+ AVal host;
+ unsigned int port;
+ AVal playpath, app;
+
+ if (!RTMP_ParseURL (uri, &protocol, &host, &port, &playpath, &app) ||
+ !host.av_len || !playpath.av_len) {
+ GST_ELEMENT_ERROR (sink, RESOURCE, OPEN_WRITE,
+ ("Failed to parse URI %s", uri), (NULL));
+ return FALSE;
+ }
+ sink->uri = g_strdup (uri);
+ }
+
+ GST_DEBUG_OBJECT (sink, "Changed URI to %s", GST_STR_NULL (uri));
+
+ return TRUE;
+}
+
+static void
+gst_rtmp_sink_uri_handler_init (gpointer g_iface, gpointer iface_data)
+{
+ GstURIHandlerInterface *iface = (GstURIHandlerInterface *) g_iface;
+
+ iface->get_type = gst_rtmp_sink_uri_get_type;
+ iface->get_protocols = gst_rtmp_sink_uri_get_protocols;
+ iface->get_uri = gst_rtmp_sink_uri_get_uri;
+ iface->set_uri = gst_rtmp_sink_uri_set_uri;
+}
+
+static void
+gst_rtmp_sink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRTMPSink *sink = GST_RTMP_SINK (object);
+
+ switch (prop_id) {
+ case PROP_LOCATION:
+ gst_rtmp_sink_uri_set_uri (GST_URI_HANDLER (sink),
+ g_value_get_string (value));
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_rtmp_sink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRTMPSink *sink = GST_RTMP_SINK (object);
+
+ switch (prop_id) {
+ case PROP_LOCATION:
+ g_value_set_string (value, sink->uri);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/ext/rtmp/gstrtmpsink.h b/ext/rtmp/gstrtmpsink.h
new file mode 100644
index 000000000..cb9315ebb
--- /dev/null
+++ b/ext/rtmp/gstrtmpsink.h
@@ -0,0 +1,68 @@
+/*
+ * GStreamer
+ * Copyright (C) 2010 Jan Schmidt <thaytan@noraisin.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_RTMP_SINK_H__
+#define __GST_RTMP_SINK_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbasesink.h>
+
+#include <librtmp/rtmp.h>
+#include <librtmp/log.h>
+#include <librtmp/amf.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTMP_SINK \
+ (gst_rtmp_sink_get_type())
+#define GST_RTMP_SINK(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTMP_SINK,GstRTMPSink))
+#define GST_RTMP_SINK_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTMP_SINK,GstRTMPSinkClass))
+#define GST_IS_RTMP_SINK(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTMP_SINK))
+#define GST_IS_RTMP_SINK_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTMP_SINK))
+
+typedef struct _GstRTMPSink GstRTMPSink;
+typedef struct _GstRTMPSinkClass GstRTMPSinkClass;
+
+struct _GstRTMPSink {
+ GstBaseSink parent;
+
+ /* < private > */
+ gchar *uri;
+
+ RTMP *rtmp;
+ gchar *rtmp_uri; /* copy of url for librtmp */
+
+ GstBuffer *cache; /* Cached buffer */
+ gboolean first;
+};
+
+struct _GstRTMPSinkClass {
+ GstBaseSinkClass parent_class;
+};
+
+GType gst_rtmp_sink_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTMP_SINK_H__ */
diff --git a/ext/rtmp/gstrtmpsrc.c b/ext/rtmp/gstrtmpsrc.c
index 2376ccef1..e37ac06b7 100644
--- a/ext/rtmp/gstrtmpsrc.c
+++ b/ext/rtmp/gstrtmpsrc.c
@@ -98,6 +98,8 @@ _do_init (GType gtype)
};
g_type_add_interface_static (gtype, GST_TYPE_URI_HANDLER, &urihandler_info);
+
+ GST_DEBUG_CATEGORY_INIT (rtmpsrc_debug, "rtmpsrc", 0, "RTMP Source");
}
GST_BOILERPLATE_FULL (GstRTMPSrc, gst_rtmp_src, GstPushSrc, GST_TYPE_PUSH_SRC,
@@ -581,18 +583,3 @@ gst_rtmp_src_stop (GstBaseSrc * basesrc)
return TRUE;
}
-
-static gboolean
-plugin_init (GstPlugin * plugin)
-{
- GST_DEBUG_CATEGORY_INIT (rtmpsrc_debug, "rtmpsrc", 0, "RTMP Source");
-
- return gst_element_register (plugin, "rtmpsrc", GST_RANK_PRIMARY,
- GST_TYPE_RTMP_SRC);
-}
-
-GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
- GST_VERSION_MINOR,
- "rtmpsrc",
- "RTMP source",
- plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
diff --git a/ext/schroedinger/gstschrodec.c b/ext/schroedinger/gstschrodec.c
index ab2c45a2c..126ef1fed 100644
--- a/ext/schroedinger/gstschrodec.c
+++ b/ext/schroedinger/gstschrodec.c
@@ -78,10 +78,6 @@ enum
};
static void gst_schro_dec_finalize (GObject * object);
-static void gst_schro_dec_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_schro_dec_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
static gboolean gst_schro_dec_sink_query (GstPad * pad, GstQuery * query);
@@ -137,8 +133,6 @@ gst_schro_dec_class_init (GstSchroDecClass * klass)
gobject_class = G_OBJECT_CLASS (klass);
base_video_decoder_class = GST_BASE_VIDEO_DECODER_CLASS (klass);
- gobject_class->set_property = gst_schro_dec_set_property;
- gobject_class->get_property = gst_schro_dec_get_property;
gobject_class->finalize = gst_schro_dec_finalize;
base_video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_schro_dec_start);
@@ -172,21 +166,16 @@ static gint64
granulepos_to_frame (gint64 granulepos)
{
guint64 pt;
- int dist_h;
- int dist_l;
- int dist;
- int delay;
- guint64 dt;
if (granulepos == -1)
return -1;
pt = ((granulepos >> 22) + (granulepos & OGG_DIRAC_GRANULE_LOW_MASK)) >> 9;
- dist_h = (granulepos >> 22) & 0xff;
- dist_l = granulepos & 0xff;
- dist = (dist_h << 8) | dist_l;
- delay = (granulepos >> 9) & 0x1fff;
- dt = pt - delay;
+ /* dist_h = (granulepos >> 22) & 0xff;
+ * dist_l = granulepos & 0xff;
+ * dist = (dist_h << 8) | dist_l;
+ * delay = (granulepos >> 9) & 0x1fff;
+ * dt = pt - delay; */
return pt >> 1;
}
@@ -309,38 +298,6 @@ gst_schro_dec_finalize (GObject * object)
}
static void
-gst_schro_dec_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec)
-{
- GstSchroDec *src;
-
- g_return_if_fail (GST_IS_SCHRO_DEC (object));
- src = GST_SCHRO_DEC (object);
-
- GST_DEBUG ("gst_schro_dec_set_property");
- switch (prop_id) {
- default:
- break;
- }
-}
-
-static void
-gst_schro_dec_get_property (GObject * object, guint prop_id, GValue * value,
- GParamSpec * pspec)
-{
- GstSchroDec *src;
-
- g_return_if_fail (GST_IS_SCHRO_DEC (object));
- src = GST_SCHRO_DEC (object);
-
- switch (prop_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
-
-static void
parse_sequence_header (GstSchroDec * schro_dec, guint8 * data, int size)
{
SchroVideoFormat video_format;
@@ -642,7 +599,6 @@ gst_schro_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
GstVideoFrame * frame)
{
GstSchroDec *schro_dec;
- int schro_ret;
SchroBuffer *input_buffer;
schro_dec = GST_SCHRO_DEC (base_video_decoder);
@@ -654,7 +610,7 @@ gst_schro_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
input_buffer->tag = schro_tag_new (frame, NULL);
- schro_ret = schro_decoder_autoparse_push (schro_dec->decoder, input_buffer);
+ schro_decoder_autoparse_push (schro_dec->decoder, input_buffer);
return gst_schro_dec_process (schro_dec, FALSE);
}
diff --git a/ext/schroedinger/gstschroenc.c b/ext/schroedinger/gstschroenc.c
index 16a3af95a..669a61264 100644
--- a/ext/schroedinger/gstschroenc.c
+++ b/ext/schroedinger/gstschroenc.c
@@ -24,6 +24,7 @@
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstbasevideoencoder.h>
+#include <gst/video/gstbasevideoutils.h>
#include <string.h>
#include <schroedinger/schro.h>
@@ -107,7 +108,8 @@ static gboolean gst_schro_enc_set_format (GstBaseVideoEncoder *
base_video_encoder, GstVideoState * state);
static gboolean gst_schro_enc_start (GstBaseVideoEncoder * base_video_encoder);
static gboolean gst_schro_enc_stop (GstBaseVideoEncoder * base_video_encoder);
-static gboolean gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder);
+static GstFlowReturn gst_schro_enc_finish (GstBaseVideoEncoder *
+ base_video_encoder);
static GstFlowReturn gst_schro_enc_handle_frame (GstBaseVideoEncoder *
base_video_encoder, GstVideoFrame * frame);
static GstFlowReturn gst_schro_enc_shape_output (GstBaseVideoEncoder *
@@ -439,7 +441,7 @@ gst_schro_enc_stop (GstBaseVideoEncoder * base_video_encoder)
return TRUE;
}
-static gboolean
+static GstFlowReturn
gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder)
{
GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder);
@@ -449,7 +451,7 @@ gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder)
schro_encoder_end_of_stream (schro_enc->encoder);
gst_schro_enc_process (schro_enc);
- return TRUE;
+ return GST_FLOW_OK;
}
static GstFlowReturn
@@ -612,7 +614,6 @@ gst_schro_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
{
GstSchroEnc *schro_enc;
- int dpn;
int delay;
int dist;
int pt;
@@ -623,8 +624,6 @@ gst_schro_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder,
schro_enc = GST_SCHRO_ENC (base_video_encoder);
- dpn = frame->decode_frame_number;
-
pt = frame->presentation_frame_number * 2 + schro_enc->granule_offset;
dt = frame->decode_frame_number * 2 + schro_enc->granule_offset;
delay = pt - dt;
diff --git a/ext/sndfile/gstsfsrc.c b/ext/sndfile/gstsfsrc.c
index f725d3f5b..226f54085 100644
--- a/ext/sndfile/gstsfsrc.c
+++ b/ext/sndfile/gstsfsrc.c
@@ -200,7 +200,10 @@ gst_sf_src_create (GstBaseSrc * bsrc, guint64 offset, guint length,
{
GstSFSrc *this;
GstBuffer *buf;
+/* FIXME discont is set but not used */
+#if 0
gboolean discont = FALSE;
+#endif
sf_count_t bytes_read;
this = GST_SF_SRC (bsrc);
@@ -221,7 +224,9 @@ gst_sf_src_create (GstBaseSrc * bsrc, guint64 offset, guint length,
goto seek_failed;
this->offset = offset;
+#if 0
discont = TRUE;
+#endif
}
buf = gst_buffer_new_and_alloc (length);
diff --git a/ext/timidity/gsttimidity.c b/ext/timidity/gsttimidity.c
index 997b0b20b..cbbcc0da1 100644
--- a/ext/timidity/gsttimidity.c
+++ b/ext/timidity/gsttimidity.c
@@ -108,12 +108,9 @@ gst_timidity_base_init (gpointer gclass)
static void
gst_timidity_class_init (GstTimidityClass * klass)
{
- GObjectClass *gobject_class;
GstElementClass *gstelement_class;
- gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
-
gstelement_class->change_state = gst_timidity_change_state;
}
diff --git a/ext/timidity/gstwildmidi.c b/ext/timidity/gstwildmidi.c
index 4d5c0e36d..6def9f946 100644
--- a/ext/timidity/gstwildmidi.c
+++ b/ext/timidity/gstwildmidi.c
@@ -443,7 +443,10 @@ gst_wildmidi_do_seek (GstWildmidi * wildmidi, GstEvent * event)
GstSeekFlags flags;
GstSeekType start_type, stop_type;
gint64 start, stop;
- gboolean flush, update, accurate;
+ gboolean flush, update;
+#ifdef HAVE_WILDMIDI_0_2_2
+ gboolean accurate;
+#endif
gboolean res;
unsigned long int sample;
GstSegment *segment;
@@ -472,7 +475,9 @@ gst_wildmidi_do_seek (GstWildmidi * wildmidi, GstEvent * event)
return res;
flush = ((flags & GST_SEEK_FLAG_FLUSH) == GST_SEEK_FLAG_FLUSH);
+#ifdef HAVE_WILDMIDI_0_2_2
accurate = ((flags & GST_SEEK_FLAG_ACCURATE) == GST_SEEK_FLAG_ACCURATE);
+#endif
if (flush) {
GST_DEBUG ("performing flush");
diff --git a/ext/vp8/gstvp8dec.c b/ext/vp8/gstvp8dec.c
index a945717be..4376f4be7 100644
--- a/ext/vp8/gstvp8dec.c
+++ b/ext/vp8/gstvp8dec.c
@@ -405,8 +405,10 @@ gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder, GstVideoFrame * frame)
state->width = stream_info.w;
state->height = stream_info.h;
state->format = GST_VIDEO_FORMAT_I420;
- state->par_n = 1;
- state->par_d = 1;
+ if (state->par_n == 0 || state->par_d == 0) {
+ state->par_n = 1;
+ state->par_d = 1;
+ }
gst_vp8_dec_send_tags (dec);
gst_base_video_decoder_set_src_caps (decoder);
diff --git a/ext/vp8/gstvp8enc.c b/ext/vp8/gstvp8enc.c
index ad6a282be..e3903d97f 100644
--- a/ext/vp8/gstvp8enc.c
+++ b/ext/vp8/gstvp8enc.c
@@ -65,6 +65,24 @@ typedef struct
GList *invisible;
} GstVP8EncCoderHook;
+static void
+_gst_mini_object_unref0 (GstMiniObject * obj)
+{
+ if (obj)
+ gst_mini_object_unref (obj);
+}
+
+static void
+gst_vp8_enc_coder_hook_free (GstVP8EncCoderHook * hook)
+{
+ if (hook->image)
+ g_slice_free (vpx_image_t, hook->image);
+
+ g_list_foreach (hook->invisible, (GFunc) _gst_mini_object_unref0, NULL);
+ g_list_free (hook->invisible);
+ g_slice_free (GstVP8EncCoderHook, hook);
+}
+
#define DEFAULT_BITRATE 0
#define DEFAULT_MODE VPX_VBR
#define DEFAULT_MIN_QUANTIZER 0
@@ -283,7 +301,7 @@ gst_vp8_enc_class_init (GstVP8EncClass * klass)
g_object_class_install_property (gobject_class, PROP_SPEED,
g_param_spec_int ("speed", "Speed",
"Speed",
- 0, 2, DEFAULT_SPEED,
+ 0, 7, DEFAULT_SPEED,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_THREADS,
@@ -586,7 +604,9 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
return FALSE;
}
- status = vpx_codec_control (&encoder->encoder, VP8E_SET_CPUUSED, 0);
+ /* FIXME move this to a set_speed() function */
+ status = vpx_codec_control (&encoder->encoder, VP8E_SET_CPUUSED,
+ (encoder->speed == 0) ? 0 : (encoder->speed - 1));
if (status != VPX_CODEC_OK) {
GST_WARNING_OBJECT (encoder, "Failed to set VP8E_SET_CPUUSED to 0: %s",
gst_vpx_error_name (status));
@@ -779,7 +799,7 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
return ret;
}
-static gboolean
+static GstFlowReturn
gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder)
{
GstVP8Enc *encoder;
@@ -796,7 +816,7 @@ gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder)
if (status != 0) {
GST_ERROR_OBJECT (encoder, "encode returned %d %s", status,
gst_vpx_error_name (status));
- return FALSE;
+ return GST_FLOW_ERROR;
}
/* dispatch remaining frames */
@@ -815,7 +835,7 @@ gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder)
}
}
- return TRUE;
+ return GST_FLOW_OK;
}
static vpx_image_t *
@@ -823,9 +843,6 @@ gst_vp8_enc_buffer_to_image (GstVP8Enc * enc, GstBuffer * buffer)
{
vpx_image_t *image = g_slice_new (vpx_image_t);
guint8 *data = GST_BUFFER_DATA (buffer);
- const GstVideoState *state;
-
- state = gst_base_video_encoder_get_state (GST_BASE_VIDEO_ENCODER (enc));
memcpy (image, &enc->image, sizeof (*image));
@@ -837,12 +854,6 @@ gst_vp8_enc_buffer_to_image (GstVP8Enc * enc, GstBuffer * buffer)
return image;
}
-static const int speed_table[] = {
- VPX_DL_BEST_QUALITY,
- VPX_DL_GOOD_QUALITY,
- VPX_DL_REALTIME,
-};
-
static GstFlowReturn
gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
@@ -853,6 +864,7 @@ gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
int flags = 0;
vpx_image_t *image;
GstVP8EncCoderHook *hook;
+ int quality;
GST_DEBUG_OBJECT (base_video_encoder, "handle_frame");
@@ -869,13 +881,17 @@ gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
hook = g_slice_new0 (GstVP8EncCoderHook);
hook->image = image;
frame->coder_hook = hook;
+ frame->coder_hook_destroy_notify =
+ (GDestroyNotify) gst_vp8_enc_coder_hook_free;
if (frame->force_keyframe) {
flags |= VPX_EFLAG_FORCE_KF;
}
+ quality = (encoder->speed == 0) ? VPX_DL_BEST_QUALITY : VPX_DL_GOOD_QUALITY;
+
status = vpx_codec_encode (&encoder->encoder, image,
- encoder->n_frames, 1, flags, speed_table[encoder->speed]);
+ encoder->n_frames, 1, flags, quality);
if (status != 0) {
GST_ELEMENT_ERROR (encoder, LIBRARY, ENCODE,
("Failed to encode frame"), ("%s", gst_vpx_error_name (status)));
@@ -900,13 +916,6 @@ _to_granulepos (guint64 frame_end_number, guint inv_count, guint keyframe_dist)
return granulepos;
}
-static void
-_gst_mini_object_unref0 (GstMiniObject * obj)
-{
- if (obj)
- gst_mini_object_unref (obj);
-}
-
static GstFlowReturn
gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
@@ -939,6 +948,8 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
encoder->keyframe_distance++;
}
+ GST_BUFFER_TIMESTAMP (buf) = GST_BUFFER_TIMESTAMP (frame->src_buffer);
+ GST_BUFFER_DURATION (buf) = 0;
GST_BUFFER_OFFSET_END (buf) =
_to_granulepos (frame->presentation_frame_number + 1,
inv_count, encoder->keyframe_distance);
@@ -980,13 +991,6 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
}
done:
- if (hook) {
- g_list_foreach (hook->invisible, (GFunc) _gst_mini_object_unref0, NULL);
- g_list_free (hook->invisible);
- g_slice_free (GstVP8EncCoderHook, hook);
- frame->coder_hook = NULL;
- }
-
return ret;
}
diff --git a/ext/zbar/gstzbar.c b/ext/zbar/gstzbar.c
index 9ffb8a052..cdeb89889 100644
--- a/ext/zbar/gstzbar.c
+++ b/ext/zbar/gstzbar.c
@@ -277,7 +277,7 @@ gst_zbar_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
{
GstZBar *zbar = GST_ZBAR (base);
guint8 *data;
- guint size, rowstride;
+ guint rowstride;
zbar_image_t *image;
const zbar_symbol_t *symbol;
int n;
@@ -286,7 +286,6 @@ gst_zbar_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
goto done;
data = GST_BUFFER_DATA (outbuf);
- size = GST_BUFFER_SIZE (outbuf);
image = zbar_image_create ();