author     Ronald S. Bultje <rbultje@ronald.bitfreak.net>  2004-03-01 04:59:17 +0000
committer  Ronald S. Bultje <rbultje@ronald.bitfreak.net>  2004-03-01 04:59:17 +0000
commit     7b63c14e2bb2908baae0b06724d1a7623f642545 (patch)
tree       bf44cbc7b87eebeefbd3d298e70ec3b668e1120b
parent     db029240a889988e508b860d86192e6a3c9893fe (diff)
HACKING: Add some basic documentation on how our wrapping works.
Original commit message from CVS:
* HACKING:
  Add some basic documentation on how our wrapping works.
* TODO:
  Add a list of things that could be worked on or that need doing.
* configure.ac:
  Update snapshot.
* ext/ffmpeg/Makefile.am:
  Change .la links. See below (autotools patch).
* ext/ffmpeg/gstffmpeg.c: (plugin_init):
  Enable demuxers. See below (gstffmpegdemux.c).
* ext/ffmpeg/gstffmpegcodecmap.c: (gst_ffmpeg_formatid_to_caps):
  Realmedia caused a crash - fix that.
* ext/ffmpeg/gstffmpegdemux.c: (gst_ffmpegdemux_averror),
  (gst_ffmpegdemux_base_init), (gst_ffmpegdemux_init),
  (gst_ffmpegdemux_close), (gst_ffmpegdemux_dispose),
  (gst_ffmpegdemux_stream_from_pad),
  (gst_ffmpegdemux_src_event_mask), (gst_ffmpegdemux_src_event),
  (gst_ffmpegdemux_src_format_list),
  (gst_ffmpegdemux_src_query_list), (gst_ffmpegdemux_src_query),
  (gst_ffmpegdemux_src_convert), (gst_ffmpegdemux_add),
  (gst_ffmpegdemux_open), (gst_ffmpegdemux_loop),
  (gst_ffmpegdemux_change_state), (gst_ffmpegdemux_register):
  Right. OK, so I fixed up the demuxing and have it basically-working,
  and the best way to get some more people to test it is to actually
  enable it. I'm not sure if we want this for 0.8.0, but we can at
  least give it a try. I've tested avi, matroska and mpeg, all appear
  to work. The cool thing is that this gives us instant support for
  several exotic formats that we'd never care about ourselves. Again,
  this needs more testing for it to still be enabled in 0.8.0, but I
  want to give it a try...
* ext/ffmpeg/gstffmpegmux.c: (gst_ffmpegmux_base_init),
  (gst_ffmpegmux_init), (gst_ffmpegmux_request_new_pad),
  (gst_ffmpegmux_connect), (gst_ffmpegmux_loop),
  (gst_ffmpegmux_register):
  Add some fixups that I use locally. Make it work in the case of
  MPEG encoding, but the muxer is still not in shape to be enabled.
* ext/ffmpeg/gstffmpegprotocol.c: (gst_ffmpegdata_open),
  (gst_ffmpegdata_read), (gst_ffmpegdata_write),
  (gst_ffmpegdata_seek), (gst_ffmpegdata_close):
  Some small fixups that crept into it while it was disabled for the
  last few years. Basically works.
* gst-libs/ext/ffmpeg/Makefile.am:
  Instead of having our local-autotoolized version, I patch the ffmpeg
  source to be fully autotoolized. That means a simple SUBDIRS here
  is now enough.
* gst-libs/ext/ffmpeg/Tag:
  Version update.
* gst-libs/ext/ffmpeg/patch/autotools.diff:
  Autotoolize ffmpeg. Needs to be sent to ffmpeg-devel@...
* gst-libs/ext/ffmpeg/patch/disableinstalllibs.diff:
  Don't install their libs.
* gst-libs/ext/ffmpeg/patch/disablemmx.diff:
  Don't use MMX. It cannot compile using PIC.
* gst-libs/ext/ffmpeg/patch/disabletools.diff:
  Don't compile/install their tools, we don't use them.
* gst-libs/ext/ffmpeg/patch/functions.diff:
  Prevent symbol conflicts.
* gst-libs/ext/ffmpeg/patch/matroska.diff:
  Add a matroska demuxer. Needs to be sent to ffmpeg-devel@...
-rw-r--r--  ChangeLog                         62
-rw-r--r--  HACKING                           62
-rw-r--r--  TODO                              20
-rw-r--r--  configure.ac                       4
-rw-r--r--  ext/ffmpeg/Makefile.am             4
-rw-r--r--  ext/ffmpeg/gstffmpeg.c             4
-rw-r--r--  ext/ffmpeg/gstffmpegcodecmap.c     9
-rw-r--r--  ext/ffmpeg/gstffmpegdemux.c      563
-rw-r--r--  ext/ffmpeg/gstffmpegmux.c        170
-rw-r--r--  ext/ffmpeg/gstffmpegprotocol.c   111
10 files changed, 731 insertions, 278 deletions
diff --git a/ChangeLog b/ChangeLog
index 10797d5..4a43730 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,4 +1,64 @@
-=== gst-ffmpeg ===
+2004-02-29 Ronald Bultje <rbultje@ronald.bitfreak.net>
+
+ * HACKING:
+ Add some basic documentation on how our wrapping works.
+ * TODO:
+ Add a list of things that could be worked on or that need doing.
+ * configure.ac:
+ Update snapshot.
+ * ext/ffmpeg/Makefile.am:
+ Change .la links. See below (autotools patch).
+ * ext/ffmpeg/gstffmpeg.c: (plugin_init):
+ Enable demuxers. See below (gstffmpegdemux.c).
+ * ext/ffmpeg/gstffmpegcodecmap.c: (gst_ffmpeg_formatid_to_caps):
+ Realmedia caused a crash - fix that.
+ * ext/ffmpeg/gstffmpegdemux.c: (gst_ffmpegdemux_averror),
+ (gst_ffmpegdemux_base_init), (gst_ffmpegdemux_init),
+ (gst_ffmpegdemux_close), (gst_ffmpegdemux_dispose),
+ (gst_ffmpegdemux_stream_from_pad),
+ (gst_ffmpegdemux_src_event_mask), (gst_ffmpegdemux_src_event),
+ (gst_ffmpegdemux_src_format_list),
+ (gst_ffmpegdemux_src_query_list), (gst_ffmpegdemux_src_query),
+ (gst_ffmpegdemux_src_convert), (gst_ffmpegdemux_add),
+ (gst_ffmpegdemux_open), (gst_ffmpegdemux_loop),
+ (gst_ffmpegdemux_change_state), (gst_ffmpegdemux_register):
+ Right. OK, so I fixed up the demuxing and have it basically-working,
+ and the best way to get some more people to test it is to actually
+ enable it. I'm not sure if we want this for 0.8.0, but we can at
+ least give it a try. I've tested avi, matroska and mpeg, all appear
+ to work. The cool thing is that this gives us instant support for
+ several exotic formats that we'd never care about ourselves. Again,
+ this needs more testing for it to still be enabled in 0.8.0, but I
+ want to give it a try...
+ * ext/ffmpeg/gstffmpegmux.c: (gst_ffmpegmux_base_init),
+ (gst_ffmpegmux_init), (gst_ffmpegmux_request_new_pad),
+ (gst_ffmpegmux_connect), (gst_ffmpegmux_loop),
+ (gst_ffmpegmux_register):
+ Add some fixups that I use locally. Make it work in the case of
+ MPEG encoding, but the muxer is still not in shape to be enabled.
+ * ext/ffmpeg/gstffmpegprotocol.c: (gst_ffmpegdata_open),
+ (gst_ffmpegdata_read), (gst_ffmpegdata_write),
+ (gst_ffmpegdata_seek), (gst_ffmpegdata_close):
+ Some small fixups that crept into it while it was disabled for the
+ last few years. Basically works.
+ * gst-libs/ext/ffmpeg/Makefile.am:
+ Instead of having our local-autotoolized version, I patch the ffmpeg
+ source to be fully autotoolized. That means a simple SUBDIRS here
+ is now enough.
+ * gst-libs/ext/ffmpeg/Tag:
+ Version update.
+ * gst-libs/ext/ffmpeg/patch/autotools.diff:
+ Autotoolize ffmpeg. Needs to be sent to ffmpeg-devel@...
+ * gst-libs/ext/ffmpeg/patch/disableinstalllibs.diff:
+ Don't install their libs.
+ * gst-libs/ext/ffmpeg/patch/disablemmx.diff:
+ Don't use MMX. It cannot compile using PIC.
+ * gst-libs/ext/ffmpeg/patch/disabletools.diff:
+ Don't compile/install their tools, we don't use them.
+ * gst-libs/ext/ffmpeg/patch/functions.diff:
+ Prevent symbol conflicts.
+ * gst-libs/ext/ffmpeg/patch/matroska.diff:
+ Add a matroska demuxer. Needs to be sent to ffmpeg-devel@...
2004-02-26 Thomas Vander Stichele <thomas at apestaart dot org>
diff --git a/HACKING b/HACKING
index ba064f6..97cadaa 100644
--- a/HACKING
+++ b/HACKING
@@ -34,3 +34,65 @@ Axioms under which we work:
- it would be very nice if, on update of either the Tag file or the patch set,
make would know exactly what to do with it.
+Some notes on how ffmpeg wrapping inside GStreamer currently works:
+* gstffmpeg{dec,enc,demux,mux}.c are wrappers for specific element types from
+ their ffmpeg counterpart. If you want to wrap a new type of element in
+ ffmpeg (e.g. the URLProtocol things), then you'd need to write a new
+ wrapper file.
+
+* gstffmpegcolorspace.c is a wrapper for one specific function in ffmpeg:
+  colorspace conversion. This works differently from the previously mentioned
+ ones, and we'll come to that in the next item. If you want to wrap one
+ specific function, then that, too, belongs in a new wrapper file.
+
+* the important difference between all those is that the colorspace wrapper
+  contains only one element, so there is a 1<->1 mapping. This makes for a fairly
+ basic element implementation. gstffmpegcolorspace.c, therefore, doesn't
+ differ much from other colorspace elements. The ffmpeg element types,
+ however, define a whole *list* of elements (in GStreamer, each decoder etc.
+ needs to be its own element). We use a set of tricks for that to keep
+ coding simple: codec mapping and dynamic type creation.
+
+* ffmpeg uses CODEC_ID_* enumerations for its codecs. GStreamer uses caps,
+  which consist of a mimetype and a defined set of properties. In ffmpeg,
+  these properties live in an AVCodecContext struct, which contains anything
+  that could configure any codec (which makes it rather messy, but oh well).
+ To convert from one to the other, we use codec mapping, which is done in
+ gstffmpegcodecmap.[ch]. This is the most important file in the whole
+ ffmpeg wrapping process! It contains functions to go from a codec type
+ (video or audio - used as the output format for decoding or the input
+ format for encoding), a codec id (to identify each format) or a format id
+ (a string identifying a file format - usually the file format extension)
+ to a GstCaps, and the other way around.
+
+* to define multiple elements in one source file (which all behave similarly),
+ we dynamically create types for each plugin and let all of them operate on
+ the same struct (GstFFMpegDec, GstFFMpegEnc, ...). The functions in
+ gstffmpeg{dec,enc,demux,mux}.c called gst_ffmpeg*_register() do this.
+ The magic is as follows: for each codec or format, ffmpeg has a single
+ AVCodec or AV{Input,Output}Format, which are packed together in a list of
+  supported codecs/formats. We simply walk through the list and, for each
+  of those, check whether gstffmpegcodecmap.c knows about it. If it does,
+  we get the GstCaps for each pad template that belongs to it and register
+  a type for all of those together. We also leave this in a caching struct
+  that will later be used by the base_init() function to
+ fill in information about this specific codec in the class struct of this
+ element (pad templates and codec/format information). Since the actual
+ codec information is the only thing that really makes each codec/format
+ different (they all behave the same through the ffmpeg API), we don't
+ really need to do anything else that is codec-specific, so all other
+ functions are rather simple.
+
+* one particular thing that needs mention is how gstffmpeg{mux,demux}.c and
+ gstffmpegprotocol.c interoperate. ffmpeg uses URLProtocols for data input
+ and output. Now, of course, we want to use the *GStreamer* way of doing
+ input and output (filesrc, ...) rather than the ffmpeg way. Therefore, we
+ wrap up a GstPad as a URLProtocol and register this with ffmpeg. This is
+ what gstffmpegprotocol.c does. The URL is called gstreamer://%p, where %p
+ is the address of a GstPad. gstffmpeg{mux,demux}.c then open a file called
+ gstreamer://%p, with %p being their source/sink pad, respectively. This
+ way, we use GStreamer for data input/output through the ffmpeg API. It's
+ rather ugly, but it has worked quite well so far.
+
+* there are lots of things that still need doing. See the TODO file for more
+ information.
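[Editor's note] A rough sketch of the register trick described in the HACKING text above. This is not the actual gst_ffmpeg*_register() code from this commit (that follows later in the diff); the names SketchClassParams, sketch_plugins and sketch_register_decoders are made up for illustration, the surrounding GTypeInfo/class_init boilerplate is assumed to exist, and the usual GStreamer 0.8 and ffmpeg headers are assumed to be included.

typedef struct {
  AVCodec *in_plugin;    /* the ffmpeg codec this element will wrap */
  GstCaps *srccaps;      /* caps derived through gstffmpegcodecmap.c */
} SketchClassParams;

static GHashTable *sketch_plugins;   /* GType -> SketchClassParams */

static gboolean
sketch_register_decoders (GstPlugin *plugin, const GTypeInfo *typeinfo)
{
  AVCodec *in_codec;

  if (!sketch_plugins)
    sketch_plugins = g_hash_table_new (NULL, NULL);

  for (in_codec = first_avcodec; in_codec != NULL; in_codec = in_codec->next) {
    GstCaps *caps = gst_ffmpeg_codecid_to_caps (in_codec->id, NULL);
    gchar *type_name;
    GType type;
    SketchClassParams *params;

    /* the codec map doesn't know this codec, so we can't wrap it */
    if (!caps)
      continue;

    type_name = g_strdup_printf ("ffdec_%s", in_codec->name);
    if (g_type_from_name (type_name)) {   /* already registered */
      g_free (type_name);
      continue;
    }

    /* cache the codec-specific bits; base_init() later looks them up by
     * GType to fill in pad templates and element details for this codec
     * (the real code also stashes them under a temporary key first) */
    params = g_new0 (SketchClassParams, 1);
    params->in_plugin = in_codec;
    params->srccaps = caps;

    /* one dynamically registered GType per codec, all sharing the same
     * instance struct and the same class_init()/init() functions */
    type = g_type_register_static (GST_TYPE_ELEMENT, type_name, typeinfo, 0);
    g_hash_table_insert (sketch_plugins, GINT_TO_POINTER (type), params);

    gst_element_register (plugin, type_name, GST_RANK_MARGINAL, type);
    g_free (type_name);
  }

  return TRUE;
}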
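[Editor's note] And a minimal sketch of the gstreamer://%p protocol trick from the mux/demux item above. sketch_open_demuxer is an illustrative name; the real version is gst_ffmpegdemux_open() further down in this commit, and the URLProtocol callbacks that turn the fake URL back into pad I/O live in gstffmpegprotocol.c.

static gboolean
sketch_open_demuxer (GstFFMpegDemux *demux, AVInputFormat *in_plugin)
{
  gchar *location;
  gint err;

  /* print the sink pad's address into a fake URL; gst_ffmpegdata_open()
   * parses the pointer back out and wraps the pad in a GstByteStream */
  location = g_strdup_printf ("gstreamer://%p", demux->sinkpad);
  err = av_open_input_file (&demux->context, location, in_plugin, 0, NULL);
  g_free (location);

  /* from here on, every read ffmpeg does on this context goes through
   * the registered "gstreamer" URLProtocol, i.e. through the GstPad */
  return (err >= 0);
}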
diff --git a/TODO b/TODO
new file mode 100644
index 0000000..3ea31a5
--- /dev/null
+++ b/TODO
@@ -0,0 +1,20 @@
+The never-ending story of new features:
+* add more codecs into our codec map
+* encoding/decoding support lacks:
+ - event handling (particularly discont/flush and EOS)
+ - prevent data copying
+* demux/mux support lacks:
+ - good testing of exotic formats
+ - correct caps sets on the pad templates
+ - event handling in the loop function (mux)
+ - prevent data copying
+* some sort of codectype-fallback, so that we still register a codec plus its
+ mimetype even if we don't have a defined gst-type for it
+* ffvideoscale && other filter elements
+* can we wrap URLProtocol as a source/sink?
+* propagate options like --disable-ffplay (and server and ffmpeg) to ffmpeg
+ source tree build directly in AC_CONFIG_SUBDIRS instead of the
+ 'disabletools.diff' patch
+
+If you have cool ideas, add them here or contact the mailing list:
+<gstreamer-devel@lists.sf.net>
diff --git a/configure.ac b/configure.ac
index a8b5dc8..7bfdd6c 100644
--- a/configure.ac
+++ b/configure.ac
@@ -83,7 +83,7 @@ GST_CHECK_FEATURE(FFMPEG, [ffmpeg plug-ins], ffmpeg, [
# prerelease and release should get it disted
if test "x$GST_PLUGINS_VERSION_NANO" = x1; then
AC_MSG_NOTICE(slurping FFmpeg CVS source)
- AS_SLURP_FFMPEG(gst-libs/ext/ffmpeg, 2003-10-26 10:00 GMT,
+ AS_SLURP_FFMPEG(gst-libs/ext/ffmpeg, 2004-02-29 20:00 GMT,
HAVE_FFMPEG=yes, HAVE_FFMPEG=no)
else
AC_MSG_NOTICE(FFmpeg CVS code should be included already)
@@ -100,7 +100,7 @@ GST_ARCH()
dnl ###########################
dnl # Configure external libs #
dnl ###########################
-if test "x$HAVE_FFMPEG" = xyes; then
+if test "x$HAVE_FFMPEG" = "xyes"; then
AC_CONFIG_SUBDIRS(gst-libs/ext/ffmpeg/ffmpeg)
fi
diff --git a/ext/ffmpeg/Makefile.am b/ext/ffmpeg/Makefile.am
index b7dcd4f..2964be8 100644
--- a/ext/ffmpeg/Makefile.am
+++ b/ext/ffmpeg/Makefile.am
@@ -14,8 +14,8 @@ libgstffmpeg_la_CFLAGS = $(GST_CFLAGS) \
-I $(top_srcdir)/gst-libs/ext/ffmpeg/ffmpeg/libavcodec \
-I $(top_srcdir)/gst-libs/ext/ffmpeg/ffmpeg/libavformat
libgstffmpeg_la_LIBADD = \
- $(top_builddir)/gst-libs/ext/ffmpeg/libavcodec.la \
- $(top_builddir)/gst-libs/ext/ffmpeg/libavformat.la
+ $(top_builddir)/gst-libs/ext/ffmpeg/ffmpeg/libavcodec/libavcodec.la \
+ $(top_builddir)/gst-libs/ext/ffmpeg/ffmpeg/libavformat/libavformat.la
libgstffmpeg_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
diff --git a/ext/ffmpeg/gstffmpeg.c b/ext/ffmpeg/gstffmpeg.c
index ff7611a..e10a274 100644
--- a/ext/ffmpeg/gstffmpeg.c
+++ b/ext/ffmpeg/gstffmpeg.c
@@ -53,11 +53,11 @@ plugin_init (GstPlugin *plugin)
gst_ffmpegenc_register (plugin);
gst_ffmpegdec_register (plugin);
- /*gst_ffmpegdemux_register (plugin);*/
+ gst_ffmpegdemux_register (plugin);
/*gst_ffmpegmux_register (plugin);*/
gst_ffmpegcsp_register (plugin);
- /*register_protocol (&gstreamer_protocol);*/
+ register_protocol (&gstreamer_protocol);
/* Now we can return the pointer to the newly created Plugin object. */
return TRUE;
diff --git a/ext/ffmpeg/gstffmpegcodecmap.c b/ext/ffmpeg/gstffmpegcodecmap.c
index c1eb3b1..976201d 100644
--- a/ext/ffmpeg/gstffmpegcodecmap.c
+++ b/ext/ffmpeg/gstffmpegcodecmap.c
@@ -890,7 +890,7 @@ gst_ffmpeg_formatid_to_caps (const gchar *format_name)
"systemstream", G_TYPE_BOOLEAN, TRUE,
NULL);
} else if (!strcmp (format_name, "rm")) {
- caps = gst_caps_new_simple ("ffmpeg_rm", "audio/x-pn-realvideo",
+ caps = gst_caps_new_simple ("application/x-pn-realmedia",
"systemstream", G_TYPE_BOOLEAN, TRUE,
NULL);
} else if (!strcmp (format_name, "asf")) {
@@ -919,7 +919,12 @@ gst_ffmpeg_formatid_to_caps (const gchar *format_name)
caps = gst_caps_new_simple ("video/x-4xm",
NULL);
} else {
- /* unknown! */
+ gchar *name;
+
+ GST_WARNING ("Could not create stream format caps for %s", format_name);
+ name = g_strdup_printf ("application/x-gst_ff-%s", format_name);
+ caps = gst_caps_new_simple (name, NULL);
+ g_free (name);
}
return caps;
diff --git a/ext/ffmpeg/gstffmpegdemux.c b/ext/ffmpeg/gstffmpegdemux.c
index 8a9e75d..bdb85c6 100644
--- a/ext/ffmpeg/gstffmpegdemux.c
+++ b/ext/ffmpeg/gstffmpegdemux.c
@@ -46,6 +46,8 @@ struct _GstFFMpegDemux {
gboolean opened;
GstPad *srcpads[MAX_STREAMS];
+ gboolean handled[MAX_STREAMS];
+ guint64 last_ts[MAX_STREAMS];
gint videopads, audiopads;
};
@@ -91,8 +93,8 @@ static GHashTable *global_plugins;
/* A number of function prototypes are given so we can refer to them later. */
static void gst_ffmpegdemux_class_init (GstFFMpegDemuxClass *klass);
static void gst_ffmpegdemux_base_init (GstFFMpegDemuxClass *klass);
-static void gst_ffmpegdemux_init (GstFFMpegDemux *ffmpegdemux);
-static void gst_ffmpegdemux_dispose (GObject *object);
+static void gst_ffmpegdemux_init (GstFFMpegDemux *demux);
+static void gst_ffmpegdemux_dispose (GObject *object);
static void gst_ffmpegdemux_loop (GstElement *element);
@@ -103,6 +105,39 @@ static GstElementClass *parent_class = NULL;
/*static guint gst_ffmpegdemux_signals[LAST_SIGNAL] = { 0 }; */
+static const gchar *
+gst_ffmpegdemux_averror (gint av_errno)
+{
+ const gchar *message = NULL;
+
+ switch (av_errno) {
+ default:
+ case AVERROR_UNKNOWN:
+ message = "Unknown error";
+ break;
+ case AVERROR_IO:
+ message = "Input/output error";
+ break;
+ case AVERROR_NUMEXPECTED:
+ message = "Number syntax expected in filename";
+ break;
+ case AVERROR_INVALIDDATA:
+ message = "Invalid data found";
+ break;
+ case AVERROR_NOMEM:
+ message = "Not enough memory";
+ break;
+ case AVERROR_NOFMT:
+ message = "Unknown format";
+ break;
+ case AVERROR_NOTSUPP:
+ message = "Operation not supported";
+ break;
+ }
+
+ return message;
+}
+
static void
gst_ffmpegdemux_base_init (GstFFMpegDemuxClass *klass)
{
@@ -121,15 +156,14 @@ gst_ffmpegdemux_base_init (GstFFMpegDemuxClass *klass)
/* construct the element details struct */
details.longname = g_strdup_printf("FFMPEG %s demuxer",
- params->in_plugin->name);
- details.klass = g_strdup("Codec/Demuxer");
+ params->in_plugin->long_name);
+ details.klass = "Codec/Demuxer";
details.description = g_strdup_printf("FFMPEG %s decoder",
- params->in_plugin->name);
+ params->in_plugin->long_name);
details.author = "Wim Taymans <wim.taymans@chello.be>, "
"Ronald Bultje <rbultje@ronald.bitfreak.net>";
gst_element_class_set_details (element_class, &details);
g_free (details.longname);
- g_free (details.klass);
g_free (details.description);
/* pad templates */
@@ -172,32 +206,338 @@ gst_ffmpegdemux_class_init (GstFFMpegDemuxClass *klass)
}
static void
-gst_ffmpegdemux_init(GstFFMpegDemux *ffmpegdemux)
+gst_ffmpegdemux_init (GstFFMpegDemux *demux)
{
- GstFFMpegDemuxClass *oclass = (GstFFMpegDemuxClass*)(G_OBJECT_GET_CLASS (ffmpegdemux));
+ GstFFMpegDemuxClass *oclass = (GstFFMpegDemuxClass *) (G_OBJECT_GET_CLASS (demux));
- ffmpegdemux->sinkpad = gst_pad_new_from_template (oclass->sinktempl,
+ demux->sinkpad = gst_pad_new_from_template (oclass->sinktempl,
"sink");
- gst_element_add_pad (GST_ELEMENT (ffmpegdemux),
- ffmpegdemux->sinkpad);
- gst_element_set_loop_function (GST_ELEMENT (ffmpegdemux),
+ gst_element_add_pad (GST_ELEMENT (demux), demux->sinkpad);
+ gst_element_set_loop_function (GST_ELEMENT (demux),
gst_ffmpegdemux_loop);
- ffmpegdemux->opened = FALSE;
+ demux->opened = FALSE;
+
+ memset (demux->srcpads, 0, sizeof (demux->srcpads));
+ memset (demux->handled, FALSE, sizeof (demux->handled));
+ memset (demux->last_ts, 0, sizeof (demux->last_ts));
+ demux->videopads = 0;
+ demux->audiopads = 0;
+}
+
+static void
+gst_ffmpegdemux_close (GstFFMpegDemux *demux)
+{
+ gint n;
+
+ if (!demux->opened)
+ return;
+
+ /* remove pads from ourselves */
+ for (n = 0; n < MAX_STREAMS; n++) {
+ if (demux->srcpads[n]) {
+ gst_element_remove_pad (GST_ELEMENT (demux), demux->srcpads[n]);
+ demux->srcpads[n] = NULL;
+ }
+ demux->handled[n] = FALSE;
+ demux->last_ts[n] = 0;
+ }
+ demux->videopads = 0;
+ demux->audiopads = 0;
+
+ /* close demuxer context from ffmpeg */
+ av_close_input_file (demux->context);
- ffmpegdemux->videopads = 0;
- ffmpegdemux->audiopads = 0;
+ demux->opened = FALSE;
}
static void
gst_ffmpegdemux_dispose (GObject *object)
{
- GstFFMpegDemux *ffmpegdemux = (GstFFMpegDemux *) object;
+ GstFFMpegDemux *demux = (GstFFMpegDemux *) object;
+
+ gst_ffmpegdemux_close (demux);
+}
+
+static AVStream *
+gst_ffmpegdemux_stream_from_pad (GstPad *pad)
+{
+ GstFFMpegDemux *demux = (GstFFMpegDemux *) gst_pad_get_parent (pad);
+ AVStream *stream = NULL;
+ gint n;
+
+ for (n = 0; n < MAX_STREAMS; n++) {
+ if (demux->srcpads[n] == pad) {
+ stream = demux->context->streams[n];
+ break;
+ }
+ }
+
+ return stream;
+}
+
+static const GstEventMask *
+gst_ffmpegdemux_src_event_mask (GstPad *pad)
+{
+ static const GstEventMask masks[] = {
+ { GST_EVENT_SEEK, GST_SEEK_METHOD_SET | GST_SEEK_FLAG_KEY_UNIT },
+ { 0, }
+ };
+
+ return masks;
+}
+
+static gboolean
+gst_ffmpegdemux_src_event (GstPad *pad,
+ GstEvent *event)
+{
+ GstFFMpegDemux *demux = (GstFFMpegDemux *) gst_pad_get_parent (pad);
+ AVStream *stream = gst_ffmpegdemux_stream_from_pad (pad);
+ gboolean res = TRUE;
+ gint64 offset;
+
+ if (!stream)
+ return FALSE;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ offset = GST_EVENT_SEEK_OFFSET (event);
+ switch (GST_EVENT_SEEK_FORMAT (event)) {
+ case GST_FORMAT_DEFAULT:
+ if (stream->codec.codec_type != CODEC_TYPE_VIDEO) {
+ res = FALSE;
+ break;
+ } else {
+ GstFormat fmt = GST_FORMAT_TIME;
+ if (!(res = gst_pad_convert (pad, GST_FORMAT_DEFAULT, offset,
+ &fmt, &offset)))
+ break;
+ }
+ /* fall-through */
+ case GST_FORMAT_TIME:
+ if (av_seek_frame (demux->context, stream->index,
+ offset / (GST_SECOND / AV_TIME_BASE)))
+ res = FALSE;
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+
+ return res;
+}
+
+static const GstFormat *
+gst_ffmpegdemux_src_format_list (GstPad *pad)
+{
+ AVStream *stream = gst_ffmpegdemux_stream_from_pad (pad);
+ static const GstFormat src_v_formats[] = {
+ GST_FORMAT_TIME,
+ GST_FORMAT_DEFAULT,
+ 0
+ }, src_a_formats[] = {
+ GST_FORMAT_TIME,
+ 0
+ };
+
+ return (stream->codec.codec_type == CODEC_TYPE_VIDEO) ?
+ src_v_formats : src_a_formats;
+}
+
+static const GstQueryType *
+gst_ffmpegdemux_src_query_list (GstPad *pad)
+{
+ static const GstQueryType src_types[] = {
+ GST_QUERY_TOTAL,
+ GST_QUERY_POSITION,
+ 0
+ };
+
+ return src_types;
+}
+
+static gboolean
+gst_ffmpegdemux_src_query (GstPad *pad,
+ GstQueryType type,
+ GstFormat *fmt,
+ gint64 *value)
+{
+ GstFFMpegDemux *demux = (GstFFMpegDemux *) gst_pad_get_parent (pad);
+ AVStream *stream = gst_ffmpegdemux_stream_from_pad (pad);
+ gboolean res = TRUE;
+ gint n;
+
+ if (!stream || (*fmt == GST_FORMAT_DEFAULT &&
+ stream->codec.codec_type != CODEC_TYPE_VIDEO))
+ return FALSE;
+
+ switch (type) {
+ case GST_QUERY_TOTAL:
+ switch (*fmt) {
+ case GST_FORMAT_TIME:
+ *value = stream->duration * (GST_SECOND / AV_TIME_BASE);
+ break;
+ case GST_FORMAT_DEFAULT:
+ if (stream->codec_info_nb_frames) {
+ *value = stream->codec_info_nb_frames;
+ break;
+ } /* else fall-through */
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ case GST_QUERY_POSITION:
+ switch (*fmt) {
+ case GST_FORMAT_TIME:
+ *value = demux->last_ts[stream->index];
+ break;
+ case GST_FORMAT_DEFAULT:
+ res = gst_pad_convert (pad, GST_FORMAT_TIME,
+ demux->last_ts[stream->index],
+ fmt, value);
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+
+ return res;
+}
+
+static gboolean
+gst_ffmpegdemux_src_convert (GstPad *pad,
+ GstFormat src_fmt,
+ gint64 src_value,
+ GstFormat *dest_fmt,
+ gint64 *dest_value)
+{
+ GstFFMpegDemux *demux = (GstFFMpegDemux *) gst_pad_get_parent (pad);
+ AVStream *stream = gst_ffmpegdemux_stream_from_pad (pad);
+ gboolean res = TRUE;
+
+ if (!stream || stream->codec.codec_type != CODEC_TYPE_VIDEO)
+ return FALSE;
+
+ switch (src_fmt) {
+ case GST_FORMAT_TIME:
+ switch (*dest_fmt) {
+ case GST_FORMAT_DEFAULT:
+ *dest_value = src_value * stream->r_frame_rate /
+ (GST_SECOND * stream->r_frame_rate_base);
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ case GST_FORMAT_DEFAULT:
+ switch (*dest_fmt) {
+ case GST_FORMAT_TIME:
+ *dest_value = src_value * GST_SECOND * stream->r_frame_rate_base /
+ stream->r_frame_rate;
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+
+ return res;
+}
- if (ffmpegdemux->opened) {
- av_close_input_file (ffmpegdemux->context);
- ffmpegdemux->opened = FALSE;
+static gboolean
+gst_ffmpegdemux_add (GstFFMpegDemux *demux,
+ AVStream *stream)
+{
+ GstFFMpegDemuxClass *oclass = (GstFFMpegDemuxClass *) G_OBJECT_GET_CLASS (demux);
+ GstPadTemplate *templ = NULL;
+ GstPad *pad;
+ GstCaps *caps;
+ gint num;
+ gchar *padname;
+
+ switch (stream->codec.codec_type) {
+ case CODEC_TYPE_VIDEO:
+ templ = oclass->videosrctempl;
+ num = demux->videopads++;
+ break;
+ case CODEC_TYPE_AUDIO:
+ templ = oclass->audiosrctempl;
+ num = demux->audiopads++;
+ break;
+ default:
+ GST_WARNING ("Unknown pad type %d", stream->codec.codec_type);
+ break;
}
+ if (!templ)
+ return FALSE;
+
+ /* create new pad for this stream */
+ padname = g_strdup_printf (GST_PAD_TEMPLATE_NAME_TEMPLATE (templ), num);
+ pad = gst_pad_new_from_template (templ, padname);
+ g_free (padname);
+
+ gst_pad_use_explicit_caps (pad);
+ /* FIXME: srcevent(), convert() and query() functions for pad */
+
+ /* store pad internally */
+ demux->srcpads[stream->index] = pad;
+
+ /* get caps that belongs to this stream */
+ caps = gst_ffmpeg_codecid_to_caps (stream->codec.codec_id, &stream->codec);
+ gst_pad_set_explicit_caps (pad, caps);
+
+ gst_element_add_pad (GST_ELEMENT (demux), pad);
+
+ return TRUE;
+}
+
+static gboolean
+gst_ffmpegdemux_open (GstFFMpegDemux *demux)
+{
+ GstFFMpegDemuxClass *oclass = (GstFFMpegDemuxClass *) G_OBJECT_GET_CLASS (demux);
+ gchar *location;
+ gint res;
+
+ /* to be sure... */
+ gst_ffmpegdemux_close (demux);
+
+ /* open via our input protocol hack */
+ location = g_strdup_printf ("gstreamer://%p", demux->sinkpad);
+ res = av_open_input_file (&demux->context, location,
+ oclass->in_plugin, 0, NULL);
+ g_free (location);
+ if (res < 0) {
+ GST_ELEMENT_ERROR (demux, LIBRARY, FAILED, (NULL),
+ (gst_ffmpegdemux_averror (res)));
+ return FALSE;
+ }
+
+ /* open_input_file() automatically reads the header. We can now map each
+ * created AVStream to a GstPad to make GStreamer handle it. */
+ for (res = 0; res < demux->context->nb_streams; res++) {
+ gst_ffmpegdemux_add (demux, demux->context->streams[res]);
+ demux->handled[res] = TRUE;
+ }
+
+ demux->opened = TRUE;
+
+ return TRUE;
}
#define GST_FFMPEG_TYPE_FIND_SIZE 4096
@@ -227,151 +567,70 @@ gst_ffmpegdemux_type_find (GstTypeFind *tf, gpointer priv)
static void
gst_ffmpegdemux_loop (GstElement *element)
{
- GstFFMpegDemux *ffmpegdemux = (GstFFMpegDemux *)(element);
- GstFFMpegDemuxClass *oclass = (GstFFMpegDemuxClass*)(G_OBJECT_GET_CLASS (ffmpegdemux));
-
+ GstFFMpegDemux *demux = (GstFFMpegDemux *)(element);
gint res;
AVPacket pkt;
- AVFormatContext *ct;
- AVStream *st;
GstPad *pad;
/* open file if we didn't so already */
- if (!ffmpegdemux->opened) {
- res = av_open_input_file (&ffmpegdemux->context,
- g_strdup_printf ("gstreamer://%p",
- ffmpegdemux->sinkpad),
- oclass->in_plugin, 0, NULL);
- if (res < 0) {
- GST_ELEMENT_ERROR (ffmpegdemux, LIBRARY, TOO_LAZY, (NULL),
- ("Failed to open demuxer/file context"));
+ if (!demux->opened) {
+ if (!gst_ffmpegdemux_open (demux))
return;
- }
-
- ffmpegdemux->opened = TRUE;
}
- /* shortcut to context */
- ct = ffmpegdemux->context;
-
/* read a package */
- res = av_read_packet (ct, &pkt);
+ res = av_read_packet (demux->context, &pkt);
if (res < 0) {
- if (url_feof (&ct->pb)) {
- int i;
-
- /* we're at the end of file - send an EOS to
- * each stream that we opened so far */
- for (i = 0; i < ct->nb_streams; i++) {
- GstPad *pad;
- GstEvent *event = gst_event_new (GST_EVENT_EOS);
-
- pad = ffmpegdemux->srcpads[i];
- if (GST_PAD_IS_USABLE (pad)) {
- gst_data_ref (GST_DATA (event));
- gst_pad_push (pad, GST_DATA (event));
- }
- gst_data_unref (GST_DATA (event));
- }
- gst_element_set_eos (element);
-
- /* FIXME: should we go into
- * should we close the context here?
- * either way, a new media stream needs an
- * event too */
+ if (url_feof (&demux->context->pb)) {
+ gst_pad_event_default (demux->sinkpad, gst_event_new (GST_EVENT_EOS));
+ gst_ffmpegdemux_close (demux);
+ } else {
+ GST_ELEMENT_ERROR (demux, LIBRARY, FAILED, (NULL),
+ (gst_ffmpegdemux_averror (res)));
}
return;
}
- /* shortcut to stream */
- st = ct->streams[pkt.stream_index];
-
- /* create the pad/stream if we didn't do so already */
- if (st->codec_info_state == 0) {
- GstPadTemplate *templ = NULL;
- GstCaps *caps;
- gchar *padname;
- gint num;
-
- /* mark as handled */
- st->codec_info_state = 1;
-
- /* find template */
- switch (st->codec.codec_type) {
- case CODEC_TYPE_VIDEO:
- templ = oclass->videosrctempl;
- num = ffmpegdemux->videopads++;
- break;
- case CODEC_TYPE_AUDIO:
- templ = oclass->audiosrctempl;
- num = ffmpegdemux->audiopads++;
- break;
- default:
- g_warning ("Unknown pad type %d",
- st->codec.codec_type);
- return;
- }
-
- /* create new pad for this stream */
- padname = g_strdup_printf (GST_PAD_TEMPLATE_NAME_TEMPLATE(templ),
- num);
- pad = gst_pad_new_from_template (templ, padname);
- g_free (padname);
-
- gst_pad_use_explicit_caps (pad);
- /* FIXME: convert() and query() functions for pad */
-
- /* store pad internally */
- ffmpegdemux->srcpads[pkt.stream_index] = pad;
-
- /* get caps that belongs to this stream */
- caps = gst_ffmpeg_codecid_to_caps (st->codec.codec_id,
- &st->codec);
- gst_pad_set_explicit_caps (pad, caps);
-
- gst_element_add_pad (GST_ELEMENT (ffmpegdemux), pad);
-
- /* we continue here, in the next pad-is-usable check,
- * we'll return nonetheless */
+ /* for stream-generation-while-playing */
+ if (!demux->handled[pkt.stream_index]) {
+ gst_ffmpegdemux_add (demux, demux->context->streams[pkt.stream_index]);
+ demux->handled[pkt.stream_index] = TRUE;
}
/* shortcut to pad belonging to this stream */
- pad = ffmpegdemux->srcpads[pkt.stream_index];
+ pad = demux->srcpads[pkt.stream_index];
/* and handle the data by pushing it forward... */
- if (GST_PAD_IS_USABLE (pad)) {
+ if (pad && GST_PAD_IS_USABLE (pad)) {
GstBuffer *outbuf;
outbuf = gst_buffer_new_and_alloc (pkt.size);
memcpy (GST_BUFFER_DATA (outbuf), pkt.data, pkt.size);
GST_BUFFER_SIZE (outbuf) = pkt.size;
- if (pkt.pts != AV_NOPTS_VALUE && ct->pts_den) {
- GST_BUFFER_TIMESTAMP (outbuf) = pkt.pts * GST_SECOND *
- ct->pts_num / ct->pts_den;
- }
+ if (pkt.pts != AV_NOPTS_VALUE && demux->context->pts_den)
+ GST_BUFFER_TIMESTAMP (outbuf) = (double) pkt.pts * GST_SECOND *
+ demux->context->pts_num / demux->context->pts_den;
if (pkt.flags & PKT_FLAG_KEY) {
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_KEY_UNIT);
}
gst_pad_push (pad, GST_DATA (outbuf));
- pkt.destruct (&pkt);
}
+
+ pkt.destruct (&pkt);
}
static GstElementStateReturn
gst_ffmpegdemux_change_state (GstElement *element)
{
- GstFFMpegDemux *ffmpegdemux = (GstFFMpegDemux *)(element);
+ GstFFMpegDemux *demux = (GstFFMpegDemux *)(element);
gint transition = GST_STATE_TRANSITION (element);
switch (transition) {
case GST_STATE_PAUSED_TO_READY:
- if (ffmpegdemux->opened) {
- av_close_input_file (ffmpegdemux->context);
- ffmpegdemux->opened = FALSE;
- }
+ gst_ffmpegdemux_close (demux);
break;
}
@@ -400,7 +659,6 @@ gst_ffmpegdemux_register (GstPlugin *plugin)
0,
(GInstanceInitFunc)gst_ffmpegdemux_init,
};
- GstCaps *any_caps = gst_caps_new_any ();
in_plugin = first_iformat;
@@ -408,19 +666,34 @@ gst_ffmpegdemux_register (GstPlugin *plugin)
while (in_plugin) {
gchar *type_name, *typefind_name;
- gchar *p;
+ gchar *p, *name = NULL;
GstCaps *sinkcaps, *audiosrccaps, *videosrccaps;
+ /* no emulators */
+ if (!strncmp (in_plugin->long_name, "raw ", 4) ||
+ !strncmp (in_plugin->long_name, "pcm ", 4) ||
+ !strcmp (in_plugin->name, "audio_device") ||
+ !strncmp (in_plugin->name, "image", 5) ||
+ !strcmp (in_plugin->name, "mpegvideo") ||
+ !strcmp (in_plugin->name, "mjpeg"))
+ goto next;
+
+ p = name = g_strdup (in_plugin->name);
+ while (*p) {
+ if (*p == '.' || *p == ',') *p = '_';
+ p++;
+ }
+
/* Try to find the caps that belongs here */
- sinkcaps = gst_ffmpeg_formatid_to_caps (in_plugin->name);
+ sinkcaps = gst_ffmpeg_formatid_to_caps (name);
if (!sinkcaps) {
goto next;
}
/* This is a bit ugly, but we just take all formats
* for the pad template. We'll get an exact match
* when we open the stream */
- audiosrccaps = NULL;
- videosrccaps = NULL;
+ audiosrccaps = gst_caps_new_empty ();
+ videosrccaps = gst_caps_new_empty ();
for (in_codec = first_avcodec; in_codec != NULL;
in_codec = in_codec->next) {
GstCaps *temp = gst_ffmpeg_codecid_to_caps (in_codec->id, NULL);
@@ -441,21 +714,15 @@ gst_ffmpegdemux_register (GstPlugin *plugin)
}
/* construct the type */
- type_name = g_strdup_printf("ffdemux_%s", in_plugin->name);
- typefind_name = g_strdup_printf("fftype_%s", in_plugin->name);
-
- p = type_name;
-
- while (*p) {
- if (*p == '.') *p = '_';
- p++;
- }
+ type_name = g_strdup_printf("ffdemux_%s", name);
/* if it's already registered, drop it */
- if (g_type_from_name(type_name)) {
- g_free(type_name);
+ if (g_type_from_name (type_name)) {
+ g_free (type_name);
goto next;
}
+
+ typefind_name = g_strdup_printf("fftype_%s", name);
/* create a cache for these properties */
params = g_new0 (GstFFMpegDemuxClassParams, 1);
@@ -469,24 +736,32 @@ gst_ffmpegdemux_register (GstPlugin *plugin)
(gpointer) params);
/* create the type now */
- type = g_type_register_static(GST_TYPE_ELEMENT, type_name , &typeinfo, 0);
+ type = g_type_register_static (GST_TYPE_ELEMENT, type_name , &typeinfo, 0);
g_hash_table_insert (global_plugins,
GINT_TO_POINTER (type),
(gpointer) params);
- extensions = g_strsplit (in_plugin->extensions, " ", 0);
+ if (in_plugin->extensions)
+ extensions = g_strsplit (in_plugin->extensions, " ", 0);
+ else
+ extensions = NULL;
+
if (!gst_element_register (plugin, type_name, GST_RANK_MARGINAL, type) ||
!gst_type_find_register (plugin, typefind_name, GST_RANK_MARGINAL,
gst_ffmpegdemux_type_find,
- extensions, any_caps, params))
+ extensions, sinkcaps, params)) {
+ g_warning ("Register of type ffdemux_%s failed", name);
return FALSE;
- g_strfreev (extensions);
+ }
+
+ if (extensions)
+ g_strfreev (extensions);
next:
+ g_free (name);
in_plugin = in_plugin->next;
}
- gst_caps_free (any_caps);
g_hash_table_remove (global_plugins, GINT_TO_POINTER (0));
return TRUE;
diff --git a/ext/ffmpeg/gstffmpegmux.c b/ext/ffmpeg/gstffmpegmux.c
index 920c7db..66e571e 100644
--- a/ext/ffmpeg/gstffmpegmux.c
+++ b/ext/ffmpeg/gstffmpegmux.c
@@ -60,9 +60,6 @@ struct _GstFFMpegMuxClass {
GstElementClass parent_class;
AVOutputFormat *in_plugin;
- GstPadTemplate *srctempl;
- GstPadTemplate *videosinktempl;
- GstPadTemplate *audiosinktempl;
};
#define GST_TYPE_FFMPEGMUX \
@@ -139,7 +136,7 @@ gst_ffmpegmux_base_init (GstFFMpegMuxClass *klass)
g_free (details.description);
/* pad templates */
- srctempl = gst_pad_template_new ("sink", GST_PAD_SRC,
+ srctempl = gst_pad_template_new ("src", GST_PAD_SRC,
GST_PAD_ALWAYS,
params->srccaps);
audiosinktempl = gst_pad_template_new ("audio_%d",
@@ -156,9 +153,6 @@ gst_ffmpegmux_base_init (GstFFMpegMuxClass *klass)
gst_element_class_add_pad_template (element_class, audiosinktempl);
klass->in_plugin = params->in_plugin;
- klass->srctempl = srctempl;
- klass->videosinktempl = videosinktempl;
- klass->audiosinktempl = audiosinktempl;
}
static void
@@ -180,21 +174,22 @@ gst_ffmpegmux_class_init (GstFFMpegMuxClass *klass)
static void
gst_ffmpegmux_init(GstFFMpegMux *ffmpegmux)
{
- GstFFMpegMuxClass *oclass = (GstFFMpegMuxClass*)(G_OBJECT_GET_CLASS (ffmpegmux));
+ GstElementClass *klass = GST_ELEMENT_GET_CLASS (ffmpegmux);
+ GstFFMpegMuxClass *oclass = (GstFFMpegMuxClass*) klass;
+ GstPadTemplate *templ = gst_element_class_get_pad_template (klass, "src");
- ffmpegmux->srcpad = gst_pad_new_from_template (oclass->srctempl,
- "src");
- gst_element_add_pad (GST_ELEMENT (ffmpegmux),
- ffmpegmux->srcpad);
+ ffmpegmux->srcpad = gst_pad_new_from_template (templ, "src");
gst_element_set_loop_function (GST_ELEMENT (ffmpegmux),
gst_ffmpegmux_loop);
+ gst_element_add_pad (GST_ELEMENT (ffmpegmux),
+ ffmpegmux->srcpad);
ffmpegmux->context = g_new0 (AVFormatContext, 1);
- memset (ffmpegmux->context, 0, sizeof (AVFormatContext));
ffmpegmux->context->oformat = oclass->in_plugin;
ffmpegmux->context->nb_streams = 0;
- sprintf (ffmpegmux->context->filename, "gstreamer://%p",
- ffmpegmux->srcpad);
+ snprintf (ffmpegmux->context->filename,
+ sizeof (ffmpegmux->context->filename),
+ "gstreamer://%p", ffmpegmux->srcpad);
ffmpegmux->opened = FALSE;
ffmpegmux->videopads = 0;
@@ -220,26 +215,30 @@ gst_ffmpegmux_request_new_pad (GstElement *element,
const gchar *name)
{
GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) element;
- GstFFMpegMuxClass *oclass = (GstFFMpegMuxClass*)(G_OBJECT_GET_CLASS (ffmpegmux));
+ GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
+ GstFFMpegMuxClass *oclass = (GstFFMpegMuxClass*) klass;
gchar *padname;
GstPad *pad;
AVStream *st;
enum CodecType type;
- gint padnum;
+ gint padnum, bitrate = 0, framesize = 0;
g_return_val_if_fail (templ != NULL, NULL);
g_return_val_if_fail (templ->direction == GST_PAD_SINK, NULL);
g_return_val_if_fail (ffmpegmux->opened == FALSE, NULL);
/* figure out a name that *we* like */
- if (templ == oclass->videosinktempl) {
- padname = g_strdup_printf ("video_%02d",
+ if (templ == gst_element_class_get_pad_template (klass, "video_%d")) {
+ padname = g_strdup_printf ("video_%d",
ffmpegmux->videopads++);
type = CODEC_TYPE_VIDEO;
- } else if (templ == oclass->audiosinktempl) {
- padname = g_strdup_printf ("audio_%02d",
+ bitrate = 64 * 1024;
+ framesize = 1152;
+ } else if (templ == gst_element_class_get_pad_template (klass, "audio_%d")) {
+ padname = g_strdup_printf ("audio_%d",
ffmpegmux->audiopads++);
type = CODEC_TYPE_AUDIO;
+ bitrate = 285 * 1024;
} else {
g_warning("ffmux: unknown pad template!");
return NULL;
@@ -247,25 +246,22 @@ gst_ffmpegmux_request_new_pad (GstElement *element,
/* create pad */
pad = gst_pad_new_from_template (templ, padname);
- padnum = ffmpegmux->context->nb_streams++;
+ padnum = ffmpegmux->context->nb_streams;
ffmpegmux->sinkpads[padnum] = pad;
gst_pad_set_link_function (pad, gst_ffmpegmux_connect);
gst_element_add_pad (element, pad);
/* AVStream needs to be created */
- st = g_malloc (sizeof (AVStream));
- memset(st, 0, sizeof (AVStream));
- avcodec_get_context_defaults (&st->codec);
+ st = av_new_stream (ffmpegmux->context, padnum);
st->codec.codec_type = type;
st->codec.codec_id = CODEC_ID_NONE; /* this is a check afterwards */
- st->index = padnum;
st->stream_copy = 1; /* we're not the actual encoder */
- /* we fill in other codec-related info during capsnego */
- ffmpegmux->context->streams[padnum] = st;
+ st->codec.bit_rate = bitrate;
+ st->codec.frame_size = framesize;
+ /* we fill in codec during capsnego */
/* we love debug output (c) (tm) (r) */
- GST_DEBUG (
- "Created %s pad for ffmux_%s element",
+ GST_DEBUG ("Created %s pad for ffmux_%s element",
padname, oclass->in_plugin->name);
g_free (padname);
@@ -280,8 +276,8 @@ gst_ffmpegmux_connect (GstPad *pad,
gint i;
AVStream *st;
- g_return_val_if_fail (ffmpegmux->opened == FALSE,
- GST_PAD_LINK_REFUSED);
+ /*g_return_val_if_fail (ffmpegmux->opened == FALSE,
+ GST_PAD_LINK_REFUSED);*/
for (i = 0; i < ffmpegmux->context->nb_streams; i++) {
if (pad == ffmpegmux->sinkpads[i]) {
@@ -309,26 +305,57 @@ static void
gst_ffmpegmux_loop (GstElement *element)
{
GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) element;
- GstFFMpegMuxClass *oclass = (GstFFMpegMuxClass*)(G_OBJECT_GET_CLASS (ffmpegmux));
gint i, bufnum;
- GstBuffer *buf;
- /* we do need all streams to have started capsnego,
- * or things will go horribly wrong */
+ /* start by filling an internal queue of buffers */
for (i = 0; i < ffmpegmux->context->nb_streams; i++) {
- AVStream *st = ffmpegmux->context->streams[i];
+ GstPad *pad = ffmpegmux->sinkpads[i];
- /* check whether the pad has successfully completed capsnego */
- if (st->codec.codec_id == CODEC_ID_NONE) {
- GST_DEBUG (
- "ffmux loop function called without capsnego on pad %d",
- i);
- return;
+ /* check for "pull'ability" */
+ while (pad != NULL &&
+ GST_PAD_IS_USABLE (pad) &&
+ ffmpegmux->eos[i] == FALSE &&
+ ffmpegmux->bufferqueue[i] == NULL) {
+ GstData *data;
+
+ /* we can pull a buffer! */
+ data = gst_pad_pull (pad);
+ if (GST_IS_EVENT (data)) {
+ GstEvent *event = GST_EVENT (data);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_EOS:
+ /* flag EOS on this stream */
+ ffmpegmux->eos[i] = TRUE;
+ gst_event_unref (event);
+ break;
+ default:
+ gst_pad_event_default (pad, event);
+ break;
+ }
+ } else {
+ ffmpegmux->bufferqueue[i] = GST_BUFFER (data);
+ }
}
}
/* open "file" (gstreamer protocol to next element) */
if (!ffmpegmux->opened) {
+ /* we do need all streams to have started capsnego,
+ * or things will go horribly wrong */
+ for (i = 0; i < ffmpegmux->context->nb_streams; i++) {
+ AVStream *st = ffmpegmux->context->streams[i];
+
+ /* check whether the pad has successfully completed capsnego */
+ if (st->codec.codec_id == CODEC_ID_NONE) {
+ GST_ELEMENT_ERROR (element, CORE, NEGOTIATION, (NULL),
+ ("no caps set on stream %d (%s)", i,
+ (st->codec.codec_type == CODEC_TYPE_VIDEO) ?
+ "video" : "audio"));
+ return;
+ }
+ }
+
if (url_fopen (&ffmpegmux->context->pb,
ffmpegmux->context->filename,
URL_WRONLY) < 0) {
@@ -337,38 +364,17 @@ gst_ffmpegmux_loop (GstElement *element)
return;
}
+ if (av_set_parameters (ffmpegmux->context, NULL)) {
+ GST_ELEMENT_ERROR (element, LIBRARY, INIT, (NULL),
+ ("Failed to initialize muxer"));
+ return;
+ }
+
/* we're now opened */
ffmpegmux->opened = TRUE;
/* now open the mux format */
- oclass->in_plugin->write_header (ffmpegmux->context);
- }
-
- /* FIXME: do we support more events? */
-
- /* start by filling an internal queue of buffers */
- for (i = 0; i < ffmpegmux->context->nb_streams; i++) {
- GstPad *pad = ffmpegmux->sinkpads[i];
-
- /* check for "pull'ability" */
- while (pad != NULL &&
- GST_PAD_IS_USABLE (pad) &&
- /*GST_PAD_IS_ACTIVE (pad) &&*/
- GST_PAD_IS_LINKED (pad) &&
- ffmpegmux->eos[i] == FALSE &&
- ffmpegmux->bufferqueue[i] == NULL) {
- /* we can pull a buffer! */
- buf = GST_BUFFER (gst_pad_pull (pad));
- if (GST_IS_EVENT (buf)) {
- if (GST_EVENT_TYPE (GST_EVENT (buf)) == GST_EVENT_EOS) {
- /* flag EOS on this stream */
- ffmpegmux->eos[i] = TRUE;
- gst_data_unref (GST_DATA (buf));
- }
- } else {
- ffmpegmux->bufferqueue[i] = buf;
- }
- }
+ av_write_header (ffmpegmux->context);
}
/* take the one with earliest timestamp,
@@ -383,6 +389,7 @@ gst_ffmpegmux_loop (GstElement *element)
/* if we have no buffer yet, just use the first one */
if (bufnum == -1) {
bufnum = i;
+ continue;
}
/* if we do have one, only use this one if it's older */
@@ -395,17 +402,24 @@ gst_ffmpegmux_loop (GstElement *element)
/* now handle the buffer, or signal EOS if we have
* no buffers left */
if (bufnum >= 0) {
+ GstBuffer *buf;
+
/* push out current buffer */
buf = ffmpegmux->bufferqueue[bufnum];
ffmpegmux->bufferqueue[bufnum] = NULL;
- oclass->in_plugin->write_packet (ffmpegmux->context,
- bufnum,
- GST_BUFFER_DATA (buf),
- GST_BUFFER_SIZE (buf), 0);
+
+ ffmpegmux->context->streams[bufnum]->codec.frame_number++;
+
+ /* set time */
+ ffmpegmux->context->streams[bufnum]->pts.val = (GST_BUFFER_TIMESTAMP (buf) * 90) / 1000000;
+ av_write_frame (ffmpegmux->context, bufnum,
+ GST_BUFFER_DATA (buf),
+ GST_BUFFER_SIZE (buf));
+ //ffmpegmux->context->streams[bufnum]->codec.real_pict_num++;
gst_buffer_unref (buf);
} else {
/* close down */
- oclass->in_plugin->write_trailer (ffmpegmux->context);
+ av_write_trailer (ffmpegmux->context);
url_fclose (&ffmpegmux->context->pb);
ffmpegmux->opened = FALSE;
gst_element_set_eos (element);
@@ -470,8 +484,8 @@ gst_ffmpegmux_register (GstPlugin *plugin)
/* This is a bit ugly, but we just take all formats
* for the pad template. We'll get an exact match
* when we open the stream */
- audiosinkcaps = NULL;
- videosinkcaps = NULL;
+ audiosinkcaps = gst_caps_new_empty ();
+ videosinkcaps = gst_caps_new_empty ();
for (in_codec = first_avcodec; in_codec != NULL;
in_codec = in_codec->next) {
GstCaps *temp = gst_ffmpeg_codecid_to_caps (in_codec->id, NULL);
diff --git a/ext/ffmpeg/gstffmpegprotocol.c b/ext/ffmpeg/gstffmpegprotocol.c
index 2b1468d..be95dcc 100644
--- a/ext/ffmpeg/gstffmpegprotocol.c
+++ b/ext/ffmpeg/gstffmpegprotocol.c
@@ -43,9 +43,9 @@ struct _GstProtocolInfo {
};
static int
-gst_open (URLContext *h,
- const char *filename,
- int flags)
+gst_ffmpegdata_open (URLContext *h,
+ const char *filename,
+ int flags)
{
GstProtocolInfo *info;
GstPad *pad;
@@ -80,6 +80,7 @@ gst_open (URLContext *h,
}
info->eos = FALSE;
+ info->pad = pad;
h->priv_data = (void *) info;
@@ -87,12 +88,12 @@ gst_open (URLContext *h,
}
static int
-gst_read (URLContext *h,
- unsigned char *buf,
- int size)
+gst_ffmpegdata_read (URLContext *h,
+ unsigned char *buf,
+ int size)
{
GstByteStream *bs;
- guint32 total;
+ guint32 total, request;
guint8 *data;
GstProtocolInfo *info;
@@ -102,33 +103,47 @@ gst_read (URLContext *h,
bs = info->bs;
- if (info->eos)
+ if (info->eos)
return 0;
- total = gst_bytestream_peek_bytes (bs, &data, size);
-
- if (total < size) {
- GstEvent *event;
- guint32 remaining;
-
- gst_bytestream_get_status (bs, &remaining, &event);
-
- if (!event) {
- g_warning ("gstffmpegprotocol: no bytestream event");
- return total;
+ do {
+ /* prevent EOS */
+ if (gst_bytestream_tell (bs) + size > gst_bytestream_length (bs))
+ request = gst_bytestream_length (bs) - gst_bytestream_tell (bs);
+ else
+ request = size;
+
+ if (request)
+ total = gst_bytestream_peek_bytes (bs, &data, request);
+ else
+ total = 0;
+
+ if (total < request) {
+ GstEvent *event;
+ guint32 remaining;
+
+ gst_bytestream_get_status (bs, &remaining, &event);
+
+ if (!event) {
+ g_warning ("gstffmpegprotocol: no bytestream event");
+ return total;
+ }
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_DISCONTINUOUS:
+ gst_bytestream_flush_fast (bs, remaining);
+ gst_event_unref (event);
+ break;
+ case GST_EVENT_EOS:
+ info->eos = TRUE;
+ gst_event_unref (event);
+ break;
+ default:
+ gst_pad_event_default (info->pad, event);
+ break;
+ }
}
-
- switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_DISCONTINUOUS:
- gst_bytestream_flush_fast (bs, remaining);
- case GST_EVENT_EOS:
- info->eos = TRUE;
- break;
- default:
- break;
- }
- gst_event_unref (event);
- }
+ } while (!info->eos && total != request);
memcpy (buf, data, total);
gst_bytestream_flush (bs, total);
@@ -137,9 +152,9 @@ gst_read (URLContext *h,
}
static int
-gst_write (URLContext *h,
- unsigned char *buf,
- int size)
+gst_ffmpegdata_write (URLContext *h,
+ unsigned char *buf,
+ int size)
{
GstProtocolInfo *info;
GstBuffer *outbuf;
@@ -159,9 +174,9 @@ gst_write (URLContext *h,
}
static offset_t
-gst_seek (URLContext *h,
- offset_t pos,
- int whence)
+gst_ffmpegdata_seek (URLContext *h,
+ offset_t pos,
+ int whence)
{
GstSeekType seek_type = 0;
GstProtocolInfo *info;
@@ -188,10 +203,12 @@ gst_seek (URLContext *h,
gst_bytestream_seek (info->bs, pos, seek_type);
break;
- case URL_WRONLY: {
- GstEvent *event = gst_event_new_seek (seek_type, pos);
- gst_pad_push (info->pad, GST_DATA (event));
- }
+ case URL_WRONLY:
+ gst_pad_push (info->pad, GST_DATA (gst_event_new_seek (seek_type, pos)));
+ break;
+
+ default:
+ g_assert (0);
break;
}
@@ -199,7 +216,7 @@ gst_seek (URLContext *h,
}
static int
-gst_close (URLContext *h)
+gst_ffmpegdata_close (URLContext *h)
{
GstProtocolInfo *info;
@@ -227,10 +244,10 @@ gst_close (URLContext *h)
URLProtocol gstreamer_protocol = {
.name = "gstreamer",
- .url_open = gst_open,
- .url_read = gst_read,
- .url_write = gst_write,
- .url_seek = gst_seek,
- .url_close = gst_close,
+ .url_open = gst_ffmpegdata_open,
+ .url_read = gst_ffmpegdata_read,
+ .url_write = gst_ffmpegdata_write,
+ .url_seek = gst_ffmpegdata_seek,
+ .url_close = gst_ffmpegdata_close,
};