author    Sebastian Dröge <sebastian@centricular.com>    2013-10-31 10:01:26 +0100
committer Sebastian Dröge <sebastian@centricular.com>    2013-10-31 10:25:59 +0100
commit    8440e137d90ffe4df2cf615b38be4c358d137c33
tree      5c76b8d0a1225d55915ac055b5506f9424ae9702
parent    395d7b92f9a8f7ecc991ffabd93525fd339f35b1
Port basic tutorials to 1.0
-rw-r--r--  gst-sdk/tutorials/basic-tutorial-1.c    4
-rw-r--r--  gst-sdk/tutorials/basic-tutorial-12.c   4
-rw-r--r--  gst-sdk/tutorials/basic-tutorial-13.c   8
-rw-r--r--  gst-sdk/tutorials/basic-tutorial-15.c   2
-rw-r--r--  gst-sdk/tutorials/basic-tutorial-3.c    2
-rw-r--r--  gst-sdk/tutorials/basic-tutorial-4.c   29
-rw-r--r--  gst-sdk/tutorials/basic-tutorial-5.c   89
-rw-r--r--  gst-sdk/tutorials/basic-tutorial-6.c   16
-rw-r--r--  gst-sdk/tutorials/basic-tutorial-7.c    4
-rw-r--r--  gst-sdk/tutorials/basic-tutorial-8.c   32
10 files changed, 95 insertions(+), 95 deletions(-)
diff --git a/gst-sdk/tutorials/basic-tutorial-1.c b/gst-sdk/tutorials/basic-tutorial-1.c
index a02e8f2..1266786 100644
--- a/gst-sdk/tutorials/basic-tutorial-1.c
+++ b/gst-sdk/tutorials/basic-tutorial-1.c
@@ -9,7 +9,7 @@ int main(int argc, char *argv[]) {
gst_init (&argc, &argv);
/* Build the pipeline */
- pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
+ pipeline = gst_parse_launch ("playbin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Start playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
@@ -25,4 +25,4 @@ int main(int argc, char *argv[]) {
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
-}
\ No newline at end of file
+}
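
The only functional change in tutorials 1 and 12 is the element rename: 0.10's playbin2 is simply called playbin in GStreamer 1.0, so a 0.10-style launch string would fail to parse because no playbin2 element exists anymore. For orientation, a compact rendering of what the ported tutorial boils down to; build flags and error handling are left out for brevity:

#include <gst/gst.h>

int main (int argc, char *argv[]) {
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  gst_init (&argc, &argv);

  /* "playbin" replaces 0.10's "playbin2"; the rest of the API is unchanged here */
  pipeline = gst_parse_launch (
      "playbin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Block until an error or end-of-stream message reaches the bus */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}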
diff --git a/gst-sdk/tutorials/basic-tutorial-12.c b/gst-sdk/tutorials/basic-tutorial-12.c
index a1f0808..76aa9e3 100644
--- a/gst-sdk/tutorials/basic-tutorial-12.c
+++ b/gst-sdk/tutorials/basic-tutorial-12.c
@@ -68,7 +68,7 @@ int main(int argc, char *argv[]) {
memset (&data, 0, sizeof (data));
/* Build the pipeline */
- pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
+ pipeline = gst_parse_launch ("playbin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
bus = gst_element_get_bus (pipeline);
/* Start playing */
@@ -96,4 +96,4 @@ int main(int argc, char *argv[]) {
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
-}
\ No newline at end of file
+}
diff --git a/gst-sdk/tutorials/basic-tutorial-13.c b/gst-sdk/tutorials/basic-tutorial-13.c
index 4f8dda3..084e269 100644
--- a/gst-sdk/tutorials/basic-tutorial-13.c
+++ b/gst-sdk/tutorials/basic-tutorial-13.c
@@ -1,4 +1,5 @@
#include <string.h>
+#include <stdio.h>
#include <gst/gst.h>
typedef struct _CustomData {
@@ -13,11 +14,10 @@ typedef struct _CustomData {
/* Send seek event to change rate */
static void send_seek_event (CustomData *data) {
gint64 position;
- GstFormat format = GST_FORMAT_TIME;
GstEvent *seek_event;
/* Obtain the current position, needed for the seek event */
- if (!gst_element_query_position (data->pipeline, &format, &position)) {
+ if (!gst_element_query_position (data->pipeline, GST_FORMAT_TIME, &position)) {
g_printerr ("Unable to retrieve current position.\n");
return;
}
@@ -111,10 +111,10 @@ int main(int argc, char *argv[]) {
" 'Q' to quit\n");
/* Build the pipeline */
- data.pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
+ data.pipeline = gst_parse_launch ("playbin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Add a keyboard watch so we get notified of keystrokes */
-#ifdef _WIN32
+#ifdef G_OS_WIN32
io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
#else
io_stdin = g_io_channel_unix_new (fileno (stdin));
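
The hunk above reflects the 1.0 query API: gst_element_query_position() and gst_element_query_duration() now take the GstFormat by value instead of through an in/out pointer, so the local GstFormat variable disappears. A minimal sketch of the new call, wrapped in a hypothetical print_position() helper that is handed the tutorial's pipeline:

#include <gst/gst.h>

/* GStreamer 1.0: the format is passed by value; only the position comes back */
static void print_position (GstElement *pipeline) {
  gint64 pos = GST_CLOCK_TIME_NONE;

  if (gst_element_query_position (pipeline, GST_FORMAT_TIME, &pos))
    g_print ("Position: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (pos));
  else
    g_printerr ("Unable to retrieve current position.\n");
}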
diff --git a/gst-sdk/tutorials/basic-tutorial-15.c b/gst-sdk/tutorials/basic-tutorial-15.c
index 1daf10b..29e68cb 100644
--- a/gst-sdk/tutorials/basic-tutorial-15.c
+++ b/gst-sdk/tutorials/basic-tutorial-15.c
@@ -57,7 +57,7 @@ int main(int argc, char *argv[]) {
g_signal_connect (texture, "size-change", G_CALLBACK (size_change), NULL);
/* Build the GStreamer pipeline */
- pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
+ pipeline = gst_parse_launch ("playbin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Instantiate the Clutter sink */
sink = gst_element_factory_make ("autocluttersink", NULL);
diff --git a/gst-sdk/tutorials/basic-tutorial-3.c b/gst-sdk/tutorials/basic-tutorial-3.c
index c4ac241..bbb1524 100644
--- a/gst-sdk/tutorials/basic-tutorial-3.c
+++ b/gst-sdk/tutorials/basic-tutorial-3.c
@@ -123,7 +123,7 @@ static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *dat
}
/* Check the new pad's type */
- new_pad_caps = gst_pad_get_caps (new_pad);
+ new_pad_caps = gst_pad_query_caps (new_pad, NULL);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
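
gst_pad_get_caps() is gone in 1.0; a pad's possible caps are obtained with gst_pad_query_caps(), which takes an optional filter and returns a reference the caller must drop. A minimal sketch of the pad-type check, wrapped in a hypothetical pad_is_raw_audio() helper:

#include <gst/gst.h>

/* Decide whether a freshly added uridecodebin pad carries raw audio */
static gboolean pad_is_raw_audio (GstPad *new_pad) {
  GstCaps *caps = gst_pad_query_caps (new_pad, NULL);   /* NULL = no filter */
  GstStructure *st = gst_caps_get_structure (caps, 0);
  /* 1.0 collapses audio/x-raw-int and audio/x-raw-float into audio/x-raw */
  gboolean is_audio = g_str_has_prefix (gst_structure_get_name (st), "audio/x-raw");

  gst_caps_unref (caps);
  return is_audio;
}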
diff --git a/gst-sdk/tutorials/basic-tutorial-4.c b/gst-sdk/tutorials/basic-tutorial-4.c
index 7f3c94f..2924602 100644
--- a/gst-sdk/tutorials/basic-tutorial-4.c
+++ b/gst-sdk/tutorials/basic-tutorial-4.c
@@ -2,7 +2,7 @@
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
- GstElement *playbin2; /* Our one and only element */
+ GstElement *playbin; /* Our one and only element */
gboolean playing; /* Are we in the PLAYING state? */
gboolean terminate; /* Should we terminate execution? */
gboolean seek_enabled; /* Is seeking enabled for this media? */
@@ -29,26 +29,26 @@ int main(int argc, char *argv[]) {
gst_init (&argc, &argv);
/* Create the elements */
- data.playbin2 = gst_element_factory_make ("playbin2", "playbin2");
+ data.playbin = gst_element_factory_make ("playbin", "playbin");
- if (!data.playbin2) {
+ if (!data.playbin) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Set the URI to play */
- g_object_set (data.playbin2, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
+ g_object_set (data.playbin, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Start playing */
- ret = gst_element_set_state (data.playbin2, GST_STATE_PLAYING);
+ ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
- gst_object_unref (data.playbin2);
+ gst_object_unref (data.playbin);
return -1;
}
/* Listen to the bus */
- bus = gst_element_get_bus (data.playbin2);
+ bus = gst_element_get_bus (data.playbin);
do {
msg = gst_bus_timed_pop_filtered (bus, 100 * GST_MSECOND,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_DURATION);
@@ -59,17 +59,16 @@ int main(int argc, char *argv[]) {
} else {
/* We got no message, this means the timeout expired */
if (data.playing) {
- GstFormat fmt = GST_FORMAT_TIME;
gint64 current = -1;
/* Query the current position of the stream */
- if (!gst_element_query_position (data.playbin2, &fmt, &current)) {
+ if (!gst_element_query_position (data.playbin, GST_FORMAT_TIME, &current)) {
g_printerr ("Could not query current position.\n");
}
/* If we didn't know it yet, query the stream duration */
if (!GST_CLOCK_TIME_IS_VALID (data.duration)) {
- if (!gst_element_query_duration (data.playbin2, &fmt, &data.duration)) {
+ if (!gst_element_query_duration (data.playbin, GST_FORMAT_TIME, &data.duration)) {
g_printerr ("Could not query current duration.\n");
}
}
@@ -81,7 +80,7 @@ int main(int argc, char *argv[]) {
/* If seeking is enabled, we have not done it yet, and the time is right, seek */
if (data.seek_enabled && !data.seek_done && current > 10 * GST_SECOND) {
g_print ("\nReached 10s, performing seek...\n");
- gst_element_seek_simple (data.playbin2, GST_FORMAT_TIME,
+ gst_element_seek_simple (data.playbin, GST_FORMAT_TIME,
GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, 30 * GST_SECOND);
data.seek_done = TRUE;
}
@@ -91,8 +90,8 @@ int main(int argc, char *argv[]) {
/* Free resources */
gst_object_unref (bus);
- gst_element_set_state (data.playbin2, GST_STATE_NULL);
- gst_object_unref (data.playbin2);
+ gst_element_set_state (data.playbin, GST_STATE_NULL);
+ gst_object_unref (data.playbin);
return 0;
}
@@ -120,7 +119,7 @@ static void handle_message (CustomData *data, GstMessage *msg) {
case GST_MESSAGE_STATE_CHANGED: {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
- if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin2)) {
+ if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
@@ -132,7 +131,7 @@ static void handle_message (CustomData *data, GstMessage *msg) {
GstQuery *query;
gint64 start, end;
query = gst_query_new_seeking (GST_FORMAT_TIME);
- if (gst_element_query (data->playbin2, query)) {
+ if (gst_element_query (data->playbin, query)) {
gst_query_parse_seeking (query, NULL, &data->seek_enabled, &start, &end);
if (data->seek_enabled) {
g_print ("Seeking is ENABLED from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT "\n",
diff --git a/gst-sdk/tutorials/basic-tutorial-5.c b/gst-sdk/tutorials/basic-tutorial-5.c
index c182d76..46eeb79 100644
--- a/gst-sdk/tutorials/basic-tutorial-5.c
+++ b/gst-sdk/tutorials/basic-tutorial-5.c
@@ -2,7 +2,7 @@
#include <gtk/gtk.h>
#include <gst/gst.h>
-#include <gst/interfaces/xoverlay.h>
+#include <gst/video/videooverlay.h>
#include <gdk/gdk.h>
#if defined (GDK_WINDOWING_X11)
@@ -15,7 +15,7 @@
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
- GstElement *playbin2; /* Our one and only pipeline */
+ GstElement *playbin; /* Our one and only pipeline */
GtkWidget *slider; /* Slider widget to keep track of current position */
GtkWidget *streams_list; /* Text widget to display info about the streams */
@@ -43,23 +43,23 @@ static void realize_cb (GtkWidget *widget, CustomData *data) {
#elif defined (GDK_WINDOWING_X11)
window_handle = GDK_WINDOW_XID (window);
#endif
- /* Pass it to playbin2, which implements XOverlay and will forward it to the video sink */
- gst_x_overlay_set_window_handle (GST_X_OVERLAY (data->playbin2), window_handle);
+ /* Pass it to playbin, which implements VideoOverlay and will forward it to the video sink */
+ gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->playbin), window_handle);
}
/* This function is called when the PLAY button is clicked */
static void play_cb (GtkButton *button, CustomData *data) {
- gst_element_set_state (data->playbin2, GST_STATE_PLAYING);
+ gst_element_set_state (data->playbin, GST_STATE_PLAYING);
}
/* This function is called when the PAUSE button is clicked */
static void pause_cb (GtkButton *button, CustomData *data) {
- gst_element_set_state (data->playbin2, GST_STATE_PAUSED);
+ gst_element_set_state (data->playbin, GST_STATE_PAUSED);
}
/* This function is called when the STOP button is clicked */
static void stop_cb (GtkButton *button, CustomData *data) {
- gst_element_set_state (data->playbin2, GST_STATE_READY);
+ gst_element_set_state (data->playbin, GST_STATE_READY);
}
/* This function is called when the main window is closed */
@@ -71,20 +71,16 @@ static void delete_event_cb (GtkWidget *widget, GdkEvent *event, CustomData *dat
/* This function is called every time the video window needs to be redrawn (due to damage/exposure,
* rescaling, etc). GStreamer takes care of this in the PAUSED and PLAYING states, otherwise,
* we simply draw a black rectangle to avoid garbage showing up. */
-static gboolean expose_cb (GtkWidget *widget, GdkEventExpose *event, CustomData *data) {
+static gboolean draw_cb (GtkWidget *widget, cairo_t *cr, CustomData *data) {
if (data->state < GST_STATE_PAUSED) {
GtkAllocation allocation;
- GdkWindow *window = gtk_widget_get_window (widget);
- cairo_t *cr;
/* Cairo is a 2D graphics library which we use here to clean the video window.
* It is used by GStreamer for other reasons, so it will always be available to us. */
gtk_widget_get_allocation (widget, &allocation);
- cr = gdk_cairo_create (window);
cairo_set_source_rgb (cr, 0, 0, 0);
cairo_rectangle (cr, 0, 0, allocation.width, allocation.height);
cairo_fill (cr);
- cairo_destroy (cr);
}
return FALSE;
@@ -94,7 +90,7 @@ static gboolean expose_cb (GtkWidget *widget, GdkEventExpose *event, CustomData
* new position here. */
static void slider_cb (GtkRange *range, CustomData *data) {
gdouble value = gtk_range_get_value (GTK_RANGE (data->slider));
- gst_element_seek_simple (data->playbin2, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT,
+ gst_element_seek_simple (data->playbin, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT,
(gint64)(value * GST_SECOND));
}
@@ -113,7 +109,7 @@ static void create_ui (CustomData *data) {
video_window = gtk_drawing_area_new ();
gtk_widget_set_double_buffered (video_window, FALSE);
g_signal_connect (video_window, "realize", G_CALLBACK (realize_cb), data);
- g_signal_connect (video_window, "expose_event", G_CALLBACK (expose_cb), data);
+ g_signal_connect (video_window, "draw", G_CALLBACK (draw_cb), data);
play_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_PLAY);
g_signal_connect (G_OBJECT (play_button), "clicked", G_CALLBACK (play_cb), data);
@@ -124,24 +120,24 @@ static void create_ui (CustomData *data) {
stop_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_STOP);
g_signal_connect (G_OBJECT (stop_button), "clicked", G_CALLBACK (stop_cb), data);
- data->slider = gtk_hscale_new_with_range (0, 100, 1);
+ data->slider = gtk_scale_new_with_range (GTK_ORIENTATION_HORIZONTAL, 0, 100, 1);
gtk_scale_set_draw_value (GTK_SCALE (data->slider), 0);
data->slider_update_signal_id = g_signal_connect (G_OBJECT (data->slider), "value-changed", G_CALLBACK (slider_cb), data);
data->streams_list = gtk_text_view_new ();
gtk_text_view_set_editable (GTK_TEXT_VIEW (data->streams_list), FALSE);
- controls = gtk_hbox_new (FALSE, 0);
+ controls = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 0);
gtk_box_pack_start (GTK_BOX (controls), play_button, FALSE, FALSE, 2);
gtk_box_pack_start (GTK_BOX (controls), pause_button, FALSE, FALSE, 2);
gtk_box_pack_start (GTK_BOX (controls), stop_button, FALSE, FALSE, 2);
gtk_box_pack_start (GTK_BOX (controls), data->slider, TRUE, TRUE, 2);
- main_hbox = gtk_hbox_new (FALSE, 0);
+ main_hbox = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 0);
gtk_box_pack_start (GTK_BOX (main_hbox), video_window, TRUE, TRUE, 0);
gtk_box_pack_start (GTK_BOX (main_hbox), data->streams_list, FALSE, FALSE, 2);
- main_box = gtk_vbox_new (FALSE, 0);
+ main_box = gtk_box_new (GTK_ORIENTATION_VERTICAL, 0);
gtk_box_pack_start (GTK_BOX (main_box), main_hbox, TRUE, TRUE, 0);
gtk_box_pack_start (GTK_BOX (main_box), controls, FALSE, FALSE, 0);
gtk_container_add (GTK_CONTAINER (main_window), main_box);
@@ -152,7 +148,6 @@ static void create_ui (CustomData *data) {
/* This function is called periodically to refresh the GUI */
static gboolean refresh_ui (CustomData *data) {
- GstFormat fmt = GST_FORMAT_TIME;
gint64 current = -1;
/* We do not want to update anything unless we are in the PAUSED or PLAYING states */
@@ -161,7 +156,7 @@ static gboolean refresh_ui (CustomData *data) {
/* If we didn't know it yet, query the stream duration */
if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
- if (!gst_element_query_duration (data->playbin2, &fmt, &data->duration)) {
+ if (!gst_element_query_duration (data->playbin, GST_FORMAT_TIME, &data->duration)) {
g_printerr ("Could not query current duration.\n");
} else {
/* Set the range of the slider to the clip duration, in SECONDS */
@@ -169,7 +164,7 @@ static gboolean refresh_ui (CustomData *data) {
}
}
- if (gst_element_query_position (data->playbin2, &fmt, &current)) {
+ if (gst_element_query_position (data->playbin, GST_FORMAT_TIME, &current)) {
/* Block the "value-changed" signal, so the slider_cb function is not called
* (which would trigger a seek the user has not requested) */
g_signal_handler_block (data->slider, data->slider_update_signal_id);
@@ -182,12 +177,12 @@ static gboolean refresh_ui (CustomData *data) {
}
/* This function is called when new metadata is discovered in the stream */
-static void tags_cb (GstElement *playbin2, gint stream, CustomData *data) {
+static void tags_cb (GstElement *playbin, gint stream, CustomData *data) {
/* We are possibly in a GStreamer working thread, so we notify the main
* thread of this event through a message in the bus */
- gst_element_post_message (playbin2,
- gst_message_new_application (GST_OBJECT (playbin2),
- gst_structure_new ("tags-changed", NULL)));
+ gst_element_post_message (playbin,
+ gst_message_new_application (GST_OBJECT (playbin),
+ gst_structure_new_empty ("tags-changed")));
}
/* This function is called when an error message is posted on the bus */
@@ -203,14 +198,14 @@ static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
g_free (debug_info);
/* Set the pipeline to READY (which stops playback) */
- gst_element_set_state (data->playbin2, GST_STATE_READY);
+ gst_element_set_state (data->playbin, GST_STATE_READY);
}
/* This function is called when an End-Of-Stream message is posted on the bus.
* We just set the pipeline to READY (which stops playback) */
static void eos_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
g_print ("End-Of-Stream reached.\n");
- gst_element_set_state (data->playbin2, GST_STATE_READY);
+ gst_element_set_state (data->playbin, GST_STATE_READY);
}
/* This function is called when the pipeline changes states. We use it to
@@ -218,7 +213,7 @@ static void eos_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
- if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin2)) {
+ if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
data->state = new_state;
g_print ("State set to %s\n", gst_element_state_get_name (new_state));
if (old_state == GST_STATE_READY && new_state == GST_STATE_PAUSED) {
@@ -242,14 +237,14 @@ static void analyze_streams (CustomData *data) {
gtk_text_buffer_set_text (text, "", -1);
/* Read some properties */
- g_object_get (data->playbin2, "n-video", &n_video, NULL);
- g_object_get (data->playbin2, "n-audio", &n_audio, NULL);
- g_object_get (data->playbin2, "n-text", &n_text, NULL);
+ g_object_get (data->playbin, "n-video", &n_video, NULL);
+ g_object_get (data->playbin, "n-audio", &n_audio, NULL);
+ g_object_get (data->playbin, "n-text", &n_text, NULL);
for (i = 0; i < n_video; i++) {
tags = NULL;
/* Retrieve the stream's video tags */
- g_signal_emit_by_name (data->playbin2, "get-video-tags", i, &tags);
+ g_signal_emit_by_name (data->playbin, "get-video-tags", i, &tags);
if (tags) {
total_str = g_strdup_printf ("video stream %d:\n", i);
gtk_text_buffer_insert_at_cursor (text, total_str, -1);
@@ -266,7 +261,7 @@ static void analyze_streams (CustomData *data) {
for (i = 0; i < n_audio; i++) {
tags = NULL;
/* Retrieve the stream's audio tags */
- g_signal_emit_by_name (data->playbin2, "get-audio-tags", i, &tags);
+ g_signal_emit_by_name (data->playbin, "get-audio-tags", i, &tags);
if (tags) {
total_str = g_strdup_printf ("\naudio stream %d:\n", i);
gtk_text_buffer_insert_at_cursor (text, total_str, -1);
@@ -295,7 +290,7 @@ static void analyze_streams (CustomData *data) {
for (i = 0; i < n_text; i++) {
tags = NULL;
/* Retrieve the stream's subtitle tags */
- g_signal_emit_by_name (data->playbin2, "get-text-tags", i, &tags);
+ g_signal_emit_by_name (data->playbin, "get-text-tags", i, &tags);
if (tags) {
total_str = g_strdup_printf ("\nsubtitle stream %d:\n", i);
gtk_text_buffer_insert_at_cursor (text, total_str, -1);
@@ -314,7 +309,7 @@ static void analyze_streams (CustomData *data) {
/* This function is called when an "application" message is posted on the bus.
* Here we retrieve the message posted by the tags_cb callback */
static void application_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
- if (g_strcmp0 (gst_structure_get_name (msg->structure), "tags-changed") == 0) {
+ if (g_strcmp0 (gst_structure_get_name (gst_message_get_structure (msg)), "tags-changed") == 0) {
/* If the message is the "tags-changed" (only one we are currently issuing), update
* the stream info GUI */
analyze_streams (data);
@@ -337,26 +332,26 @@ int main(int argc, char *argv[]) {
data.duration = GST_CLOCK_TIME_NONE;
/* Create the elements */
- data.playbin2 = gst_element_factory_make ("playbin2", "playbin2");
+ data.playbin = gst_element_factory_make ("playbin", "playbin");
- if (!data.playbin2) {
+ if (!data.playbin) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Set the URI to play */
- g_object_set (data.playbin2, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
+ g_object_set (data.playbin, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
- /* Connect to interesting signals in playbin2 */
- g_signal_connect (G_OBJECT (data.playbin2), "video-tags-changed", (GCallback) tags_cb, &data);
- g_signal_connect (G_OBJECT (data.playbin2), "audio-tags-changed", (GCallback) tags_cb, &data);
- g_signal_connect (G_OBJECT (data.playbin2), "text-tags-changed", (GCallback) tags_cb, &data);
+ /* Connect to interesting signals in playbin */
+ g_signal_connect (G_OBJECT (data.playbin), "video-tags-changed", (GCallback) tags_cb, &data);
+ g_signal_connect (G_OBJECT (data.playbin), "audio-tags-changed", (GCallback) tags_cb, &data);
+ g_signal_connect (G_OBJECT (data.playbin), "text-tags-changed", (GCallback) tags_cb, &data);
/* Create the GUI */
create_ui (&data);
/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
- bus = gst_element_get_bus (data.playbin2);
+ bus = gst_element_get_bus (data.playbin);
gst_bus_add_signal_watch (bus);
g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback)eos_cb, &data);
@@ -365,10 +360,10 @@ int main(int argc, char *argv[]) {
gst_object_unref (bus);
/* Start playing */
- ret = gst_element_set_state (data.playbin2, GST_STATE_PLAYING);
+ ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
- gst_object_unref (data.playbin2);
+ gst_object_unref (data.playbin);
return -1;
}
@@ -379,7 +374,7 @@ int main(int argc, char *argv[]) {
gtk_main ();
/* Free resources */
- gst_element_set_state (data.playbin2, GST_STATE_NULL);
- gst_object_unref (data.playbin2);
+ gst_element_set_state (data.playbin, GST_STATE_NULL);
+ gst_object_unref (data.playbin);
return 0;
}
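
Tutorial 5 carries two migrations at once: the 0.10 GstXOverlay interface became GstVideoOverlay (header gst/video/videooverlay.h, library gstreamer-video-1.0), and the port also moves to GTK 3 idioms (the cairo-based "draw" signal, gtk_box_new(), gtk_scale_new_with_range()). A minimal sketch of the window-handle handoff, with a hypothetical attach_video_overlay() helper standing in for the realize callback above:

#include <gst/gst.h>
#include <gst/video/videooverlay.h>   /* link against gstreamer-video-1.0 */

/* Hand the native window handle to playbin, which proxies GstVideoOverlay
 * to whichever video sink it instantiated */
static void attach_video_overlay (GstElement *playbin, guintptr window_handle) {
  gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (playbin), window_handle);
}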
diff --git a/gst-sdk/tutorials/basic-tutorial-6.c b/gst-sdk/tutorials/basic-tutorial-6.c
index 0715a51..4714938 100644
--- a/gst-sdk/tutorials/basic-tutorial-6.c
+++ b/gst-sdk/tutorials/basic-tutorial-6.c
@@ -37,14 +37,14 @@ static void print_pad_templates_information (GstElementFactory * factory) {
GstStaticPadTemplate *padtemplate;
g_print ("Pad Templates for %s:\n", gst_element_factory_get_longname (factory));
- if (!factory->numpadtemplates) {
+ if (!gst_element_factory_get_num_pad_templates (factory)) {
g_print (" none\n");
return;
}
- pads = factory->staticpadtemplates;
+ pads = gst_element_factory_get_static_pad_templates (factory);
while (pads) {
- padtemplate = (GstStaticPadTemplate *) (pads->data);
+ padtemplate = pads->data;
pads = g_list_next (pads);
if (padtemplate->direction == GST_PAD_SRC)
@@ -64,8 +64,12 @@ static void print_pad_templates_information (GstElementFactory * factory) {
g_print (" Availability: UNKNOWN!!!\n");
if (padtemplate->static_caps.string) {
+ GstCaps *caps;
+
g_print (" Capabilities:\n");
- print_caps (gst_static_caps_get (&padtemplate->static_caps), " ");
+ caps = gst_static_caps_get (&padtemplate->static_caps);
+ print_caps (caps, " ");
+ gst_caps_unref (caps);
}
g_print ("\n");
@@ -85,9 +89,9 @@ static void print_pad_capabilities (GstElement *element, gchar *pad_name) {
}
/* Retrieve negotiated caps (or acceptable caps if negotiation is not finished yet) */
- caps = gst_pad_get_negotiated_caps (pad);
+ caps = gst_pad_get_current_caps (pad);
if (!caps)
- caps = gst_pad_get_caps_reffed (pad);
+ caps = gst_pad_query_caps (pad, NULL);
/* Print and free */
g_print ("Caps for the %s pad:\n", pad_name);
diff --git a/gst-sdk/tutorials/basic-tutorial-7.c b/gst-sdk/tutorials/basic-tutorial-7.c
index 3201d64..73e34d9 100644
--- a/gst-sdk/tutorials/basic-tutorial-7.c
+++ b/gst-sdk/tutorials/basic-tutorial-7.c
@@ -21,7 +21,7 @@ int main(int argc, char *argv[]) {
audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink");
video_queue = gst_element_factory_make ("queue", "video_queue");
visual = gst_element_factory_make ("wavescope", "visual");
- video_convert = gst_element_factory_make ("ffmpegcolorspace", "csp");
+ video_convert = gst_element_factory_make ("videoconvert", "video_convert");
video_sink = gst_element_factory_make ("autovideosink", "video_sink");
/* Create the empty pipeline */
@@ -49,7 +49,7 @@ int main(int argc, char *argv[]) {
}
/* Manually link the Tee, which has "Request" pads */
- tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee), "src%d");
+ tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee), "src_%u");
tee_audio_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL);
g_print ("Obtained request pad %s for audio branch.\n", gst_pad_get_name (tee_audio_pad));
queue_audio_pad = gst_element_get_static_pad (audio_queue, "sink");
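
The tee's request pad template is named "src_%u" in 1.0 (0.10 used "src%d"), and ffmpegcolorspace is replaced by videoconvert. A minimal sketch of requesting and linking one tee branch, wrapped in a hypothetical link_tee_branch() helper:

#include <gst/gst.h>

/* Request a source pad from the tee ("src_%u" in 1.0) and link it to the
 * branch queue's sink pad. Returns the request pad so the caller can
 * gst_element_release_request_pad() it at teardown, or NULL on failure. */
static GstPad *link_tee_branch (GstElement *tee, GstElement *queue) {
  GstPad *tee_pad = gst_element_get_request_pad (tee, "src_%u");
  GstPad *queue_pad = gst_element_get_static_pad (queue, "sink");
  gboolean ok = gst_pad_link (tee_pad, queue_pad) == GST_PAD_LINK_OK;

  gst_object_unref (queue_pad);
  if (!ok) {
    gst_element_release_request_pad (tee, tee_pad);
    gst_object_unref (tee_pad);
    return NULL;
  }
  return tee_pad;
}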
diff --git a/gst-sdk/tutorials/basic-tutorial-8.c b/gst-sdk/tutorials/basic-tutorial-8.c
index 8824e09..2ddeaa7 100644
--- a/gst-sdk/tutorials/basic-tutorial-8.c
+++ b/gst-sdk/tutorials/basic-tutorial-8.c
@@ -1,9 +1,9 @@
#include <gst/gst.h>
+#include <gst/audio/audio.h>
#include <string.h>
#define CHUNK_SIZE 1024 /* Amount of bytes we are sending in each buffer */
#define SAMPLE_RATE 44100 /* Samples per second we are sending */
-#define AUDIO_CAPS "audio/x-raw-int,channels=1,rate=%d,signed=(boolean)true,width=16,depth=16,endianness=BYTE_ORDER"
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
@@ -27,6 +27,7 @@ static gboolean push_data (CustomData *data) {
GstBuffer *buffer;
GstFlowReturn ret;
int i;
+ GstMapInfo map;
gint16 *raw;
gint num_samples = CHUNK_SIZE / 2; /* Because each sample is 16 bits */
gfloat freq;
@@ -39,7 +40,8 @@ static gboolean push_data (CustomData *data) {
GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (CHUNK_SIZE, GST_SECOND, SAMPLE_RATE);
/* Generate some psychedelic waveforms */
- raw = (gint16 *)GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_WRITE);
+ raw = (gint16 *)map.data;
data->c += data->d;
data->d -= data->c / 1000;
freq = 1100 + 1000 * data->d;
@@ -48,6 +50,7 @@ static gboolean push_data (CustomData *data) {
data->b -= data->a / freq;
raw[i] = (gint16)(500 * data->a);
}
+ gst_buffer_unmap (buffer, &map);
data->num_samples += num_samples;
/* Push the buffer into the appsrc */
@@ -84,15 +87,15 @@ static void stop_feed (GstElement *source, CustomData *data) {
}
/* The appsink has received a buffer */
-static void new_buffer (GstElement *sink, CustomData *data) {
- GstBuffer *buffer;
+static void new_sample (GstElement *sink, CustomData *data) {
+ GstSample *sample;
/* Retrieve the buffer */
- g_signal_emit_by_name (sink, "pull-buffer", &buffer);
- if (buffer) {
+ g_signal_emit_by_name (sink, "pull-sample", &sample);
+ if (sample) {
/* The only thing we do in this example is print a * to indicate a received buffer */
g_print ("*");
- gst_buffer_unref (buffer);
+ gst_sample_unref (sample);
}
}
@@ -116,7 +119,7 @@ int main(int argc, char *argv[]) {
GstPadTemplate *tee_src_pad_template;
GstPad *tee_audio_pad, *tee_video_pad, *tee_app_pad;
GstPad *queue_audio_pad, *queue_video_pad, *queue_app_pad;
- gchar *audio_caps_text;
+ GstAudioInfo info;
GstCaps *audio_caps;
GstBus *bus;
@@ -138,7 +141,7 @@ int main(int argc, char *argv[]) {
data.video_queue = gst_element_factory_make ("queue", "video_queue");
data.audio_convert2 = gst_element_factory_make ("audioconvert", "audio_convert2");
data.visual = gst_element_factory_make ("wavescope", "visual");
- data.video_convert = gst_element_factory_make ("ffmpegcolorspace", "csp");
+ data.video_convert = gst_element_factory_make ("videoconvert", "video_convert");
data.video_sink = gst_element_factory_make ("autovideosink", "video_sink");
data.app_queue = gst_element_factory_make ("queue", "app_queue");
data.app_sink = gst_element_factory_make ("appsink", "app_sink");
@@ -157,17 +160,16 @@ int main(int argc, char *argv[]) {
g_object_set (data.visual, "shader", 0, "style", 0, NULL);
/* Configure appsrc */
- audio_caps_text = g_strdup_printf (AUDIO_CAPS, SAMPLE_RATE);
- audio_caps = gst_caps_from_string (audio_caps_text);
- g_object_set (data.app_source, "caps", audio_caps, NULL);
+ gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16, SAMPLE_RATE, 1, NULL);
+ audio_caps = gst_audio_info_to_caps (&info);
+ g_object_set (data.app_source, "caps", audio_caps, "format", GST_FORMAT_TIME, NULL);
g_signal_connect (data.app_source, "need-data", G_CALLBACK (start_feed), &data);
g_signal_connect (data.app_source, "enough-data", G_CALLBACK (stop_feed), &data);
/* Configure appsink */
g_object_set (data.app_sink, "emit-signals", TRUE, "caps", audio_caps, NULL);
- g_signal_connect (data.app_sink, "new-buffer", G_CALLBACK (new_buffer), &data);
+ g_signal_connect (data.app_sink, "new-sample", G_CALLBACK (new_sample), &data);
gst_caps_unref (audio_caps);
- g_free (audio_caps_text);
/* Link all elements that can be automatically linked because they have "Always" pads */
gst_bin_add_many (GST_BIN (data.pipeline), data.app_source, data.tee, data.audio_queue, data.audio_convert1, data.audio_resample,
@@ -183,7 +185,7 @@ int main(int argc, char *argv[]) {
}
/* Manually link the Tee, which has "Request" pads */
- tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (data.tee), "src%d");
+ tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (data.tee), "src_%u");
tee_audio_pad = gst_element_request_pad (data.tee, tee_src_pad_template, NULL, NULL);
g_print ("Obtained request pad %s for audio branch.\n", gst_pad_get_name (tee_audio_pad));
queue_audio_pad = gst_element_get_static_pad (data.audio_queue, "sink");
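
Tutorial 8 shows the two biggest 1.0 data-path changes: buffer memory is accessed through gst_buffer_map()/gst_buffer_unmap() with a GstMapInfo instead of GST_BUFFER_DATA, and appsink hands out GstSample objects through the "new-sample"/"pull-sample" signals. A minimal sketch of the consumer side, assuming an appsink configured with emit-signals=TRUE as in the hunk above; the handler name on_new_sample() is hypothetical:

#include <gst/gst.h>

/* appsink "new-sample" handler: pull the GstSample, map its buffer read-only */
static GstFlowReturn on_new_sample (GstElement *sink, gpointer user_data) {
  GstSample *sample = NULL;

  g_signal_emit_by_name (sink, "pull-sample", &sample);
  if (sample) {
    GstBuffer *buffer = gst_sample_get_buffer (sample);
    GstMapInfo map;

    if (gst_buffer_map (buffer, &map, GST_MAP_READ)) {
      /* map.data / map.size replace 0.10's GST_BUFFER_DATA / GST_BUFFER_SIZE */
      g_print ("Received %" G_GSIZE_FORMAT " bytes\n", map.size);
      gst_buffer_unmap (buffer, &map);
    }
    gst_sample_unref (sample);
  }
  return GST_FLOW_OK;
}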