| author | Frediano Ziglio <fziglio@redhat.com> | 2018-11-14 15:48:19 +0000 |
| --- | --- | --- |
| committer | Frediano Ziglio <freddy77@gmail.com> | 2022-03-06 14:53:30 +0000 |
| commit | 603f4e30378a0e298dbab523b2a4ab78ab6e7f87 (patch) | |
| tree | be63573f00bad8856928e9d8bac6687bb69e40d9 | |
| parent | 195716311c2f99513afa617c744b409b76dcfa4a (diff) | |
SAVE
vaapisink does not crash... but something is still wrong!
This actually contains tons of changes... but vaapisink not crashing
is interesting.
-rw-r--r-- | src/channel-display-gst.c | 146 |
1 file changed, 133 insertions, 13 deletions
diff --git a/src/channel-display-gst.c b/src/channel-display-gst.c
index f1ff83d..8dc1437 100644
--- a/src/channel-display-gst.c
+++ b/src/channel-display-gst.c
@@ -28,6 +28,9 @@
 #include <gst/app/gstappsink.h>
 #include <gst/video/gstvideometa.h>
 
+#include <gdk/gdkx.h>
+#include <va/va_x11.h>
+
 /*
 -GST_EVENT_QOS msg
@@ -73,12 +76,11 @@
  *
  */
 
 #define QOS FALSE // TRUE will emit qos events upstream which may cause decoder to drop frames (should be effective only with SYNC)
-#define SYNC FALSE // TRUE will sync buffer times with clock time
-#define LIVE FALSE // TRUE will add pipeline latency estimation (should be effective only with SYNC)
+#define SYNC TRUE // TRUE will sync buffer times with clock time
+#define LIVE TRUE // TRUE will add pipeline latency estimation (should be effective only with SYNC)
 #define DROP FALSE // TRUE if you want pipeline to decide to drop frames
 
-GstClockTime last;
 typedef struct SpiceGstFrame SpiceGstFrame;
 
 /* GStreamer decoder implementation */
@@ -102,12 +104,14 @@ typedef struct SpiceGstDecoder {
     uint32_t last_mm_time;
     gdouble avg_rate;
+    GstClockTime last_sink;
+    GstClockTime last_pts;
 
     GMutex queues_mutex;
     GQueue *decoding_queue;
     SpiceGstFrame *display_frame;
     guint timer_id;
     guint pending_samples;
-    guint queue; // queue may not be accurate if qos or drop is true
+    gint queue; // queue may not be accurate if qos or drop is true
 } SpiceGstDecoder;
 
 #define VALID_VIDEO_CODEC_TYPE(codec) \
@@ -426,6 +430,7 @@ static void free_pipeline(SpiceGstDecoder *decoder)
 
 static gboolean handle_pipeline_message(GstBus *bus, GstMessage *msg, gpointer video_decoder)
 {
+    static GstObject *gstvaapidisplay = NULL;
     SpiceGstDecoder *decoder = video_decoder;
 
     switch(GST_MESSAGE_TYPE(msg)) {
@@ -467,6 +472,78 @@ static gboolean handle_pipeline_message(GstBus *bus, GstMessage *msg, gpointer v
         printf("Feedback QOS MSG(%d): processed: %lu dropped: %lu \n", (int)format, processed, dropped);
         break;
     }
+    case GST_MESSAGE_NEED_CONTEXT:
+    {
+        const gchar *context_type;
+
+        gst_message_parse_context_type(msg, &context_type);
+        g_print("got need context %s from %s\n", context_type, GST_MESSAGE_SRC_NAME(msg));
+
+#if 0
+        if (g_strcmp0(context_type, "gst.vaapi.Display") == 0 && gstvaapidisplay) {
+            g_print("Setting saved context on %s\n", GST_MESSAGE_SRC_NAME(msg));
+            GstContext *context = gst_context_new("gst.vaapi.Display", TRUE);
+            GstStructure *s = gst_context_writable_structure(context);
+            gst_structure_set(s, "gst.vaapi.Display", GST_TYPE_OBJECT, gstvaapidisplay, NULL);
+            gst_element_set_context(GST_ELEMENT(msg->src), context);
+            gst_context_unref(context);
+            break;
+        }
+#endif
+
+        if (g_strcmp0(context_type, "gst.vaapi.app.Display") == 0) {
+            static GstContext *context = NULL;
+            if (!context) {
+                Display *x11_display = gdk_x11_get_default_xdisplay();
+                context = gst_context_new("gst.vaapi.app.Display", TRUE);
+                GstStructure *structure = gst_context_writable_structure(context);
+                gst_structure_set(structure, "x11-display", G_TYPE_POINTER, x11_display, NULL);
+                g_assert_nonnull(x11_display);
+                VADisplay va_display = vaGetDisplay(x11_display);
+                g_assert_nonnull(va_display);
+                gst_structure_set(structure, "va-display", G_TYPE_POINTER, va_display, NULL);
+            }
+            gst_element_set_context(GST_ELEMENT(msg->src), context);
+// gst_context_unref(context);
+        }
+#if 0
+        if (g_strcmp0 (context_type, GST_GL_DISPLAY_CONTEXT_TYPE) == 0) {
+            GstContext *context = NULL;
+            Display *x11_display; /* get this from the application somehow */
+            GstGLDisplay *gl_display = GST_GL_DISPLAY (gst_gl_display_x11_new_with_display (x11_display));
+
+            context = gst_context_new (GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
+            gst_context_set_gl_display (context, gl_display);
+
+            gst_element_set_context (GST_ELEMENT (msg->src), context);
+            if (context)
+                gst_context_unref (context);
+        }
+#endif
+        break;
+    }
+    case GST_MESSAGE_HAVE_CONTEXT:{
+        const gchar *context_type;
+        GstContext *context = NULL;
+
+        gst_message_parse_have_context(msg, &context);
+        if (!context)
+            break;
+
+        context_type = gst_context_get_context_type(context);
+        g_print("Got have context %s from %s\n", context_type, GST_MESSAGE_SRC_NAME(msg));
+        if (g_strcmp0(context_type, "gst.vaapi.Display") != 0 || gstvaapidisplay)
+            break;
+        const GstStructure *s = gst_context_get_structure(context);
+        if (!s)
+            break;
+        const GValue *value = gst_structure_get_value(s, "gst.vaapi.Display");
+        if (!value)
+            break;
+        gstvaapidisplay = g_value_dup_object(value);
+        g_print("found display %s\n", GST_OBJECT_NAME(gstvaapidisplay));
+        break;
+    }
     default:
         /* not being handled */
         break;
@@ -503,15 +580,24 @@ static GstPadProbeReturn event_probe(GstPad *pad,
                                      GstPadProbeInfo *info, gpointer data)
 {
     SpiceGstDecoder *decoder = (SpiceGstDecoder*)data;
-    static GstClockTime last;
 
     if (info->type & GST_PAD_PROBE_TYPE_BUFFER) { // Buffer arrived
         GstBuffer *obj = GST_PAD_PROBE_INFO_BUFFER(info);
         GstClockTime cur = gst_clock_get_time(decoder->clock);
-        gdouble rate = gst_guint64_to_gdouble(cur - last) / gst_guint64_to_gdouble(GST_BUFFER_DURATION(obj)); // rate is the ratio between actual procssing time to target rate
-
-        decoder->queue--;
-        if (GST_CLOCK_TIME_IS_VALID(last) && last != 0) {
+        GstClockTime pts = GST_BUFFER_PTS(obj);
+        printf("PTS %lu CUR %lu "
+               "diff %lu "
+               "duration %lu\n",
+               pts, cur,
+               cur - GST_BUFFER_PTS(obj) - gst_element_get_base_time(decoder->pipeline),
+               GST_BUFFER_DURATION(obj));
+
+        // TODO handle drop using PTS
+        // TODO what if FPS is not constant? GST_BUFFER_DURATION is
+        // still valid?
+        gint queue = g_atomic_int_add(&decoder->queue, -1) - 1;
+        if (GST_CLOCK_TIME_IS_VALID(decoder->last_sink)) {
+            gdouble rate = gst_guint64_to_gdouble(cur - decoder->last_sink) / gst_guint64_to_gdouble(pts - decoder->last_pts); // rate is the ratio between actual processing time to target rate
             if (decoder->avg_rate < 0.0) {
                 decoder->avg_rate = rate;
             } else {
@@ -522,8 +608,9 @@ static GstPadProbeReturn event_probe(GstPad *pad,
                 }
             }
         }
-        printf("BUFFERS QOS: queue: %u, AVG RATE: %f\n",decoder->queue, decoder->avg_rate);
-        last = cur;
+        printf("BUFFERS QOS: queue: %d, AVG RATE: %f\n", queue, decoder->avg_rate);
+        decoder->last_sink = cur;
+        decoder->last_pts = pts;
     } else {
         // qos & latency events
         GstEvent *event = GST_PAD_PROBE_INFO_EVENT(info);
@@ -550,6 +637,18 @@ static GstPadProbeReturn event_probe(GstPad *pad,
     return GST_PAD_PROBE_OK;
 }
 
+static GstPadProbeReturn
+event_probe2(GstPad *pad, GstPadProbeInfo *info, gpointer data)
+{
+    char **p = data;
+    SpiceGstDecoder *decoder = (SpiceGstDecoder*)p[0];
+
+    GstClockTime cur = gst_clock_get_time(decoder->clock);
+    GstBuffer *obj = GST_PAD_PROBE_INFO_BUFFER(info);
+    printf("buffer @%s time %lu PTS %lu\n", (char*) (p+1), cur, obj ? GST_BUFFER_PTS(obj) : 0);
+    return GST_PAD_PROBE_OK;
+}
+
 static inline const char *gst_element_name(GstElement *element)
 {
     GstElementFactory *f = gst_element_get_factory(element);
@@ -569,16 +668,30 @@ add_elem_cb(GstBin * pipeline, GstBin * bin, GstElement * element, SpiceGstDecod
     if (GST_IS_BASE_SINK(element)) {// && GST_OBJECT_FLAG_IS_SET(element, GST_ELEMENT_FLAG_SINK)
         GstPad *pad;
 
+        printf("BASE time %lu %lu\n", gst_element_get_base_time(decoder->pipeline), gst_element_get_base_time(element));
        pad = gst_element_get_static_pad(element, "sink");
         gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM | GST_PAD_PROBE_TYPE_BUFFER, event_probe, decoder, NULL); //also buffers
+        // TODO why ??
         g_object_set(element,
                      "sync", SYNC,
                      "qos", QOS,
+// "display", 1,
                      "drop", DROP,
                      NULL);
         gst_object_unref(pad);
         spice_debug("^^^^SINK^^^^");
     } else {
+        GstPad *pad = gst_element_get_static_pad(element, "sink");
+        if (pad) {
+            void **p = g_malloc0(sizeof(void *) + strlen(name) + 1);
+            p[0] = decoder;
+            strcpy((char*) (p+1), name);
+            gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, event_probe2, p, g_free); //also buffers
+        }
+        g_object_set(element,
+                     "low-latency", TRUE,
+                     NULL);
+        //just trying to set this in other elements
 
         /*g_object_set(element,
                      "max-size-buffers", 0,
@@ -852,16 +965,21 @@ static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
                                               frame->data, frame->size, 0, frame->size, frame,
                                               (GDestroyNotify) spice_frame_free);
 
+    GstClockTime cur = gst_clock_get_time(decoder->clock);
     // GstClockTime pts = gst_clock_get_time(decoder->clock) - gst_element_get_base_time(decoder->pipeline) + ((uint64_t)MAX(0, margin)) * 1000 * 1000;
-    GstClockTime pts = gst_clock_get_time(decoder->clock) - gst_element_get_base_time(decoder->pipeline); //ignore margin and audio sync, this is actually kind of arrival time based
+// GstClockTime pts = cur - gst_element_get_base_time(decoder->pipeline);
+    GstClockTime pts = 1*(cur - gst_element_get_base_time(decoder->pipeline)); //ignore margin and audio sync, this is actually kind of arrival time based
     //GstClockTime pts = GST_CLOCK_TIME_NONE;
     GST_BUFFER_DURATION(buffer) = GST_CLOCK_TIME_NONE;
     GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE;
+    //GST_BUFFER_DTS(buffer) = cur - gst_element_get_base_time(decoder->pipeline);
     GST_BUFFER_PTS(buffer) = pts;
+    //GST_BUFFER_PTS(buffer) = GST_CLOCK_TIME_NONE;
 #if GST_CHECK_VERSION(1,14,0)
     gst_buffer_add_reference_timestamp_meta(buffer, gst_static_caps_get(&stream_reference), pts, GST_CLOCK_TIME_NONE);
 #endif
 
+    printf("PUSHING %lu curr %lu\n", GST_BUFFER_PTS(buffer), cur);
 
     SpiceGstFrame *gst_frame = create_gst_frame(buffer, frame);
     g_mutex_lock(&decoder->queues_mutex);
@@ -873,7 +991,7 @@ static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
         SPICE_DEBUG("GStreamer error: unable to push frame");
         stream_dropped_frame_on_playback(decoder->base.stream);
     } else {
-        decoder->queue++;
+        g_atomic_int_inc(&decoder->queue);
     }
     return TRUE;
 }
@@ -912,6 +1030,8 @@ VideoDecoder* create_gstreamer_decoder(int codec_type, display_stream *stream)
         g_mutex_init(&decoder->queues_mutex);
         decoder->decoding_queue = g_queue_new();
         decoder->avg_rate = -1;
+        decoder->last_sink = GST_CLOCK_TIME_NONE;
+        decoder->last_pts = GST_CLOCK_TIME_NONE;
 
         if (!create_pipeline(decoder)) {
             decoder->base.destroy((VideoDecoder*)decoder);
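
Note: below is a minimal, self-contained sketch of the technique the GST_MESSAGE_NEED_CONTEXT handler above implements: answering a "gst.vaapi.app.Display" request with the application's X11 display and a VADisplay derived from it. The context-type string and its "x11-display"/"va-display" fields are taken from the patch; registering the handler with gst_bus_set_sync_handler() (the patch instead handles the message inside its existing bus watch) and the surrounding pipeline setup are assumptions, not part of the commit.

#include <gst/gst.h>
#include <gdk/gdkx.h>
#include <va/va_x11.h>

/* Bus sync handler: when an element asks for a "gst.vaapi.app.Display"
 * context, hand it the application's X11 display and a VADisplay created
 * from it, mirroring the GST_MESSAGE_NEED_CONTEXT branch in the patch. */
static GstBusSyncReply
need_context_handler(GstBus *bus, GstMessage *msg, gpointer user_data)
{
    if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_NEED_CONTEXT) {
        const gchar *context_type;

        gst_message_parse_context_type(msg, &context_type);
        if (g_strcmp0(context_type, "gst.vaapi.app.Display") == 0) {
            Display *x11_display = gdk_x11_get_default_xdisplay();
            VADisplay va_display = vaGetDisplay(x11_display);
            GstContext *context = gst_context_new("gst.vaapi.app.Display", TRUE);
            GstStructure *s = gst_context_writable_structure(context);

            gst_structure_set(s,
                              "x11-display", G_TYPE_POINTER, x11_display,
                              "va-display", G_TYPE_POINTER, va_display,
                              NULL);
            gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(msg)), context);
            gst_context_unref(context);
        }
    }
    return GST_BUS_PASS; /* let the regular bus watch see the message too */
}

/* Registration, assuming a "pipeline" element already exists:
 *
 *     GstBus *bus = gst_element_get_bus(pipeline);
 *     gst_bus_set_sync_handler(bus, need_context_handler, NULL, NULL);
 *     gst_object_unref(bus);
 */

gst_element_set_context() keeps its own reference, so the context can be unreffed here; the patch instead caches it in a static variable and leaves the unref commented out.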
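
The switch of decoder->queue from guint to gint goes with the g_atomic_int_add()/g_atomic_int_inc() calls: the counter is incremented when a frame is pushed in spice_gst_decoder_queue_frame() and decremented in the sink pad probe, which runs on GStreamer's streaming thread. A tiny sketch of that pattern follows, with hypothetical function names; note that g_atomic_int_add() returns the value before the addition, which is why the patch subtracts 1 to log the post-decrement depth.

#include <glib.h>

static gint queue_depth; /* shared across threads, so use GLib atomics on a gint */

/* main thread: a frame was pushed into the pipeline */
static void frame_pushed(void)
{
    g_atomic_int_inc(&queue_depth);
}

/* streaming thread (pad probe): a frame reached the sink */
static gint frame_arrived(void)
{
    /* g_atomic_int_add() returns the old value; subtract 1 for the new depth */
    return g_atomic_int_add(&queue_depth, -1) - 1;
}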