-rw-r--r--  src/channel-display-gst.c  166
1 file changed, 161 insertions(+), 5 deletions(-)
diff --git a/src/channel-display-gst.c b/src/channel-display-gst.c
index deb7f5b..f1ff83d 100644
--- a/src/channel-display-gst.c
+++ b/src/channel-display-gst.c
@@ -29,11 +29,56 @@
#include <gst/video/gstvideometa.h>
+
+/* -GST_EVENT_QOS msg
+ *
+ * This is the QoS information measured by GStreamer for every buffer, letting pipeline elements know the
+ * real-time performance and adjust if possible. For example it may cause frame drops (any such adjustment
+ * should be reported by the "Feedback QOS MSG" message below). Both QOS and SYNC have to be TRUE.
+ *
+ * proportion: running average of the ratio between synced presentation times and the frame rate
+ * diff: jitter, the difference between the actual presentation time and the frame's PTS
+ * timestamp: timestamp of the buffer that generated the QoS event (in running time)
+ *
+ * more info:
+ * https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/qos.html
+ *
+ * -BUFFERS QOS
+ *
+ * These are alternative measurements that are not part of the GStreamer QoS system but are partly similar.
+ *
+ * queue: number of frames that were pushed into the GStreamer pipeline and have not reached the sink yet.
+ *        Some elements keep their own queues, so this will never be 1, but you can monitor that it is not
+ *        accumulating - accumulation means the client is not keeping up.
+ * rate: similar idea to proportion but slightly different - a running average of the ratio between the
+ *       inter-arrival time of buffers at the sink and the frame duration. The further the value rises
+ *       above 1, the longer pipeline processing takes relative to the current frame rate.
+ *       (A rough worked example follows this comment block.)
+ *
+ * -Feedback QOS
+ *
+ * If a pipeline element changes its behaviour because of GST_EVENT_QOS, it should report it with this msg.
+ *
+ * -GST_EVENT_LATENCY
+ *
+ * Estimation of the pipeline latency at startup (if LIVE is TRUE) so that frames can be synchronized correctly.
+ */
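+/* Rough worked example of the "rate" metric above (an illustration added for clarity, not part of the
+ * original measurements): at 30 fps the frame duration is ~33 ms. If consecutive decoded buffers reach
+ * the sink 40 ms apart, rate = 40 / 33 ~= 1.2, i.e. the pipeline is running about 20% slower than the
+ * stream's frame rate. A single slow frame only nudges the running average (avg_rate); a sustained rate
+ * above 1 together with a growing "queue" is the signal that the client is not keeping up.
+ */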
+#define QOS FALSE // TRUE will emit qos events upstream which may cause decoder to drop frames (should be effective only with SYNC)
#define SYNC FALSE // TRUE will sync buffer times with clock time
-#define LIVE TRUE // TRUE will add pipeline latency estimation (should be effective only with SYNC)
+#define LIVE FALSE // TRUE will add pipeline latency estimation (should be effective only with SYNC)
#define DROP FALSE // TRUE if you want pipeline to decide to drop frames
typedef struct SpiceGstFrame SpiceGstFrame;
/* GStreamer decoder implementation */
@@ -56,11 +101,13 @@ typedef struct SpiceGstDecoder {
uint32_t last_mm_time;
+ gdouble avg_rate;
GMutex queues_mutex;
GQueue *decoding_queue;
SpiceGstFrame *display_frame;
guint timer_id;
guint pending_samples;
+ guint queue; // number of buffers in flight; may be inaccurate if QOS or DROP is TRUE
} SpiceGstDecoder;
#define VALID_VIDEO_CODEC_TYPE(codec) \
@@ -69,6 +116,17 @@ typedef struct SpiceGstDecoder {
/* Decoded frames are big so limit how many are queued by GStreamer */
#define MAX_DECODED_FRAMES 2
+#define DO_RUNNING_AVG(avg,val,size) (((val) + ((size)-1) * (avg)) / (size))
+
+/* generic running average, this has a neutral window size */
+#define UPDATE_RUNNING_AVG(avg,val) DO_RUNNING_AVG(avg,val,8)
+
+/* the windows for these running averages are experimentally obtained.
+ * positive values get averaged more while negative values use a small
+ * window so we can react faster to badness. */
+#define UPDATE_RUNNING_AVG_P(avg,val) DO_RUNNING_AVG(avg,val,16)
+#define UPDATE_RUNNING_AVG_N(avg,val) DO_RUNNING_AVG(avg,val,4)
+
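+/* Quick sanity check of DO_RUNNING_AVG (illustration only, not used by the code): with a window of 8,
+ * each new sample contributes 1/8 of its value. Starting from avg = 1.0, a single sample of 2.0 gives
+ * (2.0 + 7 * 1.0) / 8 = 1.125, so a lone spike barely moves the average while a sustained change slowly
+ * converges towards the new value. */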
/* GstPlayFlags enum is in plugin's header which should not be exported.
* https://bugzilla.gnome.org/show_bug.cgi?id=784279
*/
@@ -399,6 +457,16 @@ static gboolean handle_pipeline_message(GstBus *bus, GstMessage *msg, gpointer v
g_free(filename);
break;
}
+ case GST_MESSAGE_QOS: {
+ // It seems frames are sometimes dropped without a QoS message being emitted; ideally such drops should also update the queue counter.
+ GstFormat format;
+ guint64 processed;
+ guint64 dropped;
+
+ gst_message_parse_qos_stats(msg, &format, &processed, &dropped);
+ printf("Feedback QOS MSG(%d): processed: %lu dropped: %lu \n", (int)format, processed, dropped);
+ break;
+ }
default:
/* not being handled */
break;
@@ -422,7 +490,7 @@ static void app_source_setup(GstElement *pipeline G_GNUC_UNUSED,
caps = gst_caps_from_string(gst_opts[decoder->base.codec_type].dec_caps);
g_object_set(source,
"caps", caps,
- "is-live", LIVE,
+ "is-live", LIVE, //add latency estimation
"format", GST_FORMAT_TIME,
"max-bytes", G_GINT64_CONSTANT(0),
"block", TRUE,
@@ -431,13 +499,64 @@ static void app_source_setup(GstElement *pipeline G_GNUC_UNUSED,
decoder->appsrc = GST_APP_SRC(gst_object_ref(source));
}
+static GstPadProbeReturn event_probe(GstPad *pad,
+ GstPadProbeInfo *info, gpointer data)
+{
+ SpiceGstDecoder *decoder = (SpiceGstDecoder*)data;
+ static GstClockTime last;
+
+ if (info->type & GST_PAD_PROBE_TYPE_BUFFER) { // Buffer arrived
+ GstBuffer *obj = GST_PAD_PROBE_INFO_BUFFER(info);
+ GstClockTime cur = gst_clock_get_time(decoder->clock);
+ gdouble rate = gst_guint64_to_gdouble(cur - last) / gst_guint64_to_gdouble(GST_BUFFER_DURATION(obj)); // ratio of the actual inter-arrival time to the target frame duration
+
+ decoder->queue--;
+ if (GST_CLOCK_TIME_IS_VALID(last) && last != 0) {
+ if (decoder->avg_rate < 0.0) {
+ decoder->avg_rate = rate;
+ } else {
+ if (rate > 1.0) {
+ decoder->avg_rate = UPDATE_RUNNING_AVG/*_N*/ (decoder->avg_rate, rate);
+ } else {
+ decoder->avg_rate = UPDATE_RUNNING_AVG/*_P*/ (decoder->avg_rate, rate);
+ }
+ }
+ }
+ printf("BUFFERS QOS: queue: %u, AVG RATE: %f\n",decoder->queue, decoder->avg_rate);
+ last = cur;
+ } else { // qos & latency events
+ GstEvent *event = GST_PAD_PROBE_INFO_EVENT(info);
+
+ g_assert (GST_IS_EVENT(event));
+ if (GST_EVENT_TYPE(event) == GST_EVENT_QOS) { // QoS event as calculated by GStreamer; may cause elements to change behaviour (e.g. drop frames)
+ GstQOSType type;
+ gdouble proportion;
+ GstClockTimeDiff diff;
+ GstClockTime timestamp;
+
+ gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
+ printf("GST_EVENT_QOS type %u, proportion %lf, diff %"
+ G_GINT64_FORMAT ", timestamp %" GST_TIME_FORMAT "\n", type,
+ proportion,
+ diff,
+ GST_TIME_ARGS (timestamp));
+ }
+ if (GST_EVENT_TYPE(event) == GST_EVENT_LATENCY) { // Tells sinks to adjust their synchronisation for the reported latency
+ GstClockTime latency;
+ gst_event_parse_latency (event, &latency);
+ printf("GST_EVENT_LATENCY: %" G_GUINT64_FORMAT "ms\n", latency / GST_MSECOND);
+ }
+ }
+ return GST_PAD_PROBE_OK;
+}
+
static inline const char *gst_element_name(GstElement *element)
{
GstElementFactory *f = gst_element_get_factory(element);
return f ? GST_OBJECT_NAME(f) : GST_OBJECT_NAME(element);
}
-// This function is used to set properties in dynamically added sink (if overlay is used)
+// This function is used to set properties on the dynamically added sink (if overlay is used) and to install a probe on its sink pad
static void
add_elem_cb(GstBin * pipeline, GstBin * bin, GstElement * element, SpiceGstDecoder *decoder)
{
@@ -447,12 +566,23 @@ add_elem_cb(GstBin * pipeline, GstBin * bin, GstElement * element, SpiceGstDecod
spice_debug("Adding element: %s", name);
- if (GST_IS_BASE_SINK(element)) {
+ if (GST_IS_BASE_SINK(element)) {// && GST_OBJECT_FLAG_IS_SET(element, GST_ELEMENT_FLAG_SINK)
+ GstPad *pad;
+
+ pad = gst_element_get_static_pad(element, "sink");
+ gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM | GST_PAD_PROBE_TYPE_BUFFER, event_probe, decoder, NULL); // catch upstream events (QoS, latency) as well as buffers
g_object_set(element,
"sync", SYNC,
+ "qos", QOS,
"drop", DROP,
NULL);
+ gst_object_unref(pad);
spice_debug("^^^^SINK^^^^");
+ } else {
+ // experiment: try setting this property on other (non-sink) elements, left disabled
+ /*g_object_set(element,
+ "max-size-buffers", 0,
+ NULL);*/
}
g_free(name);
}
@@ -516,6 +646,27 @@ static gboolean create_pipeline(SpiceGstDecoder *decoder)
gst_object_unref(vaapisink);
}
#endif
+
+ //disable vaapi decoding
+ /*if (registry) {
+ vaapisink = gst_registry_lookup_feature(registry, "vaapih264dec");
+ }
+ if (vaapisink) {
+ gst_plugin_feature_set_rank(vaapisink, GST_RANK_NONE);
+ gst_object_unref(vaapisink);
+ }
+ if (registry) {
+ vaapisink = gst_registry_lookup_feature(registry, "vaapidecodebin");
+ }
+ if (vaapisink) {
+ gst_plugin_feature_set_rank(vaapisink, GST_RANK_NONE);
+ gst_object_unref(vaapisink);
+ }*/
+ // force sink
+ /*sink = gst_element_factory_make("xvimagesink", "sink");
+ g_object_set(playbin,
+ "video-sink", gst_object_ref(sink),
+ NULL);*/
}
g_signal_connect(playbin, "deep-element-added", G_CALLBACK(add_elem_cb), decoder);
@@ -701,7 +852,9 @@ static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
frame->data, frame->size, 0, frame->size,
frame, (GDestroyNotify) spice_frame_free);
- GstClockTime pts = gst_clock_get_time(decoder->clock) - gst_element_get_base_time(decoder->pipeline) + ((uint64_t)MAX(0, margin)) * 1000 * 1000;
+// GstClockTime pts = gst_clock_get_time(decoder->clock) - gst_element_get_base_time(decoder->pipeline) + ((uint64_t)MAX(0, margin)) * 1000 * 1000;
+ GstClockTime pts = gst_clock_get_time(decoder->clock) - gst_element_get_base_time(decoder->pipeline); // ignore the margin and audio sync; this is effectively an arrival-time based PTS
+ //GstClockTime pts = GST_CLOCK_TIME_NONE;
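+ /* Explanatory note (not from the original patch): gst_clock_get_time() minus the element base time is the
+  * pipeline's current running time, so each buffer is stamped with its arrival time. With SYNC FALSE the
+  * sink renders it as soon as it is decoded; with SYNC TRUE it would wait until the clock reaches this
+  * running time again (plus any configured latency). */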
GST_BUFFER_DURATION(buffer) = GST_CLOCK_TIME_NONE;
GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE;
GST_BUFFER_PTS(buffer) = pts;
@@ -719,6 +872,8 @@ static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
if (gst_app_src_push_buffer(decoder->appsrc, buffer) != GST_FLOW_OK) {
SPICE_DEBUG("GStreamer error: unable to push frame");
stream_dropped_frame_on_playback(decoder->base.stream);
+ } else {
+ decoder->queue++;
}
return TRUE;
}
@@ -756,6 +911,7 @@ VideoDecoder* create_gstreamer_decoder(int codec_type, display_stream *stream)
decoder->last_mm_time = stream_get_time(stream);
g_mutex_init(&decoder->queues_mutex);
decoder->decoding_queue = g_queue_new();
+ decoder->avg_rate = -1;
if (!create_pipeline(decoder)) {
decoder->base.destroy((VideoDecoder*)decoder);