| author    | Frediano Ziglio <fziglio@redhat.com>     | 2018-11-14 18:30:27 +0000 |
|-----------|------------------------------------------|---------------------------|
| committer | Frediano Ziglio <freddy77@gmail.com>     | 2022-03-06 14:53:30 +0000 |
| commit    | 4678e77c1e91505b7be5a1bd739373464e394477 |                           |
| tree      | ec166e009037be449baca80ee078483537b2beeb |                           |
| parent    | 603f4e30378a0e298dbab523b2a4ab78ab6e7f87 |                           |
OTHER stupid save... (branch: qos_testing)

Calling XInitThreads() from main makes a HUGE difference...
(remote-viewer)
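For reference, XInitThreads() has to be the very first Xlib-related call in the process, before GTK (and later gstreamer-vaapi) touch the X display from multiple threads. A minimal sketch of that idea follows; it is illustrative only, and the real remote-viewer main() is assumed to be more involved:

```c
/* Minimal sketch: make Xlib thread-safe before anything else opens the
 * X display. Illustrative only; not remote-viewer's actual entry point. */
#include <X11/Xlib.h>
#include <gtk/gtk.h>

int main(int argc, char *argv[])
{
    XInitThreads();          /* must run before the first Xlib call in the process */
    gtk_init(&argc, &argv);  /* GTK/GDK open the X display after this point */

    /* ... create windows, channels, pipelines ... */
    gtk_main();
    return 0;
}
```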
-rw-r--r-- | src/channel-display-gst.c | 50 |
1 file changed, 33 insertions(+), 17 deletions(-)
```diff
diff --git a/src/channel-display-gst.c b/src/channel-display-gst.c
index 8dc1437..2ae619a 100644
--- a/src/channel-display-gst.c
+++ b/src/channel-display-gst.c
@@ -76,7 +76,7 @@
  *
  */
 #define QOS FALSE // TRUE will emit qos events upstream which may cause decoder to drop frames (should be effective only with SYNC)
-#define SYNC TRUE // TRUE will sync buffer times with clock time
+#define SYNC FALSE // TRUE will sync buffer times with clock time
 #define LIVE TRUE // TRUE will add pipeline latency estimation (should be effective only with SYNC)
 #define DROP FALSE // TRUE if you want pipeline to decide to drop frames
 
@@ -94,7 +94,7 @@ typedef struct SpiceGstDecoder {
 
     /* ---------- GStreamer pipeline ---------- */
 
-    GstAppSrc *appsrc;
+//    GstAppSrc *appsrc;
     GstAppSink *appsink;
     GstElement *pipeline;
     GstClock *clock;
@@ -419,7 +419,7 @@ static void free_pipeline(SpiceGstDecoder *decoder)
     }
 
     gst_element_set_state(decoder->pipeline, GST_STATE_NULL);
-    gst_object_unref(decoder->appsrc);
+//    gst_object_unref(decoder->appsrc);
     if (decoder->appsink) {
         gst_object_unref(decoder->appsink);
     }
@@ -428,7 +428,7 @@ static void free_pipeline(SpiceGstDecoder *decoder)
     decoder->pipeline = NULL;
 }
 
-static gboolean handle_pipeline_message(GstBus *bus, GstMessage *msg, gpointer video_decoder)
+static GstBusSyncReply handle_pipeline_message(GstBus *bus, GstMessage *msg, gpointer video_decoder)
 {
     static GstObject *gstvaapidisplay = NULL;
     SpiceGstDecoder *decoder = video_decoder;
@@ -482,7 +482,7 @@ static gboolean handle_pipeline_message(GstBus *bus, GstMessage *msg, gpointer v
 #if 0
         if (g_strcmp0(context_type, "gst.vaapi.Display") == 0 && gstvaapidisplay) {
             g_print("Setting saved context on %s\n", GST_MESSAGE_SRC_NAME(msg));
-            GstContext *context = gst_context_new("gst.vaapi.Display", TRUE);
+            GstContext *context = gst_context_new("gst.vaapi.Display", FALSE);
             GstStructure *s = gst_context_writable_structure(context);
             gst_structure_set(s, "gst.vaapi.Display", GST_TYPE_OBJECT, gstvaapidisplay, NULL);
             gst_element_set_context(GST_ELEMENT(msg->src), context);
@@ -492,19 +492,25 @@ static gboolean handle_pipeline_message(GstBus *bus, GstMessage *msg, gpointer v
 #endif
         if (g_strcmp0(context_type, "gst.vaapi.app.Display") == 0) {
-            static GstContext *context = NULL;
+            static Display *x11_display = NULL;
+            static VADisplay va_display = NULL;
+            GstContext *context = NULL;
+            if (!x11_display) {
+                x11_display = gdk_x11_get_default_xdisplay();
+                g_assert_nonnull(x11_display);
+            }
+            if (!va_display) {
+                va_display = vaGetDisplay(x11_display);
+                g_assert_nonnull(va_display);
+            }
             if (!context) {
-                Display *x11_display = gdk_x11_get_default_xdisplay();
-                context = gst_context_new("gst.vaapi.app.Display", TRUE);
+                context = gst_context_new("gst.vaapi.app.Display", FALSE);
                 GstStructure *structure = gst_context_writable_structure(context);
                 gst_structure_set(structure, "x11-display", G_TYPE_POINTER, x11_display, NULL);
-                g_assert_nonnull(x11_display);
-                VADisplay va_display = vaGetDisplay(x11_display);
-                g_assert_nonnull(va_display);
                 gst_structure_set(structure, "va-display", G_TYPE_POINTER, va_display, NULL);
             }
             gst_element_set_context(GST_ELEMENT(msg->src), context);
-//            gst_context_unref(context);
+            gst_context_unref(context);
         }
 
 #if 0
         if (g_strcmp0 (context_type, GST_GL_DISPLAY_CONTEXT_TYPE) == 0) {
@@ -548,9 +554,10 @@ static gboolean handle_pipeline_message(GstBus *bus, GstMessage *msg, gpointer v
         /* not being handled */
         break;
     }
-    return TRUE;
+    return GST_BUS_PASS;
 }
 
+#if 0
 static void app_source_setup(GstElement *pipeline G_GNUC_UNUSED,
                              GstElement *source,
                              SpiceGstDecoder *decoder)
@@ -575,6 +582,7 @@ static void app_source_setup(GstElement *pipeline G_GNUC_UNUSED,
     gst_caps_unref(caps);
     decoder->appsrc = GST_APP_SRC(gst_object_ref(source));
 }
+#endif
 
 static GstPadProbeReturn
 event_probe(GstPad *pad, GstPadProbeInfo *info, gpointer data)
@@ -688,9 +696,11 @@ add_elem_cb(GstBin * pipeline, GstBin * bin, GstElement * element, SpiceGstDecod
         strcpy((char*) (p+1), name);
         gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, event_probe2, p, g_free); //also buffers
     }
+#if 0
     g_object_set(element,
                  "low-latency", TRUE,
                  NULL);
+#endif
 
     //just trying to set this in other elements
     /*g_object_set(element,
@@ -737,6 +747,7 @@ static gboolean create_pipeline(SpiceGstDecoder *decoder)
         decoder->appsink = GST_APP_SINK(sink);
     } else {
+        printf("CORRECT\n");
         /* handle has received, it means playbin will render directly into
          * widget using the gstvideooverlay interface instead of app-sink.
          */
@@ -783,10 +794,10 @@ static gboolean create_pipeline(SpiceGstDecoder *decoder)
     }
 
     g_signal_connect(playbin, "deep-element-added", G_CALLBACK(add_elem_cb), decoder);
-    g_signal_connect(playbin, "source-setup", G_CALLBACK(app_source_setup), decoder);
+//    g_signal_connect(playbin, "source-setup", G_CALLBACK(app_source_setup), decoder);
 
     g_object_set(playbin,
-                 "uri", "appsrc://",
+                 "uri", "file:///home/fziglio/Downloads/recording.mp4",
                  NULL);
 
     /* Disable audio in playbin */
@@ -794,7 +805,7 @@ static gboolean create_pipeline(SpiceGstDecoder *decoder)
     flags &= ~(GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_TEXT);
     g_object_set(playbin, "flags", flags, NULL);
 
-    g_warn_if_fail(decoder->appsrc == NULL);
+//    g_warn_if_fail(decoder->appsrc == NULL);
     decoder->pipeline = playbin;
 
     if (decoder->appsink) {
@@ -805,7 +816,7 @@ static gboolean create_pipeline(SpiceGstDecoder *decoder)
         gst_app_sink_set_drop(decoder->appsink, FALSE);
     }
     bus = gst_pipeline_get_bus(GST_PIPELINE(decoder->pipeline));
-    gst_bus_add_watch(bus, handle_pipeline_message, decoder);
+    gst_bus_set_sync_handler (bus, handle_pipeline_message, decoder, NULL);
     gst_object_unref(bus);
 
     decoder->clock = gst_pipeline_get_clock(GST_PIPELINE(decoder->pipeline));
@@ -953,12 +964,14 @@ static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
         return FALSE;
     }
 
+#if 0
     if (decoder->appsrc == NULL) {
         spice_warning("Error: Playbin has not yet initialized the Appsrc element");
         stream_dropped_frame_on_playback(decoder->base.stream);
         spice_frame_free(frame);
         return TRUE;
     }
+#endif
 
     /* frame ownership is moved to the buffer */
     GstBuffer *buffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_PHYSICALLY_CONTIGUOUS,
@@ -987,12 +1000,15 @@ static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
     g_queue_push_tail(decoder->decoding_queue, gst_frame);
     g_mutex_unlock(&decoder->queues_mutex);
 
+    gst_buffer_unref(buffer);
+#if 0
     if (gst_app_src_push_buffer(decoder->appsrc, buffer) != GST_FLOW_OK) {
         SPICE_DEBUG("GStreamer error: unable to push frame");
         stream_dropped_frame_on_playback(decoder->base.stream);
     } else {
         g_atomic_int_inc(&decoder->queue);
    }
+#endif
 
     return TRUE;
 }
```
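For readers following the bus-handler change, here is a self-contained sketch of the same mechanism: answering gstreamer-vaapi's `gst.vaapi.app.Display` context request from a synchronous bus handler. It reuses the API calls that appear in the diff (plus `gst_message_parse_context_type()`); the handler and variable names are made up, and it is not a drop-in replacement for the patched code.

```c
/* Sketch: hand the application's X11/VA displays to gstreamer-vaapi when it
 * posts a NEED_CONTEXT message for "gst.vaapi.app.Display". Illustrative only. */
#include <gst/gst.h>
#include <gdk/gdkx.h>
#include <va/va_x11.h>

static GstBusSyncReply bus_sync_handler(GstBus *bus, GstMessage *msg, gpointer user_data)
{
    if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_NEED_CONTEXT) {
        const gchar *context_type = NULL;
        gst_message_parse_context_type(msg, &context_type);
        if (g_strcmp0(context_type, "gst.vaapi.app.Display") == 0) {
            Display *x11_display = gdk_x11_get_default_xdisplay();
            VADisplay va_display = vaGetDisplay(x11_display);
            GstContext *context = gst_context_new("gst.vaapi.app.Display", FALSE);
            GstStructure *s = gst_context_writable_structure(context);
            gst_structure_set(s,
                              "x11-display", G_TYPE_POINTER, x11_display,
                              "va-display",  G_TYPE_POINTER, va_display,
                              NULL);
            gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(msg)), context);
            gst_context_unref(context);
        }
    }
    return GST_BUS_PASS;   /* let the message continue to any async watch */
}

/* Attach with a sync handler instead of gst_bus_add_watch(), as in the diff:
 *     GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
 *     gst_bus_set_sync_handler(bus, bus_sync_handler, NULL, NULL);
 *     gst_object_unref(bus);
 */
```

A sync handler runs in the streaming thread that posted the message rather than in the GLib main loop, which is why the X connection has to be thread-safe already; that is the point of the XInitThreads() note in the commit message.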