---
title: Pipeline manipulation
...

# Pipeline manipulation

This chapter discusses the various ways in which you can manipulate your
pipeline from your application. Parts of this chapter are very
low-level, so be aware that you'll need some programming knowledge and
a good understanding of GStreamer before you start reading this.

Topics that will be discussed here include how you can insert data into
a pipeline from your application, how to read data from a pipeline, how
to manipulate the pipeline's speed, length, starting point and how to
listen to a pipeline's data processing.

## Using probes

Probing is best envisioned as a pad listener. Technically, a probe is
nothing more than a callback that can be attached to a pad. You can
attach a probe using `gst_pad_add_probe ()`. Similarly, one can use the
`gst_pad_remove_probe ()` to remove the callback again. The probe
notifies you of any activity that happens on the pad, like buffers,
events and queries. You can define what kind of notifications you are
interested in when you add the probe.
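
For instance, a minimal sketch (with assumed element and pad names) of
attaching a probe that counts buffers on an element's source pad, and of
removing it again later by its id, might look like this:

``` c
#include <gst/gst.h>

/* called from the streaming thread for every buffer that passes the pad */
static GstPadProbeReturn
count_buffers_cb (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
  guint *count = user_data;

  (*count)++;
  return GST_PAD_PROBE_OK;
}

static gulong
install_counter (GstElement *element, guint *count)
{
  GstPad *pad = gst_element_get_static_pad (element, "src");
  gulong id = gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
      count_buffers_cb, count, NULL);

  gst_object_unref (pad);

  /* remove later with gst_pad_remove_probe (pad, id) on the same pad */
  return id;
}
```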

The probe can notify you of the following activity on pads:

  - A buffer is pushed or pulled. You want to specify the
    GST\_PAD\_PROBE\_TYPE\_BUFFER when registering the probe. Because
    the pad can be scheduled in different ways, it is possible to also
    specify in what scheduling mode you are interested with the optional
    GST\_PAD\_PROBE\_TYPE\_PUSH and GST\_PAD\_PROBE\_TYPE\_PULL flags.
    
    You can use this probe to inspect, modify or drop the buffer. See
    [Data probes](#data-probes).

  - A bufferlist is pushed. Use the GST\_PAD\_PROBE\_TYPE\_BUFFER\_LIST
    when registering the probe.

  - An event travels over a pad. Use the
    GST\_PAD\_PROBE\_TYPE\_EVENT\_DOWNSTREAM and
    GST\_PAD\_PROBE\_TYPE\_EVENT\_UPSTREAM flags to select downstream
    and upstream events. There is also a convenience
    GST\_PAD\_PROBE\_TYPE\_EVENT\_BOTH to be notified of events going
    both upstream and downstream. By default, flush events do not cause
    a notification. You need to explicitly enable
    GST\_PAD\_PROBE\_TYPE\_EVENT\_FLUSH to receive callbacks from
    flushing events. Events are always only notified in push mode.
    
    You can use this probe to inspect, modify or drop the event.

  - A query travels over a pad. Use the
    GST\_PAD\_PROBE\_TYPE\_QUERY\_DOWNSTREAM and
    GST\_PAD\_PROBE\_TYPE\_QUERY\_UPSTREAM flags to select downstream
    and upstream queries. The convenience
    GST\_PAD\_PROBE\_TYPE\_QUERY\_BOTH can also be used to select both
    directions. Query probes will be notified twice, once when the query
    travels upstream/downstream and once when the query result is
    returned. You can select in what stage the callback will be called
    with the GST\_PAD\_PROBE\_TYPE\_PUSH and
    GST\_PAD\_PROBE\_TYPE\_PULL, respectively when the query is
    performed and when the query result is returned.
    
    You can use this probe to inspect or modify the query. You can also
    answer the query in the probe callback by placing the result value
    in the query and by returning GST\_PAD\_PROBE\_DROP from the
    callback (see the sketch after this list).

  - In addition to notifying you of dataflow, you can also ask the probe
    to block the dataflow when the callback returns. This is called a
    blocking probe and is activated by specifying the
    GST\_PAD\_PROBE\_TYPE\_BLOCK flag. You can use this flag with the
    other flags to only block dataflow on selected activity. A pad
    becomes unblocked again if you remove the probe or when you return
    GST\_PAD\_PROBE\_REMOVE from the callback. You can let only the
    currently blocked item pass by returning GST\_PAD\_PROBE\_PASS from
    the callback; the pad will block again on the next item.

    Blocking probes are used to temporarily block pads because they are
    unlinked or because you are going to unlink them. If the dataflow
    were not blocked, the pipeline would go into an error state when
    data is pushed on an unlinked pad. We will see how to use blocking
    probes to partially preroll a pipeline. See also [Play a region of a
    media file](#play-a-region-of-a-media-file).

  - Be notified when no activity is happening on a pad. You install this
    probe with the GST\_PAD\_PROBE\_TYPE\_IDLE flag. You can specify
    GST\_PAD\_PROBE\_TYPE\_PUSH and/or GST\_PAD\_PROBE\_TYPE\_PULL to
    only be notified depending on the pad scheduling mode. The IDLE
    probe is also a blocking probe in that it will not let any data pass
    on the pad for as long as the IDLE probe is installed.
    
    You can use idle probes to dynamically relink a pad. We will see how
    to use idle probes to replace an element in the pipeline. See also
    [Dynamically changing the
    pipeline](#dynamically-changing-the-pipeline).
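
As an illustration of the query case above, here is a minimal sketch of
a probe that answers an upstream position query itself instead of
letting it travel further; the callback name and the reported position
are made up for the example.

``` c
#include <gst/gst.h>

static GstPadProbeReturn
answer_position_cb (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
  GstQuery *query = GST_PAD_PROBE_INFO_QUERY (info);

  if (GST_QUERY_TYPE (query) != GST_QUERY_POSITION)
    return GST_PAD_PROBE_OK;

  /* place the result in the query and drop it so it is not forwarded;
   * the peer then sees the query as answered */
  gst_query_set_position (query, GST_FORMAT_TIME, 5 * GST_SECOND);
  return GST_PAD_PROBE_DROP;
}

/* installed on a source pad with:
 *   gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_QUERY_UPSTREAM,
 *       answer_position_cb, NULL, NULL);
 */
```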

### Data probes

Data probes allow you to be notified when there is data passing on a
pad. When adding the probe, specify the GST\_PAD\_PROBE\_TYPE\_BUFFER
and/or GST\_PAD\_PROBE\_TYPE\_BUFFER\_LIST.

Data probes run in pipeline streaming thread context, so callbacks
should try to not block and generally not do any weird stuff, since this
could have a negative impact on pipeline performance or, in case of
bugs, cause deadlocks or crashes. More precisely, one should usually not
call any GUI-related functions from within a probe callback, nor try to
change the state of the pipeline. An application may, however, post
custom messages on the pipeline's bus to communicate with the main
application thread and have it do things like stop the pipeline.

In any case, most common buffer operations that elements can do in
`_chain ()` functions can be done in probe callbacks as well. The
example below gives a short impression of how to use them.

``` c
#include <gst/gst.h>

static GstPadProbeReturn
cb_have_data (GstPad          *pad,
              GstPadProbeInfo *info,
              gpointer         user_data)
{
  gint x, y;
  GstMapInfo map;
  guint16 *ptr, t;
  GstBuffer *buffer;

  buffer = GST_PAD_PROBE_INFO_BUFFER (info);

  buffer = gst_buffer_make_writable (buffer);

  /* Making a buffer writable can fail (for example if it
   * cannot be copied and is used more than once)
   */
  if (buffer == NULL)
    return GST_PAD_PROBE_OK;

  /* Mapping a buffer can fail (non-writable) */
  if (gst_buffer_map (buffer, &map, GST_MAP_WRITE)) {
    ptr = (guint16 *) map.data;
    /* invert data */
    for (y = 0; y < 288; y++) {
      for (x = 0; x < 384 / 2; x++) {
        t = ptr[384 - 1 - x];
        ptr[384 - 1 - x] = ptr[x];
        ptr[x] = t;
      }
      ptr += 384;
    }
    gst_buffer_unmap (buffer, &map);
  }

  GST_PAD_PROBE_INFO_DATA (info) = buffer;

  return GST_PAD_PROBE_OK;
}

gint
main (gint   argc,
      gchar *argv[])
{
  GMainLoop *loop;
  GstElement *pipeline, *src, *sink, *filter, *csp;
  GstCaps *filtercaps;
  GstPad *pad;

  /* init GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* build */
  pipeline = gst_pipeline_new ("my-pipeline");
  src = gst_element_factory_make ("videotestsrc", "src");
  if (src == NULL)
    g_error ("Could not create 'videotestsrc' element");

  filter = gst_element_factory_make ("capsfilter", "filter");
  g_assert (filter != NULL); /* should always exist */

  csp = gst_element_factory_make ("videoconvert", "csp");
  if (csp == NULL)
    g_error ("Could not create 'videoconvert' element");

  sink = gst_element_factory_make ("xvimagesink", "sink");
  if (sink == NULL) {
    sink = gst_element_factory_make ("ximagesink", "sink");
    if (sink == NULL)
      g_error ("Could not create neither 'xvimagesink' nor 'ximagesink' element");
  }

  gst_bin_add_many (GST_BIN (pipeline), src, filter, csp, sink, NULL);
  gst_element_link_many (src, filter, csp, sink, NULL);
  filtercaps = gst_caps_new_simple ("video/x-raw",
               "format", G_TYPE_STRING, "RGB16",
               "width", G_TYPE_INT, 384,
               "height", G_TYPE_INT, 288,
               "framerate", GST_TYPE_FRACTION, 25, 1,
               NULL);
  g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
  gst_caps_unref (filtercaps);

  pad = gst_element_get_static_pad (src, "src");
  gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
      (GstPadProbeCallback) cb_have_data, NULL, NULL);
  gst_object_unref (pad);

  /* run */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* wait until it's up and running or failed */
  if (gst_element_get_state (pipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
    g_error ("Failed to go into PLAYING state");
  }

  g_print ("Running ...\n");
  g_main_loop_run (loop);

  /* exit */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}
```

Compare that output with the output of “gst-launch-1.0 videotestsrc \!
xvimagesink”, just so you know what you're looking for.

Strictly speaking, a pad probe callback is only allowed to modify the
buffer content if the buffer is writable. Whether this is the case or
not depends a lot on the pipeline and the elements involved. Often
enough, this is the case, but sometimes it is not, and if it is not then
unexpected modification of the data or metadata can introduce bugs that
are very hard to debug and track down. You can check if a buffer is
writable with `gst_buffer_is_writable ()`. Since you can pass back a
different buffer than the one passed in, it is a good idea to make the
buffer writable in the callback function with `gst_buffer_make_writable
()`.

Pad probes are suited best for looking at data as it passes through the
pipeline. If you need to modify data, you should better write your own
GStreamer element. Base classes like GstAudioFilter, GstVideoFilter or
GstBaseTransform make this fairly easy.

If you just want to inspect buffers as they pass through the pipeline,
you don't even need to set up pad probes. You could also just insert an
identity element into the pipeline and connect to its "handoff" signal.
The identity element also provides a few useful debugging tools like the
"dump" property or the "last-message" property (the latter is enabled by
passing the '-v' switch to gst-launch and by setting the silent property
on the identity to FALSE).
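
A minimal sketch of that approach, assuming a hypothetical `on_handoff`
callback and an identity element that you link into your pipeline
wherever you want to watch the data:

``` c
/* called for every buffer that passes through the identity element */
static void
on_handoff (GstElement *identity, GstBuffer *buffer, gpointer user_data)
{
  g_print ("buffer of %" G_GSIZE_FORMAT " bytes, pts %" GST_TIME_FORMAT "\n",
      gst_buffer_get_size (buffer), GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));
}

static GstElement *
make_inspector (void)
{
  GstElement *identity = gst_element_factory_make ("identity", NULL);

  g_signal_connect (identity, "handoff", G_CALLBACK (on_handoff), NULL);
  return identity;
}
```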

### Play a region of a media file

In this example we will show you how to play back a region of a media
file. The goal is to only play the part of a file from 2 seconds to 5
seconds and then EOS.

In a first step we will set a uridecodebin element to the PAUSED state
and make sure that we block all the source pads that are created. When
all the source pads are blocked, we have data on all source pads and we
say that the uridecodebin is prerolled.

In a prerolled pipeline we can ask for the duration of the media and we
can also perform seeks. We want to perform a seek operation on the
pipeline to select the range of media that we are interested in.

After we configure the region we are interested in, we can link the sink
element, unblock the source pads and set the pipeline to the playing
state. You will see that exactly the requested region is played by the
sink before it goes to EOS.

What follows is an example application that loosely follows this
algorithm.

``` c
#include <gst/gst.h>

static GMainLoop *loop;
static volatile gint counter;
static GstBus *bus;
static gboolean prerolled = FALSE;
static GstPad *sinkpad;

static void
dec_counter (GstElement * pipeline)
{
  if (prerolled)
    return;

  if (g_atomic_int_dec_and_test (&counter)) {
    /* all probes blocked and no-more-pads signaled, post
     * message on the bus. */
    prerolled = TRUE;

    gst_bus_post (bus, gst_message_new_application (
          GST_OBJECT_CAST (pipeline),
          gst_structure_new_empty ("ExPrerolled")));
  }
}

/* called when a source pad of uridecodebin is blocked */
static GstPadProbeReturn
cb_blocked (GstPad          *pad,
            GstPadProbeInfo *info,
            gpointer         user_data)
{
  GstElement *pipeline = GST_ELEMENT (user_data);

  if (prerolled)
    return GST_PAD_PROBE_REMOVE;

  dec_counter (pipeline);

  return GST_PAD_PROBE_OK;
}

/* called when uridecodebin has a new pad */
static void
cb_pad_added (GstElement *element,
              GstPad     *pad,
              gpointer    user_data)
{
  GstElement *pipeline = GST_ELEMENT (user_data);

  if (prerolled)
    return;

  g_atomic_int_inc (&counter);

  gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
      (GstPadProbeCallback) cb_blocked, pipeline, NULL);

  /* try to link to the video pad */
  gst_pad_link (pad, sinkpad);
}

/* called when uridecodebin has created all pads */
static void
cb_no_more_pads (GstElement *element,
                 gpointer    user_data)
{
  GstElement *pipeline = GST_ELEMENT (user_data);

  if (prerolled)
    return;

  dec_counter (pipeline);
}

/* called when a new message is posted on the bus */
static void
cb_message (GstBus     *bus,
            GstMessage *message,
            gpointer    user_data)
{
  GstElement *pipeline = GST_ELEMENT (user_data);

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:
      g_print ("we received an error!\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_EOS:
      g_print ("we reached EOS\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_APPLICATION:
    {
      if (gst_message_has_name (message, "ExPrerolled")) {
        /* it's our message */
        g_print ("we are all prerolled, do seek\n");
        gst_element_seek (pipeline,
            1.0, GST_FORMAT_TIME,
            GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
            GST_SEEK_TYPE_SET, 2 * GST_SECOND,
            GST_SEEK_TYPE_SET, 5 * GST_SECOND);

        gst_element_set_state (pipeline, GST_STATE_PLAYING);
      }
      break;
    }
    default:
      break;
  }
}

gint
main (gint   argc,
      gchar *argv[])
{
  GstElement *pipeline, *src, *csp, *vs, *sink;

  /* init GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  if (argc < 2) {
    g_print ("usage: %s <uri>", argv[0]);
    return -1;
  }

  /* build */
  pipeline = gst_pipeline_new ("my-pipeline");

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", (GCallback) cb_message,
      pipeline);

  src = gst_element_factory_make ("uridecodebin", "src");
  if (src == NULL)
    g_error ("Could not create 'uridecodebin' element");

  g_object_set (src, "uri", argv[1], NULL);

  csp = gst_element_factory_make ("videoconvert", "csp");
  if (csp == NULL)
    g_error ("Could not create 'videoconvert' element");

  vs = gst_element_factory_make ("videoscale", "vs");
  if (vs == NULL)
    g_error ("Could not create 'videoscale' element");

  sink = gst_element_factory_make ("autovideosink", "sink");
  if (sink == NULL)
    g_error ("Could not create 'autovideosink' element");

  gst_bin_add_many (GST_BIN (pipeline), src, csp, vs, sink, NULL);

  /* can't link src yet, it has no pads */
  gst_element_link_many (csp, vs, sink, NULL);

  sinkpad = gst_element_get_static_pad (csp, "sink");

  /* for each pad block that is installed, we will increment
   * the counter. for each pad block that is signaled, we
   * decrement the counter. When the counter is 0 we post
   * an app message to tell the app that all pads are
   * blocked. Start with 1 that is decremented when no-more-pads
   * is signaled to make sure that we only post the message
   * after no-more-pads */
  g_atomic_int_set (&counter, 1);

  g_signal_connect (src, "pad-added",
      (GCallback) cb_pad_added, pipeline);
  g_signal_connect (src, "no-more-pads",
      (GCallback) cb_no_more_pads, pipeline);

  gst_element_set_state (pipeline, GST_STATE_PAUSED);

  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_object_unref (sinkpad);
  gst_object_unref (bus);
  gst_object_unref (pipeline);
  g_main_loop_unref (loop);

  return 0;
}

```

Note that we use a custom application message to signal the main thread
that the uridecodebin is prerolled. The main thread will then issue a
flushing seek to the requested region. The flush will temporarily
unblock the pads and reblock them when new data arrives again. We detect
this second block to remove the probes. Then we set the pipeline to
PLAYING and it should play from 2 to 5 seconds, then EOS and exit the
application.

## Manually adding or removing data from/to a pipeline

Many people have expressed the wish to use their own sources to inject
data into a pipeline. Some people have also expressed the wish to grab
the output in a pipeline and take care of the actual output inside their
application. While either of these methods is strongly discouraged,
GStreamer offers support for this. *Beware\! You need to know what you
are doing.* Since you don't have any support from a base class you need
to thoroughly understand state changes and synchronization. If it
doesn't work, there are a million ways to shoot yourself in the foot.
It's always better to simply write a plugin and have the base class
manage it. See the Plugin Writer's Guide for more information on this
topic. Also see the next section, which will explain how to embed
plugins statically in your application.

There are two possible elements that you can use for the above-mentioned
purposes. Those are called “appsrc” (an imaginary source) and “appsink”
(an imaginary sink). The same method applies to each of those elements.
Here, we will discuss how to use those elements to insert (using appsrc)
or grab (using appsink) data from a pipeline, and how to configure
negotiation.

Both appsrc and appsink provide two sets of API. One API uses standard
GObject (action) signals and properties. The same API is also available
as a regular C API. The C API is more performant but requires you to
link to the app library in order to use the elements.

### Inserting data with appsrc

First we look at some examples for appsrc, which lets you insert data
into the pipeline from the application. Appsrc has some configuration
options that define how it will operate. You should decide about the
following configurations:

  - Whether the appsrc will operate in push or pull mode. The stream-type
    property can be used to control this: a stream-type of “random-access”
    activates pull mode scheduling while the other stream-types activate
    push mode.

  - The caps of the buffers that appsrc will push out. This needs to be
    configured with the caps property. The caps must be set to a fixed
    caps and will be used to negotiate a format downstream.

  - If the appsrc operates in live mode or not. This can be configured
    with the is-live property. When operating in live mode it is
    important to configure the min-latency and max-latency in appsrc.
    The min-latency should be set to the amount of time it takes between
    capturing a buffer and when it is pushed inside appsrc. In live
    mode, you should timestamp the buffers with the pipeline
    running-time when the first byte of the buffer was captured before
    feeding them to appsrc. You can let appsrc do the timestamping with
    the do-timestamp property (but then the min-latency must be set to 0
    because it timestamps based on the running-time when the buffer
    entered appsrc). A minimal configuration sketch follows this list.

  - The format of the SEGMENT event that appsrc will push. The format
    has implications for how the running-time of the buffers will be
    calculated so you must be sure you understand this. For live sources
    you probably want to set the format property to GST\_FORMAT\_TIME.
    For non-live source it depends on the media type that you are
    handling. If you plan to timestamp the buffers, you should probably
    put a GST\_FORMAT\_TIME format, otherwise GST\_FORMAT\_BYTES might
    be appropriate.

  - If appsrc operates in random-access mode, it is important to
    configure the size property of appsrc with the number of bytes in
    the stream. This will allow downstream elements to know the size of
    the media and allows them to seek to the end of the stream when
    needed.
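
As referenced above, here is a minimal configuration sketch for a live,
time-based appsrc; the function name and property values are assumptions
that you would adapt to your capture source:

``` c
static void
configure_live_appsrc (GstElement *appsrc, GstCaps *caps)
{
  g_object_set (appsrc,
      "caps", caps,                  /* fixed caps describing the data */
      "format", GST_FORMAT_TIME,     /* SEGMENT and running-time in time units */
      "is-live", TRUE,               /* behave like a live source */
      "do-timestamp", TRUE,          /* let appsrc timestamp incoming buffers */
      "min-latency", (gint64) 0,     /* required when do-timestamp is used */
      NULL);
}
```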

The main way of handing data to appsrc is by using the function
`gst_app_src_push_buffer ()` or by emitting the push-buffer action
signal. This will put the buffer onto a queue from which appsrc will
read in its streaming thread. It is important to note that data
transport will not happen from the thread that performed the push-buffer
call.

The “max-bytes” property controls how much data can be queued in appsrc
before appsrc considers the queue full. A filled internal queue will
always emit the “enough-data” signal, which notifies the application
that it should stop pushing data into appsrc. The “block” property will
cause appsrc to block the push-buffer method until free space becomes
available again.

When the internal queue is running out of data, the “need-data” signal
is emitted, which signals the application that it should start pushing
more data into appsrc.

In addition to the “need-data” and “enough-data” signals, appsrc can
emit the “seek-data” signal when the “stream-type” property is set to
“seekable” or “random-access”. The signal argument will contain the
new desired position in the stream expressed in the unit set with the
“format” property. After receiving the seek-data signal, the
application should push buffers from the new position.
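
A minimal sketch of a seek-data handler for a hypothetical file-backed
source using a bytes format; the FILE pointer is passed as user data
when connecting the signal:

``` c
#include <stdio.h>

/* offset is expressed in the unit configured with the "format" property,
 * bytes in this sketch */
static gboolean
on_seek_data (GstElement *appsrc, guint64 offset, gpointer user_data)
{
  FILE *f = user_data;

  /* reposition so that the next push starts from the requested offset */
  return fseek (f, (long) offset, SEEK_SET) == 0;
}

/* connected with:
 *   g_signal_connect (appsrc, "seek-data", G_CALLBACK (on_seek_data), file);
 */
```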

When the last byte is pushed into appsrc, you must call
`gst_app_src_end_of_stream ()` to make it send an EOS downstream.

These signals allow the application to operate appsrc in push and pull
mode as will be explained next.

#### Using appsrc in push mode

When appsrc is configured in push mode (stream-type is stream or
seekable), the application repeatedly calls the push-buffer method with
a new buffer. Optionally, the queue size in the appsrc can be controlled
with the enough-data and need-data signals by respectively
stopping/starting the push-buffer calls. The value of the min-percent
property defines how empty the internal appsrc queue needs to be before
the need-data signal will be fired. You can set this to some value \>0
to avoid completely draining the queue.
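
A minimal sketch of that flow control, assuming a hypothetical
`feed_one_buffer` function and a global source id: feeding starts from
an idle handler on “need-data” and stops again on “enough-data”.

``` c
static guint feed_source_id = 0;

/* idle handler: push one buffer per iteration of the main loop */
static gboolean
feed_one_buffer (gpointer user_data)
{
  GstElement *appsrc = user_data;
  GstBuffer *buffer = gst_buffer_new_allocate (NULL, 4096, NULL);
  GstFlowReturn ret;

  /* fill the buffer with real data here */

  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
  gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    feed_source_id = 0;
    return FALSE;               /* stop feeding on error */
  }
  return TRUE;
}

static void
start_feed (GstElement *appsrc, guint unused_size, gpointer user_data)
{
  if (feed_source_id == 0)
    feed_source_id = g_idle_add (feed_one_buffer, appsrc);
}

static void
stop_feed (GstElement *appsrc, gpointer user_data)
{
  if (feed_source_id != 0) {
    g_source_remove (feed_source_id);
    feed_source_id = 0;
  }
}

/* g_signal_connect (appsrc, "need-data",   G_CALLBACK (start_feed), NULL);
 * g_signal_connect (appsrc, "enough-data", G_CALLBACK (stop_feed),  NULL);
 */
```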

When the stream-type is set to seekable, don't forget to implement a
seek-data callback.

Use this model when implementing various network protocols or hardware
devices.

#### Using appsrc in pull mode

In the pull model, data is fed to appsrc from the need-data signal
handler. You should push exactly the amount of bytes requested in the
need-data signal. You are only allowed to push fewer bytes when you are
at the end of the stream.

Use this model for file access or other randomly accessible sources.
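
A minimal sketch of such a need-data handler for a hypothetical
file-backed source; it pushes exactly the requested number of bytes and
ends the stream when the file is exhausted:

``` c
#include <stdio.h>
#include <gst/gst.h>

static void
on_need_data (GstElement *appsrc, guint length, gpointer user_data)
{
  FILE *f = user_data;
  GstBuffer *buffer;
  GstMapInfo map;
  GstFlowReturn ret;
  size_t bytes_read;

  buffer = gst_buffer_new_allocate (NULL, length, NULL);
  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  bytes_read = fread (map.data, 1, length, f);
  gst_buffer_unmap (buffer, &map);

  if (bytes_read == 0) {
    /* end of file: signal EOS instead of pushing an empty buffer */
    gst_buffer_unref (buffer);
    g_signal_emit_by_name (appsrc, "end-of-stream", &ret);
    return;
  }

  /* shrink the buffer if we could only read part of the request */
  gst_buffer_set_size (buffer, bytes_read);

  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
  gst_buffer_unref (buffer);
}
```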

#### Appsrc example

This example application will generate black/white (it switches every
second) video to an Xv-window output by using appsrc as a source with
caps to force a format. We use a colorspace conversion element to make
sure that we feed the right format to the X server. We configure a
video stream with a variable framerate (0/1) and we set the timestamps
on the outgoing buffers in such a way that we play 2 frames per second.

Note how we use the pull mode method of pushing new buffers into appsrc
although appsrc is running in push mode.

``` c
#include <gst/gst.h>

static GMainLoop *loop;

static void
cb_need_data (GstElement *appsrc,
          guint       unused_size,
          gpointer    user_data)
{
  static gboolean white = FALSE;
  static GstClockTime timestamp = 0;
  GstBuffer *buffer;
  guint size;
  GstFlowReturn ret;

  size = 384 * 288 * 2;

  buffer = gst_buffer_new_allocate (NULL, size, NULL);

  /* this makes the image black/white */
  gst_buffer_memset (buffer, 0, white ? 0xff : 0x0, size);

  white = !white;

  GST_BUFFER_PTS (buffer) = timestamp;
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 2);

  timestamp += GST_BUFFER_DURATION (buffer);

  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
  gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    /* something wrong, stop pushing */
    g_main_loop_quit (loop);
  }
}

gint
main (gint   argc,
      gchar *argv[])
{
  GstElement *pipeline, *appsrc, *conv, *videosink;

  /* init GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* setup pipeline */
  pipeline = gst_pipeline_new ("pipeline");
  appsrc = gst_element_factory_make ("appsrc", "source");
  conv = gst_element_factory_make ("videoconvert", "conv");
  videosink = gst_element_factory_make ("xvimagesink", "videosink");

  /* setup */
  g_object_set (G_OBJECT (appsrc), "caps",
        gst_caps_new_simple ("video/x-raw",
                     "format", G_TYPE_STRING, "RGB16",
                     "width", G_TYPE_INT, 384,
                     "height", G_TYPE_INT, 288,
                     "framerate", GST_TYPE_FRACTION, 0, 1,
                     NULL), NULL);
  gst_bin_add_many (GST_BIN (pipeline), appsrc, conv, videosink, NULL);
  gst_element_link_many (appsrc, conv, videosink, NULL);

  /* setup appsrc */
  g_object_set (G_OBJECT (appsrc),
        "stream-type", 0,
        "format", GST_FORMAT_TIME, NULL);
  g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), NULL);

  /* play */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  /* clean up */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));
  g_main_loop_unref (loop);

  return 0;
}
```

### Grabbing data with appsink

Unlike appsrc, appsink is a little easier to use. It also supports a
pull and push based model of getting data from the pipeline.

The normal way of retrieving samples from appsink is by using the
`gst_app_sink_pull_sample()` and `gst_app_sink_pull_preroll()` methods
or by using the “pull-sample” and “pull-preroll” signals. These methods
block until a sample becomes available in the sink or when the sink is
shut down or reaches EOS.

Appsink will internally use a queue to collect buffers from the
streaming thread. If the application is not pulling samples fast enough,
this queue will consume a lot of memory over time. The “max-buffers”
property can be used to limit the queue size. The “drop” property
controls whether the streaming thread blocks or if older buffers are
dropped when the maximum queue size is reached. Note that blocking the
streaming thread can negatively affect real-time performance and should
be avoided.

If a blocking behaviour is not desirable, setting the “emit-signals”
property to TRUE will make appsink emit the “new-sample” and
“new-preroll” signals when a sample can be pulled without blocking.
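
A minimal sketch of that signal-driven model, assuming a hypothetical
`on_new_sample` callback:

``` c
static GstFlowReturn
on_new_sample (GstElement *appsink, gpointer user_data)
{
  GstSample *sample = NULL;

  /* a sample is ready, so this pull will not block */
  g_signal_emit_by_name (appsink, "pull-sample", &sample);
  if (sample == NULL)
    return GST_FLOW_EOS;

  /* ... inspect or copy the buffer from the sample here ... */

  gst_sample_unref (sample);
  return GST_FLOW_OK;
}

/* setup:
 *   g_object_set (appsink, "emit-signals", TRUE, NULL);
 *   g_signal_connect (appsink, "new-sample", G_CALLBACK (on_new_sample), NULL);
 */
```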

The “caps” property on appsink can be used to control the formats that
appsink can receive. This property can contain non-fixed caps. The
format of the pulled samples can be obtained by getting the sample caps.

If one of the pull-preroll or pull-sample methods returns NULL, the
appsink is stopped or in the EOS state. You can check for the EOS state
with the “eos” property or with the `gst_app_sink_is_eos()` method.

The eos signal can also be used to be informed when the EOS state is
reached to avoid polling.

Consider configuring the following properties in the appsink:

  - The “sync” property if you want to have the sink base class
    synchronize the buffer against the pipeline clock before handing you
    the sample.

  - Enable Quality-of-Service with the “qos” property. If you are
    dealing with raw video frames and let the base class synchronize on
    the clock, it might be a good idea to also let the base class send
    QOS events upstream.

  - The caps property that contains the accepted caps. Upstream elements
    will try to convert the format so that it matches the configured
    caps on appsink. You must still check the `GstSample` to get the
    actual caps of the buffer.

#### Appsink example

What follows is an example on how to capture a snapshot of a video
stream using appsink.

``` c
#include <gst/gst.h>
#ifdef HAVE_GTK
#include <gtk/gtk.h>
#endif

#include <stdlib.h>

#define CAPS "video/x-raw,format=RGB,width=160,pixel-aspect-ratio=1/1"

int
main (int argc, char *argv[])
{
  GstElement *pipeline, *sink;
  gint width, height;
  GstSample *sample;
  gchar *descr;
  GError *error = NULL;
  gint64 duration, position;
  GstStateChangeReturn ret;
  gboolean res;
  GstMapInfo map;

  gst_init (&argc, &argv);

  if (argc != 2) {
    g_print ("usage: %s <uri>\n Writes snapshot.png in the current directory\n",
        argv[0]);
    exit (-1);
  }

  /* create a new pipeline */
  descr =
      g_strdup_printf ("uridecodebin uri=%s ! videoconvert ! videoscale ! "
      " appsink name=sink caps=\"" CAPS "\"", argv[1]);
  pipeline = gst_parse_launch (descr, &error);

  if (error != NULL) {
    g_print ("could not construct pipeline: %s\n", error->message);
    g_clear_error (&error);
    exit (-1);
  }

  /* get sink */
  sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");

  /* set to PAUSED to make the first frame arrive in the sink */
  ret = gst_element_set_state (pipeline, GST_STATE_PAUSED);
  switch (ret) {
    case GST_STATE_CHANGE_FAILURE:
      g_print ("failed to play the file\n");
      exit (-1);
    case GST_STATE_CHANGE_NO_PREROLL:
      /* for live sources, we need to set the pipeline to PLAYING before we can
       * receive a buffer. We don't do that yet */
      g_print ("live sources not supported yet\n");
      exit (-1);
    default:
      break;
  }
  /* This can block for up to 5 seconds. If your machine is really overloaded,
   * it might time out before the pipeline prerolled and we generate an error. A
   * better way is to run a mainloop and catch errors there. */
  ret = gst_element_get_state (pipeline, NULL, NULL, 5 * GST_SECOND);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_print ("failed to play the file\n");
    exit (-1);
  }

  /* get the duration */
  gst_element_query_duration (pipeline, GST_FORMAT_TIME, &duration);

  if (duration != -1)
    /* we have a duration, seek to 5% */
    position = duration * 5 / 100;
  else
    /* no duration, seek to 1 second, this could EOS */
    position = 1 * GST_SECOND;

  /* seek to a position in the file. Most files have a black first frame so
   * by seeking to somewhere else we have a bigger chance of getting something
   * more interesting. An optimisation would be to detect black images and then
   * seek a little more */
  gst_element_seek_simple (pipeline, GST_FORMAT_TIME,
      GST_SEEK_FLAG_KEY_UNIT | GST_SEEK_FLAG_FLUSH, position);

  /* get the preroll buffer from appsink, this blocks until appsink really
   * prerolls */
  g_signal_emit_by_name (sink, "pull-preroll", &sample, NULL);

  /* if we have a buffer now, convert it to a pixbuf. It's possible that we
   * don't have a buffer because we went EOS right away or had an error. */
  if (sample) {
    GstBuffer *buffer;
    GstCaps *caps;
    GstStructure *s;

    /* get the snapshot buffer format now. We set the caps on the appsink so
     * that it can only be an rgb buffer. The only thing we have not specified
     * on the caps is the height, which is dependent on the pixel-aspect-ratio
     * of the source material */
    caps = gst_sample_get_caps (sample);
    if (!caps) {
      g_print ("could not get snapshot format\n");
      exit (-1);
    }
    s = gst_caps_get_structure (caps, 0);

    /* we need to get the final caps on the buffer to get the size */
    res = gst_structure_get_int (s, "width", &width);
    res |= gst_structure_get_int (s, "height", &height);
    if (!res) {
      g_print ("could not get snapshot dimension\n");
      exit (-1);
    }

    /* create pixbuf from buffer and save, gstreamer video buffers have a stride
     * that is rounded up to the nearest multiple of 4 */
    buffer = gst_sample_get_buffer (sample);
    /* Mapping a buffer can fail (non-readable) */
    if (gst_buffer_map (buffer, &map, GST_MAP_READ)) {
#ifdef HAVE_GTK
      GdkPixbuf *pixbuf;

      pixbuf = gdk_pixbuf_new_from_data (map.data,
          GDK_COLORSPACE_RGB, FALSE, 8, width, height,
          GST_ROUND_UP_4 (width * 3), NULL, NULL);

      /* save the pixbuf */
      gdk_pixbuf_save (pixbuf, "snapshot.png", "png", &error, NULL);
#endif
      gst_buffer_unmap (buffer, &map);
    }
    gst_sample_unref (sample);
  } else {
    g_print ("could not make snapshot\n");
  }

  /* cleanup and exit */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  exit (0);
}

```

## Forcing a format

Sometimes you'll want to set a specific format, for example a video size
and format or an audio bitsize and number of channels. You can do this
by forcing a specific `GstCaps` on the pipeline, which is possible by
using *filtered caps*. You can set a filtered caps on a link by using
the “capsfilter” element in between the two elements, and specifying a
`GstCaps` as “caps” property on this element. It will then only allow
types matching that specified capability set for negotiation. See also
[Creating capabilities for
filtering](manual-pads.md#creating-capabilities-for-filtering).
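
A minimal sketch of that approach, with made-up caps values and a
hypothetical helper function, might look like this:

``` c
static gboolean
link_with_forced_format (GstBin *pipeline, GstElement *src, GstElement *sink)
{
  GstElement *filter = gst_element_factory_make ("capsfilter", "filter");
  GstCaps *caps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "I420",
      "width", G_TYPE_INT, 640,
      "height", G_TYPE_INT, 480,
      NULL);
  gboolean ok;

  /* only media matching these caps will be allowed on this link */
  g_object_set (filter, "caps", caps, NULL);
  gst_caps_unref (caps);

  gst_bin_add (pipeline, filter);
  ok = gst_element_link_many (src, filter, sink, NULL);

  return ok;
}
```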

### Changing format in a PLAYING pipeline

It is also possible to dynamically change the format in a pipeline while
PLAYING. This can simply be done by changing the caps property on a
capsfilter. The capsfilter will send a RECONFIGURE event upstream that
will make the upstream element attempt to renegotiate a new format and
allocator. This only works if the upstream element is not using fixed
caps on the source pad.

Below is an example of how you can change the caps of a pipeline while
in the PLAYING state:

``` c
#include <stdlib.h>

#include <gst/gst.h>

#define MAX_ROUND 100

int
main (int argc, char **argv)
{
  GstElement *pipe, *filter;
  GstCaps *caps;
  gint width, height;
  gint xdir, ydir;
  gint round;
  GstMessage *message;

  gst_init (&argc, &argv);

  pipe = gst_parse_launch_full ("videotestsrc ! capsfilter name=filter ! "
             "ximagesink", NULL, GST_PARSE_FLAG_NONE, NULL);
  g_assert (pipe != NULL);

  filter = gst_bin_get_by_name (GST_BIN (pipe), "filter");
  g_assert (filter);

  width = 320;
  height = 240;
  xdir = ydir = -10;

  for (round = 0; round < MAX_ROUND; round++) {
    gchar *capsstr;
    g_print ("resize to %dx%d (%d/%d)   \r", width, height, round, MAX_ROUND);

    /* we prefer our fixed width and height but allow other dimensions to pass
     * as well */
    capsstr = g_strdup_printf ("video/x-raw, width=(int)%d, height=(int)%d",
        width, height);

    caps = gst_caps_from_string (capsstr);
    g_free (capsstr);
    g_object_set (filter, "caps", caps, NULL);
    gst_caps_unref (caps);

    if (round == 0)
      gst_element_set_state (pipe, GST_STATE_PLAYING);

    width += xdir;
    if (width >= 320)
      xdir = -10;
    else if (width < 200)
      xdir = 10;

    height += ydir;
    if (height >= 240)
      ydir = -10;
    else if (height < 150)
      ydir = 10;

    message =
        gst_bus_poll (GST_ELEMENT_BUS (pipe), GST_MESSAGE_ERROR,
        50 * GST_MSECOND);
    if (message) {
      g_print ("got error           \n");

      gst_message_unref (message);
    }
  }
  g_print ("done                    \n");

  gst_object_unref (filter);
  gst_element_set_state (pipe, GST_STATE_NULL);
  gst_object_unref (pipe);

  return 0;
}
```

Note how we use `gst_bus_poll()` with a small timeout to get messages
and also introduce a short sleep.

It is possible to set multiple caps structures on the capsfilter,
separated with a `;`, for example
`video/x-raw, width=320, height=240; video/x-raw, width=640, height=480`.
The capsfilter will try to renegotiate to the first possible format
from the list.

## Dynamically changing the pipeline

In this section we talk about some techniques for dynamically modifying
the pipeline. We are talking specifically about changing the pipeline
while it is in the PLAYING state without interrupting the flow.

There are some important things to consider when building dynamic
pipelines:

  - When removing elements from the pipeline, make sure that there is no
    dataflow on unlinked pads because that will cause a fatal pipeline
    error. Always block source pads (in push mode) or sink pads (in pull
    mode) before unlinking pads. See also [Changing elements in a
    pipeline](#changing-elements-in-a-pipeline).

  - When adding elements to a pipeline, make sure to put the element
    into the right state, usually the same state as the parent, before
    allowing dataflow to reach the element. When an element is newly
    created, it is in the NULL state and will return an error when it
    receives data. See also [Changing elements in a
    pipeline](#changing-elements-in-a-pipeline).

  - When adding elements to a pipeline, GStreamer will by default set
    the clock and base-time on the element to the current values of the
    pipeline. This means that the element will be able to construct the
    same pipeline running-time as the other elements in the pipeline.
    This means that sinks will synchronize buffers like the other sinks
    in the pipeline and that sources produce buffers with a running-time
    that matches the other sources.

  - When unlinking elements from an upstream chain, always make sure to
    flush any queued data in the element by sending an EOS event down
    the element's sink pad(s) and by waiting until the EOS leaves the
    element (with an event probe).
    
    If you do not do this, you will lose the data which is buffered by
    the unlinked element. This can result in simple frame loss (one or
    more video frames, several milliseconds of audio). However, if you
    remove a muxer (and in some cases an encoder or similar elements)
    from the pipeline, you risk getting a corrupted file which cannot
    be played properly, as some relevant metadata (header, seek/index
    tables, internal sync tags) will not be stored or updated properly.
    
    See also [Changing elements in a
    pipeline](#changing-elements-in-a-pipeline).

  - A live source will produce buffers with a running-time of the
    current running-time in the pipeline.
    
    A pipeline without a live source produces buffers with a
    running-time starting from 0. Likewise, after a flushing seek, those
    pipelines reset the running-time back to 0.
    
    The running-time can be changed with `gst_pad_set_offset ()`. It is
    important to know the running-time of the elements in the pipeline
    in order to maintain synchronization.

  - Adding elements might change the state of the pipeline. Adding a
    non-prerolled sink, for example, brings the pipeline back to the
    prerolling state. Removing a non-prerolled sink might allow the
    pipeline to continue to the PAUSED or PLAYING state.

    Adding a live source cancels the preroll stage and puts the pipeline
    in the PLAYING state. Adding a live source or other live elements
    might also change the latency of a pipeline.
    
    Adding or removing elements to the pipeline might change the clock
    selection of the pipeline. If the newly added element provides a
    clock, it might be worth changing the clock in the pipeline to the
    new clock. If, on the other hand, the element that provides the
    clock for the pipeline is removed, a new clock has to be selected.

  - Adding and removing elements might cause upstream or downstream
    elements to renegotiate caps and/or allocators. You don't really
    need to do anything from the application; plugins largely adapt
    themselves to the new pipeline topology in order to optimize their
    formats and allocation strategy.
    
    What is important is that when you add, remove or change elements in
    the pipeline, it is possible that the pipeline needs to negotiate a
    new format and this can fail. Usually you can fix this by inserting
    the right converter elements where needed. See also [Changing
    elements in a pipeline](#changing-elements-in-a-pipeline).

GStreamer offers support for just about any dynamic pipeline
modification, but it requires you to know a few details before you can
do this without causing pipeline errors. In the following sections we
will demonstrate a couple of typical use-cases.

### Changing elements in a pipeline

In the next example we look at the following chain of elements:

``` 
            - ----.      .----------.      .---- -
         element1 |      | element2 |      | element3
                src -> sink       src -> sink
            - ----'      '----------'      '---- -
      
```

We want to replace element2 with element4 while the pipeline is in the
PLAYING state. Let's say that element2 is a visualization and that you
want to switch the visualization in the pipeline.

We can't just unlink element2's sinkpad from element1's source pad
because that would leave element1's source pad unlinked and would cause
a streaming error in the pipeline when data is pushed on the source pad.
The technique is to block the dataflow from element1's source pad before
we replace element2 with element4 and then resume dataflow as shown in
the following steps:

  - Block element1's source pad with a blocking pad probe. When the pad
    is blocked, the probe callback will be called.

  - Inside the block callback nothing is flowing between element1 and
    element2 and nothing will flow until unblocked.

  - Unlink element1 and element2.

  - Make sure data is flushed out of element2. Some elements might
    internally keep some data, you need to make sure not to lose data by
    forcing it out of element2. You can do this by pushing EOS into
    element2, like this:
    
      - Put an event probe on element2's source pad.
    
      - Send EOS to element2's sinkpad. This makes sure that all the data
        inside element2 is forced out.
    
      - Wait for the EOS event to appear on element2's source pad. When
        the EOS is received, drop it and remove the event probe.

  - Unlink element2 and element3. You can now also remove element2 from
    the pipeline and set the state to NULL.

  - Add element4 to the pipeline, if not already added. Link element4
    and element3. Link element1 and element4.

  - Make sure element4 is in the same state as the rest of the elements
    in the pipeline. It should be at least in the PAUSED state before it
    can receive buffers and events.

  - Unblock element1's source pad probe. This will let new data into
    element4 and continue streaming.

The above algorithm works when the source pad is blocked, i.e. when
there is dataflow in the pipeline. If there is no dataflow, there is
also no point in changing the element (just yet) so this algorithm can
be used in the PAUSED state as well.

Let us show you how this works with an example. This example changes the
video effect on a simple pipeline every second.

``` c
#include <gst/gst.h>

static gchar *opt_effects = NULL;

#define DEFAULT_EFFECTS "identity,exclusion,navigationtest," \
    "agingtv,videoflip,vertigotv,gaussianblur,shagadelictv,edgetv"

static GstPad *blockpad;
static GstElement *conv_before;
static GstElement *conv_after;
static GstElement *cur_effect;
static GstElement *pipeline;

static GQueue effects = G_QUEUE_INIT;

static GstPadProbeReturn
event_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GMainLoop *loop = user_data;
  GstElement *next;

  if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_DATA (info)) != GST_EVENT_EOS)
    return GST_PAD_PROBE_PASS;

  gst_pad_remove_probe (pad, GST_PAD_PROBE_INFO_ID (info));

  /* push current effect back into the queue */
  g_queue_push_tail (&effects, gst_object_ref (cur_effect));
  /* take next effect from the queue */
  next = g_queue_pop_head (&effects);
  if (next == NULL) {
    GST_DEBUG_OBJECT (pad, "no more effects");
    g_main_loop_quit (loop);
    return GST_PAD_PROBE_DROP;
  }

  g_print ("Switching from '%s' to '%s'..\n", GST_OBJECT_NAME (cur_effect),
      GST_OBJECT_NAME (next));

  gst_element_set_state (cur_effect, GST_STATE_NULL);

  /* remove unlinks automatically */
  GST_DEBUG_OBJECT (pipeline, "removing %" GST_PTR_FORMAT, cur_effect);
  gst_bin_remove (GST_BIN (pipeline), cur_effect);

  GST_DEBUG_OBJECT (pipeline, "adding   %" GST_PTR_FORMAT, next);
  gst_bin_add (GST_BIN (pipeline), next);

  GST_DEBUG_OBJECT (pipeline, "linking..");
  gst_element_link_many (conv_before, next, conv_after, NULL);

  gst_element_set_state (next, GST_STATE_PLAYING);

  cur_effect = next;
  GST_DEBUG_OBJECT (pipeline, "done");

  return GST_PAD_PROBE_DROP;
}

static GstPadProbeReturn
pad_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstPad *srcpad, *sinkpad;

  GST_DEBUG_OBJECT (pad, "pad is blocked now");

  /* remove the probe first */
  gst_pad_remove_probe (pad, GST_PAD_PROBE_INFO_ID (info));

  /* install new probe for EOS */
  srcpad = gst_element_get_static_pad (cur_effect, "src");
  gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_BLOCK |
      GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, event_probe_cb, user_data, NULL);
  gst_object_unref (srcpad);

  /* push EOS into the element, the probe will be fired when the
   * EOS leaves the effect and it has thus drained all of its data */
  sinkpad = gst_element_get_static_pad (cur_effect, "sink");
  gst_pad_send_event (sinkpad, gst_event_new_eos ());
  gst_object_unref (sinkpad);

  return GST_PAD_PROBE_OK;
}

static gboolean
timeout_cb (gpointer user_data)
{
  gst_pad_add_probe (blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
      pad_probe_cb, user_data, NULL);

  return TRUE;
}

static gboolean
bus_cb (GstBus * bus, GstMessage * msg, gpointer user_data)
{
  GMainLoop *loop = user_data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR:{
      GError *err = NULL;
      gchar *dbg;

      gst_message_parse_error (msg, &err, &dbg);
      gst_object_default_error (msg->src, err, dbg);
      g_clear_error (&err);
      g_free (dbg);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

int
main (int argc, char **argv)
{
  GOptionEntry options[] = {
    {"effects", 'e', 0, G_OPTION_ARG_STRING, &opt_effects,
        "Effects to use (comma-separated list of element names)", NULL},
    {NULL}
  };
  GOptionContext *ctx;
  GError *err = NULL;
  GMainLoop *loop;
  GstElement *src, *q1, *q2, *effect, *filter1, *filter2, *sink;
  gchar **effect_names, **e;

  ctx = g_option_context_new ("");
  g_option_context_add_main_entries (ctx, options, NULL);
  g_option_context_add_group (ctx, gst_init_get_option_group ());
  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
    g_print ("Error initializing: %s\n", err->message);
    g_clear_error (&err);
    g_option_context_free (ctx);
    return 1;
  }
  g_option_context_free (ctx);

  if (opt_effects != NULL)
    effect_names = g_strsplit (opt_effects, ",", -1);
  else
    effect_names = g_strsplit (DEFAULT_EFFECTS, ",", -1);

  for (e = effect_names; e != NULL && *e != NULL; ++e) {
    GstElement *el;

    el = gst_element_factory_make (*e, NULL);
    if (el) {
      g_print ("Adding effect '%s'\n", *e);
      g_queue_push_tail (&effects, el);
    }
  }

  pipeline = gst_pipeline_new ("pipeline");

  src = gst_element_factory_make ("videotestsrc", NULL);
  g_object_set (src, "is-live", TRUE, NULL);

  filter1 = gst_element_factory_make ("capsfilter", NULL);
  gst_util_set_object_arg (G_OBJECT (filter1), "caps",
      "video/x-raw, width=320, height=240, "
      "format={ I420, YV12, YUY2, UYVY, AYUV, Y41B, Y42B, "
      "YVYU, Y444, v210, v216, NV12, NV21, UYVP, A420, YUV9, YVU9, IYU1 }");

  q1 = gst_element_factory_make ("queue", NULL);

  blockpad = gst_element_get_static_pad (q1, "src");

  conv_before = gst_element_factory_make ("videoconvert", NULL);

  effect = g_queue_pop_head (&effects);
  cur_effect = effect;

  conv_after = gst_element_factory_make ("videoconvert", NULL);

  q2 = gst_element_factory_make ("queue", NULL);

  filter2 = gst_element_factory_make ("capsfilter", NULL);
  gst_util_set_object_arg (G_OBJECT (filter2), "caps",
      "video/x-raw, width=320, height=240, "
      "format={ RGBx, BGRx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR }");

  sink = gst_element_factory_make ("ximagesink", NULL);

  gst_bin_add_many (GST_BIN (pipeline), src, filter1, q1, conv_before, effect,
      conv_after, q2, sink, NULL);

  gst_element_link_many (src, filter1, q1, conv_before, effect, conv_after,
      q2, sink, NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  loop = g_main_loop_new (NULL, FALSE);

  gst_bus_add_watch (GST_ELEMENT_BUS (pipeline), bus_cb, loop);

  g_timeout_add_seconds (1, timeout_cb, loop);

  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}
```

Note how we added videoconvert elements before and after the effect.
This is needed because some elements might operate in different
colorspaces than other elements. By inserting the conversion elements
you ensure that the right format can be negotiated at any time.