summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorDavid Henningsson <david.henningsson@canonical.com>2013-03-25 09:20:16 +0100
committerTanu Kaskinen <tanuk@iki.fi>2013-03-25 15:44:36 +0200
commitfeeec28de481de41fc99f73ccc9bc64ff06235f8 (patch)
treeadd0f924374cf2cece4580e68571dd8c735cf43f
parent5f326b705d8f7f0c14e7e0c7d7c2751f3a5ebe43 (diff)
client API: Document buffer_attr.maxlength
Let's officially support that people use maxlength to put an upper bound on playback latency.

Signed-off-by: David Henningsson <david.henningsson@canonical.com>
-rw-r--r-- src/pulse/def.h | 8
1 file changed, 7 insertions(+), 1 deletion(-)
diff --git a/src/pulse/def.h b/src/pulse/def.h
index 495302f8..a7f592b3 100644
--- a/src/pulse/def.h
+++ b/src/pulse/def.h
@@ -360,7 +360,13 @@ typedef struct pa_buffer_attr {
uint32_t maxlength;
/**< Maximum length of the buffer in bytes. Setting this to (uint32_t) -1
* will initialize this to the maximum value supported by server,
- * which is recommended. */
+ * which is recommended.
+ *
+ * In strict low-latency playback scenarios you might want to set this to
+ * a lower value, likely together with the PA_STREAM_ADJUST_LATENCY flag.
+ * If you do so, you ensure that the latency doesn't grow beyond what is
+ * acceptable for the use case, at the cost of getting more underruns if
+ * the latency is lower than what the server can reliably handle. */
uint32_t tlength;
/**< Playback only: target length of the buffer. The server tries