From 11a1bbbe6984a3a5e8b38039b587a3b7698efdc9 Mon Sep 17 00:00:00 2001
From: Mathieu Duponchelle
Date: Wed, 11 May 2022 01:42:10 +0200
Subject: [PATCH] ts-jitterbuffer: set jbuf delay when instantiating it

The internal (C) jitterbuffer needs to know about the configured latency
when calculating a PTS, as it otherwise may consider that the packet is
too late, trigger a resync and cause the element to discard the packet
altogether.

I could not identify when this was broken, but the net effect was that in
the current state, ts-jitterbuffer was discarding up to half of all the
incoming packets.
---
 generic/threadshare/src/jitterbuffer/imp.rs | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/generic/threadshare/src/jitterbuffer/imp.rs b/generic/threadshare/src/jitterbuffer/imp.rs
index c84d8050..2dca5037 100644
--- a/generic/threadshare/src/jitterbuffer/imp.rs
+++ b/generic/threadshare/src/jitterbuffer/imp.rs
@@ -1080,7 +1080,12 @@ impl TaskImpl for JitterBufferTask {
             self.sink_pad_handler.clear();
             let jb = self.element.imp();
-            *jb.state.lock().unwrap() = State::default();
+
+            let latency = jb.settings.lock().unwrap().latency;
+            let state = State::default();
+
+            state.jbuf.borrow().set_delay(latency);
+            *jb.state.lock().unwrap() = state;
 
             gst_log!(CAT, obj: &self.element, "Task started");
 
             Ok(())
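
Note (not part of the patch): for context, a minimal sketch of how an application
would configure the latency that this change now propagates into the internal
jitterbuffer on task start. The element name `ts-jitterbuffer` comes from the
threadshare plugin; the millisecond-typed `latency` property and the
gstreamer-rs 0.18-era API (`ElementFactory::make(name, None)`, panicking
`set_property`) are assumptions here, not something this patch defines.

    // Build a ts-jitterbuffer and set its latency from application code.
    // Before this patch, the configured latency was not pushed into the
    // internal (C) jitterbuffer when the task started, so packets could be
    // flagged as too late and discarded.
    use gstreamer as gst;
    use gst::prelude::*;

    fn make_jitterbuffer() -> Result<gst::Element, Box<dyn std::error::Error>> {
        gst::init()?;

        let jb = gst::ElementFactory::make("ts-jitterbuffer", None)?;
        // Assumed to be milliseconds, mirroring rtpjitterbuffer's `latency` property.
        jb.set_property("latency", 200u32);

        Ok(jb)
    }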