forked from buildroot/buildroot
-
Notifications
You must be signed in to change notification settings - Fork 69
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[webkit]: Query playback position from decoders; in case there are no more video position updates (e.g. when a still video picture is shown), switch to the audio position
- Loading branch information
Showing 1 changed file with 319 additions and 0 deletions.
There are no files selected for viewing
319 changes: 319 additions & 0 deletions
319
package/wpe/wpewebkit/0003-make-resume-seek-live-working-for-gst1bcm-18.2.patch
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,319 @@ | ||
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp | ||
index 4378b29..cbd3851 100644 | ||
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp | ||
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp | ||
@@ -160,6 +160,10 @@ MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player) | ||
, m_totalBytes(0) | ||
, m_preservesPitch(false) | ||
, m_lastQuery(-1) | ||
+#if PLATFORM(BCM_NEXUS) | ||
+ , m_videoDecoder(nullptr) | ||
+ , m_audioDecoder(nullptr) | ||
+#endif | ||
{ | ||
#if USE(GLIB) | ||
m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE); | ||
@@ -312,12 +316,12 @@ void MediaPlayerPrivateGStreamer::commitLoad() | ||
} | ||
|
||
#if PLATFORM(BCM_NEXUS) | ||
-// utility function for bcm nexus seek functionality | ||
-static void findDecoders(GstElement *element, GstElement **videoDecoder, GstElement **audioDecoder) | ||
-{ | ||
- if (!(videoDecoder || audioDecoder)) | ||
- return; | ||
- | ||
+// Find a decoder based on plugin name. | ||
+static void findDecoder(GstElement *element, GstElement **decoder, const CString &search) | ||
+{ | ||
+ if (!decoder) | ||
+ return; | ||
+ | ||
if (GST_IS_BIN(element)) { | ||
GstIterator* it = gst_bin_iterate_elements(GST_BIN(element)); | ||
GValue item = G_VALUE_INIT; | ||
@@ -327,8 +331,8 @@ static void findDecoders(GstElement *element, GstElement **videoDecoder, GstElem | ||
case GST_ITERATOR_OK: | ||
{ | ||
GstElement *next = GST_ELEMENT(g_value_get_object(&item)); | ||
- findDecoders(next, videoDecoder, audioDecoder); | ||
- done = (!((videoDecoder && !*videoDecoder) || (audioDecoder && !*audioDecoder))); | ||
+ findDecoder(next, decoder, search); | ||
+ done = (!(decoder && !*decoder)); | ||
g_value_reset (&item); | ||
break; | ||
} | ||
@@ -343,10 +347,9 @@ static void findDecoders(GstElement *element, GstElement **videoDecoder, GstElem | ||
} | ||
g_value_unset (&item); | ||
gst_iterator_free(it); | ||
- } else if (videoDecoder && (GST_IS_VIDEO_DECODER(element) || g_str_has_suffix(G_OBJECT_TYPE_NAME(G_OBJECT(element)), "VideoDecoder"))) | ||
- *videoDecoder = element; | ||
- else if (audioDecoder && (GST_IS_AUDIO_DECODER(element) || g_str_has_suffix(G_OBJECT_TYPE_NAME(G_OBJECT(element)), "AudioDecoder"))) | ||
- *audioDecoder = element; | ||
+ } else if (decoder && (g_strstr_len(gst_element_get_name(element), search.length(), search.data()))) { | ||
+ *decoder = element; | ||
+ } | ||
return; | ||
} | ||
#endif | ||
@@ -355,6 +358,7 @@ static void findDecoders(GstElement *element, GstElement **videoDecoder, GstElem | ||
MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const | ||
{ | ||
|
||
+ GST_INFO("CachedPosition %s", toString(m_cachedPosition).utf8().data()); | ||
if (m_isEndReached) { | ||
// Position queries on a null pipeline return 0. If we're at | ||
// the end of the stream the pipeline is null but we want to | ||
@@ -373,7 +377,6 @@ MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const | ||
if (m_lastQuery > -1 && ((now - m_lastQuery) < 0.01) && m_cachedPosition.isValid()) | ||
return m_cachedPosition; | ||
|
||
- m_lastQuery = now; | ||
|
||
// Position is only available if no async state change is going on and the state is either paused or playing. | ||
gint64 position = GST_CLOCK_TIME_NONE; | ||
@@ -392,32 +395,91 @@ MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const | ||
// Implement getting pts time from broadcom decoder directly for seek functionality. | ||
// In some cases one stream (audio or video) is shorter than the other and its position doesn't | ||
// increase anymore. We need to query both decoders (if available) and choose the highest position. | ||
- GstElement* videoDecoder = nullptr; | ||
- GstElement* audioDecoder = nullptr; | ||
GstClockTime videoPosition = GST_CLOCK_TIME_NONE; | ||
GstClockTime audioPosition = GST_CLOCK_TIME_NONE; | ||
|
||
- findDecoders(m_pipeline.get(), &videoDecoder, &audioDecoder); | ||
- | ||
- GST_TRACE("videoDecoder: %s, audioDecoder: %s", videoDecoder ? GST_ELEMENT_NAME(videoDecoder) : "null", audioDecoder ? GST_ELEMENT_NAME(audioDecoder) : "null"); | ||
+ if (!m_audioDecoder) { | ||
+ | ||
+ if (!m_videoDecoder) { | ||
+ GstElement *videoDecoder = nullptr; | ||
+ if (!(m_videoDecoder)) { | ||
+ findDecoder(m_pipeline.get(), &videoDecoder, "brcmvideodecoder"); | ||
+ } | ||
+ if (!videoDecoder) { | ||
+ m_lastQuery = now; | ||
+ return MediaTime::zeroTime(); | ||
+ } | ||
+ m_videoDecoder = videoDecoder; | ||
+ } | ||
|
||
- if (!(videoDecoder || audioDecoder)) | ||
- return MediaTime::zeroTime(); | ||
- if (videoDecoder && gst_element_query(videoDecoder, query)) | ||
- gst_query_parse_position(query, 0, (gint64*)&videoPosition); | ||
- if (audioDecoder) { | ||
- g_object_set(audioDecoder, "use-audio-position", true, nullptr); | ||
- if (gst_element_query(audioDecoder, query)) | ||
+ if (gst_element_query(m_videoDecoder, query)) { | ||
+ gst_query_parse_position(query, 0, (gint64*)&videoPosition); | ||
+ } /*else { | ||
+ GST_INFO("VideoDecoder is NULL"); | ||
+ m_videoDecoder = nullptr; | ||
+ }*/ | ||
+ | ||
+ | ||
+ if (videoPosition == GST_CLOCK_TIME_NONE) | ||
+ videoPosition = 0; | ||
+ | ||
+ | ||
+ if (!(m_seeking || m_paused)) { | ||
+ if (m_cachedPosition.isValid() && videoPosition != 0 ) { | ||
+ if ((static_cast<GstClockTime>(videoPosition) > toGstClockTime(m_cachedPosition)) || m_cachedPosition == MediaTime::zeroTime()) { | ||
+ // Always video position. | ||
+ position = videoPosition; | ||
+ } else if ((static_cast<GstClockTime>(videoPosition) == toGstClockTime(m_cachedPosition)) && | ||
+ ((m_lastQuery > -1 && (now - m_lastQuery) < 2))) { // TODO: 2 seconds for decision, are there any other ways to switch audio position? | ||
+ // If the reported position is same for 2 seconds, try audio position. | ||
+ gst_query_unref(query); | ||
+ return m_cachedPosition; | ||
+ } else if (m_cachedPosition == m_seekTime) { | ||
+ // When seeking is not completed, report video position. | ||
+ if (videoPosition > 0) | ||
+ position = videoPosition; | ||
+ } else { | ||
+ GST_INFO("Switch to audio position."); | ||
+ GstElement *audioDecoder = nullptr; | ||
+ if (!(m_audioDecoder)) { | ||
+ findDecoder(m_pipeline.get(), &audioDecoder, "brcmaudiodecoder"); | ||
+ } | ||
+ if (!audioDecoder) { | ||
+ m_lastQuery = now; | ||
+ gst_query_unref(query); | ||
+ return m_cachedPosition; | ||
+ } | ||
+ m_audioDecoder = audioDecoder; | ||
+ g_object_set(m_audioDecoder, "use-audio-position", true, nullptr); | ||
+ if (gst_element_query(m_audioDecoder, query)) | ||
+ gst_query_parse_position(query, 0, (gint64*)&audioPosition); | ||
+ | ||
+ if (audioPosition == GST_CLOCK_TIME_NONE) | ||
+ audioPosition = 0; | ||
+ | ||
+ position = audioPosition; | ||
+ } | ||
+ } | ||
+ } | ||
+ else { | ||
+ // Report cached position in case of paused or seeking. | ||
+ position = toGstClockTime(m_cachedPosition); | ||
+ } | ||
+ | ||
+ } else { | ||
+ if (gst_element_query(m_audioDecoder, query)) { | ||
gst_query_parse_position(query, 0, (gint64*)&audioPosition); | ||
+ } /*else { | ||
+ GST_INFO("AudioDecoder is NULL"); | ||
+ m_audioDecoder = nullptr; | ||
+ }*/ | ||
+ | ||
+ if (audioPosition == GST_CLOCK_TIME_NONE) | ||
+ audioPosition = 0; | ||
+ position = audioPosition; | ||
} | ||
- if (videoPosition == GST_CLOCK_TIME_NONE) | ||
- videoPosition = 0; | ||
- if (audioPosition == GST_CLOCK_TIME_NONE) | ||
- audioPosition = 0; | ||
- | ||
- GST_TRACE("videoPosition: %" GST_TIME_FORMAT ", audioPosition: %" GST_TIME_FORMAT, GST_TIME_ARGS(videoPosition), GST_TIME_ARGS(audioPosition)); | ||
- | ||
- position = max(videoPosition, audioPosition); | ||
+ | ||
+ GST_INFO("videoPosition: %" GST_TIME_FORMAT ", audioPosition: %" GST_TIME_FORMAT, GST_TIME_ARGS(videoPosition), GST_TIME_ARGS(audioPosition)); | ||
#else | ||
positionElement = m_pipeline.get(); | ||
#endif | ||
@@ -425,7 +487,7 @@ MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const | ||
gst_query_parse_position(query, 0, &position); | ||
gst_query_unref(query); | ||
|
||
- GST_DEBUG("Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position)); | ||
+ GST_INFO("Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position)); | ||
|
||
MediaTime playbackPosition = MediaTime::zeroTime(); | ||
GstClockTime gstreamerPosition = static_cast<GstClockTime>(position); | ||
@@ -435,6 +497,7 @@ MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const | ||
playbackPosition = m_seekTime; | ||
|
||
m_cachedPosition = playbackPosition; | ||
+ m_lastQuery = now; | ||
return playbackPosition; | ||
} | ||
|
||
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h | ||
index 5490a2d..d8d85bc 100644 | ||
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h | ||
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h | ||
@@ -196,7 +196,10 @@ protected: | ||
GRefPtr<GstElement> m_source; | ||
bool m_volumeAndMuteInitialized; | ||
MediaTime m_previousDuration; | ||
- | ||
+#if PLATFORM(BCM_NEXUS) | ||
+ mutable GstElement *m_audioDecoder; | ||
+ mutable GstElement *m_videoDecoder; | ||
+#endif | ||
static GstSeekFlags hardwareDependantSeekFlags(); | ||
void readyTimerFired(); | ||
|
||
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp | ||
index 4ea79d5..375b583 100644 | ||
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp | ||
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp | ||
@@ -272,6 +272,9 @@ MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* pl | ||
: m_notifier(MainThreadNotifier<MainThreadNotification>::create()) | ||
, m_player(player) | ||
, m_fpsSink(nullptr) | ||
+#if PLATFORM(BCM_NEXUS) | ||
+ , m_videoBcmSink(nullptr) | ||
+#endif | ||
, m_readyState(MediaPlayer::HaveNothing) | ||
, m_networkState(MediaPlayer::Empty) | ||
, m_isEndReached(false) | ||
@@ -1274,10 +1277,14 @@ unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const | ||
GST_DEBUG("frames decoded: %llu", decodedFrames); | ||
} | ||
#if PLATFORM(BCM_NEXUS) | ||
- GstElement* videoSink = nullptr; | ||
- videoSink = findVideoSink(m_pipeline.get()); | ||
- if (videoSink) { | ||
- g_object_get(videoSink, "frames-rendered", &decodedFrames, nullptr); | ||
+ | ||
+ if (!m_videoBcmSink) { | ||
+ GstElement* videoSink = nullptr; | ||
+ videoSink = findVideoSink(m_pipeline.get()); | ||
+ if (videoSink) | ||
+ m_videoBcmSink = videoSink; | ||
+ } else { | ||
+ g_object_get(m_videoBcmSink, "frames-rendered", &decodedFrames, nullptr); | ||
GST_DEBUG("frames decoded: %llu", decodedFrames); | ||
} | ||
#endif | ||
@@ -1291,10 +1298,13 @@ unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const | ||
g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr); | ||
|
||
#if PLATFORM(BCM_NEXUS) | ||
- GstElement* videoSink = nullptr; | ||
- videoSink = findVideoSink(m_pipeline.get()); | ||
- if (videoSink) { | ||
- g_object_get(videoSink, "frames-dropped", &framesDropped, nullptr); | ||
+ if (!m_videoBcmSink) { | ||
+ GstElement* videoSink = nullptr; | ||
+ videoSink = findVideoSink(m_pipeline.get()); | ||
+ if (videoSink) | ||
+ m_videoBcmSink = videoSink; | ||
+ } else { | ||
+ g_object_get(m_videoBcmSink, "frames-dropped", &framesDropped, nullptr); | ||
GST_DEBUG("frames dropped: %llu", framesDropped); | ||
} | ||
#endif | ||
diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h | ||
index 9e6fbe9..325beaf 100644 | ||
--- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h | ||
+++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.h | ||
@@ -240,6 +240,9 @@ protected: | ||
GRefPtr<GstElement> m_pipeline; | ||
GRefPtr<GstStreamVolume> m_volumeElement; | ||
GRefPtr<GstElement> m_videoSink; | ||
+#if PLATFORM(BCM_NEXUS) | ||
+ mutable GstElement *m_videoBcmSink; | ||
+#endif | ||
GRefPtr<GstElement> m_fpsSink; | ||
MediaPlayer::ReadyState m_readyState; | ||
mutable MediaPlayer::NetworkState m_networkState; | ||
diff --git a/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp b/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp | ||
index 573020c..2334855 100644 | ||
--- a/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp | ||
+++ b/Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp | ||
@@ -626,7 +626,7 @@ void MediaPlayerPrivateGStreamerMSE::updateStates() | ||
ASSERT_NOT_REACHED(); | ||
break; | ||
} | ||
-#if PLATFORM(BROADCOM) | ||
+#if PLATFORM(BCM_NEXUS) | ||
// this code path needs a proper review in case it can be generalized to all platforms. | ||
bool buffering = !isTimeBuffered(currentMediaTime()) && !playbackPipelineHasFutureData(); | ||
#else | ||
@@ -639,16 +639,13 @@ void MediaPlayerPrivateGStreamerMSE::updateStates() | ||
notifyPlayerOfMute(); | ||
m_volumeAndMuteInitialized = true; | ||
} | ||
- | ||
#if PLATFORM(BCM_NEXUS) | ||
- if (!isTimeBuffered(currentMediaTime()) && !playbackPipelineHasFutureData()) { | ||
+ if (buffering) { | ||
m_readyState = MediaPlayer::HaveMetadata; | ||
- } | ||
- else | ||
+ GST_DEBUG("[Buffering] set readystate to HaveMetadata"); | ||
+ } else | ||
#endif | ||
- if (!isTimeBuffered(currentMediaTime()) && !playbackPipelineHasFutureData()) { | ||
- m_readyState = MediaPlayer::HaveMetadata; | ||
- } else if (!seeking() && !buffering && !m_paused && m_playbackRate) { | ||
+ if (!seeking() && !buffering && !m_paused && m_playbackRate) { | ||
GST_DEBUG("[Buffering] Restarting playback."); | ||
changePipelineState(GST_STATE_PLAYING); | ||
} |