From bf59e9eb4d114ffb62e0ccae24caf4f4ce6ed9fd Mon Sep 17 00:00:00 2001 From: Philippe Normand Date: Wed, 1 Feb 2023 12:16:06 +0000 Subject: [PATCH 1/7] wpewebkit: Add option for GstWebRTC support --- package/wpe/wpewebkit/Config.in | 25 ++++++++++++++++++++++--- package/wpe/wpewebkit/wpewebkit.mk | 10 +++++++--- 2 files changed, 29 insertions(+), 6 deletions(-) diff --git a/package/wpe/wpewebkit/Config.in b/package/wpe/wpewebkit/Config.in index ecace712f891..bbdd1f61e513 100644 --- a/package/wpe/wpewebkit/Config.in +++ b/package/wpe/wpewebkit/Config.in @@ -367,13 +367,32 @@ config BR2_PACKAGE_WPEWEBKIT_MULTIMEDIA if BR2_PACKAGE_WPEWEBKIT_MULTIMEDIA +choice + bool "WebRTC support" + default BR2_PACKAGE_WPEWEBKIT_USE_LIBWEBRTC + help + Which WebRTC backend to enable + +config BR2_PACKAGE_WPEWEBKIT_USE_GSTREAMER_WEBRTC + bool "GstWebRTC" + select BR2_PACKAGE_GST1_PLUGINS_BAD_PLUGIN_DTLS + select BR2_PACKAGE_GST1_PLUGINS_BAD_PLUGIN_WEBRTC + select BR2_PACKAGE_GST1_PLUGINS_BAD_PLUGIN_SRTP + select BR2_PACKAGE_GST1_PLUGINS_BAD_PLUGIN_WEBRTCDSP + select BR2_PACKAGE_GST1_PLUGINS_BASE_PLUGIN_VIDEORATE + select BR2_PACKAGE_GST1_PLUGINS_GOOD_PLUGIN_RTP + select BR2_PACKAGE_GST1_PLUGINS_GOOD_PLUGIN_RTPMANAGER + + config BR2_PACKAGE_WPEWEBKIT_USE_LIBWEBRTC bool "LibWebRTC" select BR2_PACKAGE_PULSEAUDIO select BR2_PACKAGE_LIBEVENT - default y - help - Enable LibWebRTC + +config BR2_PACKAGE_WPEWEBKIT_NO_WEBRTC + bool "none" + +endchoice # WebRTC support config BR2_PACKAGE_WPEWEBKIT_USE_GSTREAMER_GL bool "use gstreamer-gl" diff --git a/package/wpe/wpewebkit/wpewebkit.mk b/package/wpe/wpewebkit/wpewebkit.mk index bc4841242001..472a2198b624 100644 --- a/package/wpe/wpewebkit/wpewebkit.mk +++ b/package/wpe/wpewebkit/wpewebkit.mk @@ -196,12 +196,16 @@ ifeq ($(BR2_PACKAGE_WPEWEBKIT_USE_REFTRACKER),y) WPEWEBKIT_CONF_OPTS += -DENABLE_REFTRACKER=ON -DENABLE_JSC_RESTRICTED_OPTIONS_BY_DEFAULT=ON endif -# FIXME: Add an option for USE_GSTREAMER_WEBRTC. Disabling it means libwebrtc is -# enabled, but only if ENABLE_WEB_RTC is ON. 
ifeq ($(BR2_PACKAGE_WPEWEBKIT_USE_LIBWEBRTC),y) WPEWEBKIT_CONF_OPTS += -DUSE_GSTREAMER_WEBRTC=OFF -DENABLE_MEDIA_STREAM=ON -DENABLE_WEB_RTC=ON WPEWEBKIT_DEPENDENCIES += libevent -else +endif + +ifeq ($(BR2_PACKAGE_WPEWEBKIT_USE_GSTREAMER_WEBRTC),y) +WPEWEBKIT_CONF_OPTS += -DUSE_GSTREAMER_WEBRTC=ON -DENABLE_MEDIA_STREAM=ON -DENABLE_WEB_RTC=ON +endif + +ifeq ($(BR2_PACKAGE_WPEWEBKIT_NO_WEBRTC),y) WPEWEBKIT_CONF_OPTS += -DUSE_GSTREAMER_WEBRTC=OFF -DENABLE_MEDIA_STREAM=OFF -DENABLE_WEB_RTC=OFF endif From 47e816227e3da10a28cb0f36f093e3871877baf0 Mon Sep 17 00:00:00 2001 From: Xabier Rodriguez Calvar Date: Wed, 26 Jun 2024 14:18:03 +0200 Subject: [PATCH 2/7] [sagem][board][gstreamer] add GstWebRTC libraries --- board/sagemcom/sagemcom.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/board/sagemcom/sagemcom.txt b/board/sagemcom/sagemcom.txt index a6d54a5075d3..b0642937ef3f 100644 --- a/board/sagemcom/sagemcom.txt +++ b/board/sagemcom/sagemcom.txt @@ -51,6 +51,9 @@ libpcreposix.so libmount.so.1 libintl.so.8 libiconv.so.2 +lib*webrtc* +libsrtp2.so* +libnice.so* ca-certificates.crt libicu*.so libudev.so.1 From be79dcbc8f268f10d2ba230223f2ebc9923019a2 Mon Sep 17 00:00:00 2001 From: Xabier Rodriguez Calvar Date: Fri, 28 Jun 2024 12:26:37 +0200 Subject: [PATCH 3/7] gst1-bcm: fix videosink linkage --- ...sink-add-libbrcmgstutil.la-to-LIBADD.patch | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 package/gstreamer1/gst1-bcm/0003-videosink-add-libbrcmgstutil.la-to-LIBADD.patch diff --git a/package/gstreamer1/gst1-bcm/0003-videosink-add-libbrcmgstutil.la-to-LIBADD.patch b/package/gstreamer1/gst1-bcm/0003-videosink-add-libbrcmgstutil.la-to-LIBADD.patch new file mode 100644 index 000000000000..b9757d35609a --- /dev/null +++ b/package/gstreamer1/gst1-bcm/0003-videosink-add-libbrcmgstutil.la-to-LIBADD.patch @@ -0,0 +1,22 @@ +From 8d4b3b9a1add7b2552fd1f03c7e574a05def074d Mon Sep 17 00:00:00 2001 +From: Xabier Rodriguez Calvar +Date: Thu, 27 Jun 2024 15:17:33 +0200 +Subject: [PATCH] [videosink] add libbrcmgstutil.la to LIBADD + +It was preventing videosink from loading properly in runtime. audiosink uses the same. +--- + reference/videosink/src/Makefile.am | 1 + + 1 file changed, 1 insertion(+) + +diff --git a/reference/videosink/src/Makefile.am b/reference/videosink/src/Makefile.am +index 064abd1..faabae6 100755 +--- a/reference/videosink/src/Makefile.am ++++ b/reference/videosink/src/Makefile.am +@@ -46,3 +46,4 @@ plugin_LTLIBRARIES = libbrcmvideosink.la + libbrcmvideosink_la_SOURCES = gst_brcm_video_sink.c video_sink_properties.c + libbrcmvideosink_la_LDFLAGS = $(AM_LDFLAGS) + libbrcmvideosink_la_LDFLAGS += -module -avoid-version ++libbrcmvideosink_la_LIBADD = $(top_builddir)/reference/util/src/libbrcmgstutil.la +-- +2.43.0 + From 4daa6d06810ed5bc398343bc5c45011279f07a61 Mon Sep 17 00:00:00 2001 From: Philippe Normand Date: Thu, 19 Jan 2023 17:50:14 +0000 Subject: [PATCH 4/7] libnice: Bump to 0.1.22. 
--- package/libnice/libnice.hash | 2 +- package/libnice/libnice.mk | 15 +++++++-------- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/package/libnice/libnice.hash b/package/libnice/libnice.hash index 61be9517e39a..fb0e02041b63 100644 --- a/package/libnice/libnice.hash +++ b/package/libnice/libnice.hash @@ -1,5 +1,5 @@ # Locally computed after checking pgp signature -sha256 06b678066f94dde595a4291588ed27acd085ee73775b8c4e8399e28c01eeefdf libnice-0.1.16.tar.gz +sha256 a5f724cf09eae50c41a7517141d89da4a61ec9eaca32da4a0073faed5417ad7e libnice-0.1.22.tar.gz # Hash for license files: sha256 9246b2ee8b1db30cf03d5d9719ad8bb5edce1cadc85f8cfef319c23d24d950b5 COPYING diff --git a/package/libnice/libnice.mk b/package/libnice/libnice.mk index 8cbf0035295c..1da7ceadeeb7 100644 --- a/package/libnice/libnice.mk +++ b/package/libnice/libnice.mk @@ -4,29 +4,28 @@ # ################################################################################ -LIBNICE_VERSION = 0.1.16 +LIBNICE_VERSION = 0.1.22 LIBNICE_SITE = http://nice.freedesktop.org/releases LIBNICE_LICENSE = MPL-1.1 or LGPL-2.1 LIBNICE_LICENSE_FILES = COPYING COPYING.MPL COPYING.LGPL LIBNICE_DEPENDENCIES = libglib2 host-pkgconf LIBNICE_INSTALL_STAGING = YES -LIBNICE_CONF_OPTS = --without-gstreamer-0.10 +LIBNICE_CONF_OPTS += -Dintrospection=disabled -Dtests=disabled ifeq ($(BR2_PACKAGE_GNUTLS),y) -LIBNICE_CONF_OPTS += --with-crypto-library=gnutls +LIBNICE_CONF_OPTS += -Dcrypto-library=gnutls LIBNICE_DEPENDENCIES += gnutls else LIBNICE_CONF_OPTS += \ - --with-crypto-library=openssl \ - --with-openssl=$(STAGING_DIR)/usr + -Dcrypto-library=openssl LIBNICE_DEPENDENCIES += openssl endif ifeq ($(BR2_PACKAGE_GST1_PLUGINS_BASE),y) -LIBNICE_CONF_OPTS += --with-gstreamer +LIBNICE_CONF_OPTS += -Dgstreamer=enabled LIBNICE_DEPENDENCIES += gst1-plugins-base else -LIBNICE_CONF_OPTS += --without-gstreamer +LIBNICE_CONF_OPTS += -Dgstreamer=disabled endif -$(eval $(autotools-package)) +$(eval $(meson-package)) From 9fdd4d8039a42de2897966c0d15e2f6c957f8355 Mon Sep 17 00:00:00 2001 From: Carlos Bentzen Date: Thu, 19 Jan 2023 17:50:46 +0000 Subject: [PATCH 5/7] gstreamer1: add GstWebRTC backports for 1.18.6 This patch adds patches to gstreamer when building WPE WebRTC support via the GstWebRTC backend. 
--- .../0001-Backports-from-GStreamer-1.22.patch | 31347 ++++++++++++++++ ...elated-backports-from-GStreamer-1.24.patch | 4285 +++ .../0003-WebRTC-Fix-track-events.patch | 766 + .../gst1-plugins-bad/gst1-plugins-bad.mk | 10 +- ...Reuse-sample-object-in-pull_sample-i.patch | 170 + .../0002-Backports-from-GStreamer-1.22.patch | 14696 ++++++++ .../gst1-plugins-base/gst1-plugins-base.mk | 9 +- .../0001-Backports-from-GStreamer-1.22.patch | 25445 +++++++++++++ ...ssion-Fix-twcc-stats-structure-leaks.patch | 62 + .../gst1-plugins-good/gst1-plugins-good.mk | 13 +- .../0001-Backports-from-GStreamer-1.22.patch | 2603 ++ package/gstreamer1/gstreamer1/gstreamer1.mk | 7 + 12 files changed, 79408 insertions(+), 5 deletions(-) create mode 100644 package/gstreamer1/gst1-plugins-bad/1.18.6-gstwebrtc/0001-Backports-from-GStreamer-1.22.patch create mode 100644 package/gstreamer1/gst1-plugins-bad/1.18.6-gstwebrtc/0002-WebRTC-related-backports-from-GStreamer-1.24.patch create mode 100644 package/gstreamer1/gst1-plugins-bad/1.18.6-gstwebrtc/0003-WebRTC-Fix-track-events.patch create mode 100644 package/gstreamer1/gst1-plugins-base/1.18.6-gstwebrtc/0001-Reapply-appsink-Reuse-sample-object-in-pull_sample-i.patch create mode 100644 package/gstreamer1/gst1-plugins-base/1.18.6-gstwebrtc/0002-Backports-from-GStreamer-1.22.patch create mode 100644 package/gstreamer1/gst1-plugins-good/1.18.6-gstwebrtc/0001-Backports-from-GStreamer-1.22.patch create mode 100644 package/gstreamer1/gst1-plugins-good/1.18.6-gstwebrtc/0002-rtpsession-Fix-twcc-stats-structure-leaks.patch create mode 100644 package/gstreamer1/gstreamer1/1.18.6-gstwebrtc/0001-Backports-from-GStreamer-1.22.patch diff --git a/package/gstreamer1/gst1-plugins-bad/1.18.6-gstwebrtc/0001-Backports-from-GStreamer-1.22.patch b/package/gstreamer1/gst1-plugins-bad/1.18.6-gstwebrtc/0001-Backports-from-GStreamer-1.22.patch new file mode 100644 index 000000000000..f9bc12e7215a --- /dev/null +++ b/package/gstreamer1/gst1-plugins-bad/1.18.6-gstwebrtc/0001-Backports-from-GStreamer-1.22.patch @@ -0,0 +1,31347 @@ +From 1eab5f1d49a194f29b820360e39619083bfbafc5 Mon Sep 17 00:00:00 2001 +From: Philippe Normand +Date: Thu, 19 Jan 2023 14:52:31 +0000 +Subject: [PATCH 1/5] Backports from GStreamer 1.22 + +--- + ext/dtls/gstdtlsagent.c | 2 +- + ext/dtls/gstdtlsagent.h | 6 +- + ext/dtls/gstdtlscertificate.c | 17 + + ext/dtls/gstdtlsconnection.c | 28 +- + ext/dtls/gstdtlsconnection.h | 10 +- + ext/dtls/gstdtlsdec.c | 15 +- + ext/dtls/gstdtlselement.c | 44 + + ext/dtls/gstdtlselements.h | 38 + + ext/dtls/gstdtlsenc.c | 23 +- + ext/dtls/gstdtlssrtpbin.c | 3 +- + ext/dtls/gstdtlssrtpdec.c | 16 +- + ext/dtls/gstdtlssrtpdemux.c | 4 + + ext/dtls/gstdtlssrtpenc.c | 42 +- + ext/dtls/meson.build | 2 +- + ext/dtls/plugin.c | 27 +- + ext/sctp/gstsctpdec.c | 17 +- + ext/sctp/gstsctpdec.h | 1 + + ext/sctp/gstsctpenc.c | 129 +- + ext/sctp/gstsctpenc.h | 1 + + ext/sctp/gstsctpplugin.c | 11 +- + ext/sctp/meson.build | 1 - + ext/sctp/sctpassociation.c | 3 +- + ext/sctp/usrsctp/meson.build | 3 +- + ext/srtp/gstsrtp.c | 23 - + ext/srtp/gstsrtpdec.c | 161 +- + ext/srtp/gstsrtpdec.h | 3 +- + ext/srtp/gstsrtpelement.c | 41 + + ext/srtp/gstsrtpelements.h | 63 + + ext/srtp/gstsrtpenc.c | 20 +- + ext/srtp/gstsrtpenc.h | 2 - + ext/srtp/gstsrtpplugin.c | 45 + + ext/srtp/meson.build | 4 +- + ext/webrtc/fwd.h | 12 - + ext/webrtc/gstwebrtcbin.c | 4661 +++++++++++++---- + ext/webrtc/gstwebrtcbin.h | 26 +- + ext/webrtc/gstwebrtcstats.c | 975 +++- + ext/webrtc/gstwebrtcstats.h | 3 +- + ext/webrtc/meson.build | 37 +- + 
ext/webrtc/transportreceivebin.c | 186 +- + ext/webrtc/transportreceivebin.h | 1 + + ext/webrtc/transportsendbin.c | 271 +- + ext/webrtc/transportsendbin.h | 27 +- + ext/webrtc/transportstream.c | 170 +- + ext/webrtc/transportstream.h | 39 +- + ext/webrtc/utils.c | 61 +- + ext/webrtc/utils.h | 25 +- + ext/webrtc/webrtcdatachannel.c | 467 +- + ext/webrtc/webrtcdatachannel.h | 13 +- + ext/webrtc/webrtcsctptransport.c | 251 + + ext/webrtc/webrtcsctptransport.h | 74 + + ext/webrtc/webrtcsdp.c | 61 +- + ext/webrtc/webrtcsdp.h | 3 +- + ext/webrtc/webrtctransceiver.c | 47 +- + ext/webrtc/webrtctransceiver.h | 18 +- + gst-libs/gst/codecparsers/gstav1parser.c | 679 ++- + gst-libs/gst/codecparsers/gstav1parser.h | 70 +- + gst-libs/gst/codecparsers/gsth264bitwriter.c | 1641 ++++++ + gst-libs/gst/codecparsers/gsth264bitwriter.h | 88 + + gst-libs/gst/codecparsers/gsth264parser.c | 476 +- + gst-libs/gst/codecparsers/gsth264parser.h | 166 +- + gst-libs/gst/codecparsers/gsth265bitwriter.c | 2307 ++++++++ + gst-libs/gst/codecparsers/gsth265bitwriter.h | 93 + + gst-libs/gst/codecparsers/gsth265parser.c | 758 ++- + gst-libs/gst/codecparsers/gsth265parser.h | 65 +- + .../gst/codecparsers/gstjpeg2000sampling.c | 6 +- + .../gst/codecparsers/gstjpeg2000sampling.h | 30 +- + gst-libs/gst/codecparsers/gstvp8parser.c | 2 + + gst-libs/gst/codecparsers/gstvp9parser.c | 1 + + gst-libs/gst/codecparsers/gstvp9parser.h | 3 +- + gst-libs/gst/codecparsers/meson.build | 7 +- + gst-libs/gst/codecparsers/nalutils.c | 102 +- + gst-libs/gst/codecparsers/nalutils.h | 3 + + gst-libs/gst/webrtc/datachannel.c | 60 +- + gst-libs/gst/webrtc/datachannel.h | 69 +- + gst-libs/gst/webrtc/dtlstransport.c | 38 +- + gst-libs/gst/webrtc/dtlstransport.h | 33 - + gst-libs/gst/webrtc/ice.c | 622 +++ + gst-libs/gst/webrtc/ice.h | 261 + + gst-libs/gst/webrtc/icestream.c | 137 + + gst-libs/gst/webrtc/icestream.h | 61 + + gst-libs/gst/webrtc/icetransport.c | 8 +- + gst-libs/gst/webrtc/icetransport.h | 9 +- + gst-libs/gst/webrtc/meson.build | 26 +- + gst-libs/gst/webrtc/nice/meson.build | 48 + + gst-libs/gst/webrtc/nice/nice.c | 1677 ++++++ + gst-libs/gst/webrtc/nice/nice.h | 67 + + gst-libs/gst/webrtc/nice/nice_fwd.h | 17 + + gst-libs/gst/webrtc/nice/nicestream.c | 334 ++ + gst-libs/gst/webrtc/nice/nicestream.h | 63 + + gst-libs/gst/webrtc/nice/nicetransport.c | 426 ++ + gst-libs/gst/webrtc/nice/nicetransport.h | 71 + + gst-libs/gst/webrtc/rtpreceiver.c | 54 +- + gst-libs/gst/webrtc/rtpreceiver.h | 30 - + gst-libs/gst/webrtc/rtpsender.c | 101 +- + gst-libs/gst/webrtc/rtpsender.h | 34 +- + gst-libs/gst/webrtc/rtptransceiver.c | 99 +- + gst-libs/gst/webrtc/rtptransceiver.h | 31 - + gst-libs/gst/webrtc/sctptransport.c | 79 + + gst-libs/gst/webrtc/sctptransport.h | 42 + + gst-libs/gst/webrtc/webrtc-priv.h | 274 + + gst-libs/gst/webrtc/webrtc.c | 35 + + gst-libs/gst/webrtc/webrtc.h | 2 + + gst-libs/gst/webrtc/webrtc_fwd.h | 187 +- + gst/videoparsers/gstav1parse.c | 2135 ++++++++ + gst/videoparsers/gstav1parse.h | 34 + + gst/videoparsers/gstdiracparse.c | 3 + + gst/videoparsers/gsth263parse.c | 4 + + gst/videoparsers/gsth264parse.c | 394 +- + gst/videoparsers/gsth264parse.h | 16 +- + gst/videoparsers/gsth265parse.c | 370 +- + gst/videoparsers/gsth265parse.h | 1 + + gst/videoparsers/gstmpeg4videoparse.c | 6 +- + gst/videoparsers/gstmpegvideoparse.c | 4 + + gst/videoparsers/gstpngparse.c | 3 + + gst/videoparsers/gstvc1parse.c | 3 + + gst/videoparsers/gstvideoparserselement.c | 39 + + gst/videoparsers/gstvideoparserselements.h | 46 + + 
gst/videoparsers/gstvideoparseutils.c | 56 + + gst/videoparsers/gstvideoparseutils.h | 20 + + gst/videoparsers/gstvp9parse.c | 897 ++++ + gst/videoparsers/gstvp9parse.h | 34 + + gst/videoparsers/meson.build | 4 +- + gst/videoparsers/plugin.c | 53 +- + 123 files changed, 20647 insertions(+), 3200 deletions(-) + create mode 100644 ext/dtls/gstdtlselement.c + create mode 100644 ext/dtls/gstdtlselements.h + create mode 100644 ext/srtp/gstsrtpelement.c + create mode 100644 ext/srtp/gstsrtpelements.h + create mode 100644 ext/srtp/gstsrtpplugin.c + create mode 100644 ext/webrtc/webrtcsctptransport.c + create mode 100644 ext/webrtc/webrtcsctptransport.h + create mode 100644 gst-libs/gst/codecparsers/gsth264bitwriter.c + create mode 100644 gst-libs/gst/codecparsers/gsth264bitwriter.h + create mode 100644 gst-libs/gst/codecparsers/gsth265bitwriter.c + create mode 100644 gst-libs/gst/codecparsers/gsth265bitwriter.h + create mode 100644 gst-libs/gst/webrtc/ice.c + create mode 100644 gst-libs/gst/webrtc/ice.h + create mode 100644 gst-libs/gst/webrtc/icestream.c + create mode 100644 gst-libs/gst/webrtc/icestream.h + create mode 100644 gst-libs/gst/webrtc/nice/meson.build + create mode 100644 gst-libs/gst/webrtc/nice/nice.c + create mode 100644 gst-libs/gst/webrtc/nice/nice.h + create mode 100644 gst-libs/gst/webrtc/nice/nice_fwd.h + create mode 100644 gst-libs/gst/webrtc/nice/nicestream.c + create mode 100644 gst-libs/gst/webrtc/nice/nicestream.h + create mode 100644 gst-libs/gst/webrtc/nice/nicetransport.c + create mode 100644 gst-libs/gst/webrtc/nice/nicetransport.h + create mode 100644 gst-libs/gst/webrtc/sctptransport.c + create mode 100644 gst-libs/gst/webrtc/sctptransport.h + create mode 100644 gst-libs/gst/webrtc/webrtc-priv.h + create mode 100644 gst-libs/gst/webrtc/webrtc.c + create mode 100644 gst/videoparsers/gstav1parse.c + create mode 100644 gst/videoparsers/gstav1parse.h + create mode 100644 gst/videoparsers/gstvideoparserselement.c + create mode 100644 gst/videoparsers/gstvideoparserselements.h + create mode 100644 gst/videoparsers/gstvp9parse.c + create mode 100644 gst/videoparsers/gstvp9parse.h + +diff --git a/ext/dtls/gstdtlsagent.c b/ext/dtls/gstdtlsagent.c +index 4070c7957..88cfa167f 100644 +--- a/ext/dtls/gstdtlsagent.c ++++ b/ext/dtls/gstdtlsagent.c +@@ -58,7 +58,7 @@ struct _GstDtlsAgentPrivate + GstDtlsCertificate *certificate; + }; + +-G_DEFINE_TYPE_WITH_PRIVATE (GstDtlsAgent, gst_dtls_agent, G_TYPE_OBJECT); ++G_DEFINE_TYPE_WITH_PRIVATE (GstDtlsAgent, gst_dtls_agent, GST_TYPE_OBJECT); + + static void gst_dtls_agent_finalize (GObject * gobject); + static void gst_dtls_agent_set_property (GObject *, guint prop_id, +diff --git a/ext/dtls/gstdtlsagent.h b/ext/dtls/gstdtlsagent.h +index fbfa1e860..b4a4e209b 100644 +--- a/ext/dtls/gstdtlsagent.h ++++ b/ext/dtls/gstdtlsagent.h +@@ -28,7 +28,7 @@ + + #include "gstdtlscertificate.h" + +-#include ++#include + + G_BEGIN_DECLS + +@@ -52,13 +52,13 @@ typedef struct _GstDtlsAgentPrivate GstDtlsAgentPrivate; + * GstDtlsAgent needs to be constructed with the "certificate" property set. 
+ */ + struct _GstDtlsAgent { +- GObject parent_instance; ++ GstObject parent_instance; + + GstDtlsAgentPrivate *priv; + }; + + struct _GstDtlsAgentClass { +- GObjectClass parent_class; ++ GstObjectClass parent_class; + }; + + GType gst_dtls_agent_get_type(void) G_GNUC_CONST; +diff --git a/ext/dtls/gstdtlscertificate.c b/ext/dtls/gstdtlscertificate.c +index d7411c8f4..dc16a0137 100644 +--- a/ext/dtls/gstdtlscertificate.c ++++ b/ext/dtls/gstdtlscertificate.c +@@ -39,6 +39,7 @@ + #endif + + #ifdef G_OS_WIN32 ++#define _WINSOCKAPI_ + #include + #ifdef X509_NAME + #undef X509_NAME +@@ -221,14 +222,24 @@ init_generated (GstDtlsCertificate * self) + #if OPENSSL_VERSION_NUMBER < 0x10100001L + rsa = RSA_generate_key (2048, RSA_F4, NULL, NULL); + #else ++ /* ++ * OpenSSL 3.0 deprecated all low-level APIs, so we need to rewrite this code ++ * to get rid of the warnings. The porting guide explicitly recommends ++ * disabling the warnings if this is not feasible, so let's do that for now: ++ * https://wiki.openssl.org/index.php/OpenSSL_3.0#Upgrading_to_OpenSSL_3.0_from_OpenSSL_1.1.1 ++ */ ++ G_GNUC_BEGIN_IGNORE_DEPRECATIONS; + rsa = RSA_new (); ++ G_GNUC_END_IGNORE_DEPRECATIONS; + if (rsa != NULL) { + BIGNUM *e = BN_new (); ++ G_GNUC_BEGIN_IGNORE_DEPRECATIONS; + if (e == NULL || !BN_set_word (e, RSA_F4) + || !RSA_generate_key_ex (rsa, 2048, e, NULL)) { + RSA_free (rsa); + rsa = NULL; + } ++ G_GNUC_END_IGNORE_DEPRECATIONS; + if (e) + BN_free (e); + } +@@ -236,16 +247,20 @@ init_generated (GstDtlsCertificate * self) + + if (!rsa) { + GST_WARNING_OBJECT (self, "failed to generate RSA"); ++ G_GNUC_BEGIN_IGNORE_DEPRECATIONS; + EVP_PKEY_free (priv->private_key); ++ G_GNUC_END_IGNORE_DEPRECATIONS; + priv->private_key = NULL; + X509_free (priv->x509); + priv->x509 = NULL; + return; + } + ++ G_GNUC_BEGIN_IGNORE_DEPRECATIONS; + if (!EVP_PKEY_assign_RSA (priv->private_key, rsa)) { + GST_WARNING_OBJECT (self, "failed to assign RSA"); + RSA_free (rsa); ++ G_GNUC_END_IGNORE_DEPRECATIONS; + rsa = NULL; + EVP_PKEY_free (priv->private_key); + priv->private_key = NULL; +@@ -259,7 +274,9 @@ init_generated (GstDtlsCertificate * self) + + /* Set a random 64 bit integer as serial number */ + serial_number = BN_new (); ++ G_GNUC_BEGIN_IGNORE_DEPRECATIONS; + BN_pseudo_rand (serial_number, 64, 0, 0); ++ G_GNUC_END_IGNORE_DEPRECATIONS; + asn1_serial_number = X509_get_serialNumber (priv->x509); + BN_to_ASN1_INTEGER (serial_number, asn1_serial_number); + BN_free (serial_number); +diff --git a/ext/dtls/gstdtlsconnection.c b/ext/dtls/gstdtlsconnection.c +index 1c8364a66..8e6512dbc 100644 +--- a/ext/dtls/gstdtlsconnection.c ++++ b/ext/dtls/gstdtlsconnection.c +@@ -101,13 +101,14 @@ struct _GstDtlsConnectionPrivate + GstDtlsConnectionSendCallback send_callback; + gpointer send_callback_user_data; + GDestroyNotify send_callback_destroy_notify; ++ GstFlowReturn syscall_flow_return; + + gboolean timeout_pending; + GThreadPool *thread_pool; + }; + +-G_DEFINE_TYPE_WITH_CODE (GstDtlsConnection, gst_dtls_connection, G_TYPE_OBJECT, +- G_ADD_PRIVATE (GstDtlsConnection) ++G_DEFINE_TYPE_WITH_CODE (GstDtlsConnection, gst_dtls_connection, ++ GST_TYPE_OBJECT, G_ADD_PRIVATE (GstDtlsConnection) + GST_DEBUG_CATEGORY_INIT (gst_dtls_connection_debug, "dtlsconnection", 0, + "DTLS Connection")); + +@@ -448,7 +449,7 @@ gst_dtls_connection_check_timeout_locked (GstDtlsConnection * self) + priv = self->priv; + + if (DTLSv1_get_timeout (priv->ssl, &timeout)) { +- wait_time = timeout.tv_sec * G_USEC_PER_SEC + timeout.tv_usec; ++ wait_time = ((gint64) 
timeout.tv_sec) * G_USEC_PER_SEC + timeout.tv_usec; + + GST_DEBUG_OBJECT (self, "waiting for %" G_GINT64_FORMAT " usec", wait_time); + if (wait_time) { +@@ -600,6 +601,14 @@ gst_dtls_connection_set_send_callback (GstDtlsConnection * self, + g_mutex_unlock (&priv->mutex); + } + ++void ++gst_dtls_connection_set_flow_return (GstDtlsConnection * self, ++ GstFlowReturn flow_ret) ++{ ++ g_return_if_fail (GST_IS_DTLS_CONNECTION (self)); ++ self->priv->syscall_flow_return = flow_ret; ++} ++ + GstFlowReturn + gst_dtls_connection_process (GstDtlsConnection * self, gpointer data, gsize len, + gsize * written, GError ** err) +@@ -1002,13 +1011,19 @@ handle_error (GstDtlsConnection * self, int ret, GstResourceError error_type, + case SSL_ERROR_WANT_WRITE: + GST_LOG_OBJECT (self, "SSL wants write"); + return GST_FLOW_OK; +- case SSL_ERROR_SYSCALL: ++ case SSL_ERROR_SYSCALL:{ ++ GstFlowReturn rc = GST_FLOW_OK; + /* OpenSSL shouldn't be making real system calls, so we can safely + * ignore syscall errors. System interactions should happen through + * our BIO. + */ +- GST_DEBUG_OBJECT (self, "OpenSSL reported a syscall error, ignoring."); +- return GST_FLOW_OK; ++ if (error_type == GST_RESOURCE_ERROR_WRITE) { ++ rc = self->priv->syscall_flow_return; ++ } ++ GST_DEBUG_OBJECT (self, ++ "OpenSSL reported a syscall error. flow_return=%i", rc); ++ return rc; ++ } + default: + if (self->priv->connection_state != GST_DTLS_CONNECTION_STATE_FAILED) { + self->priv->connection_state = GST_DTLS_CONNECTION_STATE_FAILED; +@@ -1182,6 +1197,7 @@ bio_method_write (BIO * bio, const char *data, int size) + gboolean ret = TRUE; + + GST_LOG_OBJECT (self, "BIO: writing %d", size); ++ self->priv->syscall_flow_return = GST_FLOW_OK; + + if (self->priv->send_callback) + ret = self->priv->send_callback (self, data, size, +diff --git a/ext/dtls/gstdtlsconnection.h b/ext/dtls/gstdtlsconnection.h +index b590486b9..82234fafd 100644 +--- a/ext/dtls/gstdtlsconnection.h ++++ b/ext/dtls/gstdtlsconnection.h +@@ -85,13 +85,13 @@ GType gst_dtls_connection_state_get_type (void); + * Once the DTLS handshake is completed, on-encoder-key and on-decoder-key will be signalled. + */ + struct _GstDtlsConnection { +- GObject parent_instance; ++ GstObject parent_instance; + + GstDtlsConnectionPrivate *priv; + }; + + struct _GstDtlsConnectionClass { +- GObjectClass parent_class; ++ GstObjectClass parent_class; + }; + + GType gst_dtls_connection_get_type(void) G_GNUC_CONST; +@@ -118,6 +118,11 @@ typedef gboolean (*GstDtlsConnectionSendCallback) (GstDtlsConnection * connectio + */ + void gst_dtls_connection_set_send_callback(GstDtlsConnection *, GstDtlsConnectionSendCallback, gpointer, GDestroyNotify); + ++/* ++ * Sets the GstFlowReturn that be returned from gst_dtls_connection_send() if callback returns FALSE ++ */ ++void gst_dtls_connection_set_flow_return(GstDtlsConnection *, GstFlowReturn); ++ + /* + * Processes data that has been received, the transformation is done in-place. + * +@@ -142,6 +147,7 @@ GstFlowReturn gst_dtls_connection_process(GstDtlsConnection *, gpointer ptr, gsi + * we received an EOS before. 
+ * - GST_FLOW_ERROR + err if an error happened + * - GST_FLOW_OK + written >= 0 if processing was successful ++ * - Any GstFlowReturn set with gst_dtls_connection_set_flow_return() + */ + GstFlowReturn gst_dtls_connection_send(GstDtlsConnection *, gconstpointer ptr, gsize len, gsize *written, GError **err); + +diff --git a/ext/dtls/gstdtlsdec.c b/ext/dtls/gstdtlsdec.c +index 7b370e26b..ca9f8fd6b 100644 +--- a/ext/dtls/gstdtlsdec.c ++++ b/ext/dtls/gstdtlsdec.c +@@ -27,6 +27,7 @@ + #include "config.h" + #endif + ++#include "gstdtlselements.h" + #include "gstdtlsdec.h" + + #include "gstdtlscertificate.h" +@@ -48,6 +49,8 @@ GST_DEBUG_CATEGORY_STATIC (gst_dtls_dec_debug); + #define gst_dtls_dec_parent_class parent_class + G_DEFINE_TYPE_WITH_CODE (GstDtlsDec, gst_dtls_dec, GST_TYPE_ELEMENT, + GST_DEBUG_CATEGORY_INIT (gst_dtls_dec_debug, "dtlsdec", 0, "DTLS Decoder")); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (dtlsdec, "dtlsdec", GST_RANK_NONE, ++ GST_TYPE_DTLS_DEC, dtls_element_init (plugin)); + + enum + { +@@ -422,7 +425,7 @@ static void + on_key_received (GstDtlsConnection * connection, gpointer key, guint cipher, + guint auth, GstDtlsDec * self) + { +- gpointer key_dup; ++ GstBuffer *new_decoder_key; + gchar *key_str; + + g_return_if_fail (GST_IS_DTLS_DEC (self)); +@@ -430,15 +433,13 @@ on_key_received (GstDtlsConnection * connection, gpointer key, guint cipher, + self->srtp_cipher = cipher; + self->srtp_auth = auth; + +- key_dup = g_memdup (key, GST_DTLS_SRTP_MASTER_KEY_LENGTH); ++ new_decoder_key = ++ gst_buffer_new_memdup (key, GST_DTLS_SRTP_MASTER_KEY_LENGTH); + +- if (self->decoder_key) { ++ if (self->decoder_key) + gst_buffer_unref (self->decoder_key); +- self->decoder_key = NULL; +- } + +- self->decoder_key = +- gst_buffer_new_wrapped (key_dup, GST_DTLS_SRTP_MASTER_KEY_LENGTH); ++ self->decoder_key = new_decoder_key; + + key_str = g_base64_encode (key, GST_DTLS_SRTP_MASTER_KEY_LENGTH); + GST_INFO_OBJECT (self, "received key: %s", key_str); +diff --git a/ext/dtls/gstdtlselement.c b/ext/dtls/gstdtlselement.c +new file mode 100644 +index 000000000..6945339f9 +--- /dev/null ++++ b/ext/dtls/gstdtlselement.c +@@ -0,0 +1,44 @@ ++/* ++ * Copyright (c) 2014, Ericsson AB. All rights reserved. ++ * ++ * Redistribution and use in source and binary forms, with or without modification, ++ * are permitted provided that the following conditions are met: ++ * ++ * 1. Redistributions of source code must retain the above copyright notice, this ++ * list of conditions and the following disclaimer. ++ * ++ * 2. Redistributions in binary form must reproduce the above copyright notice, this ++ * list of conditions and the following disclaimer in the documentation and/or other ++ * materials provided with the distribution. ++ * ++ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ++ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
++ * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, ++ * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT ++ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR ++ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, ++ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ++ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY ++ * OF SUCH DAMAGE. ++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include "gstdtlselements.h" ++#include "gstdtlsconnection.h" ++ ++ ++#include ++ ++void ++dtls_element_init (GstPlugin * plugin) ++{ ++ static gsize res = FALSE; ++ if (g_once_init_enter (&res)) { ++ gst_type_mark_as_plugin_api (GST_DTLS_TYPE_CONNECTION_STATE, 0); ++ g_once_init_leave (&res, TRUE); ++ } ++} +diff --git a/ext/dtls/gstdtlselements.h b/ext/dtls/gstdtlselements.h +new file mode 100644 +index 000000000..50d03284d +--- /dev/null ++++ b/ext/dtls/gstdtlselements.h +@@ -0,0 +1,38 @@ ++/* GStreamer ++ * Copyright (C) <2020> The Gstreamer Contributors. ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++ ++#ifndef __GST_DTLS_ELEMENTS_H__ ++#define __GST_DTLS_ELEMENTS_H__ ++ ++#ifdef HAVE_CONFIG_H ++#include ++#endif ++ ++#include ++ ++void dtls_element_init (GstPlugin * plugin); ++ ++GST_ELEMENT_REGISTER_DECLARE (dtlsdec); ++GST_ELEMENT_REGISTER_DECLARE (dtlsenc); ++GST_ELEMENT_REGISTER_DECLARE (dtlssrtpdec); ++GST_ELEMENT_REGISTER_DECLARE (dtlssrtpdemux); ++GST_ELEMENT_REGISTER_DECLARE (dtlssrtpenc); ++ ++#endif /* __GST_DTLS_ELEMENT_H__ */ +diff --git a/ext/dtls/gstdtlsenc.c b/ext/dtls/gstdtlsenc.c +index e64ee4d6c..d344b96ea 100644 +--- a/ext/dtls/gstdtlsenc.c ++++ b/ext/dtls/gstdtlsenc.c +@@ -27,6 +27,7 @@ + #include "config.h" + #endif + ++#include "gstdtlselements.h" + #include "gstdtlsenc.h" + + #include "gstdtlsdec.h" +@@ -48,6 +49,8 @@ GST_DEBUG_CATEGORY_STATIC (gst_dtls_enc_debug); + #define gst_dtls_enc_parent_class parent_class + G_DEFINE_TYPE_WITH_CODE (GstDtlsEnc, gst_dtls_enc, GST_TYPE_ELEMENT, + GST_DEBUG_CATEGORY_INIT (gst_dtls_enc_debug, "dtlsenc", 0, "DTLS Encoder")); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (dtlsenc, "dtlsenc", GST_RANK_NONE, ++ GST_TYPE_DTLS_ENC, dtls_element_init (plugin)); + + enum + { +@@ -562,6 +565,9 @@ sink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + GST_ELEMENT_ERROR (self, RESOURCE, WRITE, (NULL), ("%s", err->message)); + g_clear_error (&err); + break; ++ case GST_FLOW_FLUSHING: ++ GST_INFO_OBJECT (self, "Flushing"); ++ break; + default: + g_assert_not_reached (); + break; +@@ -626,7 +632,7 @@ static void + on_key_received (GstDtlsConnection * connection, gpointer key, guint cipher, + guint auth, GstDtlsEnc * self) + { +- gpointer key_dup; ++ GstBuffer *new_encoder_key; + gchar *key_str; + + g_return_if_fail (GST_IS_DTLS_ENC (self)); +@@ -635,15 +641,13 @@ on_key_received (GstDtlsConnection * connection, gpointer key, guint cipher, + self->srtp_cipher = cipher; + self->srtp_auth = auth; + +- key_dup = g_memdup (key, GST_DTLS_SRTP_MASTER_KEY_LENGTH); ++ new_encoder_key = ++ gst_buffer_new_memdup (key, GST_DTLS_SRTP_MASTER_KEY_LENGTH); + +- if (self->encoder_key) { ++ if (self->encoder_key) + gst_buffer_unref (self->encoder_key); +- self->encoder_key = NULL; +- } + +- self->encoder_key = +- gst_buffer_new_wrapped (key_dup, GST_DTLS_SRTP_MASTER_KEY_LENGTH); ++ self->encoder_key = new_encoder_key; + + key_str = g_base64_encode (key, GST_DTLS_SRTP_MASTER_KEY_LENGTH); + GST_INFO_OBJECT (self, "received key: %s", key_str); +@@ -662,8 +666,7 @@ on_send_data (GstDtlsConnection * connection, gconstpointer data, gsize length, + GST_DEBUG_OBJECT (self, "sending data from %s with length %" G_GSIZE_FORMAT, + self->connection_id, length); + +- buffer = +- data ? gst_buffer_new_wrapped (g_memdup (data, length), length) : NULL; ++ buffer = data ? 
gst_buffer_new_memdup (data, length) : NULL; + + GST_TRACE_OBJECT (self, "send data: acquiring lock"); + g_mutex_lock (&self->queue_lock); +@@ -677,6 +680,8 @@ on_send_data (GstDtlsConnection * connection, gconstpointer data, gsize length, + GST_TRACE_OBJECT (self, "send data: releasing lock"); + + ret = self->src_ret == GST_FLOW_OK; ++ if (self->src_ret == GST_FLOW_FLUSHING) ++ gst_dtls_connection_set_flow_return (connection, self->src_ret); + g_mutex_unlock (&self->queue_lock); + + return ret; +diff --git a/ext/dtls/gstdtlssrtpbin.c b/ext/dtls/gstdtlssrtpbin.c +index acbac30ea..2a786e9c5 100644 +--- a/ext/dtls/gstdtlssrtpbin.c ++++ b/ext/dtls/gstdtlssrtpbin.c +@@ -218,7 +218,8 @@ gst_dtls_srtp_bin_get_property (GObject * object, + g_object_get_property (G_OBJECT (self->dtls_element), "connection-id", + value); + } else { +- g_warning ("tried to get connection-id after disabling DTLS"); ++ GST_WARNING_OBJECT (self, ++ "tried to get connection-id after disabling DTLS"); + } + break; + case PROP_KEY: +diff --git a/ext/dtls/gstdtlssrtpdec.c b/ext/dtls/gstdtlssrtpdec.c +index 72abfdacb..f441e253f 100644 +--- a/ext/dtls/gstdtlssrtpdec.c ++++ b/ext/dtls/gstdtlssrtpdec.c +@@ -27,6 +27,7 @@ + #include "config.h" + #endif + ++#include "gstdtlselements.h" + #include "gstdtlssrtpdec.h" + #include "gstdtlsconnection.h" + +@@ -61,7 +62,9 @@ GST_DEBUG_CATEGORY_STATIC (gst_dtls_srtp_dec_debug); + #define gst_dtls_srtp_dec_parent_class parent_class + G_DEFINE_TYPE_WITH_CODE (GstDtlsSrtpDec, gst_dtls_srtp_dec, + GST_TYPE_DTLS_SRTP_BIN, GST_DEBUG_CATEGORY_INIT (gst_dtls_srtp_dec_debug, +- "dtlssrtpdec", 0, "DTLS Decoder")); ++ "dtlssrtpdec", 0, "DTLS-SRTP Decoder")); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (dtlssrtpdec, "dtlssrtpdec", ++ GST_RANK_NONE, GST_TYPE_DTLS_SRTP_DEC, dtls_element_init (plugin)); + + enum + { +@@ -288,8 +291,13 @@ gst_dtls_srtp_dec_get_property (GObject * object, + } + break; + case PROP_CONNECTION_STATE: +- g_object_get_property (G_OBJECT (self->bin.dtls_element), +- "connection-state", value); ++ if (self->bin.dtls_element) { ++ g_object_get_property (G_OBJECT (self->bin.dtls_element), ++ "connection-state", value); ++ } else { ++ GST_WARNING_OBJECT (self, ++ "tried to get connection-state after disabling DTLS"); ++ } + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec); +@@ -313,7 +321,7 @@ gst_dtls_srtp_dec_request_new_pad (GstElement * element, + if (templ == gst_element_class_get_pad_template (klass, "data_src")) { + GstPad *target_pad; + +- target_pad = gst_element_get_request_pad (self->bin.dtls_element, "src"); ++ target_pad = gst_element_request_pad_simple (self->bin.dtls_element, "src"); + + ghost_pad = gst_ghost_pad_new_from_template (name, target_pad, templ); + gst_object_unref (target_pad); +diff --git a/ext/dtls/gstdtlssrtpdemux.c b/ext/dtls/gstdtlssrtpdemux.c +index ab1ef7082..ca7985be1 100644 +--- a/ext/dtls/gstdtlssrtpdemux.c ++++ b/ext/dtls/gstdtlssrtpdemux.c +@@ -27,6 +27,7 @@ + #include "config.h" + #endif + ++#include "gstdtlselements.h" + #include "gstdtlssrtpdemux.h" + + #define PACKET_IS_DTLS(b) (b > 0x13 && b < 0x40) +@@ -59,6 +60,9 @@ GST_DEBUG_CATEGORY_STATIC (gst_gst_dtls_srtp_demux_debug); + G_DEFINE_TYPE_WITH_CODE (GstDtlsSrtpDemux, gst_dtls_srtp_demux, + GST_TYPE_ELEMENT, GST_DEBUG_CATEGORY_INIT (gst_gst_dtls_srtp_demux_debug, + "dtlssrtpdemux", 0, "DTLS SRTP Demultiplexer")); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (dtlssrtpdemux, "dtlssrtpdemux", ++ GST_RANK_NONE, GST_TYPE_DTLS_SRTP_DEMUX, dtls_element_init (plugin)); ++ + 
+ static GstFlowReturn sink_chain (GstPad *, GstObject * self, GstBuffer *); + +diff --git a/ext/dtls/gstdtlssrtpenc.c b/ext/dtls/gstdtlssrtpenc.c +index 283ad9db6..c3f1c581b 100644 +--- a/ext/dtls/gstdtlssrtpenc.c ++++ b/ext/dtls/gstdtlssrtpenc.c +@@ -27,6 +27,7 @@ + #include "config.h" + #endif + ++#include "gstdtlselements.h" + #include "gstdtlssrtpenc.h" + #include "gstdtlsconnection.h" + +@@ -62,8 +63,11 @@ GST_DEBUG_CATEGORY_STATIC (gst_dtls_srtp_enc_debug); + + #define gst_dtls_srtp_enc_parent_class parent_class + G_DEFINE_TYPE_WITH_CODE (GstDtlsSrtpEnc, gst_dtls_srtp_enc, +- GST_TYPE_DTLS_SRTP_BIN, GST_DEBUG_CATEGORY_INIT (gst_dtls_srtp_enc_debug, +- "dtlssrtpenc", 0, "DTLS Decoder")); ++ GST_TYPE_DTLS_SRTP_BIN, ++ GST_DEBUG_CATEGORY_INIT (gst_dtls_srtp_enc_debug, ++ "dtlssrtpenc", 0, "DTLS-SRTP Encoder")); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (dtlssrtpenc, "dtlssrtpenc", ++ GST_RANK_NONE, GST_TYPE_DTLS_SRTP_ENC, dtls_element_init (plugin)); + + enum + { +@@ -265,12 +269,21 @@ gst_dtls_srtp_enc_init (GstDtlsSrtpEnc * self) + NULL, auth_enum_class, NULL); + } + ++#if GLIB_CHECK_VERSION(2,68,0) ++#define binding_get_source(b) g_binding_dup_source(b) ++#define unref_source(s) G_STMT_START { if(s) g_object_unref(s); } G_STMT_END ++#else ++#define binding_get_source(b) g_binding_get_source(b) ++#define unref_source(s) /* no op */ ++#endif ++ + static gboolean + transform_enum (GBinding * binding, const GValue * source_value, + GValue * target_value, GEnumClass * enum_class) + { + GEnumValue *enum_value; + const gchar *nick; ++ GObject *bind_src; + + nick = g_value_get_string (source_value); + g_return_val_if_fail (nick, FALSE); +@@ -278,9 +291,13 @@ transform_enum (GBinding * binding, const GValue * source_value, + enum_value = g_enum_get_value_by_nick (enum_class, nick); + g_return_val_if_fail (enum_value, FALSE); + +- GST_DEBUG_OBJECT (g_binding_get_source (binding), ++ bind_src = binding_get_source (binding); ++ ++ GST_DEBUG_OBJECT (bind_src, + "transforming enum from %s to %d", nick, enum_value->value); + ++ unref_source (bind_src); ++ + g_value_set_enum (target_value, enum_value->value); + + return TRUE; +@@ -327,8 +344,13 @@ gst_dtls_srtp_enc_get_property (GObject * object, + } + break; + case PROP_CONNECTION_STATE: +- g_object_get_property (G_OBJECT (self->bin.dtls_element), +- "connection-state", value); ++ if (self->bin.dtls_element) { ++ g_object_get_property (G_OBJECT (self->bin.dtls_element), ++ "connection-state", value); ++ } else { ++ GST_WARNING_OBJECT (self, ++ "tried to get connection-state after disabling DTLS"); ++ } + break; + case PROP_RTP_SYNC: + g_value_set_boolean (value, self->rtp_sync); +@@ -374,6 +396,9 @@ gst_dtls_srtp_enc_request_new_pad (GstElement * element, + g_return_val_if_fail (templ->direction == GST_PAD_SINK, NULL); + g_return_val_if_fail (self->srtp_enc, NULL); + ++ if (name == NULL) ++ return NULL; ++ + if (templ == gst_element_class_get_pad_template (klass, "rtp_sink_%d")) { + gchar *clocksync_name; + GstElement *clocksync; +@@ -394,7 +419,7 @@ gst_dtls_srtp_enc_request_new_pad (GstElement * element, + gst_bin_add (GST_BIN (self), clocksync); + gst_element_sync_state_with_parent (clocksync); + +- target_pad = gst_element_get_request_pad (self->srtp_enc, name); ++ target_pad = gst_element_request_pad_simple (self->srtp_enc, name); + g_return_val_if_fail (target_pad, NULL); + + srtp_src_name = g_strdup_printf ("rtp_src_%d", pad_n); +@@ -409,7 +434,7 @@ gst_dtls_srtp_enc_request_new_pad (GstElement * element, + GST_LOG_OBJECT (self, "added rtp 
sink pad"); + } else if (templ == gst_element_class_get_pad_template (klass, + "rtcp_sink_%d")) { +- target_pad = gst_element_get_request_pad (self->srtp_enc, name); ++ target_pad = gst_element_request_pad_simple (self->srtp_enc, name); + g_return_val_if_fail (target_pad, NULL); + + sscanf (GST_PAD_NAME (target_pad), "rtcp_sink_%d", &pad_n); +@@ -424,7 +449,8 @@ gst_dtls_srtp_enc_request_new_pad (GstElement * element, + GST_LOG_OBJECT (self, "added rtcp sink pad"); + } else if (templ == gst_element_class_get_pad_template (klass, "data_sink")) { + g_return_val_if_fail (self->bin.dtls_element, NULL); +- target_pad = gst_element_get_request_pad (self->bin.dtls_element, "sink"); ++ target_pad = ++ gst_element_request_pad_simple (self->bin.dtls_element, "sink"); + + ghost_pad = add_ghost_pad (element, name, target_pad, templ); + +diff --git a/ext/dtls/meson.build b/ext/dtls/meson.build +index 74babae17..afdd8d224 100644 +--- a/ext/dtls/meson.build ++++ b/ext/dtls/meson.build +@@ -9,6 +9,7 @@ dtls_sources = [ + 'gstdtlssrtpdemux.c', + 'gstdtlssrtpenc.c', + 'plugin.c', ++ 'gstdtlselement.c', + ] + + openssl_dep = dependency('openssl', version : '>= 1.0.1', required : get_option('dtls')) +@@ -23,6 +24,5 @@ if openssl_dep.found() and libcrypto_dep.found() + install : true, + install_dir : plugins_install_dir, + ) +- pkgconfig.generate(gstdtls, install_dir : plugins_pkgconfig_install_dir) + plugins += [gstdtls] + endif +diff --git a/ext/dtls/plugin.c b/ext/dtls/plugin.c +index 78a998060..679744965 100644 +--- a/ext/dtls/plugin.c ++++ b/ext/dtls/plugin.c +@@ -27,29 +27,22 @@ + #include "config.h" + #endif + +-#include "gstdtlsdec.h" +-#include "gstdtlsenc.h" +-#include "gstdtlssrtpenc.h" +-#include "gstdtlssrtpdec.h" +-#include "gstdtlssrtpdemux.h" +- + #include + ++#include "gstdtlselements.h" ++ + static gboolean + plugin_init (GstPlugin * plugin) + { +- gst_type_mark_as_plugin_api (GST_DTLS_TYPE_CONNECTION_STATE, 0); ++ gboolean ret = FALSE; ++ ++ ret |= GST_ELEMENT_REGISTER (dtlsenc, plugin); ++ ret |= GST_ELEMENT_REGISTER (dtlsdec, plugin); ++ ret |= GST_ELEMENT_REGISTER (dtlssrtpdec, plugin); ++ ret |= GST_ELEMENT_REGISTER (dtlssrtpenc, plugin); ++ ret |= GST_ELEMENT_REGISTER (dtlssrtpdemux, plugin); + +- return gst_element_register (plugin, "dtlsenc", GST_RANK_NONE, +- GST_TYPE_DTLS_ENC) +- && gst_element_register (plugin, "dtlsdec", GST_RANK_NONE, +- GST_TYPE_DTLS_DEC) +- && gst_element_register (plugin, "dtlssrtpdec", GST_RANK_NONE, +- GST_TYPE_DTLS_SRTP_DEC) +- && gst_element_register (plugin, "dtlssrtpenc", GST_RANK_NONE, +- GST_TYPE_DTLS_SRTP_ENC) +- && gst_element_register (plugin, "dtlssrtpdemux", GST_RANK_NONE, +- GST_TYPE_DTLS_SRTP_DEMUX); ++ return ret; + } + + GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, +diff --git a/ext/sctp/gstsctpdec.c b/ext/sctp/gstsctpdec.c +index c9f6d0827..a90f89428 100644 +--- a/ext/sctp/gstsctpdec.c ++++ b/ext/sctp/gstsctpdec.c +@@ -39,6 +39,8 @@ GST_DEBUG_CATEGORY_STATIC (gst_sctp_dec_debug_category); + + #define gst_sctp_dec_parent_class parent_class + G_DEFINE_TYPE (GstSctpDec, gst_sctp_dec, GST_TYPE_ELEMENT); ++GST_ELEMENT_REGISTER_DEFINE (sctpdec, "sctpdec", GST_RANK_NONE, ++ GST_TYPE_SCTP_DEC); + + static GstStaticPadTemplate sink_template = + GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, +@@ -604,8 +606,11 @@ static void + remove_pad (GstSctpDec * self, GstPad * pad) + { + stop_srcpad_task (pad); ++ GST_PAD_STREAM_LOCK (pad); + gst_pad_set_active (pad, FALSE); +- gst_element_remove_pad (GST_ELEMENT (self), pad); ++ if (gst_object_has_as_parent (GST_OBJECT 
(pad), GST_OBJECT (self))) ++ gst_element_remove_pad (GST_ELEMENT (self), pad); ++ GST_PAD_STREAM_UNLOCK (pad); + GST_OBJECT_LOCK (self); + gst_flow_combiner_remove_pad (self->flow_combiner, pad); + GST_OBJECT_UNLOCK (self); +@@ -624,8 +629,14 @@ on_gst_sctp_association_stream_reset (GstSctpAssociation * gst_sctp_association, + srcpad = gst_element_get_static_pad (GST_ELEMENT (self), pad_name); + g_free (pad_name); + if (!srcpad) { +- GST_WARNING_OBJECT (self, "Reset called on stream without a srcpad"); +- return; ++ /* This can happen if a stream is created but the peer never sends any data. ++ * We still need to signal the reset by removing the relevant pad. To do ++ * that, we need to add the relevant pad first. */ ++ srcpad = get_pad_for_stream_id (self, stream_id); ++ if (!srcpad) { ++ GST_WARNING_OBJECT (self, "Reset called on stream without a srcpad"); ++ return; ++ } + } + remove_pad (self, srcpad); + gst_object_unref (srcpad); +diff --git a/ext/sctp/gstsctpdec.h b/ext/sctp/gstsctpdec.h +index 6a5591f55..c6c898657 100644 +--- a/ext/sctp/gstsctpdec.h ++++ b/ext/sctp/gstsctpdec.h +@@ -63,6 +63,7 @@ struct _GstSctpDecClass + }; + + GType gst_sctp_dec_get_type (void); ++GST_ELEMENT_REGISTER_DECLARE (sctpdec); + + G_END_DECLS + +diff --git a/ext/sctp/gstsctpenc.c b/ext/sctp/gstsctpenc.c +index 41590b9fc..2fcbecebb 100644 +--- a/ext/sctp/gstsctpenc.c ++++ b/ext/sctp/gstsctpenc.c +@@ -36,6 +36,8 @@ GST_DEBUG_CATEGORY_STATIC (gst_sctp_enc_debug_category); + + #define gst_sctp_enc_parent_class parent_class + G_DEFINE_TYPE (GstSctpEnc, gst_sctp_enc, GST_TYPE_ELEMENT); ++GST_ELEMENT_REGISTER_DEFINE (sctpenc, "sctpenc", GST_RANK_NONE, ++ GST_TYPE_SCTP_ENC); + + static GstStaticPadTemplate sink_template = + GST_STATIC_PAD_TEMPLATE ("sink_%u", GST_PAD_SINK, +@@ -101,6 +103,7 @@ struct _GstSctpEncPad + GMutex lock; + GCond cond; + gboolean flushing; ++ gboolean clear_to_send; + }; + + G_DEFINE_TYPE (GstSctpEncPad, gst_sctp_enc_pad, GST_TYPE_PAD); +@@ -130,6 +133,7 @@ gst_sctp_enc_pad_init (GstSctpEncPad * self) + g_mutex_init (&self->lock); + g_cond_init (&self->cond); + self->flushing = FALSE; ++ self->clear_to_send = FALSE; + } + + static void gst_sctp_enc_finalize (GObject * object); +@@ -327,6 +331,34 @@ gst_sctp_enc_get_property (GObject * object, guint prop_id, GValue * value, + } + } + ++static void ++flush_sinkpad (const GValue * item, gpointer user_data) ++{ ++ GstSctpEncPad *sctpenc_pad = g_value_get_object (item); ++ gboolean flush = GPOINTER_TO_INT (user_data); ++ ++ if (flush) { ++ g_mutex_lock (&sctpenc_pad->lock); ++ sctpenc_pad->flushing = TRUE; ++ g_cond_signal (&sctpenc_pad->cond); ++ g_mutex_unlock (&sctpenc_pad->lock); ++ } else { ++ sctpenc_pad->flushing = FALSE; ++ } ++} ++ ++static void ++flush_sinkpads (GstSctpEnc * self, gboolean state) ++{ ++ GstIterator *it; ++ ++ it = gst_element_iterate_sink_pads (GST_ELEMENT (self)); ++ while (gst_iterator_foreach (it, flush_sinkpad, ++ GINT_TO_POINTER (state)) == GST_ITERATOR_RESYNC) ++ gst_iterator_resync (it); ++ gst_iterator_free (it); ++} ++ + static GstStateChangeReturn + gst_sctp_enc_change_state (GstElement * element, GstStateChange transition) + { +@@ -347,6 +379,7 @@ gst_sctp_enc_change_state (GstElement * element, GstStateChange transition) + break; + case GST_STATE_CHANGE_PAUSED_TO_READY: + stop_srcpad_task (self->src_pad, self); ++ flush_sinkpads (self, TRUE); + self->src_ret = GST_FLOW_FLUSHING; + break; + case GST_STATE_CHANGE_READY_TO_NULL: +@@ -481,7 +514,10 @@ gst_sctp_enc_release_pad (GstElement * element, GstPad * 
pad) + if (self->sctp_association) + gst_sctp_association_reset_stream (self->sctp_association, stream_id); + +- gst_element_remove_pad (element, pad); ++ GST_PAD_STREAM_LOCK (pad); ++ if (gst_object_has_as_parent (GST_OBJECT (pad), GST_OBJECT (element))) ++ gst_element_remove_pad (element, pad); ++ GST_PAD_STREAM_UNLOCK (pad); + } + + static void +@@ -558,6 +594,7 @@ gst_sctp_enc_sink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + { + GstSctpEnc *self = GST_SCTP_ENC (parent); + GstSctpEncPad *sctpenc_pad = GST_SCTP_ENC_PAD (pad); ++ GstSctpEncPad *sctpenc_pad_next = NULL; + GstMapInfo map; + guint32 ppid; + gboolean ordered; +@@ -569,6 +606,7 @@ gst_sctp_enc_sink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + GstFlowReturn flow_ret = GST_FLOW_ERROR; + const guint8 *data; + guint32 length; ++ gboolean clear_to_send; + + GST_OBJECT_LOCK (self); + if (self->src_ret != GST_FLOW_OK) { +@@ -624,7 +662,21 @@ gst_sctp_enc_sink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + data = map.data; + length = map.size; + ++ GST_OBJECT_LOCK (self); ++ clear_to_send = g_queue_is_empty (&self->pending_pads); ++ g_queue_push_tail (&self->pending_pads, sctpenc_pad); ++ GST_OBJECT_UNLOCK (self); ++ + g_mutex_lock (&sctpenc_pad->lock); ++ ++ if (clear_to_send) { ++ sctpenc_pad->clear_to_send = TRUE; ++ } ++ ++ while (!sctpenc_pad->flushing && !sctpenc_pad->clear_to_send) { ++ g_cond_wait (&sctpenc_pad->cond, &sctpenc_pad->lock); ++ } ++ + while (!sctpenc_pad->flushing) { + guint32 bytes_sent; + +@@ -653,15 +705,8 @@ gst_sctp_enc_sink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + length -= bytes_sent; + + /* The buffer was probably full. Retry in a while */ +- GST_OBJECT_LOCK (self); +- g_queue_push_tail (&self->pending_pads, sctpenc_pad); +- GST_OBJECT_UNLOCK (self); +- + g_cond_wait_until (&sctpenc_pad->cond, &sctpenc_pad->lock, end_time); + +- GST_OBJECT_LOCK (self); +- g_queue_remove (&self->pending_pads, sctpenc_pad); +- GST_OBJECT_UNLOCK (self); + } else if (bytes_sent == length) { + GST_DEBUG_OBJECT (pad, "Successfully sent buffer"); + sctpenc_pad->bytes_sent += bytes_sent; +@@ -671,8 +716,21 @@ gst_sctp_enc_sink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + flow_ret = sctpenc_pad->flushing ? 
GST_FLOW_FLUSHING : GST_FLOW_OK; + + out: ++ sctpenc_pad->clear_to_send = FALSE; + g_mutex_unlock (&sctpenc_pad->lock); + ++ GST_OBJECT_LOCK (self); ++ g_queue_remove (&self->pending_pads, sctpenc_pad); ++ sctpenc_pad_next = g_queue_peek_head (&self->pending_pads); ++ GST_OBJECT_UNLOCK (self); ++ ++ if (sctpenc_pad_next) { ++ g_mutex_lock (&sctpenc_pad_next->lock); ++ sctpenc_pad_next->clear_to_send = TRUE; ++ g_cond_signal (&sctpenc_pad_next->cond); ++ g_mutex_unlock (&sctpenc_pad_next->lock); ++ } ++ + gst_buffer_unmap (buffer, &map); + error: + gst_buffer_unref (buffer); +@@ -735,22 +793,6 @@ gst_sctp_enc_sink_event (GstPad * pad, GstObject * parent, GstEvent * event) + return ret; + } + +-static void +-flush_sinkpad (const GValue * item, gpointer user_data) +-{ +- GstSctpEncPad *sctpenc_pad = g_value_get_object (item); +- gboolean flush = GPOINTER_TO_INT (user_data); +- +- if (flush) { +- g_mutex_lock (&sctpenc_pad->lock); +- sctpenc_pad->flushing = TRUE; +- g_cond_signal (&sctpenc_pad->cond); +- g_mutex_unlock (&sctpenc_pad->lock); +- } else { +- sctpenc_pad->flushing = FALSE; +- } +-} +- + static gboolean + gst_sctp_enc_src_event (GstPad * pad, GstObject * parent, GstEvent * event) + { +@@ -759,29 +801,17 @@ gst_sctp_enc_src_event (GstPad * pad, GstObject * parent, GstEvent * event) + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_FLUSH_START:{ +- GstIterator *it; +- + gst_data_queue_set_flushing (self->outbound_sctp_packet_queue, TRUE); + gst_data_queue_flush (self->outbound_sctp_packet_queue); + +- it = gst_element_iterate_sink_pads (GST_ELEMENT (self)); +- while (gst_iterator_foreach (it, flush_sinkpad, +- GINT_TO_POINTER (TRUE)) == GST_ITERATOR_RESYNC) +- gst_iterator_resync (it); +- gst_iterator_free (it); ++ flush_sinkpads (self, TRUE); + + ret = gst_pad_event_default (pad, parent, event); + break; + } + case GST_EVENT_RECONFIGURE: + case GST_EVENT_FLUSH_STOP:{ +- GstIterator *it; +- +- it = gst_element_iterate_sink_pads (GST_ELEMENT (self)); +- while (gst_iterator_foreach (it, flush_sinkpad, +- GINT_TO_POINTER (FALSE)) == GST_ITERATOR_RESYNC) +- gst_iterator_resync (it); +- gst_iterator_free (it); ++ flush_sinkpads (self, FALSE); + + gst_data_queue_set_flushing (self->outbound_sctp_packet_queue, FALSE); + self->need_segment = TRUE; +@@ -885,13 +915,12 @@ on_sctp_packet_out (GstSctpAssociation * _association, const guint8 * buf, + GstSctpEnc *self = user_data; + GstBuffer *gstbuf; + GstDataQueueItem *item; +- GList *pending_pads, *l; + GstSctpEncPad *sctpenc_pad; + + GST_DEBUG_OBJECT (self, "Received output packet of size %" G_GSIZE_FORMAT, + length); + +- gstbuf = gst_buffer_new_wrapped (g_memdup (buf, length), length); ++ gstbuf = gst_buffer_new_memdup (buf, length); + + item = g_new0 (GstDataQueueItem, 1); + item->object = GST_MINI_OBJECT (gstbuf); +@@ -904,21 +933,22 @@ on_sctp_packet_out (GstSctpAssociation * _association, const guint8 * buf, + GST_DEBUG_OBJECT (self, "Failed to push item because we're flushing"); + } + +- /* Wake up pads in the order they waited, oldest pad first */ ++ /* Wake up the oldest pad which is the one that needs to finish first */ + GST_OBJECT_LOCK (self); +- pending_pads = NULL; +- while ((sctpenc_pad = g_queue_pop_tail (&self->pending_pads))) { +- pending_pads = g_list_prepend (pending_pads, sctpenc_pad); +- } +- GST_OBJECT_UNLOCK (self); ++ sctpenc_pad = g_queue_peek_head (&self->pending_pads); ++ if (sctpenc_pad) { ++ gst_object_ref (sctpenc_pad); ++ ++ GST_OBJECT_UNLOCK (self); + +- for (l = pending_pads; l; l = l->next) { +- 
sctpenc_pad = l->data; + g_mutex_lock (&sctpenc_pad->lock); + g_cond_signal (&sctpenc_pad->cond); + g_mutex_unlock (&sctpenc_pad->lock); ++ ++ gst_object_unref (sctpenc_pad); ++ } else { ++ GST_OBJECT_UNLOCK (self); + } +- g_list_free (pending_pads); + } + + static void +@@ -948,7 +978,6 @@ sctpenc_cleanup (GstSctpEnc * self) + + g_signal_handler_disconnect (self->sctp_association, + self->signal_handler_state_changed); +- stop_srcpad_task (self->src_pad, self); + gst_sctp_association_force_close (self->sctp_association); + g_object_unref (self->sctp_association); + self->sctp_association = NULL; +diff --git a/ext/sctp/gstsctpenc.h b/ext/sctp/gstsctpenc.h +index fd4e28e4f..482473d74 100644 +--- a/ext/sctp/gstsctpenc.h ++++ b/ext/sctp/gstsctpenc.h +@@ -72,6 +72,7 @@ struct _GstSctpEncClass + }; + + GType gst_sctp_enc_get_type (void); ++GST_ELEMENT_REGISTER_DECLARE (sctpenc); + + G_END_DECLS + +diff --git a/ext/sctp/gstsctpplugin.c b/ext/sctp/gstsctpplugin.c +index 888a94c84..9f3400ecf 100644 +--- a/ext/sctp/gstsctpplugin.c ++++ b/ext/sctp/gstsctpplugin.c +@@ -35,12 +35,13 @@ + static gboolean + plugin_init (GstPlugin * plugin) + { +- return gst_element_register (plugin, "sctpenc", GST_RANK_NONE, +- GST_TYPE_SCTP_ENC) +- && gst_element_register (plugin, "sctpdec", GST_RANK_NONE, +- GST_TYPE_SCTP_DEC); +-} ++ gboolean ret = FALSE; ++ ++ ret |= GST_ELEMENT_REGISTER (sctpenc, plugin); ++ ret |= GST_ELEMENT_REGISTER (sctpdec, plugin); + ++ return ret; ++} + + #ifndef PACKAGE + #define PACKAGE "sctp" +diff --git a/ext/sctp/meson.build b/ext/sctp/meson.build +index 93f29d7f1..6b3d7f249 100644 +--- a/ext/sctp/meson.build ++++ b/ext/sctp/meson.build +@@ -57,6 +57,5 @@ if sctp_dep.found() and sctp_header + install : true, + install_dir : plugins_install_dir, + ) +- pkgconfig.generate(gstsctp, install_dir : plugins_pkgconfig_install_dir) + plugins += [gstsctp] + endif +diff --git a/ext/sctp/sctpassociation.c b/ext/sctp/sctpassociation.c +index fbf5b4afe..68c05e62f 100644 +--- a/ext/sctp/sctpassociation.c ++++ b/ext/sctp/sctpassociation.c +@@ -234,7 +234,7 @@ gst_sctp_association_init (GstSctpAssociation * self) + + self->state = GST_SCTP_ASSOCIATION_STATE_NEW; + +- self->use_sock_stream = FALSE; ++ self->use_sock_stream = TRUE; + + usrsctp_register_address ((void *) self); + } +@@ -546,6 +546,7 @@ gst_sctp_association_reset_stream (GstSctpAssociation * self, guint16 stream_id) + + length = (socklen_t) (sizeof (struct sctp_reset_streams) + sizeof (guint16)); + srs = (struct sctp_reset_streams *) g_malloc0 (length); ++ srs->srs_assoc_id = SCTP_ALL_ASSOC; + srs->srs_flags = SCTP_STREAM_RESET_OUTGOING; + srs->srs_number_streams = 1; + srs->srs_stream_list[0] = stream_id; +diff --git a/ext/sctp/usrsctp/meson.build b/ext/sctp/usrsctp/meson.build +index 8d474970b..6a1ab845d 100644 +--- a/ext/sctp/usrsctp/meson.build ++++ b/ext/sctp/usrsctp/meson.build +@@ -31,7 +31,6 @@ else + '-Wno-missing-declarations', + '-Wno-old-style-definition', + '-Wno-redundant-decls', +- '-Wno-error', + ]) + endif + +@@ -77,7 +76,6 @@ elif system == 'windows' + if compiler.get_id() == 'gcc' + compile_args += [compiler.get_supported_arguments([ + '-Wno-format', +- '-D_WIN32_WINNT=0x601', # Enables inet_ntop and friends + ])] + endif + else +@@ -170,6 +168,7 @@ usrsctp_static = static_library('usrsctp-static', sources, + c_args: compile_args, + dependencies: dependencies, + include_directories: include_dirs, ++ override_options: ['werror=false'], + install: false) + + # Declare dependency +diff --git a/ext/srtp/gstsrtp.c 
b/ext/srtp/gstsrtp.c +index b607b2c26..5a3494548 100644 +--- a/ext/srtp/gstsrtp.c ++++ b/ext/srtp/gstsrtp.c +@@ -297,26 +297,3 @@ cipher_key_size (GstSrtpCipherType cipher) + + return size; + } +- +-static gboolean +-plugin_init (GstPlugin * plugin) +-{ +- srtp_init (); +- +- if (!gst_srtp_enc_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_srtp_dec_plugin_init (plugin)) +- return FALSE; +- +- gst_type_mark_as_plugin_api (GST_TYPE_SRTP_AUTH_TYPE, 0); +- gst_type_mark_as_plugin_api (GST_TYPE_SRTP_CIPHER_TYPE, 0); +- +- return TRUE; +-} +- +-GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, +- GST_VERSION_MINOR, +- srtp, +- "GStreamer SRTP", +- plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN) +diff --git a/ext/srtp/gstsrtpdec.c b/ext/srtp/gstsrtpdec.c +index 6d192995f..153712acb 100644 +--- a/ext/srtp/gstsrtpdec.c ++++ b/ext/srtp/gstsrtpdec.c +@@ -115,8 +115,8 @@ + * + */ + ++#include "gstsrtpelements.h" + #include "gstsrtpdec.h" +- + #include + #include + +@@ -177,7 +177,11 @@ GST_STATIC_PAD_TEMPLATE ("rtcp_src", + + static guint gst_srtp_dec_signals[LAST_SIGNAL] = { 0 }; + +-G_DEFINE_TYPE (GstSrtpDec, gst_srtp_dec, GST_TYPE_ELEMENT); ++G_DEFINE_TYPE_WITH_CODE (GstSrtpDec, gst_srtp_dec, GST_TYPE_ELEMENT, ++ GST_DEBUG_CATEGORY_INIT (gst_srtp_dec_debug, "srtpdec", 0, "SRTP dec"); ++ ); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (srtpdec, "srtpdec", GST_RANK_NONE, ++ GST_TYPE_SRTP_DEC, srtp_element_init (plugin)); + + static void gst_srtp_dec_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); +@@ -225,6 +229,8 @@ struct _GstSrtpDecSsrcStream + GstSrtpCipherType rtcp_cipher; + GstSrtpAuthType rtcp_auth; + GArray *keys; ++ guint recv_count; ++ guint recv_drop_count; + }; + + #ifdef HAVE_SRTP2 +@@ -431,10 +437,11 @@ gst_srtp_dec_create_stats (GstSrtpDec * filter) + + if (filter->session) { + GHashTableIter iter; +- gpointer key; ++ gpointer key, value; + + g_hash_table_iter_init (&iter, filter->streams); +- while (g_hash_table_iter_next (&iter, &key, NULL)) { ++ while (g_hash_table_iter_next (&iter, &key, &value)) { ++ GstSrtpDecSsrcStream *stream = value; + GstStructure *ss; + guint32 ssrc = GPOINTER_TO_UINT (key); + srtp_err_status_t status; +@@ -446,7 +453,9 @@ gst_srtp_dec_create_stats (GstSrtpDec * filter) + } + + ss = gst_structure_new ("application/x-srtp-stream", +- "ssrc", G_TYPE_UINT, ssrc, "roc", G_TYPE_UINT, roc, NULL); ++ "ssrc", G_TYPE_UINT, ssrc, "roc", G_TYPE_UINT, roc, "recv-count", ++ G_TYPE_UINT, stream->recv_count, "recv-drop-count", G_TYPE_UINT, ++ stream->recv_drop_count, NULL); + + g_value_take_boxed (&v, ss); + gst_value_array_append_value (&va, &v); +@@ -454,6 +463,11 @@ gst_srtp_dec_create_stats (GstSrtpDec * filter) + } + + gst_structure_take_value (s, "streams", &va); ++ gst_structure_set (s, "recv-count", G_TYPE_UINT, filter->recv_count, NULL); ++ gst_structure_set (s, "recv-drop-count", G_TYPE_UINT, ++ filter->recv_drop_count, NULL); ++ GST_LOG_OBJECT (filter, "stats: recv-count %u recv-drop-count %u", ++ filter->recv_count, filter->recv_drop_count); + g_value_unset (&v); + + return s; +@@ -592,7 +606,7 @@ get_stream_from_caps (GstSrtpDec * filter, GstCaps * caps, guint32 ssrc) + goto error; + } + +- if (gst_structure_get (s, "srtp-key", GST_TYPE_BUFFER, &buf, NULL) || !buf) { ++ if (gst_structure_get (s, "srtp-key", GST_TYPE_BUFFER, &buf, NULL) && buf) { + #ifdef HAVE_SRTP2 + GstBuffer *mki = NULL; + guint i; +@@ -1246,7 +1260,54 @@ gst_srtp_dec_iterate_internal_links_rtcp (GstPad * pad, GstObject * parent) + return 
gst_srtp_dec_iterate_internal_links (pad, parent, TRUE); + } + +-static void ++/* Partial backport to 1.22 of `gst_element_decorate_stream_id_internal`, ++ * which was introduced in 1.23 */ ++static gchar * ++decorate_stream_id_private (GstElement * element, const gchar * stream_id) ++{ ++ gchar *upstream_stream_id = NULL, *new_stream_id; ++ GstQuery *query; ++ gchar *uri = NULL; ++ ++ /* Try to generate a stream-id from the URI query and ++ * if it fails take a random number instead */ ++ query = gst_query_new_uri (); ++ if (gst_element_query (element, query)) { ++ gst_query_parse_uri (query, &uri); ++ } ++ ++ if (uri) { ++ GChecksum *cs; ++ ++ /* And then generate an SHA256 sum of the URI */ ++ cs = g_checksum_new (G_CHECKSUM_SHA256); ++ g_checksum_update (cs, (const guchar *) uri, strlen (uri)); ++ g_free (uri); ++ upstream_stream_id = g_strdup (g_checksum_get_string (cs)); ++ g_checksum_free (cs); ++ } else { ++ /* Just get some random number if the URI query fails */ ++ GST_FIXME_OBJECT (element, "Creating random stream-id, consider " ++ "implementing a deterministic way of creating a stream-id"); ++ upstream_stream_id = ++ g_strdup_printf ("%08x%08x%08x%08x", g_random_int (), g_random_int (), ++ g_random_int (), g_random_int ()); ++ } ++ ++ gst_query_unref (query); ++ ++ if (stream_id) { ++ new_stream_id = g_strconcat (upstream_stream_id, "/", stream_id, NULL); ++ } else { ++ new_stream_id = g_strdup (upstream_stream_id); ++ } ++ ++ g_free (upstream_stream_id); ++ ++ return new_stream_id; ++} ++ ++static gboolean + gst_srtp_dec_push_early_events (GstSrtpDec * filter, GstPad * pad, + GstPad * otherpad, gboolean is_rtcp) + { +@@ -1269,7 +1330,7 @@ gst_srtp_dec_push_early_events (GstSrtpDec * filter, GstPad * pad, + is_rtcp ? "rtcp" : "rtp"); + gst_event_unref (otherev); + } else { +- new_stream_id = gst_pad_create_stream_id (pad, GST_ELEMENT (filter), ++ new_stream_id = decorate_stream_id_private (GST_ELEMENT (filter), + is_rtcp ? "rtcp" : "rtp"); + } + +@@ -1290,7 +1351,8 @@ gst_srtp_dec_push_early_events (GstSrtpDec * filter, GstPad * pad, + else + caps = gst_caps_new_empty_simple ("application/x-rtp"); + +- gst_pad_set_caps (pad, caps); ++ ev = gst_event_new_caps (caps); ++ gst_pad_push_event (pad, ev); + gst_caps_unref (caps); + } + +@@ -1300,8 +1362,16 @@ gst_srtp_dec_push_early_events (GstSrtpDec * filter, GstPad * pad, + } else { + ev = gst_pad_get_sticky_event (otherpad, GST_EVENT_SEGMENT, 0); + +- if (ev) ++ if (ev) { + gst_pad_push_event (pad, ev); ++ } else if (GST_PAD_IS_FLUSHING (otherpad)) { ++ /* We didn't get a Segment event from otherpad ++ * and otherpad is flushing => we are most likely shutting down */ ++ goto err; ++ } else { ++ GST_WARNING_OBJECT (filter, "No Segment event to push"); ++ goto err; ++ } + } + + if (is_rtcp) +@@ -1309,6 +1379,10 @@ gst_srtp_dec_push_early_events (GstSrtpDec * filter, GstPad * pad, + else + filter->rtp_has_segment = TRUE; + ++ return TRUE; ++ ++err: ++ return FALSE; + } + + /* +@@ -1321,11 +1395,12 @@ gst_srtp_dec_decode_buffer (GstSrtpDec * filter, GstPad * pad, GstBuffer * buf, + GstMapInfo map; + srtp_err_status_t err; + gint size; ++ GstSrtpDecSsrcStream *stream; + + GST_LOG_OBJECT (pad, "Received %s buffer of size %" G_GSIZE_FORMAT + " with SSRC = %u", is_rtcp ? 
"RTCP" : "RTP", gst_buffer_get_size (buf), + ssrc); +- ++ filter->recv_count++; + /* Change buffer to remove protection */ + buf = gst_buffer_make_writable (buf); + +@@ -1338,7 +1413,7 @@ unprotect: + + if (is_rtcp) { + #ifdef HAVE_SRTP2 +- GstSrtpDecSsrcStream *stream = find_stream_by_ssrc (filter, ssrc); ++ stream = find_stream_by_ssrc (filter, ssrc); + + err = srtp_unprotect_rtcp_mki (filter->session, map.data, &size, + stream && stream->keys); +@@ -1377,7 +1452,7 @@ unprotect: + + #ifdef HAVE_SRTP2 + { +- GstSrtpDecSsrcStream *stream = find_stream_by_ssrc (filter, ssrc); ++ stream = find_stream_by_ssrc (filter, ssrc); + + err = srtp_unprotect_mki (filter->session, map.data, &size, + stream && stream->keys); +@@ -1386,7 +1461,12 @@ unprotect: + err = srtp_unprotect (filter->session, map.data, &size); + #endif + } +- ++ stream = find_stream_by_ssrc (filter, ssrc); ++ if (stream == NULL) { ++ GST_WARNING_OBJECT (filter, "Could not find matching stream, dropping"); ++ goto err; ++ } ++ stream->recv_count++; + /* Signal user depending on type of error */ + switch (err) { + case srtp_err_status_ok: +@@ -1395,20 +1475,14 @@ unprotect: + case srtp_err_status_replay_fail: + GST_DEBUG_OBJECT (filter, + "Dropping replayed packet, probably retransmission"); ++ stream->recv_drop_count++; + goto err; + case srtp_err_status_replay_old: + GST_DEBUG_OBJECT (filter, + "Dropping replayed old packet, probably retransmission"); ++ stream->recv_drop_count++; + goto err; + case srtp_err_status_key_expired:{ +- GstSrtpDecSsrcStream *stream; +- +- /* Check we have an existing stream to rekey */ +- stream = find_stream_by_ssrc (filter, ssrc); +- if (stream == NULL) { +- GST_WARNING_OBJECT (filter, "Could not find matching stream, dropping"); +- goto err; +- } + + GST_OBJECT_UNLOCK (filter); + stream = request_key_with_signal (filter, ssrc, SIGNAL_HARD_LIMIT); +@@ -1424,21 +1498,24 @@ unprotect: + } + case srtp_err_status_auth_fail: + GST_WARNING_OBJECT (filter, "Error authentication packet, dropping"); ++ stream->recv_drop_count++; + goto err; + case srtp_err_status_cipher_fail: + GST_WARNING_OBJECT (filter, "Error while decrypting packet, dropping"); ++ stream->recv_drop_count++; + goto err; + default: + GST_WARNING_OBJECT (pad, + "Unable to unprotect buffer (unprotect failed code %d)", err); ++ stream->recv_drop_count++; + goto err; + } +- + gst_buffer_unmap (buf, &map); + gst_buffer_set_size (buf, size); + return TRUE; + + err: ++ filter->recv_drop_count++; + gst_buffer_unmap (buf, &map); + return FALSE; + } +@@ -1483,15 +1560,24 @@ push_out: + /* Push buffer to source pad */ + if (is_rtcp) { + otherpad = filter->rtcp_srcpad; +- if (!filter->rtcp_has_segment) +- gst_srtp_dec_push_early_events (filter, filter->rtcp_srcpad, +- filter->rtp_srcpad, TRUE); ++ if (!filter->rtcp_has_segment) { ++ if (!gst_srtp_dec_push_early_events (filter, filter->rtcp_srcpad, ++ filter->rtp_srcpad, TRUE)) { ++ ret = GST_FLOW_FLUSHING; ++ goto drop_buffer; ++ } ++ } + } else { + otherpad = filter->rtp_srcpad; +- if (!filter->rtp_has_segment) +- gst_srtp_dec_push_early_events (filter, filter->rtp_srcpad, +- filter->rtcp_srcpad, FALSE); ++ if (!filter->rtp_has_segment) { ++ if (!gst_srtp_dec_push_early_events (filter, filter->rtp_srcpad, ++ filter->rtcp_srcpad, FALSE)) { ++ ret = GST_FLOW_FLUSHING; ++ goto drop_buffer; ++ } ++ } + } ++ + ret = gst_pad_push (otherpad, buf); + + return ret; +@@ -1537,6 +1623,8 @@ gst_srtp_dec_change_state (GstElement * element, GstStateChange transition) + + filter->rtp_has_segment = FALSE; + 
filter->rtcp_has_segment = FALSE; ++ filter->recv_count = 0; ++ filter->recv_drop_count = 0; + break; + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: + break; +@@ -1556,7 +1644,6 @@ gst_srtp_dec_change_state (GstElement * element, GstStateChange transition) + gst_srtp_dec_clear_streams (filter); + g_hash_table_unref (filter->streams); + filter->streams = NULL; +- + #ifndef HAVE_SRTP2 + g_hash_table_unref (filter->streams_roc_changed); + filter->streams_roc_changed = NULL; +@@ -1570,17 +1657,3 @@ gst_srtp_dec_change_state (GstElement * element, GstStateChange transition) + } + return res; + } +- +- +-/* entry point to initialize the plug-in +- * initialize the plug-in itself +- * register the element factories and other features +- */ +-gboolean +-gst_srtp_dec_plugin_init (GstPlugin * srtpdec) +-{ +- GST_DEBUG_CATEGORY_INIT (gst_srtp_dec_debug, "srtpdec", 0, "SRTP dec"); +- +- return gst_element_register (srtpdec, "srtpdec", GST_RANK_NONE, +- GST_TYPE_SRTP_DEC); +-} +diff --git a/ext/srtp/gstsrtpdec.h b/ext/srtp/gstsrtpdec.h +index ba8bcff58..e517cab09 100644 +--- a/ext/srtp/gstsrtpdec.h ++++ b/ext/srtp/gstsrtpdec.h +@@ -82,6 +82,8 @@ struct _GstSrtpDec + + gboolean rtp_has_segment; + gboolean rtcp_has_segment; ++ guint recv_count; ++ guint recv_drop_count; + + #ifndef HAVE_SRTP2 + GHashTable *streams_roc_changed; +@@ -98,7 +100,6 @@ struct _GstSrtpDecClass + + GType gst_srtp_dec_get_type (void); + +-gboolean gst_srtp_dec_plugin_init (GstPlugin * plugin); + + G_END_DECLS + +diff --git a/ext/srtp/gstsrtpelement.c b/ext/srtp/gstsrtpelement.c +new file mode 100644 +index 000000000..231b70db5 +--- /dev/null ++++ b/ext/srtp/gstsrtpelement.c +@@ -0,0 +1,41 @@ ++/* ++ * GStreamer - GStreamer SRTP encoder and decoder ++ * ++ * Copyright 2009-2013 Collabora Ltd. ++ * @author: Gabriel Millaire ++ * @author: Olivier Crete ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, ++ * Boston, MA 02111-1307, USA. ++ */ ++ ++ ++#define GLIB_DISABLE_DEPRECATION_WARNINGS ++ ++#include "gstsrtpelements.h" ++ ++ ++void ++srtp_element_init (GstPlugin * plugin) ++{ ++ static gsize res = FALSE; ++ ++ if (g_once_init_enter (&res)) { ++ srtp_init (); ++ gst_type_mark_as_plugin_api (GST_TYPE_SRTP_AUTH_TYPE, 0); ++ gst_type_mark_as_plugin_api (GST_TYPE_SRTP_CIPHER_TYPE, 0); ++ g_once_init_leave (&res, TRUE); ++ } ++} +diff --git a/ext/srtp/gstsrtpelements.h b/ext/srtp/gstsrtpelements.h +new file mode 100644 +index 000000000..0a223fbdf +--- /dev/null ++++ b/ext/srtp/gstsrtpelements.h +@@ -0,0 +1,63 @@ ++/* ++ * GStreamer - GStreamer SRTP encoder ++ * ++ * Copyright 2011-2013 Collabora Ltd. 
++ * @author: Olivier Crete ++ * ++ * Permission is hereby granted, free of charge, to any person obtaining a ++ * copy of this software and associated documentation files (the "Software"), ++ * to deal in the Software without restriction, including without limitation ++ * the rights to use, copy, modify, merge, publish, distribute, sublicense, ++ * and/or sell copies of the Software, and to permit persons to whom the ++ * Software is furnished to do so, subject to the following conditions: ++ * ++ * The above copyright notice and this permission notice shall be included in ++ * all copies or substantial portions of the Software. ++ * ++ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ++ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ++ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE ++ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER ++ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING ++ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER ++ * DEALINGS IN THE SOFTWARE. ++ * ++ * Alternatively, the contents of this file may be used under the ++ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in ++ * which case the following provisions apply instead of the ones ++ * mentioned above: ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, ++ * Boston, MA 02111-1307, USA. ++ */ ++#ifndef __GST_SRTP_ELEMENTS_H__ ++#define __GST_SRTP_ELEMENTS_H__ ++ ++#ifdef HAVE_CONFIG_H ++#include ++#endif ++ ++#include "gstsrtp.h" ++#include "gstsrtpenums.h" ++#include "gstsrtp-enumtypes.h" ++ ++#include ++ ++void srtp_element_init (GstPlugin * plugin); ++ ++GST_ELEMENT_REGISTER_DECLARE (srtpdec); ++GST_ELEMENT_REGISTER_DECLARE (srtpenc); ++ ++#endif /* __GST_SRTP_ELEMENTS_H__ */ +diff --git a/ext/srtp/gstsrtpenc.c b/ext/srtp/gstsrtpenc.c +index d677afcce..291d154a4 100644 +--- a/ext/srtp/gstsrtpenc.c ++++ b/ext/srtp/gstsrtpenc.c +@@ -106,6 +106,7 @@ + * will be added to every buffer. 
+ */ + ++#include "gstsrtpelements.h" + #include "gstsrtpenc.h" + + #include +@@ -201,7 +202,10 @@ GST_STATIC_PAD_TEMPLATE ("rtcp_src_%u", + GST_STATIC_CAPS ("application/x-srtcp") + ); + +-G_DEFINE_TYPE (GstSrtpEnc, gst_srtp_enc, GST_TYPE_ELEMENT); ++G_DEFINE_TYPE_WITH_CODE (GstSrtpEnc, gst_srtp_enc, GST_TYPE_ELEMENT, ++ GST_DEBUG_CATEGORY_INIT (gst_srtp_enc_debug, "srtpenc", 0, "SRTP Enc");); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (srtpenc, "srtpenc", GST_RANK_NONE, ++ GST_TYPE_SRTP_ENC, srtp_element_init (plugin)); + + static guint gst_srtp_enc_signals[LAST_SIGNAL] = { 0 }; + +@@ -1145,6 +1149,7 @@ gst_srtp_enc_process_buffer (GstSrtpEnc * filter, GstPad * pad, + if (filter->session == NULL) { + /* The rtcp session disappeared (element shutting down) */ + GST_OBJECT_UNLOCK (filter); ++ gst_buffer_unmap (bufout, &mapout); + ret = GST_FLOW_FLUSHING; + goto fail; + } +@@ -1492,16 +1497,3 @@ gst_srtp_enc_sink_event_rtcp (GstPad * pad, GstObject * parent, + { + return gst_srtp_enc_sink_event (pad, parent, event, TRUE); + } +- +-/* entry point to initialize the plug-in +- * initialize the plug-in itself +- * register the element factories and other features +- */ +-gboolean +-gst_srtp_enc_plugin_init (GstPlugin * srtpenc) +-{ +- GST_DEBUG_CATEGORY_INIT (gst_srtp_enc_debug, "srtpenc", 0, "SRTP Enc"); +- +- return gst_element_register (srtpenc, "srtpenc", GST_RANK_NONE, +- GST_TYPE_SRTP_ENC); +-} +diff --git a/ext/srtp/gstsrtpenc.h b/ext/srtp/gstsrtpenc.h +index df2f8fd30..ed02df416 100644 +--- a/ext/srtp/gstsrtpenc.h ++++ b/ext/srtp/gstsrtpenc.h +@@ -95,8 +95,6 @@ struct _GstSrtpEncClass + + GType gst_srtp_enc_get_type (void); + +-gboolean gst_srtp_enc_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_SRTPENC_H__ */ +diff --git a/ext/srtp/gstsrtpplugin.c b/ext/srtp/gstsrtpplugin.c +new file mode 100644 +index 000000000..7fb6269f0 +--- /dev/null ++++ b/ext/srtp/gstsrtpplugin.c +@@ -0,0 +1,45 @@ ++/* ++ * GStreamer - GStreamer SRTP encoder and decoder ++ * ++ * Copyright 2009-2013 Collabora Ltd. ++ * @author: Gabriel Millaire ++ * @author: Olivier Crete ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, ++ * Boston, MA 02111-1307, USA. 
++ */ ++ ++ ++#define GLIB_DISABLE_DEPRECATION_WARNINGS ++ ++#include "gstsrtpelements.h" ++ ++ ++static gboolean ++plugin_init (GstPlugin * plugin) ++{ ++ gboolean ret = FALSE; ++ ++ ret |= GST_ELEMENT_REGISTER (srtpenc, plugin); ++ ret |= GST_ELEMENT_REGISTER (srtpdec, plugin); ++ ++ return ret; ++} ++ ++GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, ++ GST_VERSION_MINOR, ++ srtp, ++ "GStreamer SRTP", ++ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN) +diff --git a/ext/srtp/meson.build b/ext/srtp/meson.build +index affdac367..49eed5b0e 100644 +--- a/ext/srtp/meson.build ++++ b/ext/srtp/meson.build +@@ -1,11 +1,14 @@ + srtp_sources = [ + 'gstsrtp.c', ++ 'gstsrtpelement.c', ++ 'gstsrtpplugin.c', + 'gstsrtpdec.c', + 'gstsrtpenc.c', + ] + + srtp_cargs = [] + if get_option('srtp').disabled() ++ srtp_dep = dependency('', required : false) + subdir_done() + endif + +@@ -37,6 +40,5 @@ if srtp_dep.found() + install : true, + install_dir : plugins_install_dir, + ) +- pkgconfig.generate(gstsrtp, install_dir : plugins_pkgconfig_install_dir) + plugins += [gstsrtp] + endif +diff --git a/ext/webrtc/fwd.h b/ext/webrtc/fwd.h +index aa26ec6de..dc7273b60 100644 +--- a/ext/webrtc/fwd.h ++++ b/ext/webrtc/fwd.h +@@ -29,18 +29,6 @@ typedef struct _GstWebRTCBin GstWebRTCBin; + typedef struct _GstWebRTCBinClass GstWebRTCBinClass; + typedef struct _GstWebRTCBinPrivate GstWebRTCBinPrivate; + +-typedef struct _GstWebRTCICE GstWebRTCICE; +-typedef struct _GstWebRTCICEClass GstWebRTCICEClass; +-typedef struct _GstWebRTCICEPrivate GstWebRTCICEPrivate; +- +-typedef struct _GstWebRTCICEStream GstWebRTCICEStream; +-typedef struct _GstWebRTCICEStreamClass GstWebRTCICEStreamClass; +-typedef struct _GstWebRTCICEStreamPrivate GstWebRTCICEStreamPrivate; +- +-typedef struct _GstWebRTCNiceTransport GstWebRTCNiceTransport; +-typedef struct _GstWebRTCNiceTransportClass GstWebRTCNiceTransportClass; +-typedef struct _GstWebRTCNiceTransportPrivate GstWebRTCNiceTransportPrivate; +- + typedef struct _GstWebRTCSCTPTransport GstWebRTCSCTPTransport; + typedef struct _GstWebRTCSCTPTransportClass GstWebRTCSCTPTransportClass; + typedef struct _GstWebRTCSCTPTransportPrivate GstWebRTCSCTPTransportPrivate; +diff --git a/ext/webrtc/gstwebrtcbin.c b/ext/webrtc/gstwebrtcbin.c +index 85435303b..6a8f72236 100644 +--- a/ext/webrtc/gstwebrtcbin.c ++++ b/ext/webrtc/gstwebrtcbin.c +@@ -29,7 +29,11 @@ + #include "webrtcsdp.h" + #include "webrtctransceiver.h" + #include "webrtcdatachannel.h" +-#include "sctptransport.h" ++#include "webrtcsctptransport.h" ++ ++#include "gst/webrtc/webrtc-priv.h" ++#include ++#include + + #include + #include +@@ -53,11 +57,23 @@ + #define ICE_LOCK(w) (g_mutex_lock (ICE_GET_LOCK(w))) + #define ICE_UNLOCK(w) (g_mutex_unlock (ICE_GET_LOCK(w))) + ++#define DC_GET_LOCK(w) (&w->priv->dc_lock) ++#define DC_LOCK(w) (g_mutex_lock (DC_GET_LOCK(w))) ++#define DC_UNLOCK(w) (g_mutex_unlock (DC_GET_LOCK(w))) + + /* The extra time for the rtpstorage compared to the RTP jitterbuffer (in ms) */ + #define RTPSTORAGE_EXTRA_TIME (50) + +-/* ++#define DEFAULT_JB_LATENCY 200 ++ ++#define RTPHDREXT_MID GST_RTP_HDREXT_BASE "sdes:mid" ++#define RTPHDREXT_STREAM_ID GST_RTP_HDREXT_BASE "sdes:rtp-stream-id" ++#define RTPHDREXT_REPAIRED_STREAM_ID GST_RTP_HDREXT_BASE "sdes:repaired-rtp-stream-id" ++ ++/** ++ * SECTION: element-webrtcbin ++ * title: webrtcbin ++ * + * This webrtcbin implements the majority of the W3's peerconnection API and + * implementation guide where possible. 
Generating offers, answers and setting + * local and remote SDP's are all supported. Both media descriptions and +@@ -78,10 +94,10 @@ + * configuration. Some cases are outlined below for a simple single + * audio/video/data session: + * +- * - max-bundle (requires rtcp-muxing) uses a single transport for all ++ * - max-bundle uses a single transport for all + * media/data transported. Renegotiation involves adding/removing the + * necessary streams to the existing transports. +- * - max-compat without rtcp-mux involves two TransportStream per media stream ++ * - max-compat involves two TransportStream per media stream + * to transport the rtp and the rtcp packets and a single TransportStream for + * all data channels. Each stream change involves modifying the associated + * TransportStream/s as necessary. +@@ -100,6 +116,9 @@ + */ + + static void _update_need_negotiation (GstWebRTCBin * webrtc); ++static GstPad *_connect_input_stream (GstWebRTCBin * webrtc, ++ GstWebRTCBinPad * pad); ++ + + #define GST_CAT_DEFAULT gst_webrtc_bin_debug + GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); +@@ -197,19 +216,23 @@ _have_dtls_elements (GstWebRTCBin * webrtc) + return TRUE; + } + +-G_DEFINE_TYPE (GstWebRTCBinPad, gst_webrtc_bin_pad, GST_TYPE_GHOST_PAD); +- +-static void +-gst_webrtc_bin_pad_set_property (GObject * object, guint prop_id, +- const GValue * value, GParamSpec * pspec) ++static gboolean ++_gst_element_accumulator (GSignalInvocationHint * ihint, ++ GValue * return_accu, const GValue * handler_return, gpointer dummy) + { +- switch (prop_id) { +- default: +- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); +- break; +- } ++ GstElement *element; ++ ++ element = g_value_get_object (handler_return); ++ GST_DEBUG ("got element %" GST_PTR_FORMAT, element); ++ ++ g_value_set_object (return_accu, element); ++ ++ /* stop emission if we have an element */ ++ return (element == NULL); + } + ++G_DEFINE_TYPE (GstWebRTCBinPad, gst_webrtc_bin_pad, GST_TYPE_GHOST_PAD); ++ + static void + gst_webrtc_bin_pad_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec) +@@ -231,13 +254,9 @@ gst_webrtc_bin_pad_finalize (GObject * object) + { + GstWebRTCBinPad *pad = GST_WEBRTC_BIN_PAD (object); + +- if (pad->trans) +- gst_object_unref (pad->trans); +- pad->trans = NULL; +- +- if (pad->received_caps) +- gst_caps_unref (pad->received_caps); +- pad->received_caps = NULL; ++ gst_clear_object (&pad->trans); ++ gst_clear_caps (&pad->received_caps); ++ g_clear_pointer (&pad->msid, g_free); + + G_OBJECT_CLASS (gst_webrtc_bin_pad_parent_class)->finalize (object); + } +@@ -248,7 +267,6 @@ gst_webrtc_bin_pad_class_init (GstWebRTCBinPadClass * klass) + GObjectClass *gobject_class = (GObjectClass *) klass; + + gobject_class->get_property = gst_webrtc_bin_pad_get_property; +- gobject_class->set_property = gst_webrtc_bin_pad_set_property; + gobject_class->finalize = gst_webrtc_bin_pad_finalize; + + g_object_class_install_property (gobject_class, +@@ -259,6 +277,38 @@ gst_webrtc_bin_pad_class_init (GstWebRTCBinPadClass * klass) + G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); + } + ++static void ++gst_webrtc_bin_pad_update_tos_event (GstWebRTCBinPad * wpad) ++{ ++ WebRTCTransceiver *trans = (WebRTCTransceiver *) wpad->trans; ++ ++ if (wpad->received_caps && trans->parent.mid) { ++ GstPad *pad = GST_PAD (wpad); ++ ++ gst_event_take (&trans->tos_event, ++ gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, ++ gst_structure_new ("GstWebRtcBinUpdateTos", "mid", G_TYPE_STRING, ++ 
trans->parent.mid, NULL))); ++ ++ GST_DEBUG_OBJECT (pad, "sending new tos event %" GST_PTR_FORMAT, ++ trans->tos_event); ++ gst_pad_send_event (pad, gst_event_ref (trans->tos_event)); ++ } ++} ++ ++static GList * ++_get_pending_sink_transceiver (GstWebRTCBin * webrtc, GstWebRTCBinPad * pad) ++{ ++ GList *ret; ++ ++ for (ret = webrtc->priv->pending_sink_transceivers; ret; ret = ret->next) { ++ if (ret->data == pad) ++ break; ++ } ++ ++ return ret; ++} ++ + static gboolean + gst_webrtcbin_sink_event (GstPad * pad, GstObject * parent, GstEvent * event) + { +@@ -271,12 +321,45 @@ gst_webrtcbin_sink_event (GstPad * pad, GstObject * parent, GstEvent * event) + + gst_event_parse_caps (event, &caps); + check_negotiation = (!wpad->received_caps +- || gst_caps_is_equal (wpad->received_caps, caps)); ++ || !gst_caps_is_equal (wpad->received_caps, caps)); + gst_caps_replace (&wpad->received_caps, caps); + + GST_DEBUG_OBJECT (parent, + "On %" GST_PTR_FORMAT " checking negotiation? %u, caps %" + GST_PTR_FORMAT, pad, check_negotiation, caps); ++ ++ if (check_negotiation) { ++ gst_webrtc_bin_pad_update_tos_event (wpad); ++ } ++ ++ /* A remote description might have been set while the pad hadn't ++ * yet received caps, delaying the connection of the input stream ++ */ ++ PC_LOCK (webrtc); ++ if (wpad->trans) { ++ GST_OBJECT_LOCK (wpad->trans); ++ if (wpad->trans->current_direction == ++ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY ++ || wpad->trans->current_direction == ++ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV) { ++ GList *pending = _get_pending_sink_transceiver (webrtc, wpad); ++ ++ if (pending) { ++ GST_LOG_OBJECT (pad, "Connecting input stream to rtpbin with " ++ "transceiver %" GST_PTR_FORMAT " and caps %" GST_PTR_FORMAT, ++ wpad->trans, wpad->received_caps); ++ _connect_input_stream (webrtc, wpad); ++ gst_pad_remove_probe (GST_PAD (pad), wpad->block_id); ++ wpad->block_id = 0; ++ gst_object_unref (pending->data); ++ webrtc->priv->pending_sink_transceivers = ++ g_list_delete_link (webrtc->priv->pending_sink_transceivers, ++ pending); ++ } ++ } ++ GST_OBJECT_UNLOCK (wpad->trans); ++ } ++ PC_UNLOCK (webrtc); + } else if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) { + check_negotiation = TRUE; + } +@@ -290,45 +373,263 @@ gst_webrtcbin_sink_event (GstPad * pad, GstObject * parent, GstEvent * event) + return gst_pad_event_default (pad, parent, event); + } + ++static gboolean ++gst_webrtcbin_sink_query (GstPad * pad, GstObject * parent, GstQuery * query) ++{ ++ GstWebRTCBinPad *wpad = GST_WEBRTC_BIN_PAD (pad); ++ gboolean ret = FALSE; ++ ++ switch (GST_QUERY_TYPE (query)) { ++ case GST_QUERY_ACCEPT_CAPS: ++ GST_OBJECT_LOCK (wpad->trans); ++ if (wpad->trans->codec_preferences) { ++ GstCaps *caps; ++ ++ gst_query_parse_accept_caps (query, &caps); ++ ++ gst_query_set_accept_caps_result (query, ++ gst_caps_can_intersect (caps, wpad->trans->codec_preferences)); ++ ret = TRUE; ++ } ++ GST_OBJECT_UNLOCK (wpad->trans); ++ break; ++ ++ case GST_QUERY_CAPS: ++ { ++ GstCaps *codec_preferences = NULL; ++ ++ GST_OBJECT_LOCK (wpad->trans); ++ if (wpad->trans->codec_preferences) ++ codec_preferences = gst_caps_ref (wpad->trans->codec_preferences); ++ GST_OBJECT_UNLOCK (wpad->trans); ++ ++ if (codec_preferences) { ++ GstCaps *filter = NULL; ++ GstCaps *filter_prefs = NULL; ++ GstPad *target; ++ ++ gst_query_parse_caps (query, &filter); ++ ++ if (filter) { ++ filter_prefs = gst_caps_intersect_full (filter, codec_preferences, ++ GST_CAPS_INTERSECT_FIRST); ++ gst_caps_unref (codec_preferences); ++ } else { ++ 
filter_prefs = codec_preferences; ++ } ++ ++ target = gst_ghost_pad_get_target (GST_GHOST_PAD (pad)); ++ if (target) { ++ GstCaps *result; ++ ++ result = gst_pad_query_caps (target, filter_prefs); ++ gst_query_set_caps_result (query, result); ++ gst_caps_unref (result); ++ ++ gst_object_unref (target); ++ } else { ++ gst_query_set_caps_result (query, filter_prefs); ++ } ++ ++ gst_caps_unref (filter_prefs); ++ ret = TRUE; ++ } ++ break; ++ } ++ default: ++ break; ++ } ++ ++ if (ret) ++ return TRUE; ++ ++ return gst_pad_query_default (pad, parent, query); ++} ++ ++ + static void + gst_webrtc_bin_pad_init (GstWebRTCBinPad * pad) + { + } + + static GstWebRTCBinPad * +-gst_webrtc_bin_pad_new (const gchar * name, GstPadDirection direction) ++gst_webrtc_bin_pad_new (const gchar * name, GstPadDirection direction, ++ char *msid) + { + GstWebRTCBinPad *pad; + GstPadTemplate *template; ++ GType pad_type; + +- if (direction == GST_PAD_SINK) ++ if (direction == GST_PAD_SINK) { + template = gst_static_pad_template_get (&sink_template); +- else if (direction == GST_PAD_SRC) ++ pad_type = GST_TYPE_WEBRTC_BIN_SINK_PAD; ++ } else if (direction == GST_PAD_SRC) { + template = gst_static_pad_template_get (&src_template); +- else ++ pad_type = GST_TYPE_WEBRTC_BIN_SRC_PAD; ++ } else { + g_assert_not_reached (); ++ } + + pad = +- g_object_new (gst_webrtc_bin_pad_get_type (), "name", name, "direction", ++ g_object_new (pad_type, "name", name, "direction", + direction, "template", template, NULL); + gst_object_unref (template); + +- gst_pad_set_event_function (GST_PAD (pad), gst_webrtcbin_sink_event); ++ pad->msid = msid; + + GST_DEBUG_OBJECT (pad, "new visible pad with direction %s", + direction == GST_PAD_SRC ? "src" : "sink"); + return pad; + } + ++enum ++{ ++ PROP_SINK_PAD_MSID = 1, ++}; ++ ++/** ++ * GstWebRTCBinSinkPad: ++ * ++ * Since: 1.22 ++ */ ++struct _GstWebRTCBinSinkPad ++{ ++ GstWebRTCBinPad pad; ++}; ++ ++G_DEFINE_TYPE (GstWebRTCBinSinkPad, gst_webrtc_bin_sink_pad, ++ GST_TYPE_WEBRTC_BIN_PAD); ++ ++static void ++gst_webrtc_bin_sink_pad_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec) ++{ ++ GstWebRTCBinPad *pad = GST_WEBRTC_BIN_PAD (object); ++ ++ switch (prop_id) { ++ case PROP_SINK_PAD_MSID: ++ g_value_set_string (value, pad->msid); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_webrtc_bin_sink_pad_set_property (GObject * object, guint prop_id, ++ const GValue * value, GParamSpec * pspec) ++{ ++ GstWebRTCBinPad *pad = GST_WEBRTC_BIN_PAD (object); ++ ++ switch (prop_id) { ++ case PROP_SINK_PAD_MSID: ++ g_free (pad->msid); ++ pad->msid = g_value_dup_string (value); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_webrtc_bin_sink_pad_class_init (GstWebRTCBinSinkPadClass * klass) ++{ ++ GObjectClass *gobject_class = (GObjectClass *) klass; ++ ++ gobject_class->get_property = gst_webrtc_bin_sink_pad_get_property; ++ gobject_class->set_property = gst_webrtc_bin_sink_pad_set_property; ++ ++ /** ++ * GstWebRTCBinSinkPad:msid: ++ * ++ * The MediaStream Identifier to use for this pad (MediaStreamTrack). ++ * Fallback is the RTP SDES cname value if not provided. 
++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_SINK_PAD_MSID, ++ g_param_spec_string ("msid", "MSID", ++ "Local MediaStream ID to use for this pad (NULL = unset)", NULL, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++} ++ ++static void ++gst_webrtc_bin_sink_pad_init (GstWebRTCBinSinkPad * pad) ++{ ++ gst_pad_set_event_function (GST_PAD (pad), gst_webrtcbin_sink_event); ++ gst_pad_set_query_function (GST_PAD (pad), gst_webrtcbin_sink_query); ++} ++ ++enum ++{ ++ PROP_SRC_PAD_MSID = 1, ++}; ++ ++/** ++ * GstWebRTCBinSrcPad: ++ * ++ * Since: 1.22 ++ */ ++struct _GstWebRTCBinSrcPad ++{ ++ GstWebRTCBinPad pad; ++}; ++ ++G_DEFINE_TYPE (GstWebRTCBinSrcPad, gst_webrtc_bin_src_pad, ++ GST_TYPE_WEBRTC_BIN_PAD); ++ ++static void ++gst_webrtc_bin_src_pad_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec) ++{ ++ GstWebRTCBinPad *pad = GST_WEBRTC_BIN_PAD (object); ++ ++ switch (prop_id) { ++ case PROP_SRC_PAD_MSID: ++ g_value_set_string (value, pad->msid); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_webrtc_bin_src_pad_class_init (GstWebRTCBinSrcPadClass * klass) ++{ ++ GObjectClass *gobject_class = (GObjectClass *) klass; ++ ++ gobject_class->get_property = gst_webrtc_bin_src_pad_get_property; ++ ++ /** ++ * GstWebRTCBinSrcPad:msid: ++ * ++ * The MediaStream Identifier the remote peer used for this pad (MediaStreamTrack). ++ * Will be NULL if not advertised in the remote SDP. ++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_SRC_PAD_MSID, ++ g_param_spec_string ("msid", "MSID", ++ "Remote MediaStream ID in use for this pad (NULL = not advertised)", ++ NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++} ++ ++static void ++gst_webrtc_bin_src_pad_init (GstWebRTCBinSrcPad * pad) ++{ ++} ++ + #define gst_webrtc_bin_parent_class parent_class + G_DEFINE_TYPE_WITH_CODE (GstWebRTCBin, gst_webrtc_bin, GST_TYPE_BIN, + G_ADD_PRIVATE (GstWebRTCBin) + GST_DEBUG_CATEGORY_INIT (gst_webrtc_bin_debug, "webrtcbin", 0, + "webrtcbin element");); + +-static GstPad *_connect_input_stream (GstWebRTCBin * webrtc, +- GstWebRTCBinPad * pad); +- + enum + { + SIGNAL_0, +@@ -347,6 +648,8 @@ enum + ADD_TURN_SERVER_SIGNAL, + CREATE_DATA_CHANNEL_SIGNAL, + ON_DATA_CHANNEL_SIGNAL, ++ PREPARE_DATA_CHANNEL_SIGNAL, ++ REQUEST_AUX_SENDER, + LAST_SIGNAL, + }; + +@@ -368,7 +671,9 @@ enum + PROP_BUNDLE_POLICY, + PROP_ICE_TRANSPORT_POLICY, + PROP_ICE_AGENT, +- PROP_LATENCY ++ PROP_LATENCY, ++ PROP_SCTP_TRANSPORT, ++ PROP_HTTP_PROXY + }; + + static guint gst_webrtc_bin_signals[LAST_SIGNAL] = { 0 }; +@@ -412,18 +717,6 @@ _add_ice_stream_item (GstWebRTCBin * webrtc, guint session_id, + g_array_append_val (webrtc->priv->ice_stream_map, item); + } + +-typedef struct +-{ +- guint session_id; +- gchar *mid; +-} SessionMidItem; +- +-static void +-clear_session_mid_item (SessionMidItem * item) +-{ +- g_free (item->mid); +-} +- + typedef gboolean (*FindTransceiverFunc) (GstWebRTCRTPTransceiver * p1, + gconstpointer data); + +@@ -445,7 +738,7 @@ _find_transceiver (GstWebRTCBin * webrtc, gconstpointer data, + } + + static gboolean +-match_for_mid (GstWebRTCRTPTransceiver * trans, const gchar * mid) ++transceiver_match_for_mid (GstWebRTCRTPTransceiver * trans, const gchar * mid) + { + return g_strcmp0 (trans->mid, mid) == 0; + } +@@ -453,6 +746,9 @@ match_for_mid (GstWebRTCRTPTransceiver * trans, const gchar * mid) + static gboolean + 
transceiver_match_for_mline (GstWebRTCRTPTransceiver * trans, guint * mline) + { ++ if (trans->stopped) ++ return FALSE; ++ + return trans->mline == *mline; + } + +@@ -471,6 +767,20 @@ _find_transceiver_for_mline (GstWebRTCBin * webrtc, guint mlineindex) + return trans; + } + ++static GstWebRTCRTPTransceiver * ++_find_transceiver_for_mid (GstWebRTCBin * webrtc, const char *mid) ++{ ++ GstWebRTCRTPTransceiver *trans; ++ ++ trans = _find_transceiver (webrtc, mid, ++ (FindTransceiverFunc) transceiver_match_for_mid); ++ ++ GST_TRACE_OBJECT (webrtc, "Found transceiver %" GST_PTR_FORMAT " for " ++ "mid %s", trans, mid); ++ ++ return trans; ++} ++ + typedef gboolean (*FindTransportFunc) (TransportStream * p1, + gconstpointer data); + +@@ -571,6 +881,7 @@ data_channel_match_for_id (WebRTCDataChannel * channel, gint * id) + return channel->parent.id == *id; + } + ++/* always called with dc_lock held */ + static WebRTCDataChannel * + _find_data_channel_for_id (GstWebRTCBin * webrtc, gint id) + { +@@ -593,12 +904,23 @@ _add_pad_to_list (GstWebRTCBin * webrtc, GstWebRTCBinPad * pad) + GST_OBJECT_UNLOCK (webrtc); + } + +-static void ++static gboolean + _remove_pending_pad (GstWebRTCBin * webrtc, GstWebRTCBinPad * pad) + { ++ gboolean ret = FALSE; ++ GList *l; ++ + GST_OBJECT_LOCK (webrtc); +- webrtc->priv->pending_pads = g_list_remove (webrtc->priv->pending_pads, pad); ++ l = g_list_find (webrtc->priv->pending_pads, pad); ++ if (l) { ++ webrtc->priv->pending_pads = ++ g_list_remove_link (webrtc->priv->pending_pads, l); ++ g_list_free (l); ++ ret = TRUE; ++ } + GST_OBJECT_UNLOCK (webrtc); ++ ++ return ret; + } + + static void +@@ -622,21 +944,21 @@ _remove_pad (GstWebRTCBin * webrtc, GstWebRTCBinPad * pad) + typedef struct + { + GstPadDirection direction; +- guint mlineindex; ++ guint mline; + } MLineMatch; + + static gboolean + pad_match_for_mline (GstWebRTCBinPad * pad, const MLineMatch * match) + { + return GST_PAD_DIRECTION (pad) == match->direction +- && pad->mlineindex == match->mlineindex; ++ && pad->trans->mline == match->mline; + } + + static GstWebRTCBinPad * + _find_pad_for_mline (GstWebRTCBin * webrtc, GstPadDirection direction, +- guint mlineindex) ++ guint mline) + { +- MLineMatch m = { direction, mlineindex }; ++ MLineMatch m = { direction, mline }; + + return _find_pad (webrtc, &m, (FindPadFunc) pad_match_for_mline); + } +@@ -664,17 +986,82 @@ _find_pad_for_transceiver (GstWebRTCBin * webrtc, GstPadDirection direction, + + #if 0 + static gboolean +-match_for_ssrc (GstWebRTCBinPad * pad, guint * ssrc) ++match_for_pad (GstWebRTCBinPad * pad, GstWebRTCBinPad * other) + { +- return pad->ssrc == *ssrc; ++ return pad == other; + } ++#endif ++ ++struct SsrcMatch ++{ ++ GstWebRTCRTPTransceiverDirection direction; ++ guint32 ssrc; ++}; + + static gboolean +-match_for_pad (GstWebRTCBinPad * pad, GstWebRTCBinPad * other) ++mid_ssrc_match_for_ssrc (SsrcMapItem * entry, const struct SsrcMatch *match) + { +- return pad == other; ++ return entry->direction == match->direction && entry->ssrc == match->ssrc; ++} ++ ++static gboolean ++mid_ssrc_remove_ssrc (SsrcMapItem * item, const struct SsrcMatch *match) ++{ ++ return !mid_ssrc_match_for_ssrc (item, match); ++} ++ ++static SsrcMapItem * ++find_mid_ssrc_for_ssrc (GstWebRTCBin * webrtc, ++ GstWebRTCRTPTransceiverDirection direction, guint rtp_session, guint ssrc) ++{ ++ TransportStream *stream = _find_transport_for_session (webrtc, rtp_session); ++ struct SsrcMatch m = { direction, ssrc }; ++ ++ if (!stream) ++ return NULL; ++ ++ return 
transport_stream_find_ssrc_map_item (stream, &m, ++ (FindSsrcMapFunc) mid_ssrc_match_for_ssrc); ++} ++ ++static SsrcMapItem * ++find_or_add_ssrc_map_item (GstWebRTCBin * webrtc, ++ GstWebRTCRTPTransceiverDirection direction, guint rtp_session, guint ssrc, ++ guint media_idx) ++{ ++ TransportStream *stream = _find_transport_for_session (webrtc, rtp_session); ++ struct SsrcMatch m = { direction, ssrc }; ++ SsrcMapItem *item; ++ ++ if (!stream) ++ return NULL; ++ ++ if ((item = transport_stream_find_ssrc_map_item (stream, &m, ++ (FindSsrcMapFunc) mid_ssrc_match_for_ssrc))) ++ return item; ++ ++ return transport_stream_add_ssrc_map_item (stream, direction, ssrc, ++ media_idx); ++} ++ ++static void ++remove_ssrc_entry_by_ssrc (GstWebRTCBin * webrtc, guint rtp_session, guint ssrc) ++{ ++ TransportStream *stream; ++ ++ stream = _find_transport_for_session (webrtc, rtp_session); ++ if (stream) { ++ struct SsrcMatch m = ++ { GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY, ssrc }; ++ ++ transport_stream_filter_ssrc_map_item (stream, &m, ++ (FindSsrcMapFunc) mid_ssrc_remove_ssrc); ++ ++ m.direction = GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY; ++ transport_stream_filter_ssrc_map_item (stream, &m, ++ (FindSsrcMapFunc) mid_ssrc_remove_ssrc); ++ } + } +-#endif + + static gboolean + _unlock_pc_thread (GMutex * lock) +@@ -749,14 +1136,17 @@ _stop_thread (GstWebRTCBin * webrtc) + static gboolean + _execute_op (GstWebRTCBinTask * op) + { ++ GstStructure *s; ++ + PC_LOCK (op->webrtc); + if (op->webrtc->priv->is_closed) { ++ PC_UNLOCK (op->webrtc); ++ + if (op->promise) { + GError *error = +- g_error_new (GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_CLOSED, ++ g_error_new (GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_INVALID_STATE, + "webrtcbin is closed. aborting execution."); +- GstStructure *s = +- gst_structure_new ("application/x-gstwebrtcbin-promise-error", ++ GstStructure *s = gst_structure_new ("application/x-gst-promise", + "error", G_TYPE_ERROR, error, NULL); + + gst_promise_reply (op->promise, s); +@@ -768,10 +1158,16 @@ _execute_op (GstWebRTCBinTask * op) + goto out; + } + +- op->op (op->webrtc, op->data); ++ s = op->op (op->webrtc, op->data); + +-out: + PC_UNLOCK (op->webrtc); ++ ++ if (op->promise) ++ gst_promise_reply (op->promise, s); ++ else if (s) ++ gst_structure_free (s); ++ ++out: + return G_SOURCE_REMOVE; + } + +@@ -845,11 +1241,8 @@ _collate_ice_connection_states (GstWebRTCBin * webrtc) + for (i = 0; i < webrtc->priv->transceivers->len; i++) { + GstWebRTCRTPTransceiver *rtp_trans = + g_ptr_array_index (webrtc->priv->transceivers, i); +- WebRTCTransceiver *trans = WEBRTC_TRANSCEIVER (rtp_trans); +- TransportStream *stream = trans->stream; +- GstWebRTCICETransport *transport, *rtcp_transport; ++ GstWebRTCICETransport *transport; + GstWebRTCICEConnectionState ice_state; +- gboolean rtcp_mux = FALSE; + + if (rtp_trans->stopped) { + GST_TRACE_OBJECT (webrtc, "transceiver %p stopped", rtp_trans); +@@ -861,8 +1254,6 @@ _collate_ice_connection_states (GstWebRTCBin * webrtc) + continue; + } + +- g_object_get (stream, "rtcp-mux", &rtcp_mux, NULL); +- + transport = webrtc_transceiver_get_dtls_transport (rtp_trans)->transport; + + /* get transport state */ +@@ -878,24 +1269,6 @@ _collate_ice_connection_states (GstWebRTCBin * webrtc) + if (ice_state != STATE (CONNECTED) && ice_state != STATE (COMPLETED) + && ice_state != STATE (CLOSED)) + all_connected_completed_or_closed = FALSE; +- +- rtcp_transport = +- webrtc_transceiver_get_rtcp_dtls_transport (rtp_trans)->transport; +- +- if (!rtcp_mux && rtcp_transport 
&& transport != rtcp_transport) { +- g_object_get (rtcp_transport, "state", &ice_state, NULL); +- GST_TRACE_OBJECT (webrtc, "transceiver %p RTCP state 0x%x", rtp_trans, +- ice_state); +- any_state |= (1 << ice_state); +- +- if (ice_state != STATE (NEW) && ice_state != STATE (CLOSED)) +- all_new_or_closed = FALSE; +- if (ice_state != STATE (COMPLETED) && ice_state != STATE (CLOSED)) +- all_completed_or_closed = FALSE; +- if (ice_state != STATE (CONNECTED) && ice_state != STATE (COMPLETED) +- && ice_state != STATE (CLOSED)) +- all_connected_completed_or_closed = FALSE; +- } + } + + GST_TRACE_OBJECT (webrtc, "ICE connection state: 0x%x", any_state); +@@ -947,7 +1320,11 @@ _collate_ice_gathering_states (GstWebRTCBin * webrtc) + { + #define STATE(val) GST_WEBRTC_ICE_GATHERING_STATE_ ## val + GstWebRTCICEGatheringState any_state = 0; +- gboolean all_completed = webrtc->priv->transceivers->len > 0; ++ GstWebRTCICEGatheringState ice_state; ++ GstWebRTCDTLSTransport *dtls_transport; ++ GstWebRTCICETransport *transport; ++ gboolean all_completed = webrtc->priv->transceivers->len > 0 || ++ webrtc->priv->data_channel_transport; + int i; + + for (i = 0; i < webrtc->priv->transceivers->len; i++) { +@@ -955,10 +1332,6 @@ _collate_ice_gathering_states (GstWebRTCBin * webrtc) + g_ptr_array_index (webrtc->priv->transceivers, i); + WebRTCTransceiver *trans = WEBRTC_TRANSCEIVER (rtp_trans); + TransportStream *stream = trans->stream; +- GstWebRTCDTLSTransport *dtls_transport; +- GstWebRTCICETransport *transport, *rtcp_transport; +- GstWebRTCICEGatheringState ice_state; +- gboolean rtcp_mux = FALSE; + + if (rtp_trans->stopped || stream == NULL) { + GST_TRACE_OBJECT (webrtc, "transceiver %p stopped or unassociated", +@@ -972,8 +1345,6 @@ _collate_ice_gathering_states (GstWebRTCBin * webrtc) + GST_TRACE_OBJECT (webrtc, "transceiver %p has no mid", rtp_trans); + } + +- g_object_get (stream, "rtcp-mux", &rtcp_mux, NULL); +- + dtls_transport = webrtc_transceiver_get_dtls_transport (rtp_trans); + if (dtls_transport == NULL) { + GST_WARNING ("Transceiver %p has no DTLS transport", rtp_trans); +@@ -989,18 +1360,16 @@ _collate_ice_gathering_states (GstWebRTCBin * webrtc) + any_state |= (1 << ice_state); + if (ice_state != STATE (COMPLETE)) + all_completed = FALSE; ++ } + +- dtls_transport = webrtc_transceiver_get_rtcp_dtls_transport (rtp_trans); +- if (dtls_transport == NULL) { +- GST_WARNING ("Transceiver %p has no DTLS RTCP transport", rtp_trans); +- continue; +- } +- rtcp_transport = dtls_transport->transport; +- +- if (!rtcp_mux && rtcp_transport && rtcp_transport != transport) { +- g_object_get (rtcp_transport, "gathering-state", &ice_state, NULL); +- GST_TRACE_OBJECT (webrtc, "transceiver %p RTCP gathering state: 0x%x", +- rtp_trans, ice_state); ++ /* check data channel transport gathering state */ ++ if (all_completed && webrtc->priv->data_channel_transport) { ++ if ((dtls_transport = webrtc->priv->data_channel_transport->transport)) { ++ transport = dtls_transport->transport; ++ g_object_get (transport, "gathering-state", &ice_state, NULL); ++ GST_TRACE_OBJECT (webrtc, ++ "data channel transport %p gathering state: 0x%x", dtls_transport, ++ ice_state); + any_state |= (1 << ice_state); + if (ice_state != STATE (COMPLETE)) + all_completed = FALSE; +@@ -1048,12 +1417,9 @@ _collate_peer_connection_states (GstWebRTCBin * webrtc) + for (i = 0; i < webrtc->priv->transceivers->len; i++) { + GstWebRTCRTPTransceiver *rtp_trans = + g_ptr_array_index (webrtc->priv->transceivers, i); +- WebRTCTransceiver *trans = 
WEBRTC_TRANSCEIVER (rtp_trans); +- TransportStream *stream = trans->stream; +- GstWebRTCDTLSTransport *transport, *rtcp_transport; ++ GstWebRTCDTLSTransport *transport; + GstWebRTCICEConnectionState ice_state; + GstWebRTCDTLSTransportState dtls_state; +- gboolean rtcp_mux = FALSE; + + if (rtp_trans->stopped) { + GST_TRACE_OBJECT (webrtc, "transceiver %p stopped", rtp_trans); +@@ -1064,7 +1430,6 @@ _collate_peer_connection_states (GstWebRTCBin * webrtc) + continue; + } + +- g_object_get (stream, "rtcp-mux", &rtcp_mux, NULL); + transport = webrtc_transceiver_get_dtls_transport (rtp_trans); + + /* get transport state */ +@@ -1093,38 +1458,40 @@ _collate_peer_connection_states (GstWebRTCBin * webrtc) + if (ice_state != ICE_STATE (CONNECTED) && ice_state != ICE_STATE (COMPLETED) + && ice_state != ICE_STATE (CLOSED)) + ice_all_connected_completed_or_closed = FALSE; ++ } + +- rtcp_transport = webrtc_transceiver_get_rtcp_dtls_transport (rtp_trans); ++ // also check data channel transport state ++ if (webrtc->priv->data_channel_transport) { ++ GstWebRTCDTLSTransport *transport = ++ webrtc->priv->data_channel_transport->transport; ++ GstWebRTCICEConnectionState ice_state; ++ GstWebRTCDTLSTransportState dtls_state; + +- if (!rtcp_mux && rtcp_transport && rtcp_transport != transport) { +- g_object_get (rtcp_transport, "state", &dtls_state, NULL); +- GST_TRACE_OBJECT (webrtc, "transceiver %p RTCP DTLS state: 0x%x", +- rtp_trans, dtls_state); +- any_dtls_state |= (1 << dtls_state); ++ g_object_get (transport, "state", &dtls_state, NULL); ++ GST_TRACE_OBJECT (webrtc, "data channel transport DTLS state: 0x%x", ++ dtls_state); ++ any_dtls_state |= (1 << dtls_state); + +- if (dtls_state != DTLS_STATE (NEW) && dtls_state != DTLS_STATE (CLOSED)) +- dtls_all_new_or_closed = FALSE; +- if (dtls_state != DTLS_STATE (NEW) +- && dtls_state != DTLS_STATE (CONNECTING)) +- dtls_all_new_connecting_or_checking = FALSE; +- if (dtls_state != DTLS_STATE (CONNECTED) +- && dtls_state != DTLS_STATE (CLOSED)) +- dtls_all_connected_completed_or_closed = FALSE; ++ if (dtls_state != DTLS_STATE (NEW) && dtls_state != DTLS_STATE (CLOSED)) ++ dtls_all_new_or_closed = FALSE; ++ if (dtls_state != DTLS_STATE (NEW) && dtls_state != DTLS_STATE (CONNECTING)) ++ dtls_all_new_connecting_or_checking = FALSE; ++ if (dtls_state != DTLS_STATE (CONNECTED) ++ && dtls_state != DTLS_STATE (CLOSED)) ++ dtls_all_connected_completed_or_closed = FALSE; + +- g_object_get (rtcp_transport->transport, "state", &ice_state, NULL); +- GST_TRACE_OBJECT (webrtc, "transceiver %p RTCP ICE state: 0x%x", +- rtp_trans, ice_state); +- any_ice_state |= (1 << ice_state); ++ g_object_get (transport->transport, "state", &ice_state, NULL); ++ GST_TRACE_OBJECT (webrtc, "data channel transport ICE state: 0x%x", ++ ice_state); ++ any_ice_state |= (1 << ice_state); + +- if (ice_state != ICE_STATE (NEW) && ice_state != ICE_STATE (CLOSED)) +- ice_all_new_or_closed = FALSE; +- if (ice_state != ICE_STATE (NEW) && ice_state != ICE_STATE (CHECKING)) +- ice_all_new_connecting_or_checking = FALSE; +- if (ice_state != ICE_STATE (CONNECTED) +- && ice_state != ICE_STATE (COMPLETED) +- && ice_state != ICE_STATE (CLOSED)) +- ice_all_connected_completed_or_closed = FALSE; +- } ++ if (ice_state != ICE_STATE (NEW) && ice_state != ICE_STATE (CLOSED)) ++ ice_all_new_or_closed = FALSE; ++ if (ice_state != ICE_STATE (NEW) && ice_state != ICE_STATE (CHECKING)) ++ ice_all_new_connecting_or_checking = FALSE; ++ if (ice_state != ICE_STATE (CONNECTED) && ice_state != ICE_STATE (COMPLETED) ++ && 
ice_state != ICE_STATE (CLOSED)) ++ ice_all_connected_completed_or_closed = FALSE; + } + + GST_TRACE_OBJECT (webrtc, "ICE connection state: 0x%x. DTLS connection " +@@ -1156,7 +1523,7 @@ _collate_peer_connection_states (GstWebRTCBin * webrtc) + /* All RTCIceTransports and RTCDtlsTransports are in the new or closed + * state, or there are no transports. */ + if ((dtls_all_new_or_closed && ice_all_new_or_closed) +- || webrtc->priv->transceivers->len == 0) { ++ || webrtc->priv->transports->len == 0) { + GST_TRACE_OBJECT (webrtc, "returning new"); + return STATE (NEW); + } +@@ -1193,7 +1560,7 @@ _collate_peer_connection_states (GstWebRTCBin * webrtc) + #undef STATE + } + +-static void ++static GstStructure * + _update_ice_gathering_state_task (GstWebRTCBin * webrtc, gpointer data) + { + GstWebRTCICEGatheringState old_state = webrtc->ice_gathering_state; +@@ -1215,7 +1582,7 @@ _update_ice_gathering_state_task (GstWebRTCBin * webrtc, gpointer data) + } + + if (new_state != webrtc->ice_gathering_state) { +- gchar *old_s, *new_s; ++ const gchar *old_s, *new_s; + + old_s = _enum_value_to_string (GST_TYPE_WEBRTC_ICE_GATHERING_STATE, + old_state); +@@ -1223,14 +1590,14 @@ _update_ice_gathering_state_task (GstWebRTCBin * webrtc, gpointer data) + new_state); + GST_INFO_OBJECT (webrtc, "ICE gathering state change from %s(%u) to %s(%u)", + old_s, old_state, new_s, new_state); +- g_free (old_s); +- g_free (new_s); + + webrtc->ice_gathering_state = new_state; + PC_UNLOCK (webrtc); + g_object_notify (G_OBJECT (webrtc), "ice-gathering-state"); + PC_LOCK (webrtc); + } ++ ++ return NULL; + } + + static void +@@ -1240,7 +1607,7 @@ _update_ice_gathering_state (GstWebRTCBin * webrtc) + NULL, NULL); + } + +-static void ++static GstStructure * + _update_ice_connection_state_task (GstWebRTCBin * webrtc, gpointer data) + { + GstWebRTCICEConnectionState old_state = webrtc->ice_connection_state; +@@ -1249,7 +1616,7 @@ _update_ice_connection_state_task (GstWebRTCBin * webrtc, gpointer data) + new_state = _collate_ice_connection_states (webrtc); + + if (new_state != old_state) { +- gchar *old_s, *new_s; ++ const gchar *old_s, *new_s; + + old_s = _enum_value_to_string (GST_TYPE_WEBRTC_ICE_CONNECTION_STATE, + old_state); +@@ -1258,14 +1625,14 @@ _update_ice_connection_state_task (GstWebRTCBin * webrtc, gpointer data) + GST_INFO_OBJECT (webrtc, + "ICE connection state change from %s(%u) to %s(%u)", old_s, old_state, + new_s, new_state); +- g_free (old_s); +- g_free (new_s); + + webrtc->ice_connection_state = new_state; + PC_UNLOCK (webrtc); + g_object_notify (G_OBJECT (webrtc), "ice-connection-state"); + PC_LOCK (webrtc); + } ++ ++ return NULL; + } + + static void +@@ -1275,7 +1642,7 @@ _update_ice_connection_state (GstWebRTCBin * webrtc) + NULL, NULL); + } + +-static void ++static GstStructure * + _update_peer_connection_state_task (GstWebRTCBin * webrtc, gpointer data) + { + GstWebRTCPeerConnectionState old_state = webrtc->peer_connection_state; +@@ -1284,7 +1651,7 @@ _update_peer_connection_state_task (GstWebRTCBin * webrtc, gpointer data) + new_state = _collate_peer_connection_states (webrtc); + + if (new_state != old_state) { +- gchar *old_s, *new_s; ++ const gchar *old_s, *new_s; + + old_s = _enum_value_to_string (GST_TYPE_WEBRTC_PEER_CONNECTION_STATE, + old_state); +@@ -1293,14 +1660,14 @@ _update_peer_connection_state_task (GstWebRTCBin * webrtc, gpointer data) + GST_INFO_OBJECT (webrtc, + "Peer connection state change from %s(%u) to %s(%u)", old_s, old_state, + new_s, new_state); +- g_free (old_s); +- g_free 
(new_s); + + webrtc->peer_connection_state = new_state; + PC_UNLOCK (webrtc); + g_object_notify (G_OBJECT (webrtc), "connection-state"); + PC_LOCK (webrtc); + } ++ ++ return NULL; + } + + static void +@@ -1327,7 +1694,11 @@ _all_sinks_have_caps (GstWebRTCBin * webrtc) + wpad = GST_WEBRTC_BIN_PAD (l->data); + if (GST_PAD_DIRECTION (l->data) == GST_PAD_SINK && !wpad->received_caps + && (!wpad->trans || !wpad->trans->stopped)) { +- goto done; ++ if (wpad->trans && wpad->trans->codec_preferences) { ++ continue; ++ } else { ++ goto done; ++ } + } + } + +@@ -1354,7 +1725,7 @@ _check_if_negotiation_is_needed (GstWebRTCBin * webrtc) + GST_LOG_OBJECT (webrtc, "checking if negotiation is needed"); + + /* We can't negotiate until we have received caps on all our sink pads, +- * as we will need the ssrcs in our offer / answer */ ++ * as we will need the formats in our offer / answer */ + if (!_all_sinks_have_caps (webrtc)) { + GST_LOG_OBJECT (webrtc, + "no negotiation possible until caps have been received on all sink pads"); +@@ -1436,26 +1807,11 @@ _check_if_negotiation_is_needed (GstWebRTCBin * webrtc) + * nor answer matches t's direction, return "true". */ + + if (local_dir != trans->direction && remote_dir != trans->direction) { +- gchar *local_str, *remote_str, *dir_str; +- +- local_str = +- _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, +- local_dir); +- remote_str = +- _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, +- remote_dir); +- dir_str = +- _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, +- trans->direction); +- + GST_LOG_OBJECT (webrtc, "transceiver direction (%s) doesn't match " +- "description (local %s remote %s)", dir_str, local_str, +- remote_str); +- +- g_free (dir_str); +- g_free (local_str); +- g_free (remote_str); +- ++ "description (local %s remote %s)", ++ gst_webrtc_rtp_transceiver_direction_to_string (trans->direction), ++ gst_webrtc_rtp_transceiver_direction_to_string (local_dir), ++ gst_webrtc_rtp_transceiver_direction_to_string (remote_dir)); + return TRUE; + } + } else if (webrtc->current_local_description->type == +@@ -1471,30 +1827,12 @@ _check_if_negotiation_is_needed (GstWebRTCBin * webrtc) + intersect_dir = _intersect_answer_directions (remote_dir, local_dir); + + if (intersect_dir != trans->direction) { +- gchar *local_str, *remote_str, *inter_str, *dir_str; +- +- local_str = +- _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, +- local_dir); +- remote_str = +- _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, +- remote_dir); +- dir_str = +- _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, +- trans->direction); +- inter_str = +- _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, +- intersect_dir); +- + GST_LOG_OBJECT (webrtc, "transceiver direction (%s) doesn't match " + "description intersected direction %s (local %s remote %s)", +- dir_str, local_str, inter_str, remote_str); +- +- g_free (dir_str); +- g_free (local_str); +- g_free (remote_str); +- g_free (inter_str); +- ++ gst_webrtc_rtp_transceiver_direction_to_string (trans->direction), ++ gst_webrtc_rtp_transceiver_direction_to_string (local_dir), ++ gst_webrtc_rtp_transceiver_direction_to_string (intersect_dir), ++ gst_webrtc_rtp_transceiver_direction_to_string (remote_dir)); + return TRUE; + } + } +@@ -1505,7 +1843,7 @@ _check_if_negotiation_is_needed (GstWebRTCBin * webrtc) + return FALSE; + } + +-static void ++static GstStructure * + _check_need_negotiation_task (GstWebRTCBin * webrtc, 
gpointer unused) + { + if (webrtc->priv->need_negotiation) { +@@ -1515,6 +1853,8 @@ _check_need_negotiation_task (GstWebRTCBin * webrtc, gpointer unused) + 0); + PC_LOCK (webrtc); + } ++ ++ return NULL; + } + + /* http://w3c.github.io/webrtc-pc/#dfn-update-the-negotiation-needed-flag */ +@@ -1546,56 +1886,185 @@ _update_need_negotiation (GstWebRTCBin * webrtc) + NULL, NULL); + } + ++static GstCaps * ++_query_pad_caps (GstWebRTCBin * webrtc, GstWebRTCRTPTransceiver * rtp_trans, ++ GstWebRTCBinPad * pad, GstCaps * filter, GError ** error) ++{ ++ GstCaps *caps; ++ guint i, n; ++ ++ caps = gst_pad_peer_query_caps (GST_PAD (pad), filter); ++ GST_LOG_OBJECT (webrtc, "Using peer query caps: %" GST_PTR_FORMAT, caps); ++ ++ /* Only return an error if actual empty caps were returned from the query. */ ++ if (gst_caps_is_empty (caps)) { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "Caps negotiation on pad %s failed", GST_PAD_NAME (pad)); ++ gst_clear_caps (&caps); ++ gst_caps_unref (filter); ++ return NULL; ++ } ++ ++ n = gst_caps_get_size (caps); ++ if (n > 0) { ++ /* Make sure the caps are complete enough to figure out the media type and ++ * encoding-name, otherwise they would match with basically any media. */ ++ caps = gst_caps_make_writable (caps); ++ for (i = n; i > 0; i--) { ++ const GstStructure *s = gst_caps_get_structure (caps, i - 1); ++ ++ if (!gst_structure_has_name (s, "application/x-rtp") || ++ !gst_structure_has_field (s, "media") || ++ !gst_structure_has_field (s, "encoding-name")) { ++ gst_caps_remove_structure (caps, i - 1); ++ } ++ } ++ } ++ ++ /* If the filtering above resulted in empty caps, or the caps were ANY to ++ * begin with, then don't report and error but just NULL. ++ * ++ * This would be the case if negotiation would not fail but the peer does ++ * not have any specific enough preferred caps that would allow us to ++ * use them further. 
++ */ ++ if (gst_caps_is_any (caps) || gst_caps_is_empty (caps)) { ++ GST_DEBUG_OBJECT (webrtc, "Peer caps not specific enough"); ++ gst_clear_caps (&caps); ++ } ++ ++ gst_caps_unref (filter); ++ ++ return caps; ++} ++ + static GstCaps * + _find_codec_preferences (GstWebRTCBin * webrtc, +- GstWebRTCRTPTransceiver * rtp_trans, GstPadDirection direction, +- guint media_idx) ++ GstWebRTCRTPTransceiver * rtp_trans, guint media_idx, GError ** error) + { + WebRTCTransceiver *trans = (WebRTCTransceiver *) rtp_trans; + GstCaps *ret = NULL; ++ GstCaps *codec_preferences = NULL; ++ GstWebRTCBinPad *pad = NULL; ++ GstPadDirection direction; ++ ++ g_assert (rtp_trans); ++ g_assert (error && *error == NULL); + + GST_LOG_OBJECT (webrtc, "retrieving codec preferences from %" GST_PTR_FORMAT, + trans); + +- if (rtp_trans && rtp_trans->codec_preferences) { ++ GST_OBJECT_LOCK (rtp_trans); ++ if (rtp_trans->codec_preferences) { + GST_LOG_OBJECT (webrtc, "Using codec preferences: %" GST_PTR_FORMAT, + rtp_trans->codec_preferences); +- ret = gst_caps_ref (rtp_trans->codec_preferences); +- } else { +- GstWebRTCBinPad *pad = NULL; ++ codec_preferences = gst_caps_ref (rtp_trans->codec_preferences); ++ } ++ GST_OBJECT_UNLOCK (rtp_trans); ++ ++ if (rtp_trans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY) ++ direction = GST_PAD_SRC; ++ else ++ direction = GST_PAD_SINK; ++ ++ pad = _find_pad_for_transceiver (webrtc, direction, rtp_trans); ++ ++ /* try to find a pad */ ++ if (!pad) ++ pad = _find_pad_for_mline (webrtc, direction, media_idx); ++ ++ /* For the case where we have set our transceiver to sendrecv, but the ++ * sink pad has not been requested yet. ++ */ ++ if (!pad && ++ rtp_trans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV) { ++ ++ pad = _find_pad_for_transceiver (webrtc, GST_PAD_SRC, rtp_trans); + + /* try to find a pad */ +- if (!trans +- || !(pad = _find_pad_for_transceiver (webrtc, direction, rtp_trans))) +- pad = _find_pad_for_mline (webrtc, direction, media_idx); ++ if (!pad) ++ pad = _find_pad_for_mline (webrtc, GST_PAD_SRC, media_idx); ++ } ++ ++ if (pad) { ++ GstCaps *caps = NULL; + +- if (!pad) { +- if (trans && trans->last_configured_caps) +- ret = gst_caps_ref (trans->last_configured_caps); ++ if (pad->received_caps) { ++ caps = gst_caps_ref (pad->received_caps); + } else { +- GstCaps *caps = NULL; ++ static GstStaticCaps static_filter = ++ GST_STATIC_CAPS ("application/x-rtp, " ++ "media = (string) { audio, video }, payload = (int) [ 0, 127 ]"); ++ GstCaps *filter = gst_static_caps_get (&static_filter); + +- if (pad->received_caps) { +- caps = gst_caps_ref (pad->received_caps); +- } else if ((caps = gst_pad_get_current_caps (GST_PAD (pad)))) { +- GST_LOG_OBJECT (webrtc, "Using current pad caps: %" GST_PTR_FORMAT, +- caps); +- } else { +- if ((caps = gst_pad_peer_query_caps (GST_PAD (pad), NULL))) +- GST_LOG_OBJECT (webrtc, "Using peer query caps: %" GST_PTR_FORMAT, +- caps); ++ filter = gst_caps_make_writable (filter); ++ ++ if (rtp_trans->kind == GST_WEBRTC_KIND_AUDIO) ++ gst_caps_set_simple (filter, "media", G_TYPE_STRING, "audio", NULL); ++ else if (rtp_trans->kind == GST_WEBRTC_KIND_VIDEO) ++ gst_caps_set_simple (filter, "media", G_TYPE_STRING, "video", NULL); ++ ++ caps = _query_pad_caps (webrtc, rtp_trans, pad, filter, error); ++ } ++ ++ if (*error) ++ goto out; ++ ++ if (caps && ++ rtp_trans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV) { ++ GstWebRTCBinPad *srcpad = ++ _find_pad_for_transceiver (webrtc, GST_PAD_SRC, rtp_trans); ++ ++ if 
(srcpad) { ++ caps = _query_pad_caps (webrtc, rtp_trans, srcpad, caps, error); ++ gst_object_unref (srcpad); ++ ++ if (*error) ++ goto out; + } +- if (caps) { +- if (trans) +- gst_caps_replace (&trans->last_configured_caps, caps); ++ } + +- ret = caps; ++ if (caps && codec_preferences) { ++ GstCaps *intersection; ++ ++ intersection = gst_caps_intersect_full (codec_preferences, caps, ++ GST_CAPS_INTERSECT_FIRST); ++ gst_clear_caps (&caps); ++ ++ if (gst_caps_is_empty (intersection)) { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "Caps negotiation on pad %s failed against codec preferences", ++ GST_PAD_NAME (pad)); ++ gst_clear_caps (&intersection); ++ } else { ++ caps = intersection; + } ++ } + +- gst_object_unref (pad); ++ if (caps) { ++ if (trans) ++ gst_caps_replace (&trans->last_retrieved_caps, caps); ++ ++ ret = caps; + } + } + ++ if (!ret) { ++ if (codec_preferences) ++ ret = gst_caps_ref (codec_preferences); ++ else if (trans->last_retrieved_caps) ++ ret = gst_caps_ref (trans->last_retrieved_caps); ++ } ++ ++out: ++ ++ if (pad) ++ gst_object_unref (pad); ++ if (codec_preferences) ++ gst_caps_unref (codec_preferences); ++ + if (!ret) + GST_DEBUG_OBJECT (trans, "Could not find caps for mline %u", media_idx); + +@@ -1606,11 +2075,16 @@ static GstCaps * + _add_supported_attributes_to_caps (GstWebRTCBin * webrtc, + WebRTCTransceiver * trans, const GstCaps * caps) + { ++ GstWebRTCKind kind; + GstCaps *ret; + guint i; + ++ if (caps == NULL) ++ return NULL; ++ + ret = gst_caps_make_writable (caps); + ++ kind = webrtc_kind_from_caps (ret); + for (i = 0; i < gst_caps_get_size (ret); i++) { + GstStructure *s = gst_caps_get_structure (ret, i); + +@@ -1618,11 +2092,14 @@ _add_supported_attributes_to_caps (GstWebRTCBin * webrtc, + if (!gst_structure_has_field (s, "rtcp-fb-nack")) + gst_structure_set (s, "rtcp-fb-nack", G_TYPE_BOOLEAN, TRUE, NULL); + +- if (!gst_structure_has_field (s, "rtcp-fb-nack-pli")) +- gst_structure_set (s, "rtcp-fb-nack-pli", G_TYPE_BOOLEAN, TRUE, NULL); +- /* FIXME: is this needed? */ +- /*if (!gst_structure_has_field (s, "rtcp-fb-transport-cc")) +- gst_structure_set (s, "rtcp-fb-nack-pli", G_TYPE_BOOLEAN, TRUE, NULL); */ ++ if (kind == GST_WEBRTC_KIND_VIDEO) { ++ if (!gst_structure_has_field (s, "rtcp-fb-nack-pli")) ++ gst_structure_set (s, "rtcp-fb-nack-pli", G_TYPE_BOOLEAN, TRUE, NULL); ++ if (!gst_structure_has_field (s, "rtcp-fb-ccm-fir")) ++ gst_structure_set (s, "rtcp-fb-ccm-fir", G_TYPE_BOOLEAN, TRUE, NULL); ++ } ++ if (!gst_structure_has_field (s, "rtcp-fb-transport-cc")) ++ gst_structure_set (s, "rtcp-fb-transport-cc", G_TYPE_BOOLEAN, TRUE, NULL); + + /* FIXME: codec-specific parameters? 
*/ + } +@@ -1652,9 +2129,267 @@ _on_dtls_transport_notify_state (GstWebRTCDTLSTransport * transport, + _update_peer_connection_state (webrtc); + } + ++static gboolean ++_on_sending_rtcp (GObject * internal_session, GstBuffer * buffer, ++ gboolean early, gpointer user_data) ++{ ++ GstWebRTCBin *webrtc = user_data; ++ GstRTCPBuffer rtcp = GST_RTCP_BUFFER_INIT; ++ GstRTCPPacket packet; ++ ++ if (!gst_rtcp_buffer_map (buffer, GST_MAP_READ, &rtcp)) ++ goto done; ++ ++ if (gst_rtcp_buffer_get_first_packet (&rtcp, &packet)) { ++ if (gst_rtcp_packet_get_type (&packet) == GST_RTCP_TYPE_SR) { ++ guint32 ssrc; ++ GstWebRTCRTPTransceiver *rtp_trans = NULL; ++ WebRTCTransceiver *trans; ++ guint rtp_session; ++ SsrcMapItem *mid; ++ ++ gst_rtcp_packet_sr_get_sender_info (&packet, &ssrc, NULL, NULL, NULL, ++ NULL); ++ rtp_session = ++ GPOINTER_TO_UINT (g_object_get_data (internal_session, ++ "GstWebRTCBinRTPSessionID")); ++ ++ mid = find_mid_ssrc_for_ssrc (webrtc, ++ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY, rtp_session, ssrc); ++ if (mid && mid->mid) { ++ rtp_trans = _find_transceiver_for_mid (webrtc, mid->mid); ++ GST_LOG_OBJECT (webrtc, "found %" GST_PTR_FORMAT " from mid entry " ++ "using rtp session %u ssrc %u -> mid \'%s\'", rtp_trans, ++ rtp_session, ssrc, mid->mid); ++ } ++ trans = (WebRTCTransceiver *) rtp_trans; ++ ++ if (rtp_trans && rtp_trans->sender && trans->tos_event) { ++ GstPad *pad; ++ gchar *pad_name = NULL; ++ ++ pad_name = ++ g_strdup_printf ("send_rtcp_src_%u", ++ rtp_trans->sender->transport->session_id); ++ pad = gst_element_get_static_pad (webrtc->rtpbin, pad_name); ++ g_free (pad_name); ++ if (pad) { ++ gst_pad_push_event (pad, gst_event_ref (trans->tos_event)); ++ gst_object_unref (pad); ++ } ++ } ++ } ++ } ++ ++ gst_rtcp_buffer_unmap (&rtcp); ++ ++done: ++ /* False means we don't care about suppression */ ++ return FALSE; ++} ++ ++static void ++gst_webrtc_bin_attach_tos_to_session (GstWebRTCBin * webrtc, guint session_id) ++{ ++ GObject *internal_session = NULL; ++ ++ g_signal_emit_by_name (webrtc->rtpbin, "get-internal-session", ++ session_id, &internal_session); ++ ++ if (internal_session) { ++ g_object_set_data (internal_session, "GstWebRTCBinRTPSessionID", ++ GUINT_TO_POINTER (session_id)); ++ g_signal_connect (internal_session, "on-sending-rtcp", ++ G_CALLBACK (_on_sending_rtcp), webrtc); ++ g_object_unref (internal_session); ++ } ++} ++ ++static void ++weak_free (GWeakRef * weak) ++{ ++ g_weak_ref_clear (weak); ++ g_free (weak); ++} ++ ++static GstPadProbeReturn ++_nicesink_pad_probe (GstPad * pad, GstPadProbeInfo * info, gpointer user_data) ++{ ++ GstWebRTCBin *webrtc = g_weak_ref_get ((GWeakRef *) user_data); ++ ++ if (!webrtc) ++ return GST_PAD_PROBE_REMOVE; ++ ++ if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) ++ == GST_EVENT_CUSTOM_DOWNSTREAM_STICKY) { ++ const GstStructure *s = ++ gst_event_get_structure (GST_PAD_PROBE_INFO_EVENT (info)); ++ ++ if (gst_structure_has_name (s, "GstWebRtcBinUpdateTos")) { ++ const char *mid; ++ gint priority; ++ ++ if ((mid = gst_structure_get_string (s, "mid"))) { ++ GstWebRTCRTPTransceiver *rtp_trans; ++ ++ rtp_trans = _find_transceiver_for_mid (webrtc, mid); ++ if (rtp_trans) { ++ WebRTCTransceiver *trans = WEBRTC_TRANSCEIVER (rtp_trans); ++ GstWebRTCICEStream *stream = _find_ice_stream_for_session (webrtc, ++ trans->stream->session_id); ++ guint8 dscp = 0; ++ ++ /* Set DSCP field based on ++ * https://tools.ietf.org/html/draft-ietf-tsvwg-rtcweb-qos-18#section-5 ++ */ ++ switch (rtp_trans->sender->priority) { ++ case 
GST_WEBRTC_PRIORITY_TYPE_VERY_LOW: ++ dscp = 8; /* CS1 */ ++ break; ++ case GST_WEBRTC_PRIORITY_TYPE_LOW: ++ dscp = 0; /* DF */ ++ break; ++ case GST_WEBRTC_PRIORITY_TYPE_MEDIUM: ++ switch (rtp_trans->kind) { ++ case GST_WEBRTC_KIND_AUDIO: ++ dscp = 46; /* EF */ ++ break; ++ case GST_WEBRTC_KIND_VIDEO: ++ dscp = 38; /* AF43 *//* TODO: differentiate non-interactive */ ++ break; ++ case GST_WEBRTC_KIND_UNKNOWN: ++ dscp = 0; ++ break; ++ } ++ break; ++ case GST_WEBRTC_PRIORITY_TYPE_HIGH: ++ switch (rtp_trans->kind) { ++ case GST_WEBRTC_KIND_AUDIO: ++ dscp = 46; /* EF */ ++ break; ++ case GST_WEBRTC_KIND_VIDEO: ++ dscp = 36; /* AF42 *//* TODO: differentiate non-interactive */ ++ break; ++ case GST_WEBRTC_KIND_UNKNOWN: ++ dscp = 0; ++ break; ++ } ++ break; ++ } ++ ++ gst_webrtc_ice_set_tos (webrtc->priv->ice, stream, dscp << 2); ++ } ++ } else if (gst_structure_get_enum (s, "sctp-priority", ++ GST_TYPE_WEBRTC_PRIORITY_TYPE, &priority)) { ++ guint8 dscp = 0; ++ ++ /* Set DSCP field based on ++ * https://tools.ietf.org/html/draft-ietf-tsvwg-rtcweb-qos-18#section-5 ++ */ ++ switch (priority) { ++ case GST_WEBRTC_PRIORITY_TYPE_VERY_LOW: ++ dscp = 8; /* CS1 */ ++ break; ++ case GST_WEBRTC_PRIORITY_TYPE_LOW: ++ dscp = 0; /* DF */ ++ break; ++ case GST_WEBRTC_PRIORITY_TYPE_MEDIUM: ++ dscp = 10; /* AF11 */ ++ break; ++ case GST_WEBRTC_PRIORITY_TYPE_HIGH: ++ dscp = 18; /* AF21 */ ++ break; ++ } ++ if (webrtc->priv->data_channel_transport) ++ gst_webrtc_ice_set_tos (webrtc->priv->ice, ++ webrtc->priv->data_channel_transport->stream, dscp << 2); ++ } ++ } ++ } ++ ++ gst_object_unref (webrtc); ++ ++ return GST_PAD_PROBE_OK; ++} ++ ++static void gst_webrtc_bin_attach_tos (GstWebRTCBin * webrtc); ++ ++static void ++gst_webrtc_bin_update_sctp_priority (GstWebRTCBin * webrtc) ++{ ++ GstWebRTCPriorityType sctp_priority = 0; ++ guint i; ++ ++ if (!webrtc->priv->sctp_transport) ++ return; ++ ++ DC_LOCK (webrtc); ++ for (i = 0; i < webrtc->priv->data_channels->len; i++) { ++ GstWebRTCDataChannel *channel ++ = g_ptr_array_index (webrtc->priv->data_channels, i); ++ ++ sctp_priority = MAX (sctp_priority, channel->priority); ++ } ++ DC_UNLOCK (webrtc); ++ ++ /* Default priority is low means DSCP field is left as 0 */ ++ if (sctp_priority == 0) ++ sctp_priority = GST_WEBRTC_PRIORITY_TYPE_LOW; ++ ++ /* Nobody asks for DSCP, leave it as-is */ ++ if (sctp_priority == GST_WEBRTC_PRIORITY_TYPE_LOW && ++ !webrtc->priv->tos_attached) ++ return; ++ ++ /* If one stream has a non-default priority, then everyone else does too */ ++ gst_webrtc_bin_attach_tos (webrtc); ++ ++ webrtc_sctp_transport_set_priority (webrtc->priv->sctp_transport, ++ sctp_priority); ++} ++ ++static void ++gst_webrtc_bin_attach_probe_to_ice_sink (GstWebRTCBin * webrtc, ++ GstWebRTCICETransport * transport) ++{ ++ GstPad *pad; ++ GWeakRef *weak; ++ ++ pad = gst_element_get_static_pad (transport->sink, "sink"); ++ ++ weak = g_new0 (GWeakRef, 1); ++ g_weak_ref_init (weak, webrtc); ++ ++ gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, ++ _nicesink_pad_probe, weak, (GDestroyNotify) weak_free); ++ gst_object_unref (pad); ++} ++ ++static void ++gst_webrtc_bin_attach_tos (GstWebRTCBin * webrtc) ++{ ++ guint i; ++ ++ if (webrtc->priv->tos_attached) ++ return; ++ webrtc->priv->tos_attached = TRUE; ++ ++ for (i = 0; i < webrtc->priv->transports->len; i++) { ++ TransportStream *stream = g_ptr_array_index (webrtc->priv->transports, i); ++ ++ gst_webrtc_bin_attach_tos_to_session (webrtc, stream->session_id); ++ ++ gst_webrtc_bin_attach_probe_to_ice_sink 
(webrtc, ++ stream->transport->transport); ++ } ++ ++ gst_webrtc_bin_update_sctp_priority (webrtc); ++} ++ + static WebRTCTransceiver * + _create_webrtc_transceiver (GstWebRTCBin * webrtc, +- GstWebRTCRTPTransceiverDirection direction, guint mline) ++ GstWebRTCRTPTransceiverDirection direction, guint mline, GstWebRTCKind kind, ++ GstCaps * codec_preferences) + { + WebRTCTransceiver *trans; + GstWebRTCRTPTransceiver *rtp_trans; +@@ -1667,9 +2402,20 @@ _create_webrtc_transceiver (GstWebRTCBin * webrtc, + rtp_trans = GST_WEBRTC_RTP_TRANSCEIVER (trans); + rtp_trans->direction = direction; + rtp_trans->mline = mline; ++ rtp_trans->kind = kind; ++ rtp_trans->codec_preferences = ++ codec_preferences ? gst_caps_ref (codec_preferences) : NULL; + /* FIXME: We don't support stopping transceiver yet so they're always not stopped */ + rtp_trans->stopped = FALSE; + ++ GST_LOG_OBJECT (webrtc, "created new transceiver %" GST_PTR_FORMAT " with " ++ "direction %s (%d), mline %u, kind %s (%d)", rtp_trans, ++ gst_webrtc_rtp_transceiver_direction_to_string (direction), direction, ++ mline, gst_webrtc_kind_to_string (kind), kind); ++ ++ g_signal_connect_object (sender, "notify::priority", ++ G_CALLBACK (gst_webrtc_bin_attach_tos), webrtc, G_CONNECT_SWAPPED); ++ + g_ptr_array_add (webrtc->priv->transceivers, trans); + + gst_object_unref (sender); +@@ -1686,6 +2432,7 @@ _create_transport_channel (GstWebRTCBin * webrtc, guint session_id) + { + GstWebRTCDTLSTransport *transport; + TransportStream *ret; ++ gchar *pad_name; + + /* FIXME: how to parametrize the sender and the receiver */ + ret = transport_stream_new (webrtc, session_id); +@@ -1698,16 +2445,24 @@ _create_transport_channel (GstWebRTCBin * webrtc, guint session_id) + G_CALLBACK (_on_ice_transport_notify_gathering_state), webrtc); + g_signal_connect (G_OBJECT (transport), "notify::state", + G_CALLBACK (_on_dtls_transport_notify_state), webrtc); ++ if (webrtc->priv->tos_attached) ++ gst_webrtc_bin_attach_probe_to_ice_sink (webrtc, transport->transport); + +- if ((transport = ret->rtcp_transport)) { +- g_signal_connect (G_OBJECT (transport->transport), +- "notify::state", G_CALLBACK (_on_ice_transport_notify_state), webrtc); +- g_signal_connect (G_OBJECT (transport->transport), +- "notify::gathering-state", +- G_CALLBACK (_on_ice_transport_notify_gathering_state), webrtc); +- g_signal_connect (G_OBJECT (transport), "notify::state", +- G_CALLBACK (_on_dtls_transport_notify_state), webrtc); +- } ++ gst_bin_add (GST_BIN (webrtc), GST_ELEMENT (ret->send_bin)); ++ gst_bin_add (GST_BIN (webrtc), GST_ELEMENT (ret->receive_bin)); ++ g_ptr_array_add (webrtc->priv->transports, ret); ++ ++ pad_name = g_strdup_printf ("recv_rtcp_sink_%u", ret->session_id); ++ if (!gst_element_link_pads (GST_ELEMENT (ret->receive_bin), "rtcp_src", ++ GST_ELEMENT (webrtc->rtpbin), pad_name)) ++ g_warn_if_reached (); ++ g_free (pad_name); ++ ++ pad_name = g_strdup_printf ("send_rtcp_src_%u", ret->session_id); ++ if (!gst_element_link_pads (GST_ELEMENT (webrtc->rtpbin), pad_name, ++ GST_ELEMENT (ret->send_bin), "rtcp_sink")) ++ g_warn_if_reached (); ++ g_free (pad_name); + + GST_TRACE_OBJECT (webrtc, + "Create transport %" GST_PTR_FORMAT " for session %u", ret, session_id); +@@ -1719,28 +2474,11 @@ static TransportStream * + _get_or_create_rtp_transport_channel (GstWebRTCBin * webrtc, guint session_id) + { + TransportStream *ret; +- gchar *pad_name; + + ret = _find_transport_for_session (webrtc, session_id); + +- if (!ret) { ++ if (!ret) + ret = _create_transport_channel (webrtc, 
session_id); +- gst_bin_add (GST_BIN (webrtc), GST_ELEMENT (ret->send_bin)); +- gst_bin_add (GST_BIN (webrtc), GST_ELEMENT (ret->receive_bin)); +- g_ptr_array_add (webrtc->priv->transports, ret); +- +- pad_name = g_strdup_printf ("recv_rtcp_sink_%u", ret->session_id); +- if (!gst_element_link_pads (GST_ELEMENT (ret->receive_bin), "rtcp_src", +- GST_ELEMENT (webrtc->rtpbin), pad_name)) +- g_warn_if_reached (); +- g_free (pad_name); +- +- pad_name = g_strdup_printf ("send_rtcp_src_%u", ret->session_id); +- if (!gst_element_link_pads (GST_ELEMENT (webrtc->rtpbin), pad_name, +- GST_ELEMENT (ret->send_bin), "rtcp_sink")) +- g_warn_if_reached (); +- g_free (pad_name); +- } + + gst_element_sync_state_with_parent (GST_ELEMENT (ret->send_bin)); + gst_element_sync_state_with_parent (GST_ELEMENT (ret->receive_bin)); +@@ -1754,32 +2492,38 @@ _on_data_channel_ready_state (WebRTCDataChannel * channel, + GParamSpec * pspec, GstWebRTCBin * webrtc) + { + GstWebRTCDataChannelState ready_state; +- guint i; + + g_object_get (channel, "ready-state", &ready_state, NULL); + + if (ready_state == GST_WEBRTC_DATA_CHANNEL_STATE_OPEN) { +- gboolean found = FALSE; ++ gboolean found; + +- for (i = 0; i < webrtc->priv->pending_data_channels->len; i++) { +- WebRTCDataChannel *c; +- +- c = g_ptr_array_index (webrtc->priv->pending_data_channels, i); +- if (c == channel) { +- found = TRUE; +- g_ptr_array_remove_index (webrtc->priv->pending_data_channels, i); +- break; +- } +- } ++ DC_LOCK (webrtc); ++ found = g_ptr_array_remove (webrtc->priv->pending_data_channels, channel); + if (found == FALSE) { + GST_FIXME_OBJECT (webrtc, "Received open for unknown data channel"); ++ DC_UNLOCK (webrtc); + return; + } + +- g_ptr_array_add (webrtc->priv->data_channels, channel); ++ g_ptr_array_add (webrtc->priv->data_channels, gst_object_ref (channel)); ++ DC_UNLOCK (webrtc); ++ ++ gst_webrtc_bin_update_sctp_priority (webrtc); + + g_signal_emit (webrtc, gst_webrtc_bin_signals[ON_DATA_CHANNEL_SIGNAL], 0, +- gst_object_ref (channel)); ++ channel); ++ } else if (ready_state == GST_WEBRTC_DATA_CHANNEL_STATE_CLOSED) { ++ gboolean found; ++ ++ DC_LOCK (webrtc); ++ found = g_ptr_array_remove (webrtc->priv->pending_data_channels, channel) ++ || g_ptr_array_remove (webrtc->priv->data_channels, channel); ++ ++ if (found == FALSE) { ++ GST_FIXME_OBJECT (webrtc, "Received close for unknown data channel"); ++ } ++ DC_UNLOCK (webrtc); + } + } + +@@ -1794,37 +2538,40 @@ _on_sctpdec_pad_added (GstElement * sctpdec, GstPad * pad, + if (sscanf (GST_PAD_NAME (pad), "src_%u", &stream_id) != 1) + return; + +- PC_LOCK (webrtc); ++ DC_LOCK (webrtc); + channel = _find_data_channel_for_id (webrtc, stream_id); + if (!channel) { + channel = g_object_new (WEBRTC_TYPE_DATA_CHANNEL, NULL); + channel->parent.id = stream_id; + channel->webrtcbin = webrtc; + +- gst_bin_add (GST_BIN (webrtc), channel->appsrc); +- gst_bin_add (GST_BIN (webrtc), channel->appsink); ++ g_signal_emit (webrtc, gst_webrtc_bin_signals[PREPARE_DATA_CHANNEL_SIGNAL], ++ 0, channel, FALSE); ++ ++ gst_bin_add (GST_BIN (webrtc), channel->src_bin); ++ gst_bin_add (GST_BIN (webrtc), channel->sink_bin); + +- gst_element_sync_state_with_parent (channel->appsrc); +- gst_element_sync_state_with_parent (channel->appsink); ++ gst_element_sync_state_with_parent (channel->src_bin); ++ gst_element_sync_state_with_parent (channel->sink_bin); + + webrtc_data_channel_link_to_sctp (channel, webrtc->priv->sctp_transport); + + g_ptr_array_add (webrtc->priv->pending_data_channels, channel); + } ++ DC_UNLOCK (webrtc); + 
+ g_signal_connect (channel, "notify::ready-state", + G_CALLBACK (_on_data_channel_ready_state), webrtc); + +- sink_pad = gst_element_get_static_pad (channel->appsink, "sink"); ++ sink_pad = gst_element_get_static_pad (channel->sink_bin, "sink"); + if (gst_pad_link (pad, sink_pad) != GST_PAD_LINK_OK) + GST_WARNING_OBJECT (channel, "Failed to link sctp pad %s with channel %" + GST_PTR_FORMAT, GST_PAD_NAME (pad), channel); + gst_object_unref (sink_pad); +- PC_UNLOCK (webrtc); + } + + static void +-_on_sctp_state_notify (GstWebRTCSCTPTransport * sctp, GParamSpec * pspec, ++_on_sctp_state_notify (WebRTCSCTPTransport * sctp, GParamSpec * pspec, + GstWebRTCBin * webrtc) + { + GstWebRTCSCTPTransportState state; +@@ -1834,9 +2581,9 @@ _on_sctp_state_notify (GstWebRTCSCTPTransport * sctp, GParamSpec * pspec, + if (state == GST_WEBRTC_SCTP_TRANSPORT_STATE_CONNECTED) { + int i; + +- PC_LOCK (webrtc); + GST_DEBUG_OBJECT (webrtc, "SCTP association established"); + ++ DC_LOCK (webrtc); + for (i = 0; i < webrtc->priv->data_channels->len; i++) { + WebRTCDataChannel *channel; + +@@ -1847,7 +2594,7 @@ _on_sctp_state_notify (GstWebRTCSCTPTransport * sctp, GParamSpec * pspec, + if (!channel->parent.negotiated && !channel->opened) + webrtc_data_channel_start_negotiation (channel); + } +- PC_UNLOCK (webrtc); ++ DC_UNLOCK (webrtc); + } + } + +@@ -1855,13 +2602,13 @@ _on_sctp_state_notify (GstWebRTCSCTPTransport * sctp, GParamSpec * pspec, + static void _on_sctp_notify_dtls_state (GstWebRTCDTLSTransport * transport, + GParamSpec * pspec, GstWebRTCBin * webrtc); + +-static void ++static GstStructure * + _sctp_check_dtls_state_task (GstWebRTCBin * webrtc, gpointer unused) + { + TransportStream *stream; + GstWebRTCDTLSTransport *transport; + GstWebRTCDTLSTransportState dtls_state; +- GstWebRTCSCTPTransport *sctp_transport; ++ WebRTCSCTPTransport *sctp_transport; + + stream = webrtc->priv->data_channel_transport; + transport = stream->transport; +@@ -1871,7 +2618,7 @@ _sctp_check_dtls_state_task (GstWebRTCBin * webrtc, gpointer unused) + if (dtls_state != GST_WEBRTC_DTLS_TRANSPORT_STATE_CONNECTED) { + GST_DEBUG_OBJECT (webrtc, + "Data channel DTLS connection is not ready yet: %d", dtls_state); +- return; ++ return NULL; + } + + GST_DEBUG_OBJECT (webrtc, "Data channel DTLS connection is now ready"); +@@ -1879,7 +2626,7 @@ _sctp_check_dtls_state_task (GstWebRTCBin * webrtc, gpointer unused) + + /* Not locked state anymore so this was already taken care of before */ + if (!gst_element_is_locked_state (sctp_transport->sctpdec)) +- return; ++ return NULL; + + /* Start up the SCTP elements now that the DTLS connection is established */ + gst_element_set_locked_state (sctp_transport->sctpdec, FALSE); +@@ -1902,6 +2649,8 @@ _sctp_check_dtls_state_task (GstWebRTCBin * webrtc, gpointer unused) + + g_signal_handlers_disconnect_by_func (transport, _on_sctp_notify_dtls_state, + webrtc); ++ ++ return NULL; + } + + static void +@@ -1947,24 +2696,17 @@ _get_or_create_data_channel_transports (GstWebRTCBin * webrtc, guint session_id) + { + if (!webrtc->priv->data_channel_transport) { + TransportStream *stream; +- GstWebRTCSCTPTransport *sctp_transport; +- int i; ++ WebRTCSCTPTransport *sctp_transport; + + stream = _find_transport_for_session (webrtc, session_id); + +- if (!stream) { ++ if (!stream) + stream = _create_transport_channel (webrtc, session_id); +- gst_bin_add (GST_BIN (webrtc), GST_ELEMENT (stream->send_bin)); +- gst_bin_add (GST_BIN (webrtc), GST_ELEMENT (stream->receive_bin)); +- g_ptr_array_add 
(webrtc->priv->transports, stream); +- } + + webrtc->priv->data_channel_transport = stream; + +- g_object_set (stream, "rtcp-mux", TRUE, NULL); +- + if (!(sctp_transport = webrtc->priv->sctp_transport)) { +- sctp_transport = gst_webrtc_sctp_transport_new (); ++ sctp_transport = webrtc_sctp_transport_new (); + sctp_transport->transport = + g_object_ref (webrtc->priv->data_channel_transport->transport); + sctp_transport->webrtcbin = webrtc; +@@ -2003,14 +2745,6 @@ _get_or_create_data_channel_transports (GstWebRTCBin * webrtc, guint session_id) + GST_ELEMENT (stream->send_bin), "data_sink")) + g_warn_if_reached (); + +- for (i = 0; i < webrtc->priv->data_channels->len; i++) { +- WebRTCDataChannel *channel; +- +- channel = g_ptr_array_index (webrtc->priv->data_channels, i); +- +- webrtc_data_channel_link_to_sctp (channel, webrtc->priv->sctp_transport); +- } +- + gst_element_sync_state_with_parent (GST_ELEMENT (stream->send_bin)); + gst_element_sync_state_with_parent (GST_ELEMENT (stream->receive_bin)); + +@@ -2028,6 +2762,8 @@ _get_or_create_data_channel_transports (GstWebRTCBin * webrtc, guint session_id) + } + + webrtc->priv->sctp_transport = sctp_transport; ++ ++ gst_webrtc_bin_update_sctp_priority (webrtc); + } + + return webrtc->priv->data_channel_transport; +@@ -2043,38 +2779,92 @@ _get_or_create_transport_stream (GstWebRTCBin * webrtc, guint session_id, + return _get_or_create_rtp_transport_channel (webrtc, session_id); + } + +-static guint +-g_array_find_uint (GArray * array, guint val) ++struct media_payload_map_item ++{ ++ guint media_pt; ++ guint red_pt; ++ guint ulpfec_pt; ++ guint rtx_pt; ++ guint red_rtx_pt; ++}; ++ ++static void ++media_payload_map_item_init (struct media_payload_map_item *item, ++ guint media_pt) ++{ ++ item->media_pt = media_pt; ++ item->red_pt = G_MAXUINT; ++ item->rtx_pt = G_MAXUINT; ++ item->ulpfec_pt = G_MAXUINT; ++ item->red_rtx_pt = G_MAXUINT; ++} ++ ++static struct media_payload_map_item * ++find_payload_map_for_media_pt (GArray * media_mapping, guint media_pt) + { + guint i; + +- for (i = 0; i < array->len; i++) { +- if (g_array_index (array, guint, i) == val) +- return i; ++ for (i = 0; i < media_mapping->len; i++) { ++ struct media_payload_map_item *item; ++ ++ item = &g_array_index (media_mapping, struct media_payload_map_item, i); ++ ++ if (item->media_pt == media_pt) ++ return item; + } + +- return G_MAXUINT; ++ return NULL; ++} ++ ++static struct media_payload_map_item * ++find_or_create_payload_map_for_media_pt (GArray * media_mapping, guint media_pt) ++{ ++ struct media_payload_map_item new_item; ++ struct media_payload_map_item *item; ++ ++ if ((item = find_payload_map_for_media_pt (media_mapping, media_pt))) ++ return item; ++ ++ media_payload_map_item_init (&new_item, media_pt); ++ g_array_append_val (media_mapping, new_item); ++ return &g_array_index (media_mapping, struct media_payload_map_item, ++ media_mapping->len - 1); + } + + static gboolean +-_pick_available_pt (GArray * reserved_pts, guint * i) ++_pick_available_pt (GArray * media_mapping, guint * ret) + { +- gboolean ret = FALSE; ++ int i; + +- for (*i = 96; *i <= 127; (*i)++) { +- if (g_array_find_uint (reserved_pts, *i) == G_MAXUINT) { +- g_array_append_val (reserved_pts, *i); +- ret = TRUE; +- break; ++ for (i = 96; i <= 127; i++) { ++ gboolean available = TRUE; ++ int j; ++ ++ for (j = 0; j < media_mapping->len; j++) { ++ struct media_payload_map_item *item; ++ ++ item = &g_array_index (media_mapping, struct media_payload_map_item, j); ++ ++ if (item->media_pt == i || 
item->red_pt == i || item->rtx_pt == i ++ || item->ulpfec_pt == i || item->red_rtx_pt == i) { ++ available = FALSE; ++ break; ++ } ++ } ++ ++ if (available) { ++ *ret = i; ++ return TRUE; + } + } + +- return ret; ++ *ret = G_MAXUINT; ++ return FALSE; + } + + static gboolean + _pick_fec_payload_types (GstWebRTCBin * webrtc, WebRTCTransceiver * trans, +- GArray * reserved_pts, gint clockrate, gint * rtx_target_pt, ++ GArray * media_mapping, gint clockrate, gint media_pt, gint * rtx_target_pt, + GstSDPMedia * media) + { + gboolean ret = TRUE; +@@ -2083,30 +2873,35 @@ _pick_fec_payload_types (GstWebRTCBin * webrtc, WebRTCTransceiver * trans, + goto done; + + if (trans->fec_type == GST_WEBRTC_FEC_TYPE_ULP_RED && clockrate != -1) { +- guint pt; ++ struct media_payload_map_item *item; + gchar *str; + +- if (!(ret = _pick_available_pt (reserved_pts, &pt))) +- goto done; ++ item = find_or_create_payload_map_for_media_pt (media_mapping, media_pt); ++ if (item->red_pt == G_MAXUINT) { ++ if (!(ret = _pick_available_pt (media_mapping, &item->red_pt))) ++ goto done; ++ } + + /* https://tools.ietf.org/html/rfc5109#section-14.1 */ + +- str = g_strdup_printf ("%u", pt); ++ str = g_strdup_printf ("%u", item->red_pt); + gst_sdp_media_add_format (media, str); + g_free (str); +- str = g_strdup_printf ("%u red/%d", pt, clockrate); ++ str = g_strdup_printf ("%u red/%d", item->red_pt, clockrate); + gst_sdp_media_add_attribute (media, "rtpmap", str); + g_free (str); + +- *rtx_target_pt = pt; ++ *rtx_target_pt = item->red_pt; + +- if (!(ret = _pick_available_pt (reserved_pts, &pt))) +- goto done; ++ if (item->ulpfec_pt == G_MAXUINT) { ++ if (!(ret = _pick_available_pt (media_mapping, &item->ulpfec_pt))) ++ goto done; ++ } + +- str = g_strdup_printf ("%u", pt); ++ str = g_strdup_printf ("%u", item->ulpfec_pt); + gst_sdp_media_add_format (media, str); + g_free (str); +- str = g_strdup_printf ("%u ulpfec/%d", pt, clockrate); ++ str = g_strdup_printf ("%u ulpfec/%d", item->ulpfec_pt, clockrate); + gst_sdp_media_add_attribute (media, "rtpmap", str); + g_free (str); + } +@@ -2115,10 +2910,37 @@ done: + return ret; + } + ++static void ++add_rtx_to_media (WebRTCTransceiver * trans, gint clockrate, gint rtx_pt, ++ gint rtx_target_pt, guint target_ssrc, GstSDPMedia * media) ++{ ++ char *str; ++ ++ /* https://tools.ietf.org/html/rfc4588#section-8.6 */ ++ if (target_ssrc != -1) { ++ str = g_strdup_printf ("%u", target_ssrc); ++ gst_structure_set (trans->local_rtx_ssrc_map, str, G_TYPE_UINT, ++ g_random_int (), NULL); ++ g_free (str); ++ } ++ ++ str = g_strdup_printf ("%u", rtx_pt); ++ gst_sdp_media_add_format (media, str); ++ g_free (str); ++ ++ str = g_strdup_printf ("%u rtx/%d", rtx_pt, clockrate); ++ gst_sdp_media_add_attribute (media, "rtpmap", str); ++ g_free (str); ++ ++ str = g_strdup_printf ("%u apt=%d", rtx_pt, rtx_target_pt); ++ gst_sdp_media_add_attribute (media, "fmtp", str); ++ g_free (str); ++} ++ + static gboolean + _pick_rtx_payload_types (GstWebRTCBin * webrtc, WebRTCTransceiver * trans, +- GArray * reserved_pts, gint clockrate, gint target_pt, guint target_ssrc, +- GstSDPMedia * media) ++ GArray * media_mapping, gint clockrate, gint media_pt, gint target_pt, ++ guint target_ssrc, GstSDPMedia * media) + { + gboolean ret = TRUE; + +@@ -2129,30 +2951,26 @@ _pick_rtx_payload_types (GstWebRTCBin * webrtc, WebRTCTransceiver * trans, + gst_structure_new_empty ("application/x-rtp-ssrc-map"); + + if (trans->do_nack) { +- guint pt; +- gchar *str; +- +- if (!(ret = _pick_available_pt (reserved_pts, &pt))) +- goto done; 
++ struct media_payload_map_item *item; + +- /* https://tools.ietf.org/html/rfc4588#section-8.6 */ +- +- str = g_strdup_printf ("%u", target_ssrc); +- gst_structure_set (trans->local_rtx_ssrc_map, str, G_TYPE_UINT, +- g_random_int (), NULL); +- g_free (str); +- +- str = g_strdup_printf ("%u", pt); +- gst_sdp_media_add_format (media, str); +- g_free (str); ++ item = find_or_create_payload_map_for_media_pt (media_mapping, media_pt); ++ if (item->rtx_pt == G_MAXUINT) { ++ if (!(ret = _pick_available_pt (media_mapping, &item->rtx_pt))) ++ goto done; ++ } + +- str = g_strdup_printf ("%u rtx/%d", pt, clockrate); +- gst_sdp_media_add_attribute (media, "rtpmap", str); +- g_free (str); ++ add_rtx_to_media (trans, clockrate, item->rtx_pt, media_pt, target_ssrc, ++ media); + +- str = g_strdup_printf ("%u apt=%d", pt, target_pt); +- gst_sdp_media_add_attribute (media, "fmtp", str); +- g_free (str); ++ if (item->red_pt != G_MAXUINT) { ++ /* Workaround chrome bug: https://bugs.chromium.org/p/webrtc/issues/detail?id=6196 */ ++ if (item->red_rtx_pt == G_MAXUINT) { ++ if (!(ret = _pick_available_pt (media_mapping, &item->red_rtx_pt))) ++ goto done; ++ } ++ add_rtx_to_media (trans, clockrate, item->red_rtx_pt, item->red_pt, ++ target_ssrc, media); ++ } + } + + done: +@@ -2189,15 +3007,27 @@ _media_add_rtx_ssrc (GQuark field_id, const GValue * value, RtxSsrcData * data) + gchar *str; + GstStructure *sdes; + const gchar *cname; ++ GstWebRTCBinPad *sink_pad; ++ const char *msid = NULL; + + g_object_get (data->webrtc->rtpbin, "sdes", &sdes, NULL); + /* http://www.freesoft.org/CIE/RFC/1889/24.htm */ + cname = gst_structure_get_string (sdes, "cname"); + ++ sink_pad = ++ _find_pad_for_transceiver (data->webrtc, GST_PAD_SINK, ++ GST_WEBRTC_RTP_TRANSCEIVER (data->trans)); ++ if (sink_pad) ++ msid = sink_pad->msid; ++ /* fallback to cname if no msid provided */ ++ if (!msid) ++ msid = cname; ++ + /* https://tools.ietf.org/html/draft-ietf-mmusic-msid-16 */ ++ /* FIXME: the ssrc is not present in RFC8830, do we still need that? */ + str = + g_strdup_printf ("%u msid:%s %s", g_value_get_uint (value), +- cname, GST_OBJECT_NAME (data->trans)); ++ msid, GST_OBJECT_NAME (data->trans)); + gst_sdp_media_add_attribute (data->media, "ssrc", str); + g_free (str); + +@@ -2205,6 +3035,7 @@ _media_add_rtx_ssrc (GQuark field_id, const GValue * value, RtxSsrcData * data) + gst_sdp_media_add_attribute (data->media, "ssrc", str); + g_free (str); + ++ gst_clear_object (&sink_pad); + gst_structure_free (sdes); + + return TRUE; +@@ -2233,10 +3064,22 @@ _media_add_ssrcs (GstSDPMedia * media, GstCaps * caps, GstWebRTCBin * webrtc, + + if (gst_structure_get_uint (s, "ssrc", &ssrc)) { + gchar *str; ++ GstWebRTCBinPad *sink_pad; ++ const char *msid = NULL; ++ ++ sink_pad = ++ _find_pad_for_transceiver (webrtc, GST_PAD_SINK, ++ GST_WEBRTC_RTP_TRANSCEIVER (trans)); ++ if (sink_pad) ++ msid = sink_pad->msid; ++ /* fallback to cname if no msid provided */ ++ if (!msid) ++ msid = cname; + + /* https://tools.ietf.org/html/draft-ietf-mmusic-msid-16 */ ++ /* FIXME: the ssrc is not present in RFC8830, do we still need that? 
*/ + str = +- g_strdup_printf ("%u msid:%s %s", ssrc, cname, ++ g_strdup_printf ("%u msid:%s %s", ssrc, msid, + GST_OBJECT_NAME (trans)); + gst_sdp_media_add_attribute (media, "ssrc", str); + g_free (str); +@@ -2244,6 +3087,8 @@ _media_add_ssrcs (GstSDPMedia * media, GstCaps * caps, GstWebRTCBin * webrtc, + str = g_strdup_printf ("%u cname:%s", ssrc, cname); + gst_sdp_media_add_attribute (media, "ssrc", str); + g_free (str); ++ ++ gst_clear_object (&sink_pad); + } + } + +@@ -2274,12 +3119,217 @@ _add_fingerprint_to_media (GstWebRTCDTLSTransport * transport, + g_free (val); + } + ++static gchar * ++_parse_extmap (GQuark field_id, const GValue * value, GError ** error) ++{ ++ gchar *ret = NULL; ++ ++ if (G_VALUE_HOLDS_STRING (value)) { ++ ret = g_value_dup_string (value); ++ } else if (G_VALUE_HOLDS (value, GST_TYPE_ARRAY) ++ && gst_value_array_get_size (value) == 3) { ++ const GValue *val; ++ const gchar *direction, *extensionname, *extensionattributes; ++ ++ val = gst_value_array_get_value (value, 0); ++ direction = g_value_get_string (val); ++ ++ val = gst_value_array_get_value (value, 1); ++ extensionname = g_value_get_string (val); ++ ++ val = gst_value_array_get_value (value, 2); ++ extensionattributes = g_value_get_string (val); ++ ++ if (!extensionname || *extensionname == '\0') ++ goto done; ++ ++ if (direction && *direction != '\0' && extensionattributes ++ && *extensionattributes != '\0') { ++ ret = ++ g_strdup_printf ("/%s %s %s", direction, extensionname, ++ extensionattributes); ++ } else if (direction && *direction != '\0') { ++ ret = g_strdup_printf ("/%s %s", direction, extensionname); ++ } else if (extensionattributes && *extensionattributes != '\0') { ++ ret = g_strdup_printf ("%s %s", extensionname, extensionattributes); ++ } else { ++ ret = g_strdup (extensionname); ++ } ++ } ++ ++ if (!ret && error) { ++ gchar *val_str = gst_value_serialize (value); ++ ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "Invalid value for %s: %s", g_quark_to_string (field_id), val_str); ++ g_free (val_str); ++ } ++ ++done: ++ return ret; ++} ++ ++typedef struct ++{ ++ gboolean ret; ++ GstStructure *extmap; ++ GError **error; ++} ExtmapData; ++ ++static gboolean ++_dedup_extmap_field (GQuark field_id, const GValue * value, ExtmapData * data) ++{ ++ gboolean is_extmap = ++ g_str_has_prefix (g_quark_to_string (field_id), "extmap-"); ++ ++ if (!data->ret) ++ goto done; ++ ++ if (is_extmap) { ++ gchar *new_value = _parse_extmap (field_id, value, data->error); ++ ++ if (!new_value) { ++ data->ret = FALSE; ++ goto done; ++ } ++ ++ if (gst_structure_id_has_field (data->extmap, field_id)) { ++ gchar *old_value = ++ _parse_extmap (field_id, gst_structure_id_get_value (data->extmap, ++ field_id), NULL); ++ ++ g_assert (old_value); ++ ++ if (g_strcmp0 (new_value, old_value)) { ++ GST_ERROR ++ ("extmap contains different values for id %s (%s != %s)", ++ g_quark_to_string (field_id), old_value, new_value); ++ g_set_error (data->error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "extmap contains different values for id %s (%s != %s)", ++ g_quark_to_string (field_id), old_value, new_value); ++ data->ret = FALSE; ++ } ++ ++ g_free (old_value); ++ ++ } ++ ++ if (data->ret) { ++ gst_structure_id_set_value (data->extmap, field_id, value); ++ } ++ ++ g_free (new_value); ++ } ++ ++done: ++ return !is_extmap; ++} ++ ++static GstStructure * ++_gather_extmap (GstCaps * caps, GError ** error) ++{ ++ ExtmapData edata = ++ { TRUE, gst_structure_new_empty 
("application/x-extmap"), error }; ++ guint i, n; ++ ++ n = gst_caps_get_size (caps); ++ ++ for (i = 0; i < n; i++) { ++ GstStructure *s = gst_caps_get_structure (caps, i); ++ ++ gst_structure_filter_and_map_in_place (s, ++ (GstStructureFilterMapFunc) _dedup_extmap_field, &edata); ++ ++ if (!edata.ret) { ++ gst_clear_structure (&edata.extmap); ++ break; ++ } ++ } ++ ++ return edata.extmap; ++} ++ ++struct hdrext_id ++{ ++ const char *rtphdrext_uri; ++ guint ext_id; ++}; ++ ++static gboolean ++structure_value_get_rtphdrext_id (GQuark field_id, const GValue * value, ++ gpointer user_data) ++{ ++ struct hdrext_id *rtphdrext = user_data; ++ const char *field_name = g_quark_to_string (field_id); ++ ++ if (g_str_has_prefix (field_name, "extmap-")) { ++ const char *val = NULL; ++ ++ if (GST_VALUE_HOLDS_ARRAY (value) && gst_value_array_get_size (value) >= 2) { ++ value = gst_value_array_get_value (value, 1); ++ } ++ if (G_VALUE_HOLDS_STRING (value)) { ++ val = g_value_get_string (value); ++ } ++ ++ if (g_strcmp0 (val, rtphdrext->rtphdrext_uri) == 0) { ++ gint64 id = g_ascii_strtoll (&field_name[strlen ("extmap-")], NULL, 10); ++ ++ if (id > 0 && id < 256) ++ rtphdrext->ext_id = id; ++ ++ return FALSE; ++ } ++ } ++ ++ return TRUE; ++} ++ ++// Returns -1 when not found ++static guint ++caps_get_rtp_header_extension_id (const GstCaps * caps, ++ const char *rtphdrext_uri) ++{ ++ guint i, n; ++ ++ n = gst_caps_get_size (caps); ++ for (i = 0; i < n; i++) { ++ const GstStructure *s = gst_caps_get_structure (caps, i); ++ struct hdrext_id data = { rtphdrext_uri, -1 }; ++ ++ gst_structure_foreach (s, structure_value_get_rtphdrext_id, &data); ++ ++ if (data.ext_id != -1) ++ return data.ext_id; ++ } ++ ++ return -1; ++} ++ ++static gboolean ++caps_contain_rtp_header_extension (const GstCaps * caps, ++ const char *rtphdrext_uri) ++{ ++ return caps_get_rtp_header_extension_id (caps, rtphdrext_uri) != -1; ++} ++ ++static gboolean ++_copy_field (GQuark field_id, const GValue * value, GstStructure * s) ++{ ++ gst_structure_id_set_value (s, field_id, value); ++ ++ return TRUE; ++} ++ + /* based off https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-18#section-5.2.1 */ + static gboolean + sdp_media_from_transceiver (GstWebRTCBin * webrtc, GstSDPMedia * media, +- GstWebRTCRTPTransceiver * trans, GstWebRTCSDPType type, guint media_idx, +- GString * bundled_mids, guint bundle_idx, gchar * bundle_ufrag, +- gchar * bundle_pwd, GArray * reserved_pts) ++ const GstSDPMedia * last_media, GstWebRTCRTPTransceiver * trans, ++ guint media_idx, GString * bundled_mids, guint bundle_idx, ++ gchar * bundle_ufrag, gchar * bundle_pwd, GArray * media_mapping, ++ GHashTable * all_mids, gboolean * no_more_mlines, GError ** error) + { + /* TODO: + * rtp header extensions +@@ -2292,22 +3342,79 @@ sdp_media_from_transceiver (GstWebRTCBin * webrtc, GstSDPMedia * media, + * multiple dtls fingerprints https://tools.ietf.org/html/draft-ietf-mmusic-4572-update-05 + */ + GstSDPMessage *last_offer = _get_latest_self_generated_sdp (webrtc); +- gchar *direction, *sdp_mid, *ufrag, *pwd; ++ gchar *ufrag, *pwd, *mid = NULL; + gboolean bundle_only; ++ guint rtp_session_idx; + GstCaps *caps; ++ GstStructure *extmap; + int i; + +- if (trans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE +- || trans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE) ++ if (trans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE) + return FALSE; + + g_assert (trans->mline == -1 || trans->mline == media_idx); + ++ rtp_session_idx = bundled_mids ? 
bundle_idx : media_idx; ++ + bundle_only = bundled_mids && bundle_idx != media_idx + && webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE; + +- /* mandated by JSEP */ +- gst_sdp_media_add_attribute (media, "setup", "actpass"); ++ caps = _find_codec_preferences (webrtc, trans, media_idx, error); ++ caps = _add_supported_attributes_to_caps (webrtc, WEBRTC_TRANSCEIVER (trans), ++ caps); ++ ++ if (!caps || gst_caps_is_empty (caps) || gst_caps_is_any (caps)) { ++ gst_clear_caps (&caps); ++ ++ if (last_media) { ++ guint i, n; ++ ++ n = gst_sdp_media_formats_len (last_media); ++ if (n > 0) { ++ caps = gst_caps_new_empty (); ++ for (i = 0; i < n; i++) { ++ guint fmt = atoi (gst_sdp_media_get_format (last_media, i)); ++ GstCaps *tmp = gst_sdp_media_get_caps_from_media (last_media, fmt); ++ GstStructure *s = gst_caps_get_structure (tmp, 0); ++ gst_structure_set_name (s, "application/x-rtp"); ++ gst_caps_append_structure (caps, gst_structure_copy (s)); ++ gst_clear_caps (&tmp); ++ } ++ GST_DEBUG_OBJECT (webrtc, "using previously negotiated caps for " ++ "transceiver %" GST_PTR_FORMAT " %" GST_PTR_FORMAT, trans, caps); ++ } ++ } ++ ++ if (!caps) { ++ if (WEBRTC_TRANSCEIVER (trans)->mline_locked) { ++ GST_WARNING_OBJECT (webrtc, ++ "Transceiver <%s> with mid %s has locked mline %u, but no caps. " ++ "Can't add more lines after this one.", GST_OBJECT_NAME (trans), ++ trans->mid, trans->mline); ++ *no_more_mlines = TRUE; ++ } else { ++ GST_WARNING_OBJECT (webrtc, "no caps available for transceiver %" ++ GST_PTR_FORMAT ", skipping", trans); ++ } ++ return FALSE; ++ } ++ } ++ ++ if (last_media) { ++ const char *setup = gst_sdp_media_get_attribute_val (last_media, "setup"); ++ if (setup) { ++ gst_sdp_media_add_attribute (media, "setup", setup); ++ } else { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INVALID_MODIFICATION, ++ "media %u cannot renegotiate without an existing a=setup line", ++ media_idx); ++ return FALSE; ++ } ++ } else { ++ /* mandated by JSEP */ ++ gst_sdp_media_add_attribute (media, "setup", "actpass"); ++ } + + /* FIXME: deal with ICE restarts */ + if (last_offer && trans->mline != -1 && trans->mid) { +@@ -2348,83 +3455,158 @@ sdp_media_from_transceiver (GstWebRTCBin * webrtc, GstSDPMedia * media, + gst_sdp_media_add_attribute (media, "rtcp-mux", ""); + gst_sdp_media_add_attribute (media, "rtcp-rsize", NULL); + +- direction = +- _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, +- trans->direction); +- gst_sdp_media_add_attribute (media, direction, ""); +- g_free (direction); +- +- if (type == GST_WEBRTC_SDP_TYPE_OFFER) { +- caps = _find_codec_preferences (webrtc, trans, GST_PAD_SINK, media_idx); +- caps = +- _add_supported_attributes_to_caps (webrtc, WEBRTC_TRANSCEIVER (trans), +- caps); +- } else { +- g_assert_not_reached (); +- } ++ gst_sdp_media_add_attribute (media, ++ gst_webrtc_rtp_transceiver_direction_to_string (trans->direction), ""); + +- if (!caps || gst_caps_is_empty (caps) || gst_caps_is_any (caps)) { +- GST_WARNING_OBJECT (webrtc, "no caps available for transceiver, skipping"); +- if (caps) +- gst_caps_unref (caps); ++ caps = gst_caps_make_writable (caps); ++ ++ /* When an extmap is defined twice for the same ID, firefox complains and ++ * errors out (chrome is smart enough to accept strict duplicates). ++ * ++ * To work around this, we deduplicate extmap attributes, and also error ++ * out when a different extmap is defined for the same ID. 
++ * ++ * _gather_extmap will strip out all extmap- fields, which will then be ++ * added upon adding the first format for the media. ++ */ ++ extmap = _gather_extmap (caps, error); ++ ++ if (!extmap) { ++ GST_ERROR_OBJECT (webrtc, ++ "Failed to build extmap for transceiver %" GST_PTR_FORMAT, trans); ++ gst_caps_unref (caps); + return FALSE; + } + ++ caps = _add_supported_attributes_to_caps (webrtc, WEBRTC_TRANSCEIVER (trans), ++ caps); ++ + for (i = 0; i < gst_caps_get_size (caps); i++) { + GstCaps *format = gst_caps_new_empty (); +- const GstStructure *s = gst_caps_get_structure (caps, i); ++ GstStructure *s = gst_structure_copy (gst_caps_get_structure (caps, i)); + +- gst_caps_append_structure (format, gst_structure_copy (s)); ++ if (i == 0) { ++ gst_structure_foreach (extmap, (GstStructureForeachFunc) _copy_field, s); ++ } ++ ++ gst_caps_append_structure (format, s); + + GST_DEBUG_OBJECT (webrtc, "Adding %u-th caps %" GST_PTR_FORMAT + " to %u-th media", i, format, media_idx); + + /* this only looks at the first structure so we loop over the given caps + * and add each structure inside it piecemeal */ +- gst_sdp_media_set_media_from_caps (format, media); ++ if (gst_sdp_media_set_media_from_caps (format, media) != GST_SDP_OK) { ++ GST_ERROR_OBJECT (webrtc, ++ "Failed to build media from caps %" GST_PTR_FORMAT ++ " for transceiver %" GST_PTR_FORMAT, format, trans); ++ gst_caps_unref (caps); ++ gst_caps_unref (format); ++ gst_structure_free (extmap); ++ return FALSE; ++ } + + gst_caps_unref (format); + } + +- if (type == GST_WEBRTC_SDP_TYPE_OFFER) { ++ gst_clear_structure (&extmap); ++ ++ { + const GstStructure *s = gst_caps_get_structure (caps, 0); + gint clockrate = -1; + gint rtx_target_pt; +- gint original_rtx_target_pt; /* Workaround chrome bug: https://bugs.chromium.org/p/webrtc/issues/detail?id=6196 */ + guint rtx_target_ssrc = -1; ++ gint media_pt; + +- if (gst_structure_get_int (s, "payload", &rtx_target_pt) && ++ if (gst_structure_get_int (s, "payload", &media_pt) && + webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_NONE) +- g_array_append_val (reserved_pts, rtx_target_pt); ++ find_or_create_payload_map_for_media_pt (media_mapping, media_pt); + +- original_rtx_target_pt = rtx_target_pt; ++ rtx_target_pt = media_pt; + + if (!gst_structure_get_int (s, "clock-rate", &clockrate)) + GST_WARNING_OBJECT (webrtc, + "Caps %" GST_PTR_FORMAT " are missing clock-rate", caps); +- if (!gst_structure_get_uint (s, "ssrc", &rtx_target_ssrc)) +- GST_WARNING_OBJECT (webrtc, "Caps %" GST_PTR_FORMAT " are missing ssrc", +- caps); ++ if (!gst_structure_get_uint (s, "ssrc", &rtx_target_ssrc)) { ++ if (!caps_contain_rtp_header_extension (caps, RTPHDREXT_MID)) { ++ GST_WARNING_OBJECT (webrtc, "Caps %" GST_PTR_FORMAT " are missing ssrc", ++ caps); ++ } ++ } + +- _pick_fec_payload_types (webrtc, WEBRTC_TRANSCEIVER (trans), reserved_pts, +- clockrate, &rtx_target_pt, media); +- _pick_rtx_payload_types (webrtc, WEBRTC_TRANSCEIVER (trans), reserved_pts, +- clockrate, rtx_target_pt, rtx_target_ssrc, media); +- if (original_rtx_target_pt != rtx_target_pt) +- _pick_rtx_payload_types (webrtc, WEBRTC_TRANSCEIVER (trans), reserved_pts, +- clockrate, original_rtx_target_pt, rtx_target_ssrc, media); ++ _pick_fec_payload_types (webrtc, WEBRTC_TRANSCEIVER (trans), media_mapping, ++ clockrate, media_pt, &rtx_target_pt, media); ++ _pick_rtx_payload_types (webrtc, WEBRTC_TRANSCEIVER (trans), media_mapping, ++ clockrate, media_pt, rtx_target_pt, rtx_target_ssrc, media); + } + + _media_add_ssrcs (media, caps, webrtc, 
WEBRTC_TRANSCEIVER (trans)); + + /* Some identifier; we also add the media name to it so it's identifiable */ + if (trans->mid) { +- gst_sdp_media_add_attribute (media, "mid", trans->mid); +- } else { +- sdp_mid = g_strdup_printf ("%s%u", gst_sdp_media_get_media (media), +- webrtc->priv->media_counter++); +- gst_sdp_media_add_attribute (media, "mid", sdp_mid); +- g_free (sdp_mid); ++ const char *media_mid = gst_sdp_media_get_attribute_val (media, "mid"); ++ ++ if (!media_mid) { ++ gst_sdp_media_add_attribute (media, "mid", trans->mid); ++ } else if (g_strcmp0 (media_mid, trans->mid) != 0) { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INVALID_MODIFICATION, ++ "Cannot change media %u mid value from \'%s\' to \'%s\'", ++ media_idx, media_mid, trans->mid); ++ return FALSE; ++ } ++ mid = g_strdup (trans->mid); ++ g_hash_table_insert (all_mids, g_strdup (mid), NULL); ++ } ++ ++ if (mid == NULL) { ++ const GstStructure *s = gst_caps_get_structure (caps, 0); ++ ++ mid = g_strdup (gst_structure_get_string (s, "a-mid")); ++ if (mid) { ++ if (g_hash_table_contains (all_mids, (gpointer) mid)) { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "Cannot re-use mid \'%s\' from the caps in m= line %u that has " ++ "already been used for a previous m= line in the SDP", mid, ++ media_idx); ++ return FALSE; ++ } ++ g_free (WEBRTC_TRANSCEIVER (trans)->pending_mid); ++ WEBRTC_TRANSCEIVER (trans)->pending_mid = g_strdup (mid); ++ g_hash_table_insert (all_mids, g_strdup (mid), NULL); ++ } ++ } ++ ++ if (mid == NULL) { ++ mid = g_strdup (WEBRTC_TRANSCEIVER (trans)->pending_mid); ++ if (mid) { ++ /* If it's already used, just ignore the pending one and generate ++ * a new one */ ++ if (g_hash_table_contains (all_mids, (gpointer) mid)) { ++ g_clear_pointer (&mid, free); ++ g_clear_pointer (&WEBRTC_TRANSCEIVER (trans)->pending_mid, free); ++ } else { ++ gst_sdp_media_add_attribute (media, "mid", mid); ++ g_hash_table_insert (all_mids, g_strdup (mid), NULL); ++ } ++ } ++ } ++ ++ if (mid == NULL) { ++ /* Make sure to avoid mid collisions */ ++ while (TRUE) { ++ mid = g_strdup_printf ("%s%u", gst_sdp_media_get_media (media), ++ webrtc->priv->media_counter++); ++ if (g_hash_table_contains (all_mids, (gpointer) mid)) { ++ g_free (mid); ++ } else { ++ gst_sdp_media_add_attribute (media, "mid", mid); ++ g_hash_table_insert (all_mids, g_strdup (mid), NULL); ++ WEBRTC_TRANSCEIVER (trans)->pending_mid = g_strdup (mid); ++ break; ++ } ++ } + } + + /* TODO: +@@ -2435,9 +3617,7 @@ sdp_media_from_transceiver (GstWebRTCBin * webrtc, GstSDPMedia * media, + if (!trans->sender->transport) { + TransportStream *item; + +- item = +- _get_or_create_transport_stream (webrtc, +- bundled_mids ? 
bundle_idx : media_idx, FALSE); ++ item = _get_or_create_transport_stream (webrtc, rtp_session_idx, FALSE); + + webrtc_transceiver_set_transport (WEBRTC_TRANSCEIVER (trans), item); + } +@@ -2446,49 +3626,74 @@ sdp_media_from_transceiver (GstWebRTCBin * webrtc, GstSDPMedia * media, + } + + if (bundled_mids) { +- const gchar *mid = gst_sdp_media_get_attribute_val (media, "mid"); +- + g_assert (mid); + g_string_append_printf (bundled_mids, " %s", mid); + } + ++ g_clear_pointer (&mid, g_free); ++ + gst_caps_unref (caps); + + return TRUE; + } + + static void +-gather_pad_pt (GstWebRTCBinPad * pad, GArray * reserved_pts) ++gather_pad_pt (GstWebRTCBinPad * pad, GArray * media_mapping) + { + if (pad->received_caps) { + GstStructure *s = gst_caps_get_structure (pad->received_caps, 0); + gint pt; + + if (gst_structure_get_int (s, "payload", &pt)) { +- g_array_append_val (reserved_pts, pt); ++ GST_TRACE_OBJECT (pad, "have media pt %u from received caps", pt); ++ find_or_create_payload_map_for_media_pt (media_mapping, pt); + } + } + } + + static GArray * +-gather_reserved_pts (GstWebRTCBin * webrtc) ++gather_media_mapping (GstWebRTCBin * webrtc) + { + GstElement *element = GST_ELEMENT (webrtc); +- GArray *reserved_pts = g_array_new (FALSE, FALSE, sizeof (guint)); ++ GArray *media_mapping = ++ g_array_new (FALSE, FALSE, sizeof (struct media_payload_map_item)); ++ guint i; + + GST_OBJECT_LOCK (webrtc); +- g_list_foreach (element->sinkpads, (GFunc) gather_pad_pt, reserved_pts); ++ g_list_foreach (element->sinkpads, (GFunc) gather_pad_pt, media_mapping); + g_list_foreach (webrtc->priv->pending_pads, (GFunc) gather_pad_pt, +- reserved_pts); ++ media_mapping); ++ ++ for (i = 0; i < webrtc->priv->transceivers->len; i++) { ++ GstWebRTCRTPTransceiver *trans; ++ ++ trans = g_ptr_array_index (webrtc->priv->transceivers, i); ++ GST_OBJECT_LOCK (trans); ++ if (trans->codec_preferences) { ++ guint j, n; ++ gint pt; ++ ++ n = gst_caps_get_size (trans->codec_preferences); ++ for (j = 0; j < n; j++) { ++ GstStructure *s = gst_caps_get_structure (trans->codec_preferences, j); ++ if (gst_structure_get_int (s, "payload", &pt)) { ++ GST_TRACE_OBJECT (trans, "have media pt %u from codec preferences", ++ pt); ++ find_or_create_payload_map_for_media_pt (media_mapping, pt); ++ } ++ } ++ } ++ GST_OBJECT_UNLOCK (trans); ++ } + GST_OBJECT_UNLOCK (webrtc); + +- return reserved_pts; ++ return media_mapping; + } + + static gboolean + _add_data_channel_offer (GstWebRTCBin * webrtc, GstSDPMessage * msg, + GstSDPMedia * media, GString * bundled_mids, guint bundle_idx, +- gchar * bundle_ufrag, gchar * bundle_pwd) ++ gchar * bundle_ufrag, gchar * bundle_pwd, GHashTable * all_mids) + { + GstSDPMessage *last_offer = _get_latest_self_generated_sdp (webrtc); + gchar *ufrag, *pwd, *sdp_mid; +@@ -2549,10 +3754,18 @@ _add_data_channel_offer (GstWebRTCBin * webrtc, GstSDPMessage * msg, + + gst_sdp_media_add_attribute (media, "mid", mid); + } else { +- sdp_mid = g_strdup_printf ("%s%u", gst_sdp_media_get_media (media), +- webrtc->priv->media_counter++); +- gst_sdp_media_add_attribute (media, "mid", sdp_mid); +- g_free (sdp_mid); ++ /* Make sure to avoid mid collisions */ ++ while (TRUE) { ++ sdp_mid = g_strdup_printf ("%s%u", gst_sdp_media_get_media (media), ++ webrtc->priv->media_counter++); ++ if (g_hash_table_contains (all_mids, (gpointer) sdp_mid)) { ++ g_free (sdp_mid); ++ } else { ++ gst_sdp_media_add_attribute (media, "mid", sdp_mid); ++ g_hash_table_insert (all_mids, sdp_mid, NULL); ++ break; ++ } ++ } + } + + if (bundled_mids) { +@@ 
-2574,17 +3787,22 @@ _add_data_channel_offer (GstWebRTCBin * webrtc, GstSDPMessage * msg, + + /* TODO: use the options argument */ + static GstSDPMessage * +-_create_offer_task (GstWebRTCBin * webrtc, const GstStructure * options) ++_create_offer_task (GstWebRTCBin * webrtc, const GstStructure * options, ++ GError ** error) + { +- GstSDPMessage *ret; ++ GstSDPMessage *ret = NULL; + GString *bundled_mids = NULL; + gchar *bundle_ufrag = NULL; + gchar *bundle_pwd = NULL; +- GArray *reserved_pts = NULL; ++ GArray *media_mapping = NULL; ++ GHashTable *all_mids = ++ g_hash_table_new_full (g_str_hash, g_str_equal, g_free, NULL); ++ + GstSDPMessage *last_offer = _get_latest_self_generated_sdp (webrtc); + GList *seen_transceivers = NULL; + guint media_idx = 0; + int i; ++ gboolean no_more_mlines = FALSE; + + gst_sdp_message_new (&ret); + +@@ -2616,8 +3834,8 @@ _create_offer_task (GstWebRTCBin * webrtc, const GstStructure * options) + GStrv last_bundle = NULL; + guint bundle_media_index; + +- reserved_pts = gather_reserved_pts (webrtc); +- if (last_offer && _parse_bundle (last_offer, &last_bundle) && last_bundle ++ media_mapping = gather_media_mapping (webrtc); ++ if (last_offer && _parse_bundle (last_offer, &last_bundle, NULL) + && last_bundle && last_bundle[0] + && _get_bundle_index (last_offer, last_bundle, &bundle_media_index)) { + bundle_ufrag = +@@ -2645,35 +3863,72 @@ _create_offer_task (GstWebRTCBin * webrtc, const GstStructure * options) + || g_strcmp0 (gst_sdp_media_get_media (last_media), "video") == 0) { + const gchar *last_mid; + int j; ++ + last_mid = gst_sdp_media_get_attribute_val (last_media, "mid"); + + for (j = 0; j < webrtc->priv->transceivers->len; j++) { ++ WebRTCTransceiver *wtrans; ++ const gchar *mid; ++ + trans = g_ptr_array_index (webrtc->priv->transceivers, j); ++ wtrans = WEBRTC_TRANSCEIVER (trans); + +- if (trans->mid && g_strcmp0 (trans->mid, last_mid) == 0) { +- GstSDPMedia *media; ++ if (trans->mid) ++ mid = trans->mid; ++ else ++ mid = wtrans->pending_mid; ++ ++ if (mid && g_strcmp0 (mid, last_mid) == 0) { ++ GstSDPMedia media; ++ ++ memset (&media, 0, sizeof (media)); + + g_assert (!g_list_find (seen_transceivers, trans)); + ++ if (wtrans->mline_locked && trans->mline != media_idx) { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "Previous negotiatied transceiver <%s> with mid %s was in " ++ "mline %d but transceiver has locked mline %u", ++ GST_OBJECT_NAME (trans), trans->mid, media_idx, trans->mline); ++ goto cancel_offer; ++ } ++ + GST_LOG_OBJECT (webrtc, "using previous negotiatied transceiver %" + GST_PTR_FORMAT " with mid %s into media index %u", trans, + trans->mid, media_idx); + +- /* FIXME: deal with format changes */ +- gst_sdp_media_copy (last_media, &media); +- _media_replace_direction (media, trans->direction); ++ if (webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_NONE) { ++ media_mapping = ++ g_array_new (FALSE, FALSE, ++ sizeof (struct media_payload_map_item)); ++ } + +- if (bundled_mids) { +- const gchar *mid = gst_sdp_media_get_attribute_val (media, "mid"); ++ gst_sdp_media_init (&media); ++ if (!sdp_media_from_transceiver (webrtc, &media, last_media, trans, ++ media_idx, bundled_mids, 0, bundle_ufrag, bundle_pwd, ++ media_mapping, all_mids, &no_more_mlines, error)) { ++ gst_sdp_media_uninit (&media); ++ if (!*error) ++ g_set_error_literal (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "Could not reuse transceiver"); ++ } + +- g_assert (mid); +- g_string_append_printf (bundled_mids, " %s", 
mid); ++ if (webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_NONE) { ++ g_array_free (media_mapping, TRUE); ++ media_mapping = NULL; + } ++ if (*error) ++ goto cancel_offer; ++ ++ mid = gst_sdp_media_get_attribute_val (&media, "mid"); ++ g_assert (mid && g_strcmp0 (last_mid, mid) == 0); + +- gst_sdp_message_add_media (ret, media); ++ gst_sdp_message_add_media (ret, &media); + media_idx++; + +- gst_sdp_media_free (media); ++ gst_sdp_media_uninit (&media); + seen_transceivers = g_list_prepend (seen_transceivers, trans); + break; + } +@@ -2683,7 +3938,7 @@ _create_offer_task (GstWebRTCBin * webrtc, const GstStructure * options) + GstSDPMedia media = { 0, }; + gst_sdp_media_init (&media); + if (_add_data_channel_offer (webrtc, ret, &media, bundled_mids, 0, +- bundle_ufrag, bundle_pwd)) { ++ bundle_ufrag, bundle_pwd, all_mids)) { + gst_sdp_message_add_media (ret, &media); + media_idx++; + } else { +@@ -2691,35 +3946,150 @@ _create_offer_task (GstWebRTCBin * webrtc, const GstStructure * options) + } + } + } +- } +- +- /* add any extra streams */ +- for (i = 0; i < webrtc->priv->transceivers->len; i++) { +- GstWebRTCRTPTransceiver *trans; +- GstSDPMedia media = { 0, }; +- +- trans = g_ptr_array_index (webrtc->priv->transceivers, i); +- +- /* don't add transceivers twice */ +- if (g_list_find (seen_transceivers, trans)) +- continue; ++ } ++ ++ /* First, go over all transceivers and gather existing mids */ ++ for (i = 0; i < webrtc->priv->transceivers->len; i++) { ++ GstWebRTCRTPTransceiver *trans; ++ ++ trans = g_ptr_array_index (webrtc->priv->transceivers, i); ++ ++ if (g_list_find (seen_transceivers, trans)) ++ continue; ++ ++ if (trans->mid) { ++ if (g_hash_table_contains (all_mids, trans->mid)) { ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "Duplicate mid %s when creating offer", trans->mid); ++ goto cancel_offer; ++ } ++ ++ g_hash_table_insert (all_mids, g_strdup (trans->mid), NULL); ++ } else if (WEBRTC_TRANSCEIVER (trans)->pending_mid && ++ !g_hash_table_contains (all_mids, ++ WEBRTC_TRANSCEIVER (trans)->pending_mid)) { ++ g_hash_table_insert (all_mids, ++ g_strdup (WEBRTC_TRANSCEIVER (trans)->pending_mid), NULL); ++ } ++ } ++ ++ ++ /* add any extra streams */ ++ for (;;) { ++ GstWebRTCRTPTransceiver *trans = NULL; ++ GstSDPMedia media = { 0, }; ++ ++ /* First find a transceiver requesting this m-line */ ++ trans = _find_transceiver_for_mline (webrtc, media_idx); ++ ++ if (trans) { ++ /* We can't have seen it already, because it is locked to this line, ++ * unless it's a no-more-mlines case ++ */ ++ if (!g_list_find (seen_transceivers, trans)) ++ seen_transceivers = g_list_prepend (seen_transceivers, trans); ++ } else { ++ /* Otherwise find a free transceiver */ ++ for (i = 0; i < webrtc->priv->transceivers->len; i++) { ++ WebRTCTransceiver *wtrans; ++ ++ trans = g_ptr_array_index (webrtc->priv->transceivers, i); ++ wtrans = WEBRTC_TRANSCEIVER (trans); ++ ++ /* don't add transceivers twice */ ++ if (g_list_find (seen_transceivers, trans)) ++ continue; ++ ++ /* Ignore transceivers with a locked mline, as they would have been ++ * found above or will be used later */ ++ if (wtrans->mline_locked) ++ continue; ++ ++ seen_transceivers = g_list_prepend (seen_transceivers, trans); ++ /* don't add stopped transceivers */ ++ if (trans->stopped) { ++ continue; ++ } ++ ++ /* Otherwise take it */ ++ break; ++ } ++ ++ /* Stop if we got all transceivers */ ++ if (i == webrtc->priv->transceivers->len) { ++ ++ /* But try to add a data channel first, we do it here, 
because ++ * it can allow a locked m-line to be put after, so we need to ++ * do another iteration after. ++ */ ++ if (_message_get_datachannel_index (ret) == G_MAXUINT) { ++ GstSDPMedia media = { 0, }; ++ gst_sdp_media_init (&media); ++ if (_add_data_channel_offer (webrtc, ret, &media, bundled_mids, 0, ++ bundle_ufrag, bundle_pwd, all_mids)) { ++ if (no_more_mlines) { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "Trying to add data channel but there is a" ++ " transceiver locked to line %d which doesn't have caps", ++ media_idx); ++ gst_sdp_media_uninit (&media); ++ goto cancel_offer; ++ } ++ gst_sdp_message_add_media (ret, &media); ++ media_idx++; ++ continue; ++ } else { ++ gst_sdp_media_uninit (&media); ++ } ++ } ++ ++ /* Verify that we didn't ignore any locked m-line transceivers */ ++ for (i = 0; i < webrtc->priv->transceivers->len; i++) { ++ WebRTCTransceiver *wtrans; ++ ++ trans = g_ptr_array_index (webrtc->priv->transceivers, i); ++ wtrans = WEBRTC_TRANSCEIVER (trans); ++ /* don't add transceivers twice */ ++ if (g_list_find (seen_transceivers, trans)) ++ continue; ++ g_assert (wtrans->mline_locked); ++ ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "Tranceiver <%s> with mid %s has locked mline %d but the offer " ++ "only has %u sections", GST_OBJECT_NAME (trans), trans->mid, ++ trans->mline, media_idx); ++ goto cancel_offer; ++ } ++ break; ++ } ++ } + +- /* don't add stopped transceivers */ +- if (trans->stopped) +- continue; ++ if (no_more_mlines) { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "Trying to add transceiver at line %u but there is a transceiver " ++ "with a locked mline for this line which doesn't have caps", ++ media_idx); ++ goto cancel_offer; ++ } + + gst_sdp_media_init (&media); + + if (webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_NONE) { +- reserved_pts = g_array_new (FALSE, FALSE, sizeof (guint)); ++ media_mapping = ++ g_array_new (FALSE, FALSE, sizeof (struct media_payload_map_item)); + } + + GST_LOG_OBJECT (webrtc, "adding transceiver %" GST_PTR_FORMAT " at media " + "index %u", trans, media_idx); + +- if (sdp_media_from_transceiver (webrtc, &media, trans, +- GST_WEBRTC_SDP_TYPE_OFFER, media_idx, bundled_mids, 0, bundle_ufrag, +- bundle_pwd, reserved_pts)) { ++ if (sdp_media_from_transceiver (webrtc, &media, NULL, trans, media_idx, ++ bundled_mids, 0, bundle_ufrag, bundle_pwd, media_mapping, all_mids, ++ &no_more_mlines, error)) { ++ /* as per JSEP, a=rtcp-mux-only is only added for new streams */ ++ gst_sdp_media_add_attribute (&media, "rtcp-mux-only", ""); + gst_sdp_message_add_media (ret, &media); + media_idx++; + } else { +@@ -2727,27 +4097,20 @@ _create_offer_task (GstWebRTCBin * webrtc, const GstStructure * options) + } + + if (webrtc->bundle_policy == GST_WEBRTC_BUNDLE_POLICY_NONE) { +- g_array_free (reserved_pts, TRUE); ++ g_array_free (media_mapping, TRUE); ++ media_mapping = NULL; + } +- seen_transceivers = g_list_prepend (seen_transceivers, trans); ++ if (*error) ++ goto cancel_offer; + } + + if (webrtc->bundle_policy != GST_WEBRTC_BUNDLE_POLICY_NONE) { +- g_array_free (reserved_pts, TRUE); ++ g_array_free (media_mapping, TRUE); ++ media_mapping = NULL; + } + +- /* add a data channel if exists and not renegotiated */ +- if (_message_get_datachannel_index (ret) == G_MAXUINT) { +- GstSDPMedia media = { 0, }; +- gst_sdp_media_init (&media); +- if (_add_data_channel_offer (webrtc, ret, &media, bundled_mids, 0, +- bundle_ufrag, bundle_pwd)) 
{ +- gst_sdp_message_add_media (ret, &media); +- media_idx++; +- } else { +- gst_sdp_media_uninit (&media); +- } +- } ++ webrtc->priv->max_sink_pad_serial = MAX (webrtc->priv->max_sink_pad_serial, ++ media_idx); + + g_assert (media_idx == gst_sdp_message_medias_len (ret)); + +@@ -2756,18 +4119,11 @@ _create_offer_task (GstWebRTCBin * webrtc, const GstStructure * options) + + gst_sdp_message_add_attribute (ret, "group", mids); + g_free (mids); ++ bundled_mids = NULL; + } + +- if (bundle_ufrag) +- g_free (bundle_ufrag); +- +- if (bundle_pwd) +- g_free (bundle_pwd); +- + /* FIXME: pre-emptively setup receiving elements when needed */ + +- g_list_free (seen_transceivers); +- + if (webrtc->priv->last_generated_answer) + gst_webrtc_session_description_free (webrtc->priv->last_generated_answer); + webrtc->priv->last_generated_answer = NULL; +@@ -2780,7 +4136,29 @@ _create_offer_task (GstWebRTCBin * webrtc, const GstStructure * options) + gst_webrtc_session_description_new (GST_WEBRTC_SDP_TYPE_OFFER, copy); + } + ++out: ++ if (media_mapping) ++ g_array_free (media_mapping, TRUE); ++ ++ g_hash_table_unref (all_mids); ++ ++ g_list_free (seen_transceivers); ++ ++ if (bundle_ufrag) ++ g_free (bundle_ufrag); ++ ++ if (bundle_pwd) ++ g_free (bundle_pwd); ++ ++ if (bundled_mids) ++ g_string_free (bundled_mids, TRUE); ++ + return ret; ++ ++cancel_offer: ++ gst_sdp_message_free (ret); ++ ret = NULL; ++ goto out; + } + + static void +@@ -2876,12 +4254,30 @@ _media_add_rtx (GstSDPMedia * media, WebRTCTransceiver * trans, + str = g_strdup_printf ("%u", target_ssrc); + gst_structure_set (trans->local_rtx_ssrc_map, str, G_TYPE_UINT, + g_random_int (), NULL); ++ g_free (str); + } + } + } + } + } + ++static gboolean ++_update_transceiver_kind_from_caps (GstWebRTCRTPTransceiver * trans, ++ const GstCaps * caps) ++{ ++ GstWebRTCKind kind = webrtc_kind_from_caps (caps); ++ ++ if (trans->kind == kind) ++ return TRUE; ++ ++ if (trans->kind == GST_WEBRTC_KIND_UNKNOWN) { ++ trans->kind = kind; ++ return TRUE; ++ } else { ++ return FALSE; ++ } ++} ++ + static void + _get_rtx_target_pt_and_ssrc_from_caps (GstCaps * answer_caps, gint * target_pt, + guint * target_ssrc) +@@ -2894,7 +4290,8 @@ _get_rtx_target_pt_and_ssrc_from_caps (GstCaps * answer_caps, gint * target_pt, + + /* TODO: use the options argument */ + static GstSDPMessage * +-_create_answer_task (GstWebRTCBin * webrtc, const GstStructure * options) ++_create_answer_task (GstWebRTCBin * webrtc, const GstStructure * options, ++ GError ** error) + { + GstSDPMessage *ret = NULL; + const GstWebRTCSessionDescription *pending_remote = +@@ -2909,12 +4306,13 @@ _create_answer_task (GstWebRTCBin * webrtc, const GstStructure * options) + GstSDPMessage *last_answer = _get_latest_self_generated_sdp (webrtc); + + if (!webrtc->pending_remote_description) { +- GST_ERROR_OBJECT (webrtc, ++ g_set_error_literal (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INVALID_STATE, + "Asked to create an answer without a remote description"); + return NULL; + } + +- if (!_parse_bundle (pending_remote->sdp, &bundled)) ++ if (!_parse_bundle (pending_remote->sdp, &bundled, error)) + goto out; + + if (bundled) { +@@ -2922,8 +4320,8 @@ _create_answer_task (GstWebRTCBin * webrtc, const GstStructure * options) + guint bundle_media_index; + + if (!_get_bundle_index (pending_remote->sdp, bundled, &bundle_idx)) { +- GST_ERROR_OBJECT (webrtc, "Bundle tag is %s but no media found matching", +- bundled[0]); ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ "Bundle tag is %s 
but no media found matching", bundled[0]); + goto out; + } + +@@ -2931,7 +4329,7 @@ _create_answer_task (GstWebRTCBin * webrtc, const GstStructure * options) + bundled_mids = g_string_new ("BUNDLE"); + } + +- if (last_answer && _parse_bundle (last_answer, &last_bundle) ++ if (last_answer && _parse_bundle (last_answer, &last_bundle, NULL) + && last_bundle && last_bundle[0] + && _get_bundle_index (last_answer, last_bundle, &bundle_media_index)) { + bundle_ufrag = +@@ -3079,33 +4477,38 @@ _create_answer_task (GstWebRTCBin * webrtc, const GstStructure * options) + offer_caps = _rtp_caps_from_media (offer_media); + + if (last_answer && i < gst_sdp_message_medias_len (last_answer) +- && (rtp_trans = +- _find_transceiver (webrtc, mid, +- (FindTransceiverFunc) match_for_mid))) { ++ && (rtp_trans = _find_transceiver_for_mid (webrtc, mid))) { + const GstSDPMedia *last_media = + gst_sdp_message_get_media (last_answer, i); + const gchar *last_mid = + gst_sdp_media_get_attribute_val (last_media, "mid"); ++ GstCaps *current_caps; + + /* FIXME: assumes no shenanigans with recycling transceivers */ + g_assert (g_strcmp0 (mid, last_mid) == 0); + +- if (!answer_caps +- && (rtp_trans->direction == +- GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV +- || rtp_trans->direction == +- GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY)) +- answer_caps = +- _find_codec_preferences (webrtc, rtp_trans, GST_PAD_SINK, i); +- if (!answer_caps +- && (rtp_trans->direction == +- GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV +- || rtp_trans->direction == +- GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY)) +- answer_caps = +- _find_codec_preferences (webrtc, rtp_trans, GST_PAD_SRC, i); +- if (!answer_caps) +- answer_caps = _rtp_caps_from_media (last_media); ++ current_caps = _find_codec_preferences (webrtc, rtp_trans, i, error); ++ if (*error) { ++ gst_caps_unref (offer_caps); ++ goto rejected; ++ } ++ if (!current_caps) ++ current_caps = _rtp_caps_from_media (last_media); ++ ++ if (current_caps) { ++ answer_caps = gst_caps_intersect (offer_caps, current_caps); ++ if (gst_caps_is_empty (answer_caps)) { ++ GST_WARNING_OBJECT (webrtc, "Caps from offer for m-line %d (%" ++ GST_PTR_FORMAT ") don't intersect with caps from codec" ++ " preferences and transceiver %" GST_PTR_FORMAT, i, offer_caps, ++ current_caps); ++ gst_caps_unref (current_caps); ++ gst_caps_unref (answer_caps); ++ gst_caps_unref (offer_caps); ++ goto rejected; ++ } ++ gst_caps_unref (current_caps); ++ } + + /* XXX: In theory we're meant to use the sendrecv formats for the + * inactive direction however we don't know what that may be and would +@@ -3126,10 +4529,13 @@ _create_answer_task (GstWebRTCBin * webrtc, const GstStructure * options) + continue; + } + +- trans_caps = +- _find_codec_preferences (webrtc, rtp_trans, GST_PAD_SINK, j); ++ trans_caps = _find_codec_preferences (webrtc, rtp_trans, j, error); ++ if (*error) { ++ gst_caps_unref (offer_caps); ++ goto rejected; ++ } + +- GST_TRACE_OBJECT (webrtc, "trying to compare %" GST_PTR_FORMAT ++ GST_LOG_OBJECT (webrtc, "trying to compare %" GST_PTR_FORMAT + " and %" GST_PTR_FORMAT, offer_caps, trans_caps); + + /* FIXME: technically this is a little overreaching as some fields we +@@ -3137,25 +4543,19 @@ _create_answer_task (GstWebRTCBin * webrtc, const GstStructure * options) + * that we cannot actually support */ + if (trans_caps) { + answer_caps = gst_caps_intersect (offer_caps, trans_caps); +- if (answer_caps && !gst_caps_is_empty (answer_caps)) { +- GST_LOG_OBJECT (webrtc, +- "found compatible transceiver %" 
GST_PTR_FORMAT +- " for offer media %u", rtp_trans, i); +- if (trans_caps) +- gst_caps_unref (trans_caps); +- break; +- } else { +- if (answer_caps) { +- gst_caps_unref (answer_caps); +- answer_caps = NULL; ++ gst_caps_unref (trans_caps); ++ if (answer_caps) { ++ if (!gst_caps_is_empty (answer_caps)) { ++ GST_LOG_OBJECT (webrtc, ++ "found compatible transceiver %" GST_PTR_FORMAT ++ " for offer media %u", rtp_trans, i); ++ break; + } +- if (trans_caps) +- gst_caps_unref (trans_caps); +- rtp_trans = NULL; ++ gst_caps_unref (answer_caps); ++ answer_caps = NULL; + } +- } else { +- rtp_trans = NULL; + } ++ rtp_trans = NULL; + } + } + +@@ -3164,42 +4564,88 @@ _create_answer_task (GstWebRTCBin * webrtc, const GstStructure * options) + g_assert (answer_caps != NULL); + } else { + /* if no transceiver, then we only receive that stream and respond with +- * the exact same caps */ +- /* FIXME: how to validate that subsequent elements can actually receive +- * this payload/format */ ++ * the intersection with the transceivers codec preferences caps */ + answer_dir = GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY; +- answer_caps = gst_caps_ref (offer_caps); +- } +- +- if (gst_caps_is_empty (answer_caps)) { +- GST_WARNING_OBJECT (webrtc, "Could not create caps for media"); +- if (rtp_trans) +- gst_object_unref (rtp_trans); +- gst_caps_unref (answer_caps); +- goto rejected; ++ GST_WARNING_OBJECT (webrtc, "did not find compatible transceiver for " ++ "offer caps %" GST_PTR_FORMAT ", will only receive", offer_caps); + } + +- seen_transceivers = g_list_prepend (seen_transceivers, rtp_trans); +- + if (!rtp_trans) { +- trans = _create_webrtc_transceiver (webrtc, answer_dir, i); ++ GstCaps *trans_caps; ++ GstWebRTCKind kind = GST_WEBRTC_KIND_UNKNOWN; ++ ++ if (g_strcmp0 (gst_sdp_media_get_media (offer_media), "audio") == 0) ++ kind = GST_WEBRTC_KIND_AUDIO; ++ else if (g_strcmp0 (gst_sdp_media_get_media (offer_media), ++ "video") == 0) ++ kind = GST_WEBRTC_KIND_VIDEO; ++ else ++ GST_LOG_OBJECT (webrtc, "Unknown media kind %s", ++ GST_STR_NULL (gst_sdp_media_get_media (offer_media))); ++ ++ trans = _create_webrtc_transceiver (webrtc, answer_dir, i, kind, NULL); + rtp_trans = GST_WEBRTC_RTP_TRANSCEIVER (trans); + + GST_LOG_OBJECT (webrtc, "Created new transceiver %" GST_PTR_FORMAT +- " for mline %u", trans, i); ++ " for mline %u with media kind %d", trans, i, kind); ++ ++ trans_caps = _find_codec_preferences (webrtc, rtp_trans, i, error); ++ if (*error) { ++ gst_caps_unref (offer_caps); ++ goto rejected; ++ } ++ ++ GST_TRACE_OBJECT (webrtc, "trying to compare %" GST_PTR_FORMAT ++ " and %" GST_PTR_FORMAT, offer_caps, trans_caps); ++ ++ /* FIXME: technically this is a little overreaching as some fields we ++ * we can deal with not having and/or we may have unrecognized fields ++ * that we cannot actually support */ ++ if (trans_caps) { ++ answer_caps = gst_caps_intersect (offer_caps, trans_caps); ++ gst_clear_caps (&trans_caps); ++ } else { ++ answer_caps = gst_caps_ref (offer_caps); ++ } + } else { + trans = WEBRTC_TRANSCEIVER (rtp_trans); + } + +- if (!trans->do_nack) { +- answer_caps = gst_caps_make_writable (answer_caps); +- for (k = 0; k < gst_caps_get_size (answer_caps); k++) { +- GstStructure *s = gst_caps_get_structure (answer_caps, k); ++ seen_transceivers = g_list_prepend (seen_transceivers, rtp_trans); ++ ++ if (gst_caps_is_empty (answer_caps)) { ++ GST_WARNING_OBJECT (webrtc, "Could not create caps for media"); ++ gst_clear_caps (&answer_caps); ++ gst_clear_caps (&offer_caps); ++ goto rejected; ++ } ++ 
++ if (!_update_transceiver_kind_from_caps (rtp_trans, answer_caps)) { ++ GstWebRTCKind caps_kind = webrtc_kind_from_caps (answer_caps); ++ ++ GST_WARNING_OBJECT (webrtc, ++ "Trying to change kind of transceiver %" GST_PTR_FORMAT ++ " at m-line %d from %s (%d) to %s (%d)", trans, rtp_trans->mline, ++ gst_webrtc_kind_to_string (rtp_trans->kind), rtp_trans->kind, ++ gst_webrtc_kind_to_string (caps_kind), caps_kind); ++ } ++ ++ answer_caps = gst_caps_make_writable (answer_caps); ++ for (k = 0; k < gst_caps_get_size (answer_caps); k++) { ++ GstStructure *s = gst_caps_get_structure (answer_caps, k); ++ /* taken from the offer sdp already and already intersected above */ ++ gst_structure_remove_field (s, "a-mid"); ++ if (!trans->do_nack) + gst_structure_remove_fields (s, "rtcp-fb-nack", NULL); +- } + } + +- gst_sdp_media_set_media_from_caps (answer_caps, media); ++ if (gst_sdp_media_set_media_from_caps (answer_caps, media) != GST_SDP_OK) { ++ GST_WARNING_OBJECT (webrtc, ++ "Could not build media from caps %" GST_PTR_FORMAT, answer_caps); ++ gst_clear_caps (&answer_caps); ++ gst_clear_caps (&offer_caps); ++ goto rejected; ++ } + + _get_rtx_target_pt_and_ssrc_from_caps (answer_caps, &target_pt, + &target_ssrc); +@@ -3227,6 +4673,7 @@ _create_answer_task (GstWebRTCBin * webrtc, const GstStructure * options) + if (answer_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE) { + GST_WARNING_OBJECT (webrtc, "Could not intersect offer direction with " + "transceiver direction"); ++ gst_caps_unref (offer_caps); + goto rejected; + } + _media_replace_direction (media, answer_dir); +@@ -3258,10 +4705,16 @@ _create_answer_task (GstWebRTCBin * webrtc, const GstStructure * options) + + if (0) { + rejected: +- GST_INFO_OBJECT (webrtc, "media %u rejected", i); ++ if (error && *error) ++ GST_INFO_OBJECT (webrtc, "media %u rejected: %s", i, (*error)->message); ++ else ++ GST_INFO_OBJECT (webrtc, "media %u rejected", i); + gst_sdp_media_free (media); + gst_sdp_media_copy (offer_media, &media); + gst_sdp_media_set_port_info (media, 0, 0); ++ /* Clear error here as it is not propagated to the caller and the media ++ * is just skipped, i.e. more iterations are going to happen. */ ++ g_clear_error (error); + } + gst_sdp_message_add_media (ret, media); + gst_sdp_media_free (media); +@@ -3308,24 +4761,24 @@ out: + struct create_sdp + { + GstStructure *options; +- GstPromise *promise; + GstWebRTCSDPType type; + }; + +-static void ++static GstStructure * + _create_sdp_task (GstWebRTCBin * webrtc, struct create_sdp *data) + { + GstWebRTCSessionDescription *desc = NULL; + GstSDPMessage *sdp = NULL; + GstStructure *s = NULL; ++ GError *error = NULL; + + GST_INFO_OBJECT (webrtc, "creating %s sdp with options %" GST_PTR_FORMAT, + gst_webrtc_sdp_type_to_string (data->type), data->options); + + if (data->type == GST_WEBRTC_SDP_TYPE_OFFER) +- sdp = _create_offer_task (webrtc, data->options); ++ sdp = _create_offer_task (webrtc, data->options, &error); + else if (data->type == GST_WEBRTC_SDP_TYPE_ANSWER) +- sdp = _create_answer_task (webrtc, data->options); ++ sdp = _create_answer_task (webrtc, data->options, &error); + else { + g_assert_not_reached (); + goto out; +@@ -3336,15 +4789,21 @@ _create_sdp_task (GstWebRTCBin * webrtc, struct create_sdp *data) + s = gst_structure_new ("application/x-gst-promise", + gst_webrtc_sdp_type_to_string (data->type), + GST_TYPE_WEBRTC_SESSION_DESCRIPTION, desc, NULL); ++ } else { ++ g_warn_if_fail (error != NULL); ++ GST_WARNING_OBJECT (webrtc, "returning error: %s", ++ error ? 
error->message : "Unknown"); ++ s = gst_structure_new ("application/x-gst-promise", ++ "error", G_TYPE_ERROR, error, NULL); ++ g_clear_error (&error); + } + + out: +- PC_UNLOCK (webrtc); +- gst_promise_reply (data->promise, s); +- PC_LOCK (webrtc); + + if (desc) + gst_webrtc_session_description_free (desc); ++ ++ return s; + } + + static void +@@ -3352,7 +4811,6 @@ _free_create_sdp_data (struct create_sdp *data) + { + if (data->options) + gst_structure_free (data->options); +- gst_promise_unref (data->promise); + g_free (data); + } + +@@ -3364,16 +4822,14 @@ gst_webrtc_bin_create_offer (GstWebRTCBin * webrtc, + + if (options) + data->options = gst_structure_copy (options); +- data->promise = gst_promise_ref (promise); + data->type = GST_WEBRTC_SDP_TYPE_OFFER; + + if (!gst_webrtc_bin_enqueue_task (webrtc, (GstWebRTCBinFunc) _create_sdp_task, + data, (GDestroyNotify) _free_create_sdp_data, promise)) { + GError *error = +- g_error_new (GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_CLOSED, ++ g_error_new (GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_INVALID_STATE, + "Could not create offer. webrtcbin is closed"); +- GstStructure *s = +- gst_structure_new ("application/x-gstwebrtcbin-promise-error", ++ GstStructure *s = gst_structure_new ("application/x-gst-promise", + "error", G_TYPE_ERROR, error, NULL); + + gst_promise_reply (promise, s); +@@ -3390,16 +4846,14 @@ gst_webrtc_bin_create_answer (GstWebRTCBin * webrtc, + + if (options) + data->options = gst_structure_copy (options); +- data->promise = gst_promise_ref (promise); + data->type = GST_WEBRTC_SDP_TYPE_ANSWER; + + if (!gst_webrtc_bin_enqueue_task (webrtc, (GstWebRTCBinFunc) _create_sdp_task, + data, (GDestroyNotify) _free_create_sdp_data, promise)) { + GError *error = +- g_error_new (GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_CLOSED, ++ g_error_new (GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_INVALID_STATE, + "Could not create answer. webrtcbin is closed."); +- GstStructure *s = +- gst_structure_new ("application/x-gstwebrtcbin-promise-error", ++ GstStructure *s = gst_structure_new ("application/x-gst-promise", + "error", G_TYPE_ERROR, error, NULL); + + gst_promise_reply (promise, s); +@@ -3410,17 +4864,25 @@ gst_webrtc_bin_create_answer (GstWebRTCBin * webrtc, + + static GstWebRTCBinPad * + _create_pad_for_sdp_media (GstWebRTCBin * webrtc, GstPadDirection direction, +- guint media_idx) ++ GstWebRTCRTPTransceiver * trans, guint serial, char *msid) + { + GstWebRTCBinPad *pad; + gchar *pad_name; + ++ if (direction == GST_PAD_SINK) { ++ if (serial == G_MAXUINT) ++ serial = webrtc->priv->max_sink_pad_serial++; ++ } else { ++ serial = webrtc->priv->src_pad_counter++; ++ } ++ + pad_name = + g_strdup_printf ("%s_%u", direction == GST_PAD_SRC ? 
"src" : "sink", +- media_idx); +- pad = gst_webrtc_bin_pad_new (pad_name, direction); ++ serial); ++ pad = gst_webrtc_bin_pad_new (pad_name, direction, msid); + g_free (pad_name); +- pad->mlineindex = media_idx; ++ ++ pad->trans = gst_object_ref (trans); + + return pad; + } +@@ -3437,9 +4899,7 @@ _find_transceiver_for_sdp_media (GstWebRTCBin * webrtc, + const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, i); + + if (g_strcmp0 (attr->key, "mid") == 0) { +- if ((ret = +- _find_transceiver (webrtc, attr->value, +- (FindTransceiverFunc) match_for_mid))) ++ if ((ret = _find_transceiver_for_mid (webrtc, attr->value))) + goto out; + } + } +@@ -3452,81 +4912,345 @@ out: + return ret; + } + ++static GstElement * ++_build_fec_encoder (GstWebRTCBin * webrtc, WebRTCTransceiver * trans) ++{ ++ GstWebRTCRTPTransceiver *rtp_trans = GST_WEBRTC_RTP_TRANSCEIVER (trans); ++ guint ulpfec_pt = 0, red_pt = 0; ++ GstPad *sinkpad, *srcpad, *ghost; ++ GstElement *ret; ++ ++ if (trans->stream) { ++ ulpfec_pt = ++ transport_stream_get_pt (trans->stream, "ULPFEC", rtp_trans->mline); ++ red_pt = transport_stream_get_pt (trans->stream, "RED", rtp_trans->mline); ++ } ++ ++ if (trans->ulpfecenc || trans->redenc) { ++ g_critical ("webrtcbin: duplicate call to create a fec encoder or " ++ "red encoder!"); ++ return NULL; ++ } ++ ++ GST_DEBUG_OBJECT (webrtc, ++ "Creating ULPFEC encoder for mline %u with pt %d", rtp_trans->mline, ++ ulpfec_pt); ++ ++ ret = gst_bin_new (NULL); ++ ++ trans->ulpfecenc = gst_element_factory_make ("rtpulpfecenc", NULL); ++ gst_object_ref_sink (trans->ulpfecenc); ++ if (!gst_bin_add (GST_BIN (ret), trans->ulpfecenc)) ++ g_warn_if_reached (); ++ sinkpad = gst_element_get_static_pad (trans->ulpfecenc, "sink"); ++ ++ g_object_bind_property (rtp_trans, "fec-percentage", trans->ulpfecenc, ++ "percentage", G_BINDING_DEFAULT); ++ ++ trans->redenc = gst_element_factory_make ("rtpredenc", NULL); ++ gst_object_ref_sink (trans->redenc); ++ ++ GST_DEBUG_OBJECT (webrtc, "Creating RED encoder for mline %u with pt %d", ++ rtp_trans->mline, red_pt); ++ ++ gst_bin_add (GST_BIN (ret), trans->redenc); ++ gst_element_link (trans->ulpfecenc, trans->redenc); ++ ++ ghost = gst_ghost_pad_new ("sink", sinkpad); ++ gst_clear_object (&sinkpad); ++ gst_element_add_pad (ret, ghost); ++ ghost = NULL; ++ ++ srcpad = gst_element_get_static_pad (trans->redenc, "src"); ++ ghost = gst_ghost_pad_new ("src", srcpad); ++ gst_clear_object (&srcpad); ++ gst_element_add_pad (ret, ghost); ++ ghost = NULL; ++ ++ return ret; ++} ++ ++static gboolean ++_merge_structure (GQuark field_id, const GValue * value, gpointer user_data) ++{ ++ GstStructure *s = user_data; ++ ++ gst_structure_id_set_value (s, field_id, value); ++ ++ return TRUE; ++} ++ ++#define GST_WEBRTC_PAYLOAD_TYPE "gst.webrtcbin.payload.type" ++ ++static void ++try_match_transceiver_with_fec_decoder (GstWebRTCBin * webrtc, ++ WebRTCTransceiver * trans) ++{ ++ GList *l; ++ ++ for (l = trans->stream->fecdecs; l; l = l->next) { ++ GstElement *fecdec = GST_ELEMENT (l->data); ++ gboolean found_transceiver = FALSE; ++ int original_pt; ++ guint i; ++ ++ original_pt = ++ GPOINTER_TO_INT (g_object_get_data (G_OBJECT (fecdec), ++ GST_WEBRTC_PAYLOAD_TYPE)); ++ if (original_pt <= 0) { ++ GST_WARNING_OBJECT (trans, "failed to match fec decoder with " ++ "transceiver, fec decoder %" GST_PTR_FORMAT " does not contain a " ++ "valid payload type", fecdec); ++ continue; ++ } ++ ++ for (i = 0; i < trans->stream->ptmap->len; i++) { ++ PtMapItem *item = &g_array_index 
(trans->stream->ptmap, PtMapItem, i); ++ ++ /* FIXME: this only works for a 1-1 original_pt->fec_pt mapping */ ++ if (original_pt == item->pt && item->media_idx != -1 ++ && item->media_idx == trans->parent.mline) { ++ if (trans->ulpfecdec) { ++ GST_FIXME_OBJECT (trans, "cannot"); ++ gst_clear_object (&trans->ulpfecdec); ++ } ++ trans->ulpfecdec = gst_object_ref (fecdec); ++ found_transceiver = TRUE; ++ break; ++ } ++ } ++ ++ if (!found_transceiver) { ++ GST_WARNING_OBJECT (trans, "failed to match fec decoder with " ++ "transceiver"); ++ } ++ } ++} ++ ++static void ++_set_internal_rtpbin_element_props_from_stream (GstWebRTCBin * webrtc, ++ TransportStream * stream) ++{ ++ GstStructure *merged_local_rtx_ssrc_map; ++ GstStructure *pt_map = gst_structure_new_empty ("application/x-rtp-pt-map"); ++ GValue red_pt_array = { 0, }; ++ gint *rtx_pt; ++ gsize rtx_count; ++ gsize i; ++ ++ gst_value_array_init (&red_pt_array, 0); ++ ++ rtx_pt = transport_stream_get_all_pt (stream, "RTX", &rtx_count); ++ GST_DEBUG_OBJECT (stream, "have %" G_GSIZE_FORMAT " rtx payloads", rtx_count); ++ ++ for (i = 0; i < rtx_count; i++) { ++ GstCaps *rtx_caps = transport_stream_get_caps_for_pt (stream, rtx_pt[i]); ++ const GstStructure *s = gst_caps_get_structure (rtx_caps, 0); ++ const gchar *apt = gst_structure_get_string (s, "apt"); ++ ++ GST_LOG_OBJECT (stream, "setting rtx mapping: %s -> %u", apt, rtx_pt[i]); ++ gst_structure_set (pt_map, apt, G_TYPE_UINT, rtx_pt[i], NULL); ++ } ++ ++ GST_DEBUG_OBJECT (stream, "setting payload map on %" GST_PTR_FORMAT " : %" ++ GST_PTR_FORMAT " and %" GST_PTR_FORMAT, stream->rtxreceive, ++ stream->rtxsend, pt_map); ++ ++ if (stream->rtxreceive) ++ g_object_set (stream->rtxreceive, "payload-type-map", pt_map, NULL); ++ if (stream->rtxsend) ++ g_object_set (stream->rtxsend, "payload-type-map", pt_map, NULL); ++ ++ gst_structure_free (pt_map); ++ g_clear_pointer (&rtx_pt, g_free); ++ ++ merged_local_rtx_ssrc_map = ++ gst_structure_new_empty ("application/x-rtp-ssrc-map"); ++ ++ for (i = 0; i < webrtc->priv->transceivers->len; i++) { ++ GstWebRTCRTPTransceiver *rtp_trans = ++ g_ptr_array_index (webrtc->priv->transceivers, i); ++ WebRTCTransceiver *trans = WEBRTC_TRANSCEIVER (rtp_trans); ++ ++ if (trans->stream == stream) { ++ gint ulpfec_pt, red_pt = 0; ++ ++ ulpfec_pt = transport_stream_get_pt (stream, "ULPFEC", rtp_trans->mline); ++ if (ulpfec_pt <= 0) ++ ulpfec_pt = 0; ++ ++ red_pt = transport_stream_get_pt (stream, "RED", rtp_trans->mline); ++ if (red_pt <= 0) { ++ red_pt = -1; ++ } else { ++ GValue ptval = { 0, }; ++ ++ g_value_init (&ptval, G_TYPE_INT); ++ g_value_set_int (&ptval, red_pt); ++ gst_value_array_append_value (&red_pt_array, &ptval); ++ g_value_unset (&ptval); ++ } ++ ++ GST_DEBUG_OBJECT (webrtc, "stream %" GST_PTR_FORMAT " transceiver %" ++ GST_PTR_FORMAT " has FEC payload %d and RED payload %d", stream, ++ trans, ulpfec_pt, red_pt); ++ ++ if (trans->ulpfecenc) { ++ guint ulpfecenc_pt = ulpfec_pt; ++ ++ if (ulpfecenc_pt == 0) ++ ulpfecenc_pt = 255; ++ ++ g_object_set (trans->ulpfecenc, "pt", ulpfecenc_pt, "multipacket", ++ rtp_trans->kind == GST_WEBRTC_KIND_VIDEO, "percentage", ++ trans->fec_percentage, NULL); ++ } ++ ++ try_match_transceiver_with_fec_decoder (webrtc, trans); ++ if (trans->ulpfecdec) { ++ g_object_set (trans->ulpfecdec, "passthrough", ulpfec_pt == 0, "pt", ++ ulpfec_pt, NULL); ++ } ++ ++ if (trans->redenc) { ++ gboolean always_produce = TRUE; ++ if (red_pt == -1) { ++ /* passthrough settings */ ++ red_pt = 0; ++ always_produce = FALSE; ++ } ++ 
g_object_set (trans->redenc, "pt", red_pt, "allow-no-red-blocks", ++ always_produce, NULL); ++ } ++ ++ if (trans->local_rtx_ssrc_map) { ++ gst_structure_foreach (trans->local_rtx_ssrc_map, ++ _merge_structure, merged_local_rtx_ssrc_map); ++ } ++ } ++ } ++ ++ if (stream->rtxsend) ++ g_object_set (stream->rtxsend, "ssrc-map", merged_local_rtx_ssrc_map, NULL); ++ gst_clear_structure (&merged_local_rtx_ssrc_map); ++ ++ if (stream->reddec) { ++ g_object_set_property (G_OBJECT (stream->reddec), "payloads", ++ &red_pt_array); ++ } ++ ++ g_value_unset (&red_pt_array); ++} ++ + static GstPad * + _connect_input_stream (GstWebRTCBin * webrtc, GstWebRTCBinPad * pad) + { + /* + * Not-bundle case: + * +- * ,-------------------------webrtcbin-------------------------, +- * ; ; +- * ; ,-------rtpbin-------, ,--transport_send_%u--, ; +- * ; ; send_rtp_src_%u o---o rtp_sink ; ; +- * ; ; ; ; ; ; +- * ; ; send_rtcp_src_%u o---o rtcp_sink ; ; +- * ; sink_%u ; ; '---------------------' ; +- * o----------o send_rtp_sink_%u ; ; +- * ; '--------------------' ; +- * '--------------------- -------------------------------------' ++ * ,--------------------------------------------webrtcbin--------------------------------------------, ++ * ; ; ++ * ; ,-------rtpbin-------, ,--transport_send_%u--, ; ++ * ; ; send_rtp_src_%u o---o rtp_sink ; ; ++ * ; ,---clocksync---, ; ; ; ; ; ++ * ; ; ; ; send_rtcp_src_%u o---o rtcp_sink ; ; ++ * ; sink_%u ; ; ,---fec encoder---, ; ; '---------------------' ; ++ * o---------o sink src o-o sink src o--o send_rtp_sink_%u ; ; ++ * ; '---------------' ,-----------------, '--------------------' ; ++ * '-------------------------------------------------------------------------------------------------' + */ + + /* + * Bundle case: +- * ,--------------------------------webrtcbin--------------------------------, +- * ; ; +- * ; ,-------rtpbin-------, ,--transport_send_%u--, ; +- * ; ; send_rtp_src_%u o---o rtp_sink ; ; +- * ; ; ; ; ; ; +- * ; ; send_rtcp_src_%u o---o rtcp_sink ; ; +- * ; sink_%u ,---funnel---, ; ; '---------------------' ; +- * o---------o sink_%u ; ; ; ; +- * ; sink_%u ; src o-o send_rtp_sink_%u ; ; +- * o---------o sink_%u ; ; ; ; +- * ; '------------' '--------------------' ; +- * '-------------------------------------------------------------------------' ++ * ,-----------------------------------------------------webrtcbin---------------------------------------------------, ++ * ; ; ++ * ; ,-------rtpbin-------, ,--transport_send_%u--, ; ++ * ; ; send_rtp_src_%u o---o rtp_sink ; ; ++ * ; ; ; ; ; ; ++ * ; sink_%u ,---clocksync---, ,---fec encoder---, ,---funnel---, ; send_rtcp_src_%u o---o rtcp_sink ; ; ++ * o----------o sink src o-o sink src o--o sink_%u ; ; ; '---------------------' ; ++ * ; '---------------' ,-----------------, ; ; ; ; ; ++ * ; ; src o-o send_rtp_sink_%u ; ; ++ * ; sink_%u ,---clocksync---, ,---fec encoder---, ; ; ; ; ; ++ * o----------o sink src o-o sink src o--o sink%u ; '--------------------' ; ++ * ; '---------------' ,-----------------, '------------' ; ++ * '-----------------------------------------------------------------------------------------------------------------' + */ + GstPadTemplate *rtp_templ; +- GstPad *rtp_sink; ++ GstPad *rtp_sink, *sinkpad, *srcpad; + gchar *pad_name; + WebRTCTransceiver *trans; ++ GstElement *clocksync; ++ GstElement *fec_encoder; + + g_return_val_if_fail (pad->trans != NULL, NULL); + +- GST_INFO_OBJECT (pad, "linking input stream %u", pad->mlineindex); +- + trans = WEBRTC_TRANSCEIVER (pad->trans); + ++ GST_INFO_OBJECT 
(pad, "linking input stream %u", pad->trans->mline); ++ + g_assert (trans->stream); + ++ clocksync = gst_element_factory_make ("clocksync", NULL); ++ g_object_set (clocksync, "sync", TRUE, NULL); ++ gst_bin_add (GST_BIN (webrtc), clocksync); ++ gst_element_sync_state_with_parent (clocksync); ++ ++ srcpad = gst_element_get_static_pad (clocksync, "src"); ++ ++ fec_encoder = _build_fec_encoder (webrtc, trans); ++ if (!fec_encoder) { ++ g_warn_if_reached (); ++ return NULL; ++ } ++ ++ _set_internal_rtpbin_element_props_from_stream (webrtc, trans->stream); ++ ++ gst_bin_add (GST_BIN (webrtc), fec_encoder); ++ gst_element_sync_state_with_parent (fec_encoder); ++ ++ sinkpad = gst_element_get_static_pad (fec_encoder, "sink"); ++ if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) ++ g_warn_if_reached (); ++ gst_clear_object (&srcpad); ++ gst_clear_object (&sinkpad); ++ sinkpad = gst_element_get_static_pad (clocksync, "sink"); ++ srcpad = gst_element_get_static_pad (fec_encoder, "src"); ++ + if (!webrtc->rtpfunnel) { + rtp_templ = + _find_pad_template (webrtc->rtpbin, GST_PAD_SINK, GST_PAD_REQUEST, + "send_rtp_sink_%u"); + g_assert (rtp_templ); + +- pad_name = g_strdup_printf ("send_rtp_sink_%u", pad->mlineindex); ++ pad_name = g_strdup_printf ("send_rtp_sink_%u", pad->trans->mline); + rtp_sink = + gst_element_request_pad (webrtc->rtpbin, rtp_templ, pad_name, NULL); + g_free (pad_name); +- gst_ghost_pad_set_target (GST_GHOST_PAD (pad), rtp_sink); ++ gst_pad_link (srcpad, rtp_sink); + gst_object_unref (rtp_sink); + +- pad_name = g_strdup_printf ("send_rtp_src_%u", pad->mlineindex); ++ pad_name = g_strdup_printf ("send_rtp_src_%u", pad->trans->mline); + if (!gst_element_link_pads (GST_ELEMENT (webrtc->rtpbin), pad_name, + GST_ELEMENT (trans->stream->send_bin), "rtp_sink")) + g_warn_if_reached (); + g_free (pad_name); + } else { +- gchar *pad_name = g_strdup_printf ("sink_%u", pad->mlineindex); ++ gchar *pad_name = g_strdup_printf ("sink_%u", pad->trans->mline); + GstPad *funnel_sinkpad = +- gst_element_get_request_pad (webrtc->rtpfunnel, pad_name); ++ gst_element_request_pad_simple (webrtc->rtpfunnel, pad_name); + +- gst_ghost_pad_set_target (GST_GHOST_PAD (pad), funnel_sinkpad); ++ gst_pad_link (srcpad, funnel_sinkpad); + + g_free (pad_name); + gst_object_unref (funnel_sinkpad); + } + ++ gst_ghost_pad_set_target (GST_GHOST_PAD (pad), sinkpad); ++ ++ gst_clear_object (&srcpad); ++ gst_clear_object (&sinkpad); ++ + gst_element_sync_state_with_parent (GST_ELEMENT (trans->stream->send_bin)); + + return GST_PAD (pad); +@@ -3631,7 +5355,7 @@ _add_ice_candidates_from_sdp (GstWebRTCBin * webrtc, gint mlineindex, + if (stream == NULL) + stream = _find_ice_stream_for_session (webrtc, mlineindex); + if (stream == NULL) { +- GST_WARNING_OBJECT (webrtc, ++ GST_DEBUG_OBJECT (webrtc, + "Unknown mline %u, dropping ICE candidates from SDP", mlineindex); + return; + } +@@ -3681,37 +5405,106 @@ _filter_sdp_fields (GQuark field_id, const GValue * value, + return TRUE; + } + +-static void +-_set_rtx_ptmap_from_stream (GstWebRTCBin * webrtc, TransportStream * stream) ++static guint ++transport_stream_ptmap_get_rtp_header_extension_id (TransportStream * stream, ++ const char *rtphdrext_uri) + { +- gint *rtx_pt; +- gsize rtx_count; ++ guint i; + +- rtx_pt = transport_stream_get_all_pt (stream, "RTX", &rtx_count); +- GST_LOG_OBJECT (stream, "have %" G_GSIZE_FORMAT " rtx payloads", rtx_count); +- if (rtx_pt) { +- GstStructure *pt_map = gst_structure_new_empty ("application/x-rtp-pt-map"); +- gsize i; ++ for (i = 0; i < 
stream->ptmap->len; i++) { ++ PtMapItem *item = &g_array_index (stream->ptmap, PtMapItem, i); ++ guint id; ++ ++ id = caps_get_rtp_header_extension_id (item->caps, rtphdrext_uri); ++ if (id != -1) ++ return id; ++ } ++ ++ return -1; ++} + +- for (i = 0; i < rtx_count; i++) { +- GstCaps *rtx_caps = transport_stream_get_caps_for_pt (stream, rtx_pt[i]); +- const GstStructure *s = gst_caps_get_structure (rtx_caps, 0); +- const gchar *apt = gst_structure_get_string (s, "apt"); ++static void ++ensure_rtx_hdr_ext (TransportStream * stream) ++{ ++ stream->rtphdrext_id_stream_id = ++ transport_stream_ptmap_get_rtp_header_extension_id (stream, ++ RTPHDREXT_STREAM_ID); ++ stream->rtphdrext_id_repaired_stream_id = ++ transport_stream_ptmap_get_rtp_header_extension_id (stream, ++ RTPHDREXT_REPAIRED_STREAM_ID); ++ ++ /* TODO: removing header extensions usage from rtx on renegotiation */ ++ ++ if (stream->rtxsend) { ++ if (stream->rtphdrext_id_stream_id != -1 && !stream->rtxsend_stream_id) { ++ stream->rtxsend_stream_id = ++ gst_rtp_header_extension_create_from_uri (RTPHDREXT_STREAM_ID); ++ if (!stream->rtxsend_stream_id) ++ g_warn_if_reached (); ++ gst_rtp_header_extension_set_id (stream->rtxsend_stream_id, ++ stream->rtphdrext_id_stream_id); ++ ++ GST_DEBUG_OBJECT (stream, "adding rtp header extension %" GST_PTR_FORMAT ++ " with id %u to %" GST_PTR_FORMAT, stream->rtxsend_stream_id, ++ stream->rtphdrext_id_stream_id, stream->rtxsend); ++ ++ g_signal_emit_by_name (stream->rtxsend, "add-extension", ++ stream->rtxsend_stream_id); ++ } + +- GST_LOG_OBJECT (stream, "setting rtx mapping: %s -> %u", apt, rtx_pt[i]); +- gst_structure_set (pt_map, apt, G_TYPE_UINT, rtx_pt[i], NULL); ++ if (stream->rtphdrext_id_repaired_stream_id != -1 ++ && !stream->rtxsend_repaired_stream_id) { ++ stream->rtxsend_repaired_stream_id = ++ gst_rtp_header_extension_create_from_uri ++ (RTPHDREXT_REPAIRED_STREAM_ID); ++ if (!stream->rtxsend_repaired_stream_id) ++ g_warn_if_reached (); ++ gst_rtp_header_extension_set_id (stream->rtxsend_repaired_stream_id, ++ stream->rtphdrext_id_repaired_stream_id); ++ ++ GST_DEBUG_OBJECT (stream, "adding rtp header extension %" GST_PTR_FORMAT ++ " with id %u to %" GST_PTR_FORMAT, stream->rtxsend_repaired_stream_id, ++ stream->rtphdrext_id_repaired_stream_id, stream->rtxsend); ++ ++ g_signal_emit_by_name (stream->rtxsend, "add-extension", ++ stream->rtxsend_repaired_stream_id); + } ++ } + +- GST_DEBUG_OBJECT (stream, "setting payload map on %" GST_PTR_FORMAT " : %" +- GST_PTR_FORMAT " and %" GST_PTR_FORMAT, stream->rtxreceive, +- stream->rtxsend, pt_map); ++ if (stream->rtxreceive) { ++ if (stream->rtphdrext_id_stream_id != -1 && !stream->rtxreceive_stream_id) { ++ stream->rtxreceive_stream_id = ++ gst_rtp_header_extension_create_from_uri (RTPHDREXT_STREAM_ID); ++ if (!stream->rtxreceive_stream_id) ++ g_warn_if_reached (); ++ gst_rtp_header_extension_set_id (stream->rtxreceive_stream_id, ++ stream->rtphdrext_id_stream_id); + +- if (stream->rtxreceive) +- g_object_set (stream->rtxreceive, "payload-type-map", pt_map, NULL); +- if (stream->rtxsend) +- g_object_set (stream->rtxsend, "payload-type-map", pt_map, NULL); ++ GST_DEBUG_OBJECT (stream, "adding rtp header extension %" GST_PTR_FORMAT ++ " with id %u to %" GST_PTR_FORMAT, stream->rtxsend_stream_id, ++ stream->rtphdrext_id_stream_id, stream->rtxreceive); ++ ++ g_signal_emit_by_name (stream->rtxreceive, "add-extension", ++ stream->rtxreceive_stream_id); ++ } + +- gst_structure_free (pt_map); ++ if (stream->rtphdrext_id_repaired_stream_id != -1 ++ 
&& !stream->rtxreceive_repaired_stream_id) { ++ stream->rtxreceive_repaired_stream_id = ++ gst_rtp_header_extension_create_from_uri ++ (RTPHDREXT_REPAIRED_STREAM_ID); ++ if (!stream->rtxreceive_repaired_stream_id) ++ g_warn_if_reached (); ++ gst_rtp_header_extension_set_id (stream->rtxreceive_repaired_stream_id, ++ stream->rtphdrext_id_repaired_stream_id); ++ ++ GST_DEBUG_OBJECT (stream, "adding rtp header extension %" GST_PTR_FORMAT ++ " with id %u to %" GST_PTR_FORMAT, stream->rtxsend_repaired_stream_id, ++ stream->rtphdrext_id_repaired_stream_id, stream->rtxreceive); ++ ++ g_signal_emit_by_name (stream->rtxreceive, "add-extension", ++ stream->rtxreceive_repaired_stream_id); ++ } + } + } + +@@ -3775,6 +5568,7 @@ _update_transport_ptmap_from_media (GstWebRTCBin * webrtc, + } + + item.pt = pt; ++ item.media_idx = media_idx; + gst_caps_unref (outcaps); + + g_array_append_val (stream->ptmap, item); +@@ -3788,19 +5582,42 @@ static void + _update_transceiver_from_sdp_media (GstWebRTCBin * webrtc, + const GstSDPMessage * sdp, guint media_idx, + TransportStream * stream, GstWebRTCRTPTransceiver * rtp_trans, +- GStrv bundled, guint bundle_idx) ++ GStrv bundled, guint bundle_idx, GError ** error) + { + WebRTCTransceiver *trans = WEBRTC_TRANSCEIVER (rtp_trans); + GstWebRTCRTPTransceiverDirection prev_dir = rtp_trans->current_direction; + GstWebRTCRTPTransceiverDirection new_dir; ++ const GstSDPMedia *local_media, *remote_media; + const GstSDPMedia *media = gst_sdp_message_get_media (sdp, media_idx); + GstWebRTCDTLSSetup new_setup; +- gboolean new_rtcp_mux, new_rtcp_rsize; ++ char *local_msid = NULL; ++ gboolean new_rtcp_rsize; + ReceiveState receive_state = RECEIVE_STATE_UNSET; + int i; + ++ local_media = ++ gst_sdp_message_get_media (webrtc->current_local_description->sdp, ++ media_idx); ++ remote_media = ++ gst_sdp_message_get_media (webrtc->current_remote_description->sdp, ++ media_idx); ++ + rtp_trans->mline = media_idx; + ++ if (!g_strcmp0 (gst_sdp_media_get_media (media), "audio")) { ++ if (rtp_trans->kind == GST_WEBRTC_KIND_VIDEO) ++ GST_FIXME_OBJECT (webrtc, "Updating video transceiver %" GST_PTR_FORMAT ++ " to audio, which isn't fully supported.", rtp_trans); ++ rtp_trans->kind = GST_WEBRTC_KIND_AUDIO; ++ } ++ ++ if (!g_strcmp0 (gst_sdp_media_get_media (media), "video")) { ++ if (rtp_trans->kind == GST_WEBRTC_KIND_AUDIO) ++ GST_FIXME_OBJECT (webrtc, "Updating audio transceiver %" GST_PTR_FORMAT ++ " to video, which isn't fully supported.", rtp_trans); ++ rtp_trans->kind = GST_WEBRTC_KIND_VIDEO; ++ } ++ + for (i = 0; i < gst_sdp_media_attributes_len (media); i++) { + const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, i); + +@@ -3811,40 +5628,38 @@ _update_transceiver_from_sdp_media (GstWebRTCBin * webrtc, + } + + { +- const GstSDPMedia *local_media, *remote_media; + GstWebRTCRTPTransceiverDirection local_dir, remote_dir; + GstWebRTCDTLSSetup local_setup, remote_setup; + +- local_media = +- gst_sdp_message_get_media (webrtc->current_local_description->sdp, +- media_idx); +- remote_media = +- gst_sdp_message_get_media (webrtc->current_remote_description->sdp, +- media_idx); +- + local_setup = _get_dtls_setup_from_media (local_media); + remote_setup = _get_dtls_setup_from_media (remote_media); + new_setup = _get_final_setup (local_setup, remote_setup); +- if (new_setup == GST_WEBRTC_DTLS_SETUP_NONE) ++ if (new_setup == GST_WEBRTC_DTLS_SETUP_NONE) { ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ "Cannot intersect direction attributes for media %u", 
media_idx); + return; ++ } + + local_dir = _get_direction_from_media (local_media); + remote_dir = _get_direction_from_media (remote_media); + new_dir = _get_final_direction (local_dir, remote_dir); +- +- if (new_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE) ++ if (new_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE) { ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ "Cannot intersect dtls setup attributes for media %u", media_idx); + return; ++ } + + if (prev_dir != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE + && new_dir != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE + && prev_dir != new_dir) { +- GST_FIXME_OBJECT (webrtc, "implement transceiver direction changes"); ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "transceiver direction changes are not implemented. Media %u", ++ media_idx); + return; + } + + if (!bundled || bundle_idx == media_idx) { +- new_rtcp_mux = _media_has_attribute_key (local_media, "rtcp-mux") +- && _media_has_attribute_key (remote_media, "rtcp-mux"); + new_rtcp_rsize = _media_has_attribute_key (local_media, "rtcp-rsize") + && _media_has_attribute_key (remote_media, "rtcp-rsize"); + +@@ -3857,8 +5672,6 @@ _update_transceiver_from_sdp_media (GstWebRTCBin * webrtc, + g_object_unref (session); + } + } +- +- g_object_set (stream, "rtcp-mux", new_rtcp_mux, NULL); + } + } + +@@ -3879,22 +5692,12 @@ _update_transceiver_from_sdp_media (GstWebRTCBin * webrtc, + } + + if (new_dir != prev_dir) { +- gchar *prev_dir_s, *new_dir_s; +- +- prev_dir_s = +- _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, +- prev_dir); +- new_dir_s = +- _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, +- new_dir); ++ guint rtp_session_id = bundled ? bundle_idx : media_idx; + + GST_DEBUG_OBJECT (webrtc, "transceiver %" GST_PTR_FORMAT +- " direction change from %s to %s", rtp_trans, prev_dir_s, new_dir_s); +- +- g_free (prev_dir_s); +- prev_dir_s = NULL; +- g_free (new_dir_s); +- new_dir_s = NULL; ++ " direction change from %s to %s", rtp_trans, ++ gst_webrtc_rtp_transceiver_direction_to_string (prev_dir), ++ gst_webrtc_rtp_transceiver_direction_to_string (new_dir)); + + if (new_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE) { + GstWebRTCBinPad *pad; +@@ -3919,18 +5722,31 @@ _update_transceiver_from_sdp_media (GstWebRTCBin * webrtc, + if (new_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY || + new_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV) { + GstWebRTCBinPad *pad = +- _find_pad_for_mline (webrtc, GST_PAD_SINK, media_idx); ++ _find_pad_for_transceiver (webrtc, GST_PAD_SINK, rtp_trans); ++ local_msid = _get_msid_from_media (local_media); ++ + if (pad) { + GST_DEBUG_OBJECT (webrtc, "found existing send pad %" GST_PTR_FORMAT +- " for transceiver %" GST_PTR_FORMAT, pad, trans); +- g_assert (pad->trans == rtp_trans); +- g_assert (pad->mlineindex == media_idx); ++ " for transceiver %" GST_PTR_FORMAT " with msid \'%s\'", pad, trans, ++ pad->msid); ++ if (g_strcmp0 (pad->msid, local_msid) != 0) { ++ GST_DEBUG_OBJECT (webrtc, "send pad %" GST_PTR_FORMAT ++ " transceiver %" GST_PTR_FORMAT " changing msid from \'%s\'" ++ " to \'%s\'", pad, trans, pad->msid, local_msid); ++ g_clear_pointer (&pad->msid, g_free); ++ pad->msid = local_msid; ++ g_object_notify (G_OBJECT (pad), "msid"); ++ local_msid = NULL; ++ } else { ++ g_clear_pointer (&local_msid, g_free); ++ } + gst_object_unref (pad); + } else { + GST_DEBUG_OBJECT (webrtc, + "creating new send pad for transceiver %" GST_PTR_FORMAT, 
trans); +- pad = _create_pad_for_sdp_media (webrtc, GST_PAD_SINK, media_idx); +- pad->trans = gst_object_ref (rtp_trans); ++ pad = _create_pad_for_sdp_media (webrtc, GST_PAD_SINK, rtp_trans, ++ G_MAXUINT, local_msid); ++ local_msid = NULL; + _connect_input_stream (webrtc, pad); + _add_pad (webrtc, pad); + } +@@ -3938,36 +5754,46 @@ _update_transceiver_from_sdp_media (GstWebRTCBin * webrtc, + if (new_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY || + new_dir == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV) { + GstWebRTCBinPad *pad = +- _find_pad_for_mline (webrtc, GST_PAD_SRC, media_idx); ++ _find_pad_for_transceiver (webrtc, GST_PAD_SRC, rtp_trans); ++ char *remote_msid = _get_msid_from_media (remote_media); ++ + if (pad) { + GST_DEBUG_OBJECT (webrtc, "found existing receive pad %" GST_PTR_FORMAT +- " for transceiver %" GST_PTR_FORMAT, pad, trans); +- g_assert (pad->trans == rtp_trans); +- g_assert (pad->mlineindex == media_idx); ++ " for transceiver %" GST_PTR_FORMAT " with msid \'%s\'", pad, trans, ++ pad->msid); ++ if (g_strcmp0 (pad->msid, remote_msid) != 0) { ++ GST_DEBUG_OBJECT (webrtc, "receive pad %" GST_PTR_FORMAT ++ " transceiver %" GST_PTR_FORMAT " changing msid from \'%s\'" ++ " to \'%s\'", pad, trans, pad->msid, remote_msid); ++ g_clear_pointer (&pad->msid, g_free); ++ pad->msid = remote_msid; ++ remote_msid = NULL; ++ g_object_notify (G_OBJECT (pad), "msid"); ++ } else { ++ g_clear_pointer (&remote_msid, g_free); ++ } + gst_object_unref (pad); + } else { + GST_DEBUG_OBJECT (webrtc, + "creating new receive pad for transceiver %" GST_PTR_FORMAT, trans); +- pad = _create_pad_for_sdp_media (webrtc, GST_PAD_SRC, media_idx); +- pad->trans = gst_object_ref (rtp_trans); ++ pad = _create_pad_for_sdp_media (webrtc, GST_PAD_SRC, rtp_trans, ++ G_MAXUINT, remote_msid); ++ remote_msid = NULL; + + if (!trans->stream) { + TransportStream *item; + + item = +- _get_or_create_transport_stream (webrtc, +- bundled ? bundle_idx : media_idx, FALSE); ++ _get_or_create_transport_stream (webrtc, rtp_session_id, FALSE); + webrtc_transceiver_set_transport (trans, item); + } + +- _connect_output_stream (webrtc, trans->stream, +- bundled ? bundle_idx : media_idx); ++ _connect_output_stream (webrtc, trans->stream, rtp_session_id); + /* delay adding the pad until rtpbin creates the recv output pad + * to ghost to so queries/events travel through the pipeline correctly + * as soon as the pad is added */ + _add_pad_to_list (webrtc, pad); + } +- + } + + rtp_trans->mline = media_idx; +@@ -3976,7 +5802,7 @@ _update_transceiver_from_sdp_media (GstWebRTCBin * webrtc, + + if (!bundled || bundle_idx == media_idx) { + if (stream->rtxsend || stream->rtxreceive) { +- _set_rtx_ptmap_from_stream (webrtc, stream); ++ _set_internal_rtpbin_element_props_from_stream (webrtc, stream); + } + + g_object_set (stream, "dtls-client", +@@ -4023,7 +5849,7 @@ _generate_data_channel_id (GstWebRTCBin * webrtc) + } + + /* client must generate even ids, server must generate odd ids */ +- if (new_id % 2 == ! 
!is_client) ++ if (new_id % 2 == !(!is_client)) + continue; + + channel = _find_data_channel_for_id (webrtc, new_id); +@@ -4036,7 +5862,8 @@ _generate_data_channel_id (GstWebRTCBin * webrtc) + + static void + _update_data_channel_from_sdp_media (GstWebRTCBin * webrtc, +- const GstSDPMessage * sdp, guint media_idx, TransportStream * stream) ++ const GstSDPMessage * sdp, guint media_idx, TransportStream * stream, ++ GError ** error) + { + const GstSDPMedia *local_media, *remote_media; + GstWebRTCDTLSSetup local_setup, remote_setup, new_setup; +@@ -4055,18 +5882,25 @@ _update_data_channel_from_sdp_media (GstWebRTCBin * webrtc, + local_setup = _get_dtls_setup_from_media (local_media); + remote_setup = _get_dtls_setup_from_media (remote_media); + new_setup = _get_final_setup (local_setup, remote_setup); +- if (new_setup == GST_WEBRTC_DTLS_SETUP_NONE) ++ if (new_setup == GST_WEBRTC_DTLS_SETUP_NONE) { ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ "Cannot intersect dtls setup for media %u", media_idx); + return; ++ } + + /* data channel is always rtcp-muxed to avoid generating ICE candidates + * for RTCP */ +- g_object_set (stream, "rtcp-mux", TRUE, "dtls-client", ++ g_object_set (stream, "dtls-client", + new_setup == GST_WEBRTC_DTLS_SETUP_ACTIVE, NULL); + + local_port = _get_sctp_port_from_media (local_media); + remote_port = _get_sctp_port_from_media (local_media); +- if (local_port == -1 || remote_port == -1) ++ if (local_port == -1 || remote_port == -1) { ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ "Could not find sctp port for media %u (local %i, remote %i)", ++ media_idx, local_port, remote_port); + return; ++ } + + if (0 == (local_max_size = + _get_sctp_max_message_size_from_media (local_media))) +@@ -4096,6 +5930,7 @@ _update_data_channel_from_sdp_media (GstWebRTCBin * webrtc, + remote_port, NULL); + } + ++ DC_LOCK (webrtc); + for (i = 0; i < webrtc->priv->data_channels->len; i++) { + WebRTCDataChannel *channel; + +@@ -4113,6 +5948,7 @@ _update_data_channel_from_sdp_media (GstWebRTCBin * webrtc, + webrtc_data_channel_start_negotiation (channel); + } + } ++ DC_UNLOCK (webrtc); + + stream->active = TRUE; + +@@ -4124,12 +5960,16 @@ static gboolean + _find_compatible_unassociated_transceiver (GstWebRTCRTPTransceiver * p1, + gconstpointer data) + { ++ GstWebRTCKind kind = GPOINTER_TO_INT (data); ++ + if (p1->mid) + return FALSE; + if (p1->mline != -1) + return FALSE; + if (p1->stopped) + return FALSE; ++ if (p1->kind != GST_WEBRTC_KIND_UNKNOWN && p1->kind != kind) ++ return FALSE; + + return TRUE; + } +@@ -4138,10 +5978,9 @@ static void + _connect_rtpfunnel (GstWebRTCBin * webrtc, guint session_id) + { + gchar *pad_name; +- GstPad *queue_srcpad; ++ GstPad *srcpad; + GstPad *rtp_sink; + TransportStream *stream = _find_transport_for_session (webrtc, session_id); +- GstElement *queue; + + g_assert (stream); + +@@ -4152,19 +5991,14 @@ _connect_rtpfunnel (GstWebRTCBin * webrtc, guint session_id) + gst_bin_add (GST_BIN (webrtc), webrtc->rtpfunnel); + gst_element_sync_state_with_parent (webrtc->rtpfunnel); + +- queue = gst_element_factory_make ("queue", NULL); +- gst_bin_add (GST_BIN (webrtc), queue); +- gst_element_sync_state_with_parent (queue); +- +- gst_element_link (webrtc->rtpfunnel, queue); +- +- queue_srcpad = gst_element_get_static_pad (queue, "src"); ++ srcpad = gst_element_get_static_pad (webrtc->rtpfunnel, "src"); + + pad_name = g_strdup_printf ("send_rtp_sink_%d", session_id); +- rtp_sink = gst_element_get_request_pad 
(webrtc->rtpbin, pad_name); ++ rtp_sink = gst_element_request_pad_simple (webrtc->rtpbin, pad_name); + g_free (pad_name); +- gst_pad_link (queue_srcpad, rtp_sink); +- gst_object_unref (queue_srcpad); ++ ++ gst_pad_link (srcpad, rtp_sink); ++ gst_object_unref (srcpad); + gst_object_unref (rtp_sink); + + pad_name = g_strdup_printf ("send_rtp_src_%d", session_id); +@@ -4179,7 +6013,7 @@ done: + + static gboolean + _update_transceivers_from_sdp (GstWebRTCBin * webrtc, SDPSource source, +- GstWebRTCSessionDescription * sdp) ++ GstWebRTCSessionDescription * sdp, GError ** error) + { + int i; + gboolean ret = FALSE; +@@ -4190,14 +6024,14 @@ _update_transceivers_from_sdp (GstWebRTCBin * webrtc, SDPSource source, + /* FIXME: With some peers, it's possible we could have + * multiple bundles to deal with, although I've never seen one yet */ + if (webrtc->bundle_policy != GST_WEBRTC_BUNDLE_POLICY_NONE) +- if (!_parse_bundle (sdp->sdp, &bundled)) ++ if (!_parse_bundle (sdp->sdp, &bundled, error)) + goto done; + + if (bundled) { + + if (!_get_bundle_index (sdp->sdp, bundled, &bundle_idx)) { +- GST_ERROR_OBJECT (webrtc, "Bundle tag is %s but no media found matching", +- bundled[0]); ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ "Bundle tag is %s but no media found matching", bundled[0]); + goto done; + } + +@@ -4214,6 +6048,7 @@ _update_transceivers_from_sdp (GstWebRTCBin * webrtc, SDPSource source, + * parameters aren't set up properly for the bundled streams */ + _update_transport_ptmap_from_media (webrtc, bundle_stream, sdp->sdp, i); + } ++ ensure_rtx_hdr_ext (bundle_stream); + + _connect_rtpfunnel (webrtc, bundle_idx); + } +@@ -4242,20 +6077,32 @@ _update_transceivers_from_sdp (GstWebRTCBin * webrtc, SDPSource source, + * bundling we need to do it now */ + g_array_set_size (stream->ptmap, 0); + _update_transport_ptmap_from_media (webrtc, stream, sdp->sdp, i); ++ ensure_rtx_hdr_ext (stream); + } + + if (trans) + webrtc_transceiver_set_transport ((WebRTCTransceiver *) trans, stream); + + if (source == SDP_LOCAL && sdp->type == GST_WEBRTC_SDP_TYPE_OFFER && !trans) { +- GST_ERROR ("State mismatch. Could not find local transceiver by mline."); ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ "State mismatch. Could not find local transceiver by mline %u", i); + goto done; + } else { + if (g_strcmp0 (gst_sdp_media_get_media (media), "audio") == 0 || + g_strcmp0 (gst_sdp_media_get_media (media), "video") == 0) { ++ GstWebRTCKind kind = GST_WEBRTC_KIND_UNKNOWN; ++ + /* No existing transceiver, find an unused one */ + if (!trans) { +- trans = _find_transceiver (webrtc, NULL, ++ if (g_strcmp0 (gst_sdp_media_get_media (media), "audio") == 0) ++ kind = GST_WEBRTC_KIND_AUDIO; ++ else if (g_strcmp0 (gst_sdp_media_get_media (media), "video") == 0) ++ kind = GST_WEBRTC_KIND_VIDEO; ++ else ++ GST_LOG_OBJECT (webrtc, "Unknown media kind %s", ++ GST_STR_NULL (gst_sdp_media_get_media (media))); ++ ++ trans = _find_transceiver (webrtc, GINT_TO_POINTER (kind), + (FindTransceiverFunc) _find_compatible_unassociated_transceiver); + } + +@@ -4265,15 +6112,21 @@ _update_transceivers_from_sdp (GstWebRTCBin * webrtc, SDPSource source, + * that calls to setDirection will change the value. 
Nothing about + * a default value when the transceiver is created internally */ + if (!trans) { +- trans = +- GST_WEBRTC_RTP_TRANSCEIVER (_create_webrtc_transceiver (webrtc, +- _get_direction_from_media (media), i)); ++ WebRTCTransceiver *t = _create_webrtc_transceiver (webrtc, ++ _get_direction_from_media (media), i, kind, NULL); ++ webrtc_transceiver_set_transport (t, stream); ++ trans = GST_WEBRTC_RTP_TRANSCEIVER (t); + } + + _update_transceiver_from_sdp_media (webrtc, sdp->sdp, i, stream, +- trans, bundled, bundle_idx); ++ trans, bundled, bundle_idx, error); ++ if (error && *error) ++ goto done; + } else if (_message_media_is_datachannel (sdp->sdp, i)) { +- _update_data_channel_from_sdp_media (webrtc, sdp->sdp, i, stream); ++ _update_data_channel_from_sdp_media (webrtc, sdp->sdp, i, stream, ++ error); ++ if (error && *error) ++ goto done; + } else { + GST_ERROR_OBJECT (webrtc, "Unknown media type in SDP at index %u", i); + } +@@ -4297,15 +6150,138 @@ done: + return ret; + } + ++static gint ++transceivers_media_num_cmp (GstWebRTCBin * webrtc, ++ GstWebRTCSessionDescription * previous, GstWebRTCSessionDescription * new) ++{ ++ if (!previous) ++ return 0; ++ ++ return gst_sdp_message_medias_len (new->sdp) - ++ gst_sdp_message_medias_len (previous->sdp); ++ ++} ++ ++static gboolean ++check_locked_mlines (GstWebRTCBin * webrtc, GstWebRTCSessionDescription * sdp, ++ GError ** error) ++{ ++ guint i; ++ ++ for (i = 0; i < gst_sdp_message_medias_len (sdp->sdp); i++) { ++ const GstSDPMedia *media = gst_sdp_message_get_media (sdp->sdp, i); ++ GstWebRTCRTPTransceiver *rtp_trans; ++ WebRTCTransceiver *trans; ++ ++ rtp_trans = _find_transceiver_for_sdp_media (webrtc, sdp->sdp, i); ++ /* only look for matching mid */ ++ if (rtp_trans == NULL) ++ continue; ++ ++ trans = WEBRTC_TRANSCEIVER (rtp_trans); ++ ++ /* We only validate the locked mlines for now */ ++ if (!trans->mline_locked) ++ continue; ++ ++ if (rtp_trans->mline != i) { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "m-line with mid %s is at position %d, but was locked to %d, " ++ "rejecting", rtp_trans->mid, i, rtp_trans->mline); ++ return FALSE; ++ } ++ ++ if (rtp_trans->kind != GST_WEBRTC_KIND_UNKNOWN) { ++ if (!g_strcmp0 (gst_sdp_media_get_media (media), "audio") && ++ rtp_trans->kind != GST_WEBRTC_KIND_AUDIO) { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "m-line %d with transceiver <%s> was locked to %s, but SDP has " ++ "%s media", i, GST_OBJECT_NAME (rtp_trans), ++ gst_webrtc_kind_to_string (rtp_trans->kind), ++ gst_sdp_media_get_media (media)); ++ return FALSE; ++ } ++ ++ if (!g_strcmp0 (gst_sdp_media_get_media (media), "video") && ++ rtp_trans->kind != GST_WEBRTC_KIND_VIDEO) { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "m-line %d with transceiver <%s> was locked to %s, but SDP has " ++ "%s media", i, GST_OBJECT_NAME (rtp_trans), ++ gst_webrtc_kind_to_string (rtp_trans->kind), ++ gst_sdp_media_get_media (media)); ++ return FALSE; ++ } ++ } ++ } ++ ++ return TRUE; ++} ++ ++ + struct set_description + { +- GstPromise *promise; + SDPSource source; + GstWebRTCSessionDescription *sdp; + }; + ++static GstWebRTCSessionDescription * ++get_previous_description (GstWebRTCBin * webrtc, SDPSource source, ++ GstWebRTCSDPType type) ++{ ++ switch (type) { ++ case GST_WEBRTC_SDP_TYPE_OFFER: ++ case GST_WEBRTC_SDP_TYPE_PRANSWER: ++ case GST_WEBRTC_SDP_TYPE_ANSWER: ++ if (source == SDP_LOCAL) { ++ return webrtc->current_local_description; 
++ } else { ++ return webrtc->current_remote_description; ++ } ++ case GST_WEBRTC_SDP_TYPE_ROLLBACK: ++ return NULL; ++ default: ++ /* other values mean memory corruption/uninitialized! */ ++ g_assert_not_reached (); ++ break; ++ } ++ ++ return NULL; ++} ++ ++static GstWebRTCSessionDescription * ++get_last_generated_description (GstWebRTCBin * webrtc, SDPSource source, ++ GstWebRTCSDPType type) ++{ ++ switch (type) { ++ case GST_WEBRTC_SDP_TYPE_OFFER: ++ if (source == SDP_REMOTE) ++ return webrtc->priv->last_generated_answer; ++ else ++ return webrtc->priv->last_generated_offer; ++ break; ++ case GST_WEBRTC_SDP_TYPE_PRANSWER: ++ case GST_WEBRTC_SDP_TYPE_ANSWER: ++ if (source == SDP_LOCAL) ++ return webrtc->priv->last_generated_answer; ++ else ++ return webrtc->priv->last_generated_offer; ++ case GST_WEBRTC_SDP_TYPE_ROLLBACK: ++ return NULL; ++ default: ++ /* other values mean memory corruption/uninitialized! */ ++ g_assert_not_reached (); ++ break; ++ } ++ ++ return NULL; ++} ++ ++ + /* http://w3c.github.io/webrtc-pc/#set-description */ +-static void ++static GstStructure * + _set_description_task (GstWebRTCBin * webrtc, struct set_description *sd) + { + GstWebRTCSignalingState new_signaling_state = webrtc->signaling_state; +@@ -4316,42 +6292,56 @@ _set_description_task (GstWebRTCBin * webrtc, struct set_description *sd) + guint i; + + { +- gchar *state = _enum_value_to_string (GST_TYPE_WEBRTC_SIGNALING_STATE, ++ const gchar *state = _enum_value_to_string (GST_TYPE_WEBRTC_SIGNALING_STATE, + webrtc->signaling_state); +- gchar *type_str = ++ const gchar *type_str = + _enum_value_to_string (GST_TYPE_WEBRTC_SDP_TYPE, sd->sdp->type); + gchar *sdp_text = gst_sdp_message_as_text (sd->sdp->sdp); + GST_INFO_OBJECT (webrtc, "Attempting to set %s %s in the %s state", + _sdp_source_to_string (sd->source), type_str, state); + GST_TRACE_OBJECT (webrtc, "SDP contents\n%s", sdp_text); + g_free (sdp_text); +- g_free (state); +- g_free (type_str); +- } +- +- if (!validate_sdp (webrtc->signaling_state, sd->source, sd->sdp, &error)) { +- GST_ERROR_OBJECT (webrtc, "%s", error->message); +- g_clear_error (&error); +- goto out; + } + +- if (webrtc->priv->is_closed) { +- GST_WARNING_OBJECT (webrtc, "we are closed"); ++ if (!validate_sdp (webrtc->signaling_state, sd->source, sd->sdp, &error)) + goto out; +- } + + if (webrtc->bundle_policy != GST_WEBRTC_BUNDLE_POLICY_NONE) +- if (!_parse_bundle (sd->sdp->sdp, &bundled)) ++ if (!_parse_bundle (sd->sdp->sdp, &bundled, &error)) + goto out; + + if (bundled) { + if (!_get_bundle_index (sd->sdp->sdp, bundled, &bundle_idx)) { +- GST_ERROR_OBJECT (webrtc, "Bundle tag is %s but no media found matching", +- bundled[0]); ++ g_set_error (&error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ "Bundle tag is %s but no matching media found", bundled[0]); + goto out; + } + } + ++ if (transceivers_media_num_cmp (webrtc, ++ get_previous_description (webrtc, sd->source, sd->sdp->type), ++ sd->sdp) < 0) { ++ g_set_error_literal (&error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ "m=lines removed from the SDP. 
Processing a completely new connection " ++ "is not currently supported."); ++ goto out; ++ } ++ ++ if ((sd->sdp->type == GST_WEBRTC_SDP_TYPE_PRANSWER || ++ sd->sdp->type == GST_WEBRTC_SDP_TYPE_ANSWER) && ++ transceivers_media_num_cmp (webrtc, ++ get_last_generated_description (webrtc, sd->source, sd->sdp->type), ++ sd->sdp) != 0) { ++ g_set_error_literal (&error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ "Answer doesn't have the same number of m-lines as the offer."); ++ goto out; ++ } ++ ++ if (!check_locked_mlines (webrtc, sd->sdp, &error)) ++ goto out; ++ + switch (sd->sdp->type) { + case GST_WEBRTC_SDP_TYPE_OFFER:{ + if (sd->source == SDP_LOCAL) { +@@ -4497,7 +6487,8 @@ _set_description_task (GstWebRTCBin * webrtc, struct set_description *sd) + GList *tmp; + + /* media modifications */ +- _update_transceivers_from_sdp (webrtc, sd->source, sd->sdp); ++ if (!_update_transceivers_from_sdp (webrtc, sd->source, sd->sdp, &error)) ++ goto out; + + for (tmp = webrtc->priv->pending_sink_transceivers; tmp;) { + GstWebRTCBinPad *pad = GST_WEBRTC_BIN_PAD (tmp->data); +@@ -4511,13 +6502,19 @@ _set_description_task (GstWebRTCBin * webrtc, struct set_description *sd) + continue; + } + +- if (pad->mlineindex >= gst_sdp_message_medias_len (sd->sdp->sdp)) { ++ if (!pad->trans) { ++ GST_LOG_OBJECT (pad, "doesn't have a transceiver"); ++ tmp = tmp->next; ++ continue; ++ } ++ ++ if (pad->trans->mline >= gst_sdp_message_medias_len (sd->sdp->sdp)) { + GST_DEBUG_OBJECT (pad, "not mentioned in this description. Skipping"); + tmp = tmp->next; + continue; + } + +- media = gst_sdp_message_get_media (sd->sdp->sdp, pad->mlineindex); ++ media = gst_sdp_message_get_media (sd->sdp->sdp, pad->trans->mline); + /* skip rejected media */ + if (gst_sdp_media_get_port (media) == 0) { + /* FIXME: arrange for an appropriate flow return */ +@@ -4527,12 +6524,6 @@ _set_description_task (GstWebRTCBin * webrtc, struct set_description *sd) + continue; + } + +- if (!pad->trans) { +- GST_LOG_OBJECT (pad, "doesn't have a transceiver"); +- tmp = tmp->next; +- continue; +- } +- + new_dir = pad->trans->direction; + if (new_dir != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY && + new_dir != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV) { +@@ -4560,10 +6551,11 @@ _set_description_task (GstWebRTCBin * webrtc, struct set_description *sd) + const GstSDPMedia *media = gst_sdp_message_get_media (sd->sdp->sdp, i); + gchar *ufrag, *pwd; + TransportStream *item; ++ guint rtp_session_id = bundled ? bundle_idx : i; + + item = +- _get_or_create_transport_stream (webrtc, bundled ? bundle_idx : i, +- _message_media_is_datachannel (sd->sdp->sdp, bundled ? 
bundle_idx : i)); ++ _get_or_create_transport_stream (webrtc, rtp_session_id, ++ _message_media_is_datachannel (sd->sdp->sdp, rtp_session_id)); + + if (sd->source == SDP_REMOTE) { + guint j; +@@ -4577,11 +6569,11 @@ _set_description_task (GstWebRTCBin * webrtc, struct set_description *sd) + + if (split[0] && sscanf (split[0], "%u", &ssrc) && split[1] + && g_str_has_prefix (split[1], "cname:")) { +- SsrcMapItem ssrc_item; +- +- ssrc_item.media_idx = i; +- ssrc_item.ssrc = ssrc; +- g_array_append_val (item->remote_ssrcmap, ssrc_item); ++ if (!find_mid_ssrc_for_ssrc (webrtc, ++ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY, ++ rtp_session_id, ssrc)) ++ transport_stream_add_ssrc_map_item (item, ++ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY, ssrc, i); + } + g_strfreev (split); + } +@@ -4645,18 +6637,15 @@ _set_description_task (GstWebRTCBin * webrtc, struct set_description *sd) + * signalingstatechange at connection. + */ + if (signalling_state_changed) { +- gchar *from = _enum_value_to_string (GST_TYPE_WEBRTC_SIGNALING_STATE, ++ const gchar *from = _enum_value_to_string (GST_TYPE_WEBRTC_SIGNALING_STATE, + webrtc->signaling_state); +- gchar *to = _enum_value_to_string (GST_TYPE_WEBRTC_SIGNALING_STATE, ++ const gchar *to = _enum_value_to_string (GST_TYPE_WEBRTC_SIGNALING_STATE, + new_signaling_state); + GST_TRACE_OBJECT (webrtc, "notify signaling-state from %s " + "to %s", from, to); + PC_UNLOCK (webrtc); + g_object_notify (G_OBJECT (webrtc), "signaling-state"); + PC_LOCK (webrtc); +- +- g_free (from); +- g_free (to); + } + + if (webrtc->signaling_state == GST_WEBRTC_SIGNALING_STATE_STABLE) { +@@ -4676,16 +6665,20 @@ _set_description_task (GstWebRTCBin * webrtc, struct set_description *sd) + out: + g_strfreev (bundled); + +- PC_UNLOCK (webrtc); +- gst_promise_reply (sd->promise, NULL); +- PC_LOCK (webrtc); ++ if (error) { ++ GstStructure *s = gst_structure_new ("application/x-gst-promise", ++ "error", G_TYPE_ERROR, error, NULL); ++ GST_WARNING_OBJECT (webrtc, "returning error: %s", error->message); ++ g_clear_error (&error); ++ return s; ++ } else { ++ return NULL; ++ } + } + + static void + _free_set_description_data (struct set_description *sd) + { +- if (sd->promise) +- gst_promise_unref (sd->promise); + if (sd->sdp) + gst_webrtc_session_description_free (sd->sdp); + g_free (sd); +@@ -4703,8 +6696,6 @@ gst_webrtc_bin_set_remote_description (GstWebRTCBin * webrtc, + goto bad_input; + + sd = g_new0 (struct set_description, 1); +- if (promise != NULL) +- sd->promise = gst_promise_ref (promise); + sd->source = SDP_REMOTE; + sd->sdp = gst_webrtc_session_description_copy (remote_sdp); + +@@ -4712,10 +6703,9 @@ gst_webrtc_bin_set_remote_description (GstWebRTCBin * webrtc, + (GstWebRTCBinFunc) _set_description_task, sd, + (GDestroyNotify) _free_set_description_data, promise)) { + GError *error = +- g_error_new (GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_CLOSED, ++ g_error_new (GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_INVALID_STATE, + "Could not set remote description. 
webrtcbin is closed."); +- GstStructure *s = +- gst_structure_new ("application/x-gstwebrtcbin-promise-error", ++ GstStructure *s = gst_structure_new ("application/x-gst-promise", + "error", G_TYPE_ERROR, error, NULL); + + gst_promise_reply (promise, s); +@@ -4744,8 +6734,6 @@ gst_webrtc_bin_set_local_description (GstWebRTCBin * webrtc, + goto bad_input; + + sd = g_new0 (struct set_description, 1); +- if (promise != NULL) +- sd->promise = gst_promise_ref (promise); + sd->source = SDP_LOCAL; + sd->sdp = gst_webrtc_session_description_copy (local_sdp); + +@@ -4753,10 +6741,9 @@ gst_webrtc_bin_set_local_description (GstWebRTCBin * webrtc, + (GstWebRTCBinFunc) _set_description_task, sd, + (GDestroyNotify) _free_set_description_data, promise)) { + GError *error = +- g_error_new (GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_CLOSED, ++ g_error_new (GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_INVALID_STATE, + "Could not set local description. webrtcbin is closed"); +- GstStructure *s = +- gst_structure_new ("application/x-gstwebrtcbin-promise-error", ++ GstStructure *s = gst_structure_new ("application/x-gst-promise", + "error", G_TYPE_ERROR, error, NULL); + + gst_promise_reply (promise, s); +@@ -4773,7 +6760,7 @@ bad_input: + } + } + +-static void ++static GstStructure * + _add_ice_candidate_task (GstWebRTCBin * webrtc, IceCandidateItem * item) + { + if (!webrtc->current_local_description || !webrtc->current_remote_description) { +@@ -4787,6 +6774,8 @@ _add_ice_candidate_task (GstWebRTCBin * webrtc, IceCandidateItem * item) + } else { + _add_ice_candidate (webrtc, item, FALSE); + } ++ ++ return NULL; + } + + static void +@@ -4804,16 +6793,18 @@ gst_webrtc_bin_add_ice_candidate (GstWebRTCBin * webrtc, guint mline, + + item = g_new0 (IceCandidateItem, 1); + item->mlineindex = mline; +- if (!g_ascii_strncasecmp (attr, "a=candidate:", 12)) +- item->candidate = g_strdup (attr); +- else if (!g_ascii_strncasecmp (attr, "candidate:", 10)) +- item->candidate = g_strdup_printf ("a=%s", attr); ++ if (attr && attr[0] != 0) { ++ if (!g_ascii_strncasecmp (attr, "a=candidate:", 12)) ++ item->candidate = g_strdup (attr); ++ else if (!g_ascii_strncasecmp (attr, "candidate:", 10)) ++ item->candidate = g_strdup_printf ("a=%s", attr); ++ } + gst_webrtc_bin_enqueue_task (webrtc, + (GstWebRTCBinFunc) _add_ice_candidate_task, item, + (GDestroyNotify) _free_ice_candidate_item, NULL); + } + +-static void ++static GstStructure * + _on_local_ice_candidate_task (GstWebRTCBin * webrtc) + { + gsize i; +@@ -4823,7 +6814,7 @@ _on_local_ice_candidate_task (GstWebRTCBin * webrtc) + if (webrtc->priv->pending_local_ice_candidates->len == 0) { + ICE_UNLOCK (webrtc); + GST_LOG_OBJECT (webrtc, "No ICE candidates to process right now"); +- return; /* Nothing to process */ ++ return NULL; /* Nothing to process */ + } + /* Take the array so we can process it all and free it later + * without holding the lock +@@ -4870,6 +6861,8 @@ _on_local_ice_candidate_task (GstWebRTCBin * webrtc) + + } + g_array_free (items, TRUE); ++ ++ return NULL; + } + + static void +@@ -4898,16 +6891,6 @@ _on_local_ice_candidate_cb (GstWebRTCICE * ice, guint session_id, + } + } + +-/* https://www.w3.org/TR/webrtc/#dfn-stats-selection-algorithm */ +-static GstStructure * +-_get_stats_from_selector (GstWebRTCBin * webrtc, gpointer selector) +-{ +- if (selector) +- GST_FIXME_OBJECT (webrtc, "Implement stats selection"); +- +- return gst_structure_copy (webrtc->priv->stats); +-} +- + struct get_stats + { + GstPad *pad; +@@ -4925,28 +6908,14 @@ _free_get_stats (struct get_stats 
*stats) + } + + /* https://www.w3.org/TR/webrtc/#dom-rtcpeerconnection-getstats() */ +-static void ++static GstStructure * + _get_stats_task (GstWebRTCBin * webrtc, struct get_stats *stats) + { +- GstStructure *s; +- gpointer selector = NULL; +- +- gst_webrtc_bin_update_stats (webrtc); +- +- if (stats->pad) { +- GstWebRTCBinPad *wpad = GST_WEBRTC_BIN_PAD (stats->pad); +- +- if (wpad->trans) { +- if (GST_PAD_DIRECTION (wpad) == GST_PAD_SRC) { +- selector = wpad->trans->receiver; +- } else { +- selector = wpad->trans->sender; +- } +- } +- } ++ /* Our selector is the pad, ++ * https://www.w3.org/TR/webrtc/#dfn-stats-selection-algorithm ++ */ + +- s = _get_stats_from_selector (webrtc, selector); +- gst_promise_reply (stats->promise, s); ++ return gst_webrtc_bin_create_stats (webrtc, stats->pad); + } + + static void +@@ -4967,9 +6936,9 @@ gst_webrtc_bin_get_stats (GstWebRTCBin * webrtc, GstPad * pad, + if (!gst_webrtc_bin_enqueue_task (webrtc, (GstWebRTCBinFunc) _get_stats_task, + stats, (GDestroyNotify) _free_get_stats, promise)) { + GError *error = +- g_error_new (GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_CLOSED, ++ g_error_new (GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_INVALID_STATE, + "Could not retrieve statistics. webrtcbin is closed."); +- GstStructure *s = gst_structure_new ("application/x-gst-promise-error", ++ GstStructure *s = gst_structure_new ("application/x-gst-promise", + "error", G_TYPE_ERROR, error, NULL); + + gst_promise_reply (promise, s); +@@ -4983,18 +6952,19 @@ gst_webrtc_bin_add_transceiver (GstWebRTCBin * webrtc, + GstWebRTCRTPTransceiverDirection direction, GstCaps * caps) + { + WebRTCTransceiver *trans; +- GstWebRTCRTPTransceiver *rtp_trans; + + g_return_val_if_fail (direction != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE, + NULL); + +- trans = _create_webrtc_transceiver (webrtc, direction, -1); ++ PC_LOCK (webrtc); ++ ++ trans = ++ _create_webrtc_transceiver (webrtc, direction, -1, ++ webrtc_kind_from_caps (caps), caps); + GST_LOG_OBJECT (webrtc, + "Created new unassociated transceiver %" GST_PTR_FORMAT, trans); + +- rtp_trans = GST_WEBRTC_RTP_TRANSCEIVER (trans); +- if (caps) +- rtp_trans->codec_preferences = gst_caps_ref (caps); ++ PC_UNLOCK (webrtc); + + return gst_object_ref (trans); + } +@@ -5011,6 +6981,8 @@ gst_webrtc_bin_get_transceivers (GstWebRTCBin * webrtc) + GArray *arr = g_array_new (FALSE, TRUE, sizeof (GstWebRTCRTPTransceiver *)); + int i; + ++ PC_LOCK (webrtc); ++ + g_array_set_clear_func (arr, (GDestroyNotify) _deref_and_unref); + + for (i = 0; i < webrtc->priv->transceivers->len; i++) { +@@ -5019,6 +6991,7 @@ gst_webrtc_bin_get_transceivers (GstWebRTCBin * webrtc) + gst_object_ref (trans); + g_array_append_val (arr, trans); + } ++ PC_UNLOCK (webrtc); + + return arr; + } +@@ -5028,6 +7001,8 @@ gst_webrtc_bin_get_transceiver (GstWebRTCBin * webrtc, guint idx) + { + GstWebRTCRTPTransceiver *trans = NULL; + ++ PC_LOCK (webrtc); ++ + if (idx >= webrtc->priv->transceivers->len) { + GST_ERROR_OBJECT (webrtc, "No transceiver for idx %d", idx); + goto done; +@@ -5037,18 +7012,25 @@ gst_webrtc_bin_get_transceiver (GstWebRTCBin * webrtc, guint idx) + gst_object_ref (trans); + + done: ++ PC_UNLOCK (webrtc); + return trans; + } + + static gboolean + gst_webrtc_bin_add_turn_server (GstWebRTCBin * webrtc, const gchar * uri) + { ++ gboolean ret; ++ + g_return_val_if_fail (GST_IS_WEBRTC_BIN (webrtc), FALSE); + g_return_val_if_fail (uri != NULL, FALSE); + + GST_DEBUG_OBJECT (webrtc, "Adding turn server: %s", uri); + +- return gst_webrtc_ice_add_turn_server 
(webrtc->priv->ice, uri); ++ PC_LOCK (webrtc); ++ ret = gst_webrtc_ice_add_turn_server (webrtc->priv->ice, uri); ++ PC_UNLOCK (webrtc); ++ ++ return ret; + } + + static gboolean +@@ -5121,7 +7103,7 @@ gst_webrtc_bin_create_data_channel (GstWebRTCBin * webrtc, const gchar * label, + if (webrtc->priv->sctp_transport) { + /* Let transport be the connection's [[SctpTransport]] slot. + * +- * If the [[DataChannelId]] slot is not null, transport is in ++ * If the [[DataChannelId]] slot is not null, transport is in + * connected state and [[DataChannelId]] is greater or equal to the + * transport's [[MaxChannels]] slot, throw an OperationError. + */ +@@ -5136,6 +7118,7 @@ gst_webrtc_bin_create_data_channel (GstWebRTCBin * webrtc, const gchar * label, + return NULL; + + PC_LOCK (webrtc); ++ DC_LOCK (webrtc); + /* check if the id has been used already */ + if (id != -1) { + WebRTCDataChannel *channel = _find_data_channel_for_id (webrtc, id); +@@ -5143,6 +7126,7 @@ gst_webrtc_bin_create_data_channel (GstWebRTCBin * webrtc, const gchar * label, + GST_ELEMENT_WARNING (webrtc, LIBRARY, SETTINGS, + ("Attempting to add a data channel with a duplicate ID: %i", id), + NULL); ++ DC_UNLOCK (webrtc); + PC_UNLOCK (webrtc); + return NULL; + } +@@ -5155,6 +7139,7 @@ gst_webrtc_bin_create_data_channel (GstWebRTCBin * webrtc, const gchar * label, + if (id == -1) { + GST_ELEMENT_WARNING (webrtc, RESOURCE, NOT_FOUND, + ("%s", "Failed to generate an identifier for a data channel"), NULL); ++ DC_UNLOCK (webrtc); + PC_UNLOCK (webrtc); + return NULL; + } +@@ -5165,24 +7150,34 @@ gst_webrtc_bin_create_data_channel (GstWebRTCBin * webrtc, const gchar * label, + "max-retransmits", max_retransmits, "protocol", protocol, + "negotiated", negotiated, "id", id, "priority", priority, NULL); + +- if (ret) { +- gst_bin_add (GST_BIN (webrtc), ret->appsrc); +- gst_bin_add (GST_BIN (webrtc), ret->appsink); +- +- gst_element_sync_state_with_parent (ret->appsrc); +- gst_element_sync_state_with_parent (ret->appsink); +- +- ret = gst_object_ref (ret); +- ret->webrtcbin = webrtc; +- g_ptr_array_add (webrtc->priv->data_channels, ret); +- webrtc_data_channel_link_to_sctp (ret, webrtc->priv->sctp_transport); +- if (webrtc->priv->sctp_transport && +- webrtc->priv->sctp_transport->association_established +- && !ret->parent.negotiated) { +- webrtc_data_channel_start_negotiation (ret); +- } else { +- _update_need_negotiation (webrtc); +- } ++ if (!ret) { ++ DC_UNLOCK (webrtc); ++ PC_UNLOCK (webrtc); ++ return ret; ++ } ++ ++ g_signal_emit (webrtc, gst_webrtc_bin_signals[PREPARE_DATA_CHANNEL_SIGNAL], 0, ++ ret, TRUE); ++ ++ gst_bin_add (GST_BIN (webrtc), ret->src_bin); ++ gst_bin_add (GST_BIN (webrtc), ret->sink_bin); ++ ++ gst_element_sync_state_with_parent (ret->src_bin); ++ gst_element_sync_state_with_parent (ret->sink_bin); ++ ++ ret = gst_object_ref (ret); ++ ret->webrtcbin = webrtc; ++ g_ptr_array_add (webrtc->priv->data_channels, ret); ++ DC_UNLOCK (webrtc); ++ ++ gst_webrtc_bin_update_sctp_priority (webrtc); ++ webrtc_data_channel_link_to_sctp (ret, webrtc->priv->sctp_transport); ++ if (webrtc->priv->sctp_transport && ++ webrtc->priv->sctp_transport->association_established ++ && !ret->parent.negotiated) { ++ webrtc_data_channel_start_negotiation (ret); ++ } else { ++ _update_need_negotiation (webrtc); + } + + PC_UNLOCK (webrtc); +@@ -5201,13 +7196,12 @@ on_rtpbin_pad_added (GstElement * rtpbin, GstPad * new_pad, + GST_TRACE_OBJECT (webrtc, "new rtpbin pad %s", new_pad_name); + if (g_str_has_prefix (new_pad_name, "recv_rtp_src_")) { + 
guint32 session_id = 0, ssrc = 0, pt = 0; +- GstWebRTCRTPTransceiver *rtp_trans; ++ SsrcMapItem *mid_entry; ++ GstWebRTCRTPTransceiver *rtp_trans = NULL; + WebRTCTransceiver *trans; + TransportStream *stream; + GstWebRTCBinPad *pad; +- guint media_idx = 0; +- gboolean found_ssrc = FALSE; +- guint i; ++ guint media_idx; + + if (sscanf (new_pad_name, "recv_rtp_src_%u_%u_%u", &session_id, &ssrc, + &pt) != 3) { +@@ -5215,45 +7209,70 @@ on_rtpbin_pad_added (GstElement * rtpbin, GstPad * new_pad, + return; + } + ++ media_idx = session_id; ++ ++ PC_LOCK (webrtc); + stream = _find_transport_for_session (webrtc, session_id); + if (!stream) + g_warn_if_reached (); + +- media_idx = session_id; ++ mid_entry = ++ find_mid_ssrc_for_ssrc (webrtc, ++ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY, session_id, ssrc); + +- for (i = 0; i < stream->remote_ssrcmap->len; i++) { +- SsrcMapItem *item = +- &g_array_index (stream->remote_ssrcmap, SsrcMapItem, i); +- if (item->ssrc == ssrc) { +- media_idx = item->media_idx; +- found_ssrc = TRUE; +- break; ++ if (mid_entry) { ++ if (mid_entry->mid) { ++ /* Can't use the mid_entry if the mid doesn't exist */ ++ rtp_trans = _find_transceiver_for_mid (webrtc, mid_entry->mid); ++ if (rtp_trans) { ++ g_assert_cmpint (rtp_trans->mline, ==, mid_entry->media_idx); ++ } + } +- } + +- if (!found_ssrc) { ++ if (mid_entry->media_idx != -1) ++ media_idx = mid_entry->media_idx; ++ } else { + GST_WARNING_OBJECT (webrtc, "Could not find ssrc %u", ssrc); ++ /* TODO: connect up to fakesink and reconnect later when this information ++ * is known from RTCP SDES or RTP Header extension ++ */ + } + +- rtp_trans = _find_transceiver_for_mline (webrtc, media_idx); ++ if (!rtp_trans) ++ rtp_trans = _find_transceiver_for_mline (webrtc, media_idx); + if (!rtp_trans) + g_warn_if_reached (); + trans = WEBRTC_TRANSCEIVER (rtp_trans); + g_assert (trans->stream == stream); + + pad = _find_pad_for_transceiver (webrtc, GST_PAD_SRC, rtp_trans); +- + GST_TRACE_OBJECT (webrtc, "found pad %" GST_PTR_FORMAT + " for rtpbin pad name %s", pad, new_pad_name); ++ if (!_remove_pending_pad (webrtc, pad)) { ++ /* assumption here is that rtpbin doesn't duplicate pads and that if ++ * there is no pending pad, this is a duplicate stream for e.g. simulcast ++ * or somesuch */ ++ gst_clear_object (&pad); ++ pad = ++ _create_pad_for_sdp_media (webrtc, GST_PAD_SRC, rtp_trans, G_MAXUINT, ++ NULL); ++ GST_TRACE_OBJECT (webrtc, ++ "duplicate output ssrc? 
created new pad %" GST_PTR_FORMAT " for %" ++ GST_PTR_FORMAT " for rtp pad %s", pad, rtp_trans, new_pad_name); ++ gst_object_ref_sink (pad); ++ } ++ + if (!pad) + g_warn_if_reached (); + gst_ghost_pad_set_target (GST_GHOST_PAD (pad), GST_PAD (new_pad)); + + if (webrtc->priv->running) + gst_pad_set_active (GST_PAD (pad), TRUE); ++ ++ PC_UNLOCK (webrtc); ++ + gst_pad_sticky_events_foreach (new_pad, copy_sticky_events, pad); + gst_element_add_pad (GST_ELEMENT (webrtc), GST_PAD (pad)); +- _remove_pending_pad (webrtc, pad); + + gst_object_unref (pad); + } +@@ -5271,6 +7290,7 @@ on_rtpbin_request_pt_map (GstElement * rtpbin, guint session_id, guint pt, + GST_DEBUG_OBJECT (webrtc, "getting pt map for pt %d in session %d", pt, + session_id); + ++ PC_LOCK (webrtc); + stream = _find_transport_for_session (webrtc, session_id); + if (!stream) + goto unknown_session; +@@ -5278,13 +7298,15 @@ on_rtpbin_request_pt_map (GstElement * rtpbin, guint session_id, guint pt, + if ((ret = transport_stream_get_caps_for_pt (stream, pt))) + gst_caps_ref (ret); + +- GST_TRACE_OBJECT (webrtc, "Found caps %" GST_PTR_FORMAT " for pt %d in " ++ GST_DEBUG_OBJECT (webrtc, "Found caps %" GST_PTR_FORMAT " for pt %d in " + "session %d", ret, pt, session_id); + ++ PC_UNLOCK (webrtc); + return ret; + + unknown_session: + { ++ PC_UNLOCK (webrtc); + GST_DEBUG_OBJECT (webrtc, "unknown session %d", session_id); + return NULL; + } +@@ -5295,62 +7317,99 @@ on_rtpbin_request_aux_sender (GstElement * rtpbin, guint session_id, + GstWebRTCBin * webrtc) + { + TransportStream *stream; +- gboolean have_rtx = FALSE; +- GstStructure *pt_map = NULL; +- GstElement *ret = NULL; +- GstWebRTCRTPTransceiver *trans; ++ GstElement *ret, *rtx; ++ GstPad *pad; ++ char *name; ++ GstElement *aux_sender = NULL; + + stream = _find_transport_for_session (webrtc, session_id); +- trans = _find_transceiver (webrtc, &session_id, +- (FindTransceiverFunc) transceiver_match_for_mline); ++ if (!stream) { ++ /* a rtp session without a stream is a webrtcbin bug */ ++ g_warn_if_reached (); ++ return NULL; ++ } + +- if (stream) +- have_rtx = transport_stream_get_pt (stream, "RTX") != 0; ++ if (stream->rtxsend) { ++ GST_WARNING_OBJECT (webrtc, "rtprtxsend already created! rtpbin bug?!"); ++ g_warn_if_reached (); ++ return NULL; ++ } + +- GST_LOG_OBJECT (webrtc, "requesting aux sender for stream %" GST_PTR_FORMAT +- " with transport %" GST_PTR_FORMAT " and pt map %" GST_PTR_FORMAT, stream, +- trans, pt_map); ++ GST_DEBUG_OBJECT (webrtc, "requesting aux sender for session %u " ++ "stream %" GST_PTR_FORMAT, session_id, stream); + +- if (have_rtx) { +- GstElement *rtx; +- GstPad *pad; +- gchar *name; ++ ret = gst_bin_new (NULL); ++ rtx = gst_element_factory_make ("rtprtxsend", NULL); ++ /* XXX: allow control from outside? */ ++ g_object_set (rtx, "max-size-packets", 500, NULL); + +- if (stream->rtxsend) { +- GST_WARNING_OBJECT (webrtc, "rtprtxsend already created! 
rtpbin bug?!"); +- goto out; +- } ++ if (!gst_bin_add (GST_BIN (ret), rtx)) ++ g_warn_if_reached (); ++ ensure_rtx_hdr_ext (stream); + +- GST_INFO ("creating AUX sender"); +- ret = gst_bin_new (NULL); +- rtx = gst_element_factory_make ("rtprtxsend", NULL); +- g_object_set (rtx, "max-size-packets", 500, NULL); +- _set_rtx_ptmap_from_stream (webrtc, stream); ++ stream->rtxsend = gst_object_ref (rtx); ++ _set_internal_rtpbin_element_props_from_stream (webrtc, stream); + +- if (WEBRTC_TRANSCEIVER (trans)->local_rtx_ssrc_map) +- g_object_set (rtx, "ssrc-map", +- WEBRTC_TRANSCEIVER (trans)->local_rtx_ssrc_map, NULL); ++ name = g_strdup_printf ("src_%u", session_id); ++ pad = gst_element_get_static_pad (rtx, "src"); + +- gst_bin_add (GST_BIN (ret), rtx); + +- pad = gst_element_get_static_pad (rtx, "src"); +- name = g_strdup_printf ("src_%u", session_id); +- gst_element_add_pad (ret, gst_ghost_pad_new (name, pad)); +- g_free (name); +- gst_object_unref (pad); ++ g_signal_emit (webrtc, gst_webrtc_bin_signals[REQUEST_AUX_SENDER], 0, ++ stream->transport, &aux_sender); ++ if (aux_sender) { ++ GstPadLinkReturn link_res; ++ GstPad *sinkpad = gst_element_get_static_pad (aux_sender, "sink"); ++ GstPad *srcpad = gst_element_get_static_pad (aux_sender, "src"); + +- pad = gst_element_get_static_pad (rtx, "sink"); +- name = g_strdup_printf ("sink_%u", session_id); +- gst_element_add_pad (ret, gst_ghost_pad_new (name, pad)); +- g_free (name); +- gst_object_unref (pad); ++ gst_object_ref_sink (aux_sender); ++ ++ if (!sinkpad || !srcpad) { ++ GST_ERROR_OBJECT (webrtc, ++ "Invalid pads for the aux sender %" GST_PTR_FORMAT ++ ". Skipping it.", aux_sender); ++ goto bwe_done; ++ } ++ ++ if (!gst_bin_add (GST_BIN (ret), aux_sender)) { ++ GST_ERROR_OBJECT (webrtc, ++ "Could not add aux sender %" GST_PTR_FORMAT, aux_sender); ++ goto bwe_done; ++ } ++ ++ link_res = gst_pad_link (pad, sinkpad); ++ if (link_res != GST_PAD_LINK_OK) { ++ GST_ERROR_OBJECT (webrtc, ++ "Could not link aux sender %" GST_PTR_FORMAT " %s", aux_sender, ++ gst_pad_link_get_name (link_res)); ++ goto bwe_done; ++ } ++ ++ gst_clear_object (&pad); ++ pad = gst_object_ref (srcpad); + +- stream->rtxsend = gst_object_ref (rtx); ++ bwe_done: ++ if (pad != srcpad) { ++ /* Failed using the provided aux sender */ ++ if (gst_object_has_as_parent (GST_OBJECT (aux_sender), GST_OBJECT (ret))) { ++ gst_bin_remove (GST_BIN (ret), aux_sender); ++ } ++ } ++ gst_clear_object (&aux_sender); ++ gst_clear_object (&srcpad); ++ gst_clear_object (&sinkpad); + } + +-out: +- if (pt_map) +- gst_structure_free (pt_map); ++ if (!gst_element_add_pad (ret, gst_ghost_pad_new (name, pad))) ++ g_warn_if_reached (); ++ gst_clear_object (&pad); ++ g_clear_pointer (&name, g_free); ++ ++ name = g_strdup_printf ("sink_%u", session_id); ++ pad = gst_element_get_static_pad (rtx, "sink"); ++ if (!gst_element_add_pad (ret, gst_ghost_pad_new (name, pad))) ++ g_warn_if_reached (); ++ gst_clear_object (&pad); ++ g_clear_pointer (&name, g_free); + + return ret; + } +@@ -5359,97 +7418,86 @@ static GstElement * + on_rtpbin_request_aux_receiver (GstElement * rtpbin, guint session_id, + GstWebRTCBin * webrtc) + { +- GstElement *ret = NULL; +- GstElement *prev = NULL; +- GstPad *sinkpad = NULL; + TransportStream *stream; +- gint red_pt = 0; +- gint rtx_pt = 0; ++ GstPad *pad, *ghost; ++ GstElement *ret; ++ char *name; + + stream = _find_transport_for_session (webrtc, session_id); +- +- if (stream) { +- red_pt = transport_stream_get_pt (stream, "RED"); +- rtx_pt = transport_stream_get_pt (stream, 
"RTX"); ++ if (!stream) { ++ /* no transport stream before the session has been created is a webrtcbin ++ * programming error! */ ++ g_warn_if_reached (); ++ return NULL; + } + +- GST_LOG_OBJECT (webrtc, "requesting aux receiver for stream %" GST_PTR_FORMAT, +- stream); +- +- if (red_pt || rtx_pt) +- ret = gst_bin_new (NULL); +- +- if (rtx_pt) { +- if (stream->rtxreceive) { +- GST_WARNING_OBJECT (webrtc, +- "rtprtxreceive already created! rtpbin bug?!"); +- goto error; +- } +- +- stream->rtxreceive = gst_element_factory_make ("rtprtxreceive", NULL); +- _set_rtx_ptmap_from_stream (webrtc, stream); +- +- gst_bin_add (GST_BIN (ret), stream->rtxreceive); +- +- sinkpad = gst_element_get_static_pad (stream->rtxreceive, "sink"); ++ if (stream->rtxreceive) { ++ GST_WARNING_OBJECT (webrtc, "rtprtxreceive already created! rtpbin bug?!"); ++ g_warn_if_reached (); ++ return NULL; ++ } + +- prev = gst_object_ref (stream->rtxreceive); ++ if (stream->reddec) { ++ GST_WARNING_OBJECT (webrtc, "rtpreddec already created! rtpbin bug?!"); ++ g_warn_if_reached (); ++ return NULL; + } + +- if (red_pt) { +- GstElement *rtpreddec = gst_element_factory_make ("rtpreddec", NULL); ++ GST_DEBUG_OBJECT (webrtc, "requesting aux receiver for session %u " ++ "stream %" GST_PTR_FORMAT, session_id, stream); + +- GST_DEBUG_OBJECT (webrtc, "Creating RED decoder for pt %d in session %u", +- red_pt, session_id); ++ ret = gst_bin_new (NULL); + +- gst_bin_add (GST_BIN (ret), rtpreddec); ++ stream->rtxreceive = gst_element_factory_make ("rtprtxreceive", NULL); ++ gst_object_ref (stream->rtxreceive); ++ if (!gst_bin_add (GST_BIN (ret), stream->rtxreceive)) ++ g_warn_if_reached (); + +- g_object_set (rtpreddec, "pt", red_pt, NULL); ++ ensure_rtx_hdr_ext (stream); + +- if (prev) +- gst_element_link (prev, rtpreddec); +- else +- sinkpad = gst_element_get_static_pad (rtpreddec, "sink"); ++ stream->reddec = gst_element_factory_make ("rtpreddec", NULL); ++ gst_object_ref (stream->reddec); ++ if (!gst_bin_add (GST_BIN (ret), stream->reddec)) ++ g_warn_if_reached (); + +- prev = rtpreddec; +- } ++ _set_internal_rtpbin_element_props_from_stream (webrtc, stream); + +- if (sinkpad) { +- gchar *name = g_strdup_printf ("sink_%u", session_id); +- GstPad *ghost = gst_ghost_pad_new (name, sinkpad); +- g_free (name); +- gst_object_unref (sinkpad); +- gst_element_add_pad (ret, ghost); +- } ++ if (!gst_element_link (stream->rtxreceive, stream->reddec)) ++ g_warn_if_reached (); + +- if (prev) { +- gchar *name = g_strdup_printf ("src_%u", session_id); +- GstPad *srcpad = gst_element_get_static_pad (prev, "src"); +- GstPad *ghost = gst_ghost_pad_new (name, srcpad); +- g_free (name); +- gst_object_unref (srcpad); +- gst_element_add_pad (ret, ghost); +- } ++ name = g_strdup_printf ("sink_%u", session_id); ++ pad = gst_element_get_static_pad (stream->rtxreceive, "sink"); ++ ghost = gst_ghost_pad_new (name, pad); ++ g_clear_pointer (&name, g_free); ++ gst_clear_object (&pad); ++ if (!gst_element_add_pad (ret, ghost)) ++ g_warn_if_reached (); + +-out: +- return ret; ++ name = g_strdup_printf ("src_%u", session_id); ++ pad = gst_element_get_static_pad (stream->reddec, "src"); ++ ghost = gst_ghost_pad_new (name, pad); ++ g_clear_pointer (&name, g_free); ++ gst_clear_object (&pad); ++ if (!gst_element_add_pad (ret, ghost)) ++ g_warn_if_reached (); + +-error: +- if (ret) +- gst_object_unref (ret); +- goto out; ++ return ret; + } + + static GstElement * +-on_rtpbin_request_fec_decoder (GstElement * rtpbin, guint session_id, +- GstWebRTCBin * webrtc) 
++on_rtpbin_request_fec_decoder_full (GstElement * rtpbin, guint session_id, ++ guint ssrc, guint pt, GstWebRTCBin * webrtc) + { + TransportStream *stream; + GstElement *ret = NULL; +- gint pt = 0; + GObject *internal_storage; + + stream = _find_transport_for_session (webrtc, session_id); ++ if (!stream) { ++ /* a rtp session without a stream is a webrtcbin bug */ ++ g_warn_if_reached (); ++ return NULL; ++ } + + /* TODO: for now, we only support ulpfec, but once we support + * more algorithms, if the remote may use more than one algorithm, +@@ -5457,105 +7505,25 @@ on_rtpbin_request_fec_decoder (GstElement * rtpbin, guint session_id, + * + * + Return a bin here, with the relevant FEC decoders plugged in + * and their payload type set to 0 +- * + Enable the decoders by setting the payload type only when +- * we detect it (by connecting to ptdemux:new-payload-type for +- * example) + */ +- if (stream) +- pt = transport_stream_get_pt (stream, "ULPFEC"); +- +- if (pt) { +- GST_DEBUG_OBJECT (webrtc, "Creating ULPFEC decoder for pt %d in session %u", +- pt, session_id); +- ret = gst_element_factory_make ("rtpulpfecdec", NULL); +- g_signal_emit_by_name (webrtc->rtpbin, "get-internal-storage", session_id, +- &internal_storage); +- +- g_object_set (ret, "pt", pt, "storage", internal_storage, NULL); +- g_object_unref (internal_storage); +- } +- +- return ret; +-} +- +-static GstElement * +-on_rtpbin_request_fec_encoder (GstElement * rtpbin, guint session_id, +- GstWebRTCBin * webrtc) +-{ +- GstElement *ret = NULL; +- GstElement *prev = NULL; +- TransportStream *stream; +- guint ulpfec_pt = 0; +- guint red_pt = 0; +- GstPad *sinkpad = NULL; +- GstWebRTCRTPTransceiver *trans; +- +- stream = _find_transport_for_session (webrtc, session_id); +- trans = _find_transceiver (webrtc, &session_id, +- (FindTransceiverFunc) transceiver_match_for_mline); +- +- if (stream) { +- ulpfec_pt = transport_stream_get_pt (stream, "ULPFEC"); +- red_pt = transport_stream_get_pt (stream, "RED"); +- } +- +- if (ulpfec_pt || red_pt) +- ret = gst_bin_new (NULL); +- +- if (ulpfec_pt) { +- GstElement *fecenc = gst_element_factory_make ("rtpulpfecenc", NULL); +- GstCaps *caps = transport_stream_get_caps_for_pt (stream, ulpfec_pt); +- +- GST_DEBUG_OBJECT (webrtc, +- "Creating ULPFEC encoder for session %d with pt %d", session_id, +- ulpfec_pt); +- +- gst_bin_add (GST_BIN (ret), fecenc); +- sinkpad = gst_element_get_static_pad (fecenc, "sink"); +- g_object_set (fecenc, "pt", ulpfec_pt, "percentage", +- WEBRTC_TRANSCEIVER (trans)->fec_percentage, NULL); +- +- +- if (caps && !gst_caps_is_empty (caps)) { +- const GstStructure *s = gst_caps_get_structure (caps, 0); +- const gchar *media = gst_structure_get_string (s, "media"); +- +- if (!g_strcmp0 (media, "video")) +- g_object_set (fecenc, "multipacket", TRUE, NULL); +- } +- +- prev = fecenc; +- } +- +- if (red_pt) { +- GstElement *redenc = gst_element_factory_make ("rtpredenc", NULL); ++ GST_DEBUG_OBJECT (webrtc, "Creating ULPFEC decoder for pt %d in session %u " ++ "stream %" GST_PTR_FORMAT, pt, session_id, stream); + +- GST_DEBUG_OBJECT (webrtc, "Creating RED encoder for session %d with pt %d", +- session_id, red_pt); ++ ret = gst_element_factory_make ("rtpulpfecdec", NULL); + +- gst_bin_add (GST_BIN (ret), redenc); +- if (prev) +- gst_element_link (prev, redenc); +- else +- sinkpad = gst_element_get_static_pad (redenc, "sink"); +- +- g_object_set (redenc, "pt", red_pt, "allow-no-red-blocks", TRUE, NULL); ++ g_signal_emit_by_name (webrtc->rtpbin, "get-internal-storage", session_id, 
++ &internal_storage); + +- prev = redenc; +- } ++ g_object_set (ret, "storage", internal_storage, NULL); ++ g_clear_object (&internal_storage); + +- if (sinkpad) { +- GstPad *ghost = gst_ghost_pad_new ("sink", sinkpad); +- gst_object_unref (sinkpad); +- gst_element_add_pad (ret, ghost); +- } ++ g_object_set_data (G_OBJECT (ret), GST_WEBRTC_PAYLOAD_TYPE, ++ GINT_TO_POINTER (pt)); + +- if (prev) { +- GstPad *srcpad = gst_element_get_static_pad (prev, "src"); +- GstPad *ghost = gst_ghost_pad_new ("src", srcpad); +- gst_object_unref (srcpad); +- gst_element_add_pad (ret, ghost); +- } ++ PC_LOCK (webrtc); ++ stream->fecdecs = g_list_prepend (stream->fecdecs, gst_object_ref (ret)); ++ _set_internal_rtpbin_element_props_from_stream (webrtc, stream); ++ PC_UNLOCK (webrtc); + + return ret; + } +@@ -5565,6 +7533,10 @@ on_rtpbin_bye_ssrc (GstElement * rtpbin, guint session_id, guint ssrc, + GstWebRTCBin * webrtc) + { + GST_INFO_OBJECT (webrtc, "session %u ssrc %u received bye", session_id, ssrc); ++ ++ PC_LOCK (webrtc); ++ remove_ssrc_entry_by_ssrc (webrtc, session_id, ssrc); ++ PC_UNLOCK (webrtc); + } + + static void +@@ -5572,6 +7544,10 @@ on_rtpbin_bye_timeout (GstElement * rtpbin, guint session_id, guint ssrc, + GstWebRTCBin * webrtc) + { + GST_INFO_OBJECT (webrtc, "session %u ssrc %u bye timeout", session_id, ssrc); ++ ++ PC_LOCK (webrtc); ++ remove_ssrc_entry_by_ssrc (webrtc, session_id, ssrc); ++ PC_UNLOCK (webrtc); + } + + static void +@@ -5580,6 +7556,10 @@ on_rtpbin_sender_timeout (GstElement * rtpbin, guint session_id, guint ssrc, + { + GST_INFO_OBJECT (webrtc, "session %u ssrc %u sender timeout", session_id, + ssrc); ++ ++ PC_LOCK (webrtc); ++ remove_ssrc_entry_by_ssrc (webrtc, session_id, ssrc); ++ PC_UNLOCK (webrtc); + } + + static void +@@ -5587,13 +7567,21 @@ on_rtpbin_new_ssrc (GstElement * rtpbin, guint session_id, guint ssrc, + GstWebRTCBin * webrtc) + { + GST_INFO_OBJECT (webrtc, "session %u ssrc %u new ssrc", session_id, ssrc); ++ ++ if (ssrc == 0) ++ return; ++ ++ PC_LOCK (webrtc); ++ find_or_add_ssrc_map_item (webrtc, ++ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY, session_id, ssrc, -1); ++ PC_UNLOCK (webrtc); + } + + static void + on_rtpbin_ssrc_active (GstElement * rtpbin, guint session_id, guint ssrc, + GstWebRTCBin * webrtc) + { +- GST_INFO_OBJECT (webrtc, "session %u ssrc %u active", session_id, ssrc); ++ GST_TRACE_OBJECT (webrtc, "session %u ssrc %u active", session_id, ssrc); + } + + static void +@@ -5607,7 +7595,30 @@ static void + on_rtpbin_ssrc_sdes (GstElement * rtpbin, guint session_id, guint ssrc, + GstWebRTCBin * webrtc) + { ++ GObject *session; ++ + GST_INFO_OBJECT (webrtc, "session %u ssrc %u sdes", session_id, ssrc); ++ ++ g_signal_emit_by_name (rtpbin, "get-internal-session", session_id, &session); ++ if (session) { ++ GObject *source; ++ ++ g_signal_emit_by_name (session, "get-source-by-ssrc", ssrc, &source); ++ if (source) { ++ GstStructure *sdes; ++ ++ g_object_get (source, "sdes", &sdes, NULL); ++ ++ /* TODO: when the sdes contains the mid, use that to correlate streams ++ * as necessary */ ++ GST_DEBUG_OBJECT (webrtc, "session %u ssrc %u sdes %" GST_PTR_FORMAT, ++ session_id, ssrc, sdes); ++ ++ gst_clear_structure (&sdes); ++ gst_clear_object (&source); ++ } ++ g_clear_object (&session); ++ } + } + + static void +@@ -5622,40 +7633,144 @@ on_rtpbin_timeout (GstElement * rtpbin, guint session_id, guint ssrc, + GstWebRTCBin * webrtc) + { + GST_INFO_OBJECT (webrtc, "session %u ssrc %u timeout", session_id, ssrc); ++ ++ PC_LOCK (webrtc); ++ 
remove_ssrc_entry_by_ssrc (webrtc, session_id, ssrc); ++ PC_UNLOCK (webrtc); + } + + static void + on_rtpbin_new_sender_ssrc (GstElement * rtpbin, guint session_id, guint ssrc, + GstWebRTCBin * webrtc) + { ++ SsrcMapItem *mid; ++ + GST_INFO_OBJECT (webrtc, "session %u ssrc %u new sender ssrc", session_id, + ssrc); ++ ++ PC_LOCK (webrtc); ++ mid = find_mid_ssrc_for_ssrc (webrtc, ++ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY, session_id, ssrc); ++ if (!mid) { ++ TransportStream *stream = _find_transport_for_session (webrtc, session_id); ++ transport_stream_add_ssrc_map_item (stream, ++ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY, ssrc, -1); ++ } else if (mid->mid) { ++ /* XXX: when peers support the sdes rtcp item, use this to send the mid rtcp ++ * sdes item. Requires being able to set the sdes on the rtpsource. */ ++#if 0 ++ GObject *session; ++ ++ g_signal_emit_by_name (rtpbin, "get-internal-session", session_id, ++ &session, NULL); ++ if (session) { ++ GObject *source; ++ ++ g_signal_emit_by_name (session, "get-source-by-ssrc", ssrc, &source); ++ if (source) { ++ GstStructure *sdes; ++ const char *sdes_field_name; ++ ++ g_object_get (source, "sdes", &sdes, NULL); ++ GST_WARNING_OBJECT (webrtc, "session %u ssrc %u retrieve sdes %" ++ GST_PTR_FORMAT, session_id, ssrc, sdes); ++ sdes_field_name = gst_rtcp_sdes_type_to_name (GST_RTCP_SDES_MID); ++ g_assert (sdes_field_name); ++ gst_structure_set (sdes, sdes_field_name, G_TYPE_STRING, mid->mid, ++ NULL); ++ if (mid->rid) { ++ sdes_field_name = ++ gst_rtcp_sdes_type_to_name (GST_RTCP_SDES_RTP_STREAM_ID); ++ g_assert (sdes_field_name); ++ gst_structure_set (sdes, sdes_field_name, mid->rid, NULL); ++ // TODO: repaired-rtp-stream-id ++ } ++ // TODO: writable sdes? ++ g_object_set (source, "sdes", sdes, NULL); ++ GST_INFO_OBJECT (webrtc, ++ "session %u ssrc %u set sdes %" GST_PTR_FORMAT, session_id, ssrc, ++ sdes); ++ ++ gst_clear_structure (&sdes); ++ gst_clear_object (&source); ++ } ++ g_clear_object (&session); ++ } ++#endif ++ } ++ PC_UNLOCK (webrtc); + } + + static void + on_rtpbin_sender_ssrc_active (GstElement * rtpbin, guint session_id, guint ssrc, + GstWebRTCBin * webrtc) + { +- GST_INFO_OBJECT (webrtc, "session %u ssrc %u sender ssrc active", session_id, ++ GST_TRACE_OBJECT (webrtc, "session %u ssrc %u sender ssrc active", session_id, + ssrc); + } + ++struct new_jb_args ++{ ++ GstWebRTCBin *webrtc; ++ GstElement *jitterbuffer; ++ TransportStream *stream; ++ guint ssrc; ++}; ++ ++static gboolean ++jitter_buffer_set_retransmission (SsrcMapItem * item, ++ const struct new_jb_args *data) ++{ ++ GstWebRTCRTPTransceiver *trans; ++ gboolean do_nack; ++ ++ if (item->media_idx == -1) ++ return TRUE; ++ ++ trans = _find_transceiver_for_mline (data->webrtc, item->media_idx); ++ if (!trans) { ++ g_warn_if_reached (); ++ return TRUE; ++ } ++ ++ do_nack = WEBRTC_TRANSCEIVER (trans)->do_nack; ++ /* We don't set do-retransmission on rtpbin as we want per-session control */ ++ GST_LOG_OBJECT (data->webrtc, "setting do-nack=%s for transceiver %" ++ GST_PTR_FORMAT " with transport %" GST_PTR_FORMAT ++ " rtp session %u ssrc %u", do_nack ? 
"true" : "false", trans, ++ data->stream, data->stream->session_id, data->ssrc); ++ g_object_set (data->jitterbuffer, "do-retransmission", do_nack, NULL); ++ ++ g_weak_ref_set (&item->rtpjitterbuffer, data->jitterbuffer); ++ ++ return TRUE; ++} ++ + static void + on_rtpbin_new_jitterbuffer (GstElement * rtpbin, GstElement * jitterbuffer, + guint session_id, guint ssrc, GstWebRTCBin * webrtc) + { +- GstWebRTCRTPTransceiver *trans; ++ TransportStream *stream; ++ struct new_jb_args d = { 0, }; + +- trans = _find_transceiver (webrtc, &session_id, +- (FindTransceiverFunc) transceiver_match_for_mline); ++ PC_LOCK (webrtc); ++ GST_INFO_OBJECT (webrtc, "new jitterbuffer %" GST_PTR_FORMAT " for " ++ "session %u ssrc %u", jitterbuffer, session_id, ssrc); + +- if (trans) { +- /* We don't set do-retransmission on rtpbin as we want per-session control */ +- g_object_set (jitterbuffer, "do-retransmission", +- WEBRTC_TRANSCEIVER (trans)->do_nack, NULL); +- } else { +- g_assert_not_reached (); ++ if (!(stream = _find_transport_for_session (webrtc, session_id))) { ++ g_warn_if_reached (); ++ goto out; + } ++ ++ d.webrtc = webrtc; ++ d.jitterbuffer = jitterbuffer; ++ d.stream = stream; ++ d.ssrc = ssrc; ++ transport_stream_filter_ssrc_map_item (stream, &d, ++ (FindSsrcMapFunc) jitter_buffer_set_retransmission); ++ ++out: ++ PC_UNLOCK (webrtc); + } + + static void +@@ -5694,10 +7809,8 @@ _create_rtpbin (GstWebRTCBin * webrtc) + G_CALLBACK (on_rtpbin_request_aux_receiver), webrtc); + g_signal_connect (rtpbin, "new-storage", + G_CALLBACK (on_rtpbin_new_storage), webrtc); +- g_signal_connect (rtpbin, "request-fec-decoder", +- G_CALLBACK (on_rtpbin_request_fec_decoder), webrtc); +- g_signal_connect (rtpbin, "request-fec-encoder", +- G_CALLBACK (on_rtpbin_request_fec_encoder), webrtc); ++ g_signal_connect (rtpbin, "request-fec-decoder-full", ++ G_CALLBACK (on_rtpbin_request_fec_decoder_full), webrtc); + g_signal_connect (rtpbin, "on-bye-ssrc", + G_CALLBACK (on_rtpbin_bye_ssrc), webrtc); + g_signal_connect (rtpbin, "on-bye-timeout", +@@ -5786,57 +7899,272 @@ sink_pad_block (GstPad * pad, GstPadProbeInfo * info, gpointer unused) + return GST_PAD_PROBE_OK; + } + ++static void ++peek_sink_buffer (GstWebRTCBin * webrtc, guint rtp_session_id, ++ guint media_idx, WebRTCTransceiver * trans, GstBuffer * buffer) ++{ ++ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ SsrcMapItem *item; ++ guint ssrc; ++ ++ if (!gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp)) ++ return; ++ ssrc = gst_rtp_buffer_get_ssrc (&rtp); ++ gst_rtp_buffer_unmap (&rtp); ++ ++ if (!ssrc) { ++ GST_WARNING_OBJECT (webrtc, ++ "incoming buffer does not contain a valid ssrc"); ++ return; ++ } ++ ++ PC_LOCK (webrtc); ++ item = ++ find_or_add_ssrc_map_item (webrtc, ++ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY, rtp_session_id, ssrc, ++ media_idx); ++ if (item->media_idx == -1) { ++ char *str; ++ ++ GST_DEBUG_OBJECT (webrtc, "updating media idx of ssrc item %p to %u", item, ++ media_idx); ++ item->media_idx = media_idx; ++ ++ /* ensure that the rtx mapping contains a valid ssrc to use for rtx when ++ * used even when there are no ssrc's in the input/codec preferences caps */ ++ str = g_strdup_printf ("%u", ssrc); ++ if (!gst_structure_has_field_typed (trans->local_rtx_ssrc_map, str, ++ G_TYPE_UINT)) { ++ /* TODO: ssrc-collision? 
*/ ++ gst_structure_set (trans->local_rtx_ssrc_map, str, G_TYPE_UINT, ++ g_random_int (), NULL); ++ _set_internal_rtpbin_element_props_from_stream (webrtc, trans->stream); ++ } ++ g_free (str); ++ } ++ PC_UNLOCK (webrtc); ++} ++ ++static GstPadProbeReturn ++sink_pad_buffer_peek (GstPad * pad, GstPadProbeInfo * info, ++ GstWebRTCBin * webrtc) ++{ ++ GstWebRTCBinPad *webrtc_pad = GST_WEBRTC_BIN_PAD (pad); ++ WebRTCTransceiver *trans; ++ guint rtp_session_id, media_idx; ++ ++ if (!webrtc_pad->trans) ++ return GST_PAD_PROBE_OK; ++ ++ trans = (WebRTCTransceiver *) webrtc_pad->trans; ++ if (!trans->stream) ++ return GST_PAD_PROBE_OK; ++ ++ rtp_session_id = trans->stream->session_id; ++ media_idx = webrtc_pad->trans->mline; ++ ++ if (media_idx != G_MAXUINT) ++ return GST_PAD_PROBE_OK; ++ ++ if (info->type & GST_PAD_PROBE_TYPE_BUFFER) { ++ GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info); ++ peek_sink_buffer (webrtc, rtp_session_id, media_idx, trans, buffer); ++ } else if (info->type & GST_PAD_PROBE_TYPE_BUFFER_LIST) { ++ GstBufferList *list = GST_PAD_PROBE_INFO_BUFFER_LIST (info); ++ guint i, n; ++ ++ n = gst_buffer_list_length (list); ++ for (i = 0; i < n; i++) { ++ GstBuffer *buffer = gst_buffer_list_get (list, i); ++ peek_sink_buffer (webrtc, rtp_session_id, media_idx, trans, buffer); ++ } ++ } else { ++ g_assert_not_reached (); ++ } ++ ++ return GST_PAD_PROBE_OK; ++} ++ + static GstPad * + gst_webrtc_bin_request_new_pad (GstElement * element, GstPadTemplate * templ, + const gchar * name, const GstCaps * caps) + { + GstWebRTCBin *webrtc = GST_WEBRTC_BIN (element); ++ GstWebRTCRTPTransceiver *trans = NULL; + GstWebRTCBinPad *pad = NULL; + guint serial; ++ gboolean lock_mline = FALSE; + + if (!_have_nice_elements (webrtc) || !_have_dtls_elements (webrtc)) + return NULL; + +- if (templ->direction == GST_PAD_SINK || +- g_strcmp0 (templ->name_template, "sink_%u") == 0) { +- GstWebRTCRTPTransceiver *trans; ++ if (templ->direction != GST_PAD_SINK || ++ g_strcmp0 (templ->name_template, "sink_%u") != 0) { ++ GST_ERROR_OBJECT (element, "Requested pad that shouldn't be requestable"); ++ return NULL; ++ } + +- GST_OBJECT_LOCK (webrtc); +- if (name == NULL || strlen (name) < 6 || !g_str_has_prefix (name, "sink_")) { +- /* no name given when requesting the pad, use next available int */ +- serial = webrtc->priv->max_sink_pad_serial++; +- } else { +- /* parse serial number from requested padname */ +- serial = g_ascii_strtoull (&name[5], NULL, 10); +- if (serial > webrtc->priv->max_sink_pad_serial) +- webrtc->priv->max_sink_pad_serial = serial; +- } +- GST_OBJECT_UNLOCK (webrtc); ++ PC_LOCK (webrtc); ++ ++ if (name == NULL || strlen (name) < 6 || !g_str_has_prefix (name, "sink_")) { ++ /* no name given when requesting the pad, use next available int */ ++ serial = webrtc->priv->max_sink_pad_serial++; ++ } else { ++ /* parse serial number from requested padname */ ++ serial = g_ascii_strtoull (&name[5], NULL, 10); ++ lock_mline = TRUE; ++ } ++ ++ if (lock_mline) { ++ GstWebRTCBinPad *pad2; + +- pad = _create_pad_for_sdp_media (webrtc, GST_PAD_SINK, serial); + trans = _find_transceiver_for_mline (webrtc, serial); +- if (!trans) { +- trans = +- GST_WEBRTC_RTP_TRANSCEIVER (_create_webrtc_transceiver (webrtc, +- GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV, serial)); +- GST_LOG_OBJECT (webrtc, "Created new transceiver %" GST_PTR_FORMAT +- " for mline %u", trans, serial); +- } else { +- GST_LOG_OBJECT (webrtc, "Using existing transceiver %" GST_PTR_FORMAT +- " for mline %u", trans, serial); ++ ++ if (trans) { 
++ /* Reject transceivers that are only for receiving ... */ ++ if (trans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY || ++ trans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE) { ++ GST_ERROR_OBJECT (element, "Tried to request a new sink pad %s for" ++ " existing m-line %d, but the transceiver's direction is %s", ++ name, serial, ++ gst_webrtc_rtp_transceiver_direction_to_string (trans->direction)); ++ goto error_out; ++ } ++ ++ /* Reject transceivers that already have a pad allocated */ ++ pad2 = _find_pad_for_transceiver (webrtc, GST_PAD_SINK, trans); ++ if (pad2) { ++ GST_ERROR_OBJECT (element, "Trying to request pad %s for m-line %d, " ++ " but the transceiver associated with this m-line already has pad" ++ " %s", name, serial, GST_PAD_NAME (pad2)); ++ gst_object_unref (pad2); ++ goto error_out; ++ } ++ ++ if (caps) { ++ GST_OBJECT_LOCK (trans); ++ if (trans->codec_preferences && ++ !gst_caps_can_intersect (caps, trans->codec_preferences)) { ++ GST_ERROR_OBJECT (element, "Tried to request a new sink pad %s for" ++ " existing m-line %d, but requested caps %" GST_PTR_FORMAT ++ " don't match existing codec preferences %" GST_PTR_FORMAT, ++ name, serial, caps, trans->codec_preferences); ++ GST_OBJECT_UNLOCK (trans); ++ goto error_out; ++ } ++ GST_OBJECT_UNLOCK (trans); ++ ++ if (trans->kind != GST_WEBRTC_KIND_UNKNOWN) { ++ GstWebRTCKind kind = webrtc_kind_from_caps (caps); ++ ++ if (trans->kind != kind) { ++ GST_ERROR_OBJECT (element, "Tried to request a new sink pad %s for" ++ " existing m-line %d, but requested caps %" GST_PTR_FORMAT ++ " don't match transceiver kind %d", ++ name, serial, caps, trans->kind); ++ goto error_out; ++ } ++ } ++ } ++ } ++ } ++ ++ /* Let's try to find a free transceiver that matches */ ++ if (!trans) { ++ GstWebRTCKind kind = GST_WEBRTC_KIND_UNKNOWN; ++ guint i; ++ ++ kind = webrtc_kind_from_caps (caps); ++ ++ for (i = 0; i < webrtc->priv->transceivers->len; i++) { ++ GstWebRTCRTPTransceiver *tmptrans = ++ g_ptr_array_index (webrtc->priv->transceivers, i); ++ GstWebRTCBinPad *pad2; ++ gboolean has_matching_caps; ++ ++ /* Ignore transceivers with a non-matching kind */ ++ if (tmptrans->kind != GST_WEBRTC_KIND_UNKNOWN && ++ kind != GST_WEBRTC_KIND_UNKNOWN && tmptrans->kind != kind) ++ continue; ++ ++ /* Ignore stopped transmitters */ ++ if (tmptrans->stopped) ++ continue; ++ ++ /* Ignore transceivers that are only for receiving ... 
*/ ++ if (tmptrans->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY ++ || tmptrans->direction == ++ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE) ++ continue; ++ ++ /* Ignore transceivers that already have a pad allocated */ ++ pad2 = _find_pad_for_transceiver (webrtc, GST_PAD_SINK, tmptrans); ++ if (pad2) { ++ gst_object_unref (pad2); ++ continue; ++ } ++ ++ GST_OBJECT_LOCK (tmptrans); ++ has_matching_caps = (caps && tmptrans->codec_preferences && ++ !gst_caps_can_intersect (caps, tmptrans->codec_preferences)); ++ GST_OBJECT_UNLOCK (tmptrans); ++ /* Ignore transceivers with non-matching caps */ ++ if (!has_matching_caps) ++ continue; ++ ++ trans = tmptrans; ++ break; ++ } ++ } ++ ++ if (!trans) { ++ trans = GST_WEBRTC_RTP_TRANSCEIVER (_create_webrtc_transceiver (webrtc, ++ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV, -1, ++ webrtc_kind_from_caps (caps), NULL)); ++ GST_LOG_OBJECT (webrtc, "Created new transceiver %" GST_PTR_FORMAT, trans); ++ } else { ++ GST_LOG_OBJECT (webrtc, "Using existing transceiver %" GST_PTR_FORMAT ++ " for mline %u", trans, serial); ++ if (caps) { ++ if (!_update_transceiver_kind_from_caps (trans, caps)) { ++ GstWebRTCKind caps_kind = webrtc_kind_from_caps (caps); ++ ++ GST_WARNING_OBJECT (webrtc, ++ "Trying to change kind of transceiver %" GST_PTR_FORMAT ++ " at m-line %d from %s (%d) to %s (%d)", trans, serial, ++ gst_webrtc_kind_to_string (trans->kind), trans->kind, ++ gst_webrtc_kind_to_string (caps_kind), caps_kind); ++ } + } +- pad->trans = gst_object_ref (trans); ++ } ++ pad = _create_pad_for_sdp_media (webrtc, GST_PAD_SINK, trans, serial, NULL); ++ ++ pad->block_id = gst_pad_add_probe (GST_PAD (pad), GST_PAD_PROBE_TYPE_BLOCK | ++ GST_PAD_PROBE_TYPE_BUFFER | GST_PAD_PROBE_TYPE_BUFFER_LIST, ++ (GstPadProbeCallback) sink_pad_block, NULL, NULL); ++ webrtc->priv->pending_sink_transceivers = ++ g_list_append (webrtc->priv->pending_sink_transceivers, ++ gst_object_ref (pad)); + +- pad->block_id = gst_pad_add_probe (GST_PAD (pad), GST_PAD_PROBE_TYPE_BLOCK | +- GST_PAD_PROBE_TYPE_BUFFER | GST_PAD_PROBE_TYPE_BUFFER_LIST, +- (GstPadProbeCallback) sink_pad_block, NULL, NULL); +- webrtc->priv->pending_sink_transceivers = +- g_list_append (webrtc->priv->pending_sink_transceivers, +- gst_object_ref (pad)); +- _add_pad (webrtc, pad); ++ gst_pad_add_probe (GST_PAD (pad), ++ GST_PAD_PROBE_TYPE_BUFFER | GST_PAD_PROBE_TYPE_BUFFER_LIST, ++ (GstPadProbeCallback) sink_pad_buffer_peek, webrtc, NULL); ++ ++ if (lock_mline) { ++ WebRTCTransceiver *wtrans = WEBRTC_TRANSCEIVER (trans); ++ wtrans->mline_locked = TRUE; ++ trans->mline = serial; + } + ++ PC_UNLOCK (webrtc); ++ ++ _add_pad (webrtc, pad); ++ + return GST_PAD (pad); ++ ++error_out: ++ PC_UNLOCK (webrtc); ++ return NULL; + } + + static void +@@ -5853,6 +8181,7 @@ gst_webrtc_bin_release_pad (GstElement * element, GstPad * pad) + if (webrtc_pad->trans) + gst_object_unref (webrtc_pad->trans); + webrtc_pad->trans = NULL; ++ gst_caps_replace (&webrtc_pad->received_caps, NULL); + PC_UNLOCK (webrtc); + + _remove_pad (webrtc, webrtc_pad); +@@ -5918,6 +8247,13 @@ gst_webrtc_bin_set_property (GObject * object, guint prop_id, + webrtc->priv->jb_latency = g_value_get_uint (value); + _update_rtpstorage_latency (webrtc); + break; ++ case PROP_ICE_AGENT: ++ webrtc->priv->ice = g_value_get_object (value); ++ break; ++ case PROP_HTTP_PROXY: ++ gst_webrtc_ice_set_http_proxy (webrtc->priv->ice, ++ g_value_get_string (value)); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -5992,6 
+8328,13 @@ gst_webrtc_bin_get_property (GObject * object, guint prop_id, + case PROP_LATENCY: + g_value_set_uint (value, webrtc->priv->jb_latency); + break; ++ case PROP_SCTP_TRANSPORT: ++ g_value_set_object (value, webrtc->priv->sctp_transport); ++ break; ++ case PROP_HTTP_PROXY: ++ g_value_take_string (value, ++ gst_webrtc_ice_get_http_proxy (webrtc->priv->ice)); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -6005,13 +8348,13 @@ gst_webrtc_bin_constructed (GObject * object) + GstWebRTCBin *webrtc = GST_WEBRTC_BIN (object); + gchar *name; + +- name = g_strdup_printf ("%s:ice", GST_OBJECT_NAME (webrtc)); +- webrtc->priv->ice = gst_webrtc_ice_new (name); +- ++ if (!webrtc->priv->ice) { ++ name = g_strdup_printf ("%s:ice", GST_OBJECT_NAME (webrtc)); ++ webrtc->priv->ice = GST_WEBRTC_ICE (gst_webrtc_nice_new (name)); ++ g_free (name); ++ } + gst_webrtc_ice_set_on_ice_candidate (webrtc->priv->ice, +- (GstWebRTCIceOnCandidateFunc) _on_local_ice_candidate_cb, webrtc, NULL); +- +- g_free (name); ++ (GstWebRTCICEOnCandidateFunc) _on_local_ice_candidate_cb, webrtc, NULL); + + G_OBJECT_CLASS (parent_class)->constructed (object); + } +@@ -6069,10 +8412,6 @@ gst_webrtc_bin_finalize (GObject * object) + g_array_free (webrtc->priv->pending_local_ice_candidates, TRUE); + webrtc->priv->pending_local_ice_candidates = NULL; + +- if (webrtc->priv->session_mid_map) +- g_array_free (webrtc->priv->session_mid_map, TRUE); +- webrtc->priv->session_mid_map = NULL; +- + if (webrtc->priv->pending_pads) + g_list_free_full (webrtc->priv->pending_pads, + (GDestroyNotify) _free_pending_pad); +@@ -6104,10 +8443,7 @@ gst_webrtc_bin_finalize (GObject * object) + gst_webrtc_session_description_free (webrtc->priv->last_generated_offer); + webrtc->priv->last_generated_offer = NULL; + +- if (webrtc->priv->stats) +- gst_structure_free (webrtc->priv->stats); +- webrtc->priv->stats = NULL; +- ++ g_mutex_clear (DC_GET_LOCK (webrtc)); + g_mutex_clear (ICE_GET_LOCK (webrtc)); + g_mutex_clear (PC_GET_LOCK (webrtc)); + g_cond_clear (PC_GET_COND (webrtc)); +@@ -6126,8 +8462,9 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass) + element_class->change_state = gst_webrtc_bin_change_state; + + gst_element_class_add_static_pad_template_with_gtype (element_class, +- &sink_template, GST_TYPE_WEBRTC_BIN_PAD); +- gst_element_class_add_static_pad_template (element_class, &src_template); ++ &sink_template, GST_TYPE_WEBRTC_BIN_SINK_PAD); ++ gst_element_class_add_static_pad_template_with_gtype (element_class, ++ &src_template, GST_TYPE_WEBRTC_BIN_SRC_PAD); + + gst_element_class_set_metadata (element_class, "WebRTC Bin", + "Filter/Network/WebRTC", "A bin for webrtc connections", +@@ -6205,6 +8542,10 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass) + PROP_TURN_SERVER, + g_param_spec_string ("turn-server", "TURN Server", + "The TURN server of the form turn(s)://username:password@host:port. " ++ "To use time-limited credentials, the form must be turn(s)://timestamp:" ++ "username:password@host:port. Please note that the ':' character of " ++ "the 'timestamp:username' and the 'password' encoded by base64 should " ++ "be escaped to be parsed properly. 
" + "This is a convenience property, use #GstWebRTCBin::add-turn-server " + "if you wish to use multiple TURN servers", + NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); +@@ -6261,7 +8602,8 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass) + PROP_ICE_AGENT, + g_param_spec_object ("ice-agent", "WebRTC ICE agent", + "The WebRTC ICE agent", +- GST_TYPE_WEBRTC_ICE, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ GST_TYPE_WEBRTC_ICE, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT_ONLY)); + + /** + * GstWebRTCBin:latency: +@@ -6275,7 +8617,37 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass) + PROP_LATENCY, + g_param_spec_uint ("latency", "Latency", + "Default duration to buffer in the jitterbuffers (in ms)", +- 0, G_MAXUINT, 200, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ 0, G_MAXUINT, DEFAULT_JB_LATENCY, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstWebRTCBin:http-proxy: ++ * ++ * A HTTP proxy for use with TURN/TCP of the form ++ * http://[username:password@]hostname[:port] ++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_HTTP_PROXY, ++ g_param_spec_string ("http-proxy", "HTTP Proxy", ++ "A HTTP proxy for use with TURN/TCP of the form " ++ "http://[username:password@]hostname[:port]", ++ NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstWebRTCBin:sctp-transport: ++ * ++ * The WebRTC SCTP Transport ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_SCTP_TRANSPORT, ++ g_param_spec_object ("sctp-transport", "WebRTC SCTP Transport", ++ "The WebRTC SCTP Transport", ++ GST_TYPE_WEBRTC_SCTP_TRANSPORT, ++ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); + + /** + * GstWebRTCBin::create-offer: +@@ -6329,7 +8701,8 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass) + * GstWebRTCBin::add-ice-candidate: + * @object: the #webrtcbin + * @mline_index: the index of the media description in the SDP +- * @ice-candidate: an ice candidate ++ * @ice-candidate: an ice candidate or NULL/"" to mark that no more candidates ++ * will arrive + */ + gst_webrtc_bin_signals[ADD_ICE_CANDIDATE_SIGNAL] = + g_signal_new_class_handler ("add-ice-candidate", +@@ -6373,25 +8746,30 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass) + * "ssrc" G_TYPE_STRING the rtp sequence src in use + * "transport-id" G_TYPE_STRING identifier for the associated RTCTransportStats for this stream + * "codec-id" G_TYPE_STRING identifier for the associated RTCCodecStats for this stream +- * "fir-count" G_TYPE_UINT FIR requests received by the sender (only for local statistics) +- * "pli-count" G_TYPE_UINT PLI requests received by the sender (only for local statistics) +- * "nack-count" G_TYPE_UINT NACK requests received by the sender (only for local statistics) ++ * "kind" G_TYPE_STRING either "audio" or "video", depending on the associated transceiver (Since: 1.22) + * + * RTCReceivedStreamStats supported fields (https://w3c.github.io/webrtc-stats/#receivedrtpstats-dict*) + * +- * "packets-received" G_TYPE_UINT64 number of packets received (only for local inbound) +- * "bytes-received" G_TYPE_UINT64 number of bytes received (only for local inbound) +- * "packets-lost" G_TYPE_UINT number of packets lost +- * "jitter" G_TYPE_DOUBLE packet jitter measured in secondss ++ * "packets-received" G_TYPE_UINT64 number of packets received (only for local inbound) ++ * "packets-lost" G_TYPE_INT64 number of packets lost ++ * "packets-discarded" G_TYPE_UINT64 number of packets discarded ++ * 
"packets-repaired" G_TYPE_UINT64 number of packets repaired ++ * "jitter" G_TYPE_DOUBLE packet jitter measured in seconds + * + * RTCInboundRTPStreamStats supported fields (https://w3c.github.io/webrtc-stats/#inboundrtpstats-dict*) + * + * "remote-id" G_TYPE_STRING identifier for the associated RTCRemoteOutboundRTPStreamStats ++ * "bytes-received" G_TYPE_UINT64 number of bytes received (only for local inbound) ++ * "packets-duplicated" G_TYPE_UINT64 number of packets duplicated ++ * "fir-count" G_TYPE_UINT FIR packets sent by the receiver ++ * "pli-count" G_TYPE_UINT PLI packets sent by the receiver ++ * "nack-count" G_TYPE_UINT NACK packets sent by the receiver + * + * RTCRemoteInboundRTPStreamStats supported fields (https://w3c.github.io/webrtc-stats/#remoteinboundrtpstats-dict*) + * + * "local-id" G_TYPE_STRING identifier for the associated RTCOutboundRTPSTreamStats + * "round-trip-time" G_TYPE_DOUBLE round trip time of packets measured in seconds ++ * "fraction-lost" G_TYPE_DOUBLE fraction packet loss + * + * RTCSentRTPStreamStats supported fields (https://w3c.github.io/webrtc-stats/#sentrtpstats-dict*) + * +@@ -6400,12 +8778,31 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass) + * + * RTCOutboundRTPStreamStats supported fields (https://w3c.github.io/webrtc-stats/#outboundrtpstats-dict*) + * +- * "remote-id" G_TYPE_STRING identifier for the associated RTCRemoteInboundRTPSTreamStats ++ * "remote-id" G_TYPE_STRING identifier for the associated RTCRemoteInboundRTPSTreamStats (optional since 1.22) ++ * "fir-count" G_TYPE_UINT FIR packets received by the sender ++ * "pli-count" G_TYPE_UINT PLI packets received by the sender ++ * "nack-count" G_TYPE_UINT NACK packets received by the sender + * + * RTCRemoteOutboundRTPStreamStats supported fields (https://w3c.github.io/webrtc-stats/#remoteoutboundrtpstats-dict*) + * + * "local-id" G_TYPE_STRING identifier for the associated RTCInboundRTPSTreamStats ++ * "remote-timestamp" G_TYPE_DOUBLE remote timestamp the statistics were sent by the remote ++ * ++ * RTCIceCandidateStats supported fields (https://www.w3.org/TR/webrtc-stats/#icecandidate-dict*) (Since: 1.22) + * ++ * "transport-id" G_TYPE_STRING identifier for the associated RTCTransportStats for this stream ++ * "address" G_TYPE_STRING address of the candidate, allowing for IPv4, IPv6 and FQDNs ++ * "port" G_TYPE_UINT port number of the candidate ++ * "candidate-type" G_TYPE_STRING RTCIceCandidateType ++ * "priority" G_TYPE_UINT64 calculated as defined in RFC 5245 ++ * "protocol" G_TYPE_STRING Either "udp" or "tcp". Based on the "transport" defined in RFC 5245 ++ * "relay-protocol" G_TYPE_STRING protocol used by the endpoint to communicate with the TURN server. Only present for local candidates. Either "udp", "tcp" or "tls" ++ * "url" G_TYPE_STRING URL of the ICE server from which the candidate was obtained. Only present for local candidates ++ * ++ * RTCIceCandidatePairStats supported fields (https://www.w3.org/TR/webrtc-stats/#candidatepair-dict*) (Since: 1.22) ++ * ++ * "local-candidate-id" G_TYPE_STRING unique identifier that is associated to the object that was inspected to produce the RTCIceCandidateStats for the local candidate associated with this candidate pair. ++ * "remote-candidate-id" G_TYPE_STRING unique identifier that is associated to the object that was inspected to produce the RTCIceCandidateStats for the remote candidate associated with this candidate pair. 
+ */ + gst_webrtc_bin_signals[GET_STATS_SIGNAL] = + g_signal_new_class_handler ("get-stats", +@@ -6445,13 +8842,46 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass) + /** + * GstWebRTCBin::on-data-channel: + * @object: the #GstWebRTCBin +- * @candidate: the new `GstWebRTCDataChannel` ++ * @channel: the new `GstWebRTCDataChannel` + */ + gst_webrtc_bin_signals[ON_DATA_CHANNEL_SIGNAL] = + g_signal_new ("on-data-channel", G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, + G_TYPE_NONE, 1, GST_TYPE_WEBRTC_DATA_CHANNEL); + ++ /** ++ * GstWebRTCBin::prepare-data-channel: ++ * @object: the #GstWebRTCBin ++ * @channel: the new `GstWebRTCDataChannel` ++ * @is_local: Whether this channel is local or remote ++ * ++ * Allows data-channel consumers to configure signal handlers on a newly ++ * created data-channel, before any data or state change has been notified. ++ * ++ * Since: 1.22 ++ */ ++ gst_webrtc_bin_signals[PREPARE_DATA_CHANNEL_SIGNAL] = ++ g_signal_new ("prepare-data-channel", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 2, ++ GST_TYPE_WEBRTC_DATA_CHANNEL, G_TYPE_BOOLEAN); ++ ++ /** ++ * GstWebRTCBin::request-aux-sender: ++ * @object: the #GstWebRTCBin ++ * @dtls-transport: The #GstWebRTCDTLSTransport object for which the aux ++ * sender will be used. ++ * ++ * Request an AUX sender element for the given @dtls-transport. ++ * ++ * Returns: (transfer full): A new GStreamer element ++ * ++ * Since: 1.22 ++ */ ++ gst_webrtc_bin_signals[REQUEST_AUX_SENDER] = ++ g_signal_new ("request-aux-sender", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_LAST, 0, _gst_element_accumulator, NULL, NULL, ++ GST_TYPE_ELEMENT, 1, GST_TYPE_WEBRTC_DTLS_TRANSPORT); ++ + /** + * GstWebRTCBin::add-transceiver: + * @object: the #webrtcbin +@@ -6533,7 +8963,8 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass) + NULL, GST_TYPE_WEBRTC_DATA_CHANNEL, 2, G_TYPE_STRING, GST_TYPE_STRUCTURE); + + gst_type_mark_as_plugin_api (GST_TYPE_WEBRTC_BIN_PAD, 0); +- gst_type_mark_as_plugin_api (GST_TYPE_WEBRTC_ICE, 0); ++ gst_type_mark_as_plugin_api (GST_TYPE_WEBRTC_BIN_SINK_PAD, 0); ++ gst_type_mark_as_plugin_api (GST_TYPE_WEBRTC_BIN_SRC_PAD, 0); + } + + static void +@@ -6558,11 +8989,6 @@ _transport_free (GObject * object) + g_signal_handlers_disconnect_by_data (stream->transport->transport, webrtc); + g_signal_handlers_disconnect_by_data (stream->transport, webrtc); + } +- if (stream->rtcp_transport) { +- g_signal_handlers_disconnect_by_data (stream->rtcp_transport->transport, +- webrtc); +- g_signal_handlers_disconnect_by_data (stream->rtcp_transport, webrtc); +- } + + gst_object_unref (object); + } +@@ -6585,6 +9011,7 @@ gst_webrtc_bin_init (GstWebRTCBin * webrtc) + g_cond_init (PC_GET_COND (webrtc)); + + g_mutex_init (ICE_GET_LOCK (webrtc)); ++ g_mutex_init (DC_GET_LOCK (webrtc)); + + webrtc->rtpbin = _create_rtpbin (webrtc); + gst_bin_add (GST_BIN (webrtc), webrtc->rtpbin); +@@ -6600,11 +9027,6 @@ gst_webrtc_bin_init (GstWebRTCBin * webrtc) + webrtc->priv->pending_data_channels = + g_ptr_array_new_with_free_func ((GDestroyNotify) gst_object_unref); + +- webrtc->priv->session_mid_map = +- g_array_new (FALSE, TRUE, sizeof (SessionMidItem)); +- g_array_set_clear_func (webrtc->priv->session_mid_map, +- (GDestroyNotify) clear_session_mid_item); +- + webrtc->priv->ice_stream_map = + g_array_new (FALSE, TRUE, sizeof (IceStreamItem)); + webrtc->priv->pending_remote_ice_candidates = +@@ -6619,4 +9041,5 @@ gst_webrtc_bin_init (GstWebRTCBin * webrtc) + + /* we start off closed 
until we move to READY */ + webrtc->priv->is_closed = TRUE; ++ webrtc->priv->jb_latency = DEFAULT_JB_LATENCY; + } +diff --git a/ext/webrtc/gstwebrtcbin.h b/ext/webrtc/gstwebrtcbin.h +index e4b462f2e..9445d9e5a 100644 +--- a/ext/webrtc/gstwebrtcbin.h ++++ b/ext/webrtc/gstwebrtcbin.h +@@ -22,8 +22,8 @@ + + #include + #include "fwd.h" +-#include "gstwebrtcice.h" + #include "transportstream.h" ++#include "webrtcsctptransport.h" + + G_BEGIN_DECLS + +@@ -38,16 +38,17 @@ GType gst_webrtc_bin_pad_get_type(void); + typedef struct _GstWebRTCBinPad GstWebRTCBinPad; + typedef struct _GstWebRTCBinPadClass GstWebRTCBinPadClass; + ++G_DEFINE_AUTOPTR_CLEANUP_FUNC (GstWebRTCBinPad, gst_object_unref); ++ + struct _GstWebRTCBinPad + { + GstGhostPad parent; + +- guint mlineindex; +- + GstWebRTCRTPTransceiver *trans; + gulong block_id; + + GstCaps *received_caps; ++ char *msid; + }; + + struct _GstWebRTCBinPadClass +@@ -55,6 +56,14 @@ struct _GstWebRTCBinPadClass + GstGhostPadClass parent_class; + }; + ++G_DECLARE_FINAL_TYPE (GstWebRTCBinSinkPad, gst_webrtc_bin_sink_pad, GST, ++ WEBRTC_BIN_SINK_PAD, GstWebRTCBinPad); ++#define GST_TYPE_WEBRTC_BIN_SINK_PAD (gst_webrtc_bin_sink_pad_get_type()) ++ ++G_DECLARE_FINAL_TYPE (GstWebRTCBinSrcPad, gst_webrtc_bin_src_pad, GST, ++ WEBRTC_BIN_SRC_PAD, GstWebRTCBinPad); ++#define GST_TYPE_WEBRTC_BIN_SRC_PAD (gst_webrtc_bin_src_pad_get_type()) ++ + GType gst_webrtc_bin_get_type(void); + #define GST_TYPE_WEBRTC_BIN (gst_webrtc_bin_get_type()) + #define GST_WEBRTC_BIN(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WEBRTC_BIN,GstWebRTCBin)) +@@ -94,19 +103,22 @@ struct _GstWebRTCBinClass + struct _GstWebRTCBinPrivate + { + guint max_sink_pad_serial; ++ guint src_pad_counter; + + gboolean bundle; + GPtrArray *transceivers; +- GArray *session_mid_map; + GPtrArray *transports; + GPtrArray *data_channels; + /* list of data channels we've received a sctp stream for but no data + * channel protocol for */ + GPtrArray *pending_data_channels; ++ /* dc_lock protects data_channels and pending_data_channels */ ++ /* lock ordering is pc_lock first, then dc_lock */ ++ GMutex dc_lock; + + guint jb_latency; + +- GstWebRTCSCTPTransport *sctp_transport; ++ WebRTCSCTPTransport *sctp_transport; + TransportStream *data_channel_transport; + + GstWebRTCICE *ice; +@@ -140,10 +152,10 @@ struct _GstWebRTCBinPrivate + GstWebRTCSessionDescription *last_generated_offer; + GstWebRTCSessionDescription *last_generated_answer; + +- GstStructure *stats; ++ gboolean tos_attached; + }; + +-typedef void (*GstWebRTCBinFunc) (GstWebRTCBin * webrtc, gpointer data); ++typedef GstStructure *(*GstWebRTCBinFunc) (GstWebRTCBin * webrtc, gpointer data); + + typedef struct + { +diff --git a/ext/webrtc/gstwebrtcstats.c b/ext/webrtc/gstwebrtcstats.c +index 7ecf9b9aa..5ff2bd6d2 100644 +--- a/ext/webrtc/gstwebrtcstats.c ++++ b/ext/webrtc/gstwebrtcstats.c +@@ -31,6 +31,8 @@ + #include "utils.h" + #include "webrtctransceiver.h" + ++#include ++ + #define GST_CAT_DEFAULT gst_webrtc_stats_debug + GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); + +@@ -56,7 +58,7 @@ static void + _set_base_stats (GstStructure * s, GstWebRTCStatsType type, double ts, + const char *id) + { +- gchar *name = _enum_value_to_string (GST_TYPE_WEBRTC_STATS_TYPE, ++ const gchar *name = _enum_value_to_string (GST_TYPE_WEBRTC_STATS_TYPE, + type); + + g_return_if_fail (name != NULL); +@@ -64,8 +66,6 @@ _set_base_stats (GstStructure * s, GstWebRTCStatsType type, double ts, + gst_structure_set_name (s, name); + gst_structure_set (s, "type", 
GST_TYPE_WEBRTC_STATS_TYPE, type, "timestamp", + G_TYPE_DOUBLE, ts, "id", G_TYPE_STRING, id, NULL); +- +- g_free (name); + } + + static GstStructure * +@@ -81,19 +81,136 @@ _get_peer_connection_stats (GstWebRTCBin * webrtc) + return s; + } + ++static void ++_gst_structure_take_structure (GstStructure * s, const char *fieldname, ++ GstStructure ** value_s) ++{ ++ GValue v = G_VALUE_INIT; ++ ++ g_return_if_fail (GST_IS_STRUCTURE (*value_s)); ++ ++ g_value_init (&v, GST_TYPE_STRUCTURE); ++ g_value_take_boxed (&v, *value_s); ++ ++ gst_structure_take_value (s, fieldname, &v); ++ ++ *value_s = NULL; ++} ++ + #define CLOCK_RATE_VALUE_TO_SECONDS(v,r) ((double) v / (double) clock_rate) + #define FIXED_16_16_TO_DOUBLE(v) ((double) ((v & 0xffff0000) >> 16) + ((v & 0xffff) / 65536.0)) + #define FIXED_32_32_TO_DOUBLE(v) ((double) ((v & G_GUINT64_CONSTANT (0xffffffff00000000)) >> 32) + ((v & G_GUINT64_CONSTANT (0xffffffff)) / 4294967296.0)) + ++/* https://www.w3.org/TR/webrtc-stats/#remoteinboundrtpstats-dict* */ ++static gboolean ++_get_stats_from_remote_rtp_source_stats (GstWebRTCBin * webrtc, ++ TransportStream * stream, const GstStructure * source_stats, ++ guint ssrc, guint clock_rate, const gchar * codec_id, const gchar * kind, ++ const gchar * transport_id, GstStructure * s) ++{ ++ gboolean have_rb = FALSE, internal = FALSE; ++ int lost; ++ GstStructure *r_in; ++ gchar *r_in_id, *out_id; ++ guint32 rtt; ++ guint fraction_lost, jitter; ++ double ts; ++ ++ gst_structure_get_double (s, "timestamp", &ts); ++ gst_structure_get (source_stats, "internal", G_TYPE_BOOLEAN, &internal, ++ "have-rb", G_TYPE_BOOLEAN, &have_rb, NULL); ++ ++ /* This isn't what we're looking for */ ++ if (internal == TRUE || have_rb == FALSE) ++ return FALSE; ++ ++ r_in_id = g_strdup_printf ("rtp-remote-inbound-stream-stats_%u", ssrc); ++ out_id = g_strdup_printf ("rtp-outbound-stream-stats_%u", ssrc); ++ ++ r_in = gst_structure_new_empty (r_in_id); ++ _set_base_stats (r_in, GST_WEBRTC_STATS_REMOTE_INBOUND_RTP, ts, r_in_id); ++ ++ /* RTCRtpStreamStats */ ++ gst_structure_set (r_in, "local-id", G_TYPE_STRING, out_id, NULL); ++ gst_structure_set (r_in, "ssrc", G_TYPE_UINT, ssrc, NULL); ++ gst_structure_set (r_in, "codec-id", G_TYPE_STRING, codec_id, NULL); ++ gst_structure_set (r_in, "transport-id", G_TYPE_STRING, transport_id, NULL); ++ if (kind) ++ gst_structure_set (r_in, "kind", G_TYPE_STRING, kind, NULL); ++ ++ /* RTCReceivedRtpStreamStats */ ++ ++ if (gst_structure_get_int (source_stats, "rb-packetslost", &lost)) ++ gst_structure_set (r_in, "packets-lost", G_TYPE_INT64, (gint64) lost, NULL); ++ ++ if (clock_rate && gst_structure_get_uint (source_stats, "rb-jitter", &jitter)) ++ gst_structure_set (r_in, "jitter", G_TYPE_DOUBLE, ++ CLOCK_RATE_VALUE_TO_SECONDS (jitter, clock_rate), NULL); ++ ++ /* RTCReceivedRtpStreamStats: ++ ++ unsigned long long packetsReceived; ++ unsigned long packetsDiscarded; ++ unsigned long packetsRepaired; ++ unsigned long burstPacketsLost; ++ unsigned long burstPacketsDiscarded; ++ unsigned long burstLossCount; ++ unsigned long burstDiscardCount; ++ double burstLossRate; ++ double burstDiscardRate; ++ double gapLossRate; ++ double gapDiscardRate; ++ ++ Can't be implemented frame re-assembly happens after rtpbin: ++ ++ unsigned long framesDropped; ++ unsigned long partialFramesLost; ++ unsigned long fullFramesLost; ++ */ ++ ++ /* RTCRemoteInboundRTPStreamStats */ ++ ++ if (gst_structure_get_uint (source_stats, "rb-fractionlost", &fraction_lost)) ++ gst_structure_set (r_in, "fraction-lost", 
G_TYPE_DOUBLE, ++ (double) fraction_lost / 256.0, NULL); ++ ++ if (gst_structure_get_uint (source_stats, "rb-round-trip", &rtt)) { ++ /* 16.16 fixed point to double */ ++ double val = FIXED_16_16_TO_DOUBLE (rtt); ++ gst_structure_set (r_in, "round-trip-time", G_TYPE_DOUBLE, val, NULL); ++ } ++ ++ /* RTCRemoteInboundRTPStreamStats: ++ ++ To be added: ++ ++ DOMString localId; ++ double totalRoundTripTime; ++ unsigned long long reportsReceived; ++ unsigned long long roundTripTimeMeasurements; ++ */ ++ ++ gst_structure_set (r_in, "gst-rtpsource-stats", GST_TYPE_STRUCTURE, ++ source_stats, NULL); ++ ++ _gst_structure_take_structure (s, r_in_id, &r_in); ++ ++ g_free (r_in_id); ++ g_free (out_id); ++ ++ return TRUE; ++} ++ + /* https://www.w3.org/TR/webrtc-stats/#inboundrtpstats-dict* + https://www.w3.org/TR/webrtc-stats/#outboundrtpstats-dict* */ + static void + _get_stats_from_rtp_source_stats (GstWebRTCBin * webrtc, +- const GstStructure * source_stats, const gchar * codec_id, +- const gchar * transport_id, GstStructure * s) ++ TransportStream * stream, const GstStructure * source_stats, ++ const gchar * codec_id, const gchar * kind, const gchar * transport_id, ++ GstStructure * s) + { + guint ssrc, fir, pli, nack, jitter; +- int lost, clock_rate; ++ int clock_rate; + guint64 packets, bytes; + gboolean internal; + double ts; +@@ -103,48 +220,10 @@ _get_stats_from_rtp_source_stats (GstWebRTCBin * webrtc, + G_TYPE_INT, &clock_rate, "internal", G_TYPE_BOOLEAN, &internal, NULL); + + if (internal) { +- GstStructure *r_in, *out; ++ GstStructure *out; + gchar *out_id, *r_in_id; + + out_id = g_strdup_printf ("rtp-outbound-stream-stats_%u", ssrc); +- r_in_id = g_strdup_printf ("rtp-remote-inbound-stream-stats_%u", ssrc); +- +- r_in = gst_structure_new_empty (r_in_id); +- _set_base_stats (r_in, GST_WEBRTC_STATS_REMOTE_INBOUND_RTP, ts, r_in_id); +- +- /* RTCStreamStats */ +- gst_structure_set (r_in, "local-id", G_TYPE_STRING, out_id, NULL); +- gst_structure_set (r_in, "ssrc", G_TYPE_UINT, ssrc, NULL); +- gst_structure_set (r_in, "codec-id", G_TYPE_STRING, codec_id, NULL); +- gst_structure_set (r_in, "transport-id", G_TYPE_STRING, transport_id, NULL); +- /* XXX: mediaType, trackId, sliCount, qpSum */ +- +- if (gst_structure_get_uint64 (source_stats, "packets-received", &packets)) +- gst_structure_set (r_in, "packets-received", G_TYPE_UINT64, packets, +- NULL); +- if (gst_structure_get_int (source_stats, "packets-lost", &lost)) +- gst_structure_set (r_in, "packets-lost", G_TYPE_INT, lost, NULL); +- if (gst_structure_get_uint (source_stats, "jitter", &jitter)) +- gst_structure_set (r_in, "jitter", G_TYPE_DOUBLE, +- CLOCK_RATE_VALUE_TO_SECONDS (jitter, clock_rate), NULL); +- +-/* XXX: RTCReceivedRTPStreamStats +- double fractionLost; +- unsigned long packetsDiscarded; +- unsigned long packetsFailedDecryption; +- unsigned long packetsRepaired; +- unsigned long burstPacketsLost; +- unsigned long burstPacketsDiscarded; +- unsigned long burstLossCount; +- unsigned long burstDiscardCount; +- double burstLossRate; +- double burstDiscardRate; +- double gapLossRate; +- double gapDiscardRate; +-*/ +- +- /* RTCRemoteInboundRTPStreamStats */ +- /* XXX: framesDecoded, lastPacketReceivedTimestamp */ + + out = gst_structure_new_empty (out_id); + _set_base_stats (out, GST_WEBRTC_STATS_OUTBOUND_RTP, ts, out_id); +@@ -153,48 +232,115 @@ _get_stats_from_rtp_source_stats (GstWebRTCBin * webrtc, + gst_structure_set (out, "ssrc", G_TYPE_UINT, ssrc, NULL); + gst_structure_set (out, "codec-id", G_TYPE_STRING, codec_id, NULL); + 
gst_structure_set (out, "transport-id", G_TYPE_STRING, transport_id, NULL); +- if (gst_structure_get_uint (source_stats, "sent-fir-count", &fir)) +- gst_structure_set (out, "fir-count", G_TYPE_UINT, fir, NULL); +- if (gst_structure_get_uint (source_stats, "sent-pli-count", &pli)) +- gst_structure_set (out, "pli-count", G_TYPE_UINT, pli, NULL); +- if (gst_structure_get_uint (source_stats, "sent-nack-count", &nack)) +- gst_structure_set (out, "nack-count", G_TYPE_UINT, nack, NULL); +- /* XXX: mediaType, trackId, sliCount, qpSum */ ++ if (kind) ++ gst_structure_set (out, "kind", G_TYPE_STRING, kind, NULL); + +-/* RTCSentRTPStreamStats */ ++ /* RTCSentRtpStreamStats */ + if (gst_structure_get_uint64 (source_stats, "octets-sent", &bytes)) + gst_structure_set (out, "bytes-sent", G_TYPE_UINT64, bytes, NULL); + if (gst_structure_get_uint64 (source_stats, "packets-sent", &packets)) + gst_structure_set (out, "packets-sent", G_TYPE_UINT64, packets, NULL); +-/* XXX: +- unsigned long packetsDiscardedOnSend; +- unsigned long long bytesDiscardedOnSend; +-*/ + + /* RTCOutboundRTPStreamStats */ +- gst_structure_set (out, "remote-id", G_TYPE_STRING, r_in_id, NULL); +-/* XXX: +- DOMHighResTimeStamp lastPacketSentTimestamp; +- double targetBitrate; +- unsigned long framesEncoded; +- double totalEncodeTime; +- double averageRTCPInterval; +-*/ +- gst_structure_set (s, out_id, GST_TYPE_STRUCTURE, out, NULL); +- gst_structure_set (s, r_in_id, GST_TYPE_STRUCTURE, r_in, NULL); + +- gst_structure_free (out); +- gst_structure_free (r_in); ++ if (gst_structure_get_uint (source_stats, "recv-fir-count", &fir)) ++ gst_structure_set (out, "fir-count", G_TYPE_UINT, fir, NULL); ++ if (gst_structure_get_uint (source_stats, "recv-pli-count", &pli)) ++ gst_structure_set (out, "pli-count", G_TYPE_UINT, pli, NULL); ++ if (gst_structure_get_uint (source_stats, "recv-nack-count", &nack)) ++ gst_structure_set (out, "nack-count", G_TYPE_UINT, nack, NULL); ++ /* XXX: mediaType, trackId, sliCount, qpSum */ + +- g_free (out_id); ++ r_in_id = g_strdup_printf ("rtp-remote-inbound-stream-stats_%u", ssrc); ++ if (gst_structure_has_field (s, r_in_id)) ++ gst_structure_set (out, "remote-id", G_TYPE_STRING, r_in_id, NULL); + g_free (r_in_id); ++ ++ /* RTCOutboundRTPStreamStats: ++ ++ To be added: ++ ++ unsigned long sliCount; ++ unsigned long rtxSsrc; ++ DOMString mediaSourceId; ++ DOMString senderId; ++ DOMString remoteId; ++ DOMString rid; ++ DOMHighResTimeStamp lastPacketSentTimestamp; ++ unsigned long long headerBytesSent; ++ unsigned long packetsDiscardedOnSend; ++ unsigned long long bytesDiscardedOnSend; ++ unsigned long fecPacketsSent; ++ unsigned long long retransmittedPacketsSent; ++ unsigned long long retransmittedBytesSent; ++ double averageRtcpInterval; ++ record perDscpPacketsSent; ++ ++ Not relevant because webrtcbin doesn't encode: ++ ++ double targetBitrate; ++ unsigned long long totalEncodedBytesTarget; ++ unsigned long frameWidth; ++ unsigned long frameHeight; ++ unsigned long frameBitDepth; ++ double framesPerSecond; ++ unsigned long framesSent; ++ unsigned long hugeFramesSent; ++ unsigned long framesEncoded; ++ unsigned long keyFramesEncoded; ++ unsigned long framesDiscardedOnSend; ++ unsigned long long qpSum; ++ unsigned long long totalSamplesSent; ++ unsigned long long samplesEncodedWithSilk; ++ unsigned long long samplesEncodedWithCelt; ++ boolean voiceActivityFlag; ++ double totalEncodeTime; ++ double totalPacketSendDelay; ++ RTCQualityLimitationReason qualityLimitationReason; ++ record qualityLimitationDurations; ++ 
unsigned long qualityLimitationResolutionChanges; ++ DOMString encoderImplementation; ++ */ ++ ++ /* Store the raw stats from GStreamer into the structure for advanced ++ * information. ++ */ ++ gst_structure_set (out, "gst-rtpsource-stats", GST_TYPE_STRUCTURE, ++ source_stats, NULL); ++ ++ _gst_structure_take_structure (s, out_id, &out); ++ ++ g_free (out_id); + } else { + GstStructure *in, *r_out; + gchar *r_out_id, *in_id; +- gboolean have_rb = FALSE, have_sr = FALSE; ++ gboolean have_sr = FALSE; ++ GstStructure *jb_stats = NULL; ++ guint i; ++ guint64 jb_lost, duplicates, late, rtx_success; + +- gst_structure_get (source_stats, "have-rb", G_TYPE_BOOLEAN, &have_rb, +- "have-sr", G_TYPE_BOOLEAN, &have_sr, NULL); ++ gst_structure_get (source_stats, "have-sr", G_TYPE_BOOLEAN, &have_sr, NULL); ++ ++ for (i = 0; i < stream->ssrcmap->len; i++) { ++ SsrcMapItem *item = g_ptr_array_index (stream->ssrcmap, i); ++ ++ if (item->direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY ++ && item->ssrc == ssrc) { ++ GObject *jb = g_weak_ref_get (&item->rtpjitterbuffer); ++ ++ if (jb) { ++ g_object_get (jb, "stats", &jb_stats, NULL); ++ g_object_unref (jb); ++ } ++ break; ++ } ++ } ++ ++ if (jb_stats) ++ gst_structure_get (jb_stats, "num-lost", G_TYPE_UINT64, &jb_lost, ++ "num-duplicates", G_TYPE_UINT64, &duplicates, "num-late", ++ G_TYPE_UINT64, &late, "rtx-success-count", G_TYPE_UINT64, ++ &rtx_success, NULL); + + in_id = g_strdup_printf ("rtp-inbound-stream-stats_%u", ssrc); + r_out_id = g_strdup_printf ("rtp-remote-outbound-stream-stats_%u", ssrc); +@@ -202,47 +348,116 @@ _get_stats_from_rtp_source_stats (GstWebRTCBin * webrtc, + in = gst_structure_new_empty (in_id); + _set_base_stats (in, GST_WEBRTC_STATS_INBOUND_RTP, ts, in_id); + +- /* RTCStreamStats */ ++ /* RTCRtpStreamStats */ + gst_structure_set (in, "ssrc", G_TYPE_UINT, ssrc, NULL); + gst_structure_set (in, "codec-id", G_TYPE_STRING, codec_id, NULL); + gst_structure_set (in, "transport-id", G_TYPE_STRING, transport_id, NULL); +- if (gst_structure_get_uint (source_stats, "recv-fir-count", &fir)) +- gst_structure_set (in, "fir-count", G_TYPE_UINT, fir, NULL); +- if (gst_structure_get_uint (source_stats, "recv-pli-count", &pli)) +- gst_structure_set (in, "pli-count", G_TYPE_UINT, pli, NULL); +- if (gst_structure_get_uint (source_stats, "recv-nack-count", &nack)) +- gst_structure_set (in, "nack-count", G_TYPE_UINT, nack, NULL); +- /* XXX: mediaType, trackId, sliCount, qpSum */ ++ if (kind) ++ gst_structure_set (in, "kind", G_TYPE_STRING, kind, NULL); ++ ++ /* RTCReceivedRtpStreamStats */ + +- /* RTCReceivedRTPStreamStats */ + if (gst_structure_get_uint64 (source_stats, "packets-received", &packets)) + gst_structure_set (in, "packets-received", G_TYPE_UINT64, packets, NULL); +- if (gst_structure_get_uint64 (source_stats, "octets-received", &bytes)) +- gst_structure_set (in, "bytes-received", G_TYPE_UINT64, bytes, NULL); +- if (gst_structure_get_int (source_stats, "packets-lost", &lost)) +- gst_structure_set (in, "packets-lost", G_TYPE_INT, lost, NULL); ++ if (jb_stats) { ++ gint64 packets_lost = jb_lost > G_MAXINT64 ? 
++ G_MAXINT64 : (gint64) jb_lost; ++ gst_structure_set (in, "packets-lost", G_TYPE_INT64, packets_lost, NULL); ++ } + if (gst_structure_get_uint (source_stats, "jitter", &jitter)) + gst_structure_set (in, "jitter", G_TYPE_DOUBLE, + CLOCK_RATE_VALUE_TO_SECONDS (jitter, clock_rate), NULL); +-/* +- RTCReceivedRTPStreamStats +- double fractionLost; +- unsigned long packetsDiscarded; +- unsigned long packetsFailedDecryption; +- unsigned long packetsRepaired; +- unsigned long burstPacketsLost; +- unsigned long burstPacketsDiscarded; +- unsigned long burstLossCount; +- unsigned long burstDiscardCount; +- double burstLossRate; +- double burstDiscardRate; +- double gapLossRate; +- double gapDiscardRate; +-*/ + +- /* RTCInboundRTPStreamStats */ ++ if (jb_stats) ++ gst_structure_set (in, "packets-discarded", G_TYPE_UINT64, late, ++ "packets-repaired", G_TYPE_UINT64, rtx_success, NULL); ++ ++ /* ++ RTCReceivedRtpStreamStats ++ ++ To be added: ++ ++ unsigned long long burstPacketsLost; ++ unsigned long long burstPacketsDiscarded; ++ unsigned long burstLossCount; ++ unsigned long burstDiscardCount; ++ double burstLossRate; ++ double burstDiscardRate; ++ double gapLossRate; ++ double gapDiscardRate; ++ ++ Not relevant because webrtcbin doesn't decode: ++ ++ unsigned long framesDropped; ++ unsigned long partialFramesLost; ++ unsigned long fullFramesLost; ++ */ ++ ++ /* RTCInboundRtpStreamStats */ + gst_structure_set (in, "remote-id", G_TYPE_STRING, r_out_id, NULL); +- /* XXX: framesDecoded, lastPacketReceivedTimestamp */ ++ ++ if (gst_structure_get_uint64 (source_stats, "octets-received", &bytes)) ++ gst_structure_set (in, "bytes-received", G_TYPE_UINT64, bytes, NULL); ++ ++ if (gst_structure_get_uint (source_stats, "sent-fir-count", &fir)) ++ gst_structure_set (in, "fir-count", G_TYPE_UINT, fir, NULL); ++ if (gst_structure_get_uint (source_stats, "sent-pli-count", &pli)) ++ gst_structure_set (in, "pli-count", G_TYPE_UINT, pli, NULL); ++ if (gst_structure_get_uint (source_stats, "sent-nack-count", &nack)) ++ gst_structure_set (in, "nack-count", G_TYPE_UINT, nack, NULL); ++ if (jb_stats) ++ gst_structure_set (in, "packets-duplicated", G_TYPE_UINT64, duplicates, ++ NULL); ++ ++ /* RTCInboundRtpStreamStats: ++ ++ To be added: ++ ++ required DOMString receiverId; ++ double averageRtcpInterval; ++ unsigned long long headerBytesReceived; ++ unsigned long long fecPacketsReceived; ++ unsigned long long fecPacketsDiscarded; ++ unsigned long long bytesReceived; ++ unsigned long long packetsFailedDecryption; ++ record perDscpPacketsReceived; ++ unsigned long nackCount; ++ unsigned long firCount; ++ unsigned long pliCount; ++ unsigned long sliCount; ++ double jitterBufferDelay; ++ ++ Not relevant because webrtcbin doesn't decode or depayload: ++ unsigned long framesDecoded; ++ unsigned long keyFramesDecoded; ++ unsigned long frameWidth; ++ unsigned long frameHeight; ++ unsigned long frameBitDepth; ++ double framesPerSecond; ++ unsigned long long qpSum; ++ double totalDecodeTime; ++ double totalInterFrameDelay; ++ double totalSquaredInterFrameDelay; ++ boolean voiceActivityFlag; ++ DOMHighResTimeStamp lastPacketReceivedTimestamp; ++ double totalProcessingDelay; ++ DOMHighResTimeStamp estimatedPlayoutTimestamp; ++ unsigned long long jitterBufferEmittedCount; ++ unsigned long long totalSamplesReceived; ++ unsigned long long totalSamplesDecoded; ++ unsigned long long samplesDecodedWithSilk; ++ unsigned long long samplesDecodedWithCelt; ++ unsigned long long concealedSamples; ++ unsigned long long silentConcealedSamples; 
++ unsigned long long concealmentEvents; ++ unsigned long long insertedSamplesForDeceleration; ++ unsigned long long removedSamplesForAcceleration; ++ double audioLevel; ++ double totalAudioEnergy; ++ double totalSamplesDuration; ++ unsigned long framesReceived; ++ DOMString decoderImplementation; ++ */ + + r_out = gst_structure_new_empty (r_out_id); + _set_base_stats (r_out, GST_WEBRTC_STATS_REMOTE_OUTBOUND_RTP, ts, r_out_id); +@@ -251,30 +466,67 @@ _get_stats_from_rtp_source_stats (GstWebRTCBin * webrtc, + gst_structure_set (r_out, "codec-id", G_TYPE_STRING, codec_id, NULL); + gst_structure_set (r_out, "transport-id", G_TYPE_STRING, transport_id, + NULL); +- if (have_rb) { +- guint32 rtt; +- if (gst_structure_get_uint (source_stats, "rb-round-trip", &rtt)) { +- /* 16.16 fixed point to double */ +- double val = FIXED_16_16_TO_DOUBLE (rtt); +- gst_structure_set (r_out, "round-trip-time", G_TYPE_DOUBLE, val, NULL); +- } +- } else { +- /* default values */ +- gst_structure_set (r_out, "round-trip-time", G_TYPE_DOUBLE, 0.0, NULL); +- } +- /* XXX: mediaType, trackId, sliCount, qpSum */ ++ /* XXX: mediaType, trackId */ ++ ++ /* RTCSentRtpStreamStats */ + +-/* RTCSentRTPStreamStats */ + if (have_sr) { +- if (gst_structure_get_uint64 (source_stats, "sr-octet-count", &bytes)) +- gst_structure_set (r_out, "bytes-sent", G_TYPE_UINT64, bytes, NULL); +- if (gst_structure_get_uint64 (source_stats, "sr-packet-count", &packets)) +- gst_structure_set (r_out, "packets-sent", G_TYPE_UINT64, packets, NULL); ++ guint sr_bytes, sr_packets; ++ ++ if (gst_structure_get_uint (source_stats, "sr-octet-count", &sr_bytes)) ++ gst_structure_set (r_out, "bytes-sent", G_TYPE_UINT, sr_bytes, NULL); ++ if (gst_structure_get_uint (source_stats, "sr-packet-count", &sr_packets)) ++ gst_structure_set (r_out, "packets-sent", G_TYPE_UINT, sr_packets, ++ NULL); + } +-/* XXX: +- unsigned long packetsDiscardedOnSend; +- unsigned long long bytesDiscardedOnSend; +-*/ ++ ++ /* RTCSentRtpStreamStats: ++ ++ To be added: ++ ++ unsigned long rtxSsrc; ++ DOMString mediaSourceId; ++ DOMString senderId; ++ DOMString remoteId; ++ DOMString rid; ++ DOMHighResTimeStamp lastPacketSentTimestamp; ++ unsigned long long headerBytesSent; ++ unsigned long packetsDiscardedOnSend; ++ unsigned long long bytesDiscardedOnSend; ++ unsigned long fecPacketsSent; ++ unsigned long long retransmittedPacketsSent; ++ unsigned long long retransmittedBytesSent; ++ double averageRtcpInterval; ++ unsigned long sliCount; ++ ++ Can't be implemented because we don't decode: ++ ++ double targetBitrate; ++ unsigned long long totalEncodedBytesTarget; ++ unsigned long frameWidth; ++ unsigned long frameHeight; ++ unsigned long frameBitDepth; ++ double framesPerSecond; ++ unsigned long framesSent; ++ unsigned long hugeFramesSent; ++ unsigned long framesEncoded; ++ unsigned long keyFramesEncoded; ++ unsigned long framesDiscardedOnSend; ++ unsigned long long qpSum; ++ unsigned long long totalSamplesSent; ++ unsigned long long samplesEncodedWithSilk; ++ unsigned long long samplesEncodedWithCelt; ++ boolean voiceActivityFlag; ++ double totalEncodeTime; ++ double totalPacketSendDelay; ++ RTCQualityLimitationReason qualityLimitationReason; ++ record qualityLimitationDurations; ++ unsigned long qualityLimitationResolutionChanges; ++ record perDscpPacketsSent; ++ DOMString encoderImplementation; ++ */ ++ ++ /* RTCRemoteOutboundRtpStreamStats */ + + if (have_sr) { + guint64 ntptime; +@@ -290,79 +542,177 @@ _get_stats_from_rtp_source_stats (GstWebRTCBin * webrtc, + + gst_structure_set 
(r_out, "local-id", G_TYPE_STRING, in_id, NULL); + +- gst_structure_set (s, in_id, GST_TYPE_STRUCTURE, in, NULL); +- gst_structure_set (s, r_out_id, GST_TYPE_STRUCTURE, r_out, NULL); ++ /* To be added: ++ reportsSent ++ */ + +- gst_structure_free (in); +- gst_structure_free (r_out); ++ /* Store the raw stats from GStreamer into the structure for advanced ++ * information. ++ */ ++ if (jb_stats) ++ _gst_structure_take_structure (in, "gst-rtpjitterbuffer-stats", ++ &jb_stats); ++ ++ gst_structure_set (in, "gst-rtpsource-stats", GST_TYPE_STRUCTURE, ++ source_stats, NULL); ++ ++ _gst_structure_take_structure (s, in_id, &in); ++ _gst_structure_take_structure (s, r_out_id, &r_out); + + g_free (in_id); + g_free (r_out_id); + } + } + ++/* https://www.w3.org/TR/webrtc-stats/#icecandidate-dict* */ ++static gchar * ++_get_stats_from_ice_candidates (GstWebRTCBin * webrtc, ++ GstWebRTCICECandidateStats * can, const gchar * transport_id, ++ const gchar * candidate_tag, GstStructure * s) ++{ ++ GstStructure *stats; ++ GstWebRTCStatsType type; ++ gchar *id; ++ double ts; ++ ++ gst_structure_get_double (s, "timestamp", &ts); ++ ++ id = g_strdup_printf ("ice-candidate-%s_%u_%s_%u", candidate_tag, ++ can->stream_id, can->ipaddr, can->port); ++ stats = gst_structure_new_empty (id); ++ ++ if (strcmp (candidate_tag, "local")) { ++ type = GST_WEBRTC_STATS_LOCAL_CANDIDATE; ++ } else if (strcmp (candidate_tag, "remote")) { ++ type = GST_WEBRTC_STATS_REMOTE_CANDIDATE; ++ } else { ++ GST_WARNING_OBJECT (webrtc, "Invalid ice candidate tag: %s", candidate_tag); ++ return NULL; ++ } ++ _set_base_stats (stats, type, ts, id); ++ ++ /* RTCIceCandidateStats ++ DOMString transportId; ++ DOMString address; ++ long port; ++ DOMString protocol; ++ RTCIceCandidateType candidateType; ++ long priority; ++ DOMString url; ++ DOMString relayProtocol; ++ */ ++ ++ if (transport_id) ++ gst_structure_set (stats, "transport-id", G_TYPE_STRING, transport_id, ++ NULL); ++ gst_structure_set (stats, "address", G_TYPE_STRING, can->ipaddr, NULL); ++ gst_structure_set (stats, "port", G_TYPE_UINT, can->port, NULL); ++ gst_structure_set (stats, "candidate-type", G_TYPE_STRING, can->type, NULL); ++ gst_structure_set (stats, "priority", G_TYPE_UINT, can->prio, NULL); ++ gst_structure_set (stats, "protocol", G_TYPE_STRING, can->proto, NULL); ++ if (can->relay_proto) ++ gst_structure_set (stats, "relay-protocol", G_TYPE_STRING, can->relay_proto, ++ NULL); ++ if (can->url) ++ gst_structure_set (stats, "url", G_TYPE_STRING, can->url, NULL); ++ ++ gst_structure_set (s, id, GST_TYPE_STRUCTURE, stats, NULL); ++ gst_structure_free (stats); ++ ++ return id; ++} ++ + /* https://www.w3.org/TR/webrtc-stats/#candidatepair-dict* */ + static gchar * + _get_stats_from_ice_transport (GstWebRTCBin * webrtc, +- GstWebRTCICETransport * transport, GstStructure * s) ++ GstWebRTCICETransport * transport, GstWebRTCICEStream * stream, ++ const GstStructure * twcc_stats, const gchar * transport_id, ++ GstStructure * s) + { + GstStructure *stats; + gchar *id; ++ gchar *local_cand_id = NULL, *remote_cand_id = NULL; + double ts; ++ GstWebRTCICECandidateStats *local_cand = NULL, *remote_cand = NULL; + + gst_structure_get_double (s, "timestamp", &ts); + + id = g_strdup_printf ("ice-candidate-pair_%s", GST_OBJECT_NAME (transport)); + stats = gst_structure_new_empty (id); +- _set_base_stats (stats, GST_WEBRTC_STATS_TRANSPORT, ts, id); ++ _set_base_stats (stats, GST_WEBRTC_STATS_CANDIDATE_PAIR, ts, id); ++ ++ /* RTCIceCandidatePairStats ++ DOMString transportId; ++ DOMString 
localCandidateId; ++ DOMString remoteCandidateId; ++ ++ XXX: To be added: ++ ++ RTCStatsIceCandidatePairState state; ++ boolean nominated; ++ unsigned long packetsSent; ++ unsigned long packetsReceived; ++ unsigned long long bytesSent; ++ unsigned long long bytesReceived; ++ DOMHighResTimeStamp lastPacketSentTimestamp; ++ DOMHighResTimeStamp lastPacketReceivedTimestamp; ++ DOMHighResTimeStamp firstRequestTimestamp; ++ DOMHighResTimeStamp lastRequestTimestamp; ++ DOMHighResTimeStamp lastResponseTimestamp; ++ double totalRoundTripTime; ++ double currentRoundTripTime; ++ double availableOutgoingBitrate; ++ double availableIncomingBitrate; ++ unsigned long circuitBreakerTriggerCount; ++ unsigned long long requestsReceived; ++ unsigned long long requestsSent; ++ unsigned long long responsesReceived; ++ unsigned long long responsesSent; ++ unsigned long long retransmissionsReceived; ++ unsigned long long retransmissionsSent; ++ unsigned long long consentRequestsSent; ++ DOMHighResTimeStamp consentExpiredTimestamp; ++ unsigned long packetsDiscardedOnSend; ++ unsigned long long bytesDiscardedOnSend; ++ unsigned long long requestBytesSent; ++ unsigned long long consentRequestBytesSent; ++ unsigned long long responseBytesSent; ++ */ ++ ++ if (gst_webrtc_ice_get_selected_pair (webrtc->priv->ice, stream, ++ &local_cand, &remote_cand)) { ++ local_cand_id = ++ _get_stats_from_ice_candidates (webrtc, local_cand, transport_id, ++ "local", s); ++ remote_cand_id = ++ _get_stats_from_ice_candidates (webrtc, remote_cand, transport_id, ++ "remote", s); ++ ++ gst_structure_set (stats, "local-candidate-id", G_TYPE_STRING, ++ local_cand_id, NULL); ++ gst_structure_set (stats, "remote-candidate-id", G_TYPE_STRING, ++ remote_cand_id, NULL); ++ } else ++ GST_INFO_OBJECT (webrtc, ++ "No selected ICE candidate pair was found for transport %s", ++ GST_OBJECT_NAME (transport)); ++ ++ /* XXX: these stats are at the rtp session level but there isn't a specific ++ * stats structure for that. 
The RTCIceCandidatePairStats is the closest with ++ * the 'availableIncomingBitrate' and 'availableOutgoingBitrate' fields ++ */ ++ if (twcc_stats) ++ gst_structure_set (stats, "gst-twcc-stats", GST_TYPE_STRUCTURE, twcc_stats, ++ NULL); + +-/* XXX: RTCIceCandidatePairStats +- DOMString transportId; +- DOMString localCandidateId; +- DOMString remoteCandidateId; +- RTCStatsIceCandidatePairState state; +- unsigned long long priority; +- boolean nominated; +- unsigned long packetsSent; +- unsigned long packetsReceived; +- unsigned long long bytesSent; +- unsigned long long bytesReceived; +- DOMHighResTimeStamp lastPacketSentTimestamp; +- DOMHighResTimeStamp lastPacketReceivedTimestamp; +- DOMHighResTimeStamp firstRequestTimestamp; +- DOMHighResTimeStamp lastRequestTimestamp; +- DOMHighResTimeStamp lastResponseTimestamp; +- double totalRoundTripTime; +- double currentRoundTripTime; +- double availableOutgoingBitrate; +- double availableIncomingBitrate; +- unsigned long circuitBreakerTriggerCount; +- unsigned long long requestsReceived; +- unsigned long long requestsSent; +- unsigned long long responsesReceived; +- unsigned long long responsesSent; +- unsigned long long retransmissionsReceived; +- unsigned long long retransmissionsSent; +- unsigned long long consentRequestsSent; +- DOMHighResTimeStamp consentExpiredTimestamp; +-*/ ++ gst_structure_set (s, id, GST_TYPE_STRUCTURE, stats, NULL); + +-/* XXX: RTCIceCandidateStats +- DOMString transportId; +- boolean isRemote; +- RTCNetworkType networkType; +- DOMString ip; +- long port; +- DOMString protocol; +- RTCIceCandidateType candidateType; +- long priority; +- DOMString url; +- DOMString relayProtocol; +- boolean deleted = false; +-}; +-*/ ++ g_free (local_cand_id); ++ g_free (remote_cand_id); ++ ++ gst_webrtc_ice_candidate_stats_free (local_cand); ++ gst_webrtc_ice_candidate_stats_free (remote_cand); + +- gst_structure_set (s, id, GST_TYPE_STRUCTURE, stats, NULL); + gst_structure_free (stats); + + return id; +@@ -371,7 +721,8 @@ _get_stats_from_ice_transport (GstWebRTCBin * webrtc, + /* https://www.w3.org/TR/webrtc-stats/#dom-rtctransportstats */ + static gchar * + _get_stats_from_dtls_transport (GstWebRTCBin * webrtc, +- GstWebRTCDTLSTransport * transport, GstStructure * s) ++ GstWebRTCDTLSTransport * transport, GstWebRTCICEStream * stream, ++ const GstStructure * twcc_stats, GstStructure * s) + { + GstStructure *stats; + gchar *id; +@@ -404,94 +755,34 @@ _get_stats_from_dtls_transport (GstWebRTCBin * webrtc, + DOMString issuerCertificateId; + */ + +-/* XXX: RTCIceCandidateStats +- DOMString transportId; +- boolean isRemote; +- DOMString ip; +- long port; +- DOMString protocol; +- RTCIceCandidateType candidateType; +- long priority; +- DOMString url; +- boolean deleted = false; +-*/ ++ ice_id = ++ _get_stats_from_ice_transport (webrtc, transport->transport, stream, ++ twcc_stats, id, s); ++ if (ice_id) { ++ gst_structure_set (stats, "selected-candidate-pair-id", G_TYPE_STRING, ++ ice_id, NULL); ++ g_free (ice_id); ++ } + + gst_structure_set (s, id, GST_TYPE_STRUCTURE, stats, NULL); + gst_structure_free (stats); + +- ice_id = _get_stats_from_ice_transport (webrtc, transport->transport, s); +- g_free (ice_id); +- + return id; + } + +-static void +-_get_stats_from_transport_channel (GstWebRTCBin * webrtc, +- TransportStream * stream, const gchar * codec_id, guint ssrc, +- GstStructure * s) +-{ +- GstWebRTCDTLSTransport *transport; +- GObject *rtp_session; +- GstStructure *rtp_stats; +- GValueArray *source_stats; +- gchar *transport_id; +- 
double ts; +- int i; +- +- gst_structure_get_double (s, "timestamp", &ts); +- +- transport = stream->transport; +- if (!transport) +- transport = stream->transport; +- if (!transport) +- return; +- +- g_signal_emit_by_name (webrtc->rtpbin, "get-internal-session", +- stream->session_id, &rtp_session); +- g_object_get (rtp_session, "stats", &rtp_stats, NULL); +- +- gst_structure_get (rtp_stats, "source-stats", G_TYPE_VALUE_ARRAY, +- &source_stats, NULL); +- +- GST_DEBUG_OBJECT (webrtc, "retrieving rtp stream stats from transport %" +- GST_PTR_FORMAT " rtp session %" GST_PTR_FORMAT " with %u rtp sources, " +- "transport %" GST_PTR_FORMAT, stream, rtp_session, source_stats->n_values, +- transport); +- +- transport_id = _get_stats_from_dtls_transport (webrtc, transport, s); +- +- /* construct stats objects */ +- for (i = 0; i < source_stats->n_values; i++) { +- const GstStructure *stats; +- const GValue *val = g_value_array_get_nth (source_stats, i); +- guint stats_ssrc = 0; +- +- stats = gst_value_get_structure (val); +- +- /* skip foreign sources */ +- gst_structure_get (stats, "ssrc", G_TYPE_UINT, &stats_ssrc, NULL); +- if (ssrc && stats_ssrc && ssrc != stats_ssrc) +- continue; +- +- _get_stats_from_rtp_source_stats (webrtc, stats, codec_id, transport_id, s); +- } +- +- g_object_unref (rtp_session); +- gst_structure_free (rtp_stats); +- g_value_array_free (source_stats); +- g_free (transport_id); +-} +- + /* https://www.w3.org/TR/webrtc-stats/#codec-dict* */ +-static void ++static gboolean + _get_codec_stats_from_pad (GstWebRTCBin * webrtc, GstPad * pad, +- GstStructure * s, gchar ** out_id, guint * out_ssrc) ++ GstStructure * s, gchar ** out_id, guint * out_ssrc, guint * out_clock_rate) + { ++ GstWebRTCBinPad *wpad = GST_WEBRTC_BIN_PAD (pad); + GstStructure *stats; +- GstCaps *caps; ++ GstCaps *caps = NULL; + gchar *id; + double ts; + guint ssrc = 0; ++ gint clock_rate = 0; ++ gboolean has_caps_ssrc = FALSE; + + gst_structure_get_double (s, "timestamp", &ts); + +@@ -499,10 +790,15 @@ _get_codec_stats_from_pad (GstWebRTCBin * webrtc, GstPad * pad, + id = g_strdup_printf ("codec-stats-%s", GST_OBJECT_NAME (pad)); + _set_base_stats (stats, GST_WEBRTC_STATS_CODEC, ts, id); + +- caps = gst_pad_get_current_caps (pad); ++ if (wpad->received_caps) ++ caps = gst_caps_ref (wpad->received_caps); ++ GST_DEBUG_OBJECT (pad, "Pad caps are: %" GST_PTR_FORMAT, caps); + if (caps && gst_caps_is_fixed (caps)) { + GstStructure *caps_s = gst_caps_get_structure (caps, 0); +- gint pt, clock_rate; ++ gint pt; ++ const gchar *encoding_name, *media, *encoding_params; ++ GstSDPMedia sdp_media = { 0 }; ++ guint channels = 0; + + if (gst_structure_get_int (caps_s, "payload", &pt)) + gst_structure_set (stats, "payload-type", G_TYPE_UINT, pt, NULL); +@@ -510,10 +806,45 @@ _get_codec_stats_from_pad (GstWebRTCBin * webrtc, GstPad * pad, + if (gst_structure_get_int (caps_s, "clock-rate", &clock_rate)) + gst_structure_set (stats, "clock-rate", G_TYPE_UINT, clock_rate, NULL); + +- if (gst_structure_get_uint (caps_s, "ssrc", &ssrc)) ++ if (gst_structure_get_uint (caps_s, "ssrc", &ssrc)) { + gst_structure_set (stats, "ssrc", G_TYPE_UINT, ssrc, NULL); ++ has_caps_ssrc = TRUE; ++ } ++ ++ media = gst_structure_get_string (caps_s, "media"); ++ encoding_name = gst_structure_get_string (caps_s, "encoding-name"); ++ encoding_params = gst_structure_get_string (caps_s, "encoding-params"); ++ ++ if (media || encoding_name) { ++ gchar *mime_type; ++ ++ mime_type = g_strdup_printf ("%s/%s", media ? media : "", ++ encoding_name ? 
encoding_name : ""); ++ gst_structure_set (stats, "mime-type", G_TYPE_STRING, mime_type, NULL); ++ g_free (mime_type); ++ } ++ ++ if (encoding_params) ++ channels = atoi (encoding_params); ++ if (channels) ++ gst_structure_set (stats, "channels", G_TYPE_UINT, channels, NULL); ++ ++ if (gst_pad_get_direction (pad) == GST_PAD_SRC) ++ gst_structure_set (stats, "codec-type", G_TYPE_STRING, "decode", NULL); ++ else ++ gst_structure_set (stats, "codec-type", G_TYPE_STRING, "encode", NULL); ++ ++ gst_sdp_media_init (&sdp_media); ++ if (gst_sdp_media_set_media_from_caps (caps, &sdp_media) == GST_SDP_OK) { ++ const gchar *fmtp = gst_sdp_media_get_attribute_val (&sdp_media, "fmtp"); ++ ++ if (fmtp) { ++ gst_structure_set (stats, "sdp-fmtp-line", G_TYPE_STRING, fmtp, NULL); ++ } ++ } ++ gst_sdp_media_uninit (&sdp_media); + +- /* FIXME: codecType, mimeType, channels, sdpFmtpLine, implementation, transportId */ ++ /* FIXME: transportId */ + } + + if (caps) +@@ -529,34 +860,139 @@ _get_codec_stats_from_pad (GstWebRTCBin * webrtc, GstPad * pad, + + if (out_ssrc) + *out_ssrc = ssrc; ++ ++ if (out_clock_rate) ++ *out_clock_rate = clock_rate; ++ ++ return has_caps_ssrc; ++} ++ ++struct transport_stream_stats ++{ ++ GstWebRTCBin *webrtc; ++ TransportStream *stream; ++ char *transport_id; ++ char *codec_id; ++ const char *kind; ++ guint clock_rate; ++ GValueArray *source_stats; ++ GstStructure *s; ++}; ++ ++static gboolean ++webrtc_stats_get_from_transport (SsrcMapItem * entry, ++ struct transport_stream_stats *ts_stats) ++{ ++ double ts; ++ int i; ++ ++ gst_structure_get_double (ts_stats->s, "timestamp", &ts); ++ ++ /* construct stats objects */ ++ for (i = 0; i < ts_stats->source_stats->n_values; i++) { ++ const GstStructure *stats; ++ const GValue *val = g_value_array_get_nth (ts_stats->source_stats, i); ++ guint stats_ssrc = 0; ++ ++ stats = gst_value_get_structure (val); ++ ++ /* skip foreign sources */ ++ if (gst_structure_get_uint (stats, "ssrc", &stats_ssrc) && ++ entry->ssrc == stats_ssrc) ++ _get_stats_from_rtp_source_stats (ts_stats->webrtc, ts_stats->stream, ++ stats, ts_stats->codec_id, ts_stats->kind, ts_stats->transport_id, ++ ts_stats->s); ++ else if (gst_structure_get_uint (stats, "rb-ssrc", &stats_ssrc) ++ && entry->ssrc == stats_ssrc) ++ _get_stats_from_remote_rtp_source_stats (ts_stats->webrtc, ++ ts_stats->stream, stats, entry->ssrc, ts_stats->clock_rate, ++ ts_stats->codec_id, ts_stats->kind, ts_stats->transport_id, ++ ts_stats->s); ++ } ++ ++ /* we want to look at all the entries */ ++ return FALSE; + } + + static gboolean + _get_stats_from_pad (GstWebRTCBin * webrtc, GstPad * pad, GstStructure * s) + { + GstWebRTCBinPad *wpad = GST_WEBRTC_BIN_PAD (pad); +- TransportStream *stream; +- gchar *codec_id; +- guint ssrc; ++ struct transport_stream_stats ts_stats = { NULL, }; ++ guint ssrc, clock_rate; ++ GObject *rtp_session; ++ GObject *gst_rtp_session; ++ GstStructure *rtp_stats, *twcc_stats; ++ GstWebRTCKind kind; + +- _get_codec_stats_from_pad (webrtc, pad, s, &codec_id, &ssrc); ++ _get_codec_stats_from_pad (webrtc, pad, s, &ts_stats.codec_id, &ssrc, ++ &clock_rate); + + if (!wpad->trans) + goto out; + +- stream = WEBRTC_TRANSCEIVER (wpad->trans)->stream; +- if (!stream) ++ g_object_get (wpad->trans, "kind", &kind, NULL); ++ switch (kind) { ++ case GST_WEBRTC_KIND_AUDIO: ++ ts_stats.kind = "audio"; ++ break; ++ case GST_WEBRTC_KIND_VIDEO: ++ ts_stats.kind = "video"; ++ break; ++ case GST_WEBRTC_KIND_UNKNOWN: ++ ts_stats.kind = NULL; ++ break; ++ }; ++ ++ ts_stats.stream = 
WEBRTC_TRANSCEIVER (wpad->trans)->stream; ++ if (!ts_stats.stream) + goto out; + +- _get_stats_from_transport_channel (webrtc, stream, codec_id, ssrc, s); ++ if (wpad->trans->mline == G_MAXUINT) ++ goto out; ++ ++ if (!ts_stats.stream->transport) ++ goto out; ++ ++ g_signal_emit_by_name (webrtc->rtpbin, "get-internal-session", ++ ts_stats.stream->session_id, &rtp_session); ++ g_object_get (rtp_session, "stats", &rtp_stats, NULL); ++ g_signal_emit_by_name (webrtc->rtpbin, "get-session", ++ ts_stats.stream->session_id, &gst_rtp_session); ++ g_object_get (gst_rtp_session, "twcc-stats", &twcc_stats, NULL); ++ ++ gst_structure_get (rtp_stats, "source-stats", G_TYPE_VALUE_ARRAY, ++ &ts_stats.source_stats, NULL); ++ ++ ts_stats.transport_id = ++ _get_stats_from_dtls_transport (webrtc, ts_stats.stream->transport, ++ GST_WEBRTC_ICE_STREAM (ts_stats.stream->stream), twcc_stats, s); ++ ++ GST_DEBUG_OBJECT (webrtc, "retrieving rtp stream stats from transport %" ++ GST_PTR_FORMAT " rtp session %" GST_PTR_FORMAT " with %u rtp sources, " ++ "transport %" GST_PTR_FORMAT, ts_stats.stream, rtp_session, ++ ts_stats.source_stats->n_values, ts_stats.stream->transport); ++ ++ ts_stats.s = s; ++ ++ transport_stream_find_ssrc_map_item (ts_stats.stream, &ts_stats, ++ (FindSsrcMapFunc) webrtc_stats_get_from_transport); ++ ++ g_clear_object (&rtp_session); ++ g_clear_object (&gst_rtp_session); ++ gst_clear_structure (&rtp_stats); ++ gst_clear_structure (&twcc_stats); ++ g_value_array_free (ts_stats.source_stats); ++ ts_stats.source_stats = NULL; ++ g_clear_pointer (&ts_stats.transport_id, g_free); + + out: +- g_free (codec_id); ++ g_clear_pointer (&ts_stats.codec_id, g_free); + return TRUE; + } + +-void +-gst_webrtc_bin_update_stats (GstWebRTCBin * webrtc) ++GstStructure * ++gst_webrtc_bin_create_stats (GstWebRTCBin * webrtc, GstPad * pad) + { + GstStructure *s = gst_structure_new_empty ("application/x-webrtc-stats"); + double ts = monotonic_time_as_double_milliseconds (); +@@ -579,12 +1015,13 @@ gst_webrtc_bin_update_stats (GstWebRTCBin * webrtc) + gst_structure_free (pc_stats); + } + +- gst_element_foreach_pad (GST_ELEMENT (webrtc), +- (GstElementForeachPadFunc) _get_stats_from_pad, s); ++ if (pad) ++ _get_stats_from_pad (webrtc, pad, s); ++ else ++ gst_element_foreach_pad (GST_ELEMENT (webrtc), ++ (GstElementForeachPadFunc) _get_stats_from_pad, s); + + gst_structure_remove_field (s, "timestamp"); + +- if (webrtc->priv->stats) +- gst_structure_free (webrtc->priv->stats); +- webrtc->priv->stats = s; ++ return s; + } +diff --git a/ext/webrtc/gstwebrtcstats.h b/ext/webrtc/gstwebrtcstats.h +index e67ba47d6..0573df76c 100644 +--- a/ext/webrtc/gstwebrtcstats.h ++++ b/ext/webrtc/gstwebrtcstats.h +@@ -28,7 +28,8 @@ + G_BEGIN_DECLS + + G_GNUC_INTERNAL +-void gst_webrtc_bin_update_stats (GstWebRTCBin * webrtc); ++GstStructure * gst_webrtc_bin_create_stats (GstWebRTCBin * webrtc, ++ GstPad * pad); + + G_END_DECLS + +diff --git a/ext/webrtc/meson.build b/ext/webrtc/meson.build +index 3e7a5d1d8..151cb9a8e 100644 +--- a/ext/webrtc/meson.build ++++ b/ext/webrtc/meson.build +@@ -1,10 +1,7 @@ + webrtc_sources = [ + 'gstwebrtc.c', +- 'gstwebrtcice.c', + 'gstwebrtcstats.c', +- 'icestream.c', +- 'nicetransport.c', +- 'sctptransport.c', ++ 'webrtcsctptransport.c', + 'gstwebrtcbin.c', + 'transportreceivebin.c', + 'transportsendbin.c', +@@ -15,20 +12,20 @@ webrtc_sources = [ + 'webrtcdatachannel.c', + ] + +-libnice_dep = dependency('nice', version : '>=0.1.14', required : get_option('webrtc'), +- fallback : ['libnice', 'libnice_dep'], 
+- default_options: ['tests=disabled']) +- +-if libnice_dep.found() +- gstwebrtc_plugin = library('gstwebrtc', +- webrtc_sources, +- c_args : gst_plugins_bad_args + ['-DGST_USE_UNSTABLE_API'], +- include_directories : [configinc], +- dependencies : [gio_dep, libnice_dep, gstbase_dep, gstsdp_dep, +- gstapp_dep, gstwebrtc_dep, gstsctp_dep], +- install : true, +- install_dir : plugins_install_dir, +- ) +- pkgconfig.generate(gstwebrtc_plugin, install_dir : plugins_pkgconfig_install_dir) +- plugins += [gstwebrtc_plugin] ++webrtc_option = get_option('webrtc').require( ++ libgstwebrtcnice_dep.found(), error_message: 'webrtc plugin requires libgstwebrtcnice.') ++if webrtc_option.disabled() ++ subdir_done() + endif ++ ++gstwebrtc_plugin = library('gstwebrtc', ++ webrtc_sources, ++ c_args : gst_plugins_bad_args + ['-DGST_USE_UNSTABLE_API'], ++ include_directories : [configinc], ++ dependencies : [gstbase_dep, gstsdp_dep, ++ gstapp_dep, gstwebrtc_dep, gstsctp_dep, gstrtp_dep, gio_dep, libgstwebrtcnice_dep], ++ install : true, ++ install_dir : plugins_install_dir, ++) ++plugins += [gstwebrtc_plugin] ++ +diff --git a/ext/webrtc/transportreceivebin.c b/ext/webrtc/transportreceivebin.c +index 6d38a8325..8f8a44452 100644 +--- a/ext/webrtc/transportreceivebin.c ++++ b/ext/webrtc/transportreceivebin.c +@@ -23,26 +23,19 @@ + + #include "transportreceivebin.h" + #include "utils.h" ++#include "gst/webrtc/webrtc-priv.h" + + /* +- * ,----------------------------transport_receive_%u---------------------------, +- * ; (rtp/data) ; +- * ; ,-nicesrc-, ,-capsfilter-, ,--queue--, ,-dtlssrtpdec-, ,-funnel-, ; +- * ; ; src o-o sink src o-osink srco-osink rtp_srco-------o sink_0 ; ; +- * ; '---------' '------------' '---------' ; ; ; src o--o rtp_src +- * ; ; rtcp_srco---, ,-o sink_1 ; ; +- * ; ; ; ; ; '--------' ; +- * ; ; data_srco-, ; ; ,-funnel-, ; +- * ; (rtcp) '-------------' ; '-+-o sink_0 ; ; +- * ; ,-nicesrc-, ,-capsfilter-, ,--queue--, ,-dtlssrtpdec-, ; ,-' ; src o--o rtcp_src +- * ; ; src o-o sink src o-osink srco-osink rtp_srco-+-' ,-o sink_1 ; ; +- * ; '---------' '------------' '---------' ; ; ; ; '--------' ; +- * ; ; rtcp_srco-+---' ,-funnel-, ; +- * ; ; ; '-----o sink_0 ; ; +- * ; ; data_srco-, ; src o--o data_src +- * ; '-------------' '-----o sink_1 ; ; +- * ; '--------' ; +- * '---------------------------------------------------------------------------' ++ * ,-----------------------transport_receive_%u------------------, ++ * ; ; ++ * ; ,-nicesrc-, ,-capsfilter-, ,---queue---, ,-dtlssrtpdec-, ; ++ * ; ; src o-o sink src o-o sink src o-osink rtp_srco---o rtp_src ++ * ; '---------' '------------' '-----------' ; ; ; ++ * ; ; rtcp_srco---o rtcp_src ++ * ; ; ; ; ++ * ; ; data_srco---o data_src ++ * ; '-------------' ; ++ * '-------------------------------------------------------------' + * + * Do we really wnat to be *that* permissive in what we accept? + * +@@ -103,7 +96,7 @@ pad_block (GstPad * pad, GstPadProbeInfo * info, TransportReceiveBin * receive) + * them. 
Sticky events would be forwarded again later once we unblock + * and we don't want to forward them here already because that might + * cause a spurious GST_FLOW_FLUSHING */ +- if (GST_IS_EVENT (info->data)) ++ if (GST_IS_EVENT (info->data) || GST_IS_QUERY (info->data)) + return GST_PAD_PROBE_DROP; + + /* But block on any actual data-flow so we don't accidentally send that +@@ -119,14 +112,32 @@ void + transport_receive_bin_set_receive_state (TransportReceiveBin * receive, + ReceiveState state) + { ++ GstWebRTCICEConnectionState icestate; + + g_mutex_lock (&receive->pad_block_lock); + if (receive->receive_state != state) { +- GST_DEBUG_OBJECT (receive, "changing receive state to %s", ++ GST_DEBUG_OBJECT (receive, "Requested change of receive state to %s", + _receive_state_to_string (state)); + } + ++ receive->receive_state = state; ++ ++ g_object_get (receive->stream->transport->transport, "state", &icestate, ++ NULL); ++ if (state == RECEIVE_STATE_PASS) { ++ if (icestate == GST_WEBRTC_ICE_CONNECTION_STATE_CONNECTED || ++ icestate == GST_WEBRTC_ICE_CONNECTION_STATE_COMPLETED) { ++ GST_LOG_OBJECT (receive, "Unblocking nicesrc because ICE is connected."); ++ } else { ++ GST_LOG_OBJECT (receive, "Can't unblock nicesrc yet because ICE " ++ "is not connected, it is %d", icestate); ++ state = RECEIVE_STATE_BLOCK; ++ } ++ } ++ + if (state == RECEIVE_STATE_PASS) { ++ g_object_set (receive->queue, "leaky", 0, NULL); ++ + if (receive->rtp_block) + _free_pad_block (receive->rtp_block); + receive->rtp_block = NULL; +@@ -136,6 +147,7 @@ transport_receive_bin_set_receive_state (TransportReceiveBin * receive, + receive->rtcp_block = NULL; + } else { + g_assert (state == RECEIVE_STATE_BLOCK); ++ g_object_set (receive->queue, "leaky", 2, NULL); + if (receive->rtp_block == NULL) { + GstWebRTCDTLSTransport *transport; + GstElement *dtlssrtpdec; +@@ -155,28 +167,19 @@ transport_receive_bin_set_receive_state (TransportReceiveBin * receive, + (GstPadProbeCallback) pad_block, receive, NULL); + gst_object_unref (peer_pad); + gst_object_unref (pad); +- +- transport = receive->stream->rtcp_transport; +- dtlssrtpdec = transport->dtlssrtpdec; +- pad = gst_element_get_static_pad (dtlssrtpdec, "sink"); +- peer_pad = gst_pad_get_peer (pad); +- receive->rtcp_block = +- _create_pad_block (GST_ELEMENT (receive), peer_pad, 0, NULL, NULL); +- receive->rtcp_block->block_id = +- gst_pad_add_probe (peer_pad, +- GST_PAD_PROBE_TYPE_BLOCK | +- GST_PAD_PROBE_TYPE_DATA_DOWNSTREAM, +- (GstPadProbeCallback) pad_block, receive, NULL); +- gst_object_unref (peer_pad); +- gst_object_unref (pad); + } + } + } +- +- receive->receive_state = state; + g_mutex_unlock (&receive->pad_block_lock); + } + ++static void ++_on_notify_ice_connection_state (GstWebRTCICETransport * transport, ++ GParamSpec * pspec, TransportReceiveBin * receive) ++{ ++ transport_receive_bin_set_receive_state (receive, receive->receive_state); ++} ++ + static void + transport_receive_bin_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +@@ -250,9 +253,6 @@ transport_receive_bin_change_state (GstElement * element, + elem = receive->stream->transport->transport->src; + gst_element_set_locked_state (elem, TRUE); + gst_element_set_state (elem, GST_STATE_PLAYING); +- elem = receive->stream->rtcp_transport->transport->src; +- gst_element_set_locked_state (elem, TRUE); +- gst_element_set_state (elem, GST_STATE_PLAYING); + break; + } + default: +@@ -270,9 +270,6 @@ transport_receive_bin_change_state (GstElement * element, + elem = 
receive->stream->transport->transport->src; + gst_element_set_locked_state (elem, FALSE); + gst_element_set_state (elem, GST_STATE_NULL); +- elem = receive->stream->rtcp_transport->transport->src; +- gst_element_set_locked_state (elem, FALSE); +- gst_element_set_state (elem, GST_STATE_NULL); + + if (receive->rtp_block) + _free_pad_block (receive->rtp_block); +@@ -297,13 +294,25 @@ rtp_queue_overrun (GstElement * queue, TransportReceiveBin * receive) + GST_WARNING_OBJECT (receive, "Internal receive queue overrun. Dropping data"); + } + ++static GstPadProbeReturn ++drop_serialized_queries (GstPad * pad, GstPadProbeInfo * info, ++ TransportReceiveBin * receive) ++{ ++ GstQuery *query = GST_PAD_PROBE_INFO_QUERY (info); ++ ++ if (GST_QUERY_IS_SERIALIZED (query)) ++ return GST_PAD_PROBE_DROP; ++ else ++ return GST_PAD_PROBE_PASS; ++} ++ + static void + transport_receive_bin_constructed (GObject * object) + { + TransportReceiveBin *receive = TRANSPORT_RECEIVE_BIN (object); + GstWebRTCDTLSTransport *transport; + GstPad *ghost, *pad; +- GstElement *capsfilter, *funnel, *queue; ++ GstElement *capsfilter; + GstCaps *caps; + + g_return_if_fail (receive->stream); +@@ -317,46 +326,25 @@ transport_receive_bin_constructed (GObject * object) + g_object_set (capsfilter, "caps", caps, NULL); + gst_caps_unref (caps); + +- queue = gst_element_factory_make ("queue", NULL); ++ receive->queue = gst_element_factory_make ("queue", NULL); + /* FIXME: make this configurable? */ +- g_object_set (queue, "leaky", 2, "max-size-time", (guint64) 0, ++ g_object_set (receive->queue, "leaky", 2, "max-size-time", (guint64) 0, + "max-size-buffers", 0, "max-size-bytes", 5 * 1024 * 1024, NULL); +- g_signal_connect (queue, "overrun", G_CALLBACK (rtp_queue_overrun), receive); +- +- gst_bin_add (GST_BIN (receive), GST_ELEMENT (queue)); +- gst_bin_add (GST_BIN (receive), GST_ELEMENT (capsfilter)); +- if (!gst_element_link_pads (capsfilter, "src", queue, "sink")) +- g_warn_if_reached (); +- +- if (!gst_element_link_pads (queue, "src", transport->dtlssrtpdec, "sink")) +- g_warn_if_reached (); +- +- gst_bin_add (GST_BIN (receive), GST_ELEMENT (transport->transport->src)); +- if (!gst_element_link_pads (GST_ELEMENT (transport->transport->src), "src", +- GST_ELEMENT (capsfilter), "sink")) +- g_warn_if_reached (); ++ g_signal_connect (receive->queue, "overrun", G_CALLBACK (rtp_queue_overrun), ++ receive); + +- /* link ice src, dtlsrtp together for rtcp */ +- transport = receive->stream->rtcp_transport; +- gst_bin_add (GST_BIN (receive), GST_ELEMENT (transport->dtlssrtpdec)); +- +- capsfilter = gst_element_factory_make ("capsfilter", NULL); +- caps = gst_caps_new_empty_simple ("application/x-rtcp"); +- g_object_set (capsfilter, "caps", caps, NULL); +- gst_caps_unref (caps); +- +- queue = gst_element_factory_make ("queue", NULL); +- /* FIXME: make this configurable? 
*/ +- g_object_set (queue, "leaky", 2, "max-size-time", (guint64) 0, +- "max-size-buffers", 0, "max-size-bytes", 5 * 1024 * 1024, NULL); +- g_signal_connect (queue, "overrun", G_CALLBACK (rtp_queue_overrun), receive); ++ pad = gst_element_get_static_pad (receive->queue, "sink"); ++ gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, ++ (GstPadProbeCallback) drop_serialized_queries, receive, NULL); ++ gst_object_unref (pad); + +- gst_bin_add (GST_BIN (receive), queue); ++ gst_bin_add (GST_BIN (receive), GST_ELEMENT (receive->queue)); + gst_bin_add (GST_BIN (receive), GST_ELEMENT (capsfilter)); +- if (!gst_element_link_pads (capsfilter, "src", queue, "sink")) ++ if (!gst_element_link_pads (capsfilter, "src", receive->queue, "sink")) + g_warn_if_reached (); + +- if (!gst_element_link_pads (queue, "src", transport->dtlssrtpdec, "sink")) ++ if (!gst_element_link_pads (receive->queue, "src", transport->dtlssrtpdec, ++ "sink")) + g_warn_if_reached (); + + gst_bin_add (GST_BIN (receive), GST_ELEMENT (transport->transport->src)); +@@ -364,52 +352,32 @@ transport_receive_bin_constructed (GObject * object) + GST_ELEMENT (capsfilter), "sink")) + g_warn_if_reached (); + +- /* create funnel for rtp_src */ +- funnel = gst_element_factory_make ("funnel", NULL); +- gst_bin_add (GST_BIN (receive), funnel); +- if (!gst_element_link_pads (receive->stream->transport->dtlssrtpdec, +- "rtp_src", funnel, "sink_0")) +- g_warn_if_reached (); +- if (!gst_element_link_pads (receive->stream->rtcp_transport->dtlssrtpdec, +- "rtp_src", funnel, "sink_1")) +- g_warn_if_reached (); +- +- pad = gst_element_get_static_pad (funnel, "src"); ++ /* expose rtp_src */ ++ pad = ++ gst_element_get_static_pad (receive->stream->transport->dtlssrtpdec, ++ "rtp_src"); + receive->rtp_src = gst_ghost_pad_new ("rtp_src", pad); + + gst_element_add_pad (GST_ELEMENT (receive), receive->rtp_src); + gst_object_unref (pad); + +- /* create funnel for rtcp_src */ +- funnel = gst_element_factory_make ("funnel", NULL); +- gst_bin_add (GST_BIN (receive), funnel); +- if (!gst_element_link_pads (receive->stream->transport->dtlssrtpdec, +- "rtcp_src", funnel, "sink_0")) +- g_warn_if_reached (); +- if (!gst_element_link_pads (receive->stream->rtcp_transport->dtlssrtpdec, +- "rtcp_src", funnel, "sink_1")) +- g_warn_if_reached (); +- +- pad = gst_element_get_static_pad (funnel, "src"); ++ /* expose rtcp_rtc */ ++ pad = gst_element_get_static_pad (receive->stream->transport->dtlssrtpdec, ++ "rtcp_src"); + receive->rtcp_src = gst_ghost_pad_new ("rtcp_src", pad); + gst_element_add_pad (GST_ELEMENT (receive), receive->rtcp_src); + gst_object_unref (pad); + +- /* create funnel for data_src */ +- funnel = gst_element_factory_make ("funnel", NULL); +- gst_bin_add (GST_BIN (receive), funnel); +- if (!gst_element_link_pads (receive->stream->transport->dtlssrtpdec, +- "data_src", funnel, "sink_0")) +- g_warn_if_reached (); +- if (!gst_element_link_pads (receive->stream->rtcp_transport->dtlssrtpdec, +- "data_src", funnel, "sink_1")) +- g_warn_if_reached (); +- +- pad = gst_element_get_static_pad (funnel, "src"); ++ /* expose data_src */ ++ pad = gst_element_request_pad_simple (receive->stream->transport->dtlssrtpdec, ++ "data_src"); + ghost = gst_ghost_pad_new ("data_src", pad); + gst_element_add_pad (GST_ELEMENT (receive), ghost); + gst_object_unref (pad); + ++ g_signal_connect_after (receive->stream->transport->transport, ++ "notify::state", G_CALLBACK (_on_notify_ice_connection_state), receive); ++ + G_OBJECT_CLASS (parent_class)->constructed (object); + 
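The rebuilt receive bin above leans on a blocking pad probe: buffers are held back until ICE reports connected, while events and serialized queries are dropped, because sticky events get resent after unblocking and holding queries could cause a spurious GST_FLOW_FLUSHING. A stripped-down sketch of that pattern, with hypothetical function names, is:

    static GstPadProbeReturn
    hold_until_ready (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
    {
      /* sticky events are forwarded again once the pad is unblocked and
       * serialized queries may simply fail, so neither needs to be held */
      if (GST_IS_EVENT (info->data) || GST_IS_QUERY (info->data))
        return GST_PAD_PROBE_DROP;

      /* actual data stays blocked until the probe is removed */
      return GST_PAD_PROBE_OK;
    }

    static gulong
    block_pad (GstPad * pad)
    {
      return gst_pad_add_probe (pad,
          GST_PAD_PROBE_TYPE_BLOCK | GST_PAD_PROBE_TYPE_DATA_DOWNSTREAM,
          (GstPadProbeCallback) hold_until_ready, NULL, NULL);
    }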
} + +diff --git a/ext/webrtc/transportreceivebin.h b/ext/webrtc/transportreceivebin.h +index 50449e327..905628c64 100644 +--- a/ext/webrtc/transportreceivebin.h ++++ b/ext/webrtc/transportreceivebin.h +@@ -52,6 +52,7 @@ struct _TransportReceiveBin + struct pad_block *rtcp_block; + GMutex pad_block_lock; + ReceiveState receive_state; ++ GstElement *queue; + }; + + struct _TransportReceiveBinClass +diff --git a/ext/webrtc/transportsendbin.c b/ext/webrtc/transportsendbin.c +index dc5c1ff0e..ee2312e6a 100644 +--- a/ext/webrtc/transportsendbin.c ++++ b/ext/webrtc/transportsendbin.c +@@ -23,22 +23,19 @@ + + #include "transportsendbin.h" + #include "utils.h" ++#include "gst/webrtc/webrtc-priv.h" + + /* +- * ,------------------------transport_send_%u-------------------------, +- * ; ,-----dtlssrtpenc---, ; +- * data_sink o--------------------------o data_sink ; ; +- * ; ; ; ,---nicesink---, ; +- * rtp_sink o--------------------------o rtp_sink_0 src o--o sink ; ; +- * ; ; ; '--------------' ; +- * ; ,--outputselector--, ,-o rtcp_sink_0 ; ; +- * ; ; src_0 o-' '-------------------' ; +- * rtcp_sink ;---o sink ; ,----dtlssrtpenc----, ,---nicesink---, ; +- * ; ; src_1 o---o rtcp_sink_0 src o--o sink ; ; +- * ; '------------------' '-------------------' '--------------' ; +- * '------------------------------------------------------------------' ++ * ,--------------transport_send_%u-------- ---, ++ * ; ,-----dtlssrtpenc---, ; ++ * data_sink o---o data_sink ; ; ++ * ; ; ; ,---nicesink---, ; ++ * rtp_sink o---o rtp_sink_0 src o--o sink ; ; ++ * ; ; ; '--------------' ; ++ * rtcp_sink o---o rtcp_sink_0 ; ; ++ * ; '-------------------' ++ * '-------------------------------------------' + * +- * outputselecter is used to switch between rtcp-mux and no rtcp-mux + * + * FIXME: Do we need a valve drop=TRUE for the no RTCP case? + */ +@@ -73,7 +70,6 @@ enum + { + PROP_0, + PROP_STREAM, +- PROP_RTCP_MUX, + }; + + #define TSB_GET_LOCK(tsb) (&tsb->lock) +@@ -82,24 +78,6 @@ enum + + static void cleanup_blocks (TransportSendBin * send); + +-static void +-_set_rtcp_mux (TransportSendBin * send, gboolean rtcp_mux) +-{ +- GstPad *active_pad; +- +- if (rtcp_mux) +- active_pad = gst_element_get_static_pad (send->outputselector, "src_0"); +- else +- active_pad = gst_element_get_static_pad (send->outputselector, "src_1"); +- send->rtcp_mux = rtcp_mux; +- GST_OBJECT_UNLOCK (send); +- +- g_object_set (send->outputselector, "active-pad", active_pad, NULL); +- +- gst_object_unref (active_pad); +- GST_OBJECT_LOCK (send); +-} +- + static void + transport_send_bin_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +@@ -112,9 +90,6 @@ transport_send_bin_set_property (GObject * object, guint prop_id, + /* XXX: weak-ref this? 
Note, it's construct-only so can't be changed later */ + send->stream = TRANSPORT_STREAM (g_value_get_object (value)); + break; +- case PROP_RTCP_MUX: +- _set_rtcp_mux (send, g_value_get_boolean (value)); +- break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -133,9 +108,6 @@ transport_send_bin_get_property (GObject * object, guint prop_id, + case PROP_STREAM: + g_value_set_object (value, send->stream); + break; +- case PROP_RTCP_MUX: +- g_value_set_boolean (value, send->rtcp_mux); +- break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -200,9 +172,9 @@ transport_send_bin_change_state (GstElement * element, + * arguably the element should be able to deal with this itself or + * we should only add it once/if we get the encoding keys */ + TSB_LOCK (send); +- gst_element_set_locked_state (send->rtp_ctx.dtlssrtpenc, TRUE); +- gst_element_set_locked_state (send->rtcp_ctx.dtlssrtpenc, TRUE); ++ gst_element_set_locked_state (send->dtlssrtpenc, TRUE); + send->active = TRUE; ++ send->has_clientness = FALSE; + TSB_UNLOCK (send); + break; + } +@@ -213,20 +185,12 @@ transport_send_bin_change_state (GstElement * element, + /* RTP */ + /* unblock the encoder once the key is set, this should also be automatic */ + elem = send->stream->transport->dtlssrtpenc; +- send->rtp_ctx.rtp_block = block_peer_pad (elem, "rtp_sink_0"); ++ send->rtp_block = block_peer_pad (elem, "rtp_sink_0"); + /* Also block the RTCP pad on the RTP encoder, in case we mux RTCP */ +- send->rtp_ctx.rtcp_block = block_peer_pad (elem, "rtcp_sink_0"); ++ send->rtcp_block = block_peer_pad (elem, "rtcp_sink_0"); + /* unblock ice sink once a connection is made, this should also be automatic */ + elem = send->stream->transport->transport->sink; +- send->rtp_ctx.nice_block = block_peer_pad (elem, "sink"); + +- /* RTCP */ +- elem = send->stream->rtcp_transport->dtlssrtpenc; +- /* Block the RTCP DTLS encoder */ +- send->rtcp_ctx.rtcp_block = block_peer_pad (elem, "rtcp_sink_0"); +- /* unblock ice sink once a connection is made, this should also be automatic */ +- elem = send->stream->rtcp_transport->transport->sink; +- send->rtcp_ctx.nice_block = block_peer_pad (elem, "sink"); + TSB_UNLOCK (send); + break; + } +@@ -256,8 +220,7 @@ transport_send_bin_change_state (GstElement * element, + send->active = FALSE; + cleanup_blocks (send); + +- gst_element_set_locked_state (send->rtp_ctx.dtlssrtpenc, FALSE); +- gst_element_set_locked_state (send->rtcp_ctx.dtlssrtpenc, FALSE); ++ gst_element_set_locked_state (send->dtlssrtpenc, FALSE); + TSB_UNLOCK (send); + + break; +@@ -272,13 +235,7 @@ transport_send_bin_change_state (GstElement * element, + static void + _on_dtls_enc_key_set (GstElement * dtlssrtpenc, TransportSendBin * send) + { +- TransportSendBinDTLSContext *ctx; +- +- if (dtlssrtpenc == send->rtp_ctx.dtlssrtpenc) +- ctx = &send->rtp_ctx; +- else if (dtlssrtpenc == send->rtcp_ctx.dtlssrtpenc) +- ctx = &send->rtcp_ctx; +- else { ++ if (dtlssrtpenc != send->dtlssrtpenc) { + GST_WARNING_OBJECT (send, + "Received dtls-enc key info for unknown element %" GST_PTR_FORMAT, + dtlssrtpenc); +@@ -293,24 +250,40 @@ _on_dtls_enc_key_set (GstElement * dtlssrtpenc, TransportSendBin * send) + } + + GST_LOG_OBJECT (send, "Unblocking %" GST_PTR_FORMAT " pads", dtlssrtpenc); +- _free_pad_block (ctx->rtp_block); +- _free_pad_block (ctx->rtcp_block); +- ctx->rtp_block = ctx->rtcp_block = NULL; ++ _free_pad_block (send->rtp_block); ++ _free_pad_block (send->rtcp_block); ++ send->rtp_block 
= send->rtcp_block = NULL; + + done: + TSB_UNLOCK (send); + } + ++static void ++maybe_start_enc (TransportSendBin * send) ++{ ++ GstWebRTCICEConnectionState state; ++ ++ if (!send->has_clientness) { ++ GST_LOG_OBJECT (send, "Can't start DTLS because doesn't know client-ness"); ++ return; ++ } ++ ++ g_object_get (send->stream->transport->transport, "state", &state, NULL); ++ if (state != GST_WEBRTC_ICE_CONNECTION_STATE_CONNECTED && ++ state != GST_WEBRTC_ICE_CONNECTION_STATE_COMPLETED) { ++ GST_LOG_OBJECT (send, "Can't start DTLS yet because ICE is not connected."); ++ return; ++ } ++ ++ gst_element_set_locked_state (send->dtlssrtpenc, FALSE); ++ gst_element_sync_state_with_parent (send->dtlssrtpenc); ++} ++ + static void + _on_notify_dtls_client_status (GstElement * dtlssrtpenc, + GParamSpec * pspec, TransportSendBin * send) + { +- TransportSendBinDTLSContext *ctx; +- if (dtlssrtpenc == send->rtp_ctx.dtlssrtpenc) +- ctx = &send->rtp_ctx; +- else if (dtlssrtpenc == send->rtcp_ctx.dtlssrtpenc) +- ctx = &send->rtcp_ctx; +- else { ++ if (dtlssrtpenc != send->dtlssrtpenc) { + GST_WARNING_OBJECT (send, + "Received dtls-enc client mode for unknown element %" GST_PTR_FORMAT, + dtlssrtpenc); +@@ -324,11 +297,12 @@ _on_notify_dtls_client_status (GstElement * dtlssrtpenc, + goto done; + } + ++ send->has_clientness = TRUE; + GST_DEBUG_OBJECT (send, +- "DTLS-SRTP encoder configured. Unlocking it and changing state %" +- GST_PTR_FORMAT, ctx->dtlssrtpenc); +- gst_element_set_locked_state (ctx->dtlssrtpenc, FALSE); +- gst_element_sync_state_with_parent (ctx->dtlssrtpenc); ++ "DTLS-SRTP encoder configured. Unlocking it and maybe changing state %" ++ GST_PTR_FORMAT, dtlssrtpenc); ++ maybe_start_enc (send); ++ + done: + TSB_UNLOCK (send); + } +@@ -337,116 +311,62 @@ static void + _on_notify_ice_connection_state (GstWebRTCICETransport * transport, + GParamSpec * pspec, TransportSendBin * send) + { +- GstWebRTCICEConnectionState state; +- +- g_object_get (transport, "state", &state, NULL); +- +- if (state == GST_WEBRTC_ICE_CONNECTION_STATE_CONNECTED || +- state == GST_WEBRTC_ICE_CONNECTION_STATE_COMPLETED) { +- TSB_LOCK (send); +- if (transport == send->stream->transport->transport) { +- if (send->rtp_ctx.nice_block) { +- GST_LOG_OBJECT (send, "Unblocking pad %" GST_PTR_FORMAT, +- send->rtp_ctx.nice_block->pad); +- _free_pad_block (send->rtp_ctx.nice_block); +- send->rtp_ctx.nice_block = NULL; +- } +- } else if (transport == send->stream->rtcp_transport->transport) { +- if (send->rtcp_ctx.nice_block) { +- GST_LOG_OBJECT (send, "Unblocking pad %" GST_PTR_FORMAT, +- send->rtcp_ctx.nice_block->pad); +- _free_pad_block (send->rtcp_ctx.nice_block); +- send->rtcp_ctx.nice_block = NULL; +- } +- } +- TSB_UNLOCK (send); +- } +-} +- +-static void +-tsb_setup_ctx (TransportSendBin * send, TransportSendBinDTLSContext * ctx, +- GstWebRTCDTLSTransport * transport) +-{ +- GstElement *dtlssrtpenc, *nicesink; +- +- dtlssrtpenc = ctx->dtlssrtpenc = transport->dtlssrtpenc; +- nicesink = ctx->nicesink = transport->transport->sink; +- +- /* unblock the encoder once the key is set */ +- g_signal_connect (dtlssrtpenc, "on-key-set", +- G_CALLBACK (_on_dtls_enc_key_set), send); +- /* Bring the encoder up to current state only once the is-client prop is set */ +- g_signal_connect (dtlssrtpenc, "notify::is-client", +- G_CALLBACK (_on_notify_dtls_client_status), send); +- gst_bin_add (GST_BIN (send), GST_ELEMENT (dtlssrtpenc)); +- +- /* unblock ice sink once it signals a connection */ +- g_signal_connect (transport->transport, 
"notify::state", +- G_CALLBACK (_on_notify_ice_connection_state), send); +- gst_bin_add (GST_BIN (send), GST_ELEMENT (nicesink)); +- +- if (!gst_element_link_pads (GST_ELEMENT (dtlssrtpenc), "src", nicesink, +- "sink")) +- g_warn_if_reached (); ++ TSB_LOCK (send); ++ maybe_start_enc (send); ++ TSB_UNLOCK (send); + } + + static void + transport_send_bin_constructed (GObject * object) + { + TransportSendBin *send = TRANSPORT_SEND_BIN (object); +- GstWebRTCDTLSTransport *transport; + GstPadTemplate *templ; + GstPad *ghost, *pad; + + g_return_if_fail (send->stream); + +- g_object_bind_property (send, "rtcp-mux", send->stream, "rtcp-mux", +- G_BINDING_BIDIRECTIONAL); ++ send->dtlssrtpenc = send->stream->transport->dtlssrtpenc; ++ send->nicesink = send->stream->transport->transport->sink; + +- /* Output selector to direct the RTCP for muxed-mode */ +- send->outputselector = gst_element_factory_make ("output-selector", NULL); +- gst_bin_add (GST_BIN (send), send->outputselector); +- +- /* RTP */ +- transport = send->stream->transport; +- /* Do the common init for the context struct */ +- tsb_setup_ctx (send, &send->rtp_ctx, transport); ++ /* unblock the encoder once the key is set */ ++ g_signal_connect (send->dtlssrtpenc, "on-key-set", ++ G_CALLBACK (_on_dtls_enc_key_set), send); ++ /* Bring the encoder up to current state only once the is-client prop is set */ ++ g_signal_connect (send->dtlssrtpenc, "notify::is-client", ++ G_CALLBACK (_on_notify_dtls_client_status), send); ++ /* unblock ice sink once it signals a connection */ ++ g_signal_connect (send->stream->transport->transport, "notify::state", ++ G_CALLBACK (_on_notify_ice_connection_state), send); + +- templ = _find_pad_template (transport->dtlssrtpenc, +- GST_PAD_SINK, GST_PAD_REQUEST, "rtp_sink_%d"); +- pad = gst_element_request_pad (transport->dtlssrtpenc, templ, "rtp_sink_0", +- NULL); ++ gst_bin_add (GST_BIN (send), GST_ELEMENT (send->dtlssrtpenc)); ++ gst_bin_add (GST_BIN (send), GST_ELEMENT (send->nicesink)); + +- if (!gst_element_link_pads (GST_ELEMENT (send->outputselector), "src_0", +- GST_ELEMENT (transport->dtlssrtpenc), "rtcp_sink_0")) ++ if (!gst_element_link_pads (GST_ELEMENT (send->dtlssrtpenc), "src", ++ send->nicesink, "sink")) + g_warn_if_reached (); + ++ templ = _find_pad_template (send->dtlssrtpenc, GST_PAD_SINK, GST_PAD_REQUEST, ++ "rtp_sink_%d"); ++ pad = gst_element_request_pad (send->dtlssrtpenc, templ, "rtp_sink_0", NULL); ++ + ghost = gst_ghost_pad_new ("rtp_sink", pad); + gst_element_add_pad (GST_ELEMENT (send), ghost); + gst_object_unref (pad); + + /* push the data stream onto the RTP dtls element */ +- templ = _find_pad_template (transport->dtlssrtpenc, +- GST_PAD_SINK, GST_PAD_REQUEST, "data_sink"); +- pad = gst_element_request_pad (transport->dtlssrtpenc, templ, "data_sink", +- NULL); ++ templ = _find_pad_template (send->dtlssrtpenc, GST_PAD_SINK, GST_PAD_REQUEST, ++ "data_sink"); ++ pad = gst_element_request_pad (send->dtlssrtpenc, templ, "data_sink", NULL); + + ghost = gst_ghost_pad_new ("data_sink", pad); + gst_element_add_pad (GST_ELEMENT (send), ghost); + gst_object_unref (pad); + + /* RTCP */ +- transport = send->stream->rtcp_transport; + /* Do the common init for the context struct */ +- tsb_setup_ctx (send, &send->rtcp_ctx, transport); +- templ = _find_pad_template (transport->dtlssrtpenc, +- GST_PAD_SINK, GST_PAD_REQUEST, "rtcp_sink_%d"); +- +- if (!gst_element_link_pads (GST_ELEMENT (send->outputselector), "src_1", +- GST_ELEMENT (transport->dtlssrtpenc), "rtcp_sink_0")) +- g_warn_if_reached (); 
+- +- pad = gst_element_get_static_pad (send->outputselector, "sink"); ++ templ = _find_pad_template (send->dtlssrtpenc, GST_PAD_SINK, GST_PAD_REQUEST, ++ "rtcp_sink_%d"); ++ pad = gst_element_request_pad (send->dtlssrtpenc, templ, "rtcp_sink_0", NULL); + + ghost = gst_ghost_pad_new ("rtcp_sink", pad); + gst_element_add_pad (GST_ELEMENT (send), ghost); +@@ -456,45 +376,30 @@ transport_send_bin_constructed (GObject * object) + } + + static void +-cleanup_ctx_blocks (TransportSendBinDTLSContext * ctx) ++cleanup_blocks (TransportSendBin * send) + { +- if (ctx->rtp_block) { +- _free_pad_block (ctx->rtp_block); +- ctx->rtp_block = NULL; +- } +- +- if (ctx->rtcp_block) { +- _free_pad_block (ctx->rtcp_block); +- ctx->rtcp_block = NULL; ++ if (send->rtp_block) { ++ _free_pad_block (send->rtp_block); ++ send->rtp_block = NULL; + } + +- if (ctx->nice_block) { +- _free_pad_block (ctx->nice_block); +- ctx->nice_block = NULL; ++ if (send->rtcp_block) { ++ _free_pad_block (send->rtcp_block); ++ send->rtcp_block = NULL; + } + } + +-static void +-cleanup_blocks (TransportSendBin * send) +-{ +- cleanup_ctx_blocks (&send->rtp_ctx); +- cleanup_ctx_blocks (&send->rtcp_ctx); +-} +- + static void + transport_send_bin_dispose (GObject * object) + { + TransportSendBin *send = TRANSPORT_SEND_BIN (object); + + TSB_LOCK (send); +- if (send->rtp_ctx.nicesink) { +- g_signal_handlers_disconnect_by_data (send->rtp_ctx.nicesink, send); +- send->rtp_ctx.nicesink = NULL; +- } +- if (send->rtcp_ctx.nicesink) { +- g_signal_handlers_disconnect_by_data (send->rtcp_ctx.nicesink, send); +- send->rtcp_ctx.nicesink = NULL; ++ if (send->nicesink) { ++ g_signal_handlers_disconnect_by_data (send->nicesink, send); ++ send->nicesink = NULL; + } ++ + cleanup_blocks (send); + + TSB_UNLOCK (send); +@@ -623,12 +528,6 @@ transport_send_bin_class_init (TransportSendBinClass * klass) + "The TransportStream for this sending bin", + transport_stream_get_type (), + G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS)); +- +- g_object_class_install_property (gobject_class, +- PROP_RTCP_MUX, +- g_param_spec_boolean ("rtcp-mux", "RTCP Mux", +- "Whether RTCP packets are muxed with RTP packets", +- FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + } + + static void +diff --git a/ext/webrtc/transportsendbin.h b/ext/webrtc/transportsendbin.h +index ed10a0723..5266c8ce9 100644 +--- a/ext/webrtc/transportsendbin.h ++++ b/ext/webrtc/transportsendbin.h +@@ -34,18 +34,6 @@ GType transport_send_bin_get_type(void); + + typedef struct _TransportSendBinDTLSContext TransportSendBinDTLSContext; + +-struct _TransportSendBinDTLSContext { +- GstElement *dtlssrtpenc; +- GstElement *nicesink; +- +- /* Block on the dtlssrtpenc RTP sink pad, if any */ +- struct pad_block *rtp_block; +- /* Block on the dtlssrtpenc RTCP sink pad, if any */ +- struct pad_block *rtcp_block; +- /* Block on the nicesink sink pad, if any */ +- struct pad_block *nice_block; +-}; +- + struct _TransportSendBin + { + GstBin parent; +@@ -54,21 +42,16 @@ struct _TransportSendBin + gboolean active; /* Flag that's cleared on shutdown */ + + TransportStream *stream; /* parent transport stream */ +- gboolean rtcp_mux; + +- GstElement *outputselector; ++ GstElement *dtlssrtpenc; ++ GstElement *nicesink; + +- TransportSendBinDTLSContext rtp_ctx; +- TransportSendBinDTLSContext rtcp_ctx; ++ gboolean has_clientness; + +- /* ++ /* Block on the dtlssrtpenc RTP sink pad, if any */ + struct pad_block *rtp_block; +- struct pad_block *rtcp_mux_block; +- struct pad_block *rtp_nice_block; +- ++ 
/* Block on the dtlssrtpenc RTCP sink pad, if any */ + struct pad_block *rtcp_block; +- struct pad_block *rtcp_nice_block; +- */ + }; + + struct _TransportSendBinClass +diff --git a/ext/webrtc/transportstream.c b/ext/webrtc/transportstream.c +index 01261ae1b..f1811a025 100644 +--- a/ext/webrtc/transportstream.c ++++ b/ext/webrtc/transportstream.c +@@ -24,19 +24,23 @@ + #include "transportstream.h" + #include "transportsendbin.h" + #include "transportreceivebin.h" +-#include "gstwebrtcice.h" + #include "gstwebrtcbin.h" + #include "utils.h" ++#include "gst/webrtc/webrtc-priv.h" ++ ++#define GST_CAT_DEFAULT transport_stream_debug ++GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); + + #define transport_stream_parent_class parent_class +-G_DEFINE_TYPE (TransportStream, transport_stream, GST_TYPE_OBJECT); ++G_DEFINE_TYPE_WITH_CODE (TransportStream, transport_stream, GST_TYPE_OBJECT, ++ GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "webrtctransportstream", 0, ++ "webrtctransportstream");); + + enum + { + PROP_0, + PROP_WEBRTC, + PROP_SESSION_ID, +- PROP_RTCP_MUX, + PROP_DTLS_CLIENT, + }; + +@@ -55,13 +59,18 @@ transport_stream_get_caps_for_pt (TransportStream * stream, guint pt) + } + + int +-transport_stream_get_pt (TransportStream * stream, const gchar * encoding_name) ++transport_stream_get_pt (TransportStream * stream, const gchar * encoding_name, ++ guint media_idx) + { + guint i; +- gint ret = 0; ++ gint ret = -1; + + for (i = 0; i < stream->ptmap->len; i++) { + PtMapItem *item = &g_array_index (stream->ptmap, PtMapItem, i); ++ ++ if (media_idx != -1 && media_idx != item->media_idx) ++ continue; ++ + if (!gst_caps_is_empty (item->caps)) { + GstStructure *s = gst_caps_get_structure (item->caps, 0); + if (!g_strcmp0 (gst_structure_get_string (s, "encoding-name"), +@@ -124,9 +133,6 @@ transport_stream_set_property (GObject * object, guint prop_id, + case PROP_SESSION_ID: + stream->session_id = g_value_get_uint (value); + break; +- case PROP_RTCP_MUX: +- stream->rtcp_mux = g_value_get_boolean (value); +- break; + case PROP_DTLS_CLIENT: + stream->dtls_client = g_value_get_boolean (value); + break; +@@ -148,9 +154,6 @@ transport_stream_get_property (GObject * object, guint prop_id, + case PROP_SESSION_ID: + g_value_set_uint (value, stream->session_id); + break; +- case PROP_RTCP_MUX: +- g_value_set_boolean (value, stream->rtcp_mux); +- break; + case PROP_DTLS_CLIENT: + g_value_set_boolean (value, stream->dtls_client); + break; +@@ -166,29 +169,14 @@ transport_stream_dispose (GObject * object) + { + TransportStream *stream = TRANSPORT_STREAM (object); + +- if (stream->send_bin) +- gst_object_unref (stream->send_bin); +- stream->send_bin = NULL; +- +- if (stream->receive_bin) +- gst_object_unref (stream->receive_bin); +- stream->receive_bin = NULL; +- +- if (stream->transport) +- gst_object_unref (stream->transport); +- stream->transport = NULL; +- +- if (stream->rtcp_transport) +- gst_object_unref (stream->rtcp_transport); +- stream->rtcp_transport = NULL; +- +- if (stream->rtxsend) +- gst_object_unref (stream->rtxsend); +- stream->rtxsend = NULL; +- +- if (stream->rtxreceive) +- gst_object_unref (stream->rtxreceive); +- stream->rtxreceive = NULL; ++ gst_clear_object (&stream->send_bin); ++ gst_clear_object (&stream->receive_bin); ++ gst_clear_object (&stream->transport); ++ gst_clear_object (&stream->rtxsend); ++ gst_clear_object (&stream->rtxreceive); ++ gst_clear_object (&stream->reddec); ++ g_list_free_full (stream->fecdecs, (GDestroyNotify) gst_object_unref); ++ stream->fecdecs = NULL; + + 
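Note the changed contract of transport_stream_get_pt() above: it now also filters on the m-line index and returns -1 instead of 0 when nothing matches, so callers have to check for failure explicitly. A hedged sketch of the calling pattern ("RTX" and the surrounding variables are illustrative):

    int pt = transport_stream_get_pt (stream, "RTX", media_idx);

    if (pt < 0) {
      GST_DEBUG_OBJECT (stream, "no RTX payload type on m-line %u", media_idx);
    } else {
      /* pt is a valid payload type for this m-line */
    }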
GST_OBJECT_PARENT (object) = NULL; + +@@ -201,7 +189,12 @@ transport_stream_finalize (GObject * object) + TransportStream *stream = TRANSPORT_STREAM (object); + + g_array_free (stream->ptmap, TRUE); +- g_array_free (stream->remote_ssrcmap, TRUE); ++ g_ptr_array_free (stream->ssrcmap, TRUE); ++ ++ gst_clear_object (&stream->rtxsend_stream_id); ++ gst_clear_object (&stream->rtxsend_repaired_stream_id); ++ gst_clear_object (&stream->rtxreceive_stream_id); ++ gst_clear_object (&stream->rtxreceive_repaired_stream_id); + + G_OBJECT_CLASS (parent_class)->finalize (object); + } +@@ -213,19 +206,12 @@ transport_stream_constructed (GObject * object) + GstWebRTCBin *webrtc; + GstWebRTCICETransport *ice_trans; + +- stream->transport = gst_webrtc_dtls_transport_new (stream->session_id, FALSE); +- stream->rtcp_transport = +- gst_webrtc_dtls_transport_new (stream->session_id, TRUE); ++ stream->transport = gst_webrtc_dtls_transport_new (stream->session_id); + + webrtc = GST_WEBRTC_BIN (gst_object_get_parent (GST_OBJECT (object))); + + g_object_bind_property (stream->transport, "client", stream, "dtls-client", + G_BINDING_BIDIRECTIONAL); +- g_object_bind_property (stream->rtcp_transport, "client", stream, +- "dtls-client", G_BINDING_BIDIRECTIONAL); +- +- g_object_bind_property (stream->transport, "certificate", +- stream->rtcp_transport, "certificate", G_BINDING_BIDIRECTIONAL); + + /* Need to go full Java and have a transport manager? + * Or make the caller set the ICE transport up? */ +@@ -242,12 +228,6 @@ transport_stream_constructed (GObject * object) + gst_webrtc_dtls_transport_set_transport (stream->transport, ice_trans); + gst_object_unref (ice_trans); + +- ice_trans = +- gst_webrtc_ice_find_transport (webrtc->priv->ice, stream->stream, +- GST_WEBRTC_ICE_COMPONENT_RTCP); +- gst_webrtc_dtls_transport_set_transport (stream->rtcp_transport, ice_trans); +- gst_object_unref (ice_trans); +- + stream->send_bin = g_object_new (transport_send_bin_get_type (), "stream", + stream, NULL); + gst_object_ref_sink (stream->send_bin); +@@ -287,12 +267,6 @@ transport_stream_class_init (TransportStreamClass * klass) + 0, G_MAXUINT, 0, + G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS)); + +- g_object_class_install_property (gobject_class, +- PROP_RTCP_MUX, +- g_param_spec_boolean ("rtcp-mux", "RTCP Mux", +- "Whether RTCP packets are muxed with RTP packets", +- FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); +- + g_object_class_install_property (gobject_class, + PROP_DTLS_CLIENT, + g_param_spec_boolean ("dtls-client", "DTLS client", +@@ -307,12 +281,96 @@ clear_ptmap_item (PtMapItem * item) + gst_caps_unref (item->caps); + } + ++static SsrcMapItem * ++ssrcmap_item_new (GstWebRTCRTPTransceiverDirection direction, guint32 ssrc, ++ guint media_idx) ++{ ++ SsrcMapItem *ssrc_item = g_new0 (SsrcMapItem, 1); ++ ++ ssrc_item->direction = direction; ++ ssrc_item->media_idx = media_idx; ++ ssrc_item->ssrc = ssrc; ++ g_weak_ref_init (&ssrc_item->rtpjitterbuffer, NULL); ++ ++ return ssrc_item; ++} ++ ++static void ++ssrcmap_item_free (SsrcMapItem * item) ++{ ++ g_weak_ref_clear (&item->rtpjitterbuffer); ++ g_clear_pointer (&item->mid, g_free); ++ g_clear_pointer (&item->rid, g_free); ++ g_free (item); ++} ++ ++SsrcMapItem * ++transport_stream_find_ssrc_map_item (TransportStream * stream, ++ gconstpointer data, FindSsrcMapFunc func) ++{ ++ int i; ++ ++ for (i = 0; i < stream->ssrcmap->len; i++) { ++ SsrcMapItem *item = g_ptr_array_index (stream->ssrcmap, i); ++ ++ if (func (item, data)) ++ return item; ++ } 
++ ++ return NULL; ++} ++ ++void ++transport_stream_filter_ssrc_map_item (TransportStream * stream, ++ gconstpointer data, FindSsrcMapFunc func) ++{ ++ int i; ++ ++ for (i = 0; i < stream->ssrcmap->len;) { ++ SsrcMapItem *item = g_ptr_array_index (stream->ssrcmap, i); ++ ++ if (!func (item, data)) { ++ GST_TRACE_OBJECT (stream, "removing ssrc %u", item->ssrc); ++ g_ptr_array_remove_index_fast (stream->ssrcmap, i); ++ } else { ++ i++; ++ } ++ } ++} ++ ++SsrcMapItem * ++transport_stream_add_ssrc_map_item (TransportStream * stream, ++ GstWebRTCRTPTransceiverDirection direction, guint32 ssrc, guint media_idx) ++{ ++ SsrcMapItem *ret = NULL; ++ ++ g_return_val_if_fail (direction == ++ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY ++ || direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY, NULL); ++ g_return_val_if_fail (ssrc != 0, NULL); ++ ++ GST_INFO_OBJECT (stream, "Adding mapping for rtp session %u media_idx %u " ++ "direction %s ssrc %u", stream->session_id, media_idx, ++ gst_webrtc_rtp_transceiver_direction_to_string (direction), ssrc); ++ ++ /* XXX: duplicates? */ ++ ret = ssrcmap_item_new (direction, ssrc, media_idx); ++ ++ g_ptr_array_add (stream->ssrcmap, ret); ++ ++ return ret; ++} ++ + static void + transport_stream_init (TransportStream * stream) + { + stream->ptmap = g_array_new (FALSE, TRUE, sizeof (PtMapItem)); + g_array_set_clear_func (stream->ptmap, (GDestroyNotify) clear_ptmap_item); +- stream->remote_ssrcmap = g_array_new (FALSE, TRUE, sizeof (SsrcMapItem)); ++ stream->ssrcmap = g_ptr_array_new_with_free_func ( ++ (GDestroyNotify) ssrcmap_item_free); ++ ++ stream->rtphdrext_id_stream_id = -1; ++ stream->rtphdrext_id_repaired_stream_id = -1; + } + + TransportStream * +diff --git a/ext/webrtc/transportstream.h b/ext/webrtc/transportstream.h +index 174d93e90..de46009bf 100644 +--- a/ext/webrtc/transportstream.h ++++ b/ext/webrtc/transportstream.h +@@ -21,6 +21,7 @@ + #define __TRANSPORT_STREAM_H__ + + #include "fwd.h" ++#include + #include + + G_BEGIN_DECLS +@@ -34,13 +35,18 @@ GType transport_stream_get_type(void); + typedef struct + { + guint8 pt; ++ guint media_idx; + GstCaps *caps; + } PtMapItem; + + typedef struct + { ++ GstWebRTCRTPTransceiverDirection direction; + guint32 ssrc; + guint media_idx; ++ char *mid; ++ char *rid; ++ GWeakRef rtpjitterbuffer; /* for stats */ + } SsrcMapItem; + + struct _TransportStream +@@ -48,9 +54,6 @@ struct _TransportStream + GstObject parent; + + guint session_id; /* session_id */ +- gboolean rtcp; +- gboolean rtcp_mux; +- gboolean rtcp_rsize; + gboolean dtls_client; + gboolean active; /* TRUE if any mline in the bundle/transport is active */ + TransportSendBin *send_bin; /* bin containing all the sending transport elements */ +@@ -58,14 +61,22 @@ struct _TransportStream + GstWebRTCICEStream *stream; + + GstWebRTCDTLSTransport *transport; +- GstWebRTCDTLSTransport *rtcp_transport; + + GArray *ptmap; /* array of PtMapItem's */ +- GArray *remote_ssrcmap; /* array of SsrcMapItem's */ ++ GPtrArray *ssrcmap; /* array of SsrcMapItem's */ + gboolean output_connected; /* whether receive bin is connected to rtpbin */ + ++ guint rtphdrext_id_stream_id; ++ guint rtphdrext_id_repaired_stream_id; + GstElement *rtxsend; ++ GstRTPHeaderExtension *rtxsend_stream_id; ++ GstRTPHeaderExtension *rtxsend_repaired_stream_id; + GstElement *rtxreceive; ++ GstRTPHeaderExtension *rtxreceive_stream_id; ++ GstRTPHeaderExtension *rtxreceive_repaired_stream_id; ++ ++ GstElement *reddec; ++ GList *fecdecs; + }; + + struct _TransportStreamClass +@@ -76,13 +87,29 @@ 
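The remote_ssrcmap GArray is replaced above by a GPtrArray of heap-allocated SsrcMapItem entries that are queried through caller-supplied FindSsrcMapFunc matchers. A small usage sketch (match_ssrc and the local variables are illustrative; the patch's real callers match on ssrc, mid/rid or media_idx):

    static gboolean
    match_ssrc (SsrcMapItem * item, gconstpointer data)
    {
      return item->ssrc == GPOINTER_TO_UINT (data);
    }

    /* with TransportStream *stream, guint32 ssrc, guint media_idx in scope */
    SsrcMapItem *item;

    transport_stream_add_ssrc_map_item (stream,
        GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY, ssrc, media_idx);

    item = transport_stream_find_ssrc_map_item (stream,
        GUINT_TO_POINTER (ssrc), match_ssrc);
    if (item == NULL)
      GST_DEBUG ("no mapping for ssrc %u", ssrc);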
struct _TransportStreamClass + TransportStream * transport_stream_new (GstWebRTCBin * webrtc, + guint session_id); + int transport_stream_get_pt (TransportStream * stream, +- const gchar * encoding_name); ++ const gchar * encoding_name, ++ guint media_idx); + int * transport_stream_get_all_pt (TransportStream * stream, + const gchar * encoding_name, + gsize * pt_len); + GstCaps * transport_stream_get_caps_for_pt (TransportStream * stream, + guint pt); + ++typedef gboolean (*FindSsrcMapFunc) (SsrcMapItem * e1, gconstpointer data); ++ ++SsrcMapItem * transport_stream_find_ssrc_map_item (TransportStream * stream, ++ gconstpointer data, ++ FindSsrcMapFunc func); ++ ++void transport_stream_filter_ssrc_map_item (TransportStream * stream, ++ gconstpointer data, ++ FindSsrcMapFunc func); ++ ++SsrcMapItem * transport_stream_add_ssrc_map_item (TransportStream * stream, ++ GstWebRTCRTPTransceiverDirection direction, ++ guint32 ssrc, ++ guint media_idx); ++ + G_END_DECLS + + #endif /* __TRANSPORT_STREAM_H__ */ +diff --git a/ext/webrtc/utils.c b/ext/webrtc/utils.c +index 044d58322..f0741d1e5 100644 +--- a/ext/webrtc/utils.c ++++ b/ext/webrtc/utils.c +@@ -26,12 +26,6 @@ + #include "utils.h" + #include "gstwebrtcbin.h" + +-GQuark +-gst_webrtc_bin_error_quark (void) +-{ +- return g_quark_from_static_string ("gst-webrtc-bin-error-quark"); +-} +- + GstPadTemplate * + _find_pad_template (GstElement * element, GstPadDirection direction, + GstPadPresence presence, const gchar * name) +@@ -138,18 +132,18 @@ _free_pad_block (struct pad_block *block) + g_free (block); + } + +-gchar * ++const gchar * + _enum_value_to_string (GType type, guint value) + { + GEnumClass *enum_class; + GEnumValue *enum_value; +- gchar *str = NULL; ++ const gchar *str = NULL; + + enum_class = g_type_class_ref (type); + enum_value = g_enum_get_value (enum_class, value); + + if (enum_value) +- str = g_strdup (enum_value->value_nick); ++ str = enum_value->value_nick; + + g_type_class_unref (enum_class); + +@@ -205,3 +199,52 @@ _rtp_caps_from_media (const GstSDPMedia * media) + + return ret; + } ++ ++GstWebRTCKind ++webrtc_kind_from_caps (const GstCaps * caps) ++{ ++ GstStructure *s; ++ const gchar *media; ++ ++ if (!caps || gst_caps_get_size (caps) == 0) ++ return GST_WEBRTC_KIND_UNKNOWN; ++ ++ s = gst_caps_get_structure (caps, 0); ++ ++ media = gst_structure_get_string (s, "media"); ++ if (media == NULL) ++ return GST_WEBRTC_KIND_UNKNOWN; ++ ++ if (!g_strcmp0 (media, "audio")) ++ return GST_WEBRTC_KIND_AUDIO; ++ ++ if (!g_strcmp0 (media, "video")) ++ return GST_WEBRTC_KIND_VIDEO; ++ ++ return GST_WEBRTC_KIND_UNKNOWN; ++} ++ ++char * ++_get_msid_from_media (const GstSDPMedia * media) ++{ ++ int i; ++ ++ for (i = 0; i < gst_sdp_media_attributes_len (media); i++) { ++ const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, i); ++ const char *start, *end; ++ ++ if (!attr->value) ++ continue; ++ ++ start = strstr (attr->value, "msid:"); ++ if (!start) ++ continue; ++ ++ start += strlen ("msid:"); ++ end = strstr (start, " "); ++ if (end) ++ return g_strndup (start, end - start); ++ } ++ ++ return NULL; ++} +diff --git a/ext/webrtc/utils.h b/ext/webrtc/utils.h +index ab4d58e87..e5d3d124a 100644 +--- a/ext/webrtc/utils.h ++++ b/ext/webrtc/utils.h +@@ -26,22 +26,6 @@ + + G_BEGIN_DECLS + +-#define GST_WEBRTC_BIN_ERROR gst_webrtc_bin_error_quark () +-GQuark gst_webrtc_bin_error_quark (void); +- +-typedef enum +-{ +- GST_WEBRTC_BIN_ERROR_FAILED, +- GST_WEBRTC_BIN_ERROR_INVALID_SYNTAX, +- GST_WEBRTC_BIN_ERROR_INVALID_MODIFICATION, +- 
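The two helpers added to utils.c can be exercised as in the sketch below; the caps string and the SDP attribute are made-up inputs. Note that _get_msid_from_media() scans attribute values for an embedded "msid:" token, i.e. the form used in a=ssrc lines:

    GstCaps *caps = gst_caps_from_string ("application/x-rtp, media=(string)video");
    GstWebRTCKind kind = webrtc_kind_from_caps (caps);
    GST_DEBUG ("kind %d", kind);          /* GST_WEBRTC_KIND_VIDEO */
    gst_caps_unref (caps);

    GstSDPMedia *media;
    char *msid;

    gst_sdp_media_new (&media);
    gst_sdp_media_add_attribute (media, "ssrc",
        "3735928559 msid:stream-id track-id");
    msid = _get_msid_from_media (media);  /* "stream-id" */
    g_free (msid);
    gst_sdp_media_free (media);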
GST_WEBRTC_BIN_ERROR_INVALID_STATE, +- GST_WEBRTC_BIN_ERROR_BAD_SDP, +- GST_WEBRTC_BIN_ERROR_FINGERPRINT, +- GST_WEBRTC_BIN_ERROR_SCTP_FAILURE, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, +- GST_WEBRTC_BIN_ERROR_CLOSED, +-} GstWebRTCError; +- + GstPadTemplate * _find_pad_template (GstElement * element, + GstPadDirection direction, + GstPadPresence presence, +@@ -75,11 +59,18 @@ struct pad_block * _create_pad_block (GstElement * element, + GDestroyNotify notify); + + G_GNUC_INTERNAL +-gchar * _enum_value_to_string (GType type, guint value); ++const gchar * _enum_value_to_string (GType type, guint value); + G_GNUC_INTERNAL + const gchar * _g_checksum_to_webrtc_string (GChecksumType type); + G_GNUC_INTERNAL + GstCaps * _rtp_caps_from_media (const GstSDPMedia * media); ++G_GNUC_INTERNAL ++GstWebRTCKind webrtc_kind_from_caps (const GstCaps * caps); ++G_GNUC_INTERNAL ++char * _get_msid_from_media (const GstSDPMedia * media); ++ ++#define gst_webrtc_kind_to_string(kind) _enum_value_to_string(GST_TYPE_WEBRTC_KIND, kind) ++#define gst_webrtc_rtp_transceiver_direction_to_string(dir) _enum_value_to_string(GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, dir) + + G_END_DECLS + +diff --git a/ext/webrtc/webrtcdatachannel.c b/ext/webrtc/webrtcdatachannel.c +index fde12613c..0260c6172 100644 +--- a/ext/webrtc/webrtcdatachannel.c ++++ b/ext/webrtc/webrtcdatachannel.c +@@ -44,12 +44,150 @@ + #define GST_CAT_DEFAULT webrtc_data_channel_debug + GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); + ++static void _close_procedure (WebRTCDataChannel * channel, gpointer user_data); ++ ++typedef void (*ChannelTask) (GstWebRTCDataChannel * channel, ++ gpointer user_data); ++ ++struct task ++{ ++ GstWebRTCDataChannel *channel; ++ ChannelTask func; ++ gpointer user_data; ++ GDestroyNotify notify; ++}; ++ ++static GstStructure * ++_execute_task (GstWebRTCBin * webrtc, struct task *task) ++{ ++ if (task->func) ++ task->func (task->channel, task->user_data); ++ ++ return NULL; ++} ++ ++static void ++_free_task (struct task *task) ++{ ++ gst_object_unref (task->channel); ++ ++ if (task->notify) ++ task->notify (task->user_data); ++ g_free (task); ++} ++ ++static void ++_channel_enqueue_task (WebRTCDataChannel * channel, ChannelTask func, ++ gpointer user_data, GDestroyNotify notify) ++{ ++ struct task *task = g_new0 (struct task, 1); ++ ++ task->channel = gst_object_ref (channel); ++ task->func = func; ++ task->user_data = user_data; ++ task->notify = notify; ++ ++ gst_webrtc_bin_enqueue_task (channel->webrtcbin, ++ (GstWebRTCBinFunc) _execute_task, task, (GDestroyNotify) _free_task, ++ NULL); ++} ++ ++static void ++_channel_store_error (WebRTCDataChannel * channel, GError * error) ++{ ++ GST_WEBRTC_DATA_CHANNEL_LOCK (channel); ++ if (error) { ++ GST_WARNING_OBJECT (channel, "Error: %s", ++ error ? 
error->message : "Unknown"); ++ if (!channel->stored_error) ++ channel->stored_error = error; ++ else ++ g_clear_error (&error); ++ } ++ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); ++} ++ ++struct _WebRTCErrorIgnoreBin ++{ ++ GstBin bin; ++ ++ WebRTCDataChannel *data_channel; ++}; ++ ++G_DEFINE_TYPE (WebRTCErrorIgnoreBin, webrtc_error_ignore_bin, GST_TYPE_BIN); ++ ++static void ++webrtc_error_ignore_bin_handle_message (GstBin * bin, GstMessage * message) ++{ ++ WebRTCErrorIgnoreBin *self = WEBRTC_ERROR_IGNORE_BIN (bin); ++ ++ switch (GST_MESSAGE_TYPE (message)) { ++ case GST_MESSAGE_ERROR:{ ++ GError *error = NULL; ++ gst_message_parse_error (message, &error, NULL); ++ GST_DEBUG_OBJECT (bin, "handling error message from internal element"); ++ _channel_store_error (self->data_channel, error); ++ _channel_enqueue_task (self->data_channel, (ChannelTask) _close_procedure, ++ NULL, NULL); ++ break; ++ } ++ default: ++ GST_BIN_CLASS (webrtc_error_ignore_bin_parent_class)->handle_message (bin, ++ message); ++ break; ++ } ++} ++ ++static void ++webrtc_error_ignore_bin_class_init (WebRTCErrorIgnoreBinClass * klass) ++{ ++ GstBinClass *bin_class = (GstBinClass *) klass; ++ ++ bin_class->handle_message = webrtc_error_ignore_bin_handle_message; ++} ++ ++static void ++webrtc_error_ignore_bin_init (WebRTCErrorIgnoreBin * bin) ++{ ++} ++ ++static GstElement * ++webrtc_error_ignore_bin_new (WebRTCDataChannel * data_channel, ++ GstElement * other) ++{ ++ WebRTCErrorIgnoreBin *self; ++ GstPad *pad; ++ ++ self = g_object_new (webrtc_error_ignore_bin_get_type (), NULL); ++ self->data_channel = data_channel; ++ ++ gst_bin_add (GST_BIN (self), other); ++ ++ pad = gst_element_get_static_pad (other, "src"); ++ if (pad) { ++ GstPad *ghost_pad = gst_ghost_pad_new ("src", pad); ++ gst_element_add_pad (GST_ELEMENT (self), ghost_pad); ++ gst_clear_object (&pad); ++ } ++ pad = gst_element_get_static_pad (other, "sink"); ++ if (pad) { ++ GstPad *ghost_pad = gst_ghost_pad_new ("sink", pad); ++ gst_element_add_pad (GST_ELEMENT (self), ghost_pad); ++ gst_clear_object (&pad); ++ } ++ ++ return (GstElement *) self; ++} ++ + #define webrtc_data_channel_parent_class parent_class + G_DEFINE_TYPE_WITH_CODE (WebRTCDataChannel, webrtc_data_channel, + GST_TYPE_WEBRTC_DATA_CHANNEL, + GST_DEBUG_CATEGORY_INIT (webrtc_data_channel_debug, "webrtcdatachannel", 0, + "webrtcdatachannel");); + ++G_LOCK_DEFINE_STATIC (outstanding_channels_lock); ++static GList *outstanding_channels; ++ + typedef enum + { + DATA_CHANNEL_PPID_WEBRTC_CONTROL = 50, +@@ -210,65 +348,6 @@ construct_ack_packet (WebRTCDataChannel * channel) + return buf; + } + +-typedef void (*ChannelTask) (GstWebRTCDataChannel * channel, +- gpointer user_data); +- +-struct task +-{ +- GstWebRTCDataChannel *channel; +- ChannelTask func; +- gpointer user_data; +- GDestroyNotify notify; +-}; +- +-static void +-_execute_task (GstWebRTCBin * webrtc, struct task *task) +-{ +- if (task->func) +- task->func (task->channel, task->user_data); +-} +- +-static void +-_free_task (struct task *task) +-{ +- gst_object_unref (task->channel); +- +- if (task->notify) +- task->notify (task->user_data); +- g_free (task); +-} +- +-static void +-_channel_enqueue_task (WebRTCDataChannel * channel, ChannelTask func, +- gpointer user_data, GDestroyNotify notify) +-{ +- struct task *task = g_new0 (struct task, 1); +- +- task->channel = gst_object_ref (channel); +- task->func = func; +- task->user_data = user_data; +- task->notify = notify; +- +- gst_webrtc_bin_enqueue_task (channel->webrtcbin, +- 
(GstWebRTCBinFunc) _execute_task, task, (GDestroyNotify) _free_task, +- NULL); +-} +- +-static void +-_channel_store_error (WebRTCDataChannel * channel, GError * error) +-{ +- GST_WEBRTC_DATA_CHANNEL_LOCK (channel); +- if (error) { +- GST_WARNING_OBJECT (channel, "Error: %s", +- error ? error->message : "Unknown"); +- if (!channel->stored_error) +- channel->stored_error = error; +- else +- g_clear_error (&error); +- } +- GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); +-} +- + static void + _emit_on_open (WebRTCDataChannel * channel, gpointer user_data) + { +@@ -279,17 +358,30 @@ static void + _transport_closed (WebRTCDataChannel * channel) + { + GError *error; ++ gboolean both_sides_closed; + + GST_WEBRTC_DATA_CHANNEL_LOCK (channel); + error = channel->stored_error; + channel->stored_error = NULL; ++ ++ GST_TRACE_OBJECT (channel, "transport closed, peer closed %u error %p " ++ "buffered %" G_GUINT64_FORMAT, channel->peer_closed, error, ++ channel->parent.buffered_amount); ++ ++ both_sides_closed = ++ channel->peer_closed && channel->parent.buffered_amount <= 0; ++ if (both_sides_closed || error) { ++ channel->peer_closed = FALSE; ++ } + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); + + if (error) { + gst_webrtc_data_channel_on_error (GST_WEBRTC_DATA_CHANNEL (channel), error); + g_clear_error (&error); + } +- gst_webrtc_data_channel_on_close (GST_WEBRTC_DATA_CHANNEL (channel)); ++ if (both_sides_closed || error) { ++ gst_webrtc_data_channel_on_close (GST_WEBRTC_DATA_CHANNEL (channel)); ++ } + } + + static void +@@ -297,7 +389,10 @@ _close_sctp_stream (WebRTCDataChannel * channel, gpointer user_data) + { + GstPad *pad, *peer; + +- pad = gst_element_get_static_pad (channel->appsrc, "src"); ++ GST_INFO_OBJECT (channel, "Closing outgoing SCTP stream %i label \"%s\"", ++ channel->parent.id, channel->parent.label); ++ ++ pad = gst_element_get_static_pad (channel->src_bin, "src"); + peer = gst_pad_get_peer (pad); + gst_object_unref (pad); + +@@ -305,6 +400,7 @@ _close_sctp_stream (WebRTCDataChannel * channel, gpointer user_data) + GstElement *sctpenc = gst_pad_get_parent_element (peer); + + if (sctpenc) { ++ GST_TRACE_OBJECT (channel, "removing sctpenc pad %" GST_PTR_FORMAT, peer); + gst_element_release_request_pad (sctpenc, peer); + gst_object_unref (sctpenc); + } +@@ -319,31 +415,44 @@ _close_procedure (WebRTCDataChannel * channel, gpointer user_data) + { + /* https://www.w3.org/TR/webrtc/#data-transport-closing-procedure */ + GST_WEBRTC_DATA_CHANNEL_LOCK (channel); +- if (channel->parent.ready_state == GST_WEBRTC_DATA_CHANNEL_STATE_CLOSED +- || channel->parent.ready_state == GST_WEBRTC_DATA_CHANNEL_STATE_CLOSING) { ++ if (channel->parent.ready_state == GST_WEBRTC_DATA_CHANNEL_STATE_CLOSED) { + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); + return; +- } +- channel->parent.ready_state = GST_WEBRTC_DATA_CHANNEL_STATE_CLOSING; +- GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); +- g_object_notify (G_OBJECT (channel), "ready-state"); ++ } else if (channel->parent.ready_state == ++ GST_WEBRTC_DATA_CHANNEL_STATE_CLOSING) { ++ _channel_enqueue_task (channel, (ChannelTask) _transport_closed, NULL, ++ NULL); ++ } else if (channel->parent.ready_state == GST_WEBRTC_DATA_CHANNEL_STATE_OPEN) { ++ channel->parent.ready_state = GST_WEBRTC_DATA_CHANNEL_STATE_CLOSING; ++ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); ++ g_object_notify (G_OBJECT (channel), "ready-state"); + +- GST_WEBRTC_DATA_CHANNEL_LOCK (channel); +- if (channel->parent.buffered_amount <= 0) { +- _channel_enqueue_task (channel, (ChannelTask) _close_sctp_stream, +- 
NULL, NULL); ++ GST_WEBRTC_DATA_CHANNEL_LOCK (channel); ++ if (channel->parent.buffered_amount <= 0) { ++ _channel_enqueue_task (channel, (ChannelTask) _close_sctp_stream, ++ NULL, NULL); ++ } + } + + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); + } + + static void +-_on_sctp_reset_stream (GstWebRTCSCTPTransport * sctp, guint stream_id, ++_on_sctp_stream_reset (WebRTCSCTPTransport * sctp, guint stream_id, + WebRTCDataChannel * channel) + { +- if (channel->parent.id == stream_id) +- _channel_enqueue_task (channel, (ChannelTask) _transport_closed, ++ if (channel->parent.id == stream_id) { ++ GST_INFO_OBJECT (channel, ++ "Received channel close for SCTP stream %i label \"%s\"", ++ channel->parent.id, channel->parent.label); ++ ++ GST_WEBRTC_DATA_CHANNEL_LOCK (channel); ++ channel->peer_closed = TRUE; ++ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); ++ ++ _channel_enqueue_task (channel, (ChannelTask) _close_procedure, + GUINT_TO_POINTER (stream_id), NULL); ++ } + } + + static void +@@ -386,8 +495,8 @@ _parse_control_packet (WebRTCDataChannel * channel, guint8 * data, + GST_INFO_OBJECT (channel, "Received channel open"); + + if (channel->parent.negotiated) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_DATA_CHANNEL_FAILURE, + "Data channel was signalled as negotiated already"); + g_return_val_if_reached (GST_FLOW_ERROR); + } +@@ -437,7 +546,7 @@ _parse_control_packet (WebRTCDataChannel * channel, guint8 * data, + channel->opened = TRUE; + + GST_INFO_OBJECT (channel, "Received channel open for SCTP stream %i " +- "label %s protocol %s ordered %s", channel->parent.id, ++ "label \"%s\" protocol %s ordered %s", channel->parent.id, + channel->parent.label, channel->parent.protocol, + channel->parent.ordered ? 
"true" : "false"); + +@@ -452,16 +561,17 @@ _parse_control_packet (WebRTCDataChannel * channel, guint8 * data, + + ret = gst_app_src_push_buffer (GST_APP_SRC (channel->appsrc), buffer); + if (ret != GST_FLOW_OK) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, +- "Could not send ack packet"); ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_DATA_CHANNEL_FAILURE, "Could not send ack packet"); ++ GST_WARNING_OBJECT (channel, "push returned %i, %s", ret, ++ gst_flow_get_name (ret)); + return ret; + } + + return ret; + } else { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_DATA_CHANNEL_FAILURE, + "Unknown message type in control protocol"); + return GST_FLOW_ERROR; + } +@@ -470,8 +580,8 @@ parse_error: + { + g_free (label); + g_free (proto); +- g_set_error (error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, "Failed to parse packet"); ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_DATA_CHANNEL_FAILURE, "Failed to parse packet"); + g_return_val_if_reached (GST_FLOW_ERROR); + } + } +@@ -523,14 +633,14 @@ _data_channel_have_sample (WebRTCDataChannel * channel, GstSample * sample, + + buffer = gst_sample_get_buffer (sample); + if (!buffer) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, "No buffer to handle"); ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_DATA_CHANNEL_FAILURE, "No buffer to handle"); + return GST_FLOW_ERROR; + } + receive = gst_sctp_buffer_get_receive_meta (buffer); + if (!receive) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_DATA_CHANNEL_FAILURE, + "No SCTP Receive meta on the buffer"); + return GST_FLOW_ERROR; + } +@@ -539,8 +649,8 @@ _data_channel_have_sample (WebRTCDataChannel * channel, GstSample * sample, + case DATA_CHANNEL_PPID_WEBRTC_CONTROL:{ + GstMapInfo info = GST_MAP_INFO_INIT; + if (!gst_buffer_map (buffer, &info, GST_MAP_READ)) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_DATA_CHANNEL_FAILURE, + "Failed to map received buffer"); + ret = GST_FLOW_ERROR; + } else { +@@ -553,8 +663,8 @@ _data_channel_have_sample (WebRTCDataChannel * channel, GstSample * sample, + case DATA_CHANNEL_PPID_WEBRTC_STRING_PARTIAL:{ + GstMapInfo info = GST_MAP_INFO_INIT; + if (!gst_buffer_map (buffer, &info, GST_MAP_READ)) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_DATA_CHANNEL_FAILURE, + "Failed to map received buffer"); + ret = GST_FLOW_ERROR; + } else { +@@ -569,8 +679,8 @@ _data_channel_have_sample (WebRTCDataChannel * channel, GstSample * sample, + case DATA_CHANNEL_PPID_WEBRTC_BINARY_PARTIAL:{ + struct map_info *info = g_new0 (struct map_info, 1); + if (!gst_buffer_map (buffer, &info->map_info, GST_MAP_READ)) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_DATA_CHANNEL_FAILURE, + "Failed to map received buffer"); + ret = GST_FLOW_ERROR; + } else { +@@ -591,8 +701,8 @@ _data_channel_have_sample (WebRTCDataChannel * channel, GstSample * sample, + NULL); + break; + default: +- g_set_error (error, 
GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_DATA_CHANNEL_FAILURE, + "Unknown SCTP PPID %u received", receive->ppid); + ret = GST_FLOW_ERROR; + break; +@@ -671,13 +781,14 @@ webrtc_data_channel_start_negotiation (WebRTCDataChannel * channel) + buffer = construct_open_packet (channel); + + GST_INFO_OBJECT (channel, "Sending channel open for SCTP stream %i " +- "label %s protocol %s ordered %s", channel->parent.id, ++ "label \"%s\" protocol %s ordered %s", channel->parent.id, + channel->parent.label, channel->parent.protocol, + channel->parent.ordered ? "true" : "false"); + + GST_WEBRTC_DATA_CHANNEL_LOCK (channel); + channel->parent.buffered_amount += gst_buffer_get_size (buffer); + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); ++ g_object_notify (G_OBJECT (&channel->parent), "buffered-amount"); + + if (gst_app_src_push_buffer (GST_APP_SRC (channel->appsrc), + buffer) == GST_FLOW_OK) { +@@ -685,8 +796,8 @@ webrtc_data_channel_start_negotiation (WebRTCDataChannel * channel) + _channel_enqueue_task (channel, (ChannelTask) _emit_on_open, NULL, NULL); + } else { + GError *error = NULL; +- g_set_error (&error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, ++ g_set_error (&error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_DATA_CHANNEL_FAILURE, + "Failed to send DCEP open packet"); + _channel_store_error (channel, error); + _channel_enqueue_task (channel, (ChannelTask) _close_procedure, NULL, NULL); +@@ -715,35 +826,30 @@ _is_within_max_message_size (WebRTCDataChannel * channel, gsize size) + return size <= channel->sctp_transport->max_message_size; + } + +-static void ++static gboolean + webrtc_data_channel_send_data (GstWebRTCDataChannel * base_channel, +- GBytes * bytes) ++ GBytes * bytes, GError ** error) + { + WebRTCDataChannel *channel = WEBRTC_DATA_CHANNEL (base_channel); + GstSctpSendMetaPartiallyReliability reliability; + guint rel_param; + guint32 ppid; + GstBuffer *buffer; ++ gsize size = 0; + GstFlowReturn ret; + + if (!bytes) { + buffer = gst_buffer_new (); + ppid = DATA_CHANNEL_PPID_WEBRTC_BINARY_EMPTY; + } else { +- gsize size; + guint8 *data; + + data = (guint8 *) g_bytes_get_data (bytes, &size); +- g_return_if_fail (data != NULL); ++ g_return_val_if_fail (data != NULL, FALSE); + if (!_is_within_max_message_size (channel, size)) { +- GError *error = NULL; +- g_set_error (&error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_TYPE_ERROR, + "Requested to send data that is too large"); +- _channel_store_error (channel, error); +- _channel_enqueue_task (channel, (ChannelTask) _close_procedure, NULL, +- NULL); +- return; ++ return FALSE; + } + + buffer = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, data, size, +@@ -759,53 +865,66 @@ webrtc_data_channel_send_data (GstWebRTCDataChannel * base_channel, + buffer); + + GST_WEBRTC_DATA_CHANNEL_LOCK (channel); +- channel->parent.buffered_amount += gst_buffer_get_size (buffer); ++ if (channel->parent.ready_state == GST_WEBRTC_DATA_CHANNEL_STATE_OPEN) { ++ channel->parent.buffered_amount += size; ++ } else { ++ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INVALID_STATE, "channel is not open"); ++ return FALSE; ++ } + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); + + ret = gst_app_src_push_buffer (GST_APP_SRC (channel->appsrc), buffer); ++ if (ret == GST_FLOW_OK) { ++ g_object_notify (G_OBJECT 
(&channel->parent), "buffered-amount"); ++ } else { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_DATA_CHANNEL_FAILURE, "Failed to send data"); ++ GST_WARNING_OBJECT (channel, "push returned %i, %s", ret, ++ gst_flow_get_name (ret)); ++ ++ GST_WEBRTC_DATA_CHANNEL_LOCK (channel); ++ channel->parent.buffered_amount -= size; ++ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); + +- if (ret != GST_FLOW_OK) { +- GError *error = NULL; +- g_set_error (&error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, "Failed to send data"); +- _channel_store_error (channel, error); + _channel_enqueue_task (channel, (ChannelTask) _close_procedure, NULL, NULL); ++ return FALSE; + } ++ ++ return TRUE; + } + +-static void ++static gboolean + webrtc_data_channel_send_string (GstWebRTCDataChannel * base_channel, +- const gchar * str) ++ const gchar * str, GError ** error) + { + WebRTCDataChannel *channel = WEBRTC_DATA_CHANNEL (base_channel); + GstSctpSendMetaPartiallyReliability reliability; + guint rel_param; + guint32 ppid; + GstBuffer *buffer; ++ gsize size = 0; + GstFlowReturn ret; + + if (!channel->parent.negotiated) +- g_return_if_fail (channel->opened); +- g_return_if_fail (channel->sctp_transport != NULL); ++ g_return_val_if_fail (channel->opened, FALSE); ++ g_return_val_if_fail (channel->sctp_transport != NULL, FALSE); + + if (!str) { + buffer = gst_buffer_new (); + ppid = DATA_CHANNEL_PPID_WEBRTC_STRING_EMPTY; + } else { +- gsize size = strlen (str); +- gchar *str_copy = g_strdup (str); ++ gchar *str_copy; ++ size = strlen (str); + + if (!_is_within_max_message_size (channel, size)) { +- GError *error = NULL; +- g_set_error (&error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_TYPE_ERROR, + "Requested to send a string that is too large"); +- _channel_store_error (channel, error); +- _channel_enqueue_task (channel, (ChannelTask) _close_procedure, NULL, +- NULL); +- return; ++ return FALSE; + } + ++ str_copy = g_strdup (str); + buffer = + gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, str_copy, + size, 0, size, str_copy, g_free); +@@ -820,18 +939,32 @@ webrtc_data_channel_send_string (GstWebRTCDataChannel * base_channel, + buffer); + + GST_WEBRTC_DATA_CHANNEL_LOCK (channel); +- channel->parent.buffered_amount += gst_buffer_get_size (buffer); ++ if (channel->parent.ready_state == GST_WEBRTC_DATA_CHANNEL_STATE_OPEN) { ++ channel->parent.buffered_amount += size; ++ } else { ++ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INVALID_STATE, "channel is not open"); ++ return FALSE; ++ } + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); + + ret = gst_app_src_push_buffer (GST_APP_SRC (channel->appsrc), buffer); ++ if (ret == GST_FLOW_OK) { ++ g_object_notify (G_OBJECT (&channel->parent), "buffered-amount"); ++ } else { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_DATA_CHANNEL_FAILURE, "Failed to send string"); ++ ++ GST_WEBRTC_DATA_CHANNEL_LOCK (channel); ++ channel->parent.buffered_amount -= size; ++ GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); + +- if (ret != GST_FLOW_OK) { +- GError *error = NULL; +- g_set_error (&error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_DATA_CHANNEL_FAILURE, "Failed to send string"); +- _channel_store_error (channel, error); + _channel_enqueue_task (channel, (ChannelTask) _close_procedure, NULL, NULL); ++ return FALSE; + } ++ ++ return TRUE; + } + + static void +@@ -847,13 +980,37 @@ 
_on_sctp_notify_state_unlocked (GObject * sctp_transport, + } + } + ++static WebRTCDataChannel * ++ensure_channel_alive (WebRTCDataChannel * channel) ++{ ++ /* ghetto impl of, does the channel still exist?. ++ * Needed because g_signal_handler_disconnect*() will not disconnect any ++ * running functions and _finalize() implementation can complete and ++ * invalidate channel */ ++ G_LOCK (outstanding_channels_lock); ++ if (g_list_find (outstanding_channels, channel)) { ++ g_object_ref (channel); ++ } else { ++ G_UNLOCK (outstanding_channels_lock); ++ return NULL; ++ } ++ G_UNLOCK (outstanding_channels_lock); ++ ++ return channel; ++} ++ + static void + _on_sctp_notify_state (GObject * sctp_transport, GParamSpec * pspec, + WebRTCDataChannel * channel) + { ++ if (!(channel = ensure_channel_alive (channel))) ++ return; ++ + GST_WEBRTC_DATA_CHANNEL_LOCK (channel); + _on_sctp_notify_state_unlocked (sctp_transport, channel); + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); ++ ++ g_object_unref (channel); + } + + static void +@@ -888,7 +1045,7 @@ on_appsrc_data (GstPad * pad, GstPadProbeInfo * info, gpointer user_data) + channel->parent.buffered_amount_low_threshold, + channel->parent.buffered_amount); + if (prev_amount >= channel->parent.buffered_amount_low_threshold +- && channel->parent.buffered_amount < ++ && channel->parent.buffered_amount <= + channel->parent.buffered_amount_low_threshold) { + _channel_enqueue_task (channel, (ChannelTask) _emit_low_threshold, NULL, + NULL); +@@ -900,6 +1057,7 @@ on_appsrc_data (GstPad * pad, GstPadProbeInfo * info, gpointer user_data) + NULL); + } + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); ++ g_object_notify (G_OBJECT (&channel->parent), "buffered-amount"); + } + + return GST_PAD_PROBE_OK; +@@ -908,10 +1066,15 @@ on_appsrc_data (GstPad * pad, GstPadProbeInfo * info, gpointer user_data) + static void + gst_webrtc_data_channel_constructed (GObject * object) + { +- WebRTCDataChannel *channel = WEBRTC_DATA_CHANNEL (object); ++ WebRTCDataChannel *channel; + GstPad *pad; + GstCaps *caps; + ++ G_OBJECT_CLASS (parent_class)->constructed (object); ++ ++ channel = WEBRTC_DATA_CHANNEL (object); ++ GST_DEBUG ("New channel %p constructed", channel); ++ + caps = gst_caps_new_any (); + + channel->appsrc = gst_element_factory_make ("appsrc", NULL); +@@ -921,6 +1084,8 @@ gst_webrtc_data_channel_constructed (GObject * object) + channel->src_probe = gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_DATA_BOTH, + (GstPadProbeCallback) on_appsrc_data, channel, NULL); + ++ channel->src_bin = webrtc_error_ignore_bin_new (channel, channel->appsrc); ++ + channel->appsink = gst_element_factory_make ("appsink", NULL); + gst_object_ref_sink (channel->appsink); + g_object_set (channel->appsink, "sync", FALSE, "async", FALSE, "caps", caps, +@@ -928,10 +1093,22 @@ gst_webrtc_data_channel_constructed (GObject * object) + gst_app_sink_set_callbacks (GST_APP_SINK (channel->appsink), &sink_callbacks, + channel, NULL); + ++ channel->sink_bin = webrtc_error_ignore_bin_new (channel, channel->appsink); ++ + gst_object_unref (pad); + gst_caps_unref (caps); + } + ++static void ++gst_webrtc_data_channel_dispose (GObject * object) ++{ ++ G_LOCK (outstanding_channels_lock); ++ outstanding_channels = g_list_remove (outstanding_channels, object); ++ G_UNLOCK (outstanding_channels_lock); ++ ++ G_OBJECT_CLASS (parent_class)->dispose (object); ++} ++ + static void + gst_webrtc_data_channel_finalize (GObject * object) + { +@@ -962,6 +1139,7 @@ webrtc_data_channel_class_init (WebRTCDataChannelClass * klass) + 
(GstWebRTCDataChannelClass *) klass; + + gobject_class->constructed = gst_webrtc_data_channel_constructed; ++ gobject_class->dispose = gst_webrtc_data_channel_dispose; + gobject_class->finalize = gst_webrtc_data_channel_finalize; + + channel_class->send_data = webrtc_data_channel_send_data; +@@ -972,11 +1150,14 @@ webrtc_data_channel_class_init (WebRTCDataChannelClass * klass) + static void + webrtc_data_channel_init (WebRTCDataChannel * channel) + { ++ G_LOCK (outstanding_channels_lock); ++ outstanding_channels = g_list_prepend (outstanding_channels, channel); ++ G_UNLOCK (outstanding_channels_lock); + } + + static void + _data_channel_set_sctp_transport (WebRTCDataChannel * channel, +- GstWebRTCSCTPTransport * sctp) ++ WebRTCSCTPTransport * sctp) + { + g_return_if_fail (GST_IS_WEBRTC_DATA_CHANNEL (channel)); + g_return_if_fail (GST_IS_WEBRTC_SCTP_TRANSPORT (sctp)); +@@ -984,23 +1165,23 @@ _data_channel_set_sctp_transport (WebRTCDataChannel * channel, + GST_WEBRTC_DATA_CHANNEL_LOCK (channel); + if (channel->sctp_transport) + g_signal_handlers_disconnect_by_data (channel->sctp_transport, channel); ++ GST_TRACE_OBJECT (channel, "set sctp %p", sctp); + + gst_object_replace ((GstObject **) & channel->sctp_transport, + GST_OBJECT (sctp)); + + if (sctp) { +- g_signal_connect (sctp, "stream-reset", G_CALLBACK (_on_sctp_reset_stream), ++ g_signal_connect (sctp, "stream-reset", G_CALLBACK (_on_sctp_stream_reset), + channel); + g_signal_connect (sctp, "notify::state", G_CALLBACK (_on_sctp_notify_state), + channel); +- _on_sctp_notify_state_unlocked (G_OBJECT (sctp), channel); + } + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); + } + + void + webrtc_data_channel_link_to_sctp (WebRTCDataChannel * channel, +- GstWebRTCSCTPTransport * sctp_transport) ++ WebRTCSCTPTransport * sctp_transport) + { + if (sctp_transport && !channel->sctp_transport) { + gint id; +@@ -1012,10 +1193,12 @@ webrtc_data_channel_link_to_sctp (WebRTCDataChannel * channel, + + _data_channel_set_sctp_transport (channel, sctp_transport); + pad_name = g_strdup_printf ("sink_%u", id); +- if (!gst_element_link_pads (channel->appsrc, "src", ++ if (!gst_element_link_pads (channel->src_bin, "src", + channel->sctp_transport->sctpenc, pad_name)) + g_warn_if_reached (); + g_free (pad_name); ++ ++ _on_sctp_notify_state_unlocked (G_OBJECT (sctp_transport), channel); + } + } + } +diff --git a/ext/webrtc/webrtcdatachannel.h b/ext/webrtc/webrtcdatachannel.h +index 7ca3c0d17..dd65a66ae 100644 +--- a/ext/webrtc/webrtcdatachannel.h ++++ b/ext/webrtc/webrtcdatachannel.h +@@ -24,7 +24,9 @@ + #include + #include + #include +-#include "sctptransport.h" ++#include "webrtcsctptransport.h" ++ ++#include "gst/webrtc/webrtc-priv.h" + + G_BEGIN_DECLS + +@@ -43,14 +45,17 @@ struct _WebRTCDataChannel + { + GstWebRTCDataChannel parent; + +- GstWebRTCSCTPTransport *sctp_transport; ++ WebRTCSCTPTransport *sctp_transport; ++ GstElement *src_bin; + GstElement *appsrc; ++ GstElement *sink_bin; + GstElement *appsink; + + GstWebRTCBin *webrtcbin; + gboolean opened; + gulong src_probe; + GError *stored_error; ++ gboolean peer_closed; + + gpointer _padding[GST_PADDING]; + }; +@@ -65,7 +70,9 @@ struct _WebRTCDataChannelClass + void webrtc_data_channel_start_negotiation (WebRTCDataChannel *channel); + G_GNUC_INTERNAL + void webrtc_data_channel_link_to_sctp (WebRTCDataChannel *channel, +- GstWebRTCSCTPTransport *sctp_transport); ++ WebRTCSCTPTransport *sctp_transport); ++ ++G_DECLARE_FINAL_TYPE (WebRTCErrorIgnoreBin, webrtc_error_ignore_bin, WEBRTC, ERROR_IGNORE_BIN, GstBin); + + 
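The WebRTCErrorIgnoreBin declared just above is what src_bin/sink_bin wrap around the data channel's appsrc/appsink, so that an element error on one data channel does not tear down the whole webrtcbin. Its implementation is not shown in this hunk; a minimal sketch, assuming the bin simply swallows ERROR messages from its wrapped child (all names below are illustrative, not the internal ones), could look like:

/* Sketch of an "error ignore" bin: a GstBin subclass whose only job is to
 * drop ERROR messages posted by the element it wraps. */
#include <gst/gst.h>

typedef struct { GstBin parent; } ErrorIgnoreBin;
typedef struct { GstBinClass parent_class; } ErrorIgnoreBinClass;

G_DEFINE_TYPE (ErrorIgnoreBin, error_ignore_bin, GST_TYPE_BIN);

static void
error_ignore_bin_handle_message (GstBin * bin, GstMessage * message)
{
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ERROR) {
    /* Drop the error instead of forwarding it towards the pipeline. */
    gst_message_unref (message);
    return;
  }
  /* Everything else is handled as usual. */
  GST_BIN_CLASS (error_ignore_bin_parent_class)->handle_message (bin, message);
}

static void
error_ignore_bin_class_init (ErrorIgnoreBinClass * klass)
{
  GST_BIN_CLASS (klass)->handle_message = error_ignore_bin_handle_message;
}

static void
error_ignore_bin_init (ErrorIgnoreBin * self)
{
}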
G_END_DECLS + +diff --git a/ext/webrtc/webrtcsctptransport.c b/ext/webrtc/webrtcsctptransport.c +new file mode 100644 +index 000000000..c65dd1973 +--- /dev/null ++++ b/ext/webrtc/webrtcsctptransport.c +@@ -0,0 +1,251 @@ ++/* GStreamer ++ * Copyright (C) 2018 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifdef HAVE_CONFIG_H ++# include "config.h" ++#endif ++ ++#include ++ ++#include "webrtcsctptransport.h" ++#include "gstwebrtcbin.h" ++ ++#define GST_CAT_DEFAULT webrtc_sctp_transport_debug ++GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); ++ ++enum ++{ ++ SIGNAL_0, ++ ON_STREAM_RESET_SIGNAL, ++ LAST_SIGNAL, ++}; ++ ++enum ++{ ++ PROP_0, ++ PROP_TRANSPORT, ++ PROP_STATE, ++ PROP_MAX_MESSAGE_SIZE, ++ PROP_MAX_CHANNELS, ++}; ++ ++static guint webrtc_sctp_transport_signals[LAST_SIGNAL] = { 0 }; ++ ++#define webrtc_sctp_transport_parent_class parent_class ++G_DEFINE_TYPE_WITH_CODE (WebRTCSCTPTransport, webrtc_sctp_transport, ++ GST_TYPE_WEBRTC_SCTP_TRANSPORT, ++ GST_DEBUG_CATEGORY_INIT (webrtc_sctp_transport_debug, ++ "webrtcsctptransport", 0, "webrtcsctptransport");); ++ ++typedef void (*SCTPTask) (WebRTCSCTPTransport * sctp, gpointer user_data); ++ ++struct task ++{ ++ WebRTCSCTPTransport *sctp; ++ SCTPTask func; ++ gpointer user_data; ++ GDestroyNotify notify; ++}; ++ ++static GstStructure * ++_execute_task (GstWebRTCBin * webrtc, struct task *task) ++{ ++ if (task->func) ++ task->func (task->sctp, task->user_data); ++ return NULL; ++} ++ ++static void ++_free_task (struct task *task) ++{ ++ gst_object_unref (task->sctp); ++ ++ if (task->notify) ++ task->notify (task->user_data); ++ g_free (task); ++} ++ ++static void ++_sctp_enqueue_task (WebRTCSCTPTransport * sctp, SCTPTask func, ++ gpointer user_data, GDestroyNotify notify) ++{ ++ struct task *task = g_new0 (struct task, 1); ++ ++ task->sctp = gst_object_ref (sctp); ++ task->func = func; ++ task->user_data = user_data; ++ task->notify = notify; ++ ++ gst_webrtc_bin_enqueue_task (sctp->webrtcbin, ++ (GstWebRTCBinFunc) _execute_task, task, (GDestroyNotify) _free_task, ++ NULL); ++} ++ ++static void ++_emit_stream_reset (WebRTCSCTPTransport * sctp, gpointer user_data) ++{ ++ guint stream_id = GPOINTER_TO_UINT (user_data); ++ ++ g_signal_emit (sctp, ++ webrtc_sctp_transport_signals[ON_STREAM_RESET_SIGNAL], 0, stream_id); ++} ++ ++static void ++_on_sctp_dec_pad_removed (GstElement * sctpdec, GstPad * pad, ++ WebRTCSCTPTransport * sctp) ++{ ++ guint stream_id; ++ ++ if (sscanf (GST_PAD_NAME (pad), "src_%u", &stream_id) != 1) ++ return; ++ ++ _sctp_enqueue_task (sctp, (SCTPTask) _emit_stream_reset, ++ GUINT_TO_POINTER (stream_id), NULL); ++} ++ ++static void ++_on_sctp_association_established (GstElement * sctpenc, gboolean established, ++ WebRTCSCTPTransport * sctp) ++{ ++ GST_OBJECT_LOCK (sctp); ++ if 
(established) ++ sctp->state = GST_WEBRTC_SCTP_TRANSPORT_STATE_CONNECTED; ++ else ++ sctp->state = GST_WEBRTC_SCTP_TRANSPORT_STATE_CLOSED; ++ sctp->association_established = established; ++ GST_OBJECT_UNLOCK (sctp); ++ ++ g_object_notify (G_OBJECT (sctp), "state"); ++} ++ ++void ++webrtc_sctp_transport_set_priority (WebRTCSCTPTransport * sctp, ++ GstWebRTCPriorityType priority) ++{ ++ GstPad *pad; ++ ++ pad = gst_element_get_static_pad (sctp->sctpenc, "src"); ++ gst_pad_push_event (pad, ++ gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, ++ gst_structure_new ("GstWebRtcBinUpdateTos", "sctp-priority", ++ GST_TYPE_WEBRTC_PRIORITY_TYPE, priority, NULL))); ++ gst_object_unref (pad); ++} ++ ++static void ++webrtc_sctp_transport_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec) ++{ ++ WebRTCSCTPTransport *sctp = WEBRTC_SCTP_TRANSPORT (object); ++ ++ switch (prop_id) { ++ case PROP_TRANSPORT: ++ g_value_set_object (value, sctp->transport); ++ break; ++ case PROP_STATE: ++ g_value_set_enum (value, sctp->state); ++ break; ++ case PROP_MAX_MESSAGE_SIZE: ++ g_value_set_uint64 (value, sctp->max_message_size); ++ break; ++ case PROP_MAX_CHANNELS: ++ g_value_set_uint (value, sctp->max_channels); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++webrtc_sctp_transport_finalize (GObject * object) ++{ ++ WebRTCSCTPTransport *sctp = WEBRTC_SCTP_TRANSPORT (object); ++ ++ g_signal_handlers_disconnect_by_data (sctp->sctpdec, sctp); ++ g_signal_handlers_disconnect_by_data (sctp->sctpenc, sctp); ++ ++ gst_object_unref (sctp->sctpdec); ++ gst_object_unref (sctp->sctpenc); ++ ++ g_clear_object (&sctp->transport); ++ ++ G_OBJECT_CLASS (parent_class)->finalize (object); ++} ++ ++static void ++webrtc_sctp_transport_constructed (GObject * object) ++{ ++ WebRTCSCTPTransport *sctp = WEBRTC_SCTP_TRANSPORT (object); ++ guint association_id; ++ ++ association_id = g_random_int_range (0, G_MAXUINT16); ++ ++ sctp->sctpdec = ++ g_object_ref_sink (gst_element_factory_make ("sctpdec", NULL)); ++ g_object_set (sctp->sctpdec, "sctp-association-id", association_id, NULL); ++ sctp->sctpenc = ++ g_object_ref_sink (gst_element_factory_make ("sctpenc", NULL)); ++ g_object_set (sctp->sctpenc, "sctp-association-id", association_id, NULL); ++ g_object_set (sctp->sctpenc, "use-sock-stream", TRUE, NULL); ++ ++ g_signal_connect (sctp->sctpdec, "pad-removed", ++ G_CALLBACK (_on_sctp_dec_pad_removed), sctp); ++ g_signal_connect (sctp->sctpenc, "sctp-association-established", ++ G_CALLBACK (_on_sctp_association_established), sctp); ++ ++ G_OBJECT_CLASS (parent_class)->constructed (object); ++} ++ ++static void ++webrtc_sctp_transport_class_init (WebRTCSCTPTransportClass * klass) ++{ ++ GObjectClass *gobject_class = (GObjectClass *) klass; ++ ++ gobject_class->constructed = webrtc_sctp_transport_constructed; ++ gobject_class->get_property = webrtc_sctp_transport_get_property; ++ gobject_class->finalize = webrtc_sctp_transport_finalize; ++ ++ g_object_class_override_property (gobject_class, PROP_TRANSPORT, "transport"); ++ g_object_class_override_property (gobject_class, PROP_STATE, "state"); ++ g_object_class_override_property (gobject_class, ++ PROP_MAX_MESSAGE_SIZE, "max-message-size"); ++ g_object_class_override_property (gobject_class, ++ PROP_MAX_CHANNELS, "max-channels"); ++ ++ /** ++ * WebRTCSCTPTransport::stream-reset: ++ * @object: the #WebRTCSCTPTransport ++ * @stream_id: the SCTP stream that was reset ++ */ ++ 
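For context, the "stream-reset" signal documented above carries a single stream id and returns nothing; webrtcdatachannel.c connects to it per channel in _data_channel_set_sctp_transport() (see the earlier hunk). A handler therefore looks roughly like this sketch (the callback name is illustrative):

/* Illustrative handler for the internal "stream-reset" signal. */
static void
on_stream_reset (GObject * sctp, guint stream_id, gpointer user_data)
{
  g_debug ("SCTP stream %u was reset", stream_id);
}

/* ... somewhere with access to the WebRTCSCTPTransport instance ... */
g_signal_connect (sctp, "stream-reset", G_CALLBACK (on_stream_reset), NULL);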
webrtc_sctp_transport_signals[ON_STREAM_RESET_SIGNAL] = ++ g_signal_new ("stream-reset", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT); ++} ++ ++static void ++webrtc_sctp_transport_init (WebRTCSCTPTransport * nice) ++{ ++} ++ ++WebRTCSCTPTransport * ++webrtc_sctp_transport_new (void) ++{ ++ return g_object_new (TYPE_WEBRTC_SCTP_TRANSPORT, NULL); ++} +diff --git a/ext/webrtc/webrtcsctptransport.h b/ext/webrtc/webrtcsctptransport.h +new file mode 100644 +index 000000000..5661fc349 +--- /dev/null ++++ b/ext/webrtc/webrtcsctptransport.h +@@ -0,0 +1,74 @@ ++/* GStreamer ++ * Copyright (C) 2018 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifndef __WEBRTC_SCTP_TRANSPORT_H__ ++#define __WEBRTC_SCTP_TRANSPORT_H__ ++ ++#include ++#include ++#include ++#include "fwd.h" ++ ++#include "gst/webrtc/webrtc-priv.h" ++ ++G_BEGIN_DECLS ++ ++GType webrtc_sctp_transport_get_type(void); ++#define TYPE_WEBRTC_SCTP_TRANSPORT (webrtc_sctp_transport_get_type()) ++#define WEBRTC_SCTP_TRANSPORT(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),TYPE_WEBRTC_SCTP_TRANSPORT,WebRTCSCTPTransport)) ++#define WEBRTC_IS_SCTP_TRANSPORT(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),TYPE_WEBRTC_SCTP_TRANSPORT)) ++#define WEBRTC_SCTP_TRANSPORT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,TYPE_WEBRTC_SCTP_TRANSPORT,WebRTCSCTPTransportClass)) ++#define WEBRTC_SCTP_IS_TRANSPORT_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,TYPE_WEBRTC_SCTP_TRANSPORT)) ++#define WEBRTC_SCTP_TRANSPORT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,TYPE_WEBRTC_SCTP_TRANSPORT,WebRTCSCTPTransportClass)) ++ ++typedef struct _WebRTCSCTPTransport WebRTCSCTPTransport; ++typedef struct _WebRTCSCTPTransportClass WebRTCSCTPTransportClass; ++ ++struct _WebRTCSCTPTransport ++{ ++ GstWebRTCSCTPTransport parent; ++ ++ GstWebRTCDTLSTransport *transport; ++ GstWebRTCSCTPTransportState state; ++ guint64 max_message_size; ++ guint max_channels; ++ ++ gboolean association_established; ++ ++ gulong sctpdec_block_id; ++ GstElement *sctpdec; ++ GstElement *sctpenc; ++ ++ GstWebRTCBin *webrtcbin; ++}; ++ ++struct _WebRTCSCTPTransportClass ++{ ++ GstWebRTCSCTPTransportClass parent_class; ++}; ++ ++WebRTCSCTPTransport * webrtc_sctp_transport_new (void); ++ ++void ++webrtc_sctp_transport_set_priority (WebRTCSCTPTransport *sctp, ++ GstWebRTCPriorityType priority); ++ ++G_END_DECLS ++ ++#endif /* __WEBRTC_SCTP_TRANSPORT_H__ */ +diff --git a/ext/webrtc/webrtcsdp.c b/ext/webrtc/webrtcsdp.c +index 6e7f4b3d1..1abd4b115 100644 +--- a/ext/webrtc/webrtcsdp.c ++++ b/ext/webrtc/webrtcsdp.c +@@ -81,15 +81,14 @@ _check_valid_state_for_sdp_change (GstWebRTCSignalingState state, + return TRUE; + + { +- gchar *state_str = _enum_value_to_string (GST_TYPE_WEBRTC_SIGNALING_STATE, ++ const gchar *state_str = ++ 
_enum_value_to_string (GST_TYPE_WEBRTC_SIGNALING_STATE, + state); +- gchar *type_str = _enum_value_to_string (GST_TYPE_WEBRTC_SDP_TYPE, type); +- g_set_error (error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_INVALID_STATE, ++ const gchar *type_str = ++ _enum_value_to_string (GST_TYPE_WEBRTC_SDP_TYPE, type); ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_INVALID_STATE, + "Not in the correct state (%s) for setting %s %s description", + state_str, _sdp_source_to_string (source), type_str); +- g_free (state_str); +- g_free (type_str); + } + + return FALSE; +@@ -108,8 +107,8 @@ _check_sdp_crypto (SDPSource source, GstWebRTCSessionDescription * sdp, + + key = gst_sdp_message_get_key (sdp->sdp); + if (!IS_EMPTY_SDP_ATTRIBUTE (key->data)) { +- g_set_error_literal (error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_BAD_SDP, "sdp contains a k line"); ++ g_set_error_literal (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, "sdp contains a k line"); + return FALSE; + } + +@@ -122,8 +121,8 @@ _check_sdp_crypto (SDPSource source, GstWebRTCSessionDescription * sdp, + + if (!IS_EMPTY_SDP_ATTRIBUTE (message_fingerprint) + && !IS_EMPTY_SDP_ATTRIBUTE (media_fingerprint)) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_FINGERPRINT, ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_FINGERPRINT_FAILURE, + "No fingerprint lines in sdp for media %u", i); + return FALSE; + } +@@ -132,8 +131,8 @@ _check_sdp_crypto (SDPSource source, GstWebRTCSessionDescription * sdp, + } + if (!IS_EMPTY_SDP_ATTRIBUTE (media_fingerprint) + && g_strcmp0 (fingerprint, media_fingerprint) != 0) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_FINGERPRINT, ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_FINGERPRINT_FAILURE, + "Fingerprint in media %u differs from %s fingerprint. " + "\'%s\' != \'%s\'", i, message_fingerprint ? 
"global" : "previous", + fingerprint, media_fingerprint); +@@ -178,8 +177,8 @@ static gboolean + _check_trickle_ice (GstSDPMessage * msg, GError ** error) + { + if (!_session_has_attribute_key_value (msg, "ice-options", "trickle")) { +- g_set_error_literal (error, GST_WEBRTC_BIN_ERROR, +- GST_WEBRTC_BIN_ERROR_BAD_SDP, ++ g_set_error_literal (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, + "No required \'a=ice-options:trickle\' line in sdp"); + } + return TRUE; +@@ -204,7 +203,7 @@ _media_has_mid (const GstSDPMedia * media, guint media_idx, GError ** error) + { + const gchar *mid = gst_sdp_media_get_attribute_val (media, "mid"); + if (IS_EMPTY_SDP_ATTRIBUTE (mid)) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP, ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, + "media %u is missing or contains an empty \'mid\' attribute", + media_idx); + return FALSE; +@@ -248,13 +247,13 @@ _media_has_setup (const GstSDPMedia * media, guint media_idx, GError ** error) + static const gchar *valid_setups[] = { "actpass", "active", "passive", NULL }; + const gchar *setup = gst_sdp_media_get_attribute_val (media, "setup"); + if (IS_EMPTY_SDP_ATTRIBUTE (setup)) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP, ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, + "media %u is missing or contains an empty \'setup\' attribute", + media_idx); + return FALSE; + } + if (!g_strv_contains (valid_setups, setup)) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP, ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, + "media %u contains unknown \'setup\' attribute, \'%s\'", media_idx, + setup); + return FALSE; +@@ -268,7 +267,7 @@ _media_has_dtls_id (const GstSDPMedia * media, guint media_idx, GError ** error) + { + const gchar *dtls_id = gst_sdp_media_get_attribute_val (media, "ice-pwd"); + if (IS_EMPTY_SDP_ATTRIBUTE (dtls_id)) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP, ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, + "media %u is missing or contains an empty \'dtls-id\' attribute", + media_idx); + return FALSE; +@@ -307,13 +306,13 @@ validate_sdp (GstWebRTCSignalingState state, SDPSource source, + media_in_bundle = is_bundle + && g_strv_contains ((const gchar **) group_members, mid); + if (!_media_get_ice_ufrag (sdp->sdp, i)) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP, ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, + "media %u is missing or contains an empty \'ice-ufrag\' attribute", + i); + goto fail; + } + if (!_media_get_ice_pwd (sdp->sdp, i)) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP, ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, + "media %u is missing or contains an empty \'ice-pwd\' attribute", i); + goto fail; + } +@@ -327,7 +326,7 @@ validate_sdp (GstWebRTCSignalingState state, SDPSource source, + if (!bundle_ice_ufrag) + bundle_ice_ufrag = ice_ufrag; + else if (g_strcmp0 (bundle_ice_ufrag, ice_ufrag) != 0) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP, ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, + "media %u has different ice-ufrag values in bundle. 
" + "%s != %s", i, bundle_ice_ufrag, ice_ufrag); + goto fail; +@@ -335,7 +334,7 @@ validate_sdp (GstWebRTCSignalingState state, SDPSource source, + if (!bundle_ice_pwd) { + bundle_ice_pwd = ice_pwd; + } else if (g_strcmp0 (bundle_ice_pwd, ice_pwd) != 0) { +- g_set_error (error, GST_WEBRTC_BIN_ERROR, GST_WEBRTC_BIN_ERROR_BAD_SDP, ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, + "media %u has different ice-pwd values in bundle. " + "%s != %s", i, bundle_ice_pwd, ice_pwd); + goto fail; +@@ -425,12 +424,10 @@ void + _media_replace_direction (GstSDPMedia * media, + GstWebRTCRTPTransceiverDirection direction) + { +- gchar *dir_str; ++ const gchar *dir_str; + int i; + +- dir_str = +- _enum_value_to_string (GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, +- direction); ++ dir_str = gst_webrtc_rtp_transceiver_direction_to_string (direction); + + for (i = 0; i < gst_sdp_media_attributes_len (media); i++) { + const GstSDPAttribute *attr = gst_sdp_media_get_attribute (media, i); +@@ -443,14 +440,12 @@ _media_replace_direction (GstSDPMedia * media, + GST_TRACE ("replace %s with %s", attr->key, dir_str); + gst_sdp_attribute_set (&new_attr, dir_str, ""); + gst_sdp_media_replace_attribute (media, i, &new_attr); +- g_free (dir_str); + return; + } + } + + GST_TRACE ("add %s", dir_str); + gst_sdp_media_add_attribute (media, dir_str, ""); +- g_free (dir_str); + } + + GstWebRTCRTPTransceiverDirection +@@ -556,7 +551,7 @@ _intersect_dtls_setup (GstWebRTCDTLSSetup offer) + void + _media_replace_setup (GstSDPMedia * media, GstWebRTCDTLSSetup setup) + { +- gchar *setup_str; ++ const gchar *setup_str; + int i; + + setup_str = _enum_value_to_string (GST_TYPE_WEBRTC_DTLS_SETUP, setup); +@@ -575,7 +570,6 @@ _media_replace_setup (GstSDPMedia * media, GstWebRTCDTLSSetup setup) + + GST_TRACE ("add setup:%s", setup_str); + gst_sdp_media_add_attribute (media, "setup", setup_str); +- g_free (setup_str); + } + + GstWebRTCDTLSSetup +@@ -872,7 +866,7 @@ _get_ice_credentials_from_sdp_media (const GstSDPMessage * sdp, guint media_idx, + } + + gboolean +-_parse_bundle (GstSDPMessage * sdp, GStrv * bundled) ++_parse_bundle (GstSDPMessage * sdp, GStrv * bundled, GError ** error) + { + const gchar *group; + gboolean ret = FALSE; +@@ -883,8 +877,9 @@ _parse_bundle (GstSDPMessage * sdp, GStrv * bundled) + *bundled = g_strsplit (group + strlen ("BUNDLE "), " ", 0); + + if (!(*bundled)[0]) { +- GST_ERROR ("Invalid format for BUNDLE group, expected at least " +- "one mid (%s)", group); ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ "Invalid format for BUNDLE group, expected at least one mid (%s)", ++ group); + g_strfreev (*bundled); + *bundled = NULL; + goto done; +diff --git a/ext/webrtc/webrtcsdp.h b/ext/webrtc/webrtcsdp.h +index 1501cbc93..c55709b50 100644 +--- a/ext/webrtc/webrtcsdp.h ++++ b/ext/webrtc/webrtcsdp.h +@@ -101,7 +101,8 @@ gboolean _get_bundle_index (Gst + guint * idx); + G_GNUC_INTERNAL + gboolean _parse_bundle (GstSDPMessage * sdp, +- GStrv * bundled); ++ GStrv * bundled, ++ GError ** error); + + G_GNUC_INTERNAL + const gchar * _media_get_ice_pwd (const GstSDPMessage * msg, +diff --git a/ext/webrtc/webrtctransceiver.c b/ext/webrtc/webrtctransceiver.c +index f26536741..ba9c944d3 100644 +--- a/ext/webrtc/webrtctransceiver.c ++++ b/ext/webrtc/webrtctransceiver.c +@@ -32,7 +32,8 @@ GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); + G_DEFINE_TYPE_WITH_CODE (WebRTCTransceiver, webrtc_transceiver, + GST_TYPE_WEBRTC_RTP_TRANSCEIVER, + GST_DEBUG_CATEGORY_INIT 
(webrtc_transceiver_debug, +- "webrtctransceiver", 0, "webrtctransceiver");); ++ "webrtctransceiver", 0, "webrtctransceiver"); ++ ); + + #define DEFAULT_FEC_TYPE GST_WEBRTC_FEC_TYPE_NONE + #define DEFAULT_DO_NACK FALSE +@@ -59,19 +60,17 @@ webrtc_transceiver_set_transport (WebRTCTransceiver * trans, + + gst_object_replace ((GstObject **) & trans->stream, (GstObject *) stream); + +- if (rtp_trans->sender) ++ if (rtp_trans->sender) { + gst_object_replace ((GstObject **) & rtp_trans->sender->transport, + (GstObject *) stream->transport); +- if (rtp_trans->receiver) ++ g_object_notify (G_OBJECT (rtp_trans->sender), "transport"); ++ } ++ ++ if (rtp_trans->receiver) { + gst_object_replace ((GstObject **) & rtp_trans->receiver->transport, + (GstObject *) stream->transport); +- +- if (rtp_trans->sender) +- gst_object_replace ((GstObject **) & rtp_trans->sender->rtcp_transport, +- (GstObject *) stream->rtcp_transport); +- if (rtp_trans->receiver) +- gst_object_replace ((GstObject **) & rtp_trans->receiver->rtcp_transport, +- (GstObject *) stream->rtcp_transport); ++ g_object_notify (G_OBJECT (rtp_trans->receiver), "transport"); ++ } + } + + GstWebRTCDTLSTransport * +@@ -88,20 +87,6 @@ webrtc_transceiver_get_dtls_transport (GstWebRTCRTPTransceiver * trans) + return NULL; + } + +-GstWebRTCDTLSTransport * +-webrtc_transceiver_get_rtcp_dtls_transport (GstWebRTCRTPTransceiver * trans) +-{ +- g_return_val_if_fail (WEBRTC_IS_TRANSCEIVER (trans), NULL); +- +- if (trans->sender) { +- return trans->sender->rtcp_transport; +- } else if (trans->receiver) { +- return trans->receiver->rtcp_transport; +- } +- +- return NULL; +-} +- + static void + webrtc_transceiver_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +@@ -163,15 +148,21 @@ webrtc_transceiver_finalize (GObject * object) + { + WebRTCTransceiver *trans = WEBRTC_TRANSCEIVER (object); + +- if (trans->stream) +- gst_object_unref (trans->stream); +- trans->stream = NULL; ++ gst_clear_object (&trans->stream); ++ gst_clear_object (&trans->ulpfecdec); ++ gst_clear_object (&trans->ulpfecenc); ++ gst_clear_object (&trans->redenc); + + if (trans->local_rtx_ssrc_map) + gst_structure_free (trans->local_rtx_ssrc_map); + trans->local_rtx_ssrc_map = NULL; + +- gst_caps_replace (&trans->last_configured_caps, NULL); ++ gst_caps_replace (&trans->last_retrieved_caps, NULL); ++ gst_caps_replace (&trans->last_send_configured_caps, NULL); ++ ++ g_free (trans->pending_mid); ++ ++ gst_event_replace (&trans->tos_event, NULL); + + G_OBJECT_CLASS (parent_class)->finalize (object); + } +diff --git a/ext/webrtc/webrtctransceiver.h b/ext/webrtc/webrtctransceiver.h +index c03730415..9f0e93c01 100644 +--- a/ext/webrtc/webrtctransceiver.h ++++ b/ext/webrtc/webrtctransceiver.h +@@ -22,6 +22,7 @@ + + #include "fwd.h" + #include ++#include "gst/webrtc/webrtc-priv.h" + #include "transportstream.h" + + G_BEGIN_DECLS +@@ -39,13 +40,27 @@ struct _WebRTCTransceiver + + TransportStream *stream; + GstStructure *local_rtx_ssrc_map; ++ GstEvent *tos_event; + + /* Properties */ + GstWebRTCFECType fec_type; + guint fec_percentage; + gboolean do_nack; + +- GstCaps *last_configured_caps; ++ /* The last caps that we put into to a SDP media section */ ++ GstCaps *last_retrieved_caps; ++ /* The last caps that we successfully configured from a valid ++ * set_local/remote description call. 
++ */ ++ GstCaps *last_send_configured_caps; ++ ++ gchar *pending_mid; ++ ++ gboolean mline_locked; ++ ++ GstElement *ulpfecdec; ++ GstElement *ulpfecenc; ++ GstElement *redenc; + }; + + struct _WebRTCTransceiverClass +@@ -61,7 +76,6 @@ void webrtc_transceiver_set_transport (WebRTCTransceiver * + TransportStream * stream); + + GstWebRTCDTLSTransport * webrtc_transceiver_get_dtls_transport (GstWebRTCRTPTransceiver * trans); +-GstWebRTCDTLSTransport * webrtc_transceiver_get_rtcp_dtls_transport (GstWebRTCRTPTransceiver * trans); + + G_END_DECLS + +diff --git a/gst-libs/gst/codecparsers/gstav1parser.c b/gst-libs/gst/codecparsers/gstav1parser.c +index 6f6b74125..9803148fb 100644 +--- a/gst-libs/gst/codecparsers/gstav1parser.c ++++ b/gst-libs/gst/codecparsers/gstav1parser.c +@@ -67,10 +67,6 @@ + * should call gst_av1_parser_reference_frame_update() to update the parser's inside + * state(such as reference information, global segmentation information, etc). + * +- * Note: If the frame is actived by show_existing_frame in #GST_AV1_OBU_FRAME_HEADER, +- * the function of gst_av1_parser_reference_frame_loading() should be called before +- * really showing that frame. +- * + * @since: 1.18.00 + */ + +@@ -195,6 +191,88 @@ av1_helper_inverse_recenter (gint r, gint v) + return r + (v >> 1); + } + ++/* Shift down with rounding for use when n >= 0, value >= 0 */ ++static guint64 ++av1_helper_round_power_of_two (guint64 value, guint16 n) ++{ ++ return (value + (((guint64) (1) << n) >> 1)) >> n; ++} ++ ++ /* Shift down with rounding for signed integers, for use when n >= 0 */ ++static gint64 ++av1_helper_round_power_of_two_signed (gint64 value, guint16 n) ++{ ++ return (value < 0) ? -((gint64) (av1_helper_round_power_of_two (-value, n))) ++ : (gint64) av1_helper_round_power_of_two (value, n); ++} ++ ++static gint ++av1_helper_msb (guint n) ++{ ++ int log = 0; ++ guint value = n; ++ int i; ++ ++ g_assert (n != 0); ++ ++ for (i = 4; i >= 0; --i) { ++ const gint shift = (1 << i); ++ const guint x = value >> shift; ++ if (x != 0) { ++ value = x; ++ log += shift; ++ } ++ } ++ return log; ++} ++ ++static const guint16 div_lut[GST_AV1_DIV_LUT_NUM + 1] = { ++ 16384, 16320, 16257, 16194, 16132, 16070, 16009, 15948, 15888, 15828, 15768, ++ 15709, 15650, 15592, 15534, 15477, 15420, 15364, 15308, 15252, 15197, 15142, ++ 15087, 15033, 14980, 14926, 14873, 14821, 14769, 14717, 14665, 14614, 14564, ++ 14513, 14463, 14413, 14364, 14315, 14266, 14218, 14170, 14122, 14075, 14028, ++ 13981, 13935, 13888, 13843, 13797, 13752, 13707, 13662, 13618, 13574, 13530, ++ 13487, 13443, 13400, 13358, 13315, 13273, 13231, 13190, 13148, 13107, 13066, ++ 13026, 12985, 12945, 12906, 12866, 12827, 12788, 12749, 12710, 12672, 12633, ++ 12596, 12558, 12520, 12483, 12446, 12409, 12373, 12336, 12300, 12264, 12228, ++ 12193, 12157, 12122, 12087, 12053, 12018, 11984, 11950, 11916, 11882, 11848, ++ 11815, 11782, 11749, 11716, 11683, 11651, 11619, 11586, 11555, 11523, 11491, ++ 11460, 11429, 11398, 11367, 11336, 11305, 11275, 11245, 11215, 11185, 11155, ++ 11125, 11096, 11067, 11038, 11009, 10980, 10951, 10923, 10894, 10866, 10838, ++ 10810, 10782, 10755, 10727, 10700, 10673, 10645, 10618, 10592, 10565, 10538, ++ 10512, 10486, 10460, 10434, 10408, 10382, 10356, 10331, 10305, 10280, 10255, ++ 10230, 10205, 10180, 10156, 10131, 10107, 10082, 10058, 10034, 10010, 9986, ++ 9963, 9939, 9916, 9892, 9869, 9846, 9823, 9800, 9777, 9754, 9732, ++ 9709, 9687, 9664, 9642, 9620, 9598, 9576, 9554, 9533, 9511, 9489, ++ 9468, 9447, 9425, 9404, 9383, 9362, 9341, 
9321, 9300, 9279, 9259, ++ 9239, 9218, 9198, 9178, 9158, 9138, 9118, 9098, 9079, 9059, 9039, ++ 9020, 9001, 8981, 8962, 8943, 8924, 8905, 8886, 8867, 8849, 8830, ++ 8812, 8793, 8775, 8756, 8738, 8720, 8702, 8684, 8666, 8648, 8630, ++ 8613, 8595, 8577, 8560, 8542, 8525, 8508, 8490, 8473, 8456, 8439, ++ 8422, 8405, 8389, 8372, 8355, 8339, 8322, 8306, 8289, 8273, 8257, ++ 8240, 8224, 8208, 8192, ++}; ++ ++static gint16 ++av1_helper_resolve_divisor_32 (guint32 D, gint16 * shift) ++{ ++ gint32 f; ++ gint32 e; ++ ++ *shift = av1_helper_msb (D); ++ // e is obtained from D after resetting the most significant 1 bit. ++ e = D - ((guint32) 1 << *shift); ++ // Get the most significant DIV_LUT_BITS (8) bits of e into f ++ if (*shift > GST_AV1_DIV_LUT_BITS) ++ f = av1_helper_round_power_of_two (e, *shift - GST_AV1_DIV_LUT_BITS); ++ else ++ f = e << (GST_AV1_DIV_LUT_BITS - *shift); ++ g_assert (f <= GST_AV1_DIV_LUT_NUM); ++ *shift += GST_AV1_DIV_LUT_PREC_BITS; ++ // Use f as lookup into the precomputed table of multipliers ++ return div_lut[f]; ++} ++ + /************************************* + * * + * Bitstream Functions * +@@ -293,8 +371,8 @@ av1_bitstreamfn_su (GstBitReader * br, guint8 n, GstAV1ParserResult * retval) + /* 4.10.7 + * + * Unsigned encoded integer with maximum number of values n */ +-static guint8 +-av1_bitstreamfn_ns (GstBitReader * br, guint8 n, GstAV1ParserResult * retval) ++static guint32 ++av1_bitstreamfn_ns (GstBitReader * br, guint32 n, GstAV1ParserResult * retval) + { + gint w, m, v; + gint extra_bit; +@@ -438,7 +516,6 @@ av1_parser_init_sequence_header (GstAV1SequenceHeaderOBU * seq_header) + static void + gst_av1_parse_reset_state (GstAV1Parser * parser, gboolean free_sps) + { +- parser->state.seen_frame_header = 0; + parser->state.begin_first_frame = FALSE; + + parser->state.prev_frame_id = 0; +@@ -487,29 +564,44 @@ gst_av1_parser_reset (GstAV1Parser * parser, gboolean annex_b) + { + g_return_if_fail (parser != NULL); + +- if (parser->annex_b) { +- g_assert (parser->temporal_unit_consumed <= parser->temporal_unit_size); +- if (parser->temporal_unit_consumed < parser->temporal_unit_size) +- GST_DEBUG ("temporal_unit_consumed: %d, temporal_unit_size:%d, " +- "discard the left %d bytes for a temporal_unit.", +- parser->temporal_unit_consumed, parser->temporal_unit_size, +- parser->temporal_unit_size - parser->temporal_unit_consumed); +- +- g_assert (parser->frame_unit_consumed <= parser->frame_unit_size); +- if (parser->frame_unit_consumed < parser->frame_unit_size) +- GST_DEBUG (" frame_unit_consumed %d, frame_unit_size: %d " +- "discard the left %d bytes for a frame_unit.", +- parser->frame_unit_consumed, parser->frame_unit_size, +- parser->frame_unit_size - parser->frame_unit_consumed); +- } ++ parser->annex_b = annex_b; ++ if (parser->annex_b) ++ gst_av1_parser_reset_annex_b (parser); ++ ++ gst_av1_parse_reset_state (parser, TRUE); ++} ++ ++/** ++ * gst_av1_parser_reset_annex_b: ++ * @parser: the #GstAV1Parser ++ * ++ * Only reset the current #GstAV1Parser's annex b context. ++ * The other part of the state is kept. 
++ * ++ * Since: 1.20 ++ */ ++void ++gst_av1_parser_reset_annex_b (GstAV1Parser * parser) ++{ ++ g_return_if_fail (parser != NULL); ++ g_return_if_fail (parser->annex_b); ++ ++ if (parser->temporal_unit_consumed < parser->temporal_unit_size) ++ GST_DEBUG ("temporal_unit_consumed: %d, temporal_unit_size:%d, " ++ "discard the left %d bytes for a temporal_unit.", ++ parser->temporal_unit_consumed, parser->temporal_unit_size, ++ parser->temporal_unit_size - parser->temporal_unit_consumed); ++ ++ if (parser->frame_unit_consumed < parser->frame_unit_size) ++ GST_DEBUG (" frame_unit_consumed %d, frame_unit_size: %d " ++ "discard the left %d bytes for a frame_unit.", ++ parser->frame_unit_consumed, parser->frame_unit_size, ++ parser->frame_unit_size - parser->frame_unit_consumed); + + parser->temporal_unit_consumed = 0; + parser->temporal_unit_size = 0; + parser->frame_unit_consumed = 0; + parser->frame_unit_size = 0; +- parser->annex_b = annex_b; +- +- gst_av1_parse_reset_state (parser, TRUE); + } + + /* 5.3.2 */ +@@ -605,7 +697,7 @@ gst_av1_parser_identify_one_obu (GstAV1Parser * parser, const guint8 * data, + } + + if (!size) { +- return ret = GST_AV1_PARSER_NO_MORE_DATA; ++ ret = GST_AV1_PARSER_NO_MORE_DATA; + goto error; + } + +@@ -615,14 +707,17 @@ gst_av1_parser_identify_one_obu (GstAV1Parser * parser, const guint8 * data, + annex_b_again: + last_pos = 0; + +- g_assert (*consumed <= size); ++ if (*consumed > size) ++ goto error; + if (*consumed == size) { + ret = GST_AV1_PARSER_NO_MORE_DATA; + goto error; + } + gst_bit_reader_init (&br, data + *consumed, size - *consumed); + +- g_assert (parser->temporal_unit_consumed <= parser->temporal_unit_size); ++ if (parser->temporal_unit_consumed > parser->temporal_unit_size) ++ goto error; ++ + if (parser->temporal_unit_consumed && + parser->temporal_unit_consumed == parser->temporal_unit_size) { + GST_LOG ("Complete a temporal unit of size %d", +@@ -647,7 +742,9 @@ gst_av1_parser_identify_one_obu (GstAV1Parser * parser, const guint8 * data, + } + } + +- g_assert (parser->frame_unit_consumed <= parser->frame_unit_size); ++ if (parser->frame_unit_consumed > parser->frame_unit_size) ++ goto error; ++ + if (parser->frame_unit_consumed && + parser->frame_unit_consumed == parser->frame_unit_size) { + GST_LOG ("Complete a frame unit of size %d", parser->frame_unit_size); +@@ -702,12 +799,12 @@ gst_av1_parser_identify_one_obu (GstAV1Parser * parser, const guint8 * data, + + if (obu_length == 0) { + /* An empty obu? let continue to the next */ +- ret = GST_AV1_PARSER_DROP; +- goto error; ++ return GST_AV1_PARSER_DROP; + } + } + +- g_assert (*consumed <= size); ++ if (*consumed > size) ++ goto error; + if (*consumed == size) { + ret = GST_AV1_PARSER_NO_MORE_DATA; + goto error; +@@ -722,12 +819,16 @@ gst_av1_parser_identify_one_obu (GstAV1Parser * parser, const guint8 * data, + GST_LOG ("identify obu type is %d", obu->obu_type); + + if (obu->header.obu_has_size_field) { ++ guint size_sz = gst_bit_reader_get_pos (&br) / 8; ++ + obu->obu_size = av1_bitstreamfn_leb128 (&br, &ret); + if (ret != GST_AV1_PARSER_OK) + goto error; + ++ size_sz = gst_bit_reader_get_pos (&br) / 8 - size_sz; + if (obu_length +- && obu_length - 1 - obu->header.obu_extention_flag != obu->obu_size) { ++ && obu_length - 1 - obu->header.obu_extention_flag - size_sz != ++ obu->obu_size) { + /* If obu_size and obu_length are both present, but inconsistent, + then the packed bitstream is deemed invalid. 
*/ + ret = GST_AV1_PARSER_BITSTREAM_ERROR; +@@ -779,8 +880,7 @@ gst_av1_parser_identify_one_obu (GstAV1Parser * parser, const guint8 * data, + (parser->state.operating_point_idc >> (obu->header.obu_spatial_id + + 8)) & 1; + if (!inTemporalLayer || !inSpatialLayer) { +- ret = GST_AV1_PARSER_DROP; +- goto error; ++ return GST_AV1_PARSER_DROP; + } + } + +@@ -1133,12 +1233,12 @@ gst_av1_parser_parse_sequence_header_obu (GstAV1Parser * parser, + seq_header->operating_points[i].idc = AV1_READ_BITS (br, 12); + seq_header->operating_points[i].seq_level_idx = AV1_READ_BITS (br, 5); + if (!av1_seq_level_idx_is_valid +- (seq_header->operating_points[0].seq_level_idx)) { ++ (seq_header->operating_points[i].seq_level_idx)) { + GST_INFO ("The seq_level_idx is unsupported"); + retval = GST_AV1_PARSER_BITSTREAM_ERROR; + goto error; + } +- if (seq_header->operating_points[i].seq_level_idx > GST_AV1_SEQ_LEVEL_4_0) { ++ if (seq_header->operating_points[i].seq_level_idx > GST_AV1_SEQ_LEVEL_3_3) { + seq_header->operating_points[i].seq_tier = AV1_READ_BIT (br); + } else { + seq_header->operating_points[i].seq_tier = 0; +@@ -1187,7 +1287,8 @@ gst_av1_parser_parse_sequence_header_obu (GstAV1Parser * parser, + } + } + +- /* Let user decide the operatingPoint, move it later ++ /* Let user decide the operatingPoint, ++ implemented by calling gst_av1_parser_set_operating_point() + operatingPoint = choose_operating_point( ) + operating_point_idc = operating_point_idc[ operatingPoint ] */ + +@@ -1346,10 +1447,8 @@ gst_av1_parser_parse_sequence_header_obu (GstAV1Parser * parser, + gst_av1_parse_reset_state (parser, FALSE); + + /* choose_operating_point() set the operating_point */ +- if (parser->state.operating_point < 0 || +- parser->state.operating_point > +- seq_header->operating_points_cnt_minus_1) { +- GST_INFO ("Invalid operating_point %d set by user, just use 0", ++ if (parser->state.operating_point > seq_header->operating_points_cnt_minus_1) { ++ GST_WARNING ("Invalid operating_point %d set by user, just use 0", + parser->state.operating_point); + parser->state.operating_point_idc = seq_header->operating_points[0].idc; + } else { +@@ -1414,7 +1513,7 @@ gst_av1_parse_metadata_itut_t35 (GstAV1Parser * parser, GstBitReader * br, + if (ret != GST_AV1_PARSER_OK) + return ret; + +- if (itut_t35->itu_t_t35_country_code) { ++ if (itut_t35->itu_t_t35_country_code == 0xFF) { + itut_t35->itu_t_t35_country_code_extention_byte = + AV1_READ_BITS_CHECKED (br, 8, &ret); + if (ret != GST_AV1_PARSER_OK) +@@ -1512,7 +1611,7 @@ gst_av1_parse_metadata_scalability (GstAV1Parser * parser, + + if (scalability->spatial_layer_description_present_flag) { + for (i = 0; i <= scalability->spatial_layers_cnt_minus_1; i++) { +- scalability->spatial_layer_ref_id[i] = AV1_READ_BIT_CHECKED (br, &ret); ++ scalability->spatial_layer_ref_id[i] = AV1_READ_UINT8_CHECKED (br, &ret); + if (ret != GST_AV1_PARSER_OK) + goto error; + } +@@ -1682,13 +1781,20 @@ gst_av1_parser_parse_metadata_obu (GstAV1Parser * parser, GstAV1OBU * obu, + &bit_reader, &(metadata->timecode)); + break; + default: +- return GST_AV1_PARSER_BITSTREAM_ERROR; ++ GST_WARNING ("Unknown metadata type %u", metadata->metadata_type); ++ return GST_AV1_PARSER_OK; + } + + if (retval != GST_AV1_PARSER_OK) + goto error; + + retval = av1_skip_trailing_bits (parser, &bit_reader, obu); ++ if (retval != GST_AV1_PARSER_OK) { ++ GST_WARNING ("Metadata type %d may have wrong trailings.", ++ metadata->metadata_type); ++ retval = GST_AV1_PARSER_OK; ++ } ++ + return retval; + + error: +@@ -2005,7 
+2111,8 @@ gst_av1_parse_segmentation_params (GstAV1Parser * parser, GstBitReader * br, + gint bits_to_read = segmentation_feature_bits[j]; + gint limit = segmentation_feature_max[j]; + if (segmentation_feature_signed[j]) { +- feature_value = av1_bitstreamfn_su (br, bits_to_read, &retval); ++ feature_value = ++ av1_bitstreamfn_su (br, 1 + bits_to_read, &retval); + if (retval != GST_AV1_PARSER_OK) + goto error; + +@@ -2022,20 +2129,37 @@ gst_av1_parse_segmentation_params (GstAV1Parser * parser, GstBitReader * br, + } + } + } else { ++ gint8 ref_idx; ++ GstAV1SegmenationParams *ref_seg_params; ++ + /* Copy it from prime_ref */ +- g_assert (frame_header->primary_ref_frame != GST_AV1_PRIMARY_REF_NONE); +- g_assert (parser->state.ref_info. +- entry[frame_header->ref_frame_idx[frame_header->primary_ref_frame]]. +- ref_valid); +- memcpy (seg_params, +- &parser->state.ref_info. +- entry[frame_header->ref_frame_idx[frame_header-> +- primary_ref_frame]].ref_segmentation_params, +- sizeof (GstAV1SegmenationParams)); +- +- seg_params->segmentation_update_map = 0; +- seg_params->segmentation_temporal_update = 0; +- seg_params->segmentation_update_data = 0; ++ if (frame_header->primary_ref_frame >= GST_AV1_PRIMARY_REF_NONE) { ++ GST_WARNING ("Invalid primary_ref_frame %d", ++ frame_header->primary_ref_frame); ++ return GST_AV1_PARSER_BITSTREAM_ERROR; ++ } ++ ++ ref_idx = frame_header->ref_frame_idx[frame_header->primary_ref_frame]; ++ if (ref_idx >= GST_AV1_NUM_REF_FRAMES || ref_idx < 0) { ++ GST_WARNING ("Invalid ref_frame_idx %d", ref_idx); ++ return GST_AV1_PARSER_BITSTREAM_ERROR; ++ } ++ ++ if (!parser->state.ref_info.entry[ref_idx].ref_valid) { ++ GST_WARNING ("Reference frame at index %d is unavailable", ref_idx); ++ return GST_AV1_PARSER_BITSTREAM_ERROR; ++ } ++ ++ ref_seg_params = ++ &parser->state.ref_info.entry[ref_idx].ref_segmentation_params; ++ ++ for (i = 0; i < GST_AV1_MAX_SEGMENTS; i++) { ++ for (j = 0; j < GST_AV1_SEG_LVL_MAX; j++) { ++ seg_params->feature_enabled[i][j] = ++ ref_seg_params->feature_enabled[i][j]; ++ seg_params->feature_data[i][j] = ref_seg_params->feature_data[i][j]; ++ } ++ } + } + } else { + seg_params->segmentation_update_map = 0; +@@ -2096,7 +2220,6 @@ gst_av1_parse_tile_info (GstAV1Parser * parser, GstBitReader * br, + gint max_width /* maxWidth */ , max_height /* maxHeight */ ; + gint size_sb /* sizeSb */ ; + gint widest_tile_sb /* widestTileSb */ ; +- gint min_inner_tile_width = G_MAXINT /* min width of non-rightmost tile */ ; + + g_assert (parser->seq_header); + seq_header = parser->seq_header; +@@ -2137,14 +2260,21 @@ gst_av1_parse_tile_info (GstAV1Parser * parser, GstBitReader * br, + tile_width_sb = (sb_cols + (1 << parser->state.tile_cols_log2) - + 1) >> parser->state.tile_cols_log2; + i = 0; +- for (start_sb = 0; start_sb < sb_cols; start_sb += tile_width_sb) { ++ /* Fill mi_col_starts[] and make sure to not exceed array range */ ++ for (start_sb = 0; start_sb < sb_cols && i < GST_AV1_MAX_TILE_COLS; ++ start_sb += tile_width_sb) { + parser->state.mi_col_starts[i] = start_sb << sb_shift; + i += 1; + } + parser->state.mi_col_starts[i] = parser->state.mi_cols; + parser->state.tile_cols = i; +- if (parser->state.tile_cols > 1) +- min_inner_tile_width = tile_width_sb << sb_size; ++ ++ while (i >= 1) { ++ tile_info->width_in_sbs_minus_1[i - 1] = ++ ((parser->state.mi_col_starts[i] - parser->state.mi_col_starts[i - 1] ++ + ((1 << sb_shift) - 1)) >> sb_shift) - 1; ++ i--; ++ } + + min_log2_tile_rows = MAX (min_log2_tiles - parser->state.tile_cols_log2, 0); + 
parser->state.tile_rows_log2 = min_log2_tile_rows; +@@ -2161,16 +2291,25 @@ gst_av1_parse_tile_info (GstAV1Parser * parser, GstBitReader * br, + tile_height_sb = (sb_rows + (1 << parser->state.tile_rows_log2) - + 1) >> parser->state.tile_rows_log2; + i = 0; +- for (start_sb = 0; start_sb < sb_rows; start_sb += tile_height_sb) { ++ /* Fill mi_row_starts[] and make sure to not exceed array range */ ++ for (start_sb = 0; start_sb < sb_rows && i < GST_AV1_MAX_TILE_ROWS; ++ start_sb += tile_height_sb) { + parser->state.mi_row_starts[i] = start_sb << sb_shift; + i += 1; + } + parser->state.mi_row_starts[i] = parser->state.mi_rows; + parser->state.tile_rows = i; ++ while (i >= 1) { ++ tile_info->height_in_sbs_minus_1[i - 1] = ++ ((parser->state.mi_row_starts[i] - parser->state.mi_row_starts[i - 1] ++ + ((1 << sb_shift) - 1)) >> sb_shift) - 1; ++ i--; ++ } + } else { + widest_tile_sb = 0; + start_sb = 0; +- for (i = 0; start_sb < sb_cols; i++) { ++ /* Fill mi_col_starts[] and make sure to not exceed array range */ ++ for (i = 0; start_sb < sb_cols && i < GST_AV1_MAX_TILE_COLS; i++) { + parser->state.mi_col_starts[i] = start_sb << sb_shift; + max_width = MIN (sb_cols - start_sb, max_tile_width_sb); + tile_info->width_in_sbs_minus_1[i] = +@@ -2181,8 +2320,6 @@ gst_av1_parse_tile_info (GstAV1Parser * parser, GstBitReader * br, + size_sb = tile_info->width_in_sbs_minus_1[i] + 1; + widest_tile_sb = MAX (size_sb, widest_tile_sb); + start_sb += size_sb; +- if (i > 0 && ((size_sb << sb_size) < min_inner_tile_width)) +- min_inner_tile_width = size_sb << sb_size; + } + parser->state.mi_col_starts[i] = parser->state.mi_cols; + parser->state.tile_cols = i; +@@ -2197,7 +2334,8 @@ gst_av1_parse_tile_info (GstAV1Parser * parser, GstBitReader * br, + max_tile_height_sb = MAX (max_tile_area_sb / widest_tile_sb, 1); + + start_sb = 0; +- for (i = 0; start_sb < sb_rows; i++) { ++ /* Fill mi_row_starts[] and make sure to not exceed array range */ ++ for (i = 0; start_sb < sb_rows && i < GST_AV1_MAX_TILE_ROWS; i++) { + parser->state.mi_row_starts[i] = start_sb << sb_shift; + max_height = MIN (sb_rows - start_sb, max_tile_height_sb); + tile_info->height_in_sbs_minus_1[i] = +@@ -2222,7 +2360,7 @@ gst_av1_parse_tile_info (GstAV1Parser * parser, GstBitReader * br, + if (retval != GST_AV1_PARSER_OK) + goto error; + +- tile_info->tile_size_bytes_minus_1 = AV1_READ_BIT_CHECKED (br, &retval); ++ tile_info->tile_size_bytes_minus_1 = AV1_READ_BITS_CHECKED (br, 2, &retval); + if (retval != GST_AV1_PARSER_OK) + goto error; + +@@ -2231,13 +2369,6 @@ gst_av1_parse_tile_info (GstAV1Parser * parser, GstBitReader * br, + tile_info->context_update_tile_id = 0; + } + +- if (min_inner_tile_width < (64 << (parser->state.upscaled_width != +- parser->state.frame_width))) { +- GST_INFO ("Minimum tile width requirement not satisfied"); +- retval = GST_AV1_PARSER_BITSTREAM_ERROR; +- goto error; +- } +- + memcpy (tile_info->mi_col_starts, parser->state.mi_col_starts, + sizeof (guint32) * (GST_AV1_MAX_TILE_COLS + 1)); + memcpy (tile_info->mi_row_starts, parser->state.mi_row_starts, +@@ -2270,13 +2401,8 @@ gst_av1_parse_loop_filter_params (GstAV1Parser * parser, + lf_params = &frame_header->loop_filter_params; + + if (frame_header->coded_lossless || frame_header->allow_intrabc) { +- lf_params->loop_filter_delta_enabled = 0; +- lf_params->loop_filter_delta_update = 0; +- lf_params->loop_filter_sharpness = 0; + lf_params->loop_filter_level[0] = 0; + lf_params->loop_filter_level[1] = 0; +- lf_params->loop_filter_level[2] = 0; +- 
lf_params->loop_filter_level[3] = 0; + lf_params->loop_filter_ref_deltas[GST_AV1_REF_INTRA_FRAME] = 1; + lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST_FRAME] = 0; + lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST2_FRAME] = 0; +@@ -2291,58 +2417,6 @@ gst_av1_parse_loop_filter_params (GstAV1Parser * parser, + goto success; + } + +- lf_params->loop_filter_delta_enabled = 0; +- lf_params->loop_filter_delta_update = 0; +- lf_params->loop_filter_sharpness = 0; +- lf_params->loop_filter_level[0] = 0; +- lf_params->loop_filter_level[1] = 0; +- lf_params->loop_filter_level[2] = 0; +- lf_params->loop_filter_level[3] = 0; +- if (frame_header->primary_ref_frame != GST_AV1_PRIMARY_REF_NONE) { +- /* Copy it from prime_ref */ +- GstAV1LoopFilterParams *ref_lf_params = +- &parser->state.ref_info.entry[frame_header-> +- ref_frame_idx[frame_header->primary_ref_frame]].ref_lf_params; +- +- g_assert (parser->state.ref_info. +- entry[frame_header->ref_frame_idx[frame_header->primary_ref_frame]]. +- ref_valid); +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_INTRA_FRAME] = +- ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_INTRA_FRAME]; +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST_FRAME] = +- ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST_FRAME]; +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST2_FRAME] = +- ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST2_FRAME]; +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST3_FRAME] = +- ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST3_FRAME]; +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_BWDREF_FRAME] = +- ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_BWDREF_FRAME]; +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_GOLDEN_FRAME] = +- ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_GOLDEN_FRAME]; +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_ALTREF2_FRAME] = +- ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_ALTREF2_FRAME]; +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_ALTREF_FRAME] = +- ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_ALTREF_FRAME]; +- for (i = 0; i < 2; i++) +- lf_params->loop_filter_mode_deltas[i] = +- ref_lf_params->loop_filter_mode_deltas[i]; +- } else { +- /* Set default value */ +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_INTRA_FRAME] = 1; +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST_FRAME] = 0; +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST2_FRAME] = +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST_FRAME]; +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST3_FRAME] = +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST_FRAME]; +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_BWDREF_FRAME] = +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST_FRAME]; +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_GOLDEN_FRAME] = -1; +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_ALTREF2_FRAME] = -1; +- lf_params->loop_filter_ref_deltas[GST_AV1_REF_ALTREF_FRAME] = -1; +- for (i = 0; i < 2; i++) +- lf_params->loop_filter_mode_deltas[i] = 0; +- } +- + if (AV1_REMAINING_BITS (br) < 6 + 6) { + retval = GST_AV1_PARSER_NO_MORE_DATA; + goto error; +@@ -2386,8 +2460,7 @@ gst_av1_parse_loop_filter_params (GstAV1Parser * parser, + av1_bitstreamfn_su (br, 7, &retval); + if (retval != GST_AV1_PARSER_OK) + goto error; +- } else +- lf_params->loop_filter_ref_deltas[i] = 0; ++ } + } + for (i = 0; i < 2; i++) { + update_mode_deltas = AV1_READ_BIT_CHECKED (br, &retval); +@@ -2399,8 +2472,7 @@ gst_av1_parse_loop_filter_params (GstAV1Parser * parser, + av1_bitstreamfn_su (br, 7, 
&retval); + if (retval != GST_AV1_PARSER_OK) + goto error; +- } else +- lf_params->loop_filter_mode_deltas[i] = 0; ++ } + } + } + } +@@ -2546,9 +2618,9 @@ gst_av1_parse_loop_restoration_params (GstAV1Parser * parser, + + if (frame_header->all_lossless || frame_header->allow_intrabc + || !seq_header->enable_restoration) { +- lr_params->frame_restoration_type[0] = GST_AV1_FRAME_RESTORE_NONE; +- lr_params->frame_restoration_type[0] = GST_AV1_FRAME_RESTORE_NONE; +- lr_params->frame_restoration_type[0] = GST_AV1_FRAME_RESTORE_NONE; ++ for (i = 0; i < GST_AV1_MAX_NUM_PLANES; i++) ++ lr_params->frame_restoration_type[i] = GST_AV1_FRAME_RESTORE_NONE; ++ + lr_params->uses_lr = 0; + goto success; + } +@@ -2851,6 +2923,66 @@ gst_av1_parse_global_param (GstAV1Parser * parser, + return GST_AV1_PARSER_OK; + } + ++static gboolean ++gst_av1_parser_is_shear_params_valid (gint32 gm_params[6]) ++{ ++ const gint32 *mat = gm_params; ++ gint16 alpha, beta, gamma, delta; ++ gint16 shift; ++ gint16 y; ++ gint16 v; ++ guint i; ++ gboolean default_warp_params; ++ ++ if (!(mat[2] > 0)) ++ return FALSE; ++ ++ default_warp_params = TRUE; ++ for (i = 0; i < 6; i++) { ++ if (gm_params[i] != ((i % 3 == 2) ? 1 << GST_AV1_WARPEDMODEL_PREC_BITS : 0)) { ++ default_warp_params = FALSE; ++ break; ++ } ++ } ++ if (default_warp_params) ++ return TRUE; ++ ++ alpha = CLAMP (mat[2] - (1 << GST_AV1_WARPEDMODEL_PREC_BITS), ++ G_MININT16, G_MAXINT16); ++ beta = CLAMP (mat[3], G_MININT16, G_MAXINT16); ++ y = av1_helper_resolve_divisor_32 (ABS (mat[2]), &shift) ++ * (mat[2] < 0 ? -1 : 1); ++ v = ((gint64) mat[4] * (1 << GST_AV1_WARPEDMODEL_PREC_BITS)) * y; ++ gamma = ++ CLAMP ((gint) av1_helper_round_power_of_two_signed (v, shift), G_MININT16, ++ G_MAXINT16); ++ v = ((gint64) mat[3] * mat[4]) * y; ++ delta = ++ CLAMP (mat[5] - (gint) av1_helper_round_power_of_two_signed (v, ++ shift) - (1 << GST_AV1_WARPEDMODEL_PREC_BITS), G_MININT16, ++ G_MAXINT16); ++ ++ alpha = ++ av1_helper_round_power_of_two_signed (alpha, ++ GST_AV1_WARP_PARAM_REDUCE_BITS) * (1 << GST_AV1_WARP_PARAM_REDUCE_BITS); ++ beta = ++ av1_helper_round_power_of_two_signed (beta, ++ GST_AV1_WARP_PARAM_REDUCE_BITS) * (1 << GST_AV1_WARP_PARAM_REDUCE_BITS); ++ gamma = ++ av1_helper_round_power_of_two_signed (gamma, ++ GST_AV1_WARP_PARAM_REDUCE_BITS) * (1 << GST_AV1_WARP_PARAM_REDUCE_BITS); ++ delta = ++ av1_helper_round_power_of_two_signed (delta, ++ GST_AV1_WARP_PARAM_REDUCE_BITS) * (1 << GST_AV1_WARP_PARAM_REDUCE_BITS); ++ ++ if ((4 * ABS (alpha) + 7 * ABS (beta) >= (1 << GST_AV1_WARPEDMODEL_PREC_BITS)) ++ || (4 * ABS (gamma) + 4 * ABS (delta) >= ++ (1 << GST_AV1_WARPEDMODEL_PREC_BITS))) ++ return FALSE; ++ ++ return TRUE; ++} ++ + /* 5.9.24 */ + static GstAV1ParserResult + gst_av1_parse_global_motion_params (GstAV1Parser * parser, +@@ -2865,6 +2997,7 @@ gst_av1_parse_global_motion_params (GstAV1Parser * parser, + /* init value */ + gm_params->gm_type[GST_AV1_REF_INTRA_FRAME] = GST_AV1_WARP_MODEL_IDENTITY; + for (ref = GST_AV1_REF_LAST_FRAME; ref <= GST_AV1_REF_ALTREF_FRAME; ref++) { ++ gm_params->invalid[ref] = 0; + gm_params->gm_type[ref] = GST_AV1_WARP_MODEL_IDENTITY; + for (i = 0; i < 6; i++) { + gm_params->gm_params[ref][i] = +@@ -2956,6 +3089,10 @@ gst_av1_parse_global_motion_params (GstAV1Parser * parser, + if (retval != GST_AV1_PARSER_OK) + goto error; + } ++ ++ if (type <= GST_AV1_WARP_MODEL_AFFINE) ++ gm_params->invalid[ref] = ++ !gst_av1_parser_is_shear_params_valid (gm_params->gm_params[ref]); + } + + success: +@@ -3375,6 +3512,64 @@ gst_av1_set_frame_refs 
(GstAV1Parser * parser, + frame_header->ref_frame_idx[i] = ref; + } + ++/* 7.21 */ ++static void ++gst_av1_parser_reference_frame_loading (GstAV1Parser * parser, ++ GstAV1FrameHeaderOBU * frame_header) ++{ ++ GstAV1ReferenceFrameInfo *ref_info = &(parser->state.ref_info); ++ gint idx = frame_header->frame_to_show_map_idx; ++ GstAV1TileInfo *ref_tile_info = &ref_info->entry[idx].ref_tile_info; ++ const gint all_frames = (1 << GST_AV1_NUM_REF_FRAMES) - 1; ++ ++ /* copy the relevant frame information as these will be needed by ++ * all subclasses. */ ++ frame_header->frame_type = ref_info->entry[idx].ref_frame_type; ++ frame_header->upscaled_width = ref_info->entry[idx].ref_upscaled_width; ++ frame_header->frame_width = ref_info->entry[idx].ref_frame_width; ++ frame_header->frame_height = ref_info->entry[idx].ref_frame_height; ++ frame_header->render_width = ref_info->entry[idx].ref_render_width; ++ frame_header->render_height = ref_info->entry[idx].ref_render_height; ++ ++ if (parser->seq_header->film_grain_params_present) ++ frame_header->film_grain_params = ++ ref_info->entry[idx].ref_film_grain_params; ++ ++ /* the remaining is only relevant to ensure proper state update and only ++ * keyframe updates the state. */ ++ if (frame_header->frame_type != GST_AV1_KEY_FRAME) ++ return; ++ ++ frame_header->refresh_frame_flags = all_frames; ++ frame_header->current_frame_id = ref_info->entry[idx].ref_frame_id; ++ frame_header->order_hint = ref_info->entry[idx].ref_order_hint; ++ frame_header->segmentation_params = ++ ref_info->entry[idx].ref_segmentation_params; ++ frame_header->global_motion_params = ++ ref_info->entry[idx].ref_global_motion_params; ++ frame_header->loop_filter_params = ref_info->entry[idx].ref_lf_params; ++ frame_header->tile_info = *ref_tile_info; ++ ++ parser->state.current_frame_id = ref_info->entry[idx].ref_frame_id; ++ parser->state.upscaled_width = ref_info->entry[idx].ref_upscaled_width; ++ parser->state.frame_width = ref_info->entry[idx].ref_frame_width; ++ parser->state.frame_height = ref_info->entry[idx].ref_frame_height; ++ parser->state.render_width = ref_info->entry[idx].ref_render_width; ++ parser->state.render_height = ref_info->entry[idx].ref_render_height; ++ parser->state.mi_cols = ref_info->entry[idx].ref_mi_cols; ++ parser->state.mi_rows = ref_info->entry[idx].ref_mi_rows; ++ ++ memcpy (parser->state.mi_col_starts, ref_tile_info->mi_col_starts, ++ sizeof (guint32) * (GST_AV1_MAX_TILE_COLS + 1)); ++ memcpy (parser->state.mi_row_starts, ref_tile_info->mi_row_starts, ++ sizeof (guint32) * (GST_AV1_MAX_TILE_ROWS + 1)); ++ parser->state.tile_cols_log2 = ref_tile_info->tile_cols_log2; ++ parser->state.tile_cols = ref_tile_info->tile_cols; ++ parser->state.tile_rows_log2 = ref_tile_info->tile_rows_log2; ++ parser->state.tile_rows = ref_tile_info->tile_rows; ++ parser->state.tile_size_bytes = ref_tile_info->tile_size_bytes; ++} ++ + /* 5.9.2 */ + static GstAV1ParserResult + gst_av1_parse_uncompressed_frame_header (GstAV1Parser * parser, GstAV1OBU * obu, +@@ -3460,16 +3655,7 @@ gst_av1_parse_uncompressed_frame_header (GstAV1Parser * parser, GstAV1OBU * obu, + } + } + +- frame_header->frame_type = +- ref_info->entry[frame_header->frame_to_show_map_idx].ref_frame_type; +- if (frame_header->frame_type == GST_AV1_KEY_FRAME) { +- frame_header->refresh_frame_flags = all_frames; +- } +- +- /* just use the frame_to_show's grain_params +- * if (seq_header->film_grain_params_present) +- * load_grain_params () */ +- ++ gst_av1_parser_reference_frame_loading (parser, 
frame_header); + goto success; + } + +@@ -3875,13 +4061,61 @@ gst_av1_parse_uncompressed_frame_header (GstAV1Parser * parser, GstAV1OBU * obu, + goto error; + } + ++ if (frame_header->primary_ref_frame == GST_AV1_PRIMARY_REF_NONE) { ++ /* do something in setup_past_independence() of parser level */ ++ gint8 *loop_filter_ref_deltas = ++ frame_header->loop_filter_params.loop_filter_ref_deltas; ++ ++ frame_header->loop_filter_params.loop_filter_delta_enabled = 1; ++ loop_filter_ref_deltas[GST_AV1_REF_INTRA_FRAME] = 1; ++ loop_filter_ref_deltas[GST_AV1_REF_LAST_FRAME] = 0; ++ loop_filter_ref_deltas[GST_AV1_REF_LAST2_FRAME] = 0; ++ loop_filter_ref_deltas[GST_AV1_REF_LAST3_FRAME] = 0; ++ loop_filter_ref_deltas[GST_AV1_REF_BWDREF_FRAME] = 0; ++ loop_filter_ref_deltas[GST_AV1_REF_GOLDEN_FRAME] = -1; ++ loop_filter_ref_deltas[GST_AV1_REF_ALTREF_FRAME] = -1; ++ loop_filter_ref_deltas[GST_AV1_REF_ALTREF2_FRAME] = -1; ++ frame_header->loop_filter_params.loop_filter_mode_deltas[0] = 0; ++ frame_header->loop_filter_params.loop_filter_mode_deltas[1] = 0; ++ } else { ++ /* do something in load_previous() of parser level */ ++ /* load_loop_filter_params() */ ++ GstAV1LoopFilterParams *ref_lf_params = ++ &parser->state.ref_info.entry[frame_header-> ++ ref_frame_idx[frame_header->primary_ref_frame]].ref_lf_params; ++ gint8 *loop_filter_ref_deltas = ++ frame_header->loop_filter_params.loop_filter_ref_deltas; ++ ++ /* Copy all from prime_ref */ ++ g_assert (parser->state.ref_info. ++ entry[frame_header->ref_frame_idx[frame_header->primary_ref_frame]]. ++ ref_valid); ++ loop_filter_ref_deltas[GST_AV1_REF_INTRA_FRAME] = ++ ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_INTRA_FRAME]; ++ loop_filter_ref_deltas[GST_AV1_REF_LAST_FRAME] = ++ ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST_FRAME]; ++ loop_filter_ref_deltas[GST_AV1_REF_LAST2_FRAME] = ++ ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST2_FRAME]; ++ loop_filter_ref_deltas[GST_AV1_REF_LAST3_FRAME] = ++ ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_LAST3_FRAME]; ++ loop_filter_ref_deltas[GST_AV1_REF_BWDREF_FRAME] = ++ ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_BWDREF_FRAME]; ++ loop_filter_ref_deltas[GST_AV1_REF_GOLDEN_FRAME] = ++ ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_GOLDEN_FRAME]; ++ loop_filter_ref_deltas[GST_AV1_REF_ALTREF2_FRAME] = ++ ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_ALTREF2_FRAME]; ++ loop_filter_ref_deltas[GST_AV1_REF_ALTREF_FRAME] = ++ ref_lf_params->loop_filter_ref_deltas[GST_AV1_REF_ALTREF_FRAME]; ++ for (i = 0; i < 2; i++) ++ frame_header->loop_filter_params.loop_filter_mode_deltas[i] = ++ ref_lf_params->loop_filter_mode_deltas[i]; ++ } ++ + /* @TODO: + if ( primary_ref_frame == PRIMARY_REF_NONE ) { + init_non_coeff_cdfs( ) +- setup_past_independence( ) + } else { + load_cdfs( ref_frame_idx[primary_ref_frame] ) +- load_previous( ) + } + */ + /* @TODO: +@@ -4005,74 +4239,6 @@ error: + return retval; + } + +-/* 7.21 */ +-/** +- * gst_av1_parser_reference_frame_loading: +- * @parser: the #GstAV1Parser +- * @frame_header: a #GstAV1FrameHeaderOBU to load +- * +- * Load the context of @frame_header to parser's state. This function is +- * used when we want to show already parsed frames before. +- * +- * Returns: The #GstAV1ParserResult. 
+- * +- * Since: 1.18 +- */ +-GstAV1ParserResult +-gst_av1_parser_reference_frame_loading (GstAV1Parser * parser, +- GstAV1FrameHeaderOBU * frame_header) +-{ +- GstAV1ReferenceFrameInfo *ref_info; +- GstAV1TileInfo *ref_tile_info; +- +- g_return_val_if_fail (parser != NULL, GST_AV1_PARSER_INVALID_OPERATION); +- g_return_val_if_fail (frame_header != NULL, GST_AV1_PARSER_INVALID_OPERATION); +- +- if (!parser->seq_header) { +- GST_WARNING ("Missing OBU Reference: seq_header"); +- return GST_AV1_PARSER_MISSING_OBU_REFERENCE; +- } +- +- ref_info = &(parser->state.ref_info); +- +- if (frame_header->frame_to_show_map_idx > GST_AV1_NUM_REF_FRAMES - 1) +- return GST_AV1_PARSER_BITSTREAM_ERROR; +- +- g_assert (ref_info->entry[frame_header->frame_to_show_map_idx].ref_valid); +- +- parser->state.current_frame_id = +- ref_info->entry[frame_header->frame_to_show_map_idx].ref_frame_id; +- parser->state.upscaled_width = +- ref_info->entry[frame_header->frame_to_show_map_idx].ref_upscaled_width; +- parser->state.frame_width = +- ref_info->entry[frame_header->frame_to_show_map_idx].ref_frame_width; +- parser->state.frame_height = +- ref_info->entry[frame_header->frame_to_show_map_idx].ref_frame_height; +- parser->state.render_width = +- ref_info->entry[frame_header->frame_to_show_map_idx].ref_render_width; +- parser->state.render_height = +- ref_info->entry[frame_header->frame_to_show_map_idx].ref_render_height; +- parser->state.mi_cols = +- ref_info->entry[frame_header->frame_to_show_map_idx].ref_mi_cols; +- parser->state.mi_rows = +- ref_info->entry[frame_header->frame_to_show_map_idx].ref_mi_rows; +- +- ref_tile_info = +- &ref_info->entry[frame_header->frame_to_show_map_idx].ref_tile_info; +- +- memcpy (parser->state.mi_col_starts, ref_tile_info->mi_col_starts, +- sizeof (guint32) * (GST_AV1_MAX_TILE_COLS + 1)); +- memcpy (parser->state.mi_row_starts, ref_tile_info->mi_row_starts, +- sizeof (guint32) * (GST_AV1_MAX_TILE_ROWS + 1)); +- parser->state.tile_cols_log2 = ref_tile_info->tile_cols_log2; +- parser->state.tile_cols = ref_tile_info->tile_cols; +- parser->state.tile_rows_log2 = ref_tile_info->tile_rows_log2; +- parser->state.tile_rows = ref_tile_info->tile_rows; +- parser->state.tile_size_bytes = ref_tile_info->tile_size_bytes; +- +- return GST_AV1_PARSER_OK; +-} +- + /** + * gst_av1_parser_reference_frame_update: + * @parser: the #GstAV1Parser +@@ -4181,6 +4347,13 @@ gst_av1_parser_parse_tile_list_obu (GstAV1Parser * parser, + tile_list->output_frame_width_in_tiles_minus_1 = AV1_READ_BITS (br, 8); + tile_list->output_frame_height_in_tiles_minus_1 = AV1_READ_BITS (br, 8); + tile_list->tile_count_minus_1 = AV1_READ_BITS (br, 16); ++ if (tile_list->tile_count_minus_1 + 1 > GST_AV1_MAX_TILE_COUNT) { ++ GST_WARNING ("Invalid tile_count_minus_1 %d", ++ tile_list->tile_count_minus_1); ++ retval = GST_AV1_PARSER_BITSTREAM_ERROR; ++ goto error; ++ } ++ + for (tile = 0; tile <= tile_list->tile_count_minus_1; tile++) { + if (AV1_REMAINING_BITS (br) < 8 + 8 + 8 + 16) { + retval = GST_AV1_PARSER_NO_MORE_DATA; +@@ -4253,6 +4426,11 @@ gst_av1_parse_tile_group (GstAV1Parser * parser, GstBitReader * br, + goto error; + } + ++ if (tile_group->tg_end < tile_group->tg_start) { ++ retval = GST_AV1_PARSER_NO_MORE_DATA; ++ goto error; ++ } ++ + if (!gst_bit_reader_skip_to_byte (br)) { + retval = GST_AV1_PARSER_NO_MORE_DATA; + goto error; +@@ -4261,6 +4439,7 @@ gst_av1_parse_tile_group (GstAV1Parser * parser, GstBitReader * br, + end_bit_pos = gst_bit_reader_get_pos (br); + header_bytes = (end_bit_pos - start_bitpos) / 8; 
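As an aside to the tile-group hunks above: the parser now records each tile's offset, size, row and column in tile_group->entry[], so a consumer (for instance a stateless hardware decoder) can hand tile payloads over without re-reading the tile_size fields itself. A minimal sketch of such a consumer, under the assumption of a hypothetical dump_tile_group() helper and obu_data/obu_len arguments, using only fields this series adds:

#include <gst/codecparsers/gstav1parser.h>

static void
dump_tile_group (const GstAV1TileGroupOBU * tile_group,
    const guint8 * obu_data, gsize obu_len)
{
  guint tile_num;

  for (tile_num = tile_group->tg_start; tile_num <= tile_group->tg_end;
      tile_num++) {
    guint32 offset = tile_group->entry[tile_num].tile_offset;
    guint32 size = tile_group->entry[tile_num].tile_size;

    /* Offsets are relative to the start of the OBU data, as the header
     * documentation below states; stop on anything that overruns it. */
    if (offset > obu_len || size > obu_len - offset)
      break;

    g_print ("tile %u (row %u, col %u): %u bytes at offset %u\n", tile_num,
        tile_group->entry[tile_num].tile_row,
        tile_group->entry[tile_num].tile_col, size, offset);
  }
}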
+ sz -= header_bytes; ++ + for (tile_num = tile_group->tg_start; tile_num <= tile_group->tg_end; + tile_num++) { + tile_row = tile_num / parser->state.tile_cols; +@@ -4274,9 +4453,14 @@ gst_av1_parse_tile_group (GstAV1Parser * parser, GstBitReader * br, + if (retval != GST_AV1_PARSER_OK) + goto error; + tile_size = tile_size_minus_1 + 1; +- sz -= tile_size - parser->state.tile_size_bytes; ++ sz -= (tile_size + parser->state.tile_size_bytes); + } + ++ tile_group->entry[tile_num].tile_size = tile_size; ++ tile_group->entry[tile_num].tile_offset = gst_bit_reader_get_pos (br) / 8; ++ tile_group->entry[tile_num].tile_row = tile_row; ++ tile_group->entry[tile_num].tile_col = tile_col; ++ + tile_group->entry[tile_num].mi_row_start = + parser->state.mi_row_starts[tile_row]; + tile_group->entry[tile_num].mi_row_end = +@@ -4292,20 +4476,22 @@ gst_av1_parse_tile_group (GstAV1Parser * parser, GstBitReader * br, + */ + + /* Skip the real data to the next one */ +- if (!gst_bit_reader_skip (br, tile_size)) { ++ if (tile_num < tile_group->tg_end && ++ !gst_bit_reader_skip (br, tile_size * 8)) { + retval = GST_AV1_PARSER_NO_MORE_DATA; + goto error; + } + } + +- /* Not implement here, the real decoder process +- if (tile_group->tg_end == tile_group->num_tiles - 1) { +- if ( !disable_frame_end_update_cdf ) { +- frame_end_update_cdf( ) +- } +- decode_frame_wrapup( ) +- } +- */ ++ if (tile_group->tg_end == tile_group->num_tiles - 1) { ++ /* Not implement here, the real decoder process ++ if ( !disable_frame_end_update_cdf ) { ++ frame_end_update_cdf( ) ++ } ++ decode_frame_wrapup( ) ++ */ ++ parser->state.seen_frame_header = 0; ++ } + + return GST_AV1_PARSER_OK; + +@@ -4355,9 +4541,14 @@ gst_av1_parse_frame_header (GstAV1Parser * parser, GstAV1OBU * obu, + GstBitReader * bit_reader, GstAV1FrameHeaderOBU * frame_header) + { + GstAV1ParserResult ret; ++ guint i; + + memset (frame_header, 0, sizeof (*frame_header)); + frame_header->frame_is_intra = 1; ++ frame_header->last_frame_idx = -1; ++ frame_header->gold_frame_idx = -1; ++ for (i = 0; i < GST_AV1_REFS_PER_FRAME; i++) ++ frame_header->ref_frame_idx[i] = -1; + + ret = gst_av1_parse_uncompressed_frame_header (parser, obu, bit_reader, + frame_header); +@@ -4462,10 +4653,36 @@ gst_av1_parser_parse_frame_obu (GstAV1Parser * parser, GstAV1OBU * obu, + return GST_AV1_PARSER_NO_MORE_DATA; + + retval = gst_av1_parse_tile_group (parser, &bit_reader, &(frame->tile_group)); +- parser->state.seen_frame_header = 0; + return retval; + } + ++/** ++ * gst_av1_parser_set_operating_point: ++ * @parser: the #GstAV1Parser ++ * @operating_point: the operating point to set ++ * ++ * Set the operating point to filter OBUs. ++ * ++ * Returns: The #GstAV1ParserResult. ++ * ++ * Since: 1.20 ++ */ ++GstAV1ParserResult ++gst_av1_parser_set_operating_point (GstAV1Parser * parser, ++ gint32 operating_point) ++{ ++ g_return_val_if_fail (parser != NULL, GST_AV1_PARSER_INVALID_OPERATION); ++ g_return_val_if_fail (operating_point >= 0, GST_AV1_PARSER_INVALID_OPERATION); ++ ++ if (parser->seq_header && ++ operating_point > parser->seq_header->operating_points_cnt_minus_1) ++ return GST_AV1_PARSER_INVALID_OPERATION; ++ ++ /* Decide whether it is valid when sequence comes. 
*/ ++ parser->state.operating_point = operating_point; ++ return GST_AV1_PARSER_OK; ++} ++ + /** + * gst_av1_parser_new: + * +diff --git a/gst-libs/gst/codecparsers/gstav1parser.h b/gst-libs/gst/codecparsers/gstav1parser.h +index 4b49a356e..7d2ec69fb 100644 +--- a/gst-libs/gst/codecparsers/gstav1parser.h ++++ b/gst-libs/gst/codecparsers/gstav1parser.h +@@ -52,7 +52,6 @@ G_BEGIN_DECLS + #define GST_AV1_SUPERRES_DENOM_MIN 9 + #define GST_AV1_SUPERRES_DENOM_BITS 3 + #define GST_AV1_MAX_LOOP_FILTER 63 +-#define GST_AV1_GM_ABS_ALPHA_BITS 12 + #define GST_AV1_GM_ABS_TRANS_BITS 12 + #define GST_AV1_GM_ABS_TRANS_ONLY_BITS 9 + #define GST_AV1_GM_ABS_ALPHA_BITS 12 +@@ -60,6 +59,7 @@ G_BEGIN_DECLS + #define GST_AV1_GM_TRANS_PREC_BITS 6 + #define GST_AV1_GM_TRANS_ONLY_PREC_BITS 3 + #define GST_AV1_WARPEDMODEL_PREC_BITS 16 ++#define GST_AV1_WARP_PARAM_REDUCE_BITS 6 + #define GST_AV1_SELECT_SCREEN_CONTENT_TOOLS 2 + #define GST_AV1_SELECT_INTEGER_MV 2 + #define GST_AV1_RESTORATION_TILESIZE_MAX 256 +@@ -71,15 +71,19 @@ G_BEGIN_DECLS + #define GST_AV1_MAX_TILE_COUNT 512 + #define GST_AV1_MAX_OPERATING_POINTS \ + (GST_AV1_MAX_NUM_TEMPORAL_LAYERS * GST_AV1_MAX_NUM_SPATIAL_LAYERS) +-#define GST_AV1_MAX_SPATIAL_LAYERS 2 /* correct? */ +-#define GST_AV1_MAX_TEMPORAL_GROUP_SIZE 8 /* correct? */ +-#define GST_AV1_MAX_TEMPORAL_GROUP_REFERENCES 8 /* correct? */ ++#define GST_AV1_MAX_TEMPORAL_GROUP_SIZE 255 ++#define GST_AV1_MAX_TEMPORAL_GROUP_REFERENCES 7 + #define GST_AV1_MAX_NUM_Y_POINTS 16 + #define GST_AV1_MAX_NUM_CB_POINTS 16 + #define GST_AV1_MAX_NUM_CR_POINTS 16 + #define GST_AV1_MAX_NUM_POS_LUMA 25 + #define GST_AV1_MAX_NUM_PLANES 3 + ++#define GST_AV1_DIV_LUT_PREC_BITS 14 ++#define GST_AV1_DIV_LUT_BITS 8 ++#define GST_AV1_DIV_LUT_NUM (1 << GST_AV1_DIV_LUT_BITS) ++ ++ + typedef struct _GstAV1Parser GstAV1Parser; + + typedef struct _GstAV1OBUHeader GstAV1OBUHeader; +@@ -137,13 +141,22 @@ typedef enum { + * @GST_AV1_PROFILE_0: 8-bit and 10-bit 4:2:0 and 4:0:0 only. + * @GST_AV1_PROFILE_1: 8-bit and 10-bit 4:4:4. 
+ * @GST_AV1_PROFILE_2: 8-bit and 10-bit 4:2:2, 12-bit 4:0:0 4:2:2 and 4:4:4 ++ * @GST_AV1_PROFILE_UNDEFINED: unknow AV1 profile (Since: 1.20) + * + * Defines the AV1 profiles + */ ++/** ++ * GST_AV1_PROFILE_UNDEFINED: ++ * ++ * unknow AV1 profile ++ * ++ * Since: 1.20 ++ */ + typedef enum { + GST_AV1_PROFILE_0 = 0, + GST_AV1_PROFILE_1 = 1, + GST_AV1_PROFILE_2 = 2, ++ GST_AV1_PROFILE_UNDEFINED, + } GstAV1Profile; + + /** +@@ -954,9 +967,9 @@ struct _GstAV1MetadataScalability { + gboolean spatial_layer_dimensions_present_flag; + gboolean spatial_layer_description_present_flag; + gboolean temporal_group_description_present_flag; +- guint16 spatial_layer_max_width[GST_AV1_MAX_SPATIAL_LAYERS]; +- guint16 spatial_layer_max_height[GST_AV1_MAX_SPATIAL_LAYERS]; +- guint8 spatial_layer_ref_id[GST_AV1_MAX_SPATIAL_LAYERS]; ++ guint16 spatial_layer_max_width[GST_AV1_MAX_NUM_SPATIAL_LAYERS]; ++ guint16 spatial_layer_max_height[GST_AV1_MAX_NUM_SPATIAL_LAYERS]; ++ guint8 spatial_layer_ref_id[GST_AV1_MAX_NUM_SPATIAL_LAYERS]; + guint8 temporal_group_size; + + guint8 temporal_group_temporal_id[GST_AV1_MAX_TEMPORAL_GROUP_SIZE]; +@@ -1087,8 +1100,8 @@ struct _GstAV1LoopFilterParams { + gboolean loop_filter_delta_enabled; + gboolean loop_filter_delta_update; + +- guint8 loop_filter_ref_deltas[GST_AV1_TOTAL_REFS_PER_FRAME]; +- guint8 loop_filter_mode_deltas[2]; ++ gint8 loop_filter_ref_deltas[GST_AV1_TOTAL_REFS_PER_FRAME]; ++ gint8 loop_filter_mode_deltas[2]; + + gboolean delta_lf_present; + guint8 delta_lf_res; +@@ -1258,6 +1271,14 @@ struct _GstAV1LoopRestorationParams { + * @gm_params: is set equal to SavedGmParams[ frame_to_show_map_idx ][ ref ][ j ] for + * ref = LAST_FRAME..ALTREF_FRAME, for j = 0..5. + * @gm_type: specifying the type of global motion. ++ * @invalid: whether this global motion parameters is invalid. (Since: 1.20) ++ */ ++/** ++ * _GstAV1GlobalMotionParams.invalid: ++ * ++ * whether this global motion parameters is invalid. ++ * ++ * Since: 1.20 + */ + struct _GstAV1GlobalMotionParams { + gboolean is_global[GST_AV1_NUM_REF_FRAMES]; +@@ -1266,6 +1287,7 @@ struct _GstAV1GlobalMotionParams { + gint32 gm_params[GST_AV1_NUM_REF_FRAMES][6]; + + GstAV1WarpModelType gm_type[GST_AV1_NUM_REF_FRAMES]; /* GmType */ ++ gboolean invalid[GST_AV1_NUM_REF_FRAMES]; + }; + + /** +@@ -1398,7 +1420,7 @@ struct _GstAV1FilmGrainParams { + * of bitstream conformance that whenever @display_frame_id is read, the value matches + * @ref_frame_id[ @frame_to_show_map_idx ] (the value of @current_frame_id at the time that the + * frame indexed by @frame_to_show_map_idx was stored), and that +- * @ref_valid[ @frame_to_show_map_idx ] is equjal to 1. It is a requirement of bitstream ++ * @ref_valid[ @frame_to_show_map_idx ] is equal to 1. It is a requirement of bitstream + * conformance that the number of bits needed to read @display_frame_id does not exceed 16. + * This is equivalent to the constraint that idLen <= 16 + * @frame_type: specifies the type of the frame. 
+@@ -1516,7 +1538,7 @@ struct _GstAV1FilmGrainParams { + */ + struct _GstAV1FrameHeaderOBU { + gboolean show_existing_frame; +- guint8 frame_to_show_map_idx; ++ gint8 frame_to_show_map_idx; + guint32 frame_presentation_time; + guint32 tu_presentation_delay; + guint32 display_frame_id; +@@ -1537,9 +1559,9 @@ struct _GstAV1FrameHeaderOBU { + guint32 ref_order_hint[GST_AV1_NUM_REF_FRAMES]; + gboolean allow_intrabc; + gboolean frame_refs_short_signaling; +- guint8 last_frame_idx; +- guint8 gold_frame_idx; +- guint8 ref_frame_idx[GST_AV1_REFS_PER_FRAME]; ++ gint8 last_frame_idx; ++ gint8 gold_frame_idx; ++ gint8 ref_frame_idx[GST_AV1_REFS_PER_FRAME]; + gboolean allow_high_precision_mv; + gboolean is_motion_mode_switchable; + gboolean use_ref_frame_mvs; +@@ -1636,7 +1658,7 @@ struct _GstAV1TileListOBU { + guint8 output_frame_height_in_tiles_minus_1; + guint16 tile_count_minus_1; + struct { +- guint8 anchor_frame_idx; ++ gint8 anchor_frame_idx; + guint8 anchor_tile_row; + guint8 anchor_tile_col; + guint16 tile_data_size_minus_1; +@@ -1659,6 +1681,10 @@ struct _GstAV1TileListOBU { + * It is a requirement of bitstream conformance that the value of tg_end is greater + * than or equal to tg_start. It is a requirement of bitstream conformance that the + * value of tg_end for the last tile group in each frame is equal to num_tiles-1. ++ * @tile_offset: Offset from the OBU data, the real data start of this tile. ++ * @tg_size: Data size of this tile. ++ * @tile_row: Tile index in row. ++ * @tile_col: Tile index in column. + * @mi_row_start: start position in mi rows + * @mi_row_end: end position in mi rows + * @mi_col_start: start position in mi cols +@@ -1670,6 +1696,10 @@ struct _GstAV1TileGroupOBU { + guint8 tg_start; + guint8 tg_end; + struct { ++ guint32 tile_offset; /* Tile data offset from the OBU data. 
*/ ++ guint32 tile_size; /* Data size of this tile */ ++ guint32 tile_row; /* tileRow */ ++ guint32 tile_col; /* tileCol */ + /* global varialbes */ + guint32 mi_row_start; /* MiRowStart */ + guint32 mi_row_end; /* MiRowEnd */ +@@ -1745,6 +1775,10 @@ GST_CODEC_PARSERS_API + void + gst_av1_parser_reset (GstAV1Parser * parser, gboolean annex_b); + ++GST_CODEC_PARSERS_API ++void ++gst_av1_parser_reset_annex_b (GstAV1Parser * parser); ++ + GST_CODEC_PARSERS_API + GstAV1ParserResult + gst_av1_parser_identify_one_obu (GstAV1Parser * parser, const guint8 * data, +@@ -1787,13 +1821,13 @@ gst_av1_parser_parse_frame_obu (GstAV1Parser * parser, GstAV1OBU * obu, + + GST_CODEC_PARSERS_API + GstAV1ParserResult +-gst_av1_parser_reference_frame_loading (GstAV1Parser * parser, ++gst_av1_parser_reference_frame_update (GstAV1Parser * parser, + GstAV1FrameHeaderOBU * frame_header); + + GST_CODEC_PARSERS_API + GstAV1ParserResult +-gst_av1_parser_reference_frame_update (GstAV1Parser * parser, +- GstAV1FrameHeaderOBU * frame_header); ++gst_av1_parser_set_operating_point (GstAV1Parser * parser, ++ gint32 operating_point); + + GST_CODEC_PARSERS_API + GstAV1Parser * gst_av1_parser_new (void); +diff --git a/gst-libs/gst/codecparsers/gsth264bitwriter.c b/gst-libs/gst/codecparsers/gsth264bitwriter.c +new file mode 100644 +index 000000000..b4ae920af +--- /dev/null ++++ b/gst-libs/gst/codecparsers/gsth264bitwriter.c +@@ -0,0 +1,1641 @@ ++/* GStreamer ++ * Copyright (C) 2020 Intel Corporation ++ * Author: He Junyan ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the0 ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifdef HAVE_CONFIG_H ++# include "config.h" ++#endif ++ ++#include "gsth264bitwriter.h" ++#include ++#include ++ ++/******************************** Utils ********************************/ ++#define SIGNED(val) (2 * ABS(val) - ((val) > 0)) ++ ++/* Write an unsigned integer Exp-Golomb-coded syntax element. i.e. ue(v) */ ++static gboolean ++_bs_write_ue (GstBitWriter * bs, guint32 value) ++{ ++ guint32 size_in_bits = 0; ++ guint32 tmp_value = ++value; ++ ++ while (tmp_value) { ++ ++size_in_bits; ++ tmp_value >>= 1; ++ } ++ if (size_in_bits > 1 ++ && !gst_bit_writer_put_bits_uint32 (bs, 0, size_in_bits - 1)) ++ return FALSE; ++ if (!gst_bit_writer_put_bits_uint32 (bs, value, size_in_bits)) ++ return FALSE; ++ return TRUE; ++} ++ ++#define WRITE_BITS_UNCHECK(bw, val, nbits) \ ++ (nbits <= 8 ? gst_bit_writer_put_bits_uint8 (bw, val, nbits) : \ ++ (nbits <= 16 ? gst_bit_writer_put_bits_uint16 (bw, val, nbits) : \ ++ (nbits <= 32 ? 
gst_bit_writer_put_bits_uint32 (bw, val, nbits) : \ ++ FALSE))) ++ ++#define WRITE_BITS(bw, val, nbits) \ ++ if (!WRITE_BITS_UNCHECK (bw, val, nbits)) { \ ++ g_warning ("Unsupported bit size: %u", nbits); \ ++ have_space = FALSE; \ ++ goto error; \ ++ } ++ ++#define WRITE_UE_UNCHECK(bw, val) _bs_write_ue (bw, val) ++ ++#ifdef WRITE_UE ++#undef WRITE_UE ++#endif ++#define WRITE_UE(bw, val) \ ++ if (!(have_space = WRITE_UE_UNCHECK (bw, val))) \ ++ goto error; \ ++ ++#define WRITE_UE_MAX(bw, val, max) \ ++ if ((guint32) val > (max) || !(have_space = WRITE_UE_UNCHECK (bw, val))) \ ++ goto error; ++ ++#define WRITE_SE(bw, val) WRITE_UE (bw, SIGNED (val)) ++ ++#define WRITE_SE_RANGE(bw, val, min, max) \ ++ if (val > max || val < min || \ ++ !(have_space = WRITE_UE_UNCHECK (bw, SIGNED (val)))) \ ++ goto error; ++ ++#define WRITE_BYTES_UNCHECK(bw, ptr, nbytes) \ ++ gst_bit_writer_put_bytes(bw, ptr, nbytes) ++ ++#ifdef WRITE_BYTES ++#undef WRITE_BYTES ++#endif ++#define WRITE_BYTES(bw, ptr, nbytes) \ ++ if (!(have_space = WRITE_BYTES_UNCHECK (bw, ptr, nbytes))) \ ++ goto error; ++ ++/***************************** End of Utils ****************************/ ++ ++/**** Default scaling_lists according to Table 7-2 *****/ ++static const guint8 default_4x4_intra[16] = { ++ 6, 13, 13, 20, 20, 20, 28, 28, 28, 28, 32, 32, ++ 32, 37, 37, 42 ++}; ++ ++static const guint8 default_4x4_inter[16] = { ++ 10, 14, 14, 20, 20, 20, 24, 24, 24, 24, 27, 27, ++ 27, 30, 30, 34 ++}; ++ ++static const guint8 default_8x8_intra[64] = { ++ 6, 10, 10, 13, 11, 13, 16, 16, 16, 16, 18, 18, ++ 18, 18, 18, 23, 23, 23, 23, 23, 23, 25, 25, 25, 25, 25, 25, 25, 27, 27, 27, ++ 27, 27, 27, 27, 27, 29, 29, 29, 29, 29, 29, 29, 31, 31, 31, 31, 31, 31, 33, ++ 33, 33, 33, 33, 36, 36, 36, 36, 38, 38, 38, 40, 40, 42 ++}; ++ ++static const guint8 default_8x8_inter[64] = { ++ 9, 13, 13, 15, 13, 15, 17, 17, 17, 17, 19, 19, ++ 19, 19, 19, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 24, 24, 24, ++ 24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 27, 27, 27, 27, 27, 27, 28, ++ 28, 28, 28, 28, 30, 30, 30, 30, 32, 32, 32, 33, 33, 35 ++}; ++ ++static gboolean ++_h264_bit_writer_scaling_list (GstBitWriter * bw, gboolean * space, ++ const guint8 scaling_lists_4x4[6][16], ++ const guint8 scaling_lists_8x8[6][64], const guint8 fallback_4x4_inter[16], ++ const guint8 fallback_4x4_intra[16], const guint8 fallback_8x8_inter[64], ++ const guint8 fallback_8x8_intra[64], guint8 n_lists) ++{ ++ gboolean have_space = TRUE; ++ guint i, j; ++ ++ const guint8 *default_lists[12] = { ++ fallback_4x4_intra, fallback_4x4_intra, fallback_4x4_intra, ++ fallback_4x4_inter, fallback_4x4_inter, fallback_4x4_inter, ++ fallback_8x8_intra, fallback_8x8_inter, ++ fallback_8x8_intra, fallback_8x8_inter, ++ fallback_8x8_intra, fallback_8x8_inter ++ }; ++ ++ GST_DEBUG ("writing scaling lists"); ++ ++ for (i = 0; i < 12; i++) { ++ if (i < n_lists) { ++ guint8 scaling_list_present_flag = FALSE; ++ const guint8 *scaling_list; ++ guint size; ++ ++ if (i < 6) { ++ scaling_list = scaling_lists_4x4[i]; ++ size = 16; ++ } else { ++ scaling_list = scaling_lists_8x8[i - 6]; ++ size = 64; ++ } ++ ++ if (memcmp (scaling_list, default_lists[i], size)) ++ scaling_list_present_flag = TRUE; ++ ++ WRITE_BITS (bw, scaling_list_present_flag, 1); ++ if (scaling_list_present_flag) { ++ guint8 last_scale, next_scale; ++ gint8 delta_scale; ++ ++ for (j = 0; j < size; j++) { ++ last_scale = next_scale = 8; ++ ++ for (j = 0; j < size; j++) { ++ if (next_scale != 0) { ++ delta_scale = (gint8) 
(scaling_list[j] - last_scale); ++ ++ WRITE_SE (bw, delta_scale); ++ ++ next_scale = scaling_list[j]; ++ } ++ last_scale = (next_scale == 0) ? last_scale : next_scale; ++ } ++ } ++ } ++ } ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write scaling lists"); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h264_bit_writer_hrd_parameters (const GstH264HRDParams * hrd, ++ GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ guint sched_sel_idx; ++ ++ GST_DEBUG ("writing \"HRD Parameters\""); ++ ++ WRITE_UE_MAX (bw, hrd->cpb_cnt_minus1, 31); ++ WRITE_BITS (bw, hrd->bit_rate_scale, 4); ++ WRITE_BITS (bw, hrd->cpb_size_scale, 4); ++ ++ for (sched_sel_idx = 0; sched_sel_idx <= hrd->cpb_cnt_minus1; sched_sel_idx++) { ++ WRITE_UE (bw, hrd->bit_rate_value_minus1[sched_sel_idx]); ++ WRITE_UE (bw, hrd->cpb_size_value_minus1[sched_sel_idx]); ++ WRITE_BITS (bw, hrd->cbr_flag[sched_sel_idx], 1); ++ } ++ ++ WRITE_BITS (bw, hrd->initial_cpb_removal_delay_length_minus1, 5); ++ WRITE_BITS (bw, hrd->cpb_removal_delay_length_minus1, 5); ++ WRITE_BITS (bw, hrd->dpb_output_delay_length_minus1, 5); ++ WRITE_BITS (bw, hrd->time_offset_length, 5); ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write \"HRD Parameters\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++#define EXTENDED_SAR 255 ++ ++static gboolean ++_h264_bit_writer_vui_parameters (const GstH264SPS * sps, GstBitWriter * bw, ++ gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ const GstH264VUIParams *vui = &sps->vui_parameters; ++ ++ GST_DEBUG ("writing \"VUI Parameters\""); ++ ++ WRITE_BITS (bw, vui->aspect_ratio_info_present_flag, 1); ++ if (vui->aspect_ratio_info_present_flag) { ++ WRITE_BITS (bw, vui->aspect_ratio_idc, 8); ++ if (vui->aspect_ratio_idc == EXTENDED_SAR) { ++ WRITE_BITS (bw, vui->sar_width, 16); ++ WRITE_BITS (bw, vui->sar_height, 16); ++ } ++ } ++ ++ WRITE_BITS (bw, vui->overscan_info_present_flag, 1); ++ if (vui->overscan_info_present_flag) ++ WRITE_BITS (bw, vui->overscan_appropriate_flag, 1); ++ ++ WRITE_BITS (bw, vui->video_signal_type_present_flag, 1); ++ if (vui->video_signal_type_present_flag) { ++ WRITE_BITS (bw, vui->video_format, 3); ++ WRITE_BITS (bw, vui->video_full_range_flag, 1); ++ WRITE_BITS (bw, vui->colour_description_present_flag, 1); ++ if (vui->colour_description_present_flag) { ++ WRITE_BITS (bw, vui->colour_primaries, 8); ++ WRITE_BITS (bw, vui->transfer_characteristics, 8); ++ WRITE_BITS (bw, vui->matrix_coefficients, 8); ++ } ++ } ++ ++ WRITE_BITS (bw, vui->chroma_loc_info_present_flag, 1); ++ if (vui->chroma_loc_info_present_flag) { ++ WRITE_UE_MAX (bw, vui->chroma_sample_loc_type_top_field, 5); ++ WRITE_UE_MAX (bw, vui->chroma_sample_loc_type_bottom_field, 5); ++ } ++ ++ WRITE_BITS (bw, vui->timing_info_present_flag, 1); ++ if (vui->timing_info_present_flag) { ++ WRITE_BITS (bw, vui->num_units_in_tick, 32); ++ if (vui->num_units_in_tick == 0) ++ GST_WARNING ("num_units_in_tick = 0 write to stream " ++ "(incompliant to H.264 E.2.1)."); ++ ++ WRITE_BITS (bw, vui->time_scale, 32); ++ if (vui->time_scale == 0) ++ GST_WARNING ("time_scale = 0 write to stream " ++ "(incompliant to H.264 E.2.1)."); ++ ++ WRITE_BITS (bw, vui->fixed_frame_rate_flag, 1); ++ } ++ ++ WRITE_BITS (bw, vui->nal_hrd_parameters_present_flag, 1); ++ if (vui->nal_hrd_parameters_present_flag) { ++ if (!_h264_bit_writer_hrd_parameters (&vui->nal_hrd_parameters, bw, ++ &have_space)) ++ goto error; ++ } ++ ++ WRITE_BITS (bw, 
vui->vcl_hrd_parameters_present_flag, 1); ++ if (vui->vcl_hrd_parameters_present_flag) { ++ if (!_h264_bit_writer_hrd_parameters (&vui->vcl_hrd_parameters, bw, ++ &have_space)) ++ goto error; ++ } ++ ++ if (vui->nal_hrd_parameters_present_flag || ++ vui->vcl_hrd_parameters_present_flag) ++ WRITE_BITS (bw, vui->low_delay_hrd_flag, 1); ++ ++ WRITE_BITS (bw, vui->pic_struct_present_flag, 1); ++ WRITE_BITS (bw, vui->bitstream_restriction_flag, 1); ++ if (vui->bitstream_restriction_flag) { ++ WRITE_BITS (bw, vui->motion_vectors_over_pic_boundaries_flag, 1); ++ WRITE_UE (bw, vui->max_bytes_per_pic_denom); ++ WRITE_UE_MAX (bw, vui->max_bits_per_mb_denom, 16); ++ WRITE_UE_MAX (bw, vui->log2_max_mv_length_horizontal, 16); ++ WRITE_UE_MAX (bw, vui->log2_max_mv_length_vertical, 16); ++ WRITE_UE (bw, vui->num_reorder_frames); ++ WRITE_UE (bw, vui->max_dec_frame_buffering); ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write \"VUI Parameters\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h264_bit_writer_sps (const GstH264SPS * sps, GstBitWriter * bw, ++ gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ ++ GST_DEBUG ("writing SPS"); ++ ++ WRITE_BITS (bw, sps->profile_idc, 8); ++ WRITE_BITS (bw, sps->constraint_set0_flag, 1); ++ WRITE_BITS (bw, sps->constraint_set1_flag, 1); ++ WRITE_BITS (bw, sps->constraint_set2_flag, 1); ++ WRITE_BITS (bw, sps->constraint_set3_flag, 1); ++ WRITE_BITS (bw, sps->constraint_set4_flag, 1); ++ WRITE_BITS (bw, sps->constraint_set5_flag, 1); ++ /* reserved_zero_2bits */ ++ WRITE_BITS (bw, 0, 2); ++ ++ WRITE_BITS (bw, sps->level_idc, 8); ++ ++ WRITE_UE_MAX (bw, sps->id, GST_H264_MAX_SPS_COUNT - 1); ++ ++ if (sps->profile_idc == 100 || sps->profile_idc == 110 || ++ sps->profile_idc == 122 || sps->profile_idc == 244 || ++ sps->profile_idc == 44 || sps->profile_idc == 83 || ++ sps->profile_idc == 86 || sps->profile_idc == 118 || ++ sps->profile_idc == 128 || sps->profile_idc == 138 || ++ sps->profile_idc == 139 || sps->profile_idc == 134 || ++ sps->profile_idc == 135) { ++ WRITE_UE_MAX (bw, sps->chroma_format_idc, 3); ++ if (sps->chroma_format_idc == 3) ++ WRITE_BITS (bw, sps->separate_colour_plane_flag, 1); ++ ++ WRITE_UE_MAX (bw, sps->bit_depth_luma_minus8, 6); ++ WRITE_UE_MAX (bw, sps->bit_depth_chroma_minus8, 6); ++ WRITE_BITS (bw, sps->qpprime_y_zero_transform_bypass_flag, 1); ++ ++ WRITE_BITS (bw, sps->scaling_matrix_present_flag, 1); ++ if (sps->scaling_matrix_present_flag) { ++ guint8 n_lists; ++ ++ n_lists = (sps->chroma_format_idc != 3) ? 
8 : 12; ++ if (!_h264_bit_writer_scaling_list (bw, &have_space, ++ sps->scaling_lists_4x4, sps->scaling_lists_8x8, ++ default_4x4_inter, default_4x4_intra, ++ default_8x8_inter, default_8x8_intra, n_lists)) ++ goto error; ++ } ++ } ++ ++ WRITE_UE_MAX (bw, sps->log2_max_frame_num_minus4, 12); ++ ++ WRITE_UE_MAX (bw, sps->pic_order_cnt_type, 2); ++ if (sps->pic_order_cnt_type == 0) { ++ WRITE_UE_MAX (bw, sps->log2_max_pic_order_cnt_lsb_minus4, 12); ++ } else if (sps->pic_order_cnt_type == 1) { ++ guint i; ++ ++ WRITE_BITS (bw, sps->delta_pic_order_always_zero_flag, 1); ++ WRITE_SE (bw, sps->offset_for_non_ref_pic); ++ WRITE_SE (bw, sps->offset_for_top_to_bottom_field); ++ WRITE_UE_MAX (bw, sps->num_ref_frames_in_pic_order_cnt_cycle, 255); ++ ++ for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++) ++ WRITE_SE (bw, sps->offset_for_ref_frame[i]); ++ } ++ ++ WRITE_UE (bw, sps->num_ref_frames); ++ WRITE_BITS (bw, sps->gaps_in_frame_num_value_allowed_flag, 1); ++ WRITE_UE (bw, sps->pic_width_in_mbs_minus1); ++ WRITE_UE (bw, sps->pic_height_in_map_units_minus1); ++ WRITE_BITS (bw, sps->frame_mbs_only_flag, 1); ++ ++ if (!sps->frame_mbs_only_flag) ++ WRITE_BITS (bw, sps->mb_adaptive_frame_field_flag, 1); ++ ++ WRITE_BITS (bw, sps->direct_8x8_inference_flag, 1); ++ WRITE_BITS (bw, sps->frame_cropping_flag, 1); ++ if (sps->frame_cropping_flag) { ++ WRITE_UE (bw, sps->frame_crop_left_offset); ++ WRITE_UE (bw, sps->frame_crop_right_offset); ++ WRITE_UE (bw, sps->frame_crop_top_offset); ++ WRITE_UE (bw, sps->frame_crop_bottom_offset); ++ } ++ ++ WRITE_BITS (bw, sps->vui_parameters_present_flag, 1); ++ if (sps->vui_parameters_present_flag) ++ if (!_h264_bit_writer_vui_parameters (sps, bw, &have_space)) ++ goto error; ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write sps"); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++/** ++ * gst_h264_bit_writer_sps: ++ * @sps: the sps of #GstH264SPS to write ++ * @start_code: whether adding the nal start code ++ * @data: (out): the bit stream generated by the sps ++ * @size: (inout): the size in bytes of the input and output ++ * ++ * Generating the according h264 bit stream by providing the sps. ++ * ++ * Returns: a #GstH264BitWriterResult ++ * ++ * Since: 1.22 ++ **/ ++GstH264BitWriterResult ++gst_h264_bit_writer_sps (const GstH264SPS * sps, gboolean start_code, ++ guint8 * data, guint * size) ++{ ++ gboolean have_space = TRUE; ++ GstBitWriter bw; ++ ++ g_return_val_if_fail (sps != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (data != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (size != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (*size > 0, GST_H264_BIT_WRITER_ERROR); ++ ++ gst_bit_writer_init_with_data (&bw, data, *size, FALSE); ++ ++ if (start_code) ++ WRITE_BITS (&bw, 0x00000001, 32); ++ ++ /* nal header */ ++ /* forbidden_zero_bit */ ++ WRITE_BITS (&bw, 0, 1); ++ /* nal_ref_idc */ ++ WRITE_BITS (&bw, 1, 2); ++ /* nal_unit_type */ ++ WRITE_BITS (&bw, GST_H264_NAL_SPS, 5); ++ ++ if (!_h264_bit_writer_sps (sps, &bw, &have_space)) ++ goto error; ++ ++ /* Add trailings. */ ++ WRITE_BITS (&bw, 1, 1); ++ if (!gst_bit_writer_align_bytes (&bw, 0)) { ++ have_space = FALSE; ++ goto error; ++ } ++ ++ *size = (gst_bit_writer_get_size (&bw)) / 8; ++ gst_bit_writer_reset (&bw); ++ ++ return GST_H264_BIT_WRITER_OK; ++ ++error: ++ gst_bit_writer_reset (&bw); ++ *size = 0; ++ ++ return have_space ? 
GST_H264_BIT_WRITER_INVALID_DATA : ++ GST_H264_BIT_WRITER_NO_MORE_SPACE; ++} ++ ++static gboolean ++_h264_bit_writer_pps (const GstH264PPS * pps, GstBitWriter * bw, ++ gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ gint qp_bd_offset; ++ ++ GST_DEBUG ("writing SPS"); ++ ++ qp_bd_offset = 6 * (pps->sequence->bit_depth_luma_minus8 + ++ pps->sequence->separate_colour_plane_flag); ++ ++ WRITE_UE_MAX (bw, pps->id, GST_H264_MAX_PPS_COUNT - 1); ++ WRITE_UE_MAX (bw, pps->sequence->id, GST_H264_MAX_SPS_COUNT - 1); ++ ++ WRITE_BITS (bw, pps->entropy_coding_mode_flag, 1); ++ WRITE_BITS (bw, pps->pic_order_present_flag, 1); ++ ++ WRITE_UE_MAX (bw, pps->num_slice_groups_minus1, 7); ++ if (pps->num_slice_groups_minus1 > 0) { ++ WRITE_UE_MAX (bw, pps->slice_group_map_type, 6); ++ ++ if (pps->slice_group_map_type == 0) { ++ gint i; ++ ++ for (i = 0; i <= pps->num_slice_groups_minus1; i++) ++ WRITE_UE (bw, pps->run_length_minus1[i]); ++ } else if (pps->slice_group_map_type == 2) { ++ gint i; ++ ++ for (i = 0; i < pps->num_slice_groups_minus1; i++) { ++ WRITE_UE (bw, pps->top_left[i]); ++ WRITE_UE (bw, pps->bottom_right[i]); ++ } ++ } else if (pps->slice_group_map_type >= 3 && pps->slice_group_map_type <= 5) { ++ WRITE_BITS (bw, pps->slice_group_change_direction_flag, 1); ++ WRITE_UE (bw, pps->slice_group_change_rate_minus1); ++ } else if (pps->slice_group_map_type == 6) { ++ gint bits; ++ gint i; ++ ++ WRITE_UE (bw, pps->pic_size_in_map_units_minus1); ++ bits = g_bit_storage (pps->num_slice_groups_minus1); ++ ++ g_assert (pps->slice_group_id); ++ for (i = 0; i <= pps->pic_size_in_map_units_minus1; i++) ++ WRITE_BITS (bw, pps->slice_group_id[i], bits); ++ } ++ } ++ ++ WRITE_UE_MAX (bw, pps->num_ref_idx_l0_active_minus1, 31); ++ WRITE_UE_MAX (bw, pps->num_ref_idx_l1_active_minus1, 31); ++ WRITE_BITS (bw, pps->weighted_pred_flag, 1); ++ WRITE_BITS (bw, pps->weighted_bipred_idc, 2); ++ WRITE_SE_RANGE (bw, pps->pic_init_qp_minus26, -(26 + qp_bd_offset), 25); ++ WRITE_SE_RANGE (bw, pps->pic_init_qs_minus26, -26, 25); ++ WRITE_SE_RANGE (bw, pps->chroma_qp_index_offset, -12, 12); ++ ++ WRITE_BITS (bw, pps->deblocking_filter_control_present_flag, 1); ++ WRITE_BITS (bw, pps->constrained_intra_pred_flag, 1); ++ WRITE_BITS (bw, pps->redundant_pic_cnt_present_flag, 1); ++ ++ /* A.2.1 Baseline profile, A.2.2 Main profile and ++ A.2.3 Extended profile: ++ The syntax elements transform_8x8_mode_flag, ++ pic_scaling_matrix_present_flag, second_chroma_qp_index_offset ++ shall not be present in picture parameter sets. */ ++ if (pps->sequence->profile_idc == 66 || ++ pps->sequence->profile_idc == 77 || pps->sequence->profile_idc == 88) ++ return TRUE; ++ ++ WRITE_BITS (bw, pps->transform_8x8_mode_flag, 1); ++ ++ WRITE_BITS (bw, pps->pic_scaling_matrix_present_flag, 1); ++ ++ if (pps->pic_scaling_matrix_present_flag) { ++ guint8 n_lists; ++ ++ n_lists = 6 + ((pps->sequence->chroma_format_idc != 3) ? 
2 : 6) * ++ pps->transform_8x8_mode_flag; ++ ++ if (pps->sequence->scaling_matrix_present_flag) { ++ if (!_h264_bit_writer_scaling_list (bw, &have_space, ++ pps->scaling_lists_4x4, pps->scaling_lists_8x8, ++ pps->sequence->scaling_lists_4x4[3], ++ pps->sequence->scaling_lists_4x4[0], ++ pps->sequence->scaling_lists_8x8[3], ++ pps->sequence->scaling_lists_8x8[0], n_lists)) ++ goto error; ++ } else { ++ if (!_h264_bit_writer_scaling_list (bw, &have_space, ++ pps->scaling_lists_4x4, pps->scaling_lists_8x8, ++ default_4x4_inter, default_4x4_intra, ++ default_8x8_inter, default_8x8_intra, n_lists)) ++ goto error; ++ } ++ } ++ ++ WRITE_SE_RANGE (bw, ((gint) pps->second_chroma_qp_index_offset), -12, 12); ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write pps"); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++/** ++ * gst_h264_bit_writer_pps: ++ * @pps: the pps of #GstH264PPS to write ++ * @start_code: whether adding the nal start code ++ * @data: (out): the bit stream generated by the pps ++ * @size: (inout): the size in bytes of the input and output ++ * ++ * Generating the according h264 bit stream by providing the pps. ++ * ++ * Returns: a #GstH264BitWriterResult ++ * ++ * Since: 1.22 ++ **/ ++GstH264BitWriterResult ++gst_h264_bit_writer_pps (const GstH264PPS * pps, gboolean start_code, ++ guint8 * data, guint * size) ++{ ++ gboolean have_space = TRUE; ++ GstBitWriter bw; ++ ++ g_return_val_if_fail (pps != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (pps->sequence != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (data != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (size != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (*size > 0, GST_H264_BIT_WRITER_ERROR); ++ ++ gst_bit_writer_init_with_data (&bw, data, *size, FALSE); ++ ++ if (start_code) ++ WRITE_BITS (&bw, 0x00000001, 32); ++ ++ /* nal header */ ++ /* forbidden_zero_bit */ ++ WRITE_BITS (&bw, 0, 1); ++ /* nal_ref_idc */ ++ WRITE_BITS (&bw, 1, 2); ++ /* nal_unit_type */ ++ WRITE_BITS (&bw, GST_H264_NAL_PPS, 5); ++ ++ if (!_h264_bit_writer_pps (pps, &bw, &have_space)) ++ goto error; ++ ++ /* Add trailings. */ ++ WRITE_BITS (&bw, 1, 1); ++ if (!gst_bit_writer_align_bytes (&bw, 0)) { ++ have_space = FALSE; ++ goto error; ++ } ++ ++ *size = (gst_bit_writer_get_size (&bw)) / 8; ++ gst_bit_writer_reset (&bw); ++ return GST_H264_BIT_WRITER_OK; ++ ++error: ++ gst_bit_writer_reset (&bw); ++ *size = 0; ++ ++ return have_space ? 
GST_H264_BIT_WRITER_INVALID_DATA : ++ GST_H264_BIT_WRITER_NO_MORE_SPACE; ++} ++ ++static gboolean ++_h264_slice_bit_writer_ref_pic_list_modification_1 (const GstH264SliceHdr * ++ slice, guint list, gboolean is_mvc, GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ const GstH264RefPicListModification *entries; ++ guint8 ref_pic_list_modification_flag = 0; ++ guint i; ++ ++ if (list == 0) { ++ entries = slice->ref_pic_list_modification_l0; ++ ref_pic_list_modification_flag = slice->ref_pic_list_modification_flag_l0; ++ } else { ++ entries = slice->ref_pic_list_modification_l1; ++ ref_pic_list_modification_flag = slice->ref_pic_list_modification_flag_l1; ++ } ++ ++ WRITE_BITS (bw, ref_pic_list_modification_flag, 1); ++ if (ref_pic_list_modification_flag) { ++ i = 0; ++ do { ++ g_assert (i < 32); ++ ++ WRITE_UE (bw, entries[i].modification_of_pic_nums_idc); ++ if (entries[i].modification_of_pic_nums_idc == 0 || ++ entries[i].modification_of_pic_nums_idc == 1) { ++ WRITE_UE_MAX (bw, entries[i].value.abs_diff_pic_num_minus1, ++ slice->max_pic_num - 1); ++ } else if (entries[i].modification_of_pic_nums_idc == 2) { ++ WRITE_UE (bw, entries[i].value.long_term_pic_num); ++ } else if (is_mvc && (entries[i].modification_of_pic_nums_idc == 4 || ++ entries[i].modification_of_pic_nums_idc == 5)) { ++ WRITE_UE (bw, entries[i].value.abs_diff_view_idx_minus1); ++ } ++ } while (entries[i++].modification_of_pic_nums_idc != 3); ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write \"Reference picture list %u modification\"", ++ list); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h264_slice_bit_writer_ref_pic_list_modification (const GstH264SliceHdr * ++ slice, gboolean is_mvc, GstBitWriter * bw, gboolean * space) ++{ ++ if (!GST_H264_IS_I_SLICE (slice) && !GST_H264_IS_SI_SLICE (slice)) { ++ if (!_h264_slice_bit_writer_ref_pic_list_modification_1 (slice, 0, ++ is_mvc, bw, space)) ++ return FALSE; ++ } ++ ++ if (GST_H264_IS_B_SLICE (slice)) { ++ if (!_h264_slice_bit_writer_ref_pic_list_modification_1 (slice, 1, ++ is_mvc, bw, space)) ++ return FALSE; ++ } ++ ++ *space = TRUE; ++ return TRUE; ++} ++ ++static gboolean ++_h264_slice_bit_writer_pred_weight_table (const GstH264SliceHdr * slice, ++ guint8 chroma_array_type, GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ const GstH264PredWeightTable *p; ++ gint i; ++ gint16 default_luma_weight, default_chroma_weight; ++ ++ GST_DEBUG ("writing \"Prediction weight table\""); ++ ++ p = &slice->pred_weight_table; ++ default_luma_weight = 1 << p->luma_log2_weight_denom; ++ default_chroma_weight = 1 << p->chroma_log2_weight_denom; ++ ++ WRITE_UE_MAX (bw, p->luma_log2_weight_denom, 7); ++ ++ if (chroma_array_type != 0) ++ WRITE_UE_MAX (bw, p->chroma_log2_weight_denom, 7); ++ ++ for (i = 0; i <= slice->num_ref_idx_l0_active_minus1; i++) { ++ guint8 luma_weight_l0_flag = 0; ++ ++ if (p->luma_weight_l0[i] != default_luma_weight || ++ p->luma_offset_l0[i] != 0) ++ luma_weight_l0_flag = 1; ++ ++ WRITE_BITS (bw, luma_weight_l0_flag, 1); ++ if (luma_weight_l0_flag) { ++ WRITE_SE_RANGE (bw, p->luma_weight_l0[i], -128, 127); ++ WRITE_SE_RANGE (bw, p->luma_offset_l0[i], -128, 127); ++ } ++ if (chroma_array_type != 0) { ++ guint8 chroma_weight_l0_flag = 0; ++ gint j; ++ ++ for (j = 0; j < 2; j++) { ++ if (p->chroma_weight_l0[i][j] != default_chroma_weight || ++ p->chroma_offset_l0[i][j] != 0) ++ chroma_weight_l0_flag = 1; ++ } ++ ++ WRITE_BITS (bw, chroma_weight_l0_flag, 1); 
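The weight and offset fields written here, like most signed syntax elements in the new bit writer, go through WRITE_SE/WRITE_SE_RANGE: the signed value is mapped to an unsigned codeNum by SIGNED() and then emitted as an Exp-Golomb code by _bs_write_ue(). A small self-contained sketch of that mapping, handy when sanity-checking the writer's output (the show_se_mapping() helper name is hypothetical, not part of the patch):

#include <glib.h>

/* se(v): a signed value v maps to codeNum 2*|v| - (v > 0); ue(v) then emits
 * floor(log2(codeNum + 1)) zero bits followed by codeNum + 1 in binary. */
static void
show_se_mapping (gint32 v)
{
  guint32 code_num = 2 * ABS (v) - (v > 0);
  guint32 bits = 0, tmp = code_num + 1;

  while (tmp) {                 /* bit length of codeNum + 1 */
    bits++;
    tmp >>= 1;
  }

  /* Total code length: (bits - 1) leading zeros plus bits value bits,
   * which is what _bs_write_ue() produces for this codeNum. */
  g_print ("se(%d) -> codeNum %u, %u bits on the wire\n",
      v, code_num, 2 * bits - 1);
}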
++ if (chroma_weight_l0_flag) { ++ for (j = 0; j < 2; j++) { ++ WRITE_SE_RANGE (bw, p->chroma_weight_l0[i][j], -128, 127); ++ WRITE_SE_RANGE (bw, p->chroma_offset_l0[i][j], -128, 127); ++ } ++ } ++ } ++ } ++ ++ if (GST_H264_IS_B_SLICE (slice)) { ++ for (i = 0; i <= slice->num_ref_idx_l1_active_minus1; i++) { ++ guint8 luma_weight_l1_flag = 0; ++ ++ if (p->luma_weight_l1[i] != default_luma_weight || ++ p->luma_offset_l1[i] != 0) ++ luma_weight_l1_flag = 1; ++ ++ WRITE_BITS (bw, luma_weight_l1_flag, 1); ++ if (luma_weight_l1_flag) { ++ WRITE_SE_RANGE (bw, p->luma_weight_l1[i], -128, 127); ++ WRITE_SE_RANGE (bw, p->luma_offset_l1[i], -128, 127); ++ } ++ if (chroma_array_type != 0) { ++ guint8 chroma_weight_l1_flag = 0; ++ gint j; ++ ++ for (j = 0; j < 2; j++) { ++ if (p->chroma_weight_l1[i][j] != default_chroma_weight || ++ p->chroma_offset_l1[i][j] != 0) ++ chroma_weight_l1_flag = 1; ++ } ++ ++ WRITE_BITS (bw, chroma_weight_l1_flag, 1); ++ if (chroma_weight_l1_flag) { ++ for (j = 0; j < 2; j++) { ++ WRITE_SE_RANGE (bw, p->chroma_weight_l1[i][j], -128, 127); ++ WRITE_SE_RANGE (bw, p->chroma_offset_l1[i][j], -128, 127); ++ } ++ } ++ } ++ } ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write \"Prediction weight table\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h264_bit_writer_slice_dec_ref_pic_marking (const GstH264SliceHdr * slice, ++ guint32 nal_type, GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ ++ GST_DEBUG ("writing \"Dec Ref Pic Marking\""); ++ ++ if (nal_type == GST_H264_NAL_SLICE_IDR) { ++ WRITE_BITS (bw, slice->dec_ref_pic_marking.no_output_of_prior_pics_flag, 1); ++ WRITE_BITS (bw, slice->dec_ref_pic_marking.long_term_reference_flag, 1); ++ } else { ++ WRITE_BITS (bw, ++ slice->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag, 1); ++ ++ if (slice->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag) { ++ const GstH264RefPicMarking *refpicmarking; ++ guint i; ++ ++ for (i = 0; i < slice->dec_ref_pic_marking.n_ref_pic_marking; i++) { ++ refpicmarking = &slice->dec_ref_pic_marking.ref_pic_marking[i]; ++ ++ WRITE_UE_MAX (bw, ++ refpicmarking->memory_management_control_operation, 6); ++ ++ if (refpicmarking->memory_management_control_operation == 0) ++ break; ++ ++ if (refpicmarking->memory_management_control_operation == 1 ++ || refpicmarking->memory_management_control_operation == 3) ++ WRITE_UE (bw, refpicmarking->difference_of_pic_nums_minus1); ++ ++ if (refpicmarking->memory_management_control_operation == 2) ++ WRITE_UE (bw, refpicmarking->long_term_pic_num); ++ ++ if (refpicmarking->memory_management_control_operation == 3 ++ || refpicmarking->memory_management_control_operation == 6) ++ WRITE_UE (bw, refpicmarking->long_term_frame_idx); ++ ++ if (refpicmarking->memory_management_control_operation == 4) ++ WRITE_UE (bw, refpicmarking->max_long_term_frame_idx_plus1); ++ } ++ } ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write \"Dec Ref Pic Marking\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h264_bit_writer_slice_hdr (const GstH264SliceHdr * slice, guint32 nal_type, ++ guint32 ext_type, gboolean is_ref, GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ ++ GST_DEBUG ("writing slice header"); ++ ++ WRITE_UE (bw, slice->first_mb_in_slice); ++ WRITE_UE (bw, slice->type); ++ ++ WRITE_UE_MAX (bw, slice->pps->id, GST_H264_MAX_PPS_COUNT - 1); ++ ++ if 
(slice->pps->sequence->separate_colour_plane_flag) ++ WRITE_BITS (bw, slice->colour_plane_id, 2); ++ ++ WRITE_BITS (bw, slice->frame_num, ++ slice->pps->sequence->log2_max_frame_num_minus4 + 4); ++ ++ if (!slice->pps->sequence->frame_mbs_only_flag) { ++ WRITE_BITS (bw, slice->field_pic_flag, 1); ++ if (slice->field_pic_flag) ++ WRITE_BITS (bw, slice->bottom_field_flag, 1); ++ } ++ ++ if (nal_type == GST_H264_NAL_SLICE_IDR) ++ WRITE_UE_MAX (bw, slice->idr_pic_id, G_MAXUINT16); ++ ++ if (slice->pps->sequence->pic_order_cnt_type == 0) { ++ WRITE_BITS (bw, slice->pic_order_cnt_lsb, ++ slice->pps->sequence->log2_max_pic_order_cnt_lsb_minus4 + 4); ++ ++ if (slice->pps->pic_order_present_flag && !slice->field_pic_flag) ++ WRITE_SE (bw, slice->delta_pic_order_cnt_bottom); ++ } ++ ++ if (slice->pps->sequence->pic_order_cnt_type == 1 ++ && !slice->pps->sequence->delta_pic_order_always_zero_flag) { ++ WRITE_SE (bw, slice->delta_pic_order_cnt[0]); ++ if (slice->pps->pic_order_present_flag && !slice->field_pic_flag) ++ WRITE_SE (bw, slice->delta_pic_order_cnt[1]); ++ } ++ ++ if (slice->pps->redundant_pic_cnt_present_flag) ++ WRITE_UE_MAX (bw, slice->redundant_pic_cnt, G_MAXINT8); ++ ++ if (GST_H264_IS_B_SLICE (slice)) ++ WRITE_BITS (bw, slice->direct_spatial_mv_pred_flag, 1); ++ ++ if (GST_H264_IS_P_SLICE (slice) || GST_H264_IS_SP_SLICE (slice) || ++ GST_H264_IS_B_SLICE (slice)) { ++ WRITE_BITS (bw, slice->num_ref_idx_active_override_flag, 1); ++ if (slice->num_ref_idx_active_override_flag) { ++ WRITE_UE_MAX (bw, slice->num_ref_idx_l0_active_minus1, 31); ++ ++ if (GST_H264_IS_B_SLICE (slice)) ++ WRITE_UE_MAX (bw, slice->num_ref_idx_l1_active_minus1, 31); ++ } ++ } ++ ++ if (!_h264_slice_bit_writer_ref_pic_list_modification (slice, ++ ext_type == GST_H264_NAL_EXTENSION_MVC, bw, &have_space)) ++ goto error; ++ ++ if ((slice->pps->weighted_pred_flag && (GST_H264_IS_P_SLICE (slice) ++ || GST_H264_IS_SP_SLICE (slice))) ++ || (slice->pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE (slice))) { ++ if (!_h264_slice_bit_writer_pred_weight_table (slice, ++ slice->pps->sequence->chroma_array_type, bw, &have_space)) ++ goto error; ++ } ++ ++ if (is_ref) { ++ if (!_h264_bit_writer_slice_dec_ref_pic_marking (slice, nal_type, bw, ++ &have_space)) ++ goto error; ++ } ++ ++ if (slice->pps->entropy_coding_mode_flag && !GST_H264_IS_I_SLICE (slice) && ++ !GST_H264_IS_SI_SLICE (slice)) ++ WRITE_UE_MAX (bw, slice->cabac_init_idc, 2); ++ ++ WRITE_SE_RANGE (bw, slice->slice_qp_delta, -87, 77); ++ ++ if (GST_H264_IS_SP_SLICE (slice) || GST_H264_IS_SI_SLICE (slice)) { ++ if (GST_H264_IS_SP_SLICE (slice)) ++ WRITE_BITS (bw, slice->sp_for_switch_flag, 1); ++ ++ WRITE_SE_RANGE (bw, slice->slice_qs_delta, -51, 51); ++ } ++ ++ if (slice->pps->deblocking_filter_control_present_flag) { ++ WRITE_UE_MAX (bw, slice->disable_deblocking_filter_idc, 2); ++ if (slice->disable_deblocking_filter_idc != 1) { ++ WRITE_SE_RANGE (bw, slice->slice_alpha_c0_offset_div2, -6, 6); ++ WRITE_SE_RANGE (bw, slice->slice_beta_offset_div2, -6, 6); ++ } ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write slice header"); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++/** ++ * gst_h264_bit_writer_slice_hdr: ++ * @slice: the slice header of #GstH264SliceHdr to write ++ * @start_code: whether adding the nal start code ++ * @nal_type: the slice's nal type of #GstH264NalUnitType ++ * @is_ref: whether the slice is a reference ++ * @data: (out): the bit stream generated by the slice header ++ * @size: (inout): the size in 
bytes of the input and output ++ * @trail_bits_num: (out): the trail bits number which is not byte aligned. ++ * ++ * Generating the according h264 bit stream by providing the slice header. ++ * ++ * Returns: a #GstH264BitWriterResult ++ * ++ * Since: 1.22 ++ **/ ++GstH264BitWriterResult ++gst_h264_bit_writer_slice_hdr (const GstH264SliceHdr * slice, ++ gboolean start_code, GstH264NalUnitType nal_type, gboolean is_ref, ++ guint8 * data, guint * size, guint * trail_bits_num) ++{ ++ gboolean have_space = TRUE; ++ GstBitWriter bw; ++ ++ g_return_val_if_fail (slice != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (slice->pps != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (slice->pps->sequence != NULL, ++ GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (nal_type >= GST_H264_NAL_SLICE ++ && nal_type <= GST_H264_NAL_SLICE_IDR, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (data != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (size != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (*size > 0, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (trail_bits_num != NULL, GST_H264_BIT_WRITER_ERROR); ++ ++ if (nal_type == GST_H264_NAL_SLICE_IDR) ++ g_return_val_if_fail (is_ref, GST_H264_BIT_WRITER_ERROR); ++ ++ gst_bit_writer_init_with_data (&bw, data, *size, FALSE); ++ ++ if (start_code) ++ WRITE_BITS (&bw, 0x00000001, 32); ++ ++ /* nal header */ ++ /* forbidden_zero_bit */ ++ WRITE_BITS (&bw, 0, 1); ++ /* nal_ref_idc, zero for non-reference picture */ ++ WRITE_BITS (&bw, is_ref, 2); ++ /* nal_unit_type */ ++ WRITE_BITS (&bw, nal_type, 5); ++ ++ if (!_h264_bit_writer_slice_hdr (slice, nal_type, ++ GST_H264_NAL_EXTENSION_NONE, is_ref, &bw, &have_space)) ++ goto error; ++ ++ /* We do not add trailing bits here, the slice data should follow it. */ ++ ++ *size = gst_bit_writer_get_size (&bw) / 8; ++ *trail_bits_num = gst_bit_writer_get_size (&bw) % 8; ++ gst_bit_writer_reset (&bw); ++ return GST_H264_BIT_WRITER_OK; ++ ++error: ++ gst_bit_writer_reset (&bw); ++ *size = 0; ++ ++ return have_space ? 
GST_H264_BIT_WRITER_INVALID_DATA : ++ GST_H264_BIT_WRITER_NO_MORE_SPACE; ++} ++ ++static gboolean ++_h264_bit_writer_sei_registered_user_data (const GstH264RegisteredUserData * ++ rud, GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ ++ GST_DEBUG ("Writing \"Registered user data\""); ++ ++ WRITE_BITS (bw, rud->country_code, 8); ++ if (rud->country_code == 0xff) ++ WRITE_BITS (bw, rud->country_code_extension, 8); ++ ++ WRITE_BYTES (bw, rud->data, rud->size); ++ ++ *space = TRUE; ++ return TRUE; ++ ++error:GST_WARNING ("Failed to write \"Registered user data\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h264_bit_writer_sei_frame_packing (const GstH264FramePacking * ++ frame_packing, GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ ++ GST_DEBUG ("Writing \"Frame packing\""); ++ ++ WRITE_UE (bw, frame_packing->frame_packing_id); ++ WRITE_BITS (bw, frame_packing->frame_packing_cancel_flag, 1); ++ ++ if (!frame_packing->frame_packing_cancel_flag) { ++ WRITE_BITS (bw, frame_packing->frame_packing_type, 7); ++ WRITE_BITS (bw, frame_packing->quincunx_sampling_flag, 1); ++ WRITE_BITS (bw, frame_packing->content_interpretation_type, 6); ++ WRITE_BITS (bw, frame_packing->spatial_flipping_flag, 1); ++ WRITE_BITS (bw, frame_packing->frame0_flipped_flag, 1); ++ WRITE_BITS (bw, frame_packing->field_views_flag, 1); ++ WRITE_BITS (bw, frame_packing->current_frame_is_frame0_flag, 1); ++ WRITE_BITS (bw, frame_packing->frame0_self_contained_flag, 1); ++ WRITE_BITS (bw, frame_packing->frame1_self_contained_flag, 1); ++ ++ if (!frame_packing->quincunx_sampling_flag && ++ frame_packing->frame_packing_type != ++ GST_H264_FRAME_PACKING_TEMPORAL_INTERLEAVING) { ++ WRITE_BITS (bw, frame_packing->frame0_grid_position_x, 4); ++ WRITE_BITS (bw, frame_packing->frame0_grid_position_y, 4); ++ WRITE_BITS (bw, frame_packing->frame1_grid_position_x, 4); ++ WRITE_BITS (bw, frame_packing->frame1_grid_position_y, 4); ++ } ++ ++ /* frame_packing_arrangement_reserved_byte */ ++ WRITE_BITS (bw, 0, 8); ++ WRITE_UE (bw, frame_packing->frame_packing_repetition_period); ++ } ++ ++ /* frame_packing_arrangement_extension_flag */ ++ WRITE_BITS (bw, 0, 1); ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("Failed to write \"Frame packing\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h264_bit_writer_sei_mastering_display_colour_volume (const ++ GstH264MasteringDisplayColourVolume * mdcv, GstBitWriter * bw, ++ gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ gint i; ++ ++ GST_DEBUG ("Wrtiting \"Mastering display colour volume\""); ++ ++ for (i = 0; i < 3; i++) { ++ WRITE_BITS (bw, mdcv->display_primaries_x[i], 16); ++ WRITE_BITS (bw, mdcv->display_primaries_y[i], 16); ++ } ++ ++ WRITE_BITS (bw, mdcv->white_point_x, 16); ++ WRITE_BITS (bw, mdcv->white_point_y, 16); ++ WRITE_BITS (bw, mdcv->max_display_mastering_luminance, 32); ++ WRITE_BITS (bw, mdcv->min_display_mastering_luminance, 32); ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("Failed to write \"Mastering display colour volume\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h264_bit_writer_sei_content_light_level_info (const ++ GstH264ContentLightLevel * cll, GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ ++ GST_DEBUG ("Writing \"Content light level\""); ++ ++ WRITE_BITS (bw, cll->max_content_light_level, 16); ++ WRITE_BITS (bw, cll->max_pic_average_light_level, 16); ++ ++ 
*space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("Failed to write \"Content light level\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h264_bit_writer_sei_pic_timing (const GstH264PicTiming * tim, ++ GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ ++ GST_DEBUG ("Writing \"Picture timing\""); ++ ++ if (tim->CpbDpbDelaysPresentFlag) { ++ WRITE_BITS (bw, tim->cpb_removal_delay, ++ tim->cpb_removal_delay_length_minus1 + 1); ++ WRITE_BITS (bw, tim->dpb_output_delay, ++ tim->dpb_output_delay_length_minus1 + 1); ++ } ++ ++ if (tim->pic_struct_present_flag) { ++ const guint8 num_clock_ts_table[9] = { ++ 1, 1, 1, 2, 2, 3, 3, 2, 3 ++ }; ++ guint8 num_clock_num_ts; ++ guint i; ++ ++ WRITE_BITS (bw, tim->pic_struct, 4); ++ ++ num_clock_num_ts = num_clock_ts_table[tim->pic_struct]; ++ for (i = 0; i < num_clock_num_ts; i++) { ++ WRITE_BITS (bw, tim->clock_timestamp_flag[i], 1); ++ if (tim->clock_timestamp_flag[i]) { ++ const GstH264ClockTimestamp *timestamp = &tim->clock_timestamp[i]; ++ ++ WRITE_BITS (bw, timestamp->ct_type, 2); ++ WRITE_BITS (bw, timestamp->nuit_field_based_flag, 1); ++ WRITE_BITS (bw, timestamp->counting_type, 5); ++ WRITE_BITS (bw, timestamp->full_timestamp_flag, 1); ++ WRITE_BITS (bw, timestamp->discontinuity_flag, 1); ++ WRITE_BITS (bw, timestamp->cnt_dropped_flag, 1); ++ WRITE_BITS (bw, timestamp->n_frames, 8); ++ ++ if (timestamp->full_timestamp_flag) { ++ if (!timestamp->seconds_flag || !timestamp->minutes_flag ++ || !timestamp->hours_flag) ++ goto error; ++ ++ WRITE_BITS (bw, timestamp->seconds_value, 6); ++ WRITE_BITS (bw, timestamp->minutes_value, 6); ++ WRITE_BITS (bw, timestamp->hours_value, 5); ++ } else { ++ WRITE_BITS (bw, timestamp->seconds_flag, 1); ++ if (timestamp->seconds_flag) { ++ WRITE_BITS (bw, timestamp->seconds_value, 6); ++ WRITE_BITS (bw, timestamp->minutes_flag, 1); ++ if (timestamp->minutes_flag) { ++ WRITE_BITS (bw, timestamp->minutes_value, 6); ++ WRITE_BITS (bw, timestamp->hours_flag, 1); ++ if (timestamp->hours_flag) ++ WRITE_BITS (bw, timestamp->hours_value, 5); ++ } ++ } ++ } ++ ++ if (tim->time_offset_length > 0) { ++ WRITE_BITS (bw, timestamp->time_offset, tim->time_offset_length); ++ } ++ } ++ } ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("Failed to write \"Picture timing\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h264_bit_writer_sei_buffering_period (const GstH264BufferingPeriod * per, ++ GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ ++ GST_DEBUG ("Writing \"Buffering period\""); ++ ++ if (!per->sps) ++ goto error; ++ ++ WRITE_UE_MAX (bw, per->sps->id, GST_H264_MAX_SPS_COUNT - 1); ++ ++ if (per->sps->vui_parameters_present_flag) { ++ GstH264VUIParams *vui = &per->sps->vui_parameters; ++ ++ if (vui->nal_hrd_parameters_present_flag) { ++ GstH264HRDParams *hrd = &vui->nal_hrd_parameters; ++ const guint8 nbits = hrd->initial_cpb_removal_delay_length_minus1 + 1; ++ guint8 sched_sel_idx; ++ ++ for (sched_sel_idx = 0; sched_sel_idx <= hrd->cpb_cnt_minus1; ++ sched_sel_idx++) { ++ WRITE_BITS (bw, per->nal_initial_cpb_removal_delay[sched_sel_idx], ++ nbits); ++ WRITE_BITS (bw, ++ per->nal_initial_cpb_removal_delay_offset[sched_sel_idx], nbits); ++ } ++ } ++ ++ if (vui->vcl_hrd_parameters_present_flag) { ++ GstH264HRDParams *hrd = &vui->vcl_hrd_parameters; ++ const guint8 nbits = hrd->initial_cpb_removal_delay_length_minus1 + 1; ++ guint8 sched_sel_idx; ++ ++ for (sched_sel_idx = 0; sched_sel_idx <= 
hrd->cpb_cnt_minus1; ++ sched_sel_idx++) { ++ WRITE_BITS (bw, per->vcl_initial_cpb_removal_delay[sched_sel_idx], ++ nbits); ++ WRITE_BITS (bw, ++ per->vcl_initial_cpb_removal_delay_offset[sched_sel_idx], nbits); ++ } ++ } ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("Failed to write \"Buffering period\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h264_bit_writer_sei_message (const GstH264SEIMessage * msg, ++ GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ ++ GST_DEBUG ("writing SEI message"); ++ ++ switch (msg->payloadType) { ++ case GST_H264_SEI_REGISTERED_USER_DATA: ++ if (!_h264_bit_writer_sei_registered_user_data ++ (&msg->payload.registered_user_data, bw, &have_space)) ++ goto error; ++ break; ++ case GST_H264_SEI_FRAME_PACKING: ++ if (!_h264_bit_writer_sei_frame_packing ++ (&msg->payload.frame_packing, bw, &have_space)) ++ goto error; ++ break; ++ case GST_H264_SEI_MASTERING_DISPLAY_COLOUR_VOLUME: ++ if (!_h264_bit_writer_sei_mastering_display_colour_volume ++ (&msg->payload.mastering_display_colour_volume, bw, &have_space)) ++ goto error; ++ break; ++ case GST_H264_SEI_CONTENT_LIGHT_LEVEL: ++ if (!_h264_bit_writer_sei_content_light_level_info ++ (&msg->payload.content_light_level, bw, &have_space)) ++ goto error; ++ break; ++ case GST_H264_SEI_PIC_TIMING: ++ if (!_h264_bit_writer_sei_pic_timing (&msg->payload.pic_timing, bw, ++ &have_space)) ++ goto error; ++ break; ++ case GST_H264_SEI_BUF_PERIOD: ++ if (!_h264_bit_writer_sei_buffering_period ++ (&msg->payload.buffering_period, bw, &have_space)) ++ goto error; ++ break; ++ default: ++ break; ++ } ++ ++ /* Add trailings. */ ++ WRITE_BITS (bw, 1, 1); ++ gst_bit_writer_align_bytes_unchecked (bw, 0); ++ ++ *space = TRUE; ++ ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write SEI message"); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++/** ++ * gst_h264_bit_writer_sei: ++ * @sei_messages: An array of #GstH264SEIMessage to write ++ * @start_code: whether adding the nal start code ++ * @data: (out): the bit stream generated by the sei messages ++ * @size: (inout): the size in bytes of the input and output ++ * ++ * Generating the according h264 bit stream by providing sei messages. 
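++ *
++ * A minimal usage sketch (the single content light level message and the
++ * 256 byte buffer are illustrative assumptions, not requirements):
++ *
++ *   GArray *msgs = g_array_new (FALSE, FALSE, sizeof (GstH264SEIMessage));
++ *   GstH264SEIMessage msg = { 0, };
++ *   guint8 buf[256];
++ *   guint size = sizeof (buf);
++ *
++ *   msg.payloadType = GST_H264_SEI_CONTENT_LIGHT_LEVEL;
++ *   msg.payload.content_light_level.max_content_light_level = 1000;
++ *   msg.payload.content_light_level.max_pic_average_light_level = 400;
++ *   g_array_append_val (msgs, msg);
++ *
++ *   if (gst_h264_bit_writer_sei (msgs, TRUE, buf, &size) ==
++ *       GST_H264_BIT_WRITER_OK)
++ *     push_sei_nal (buf, size);  // hypothetical consumer of the SEI NAL
++ *   g_array_unref (msgs);
++ *
++ * The ff-byte escape coding of payloadType and payloadSize is handled
++ * internally, so callers only fill in the #GstH264SEIMessage fields.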
++ * ++ * Returns: a #GstH264BitWriterResult ++ * ++ * Since: 1.22 ++ **/ ++GstH264BitWriterResult ++gst_h264_bit_writer_sei (GArray * sei_messages, gboolean start_code, ++ guint8 * data, guint * size) ++{ ++ gboolean have_space = TRUE; ++ GstBitWriter bw; ++ GstBitWriter bw_msg; ++ GstH264SEIMessage *sei; ++ gboolean have_written_data = FALSE; ++ guint i; ++ ++ g_return_val_if_fail (sei_messages != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (data != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (size != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (*size > 0, GST_H264_BIT_WRITER_ERROR); ++ ++ gst_bit_writer_init_with_data (&bw, data, *size, FALSE); ++ ++ if (start_code) ++ WRITE_BITS (&bw, 0x00000001, 32); ++ ++ /* nal header */ ++ /* forbidden_zero_bit */ ++ WRITE_BITS (&bw, 0, 1); ++ /* nal_ref_idc, zero for sei nalu */ ++ WRITE_BITS (&bw, 0, 2); ++ /* nal_unit_type */ ++ WRITE_BITS (&bw, GST_H264_NAL_SEI, 5); ++ ++ for (i = 0; i < sei_messages->len; i++) { ++ guint32 payload_size_data; ++ guint32 payload_type_data; ++ guint32 sz; ++ ++ gst_bit_writer_init (&bw_msg); ++ ++ sei = &g_array_index (sei_messages, GstH264SEIMessage, i); ++ if (!_h264_bit_writer_sei_message (sei, &bw_msg, &have_space)) ++ goto error; ++ ++ if (gst_bit_writer_get_size (&bw_msg) == 0) { ++ GST_FIXME ("Unsupported SEI type %d", sei->payloadType); ++ continue; ++ } ++ ++ have_written_data = TRUE; ++ ++ g_assert (gst_bit_writer_get_size (&bw_msg) % 8 == 0); ++ payload_size_data = gst_bit_writer_get_size (&bw_msg) / 8; ++ payload_type_data = sei->payloadType; ++ ++ /* write payload type bytes */ ++ while (payload_type_data >= 0xff) { ++ WRITE_BITS (&bw, 0xff, 8); ++ payload_type_data -= 0xff; ++ } ++ WRITE_BITS (&bw, payload_type_data, 8); ++ ++ /* write payload size bytes */ ++ sz = payload_size_data; ++ while (sz >= 0xff) { ++ WRITE_BITS (&bw, 0xff, 8); ++ sz -= 0xff; ++ } ++ WRITE_BITS (&bw, sz, 8); ++ ++ if (payload_size_data > 0) ++ WRITE_BYTES (&bw, gst_bit_writer_get_data (&bw_msg), payload_size_data); ++ ++ gst_bit_writer_reset (&bw_msg); ++ } ++ ++ if (!have_written_data) { ++ GST_WARNING ("No written sei data"); ++ goto error; ++ } ++ ++ /* Add trailings. */ ++ WRITE_BITS (&bw, 1, 1); ++ if (!gst_bit_writer_align_bytes (&bw, 0)) { ++ have_space = FALSE; ++ goto error; ++ } ++ ++ *size = (gst_bit_writer_get_size (&bw)) / 8; ++ gst_bit_writer_reset (&bw); ++ return GST_H264_BIT_WRITER_OK; ++ ++error: ++ gst_bit_writer_reset (&bw); ++ *size = 0; ++ ++ return have_space ? GST_H264_BIT_WRITER_INVALID_DATA : ++ GST_H264_BIT_WRITER_NO_MORE_SPACE; ++} ++ ++/** ++ * gst_h264_bit_writer_aud: ++ * @primary_pic_type: indicate the possible slice types list just ++ * as the H264 spec defines ++ * @start_code: whether adding the nal start code ++ * @data: (out): the bit stream generated by the aud ++ * @size: (inout): the size in bytes of the input and output ++ * ++ * Generating the according h264 bit stream of an aud. 
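++ *
++ * A short illustrative call (the 16 byte buffer is an assumption; with a
++ * start code the written AUD occupies 6 bytes, and primary_pic_type 0
++ * indicates that only I slices may follow):
++ *
++ *   guint8 buf[16];
++ *   guint size = sizeof (buf);
++ *
++ *   if (gst_h264_bit_writer_aud (0, TRUE, buf, &size) ==
++ *       GST_H264_BIT_WRITER_OK)
++ *     g_assert (size == 6);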
++ * ++ * Returns: a #GstH264BitWriterResult ++ * ++ * Since: 1.22 ++ **/ ++GstH264BitWriterResult ++gst_h264_bit_writer_aud (guint8 primary_pic_type, gboolean start_code, ++ guint8 * data, guint * size) ++{ ++ gboolean have_space = TRUE; ++ GstBitWriter bw; ++ ++ g_return_val_if_fail (primary_pic_type <= 7, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (data != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (size != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (*size > 0, GST_H264_BIT_WRITER_ERROR); ++ ++ gst_bit_writer_init_with_data (&bw, data, *size, FALSE); ++ ++ if (start_code) ++ WRITE_BITS (&bw, 0x00000001, 32); ++ ++ /* nal header */ ++ /* forbidden_zero_bit */ ++ WRITE_BITS (&bw, 0, 1); ++ /* nal_ref_idc */ ++ WRITE_BITS (&bw, 0, 2); ++ /* nal_unit_type */ ++ WRITE_BITS (&bw, GST_H264_NAL_AU_DELIMITER, 5); ++ ++ WRITE_BITS (&bw, primary_pic_type, 3); ++ ++ /* Add trailings. */ ++ WRITE_BITS (&bw, 1, 1); ++ if (!gst_bit_writer_align_bytes (&bw, 0)) { ++ goto error; ++ } ++ ++ *size = (gst_bit_writer_get_size (&bw)) / 8; ++ gst_bit_writer_reset (&bw); ++ ++ return GST_H264_BIT_WRITER_OK; ++ ++error: ++ gst_bit_writer_reset (&bw); ++ *size = 0; ++ ++ return have_space ? GST_H264_BIT_WRITER_INVALID_DATA : ++ GST_H264_BIT_WRITER_NO_MORE_SPACE; ++} ++ ++/** ++ * gst_h264_bit_writer_convert_to_nal: ++ * @nal_prefix_size: the size in bytes for the prefix of a nal, may ++ * be 2, 3 or 4 ++ * @packetized: whether to write the bit stream in packetized format, ++ * which does not have the start code but has a @nal_prefix_size bytes' ++ * size prepending to the real nal data ++ * @has_startcode: whether the input already has a start code ++ * @add_trailings: whether to add rbsp trailing bits to make the output ++ * aligned to byte ++ * @raw_data: the input bit stream ++ * @raw_size: the size in bits of the input bit stream ++ * @nal_data: (out): the output bit stream converted to a real nal ++ * @nal_size: (inout): the size in bytes of the output ++ * ++ * Converting a bit stream into a real nal packet. If the bit stream already ++ * has a start code, it will be replaced by the new one specified by the ++ * @nal_prefix_size and @packetized. It is assured that the output aligns to ++ * the byte and the all the emulations are inserted. ++ * ++ * Returns: a #GstH264BitWriterResult ++ * ++ * Since: 1.22 ++ **/ ++GstH264BitWriterResult ++gst_h264_bit_writer_convert_to_nal (guint nal_prefix_size, gboolean packetized, ++ gboolean has_startcode, gboolean add_trailings, const guint8 * raw_data, ++ gsize raw_size, guint8 * nal_data, guint * nal_size) ++{ ++ NalWriter nw; ++ guint8 *data; ++ guint32 size = 0; ++ gboolean need_more_space = FALSE; ++ ++ g_return_val_if_fail ( ++ (packetized && nal_prefix_size > 1 && nal_prefix_size < 5) || ++ (!packetized && (nal_prefix_size == 3 || nal_prefix_size == 4)), ++ GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (raw_data != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (raw_size > 0, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (nal_data != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (nal_size != NULL, GST_H264_BIT_WRITER_ERROR); ++ g_return_val_if_fail (*nal_size > 0, GST_H264_BIT_WRITER_ERROR); ++ ++ if (has_startcode) { ++ /* Skip the start code, the NalWriter will add it automatically. 
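++ * Both the 4 byte (00 00 00 01) and the 3 byte (00 00 01) Annex B
++ * prefixes are recognized below; note that raw_size counts bits, hence
++ * the "* 8" in the adjustments.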
*/ ++ if (raw_size >= 4 && raw_data[0] == 0 ++ && raw_data[1] == 0 && raw_data[2] == 0 && raw_data[3] == 0x01) { ++ raw_data += 4; ++ raw_size -= 4 * 8; ++ } else if (raw_size >= 3 && raw_data[0] == 0 && raw_data[1] == 0 ++ && raw_data[2] == 0x01) { ++ raw_data += 3; ++ raw_size -= 3 * 8; ++ } else { ++ /* Fail to find the start code. */ ++ g_return_val_if_reached (GST_H264_BIT_WRITER_ERROR); ++ } ++ } ++ ++ /* If no RBSP trailing needed, it must align to byte. We assume ++ that the rbsp trailing bits are already added. */ ++ if (!add_trailings) ++ g_return_val_if_fail (raw_size % 8 == 0, GST_H264_BIT_WRITER_ERROR); ++ ++ nal_writer_init (&nw, nal_prefix_size, packetized); ++ ++ if (!nal_writer_put_bytes (&nw, raw_data, raw_size / 8)) ++ goto error; ++ ++ if (raw_size % 8) { ++ guint8 data = *(raw_data + raw_size / 8); ++ ++ if (!nal_writer_put_bits_uint8 (&nw, ++ data >> (8 - raw_size % 8), raw_size % 8)) ++ goto error; ++ } ++ ++ if (add_trailings) { ++ if (!nal_writer_do_rbsp_trailing_bits (&nw)) ++ goto error; ++ } ++ ++ data = nal_writer_reset_and_get_data (&nw, &size); ++ if (!data) ++ goto error; ++ ++ if (size > *nal_size) { ++ need_more_space = TRUE; ++ g_free (data); ++ goto error; ++ } ++ ++ memcpy (nal_data, data, size); ++ *nal_size = size; ++ g_free (data); ++ nal_writer_reset (&nw); ++ return GST_H264_BIT_WRITER_OK; ++ ++error: ++ nal_writer_reset (&nw); ++ *nal_size = 0; ++ ++ GST_WARNING ("Failed to convert nal data"); ++ ++ return need_more_space ? GST_H264_BIT_WRITER_INVALID_DATA : ++ GST_H264_BIT_WRITER_NO_MORE_SPACE; ++} +diff --git a/gst-libs/gst/codecparsers/gsth264bitwriter.h b/gst-libs/gst/codecparsers/gsth264bitwriter.h +new file mode 100644 +index 000000000..67e526152 +--- /dev/null ++++ b/gst-libs/gst/codecparsers/gsth264bitwriter.h +@@ -0,0 +1,88 @@ ++/* GStreamer ++ * Copyright (C) 2020 Intel Corporation ++ * Author: He Junyan ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the0 ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifndef __GST_H264_BIT_WRITER_H__ ++#define __GST_H264_BIT_WRITER_H__ ++ ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++/** ++ * GstH264BitWriterResult: ++ * @GST_H264_BIT_WRITER_OK: The writing succeeded ++ * @GST_H264_BIT_WRITER_INVALID_DATA: The input data to write is invalid ++ * @GST_H264_BIT_WRITER_NO_MORE_SPACE: The output does not have enough size ++ * @GST_H264_BIT_WRITER_ERROR: An general error occurred when writing ++ * ++ * The result of writing H264 data into bit stream. 
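++ *
++ * A caller hitting @GST_H264_BIT_WRITER_NO_MORE_SPACE can usually retry
++ * with a larger buffer, roughly like this (sps is assumed to be a filled
++ * #GstH264SPS and the initial capacity is an arbitrary choice):
++ *
++ *   guint capacity = 512;
++ *   guint size;
++ *   guint8 *buf = g_malloc (capacity);
++ *   GstH264BitWriterResult res;
++ *
++ *   do {
++ *     size = capacity;
++ *     res = gst_h264_bit_writer_sps (sps, TRUE, buf, &size);
++ *     if (res == GST_H264_BIT_WRITER_NO_MORE_SPACE) {
++ *       capacity *= 2;
++ *       buf = g_realloc (buf, capacity);
++ *     }
++ *   } while (res == GST_H264_BIT_WRITER_NO_MORE_SPACE);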
++ * ++ * Since: 1.22 ++ */ ++typedef enum ++{ ++ GST_H264_BIT_WRITER_OK, ++ GST_H264_BIT_WRITER_INVALID_DATA, ++ GST_H264_BIT_WRITER_NO_MORE_SPACE, ++ GST_H264_BIT_WRITER_ERROR ++} GstH264BitWriterResult; ++ ++GST_CODEC_PARSERS_API ++GstH264BitWriterResult gst_h264_bit_writer_sps (const GstH264SPS * sps, ++ gboolean start_code, ++ guint8 * data, ++ guint * size); ++GST_CODEC_PARSERS_API ++GstH264BitWriterResult gst_h264_bit_writer_pps (const GstH264PPS * pps, ++ gboolean start_code, ++ guint8 * data, ++ guint * size); ++GST_CODEC_PARSERS_API ++GstH264BitWriterResult gst_h264_bit_writer_slice_hdr (const GstH264SliceHdr * slice, ++ gboolean start_code, ++ GstH264NalUnitType nal_type, ++ gboolean is_ref, ++ guint8 * data, ++ guint * size, ++ guint * trail_bits_num); ++GST_CODEC_PARSERS_API ++GstH264BitWriterResult gst_h264_bit_writer_sei (GArray * sei_messages, ++ gboolean start_code, ++ guint8 * data, ++ guint * size); ++GST_CODEC_PARSERS_API ++GstH264BitWriterResult gst_h264_bit_writer_aud (guint8 primary_pic_type, ++ gboolean start_code, ++ guint8 * data, ++ guint * size); ++GST_CODEC_PARSERS_API ++GstH264BitWriterResult gst_h264_bit_writer_convert_to_nal (guint nal_prefix_size, ++ gboolean packetized, ++ gboolean has_startcode, ++ gboolean add_trailings, ++ const guint8 * raw_data, ++ gsize raw_size, ++ guint8 * nal_data, ++ guint * nal_size); ++ ++G_END_DECLS ++ ++#endif /* __GST_H264_BIT_WRITER_H__ */ +diff --git a/gst-libs/gst/codecparsers/gsth264parser.c b/gst-libs/gst/codecparsers/gsth264parser.c +index 68aa25068..ea3c6fb99 100644 +--- a/gst-libs/gst/codecparsers/gsth264parser.c ++++ b/gst-libs/gst/codecparsers/gsth264parser.c +@@ -271,7 +271,7 @@ gst_h264_pps_copy (GstH264PPS * dst_pps, const GstH264PPS * src_pps) + *dst_pps = *src_pps; + + if (src_pps->slice_group_id) +- dst_pps->slice_group_id = g_memdup (src_pps->slice_group_id, ++ dst_pps->slice_group_id = g_memdup2 (src_pps->slice_group_id, + src_pps->pic_size_in_map_units_minus1 + 1); + + return TRUE; +@@ -704,11 +704,12 @@ gst_h264_slice_parse_dec_ref_pic_marking (GstH264SliceHdr * slice, + GstH264NalUnit * nalu, NalReader * nr) + { + GstH264DecRefPicMarking *dec_ref_pic_m; +- guint start_pos; ++ guint start_pos, start_epb; + + GST_DEBUG ("parsing \"Decoded reference picture marking\""); + + start_pos = nal_reader_get_pos (nr); ++ start_epb = nal_reader_get_epb_count (nr); + + dec_ref_pic_m = &slice->dec_ref_pic_marking; + +@@ -723,7 +724,7 @@ gst_h264_slice_parse_dec_ref_pic_marking (GstH264SliceHdr * slice, + + dec_ref_pic_m->n_ref_pic_marking = 0; + while (1) { +- READ_UE (nr, mem_mgmt_ctrl_op); ++ READ_UE_MAX (nr, mem_mgmt_ctrl_op, 6); + if (mem_mgmt_ctrl_op == 0) + break; + +@@ -753,7 +754,8 @@ gst_h264_slice_parse_dec_ref_pic_marking (GstH264SliceHdr * slice, + } + } + +- dec_ref_pic_m->bit_size = nal_reader_get_pos (nr) - start_pos; ++ dec_ref_pic_m->bit_size = (nal_reader_get_pos (nr) - start_pos) - ++ (8 * (nal_reader_get_epb_count (nr) - start_epb)); + + return TRUE; + +@@ -793,7 +795,7 @@ gst_h264_slice_parse_pred_weight_table (GstH264SliceHdr * slice, + p->chroma_weight_l0[i][1] = default_chroma_weight; + } + if (GST_H264_IS_B_SLICE (slice)) { +- for (i = 0; i <= slice->num_ref_idx_l0_active_minus1; i++) { ++ for (i = 0; i <= slice->num_ref_idx_l1_active_minus1; i++) { + p->chroma_weight_l1[i][0] = default_chroma_weight; + p->chroma_weight_l1[i][1] = default_chroma_weight; + } +@@ -1086,6 +1088,48 @@ error: + } + } + ++static GstH264ParserResult ++gst_h264_parser_parse_user_data_unregistered (GstH264NalParser * 
nalparser, ++ GstH264UserDataUnregistered * urud, NalReader * nr, guint payload_size) ++{ ++ guint8 *data = NULL; ++ gint i; ++ ++ if (payload_size < 16) { ++ GST_WARNING ("Too small payload size %d", payload_size); ++ return GST_H264_PARSER_BROKEN_DATA; ++ } ++ ++ for (int i = 0; i < 16; i++) { ++ READ_UINT8 (nr, urud->uuid[i], 8); ++ --payload_size; ++ } ++ ++ urud->size = payload_size; ++ ++ data = g_malloc0 (payload_size); ++ for (i = 0; i < payload_size; ++i) { ++ READ_UINT8 (nr, data[i], 8); ++ } ++ ++ if (payload_size < 1) { ++ GST_WARNING ("No more remaining payload data to store"); ++ g_clear_pointer (&data, g_free); ++ return GST_H264_PARSER_BROKEN_DATA; ++ } ++ ++ urud->data = data; ++ GST_MEMDUMP ("SEI user data unregistered", data, payload_size); ++ return GST_H264_PARSER_OK; ++ ++error: ++ { ++ GST_WARNING ("error parsing \"User Data Unregistered\""); ++ g_clear_pointer (&data, g_free); ++ return GST_H264_PARSER_ERROR; ++ } ++} ++ + static GstH264ParserResult + gst_h264_parser_parse_recovery_point (GstH264NalParser * nalparser, + GstH264RecoveryPoint * rp, NalReader * nr) +@@ -1308,6 +1352,10 @@ gst_h264_parser_parse_sei_message (GstH264NalParser * nalparser, + res = gst_h264_parser_parse_registered_user_data (nalparser, + &sei->payload.registered_user_data, nr, payload_size >> 3); + break; ++ case GST_H264_SEI_USER_DATA_UNREGISTERED: ++ res = gst_h264_parser_parse_user_data_unregistered (nalparser, ++ &sei->payload.user_data_unregistered, nr, payload_size >> 3); ++ break; + case GST_H264_SEI_RECOVERY_POINT: + res = gst_h264_parser_parse_recovery_point (nalparser, + &sei->payload.recovery_point, nr); +@@ -1603,6 +1651,179 @@ gst_h264_parser_identify_nalu_avc (GstH264NalParser * nalparser, + return GST_H264_PARSER_OK; + } + ++/** ++ * gst_h264_parser_identify_and_split_nalu_avc: ++ * @nalparser: a #GstH264NalParser ++ * @data: The data to parse, containing an AVC coded NAL unit ++ * @offset: the offset in @data from which to parse the NAL unit ++ * @size: the size of @data ++ * @nal_length_size: the size in bytes of the AVC nal length prefix. ++ * @nalus: a caller allocated GArray of #GstH264NalUnit where to store parsed nal headers ++ * @consumed: (out): the size of consumed bytes ++ * ++ * Parses @data for packetized (e.g., avc/avc3) bitstream and ++ * sets @nalus. In addition to nal identifying process, ++ * this method scans start-code prefix to split malformed packet into ++ * actual nal chunks. 
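++ *
++ * A rough usage sketch (parser, map.data and map.size are assumed to come
++ * from the caller, and a 4 byte nal length field is assumed):
++ *
++ *   GArray *nalus = g_array_new (FALSE, FALSE, sizeof (GstH264NalUnit));
++ *   gsize consumed = 0;
++ *   guint i;
++ *
++ *   if (gst_h264_parser_identify_and_split_nalu_avc (parser, map.data, 0,
++ *           map.size, 4, nalus, &consumed) == GST_H264_PARSER_OK) {
++ *     for (i = 0; i < nalus->len; i++)
++ *       gst_h264_parser_parse_nal (parser,
++ *           &g_array_index (nalus, GstH264NalUnit, i));
++ *   }
++ *   g_array_unref (nalus);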
++ * ++ * Returns: a #GstH264ParserResult ++ * ++ * Since: 1.22.9 ++ */ ++GstH264ParserResult ++gst_h264_parser_identify_and_split_nalu_avc (GstH264NalParser * nalparser, ++ const guint8 * data, guint offset, gsize size, guint8 nal_length_size, ++ GArray * nalus, gsize * consumed) ++{ ++ GstBitReader br; ++ guint nalu_size; ++ guint remaining; ++ guint off; ++ guint sc_size; ++ ++ g_return_val_if_fail (data != NULL, GST_H264_PARSER_ERROR); ++ g_return_val_if_fail (nalus != NULL, GST_H264_PARSER_ERROR); ++ g_return_val_if_fail (nal_length_size > 0 && nal_length_size < 5, ++ GST_H264_PARSER_ERROR); ++ ++ g_array_set_size (nalus, 0); ++ ++ if (consumed) ++ *consumed = 0; ++ ++ /* Would overflow guint below otherwise: the callers needs to ensure that ++ * this never happens */ ++ if (offset > G_MAXUINT32 - nal_length_size) { ++ GST_WARNING ("offset + nal_length_size overflow"); ++ return GST_H264_PARSER_BROKEN_DATA; ++ } ++ ++ if (size < offset + nal_length_size) { ++ GST_DEBUG ("Can't parse, buffer has too small size %" G_GSIZE_FORMAT ++ ", offset %u", size, offset); ++ return GST_H264_PARSER_ERROR; ++ } ++ ++ /* Read nal unit size and unwrap the size field */ ++ gst_bit_reader_init (&br, data + offset, size - offset); ++ nalu_size = gst_bit_reader_get_bits_uint32_unchecked (&br, ++ nal_length_size * 8); ++ ++ if (nalu_size < 1) { ++ GST_WARNING ("too small nal size %d", nalu_size); ++ return GST_H264_PARSER_BROKEN_DATA; ++ } ++ ++ if (size < (gsize) nalu_size + nal_length_size) { ++ GST_WARNING ("larger nalu size %d than data size %" G_GSIZE_FORMAT, ++ nalu_size + nal_length_size, size); ++ return GST_H264_PARSER_BROKEN_DATA; ++ } ++ ++ if (consumed) ++ *consumed = nalu_size + nal_length_size; ++ ++ off = offset + nal_length_size; ++ remaining = nalu_size; ++ sc_size = nal_length_size; ++ ++ /* Drop trailing start-code since it will not be scanned */ ++ if (remaining >= 3) { ++ if (data[off + remaining - 1] == 0x01 && data[off + remaining - 2] == 0x00 ++ && data[off + remaining - 3] == 0x00) { ++ remaining -= 3; ++ ++ /* 4 bytes start-code */ ++ if (remaining > 0 && data[off + remaining - 1] == 0x00) ++ remaining--; ++ } ++ } ++ ++ /* Looping to split malformed nal units. nal-length field was dropped above ++ * so expected bitstream structure are: ++ * ++ * ++ * | nalu | ++ * sc scan result will be -1 and handled in CONDITION-A ++ * ++ * ++ * | SC | nalu | ++ * Hit CONDITION-C first then terminated in CONDITION-A ++ * ++ * ++ * | nalu | SC | nalu | ... 
++ * CONDITION-B handles those cases ++ */ ++ do { ++ GstH264NalUnit nalu; ++ gint sc_offset = -1; ++ guint skip_size = 0; ++ ++ memset (&nalu, 0, sizeof (GstH264NalUnit)); ++ ++ /* startcode 3 bytes + minimum nal size 1 */ ++ if (remaining >= 4) ++ sc_offset = scan_for_start_codes (data + off, remaining); ++ ++ if (sc_offset < 0) { ++ if (remaining >= 1) { ++ /* CONDITION-A */ ++ /* Last chunk */ ++ nalu.size = remaining; ++ nalu.sc_offset = off - sc_size; ++ nalu.offset = off; ++ nalu.data = (guint8 *) data; ++ nalu.valid = TRUE; ++ ++ gst_h264_parse_nalu_header (&nalu); ++ g_array_append_val (nalus, nalu); ++ } ++ break; ++ } else if ((sc_offset == 2 && data[off + sc_offset - 1] != 0) ++ || sc_offset > 2) { ++ /* CONDITION-B */ ++ /* Found trailing startcode prefix */ ++ ++ nalu.size = sc_offset; ++ if (data[off + sc_offset - 1] == 0) { ++ /* 4 bytes start code */ ++ nalu.size--; ++ } ++ ++ nalu.sc_offset = off - sc_size; ++ nalu.offset = off; ++ nalu.data = (guint8 *) data; ++ nalu.valid = TRUE; ++ ++ gst_h264_parse_nalu_header (&nalu); ++ g_array_append_val (nalus, nalu); ++ } else { ++ /* CONDITION-C */ ++ /* startcode located at beginning of this chunk without actual nal data. ++ * skip this start code */ ++ } ++ ++ skip_size = sc_offset + 3; ++ if (skip_size >= remaining) ++ break; ++ ++ /* no more nal-length bytes but 3bytes startcode */ ++ sc_size = 3; ++ if (sc_offset > 0 && data[off + sc_offset - 1] == 0) ++ sc_size++; ++ ++ remaining -= skip_size; ++ off += skip_size; ++ } while (remaining >= 1); ++ ++ if (nalus->len > 0) ++ return GST_H264_PARSER_OK; ++ ++ GST_WARNING ("No nal found"); ++ ++ return GST_H264_PARSER_BROKEN_DATA; ++} ++ + /** + * gst_h264_parser_parse_nal: + * @nalparser: a #GstH264NalParser +@@ -2228,7 +2449,7 @@ gst_h264_parser_parse_slice_hdr (GstH264NalParser * nalparser, + gint pps_id; + GstH264PPS *pps; + GstH264SPS *sps; +- guint start_pos; ++ guint start_pos, start_epb; + + memset (slice, 0, sizeof (*slice)); + +@@ -2304,6 +2525,7 @@ gst_h264_parser_parse_slice_hdr (GstH264NalParser * nalparser, + READ_UE_MAX (&nr, slice->idr_pic_id, G_MAXUINT16); + + start_pos = nal_reader_get_pos (&nr); ++ start_epb = nal_reader_get_epb_count (&nr); + + if (sps->pic_order_cnt_type == 0) { + READ_UINT16 (&nr, slice->pic_order_cnt_lsb, +@@ -2319,7 +2541,8 @@ gst_h264_parser_parse_slice_hdr (GstH264NalParser * nalparser, + READ_SE (&nr, slice->delta_pic_order_cnt[1]); + } + +- slice->pic_order_cnt_bit_size = nal_reader_get_pos (&nr) - start_pos; ++ slice->pic_order_cnt_bit_size = (nal_reader_get_pos (&nr) - start_pos) - ++ (8 * (nal_reader_get_epb_count (&nr) - start_epb)); + + if (pps->redundant_pic_cnt_present_flag) + READ_UE_MAX (&nr, slice->redundant_pic_cnt, G_MAXINT8); +@@ -2465,6 +2688,13 @@ gst_h264_sei_clear (GstH264SEIMessage * sei) + rud->data = NULL; + break; + } ++ case GST_H264_SEI_USER_DATA_UNREGISTERED:{ ++ GstH264UserDataUnregistered *udu = &sei->payload.user_data_unregistered; ++ ++ g_free ((guint8 *) udu->data); ++ udu->data = NULL; ++ break; ++ } + case GST_H264_SEI_UNHANDLED_PAYLOAD:{ + GstH264SEIUnhandledPayload *payload = &sei->payload.unhandled_payload; + +@@ -3138,14 +3368,14 @@ gst_h264_create_sei_memory_internal (guint8 nal_prefix_size, + /* write payload type bytes */ + while (payload_type_data >= 0xff) { + WRITE_UINT8 (&nw, 0xff, 8); +- payload_type_data -= -0xff; ++ payload_type_data -= 0xff; + } + WRITE_UINT8 (&nw, payload_type_data, 8); + + /* write payload size bytes */ + while (payload_size_data >= 0xff) { + WRITE_UINT8 (&nw, 0xff, 8); +- 
payload_size_data -= -0xff; ++ payload_size_data -= 0xff; + } + WRITE_UINT8 (&nw, payload_size_data, 8); + +@@ -3169,7 +3399,7 @@ gst_h264_create_sei_memory_internal (guint8 nal_prefix_size, + have_written_data = TRUE; + break; + case GST_H264_SEI_MASTERING_DISPLAY_COLOUR_VOLUME: +- GST_DEBUG ("Wrtiting \"Mastering display colour volume\""); ++ GST_DEBUG ("Writing \"Mastering display colour volume\""); + if (!gst_h264_write_sei_mastering_display_colour_volume (&nw, + &msg->payload.mastering_display_colour_volume)) { + GST_WARNING ("Failed to write \"Mastering display colour volume\""); +@@ -3406,3 +3636,229 @@ gst_h264_parser_insert_sei_avc (GstH264NalParser * nalparser, + return gst_h264_parser_insert_sei_internal (nalparser, nal_length_size, TRUE, + au, sei); + } ++ ++static GstH264DecoderConfigRecord * ++gst_h264_decoder_config_record_new (void) ++{ ++ GstH264DecoderConfigRecord *config; ++ ++ config = g_new0 (GstH264DecoderConfigRecord, 1); ++ config->sps = g_array_new (FALSE, FALSE, sizeof (GstH264NalUnit)); ++ config->pps = g_array_new (FALSE, FALSE, sizeof (GstH264NalUnit)); ++ config->sps_ext = g_array_new (FALSE, FALSE, sizeof (GstH264NalUnit)); ++ ++ return config; ++} ++ ++/** ++ * gst_h264_decoder_config_record_free: ++ * @config: (nullable): a #GstH264DecoderConfigRecord data ++ * ++ * Free @config data ++ * ++ * Since: 1.22 ++ */ ++void ++gst_h264_decoder_config_record_free (GstH264DecoderConfigRecord * config) ++{ ++ if (!config) ++ return; ++ ++ if (config->sps) ++ g_array_unref (config->sps); ++ ++ if (config->pps) ++ g_array_unref (config->pps); ++ ++ if (config->sps_ext) ++ g_array_unref (config->sps_ext); ++ ++ g_free (config); ++} ++ ++/** ++ * gst_h264_parser_parse_decoder_config_record: ++ * @nalparser: a #GstH264NalParser ++ * @data: the data to parse ++ * @size: the size of @data ++ * @config: (out): parsed #GstH264DecoderConfigRecord data ++ * ++ * Parses AVCDecoderConfigurationRecord data and fill into @config. ++ * The caller must free @config via gst_h264_decoder_config_record_free() ++ * ++ * This method does not parse SPS and PPS and therefore the caller needs to ++ * parse each NAL unit via appropriate parsing method. 
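++ *
++ * A sketch of typical use (parser, codec_data and codec_data_size are
++ * assumed to come from the caller, e.g. from the caps codec_data field):
++ *
++ *   GstH264DecoderConfigRecord *config = NULL;
++ *   guint i;
++ *
++ *   if (gst_h264_parser_parse_decoder_config_record (parser, codec_data,
++ *           codec_data_size, &config) == GST_H264_PARSER_OK) {
++ *     for (i = 0; i < config->sps->len; i++)
++ *       gst_h264_parser_parse_nal (parser,
++ *           &g_array_index (config->sps, GstH264NalUnit, i));
++ *     for (i = 0; i < config->pps->len; i++)
++ *       gst_h264_parser_parse_nal (parser,
++ *           &g_array_index (config->pps, GstH264NalUnit, i));
++ *     gst_h264_decoder_config_record_free (config);
++ *   }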
++ * ++ * Returns: a #GstH264ParserResult ++ * ++ * Since: 1.22 ++ */ ++GstH264ParserResult ++gst_h264_parser_parse_decoder_config_record (GstH264NalParser * nalparser, ++ const guint8 * data, gsize size, GstH264DecoderConfigRecord ** config) ++{ ++ GstH264DecoderConfigRecord *ret; ++ GstBitReader br; ++ GstH264ParserResult result = GST_H264_PARSER_OK; ++ guint8 num_sps, num_pps, i; ++ guint offset; ++ ++ g_return_val_if_fail (nalparser != NULL, GST_H264_PARSER_ERROR); ++ g_return_val_if_fail (data != NULL, GST_H264_PARSER_ERROR); ++ g_return_val_if_fail (config != NULL, GST_H264_PARSER_ERROR); ++ ++#define READ_CONFIG_UINT8(val, nbits) G_STMT_START { \ ++ if (!gst_bit_reader_get_bits_uint8 (&br, &val, nbits)) { \ ++ GST_WARNING ("Failed to read " G_STRINGIFY (val)); \ ++ result = GST_H264_PARSER_ERROR; \ ++ goto error; \ ++ } \ ++} G_STMT_END; ++ ++#define SKIP_CONFIG_BITS(nbits) G_STMT_START { \ ++ if (!gst_bit_reader_skip (&br, nbits)) { \ ++ GST_WARNING ("Failed to skip %d bits", nbits); \ ++ result = GST_H264_PARSER_ERROR; \ ++ goto error; \ ++ } \ ++} G_STMT_END; ++ ++ *config = NULL; ++ ++ if (size < 7) { ++ GST_WARNING ("Too small size avcC"); ++ return GST_H264_PARSER_ERROR; ++ } ++ ++ gst_bit_reader_init (&br, data, size); ++ ++ ret = gst_h264_decoder_config_record_new (); ++ ++ READ_CONFIG_UINT8 (ret->configuration_version, 8); ++ /* Keep parsing, caller can decide whether this data needs to be discarded ++ * or not */ ++ if (ret->configuration_version != 1) { ++ GST_WARNING ("Wrong configurationVersion %d", ret->configuration_version); ++ result = GST_H264_PARSER_ERROR; ++ goto error; ++ } ++ ++ READ_CONFIG_UINT8 (ret->profile_indication, 8); ++ READ_CONFIG_UINT8 (ret->profile_compatibility, 8); ++ READ_CONFIG_UINT8 (ret->level_indication, 8); ++ /* reserved 6bits */ ++ SKIP_CONFIG_BITS (6); ++ READ_CONFIG_UINT8 (ret->length_size_minus_one, 2); ++ if (ret->length_size_minus_one == 2) { ++ /* "length_size_minus_one + 1" should be 1, 2, or 4 */ ++ GST_WARNING ("Wrong nal-length-size"); ++ result = GST_H264_PARSER_ERROR; ++ goto error; ++ } ++ ++ /* reserved 3bits */ ++ SKIP_CONFIG_BITS (3); ++ ++ READ_CONFIG_UINT8 (num_sps, 5); ++ offset = gst_bit_reader_get_pos (&br); ++ ++ g_assert (offset % 8 == 0); ++ offset /= 8; ++ for (i = 0; i < num_sps; i++) { ++ GstH264NalUnit nalu; ++ ++ result = gst_h264_parser_identify_nalu_avc (nalparser, ++ data, offset, size, 2, &nalu); ++ if (result != GST_H264_PARSER_OK) ++ goto error; ++ ++ g_array_append_val (ret->sps, nalu); ++ offset = nalu.offset + nalu.size; ++ } ++ ++ if (!gst_bit_reader_set_pos (&br, offset * 8)) { ++ result = GST_H264_PARSER_ERROR; ++ goto error; ++ } ++ ++ READ_CONFIG_UINT8 (num_pps, 8); ++ offset = gst_bit_reader_get_pos (&br); ++ ++ g_assert (offset % 8 == 0); ++ offset /= 8; ++ for (i = 0; i < num_pps; i++) { ++ GstH264NalUnit nalu; ++ ++ result = gst_h264_parser_identify_nalu_avc (nalparser, ++ data, offset, size, 2, &nalu); ++ if (result != GST_H264_PARSER_OK) ++ goto error; ++ ++ g_array_append_val (ret->pps, nalu); ++ offset = nalu.offset + nalu.size; ++ } ++ ++ /* Parse chroma format and SPS ext data. 
We will silently ignore any ++ * error while parsing below data since it's not essential data for ++ * decoding */ ++ if (ret->profile_indication == 100 || ret->profile_indication == 110 || ++ ret->profile_indication == 122 || ret->profile_indication == 144) { ++ guint8 num_sps_ext; ++ ++ if (!gst_bit_reader_set_pos (&br, offset * 8)) ++ goto out; ++ ++ if (!gst_bit_reader_skip (&br, 6)) ++ goto out; ++ ++ if (!gst_bit_reader_get_bits_uint8 (&br, &ret->chroma_format, 2)) ++ goto out; ++ ++ if (!gst_bit_reader_skip (&br, 5)) ++ goto out; ++ ++ if (!gst_bit_reader_get_bits_uint8 (&br, &ret->bit_depth_luma_minus8, 3)) ++ goto out; ++ ++ if (!gst_bit_reader_skip (&br, 5)) ++ goto out; ++ ++ if (!gst_bit_reader_get_bits_uint8 (&br, &ret->bit_depth_chroma_minus8, 3)) ++ goto out; ++ ++ if (!gst_bit_reader_get_bits_uint8 (&br, &num_sps_ext, 8)) ++ goto out; ++ ++ offset = gst_bit_reader_get_pos (&br); ++ ++ g_assert (offset % 8 == 0); ++ offset /= 8; ++ for (i = 0; i < num_sps_ext; i++) { ++ GstH264NalUnit nalu; ++ ++ result = gst_h264_parser_identify_nalu_avc (nalparser, ++ data, offset, size, 2, &nalu); ++ if (result != GST_H264_PARSER_OK) ++ goto out; ++ ++ g_array_append_val (ret->sps_ext, nalu); ++ offset = nalu.offset + nalu.size; ++ } ++ ++ ret->chroma_format_present = TRUE; ++ } ++ ++out: ++ { ++ *config = ret; ++ return GST_H264_PARSER_OK; ++ } ++error: ++ { ++ gst_h264_decoder_config_record_free (ret); ++ return result; ++ } ++ ++#undef READ_CONFIG_UINT8 ++#undef SKIP_CONFIG_BITS ++} +diff --git a/gst-libs/gst/codecparsers/gsth264parser.h b/gst-libs/gst/codecparsers/gsth264parser.h +index d2f954232..23e3f9af3 100644 +--- a/gst-libs/gst/codecparsers/gsth264parser.h ++++ b/gst-libs/gst/codecparsers/gsth264parser.h +@@ -218,7 +218,8 @@ typedef enum + * @GST_H264_FRAME_PACKING_COLUMN_INTERLEAVING: Column based interleaving + * @GST_H264_FRAME_PACKING_ROW_INTERLEAVING: Row based interleaving + * @GST_H264_FRAME_PACKING_SIDE_BY_SIDE: Side-by-side packing +- * @GST_H264_FRMAE_PACKING_TOP_BOTTOM: Top-Bottom packing ++ * @GST_H264_FRMAE_PACKING_TOP_BOTTOM: Deprecated; use GST_H264_FRAME_PACKING_TOP_BOTTOM instead ++ * @GST_H264_FRAME_PACKING_TOP_BOTTOM: Top-Bottom packing (Since: 1.22) + * @GST_H264_FRAME_PACKING_TEMPORAL_INTERLEAVING: Temporal interleaving + * + * Frame packing arrangement types. +@@ -233,6 +234,15 @@ typedef enum + GST_H264_FRAME_PACKING_ROW_INTERLEAVING = 2, + GST_H264_FRAME_PACKING_SIDE_BY_SIDE = 3, + GST_H264_FRMAE_PACKING_TOP_BOTTOM = 4, ++ ++ /** ++ * GST_H264_FRAME_PACKING_TOP_BOTTOM: ++ * ++ * Top-Bottom packing ++ * ++ * Since: 1.22 ++ */ ++ GST_H264_FRAME_PACKING_TOP_BOTTOM = 4, + GST_H264_FRAME_PACKING_TEMPORAL_INTERLEAVING = 5 + } GstH264FramePackingType; + +@@ -253,11 +263,20 @@ typedef enum + * + * The type of SEI message. 
+ */ ++/** ++ * GST_H264_SEI_USER_DATA_UNREGISTERED: ++ * ++ * User Data Unregistered (D.2.6) ++ * ++ * Since: 1.22 ++ */ ++ + typedef enum + { + GST_H264_SEI_BUF_PERIOD = 0, + GST_H264_SEI_PIC_TIMING = 1, + GST_H264_SEI_REGISTERED_USER_DATA = 4, ++ GST_H264_SEI_USER_DATA_UNREGISTERED = 5, + GST_H264_SEI_RECOVERY_POINT = 6, + GST_H264_SEI_STEREO_VIDEO_INFO = 21, + GST_H264_SEI_FRAME_PACKING = 45, +@@ -357,6 +376,7 @@ typedef struct _GstH264SliceHdr GstH264SliceHdr; + typedef struct _GstH264ClockTimestamp GstH264ClockTimestamp; + typedef struct _GstH264PicTiming GstH264PicTiming; + typedef struct _GstH264RegisteredUserData GstH264RegisteredUserData; ++typedef struct _GstH264UserDataUnregistered GstH264UserDataUnregistered; + typedef struct _GstH264BufferingPeriod GstH264BufferingPeriod; + typedef struct _GstH264RecoveryPoint GstH264RecoveryPoint; + typedef struct _GstH264StereoVideoInfo GstH264StereoVideoInfo; +@@ -365,6 +385,7 @@ typedef struct _GstH264MasteringDisplayColourVolume GstH264MasteringDisplayColou + typedef struct _GstH264ContentLightLevel GstH264ContentLightLevel; + typedef struct _GstH264SEIUnhandledPayload GstH264SEIUnhandledPayload; + typedef struct _GstH264SEIMessage GstH264SEIMessage; ++typedef struct _GstH264DecoderConfigRecord GstH264DecoderConfigRecord; + + /** + * GstH264NalUnitExtensionMVC: +@@ -1111,6 +1132,23 @@ struct _GstH264RegisteredUserData + guint size; + }; + ++/** ++ * GstH264UserDataUnregistered: ++ * @uuid: an uuid_iso_iec_11578. ++ * @data: the data of user_data_payload_byte ++ * @size: the size of @data in bytes ++ * ++ * The User data unregistered SEI message syntax. ++ * ++ * Since: 1.22 ++ */ ++struct _GstH264UserDataUnregistered ++{ ++ guint8 uuid[16]; ++ const guint8 *data; ++ guint size; ++}; ++ + struct _GstH264BufferingPeriod + { + GstH264SPS *sps; +@@ -1185,6 +1223,13 @@ struct _GstH264SEIUnhandledPayload + guint size; + }; + ++/** ++ * _GstH264SEIMessage.payload.user_data_unregistered: ++ * ++ * User Data Unregistered ++ * ++ * Since: 1.22 ++ */ + struct _GstH264SEIMessage + { + GstH264SEIPayloadType payloadType; +@@ -1199,10 +1244,111 @@ struct _GstH264SEIMessage + GstH264MasteringDisplayColourVolume mastering_display_colour_volume; + GstH264ContentLightLevel content_light_level; + GstH264SEIUnhandledPayload unhandled_payload; ++ GstH264UserDataUnregistered user_data_unregistered; + /* ... could implement more */ + } payload; + }; + ++/** ++ * GstH264DecoderConfigRecord: ++ * ++ * Contains AVCDecoderConfigurationRecord data as defined in ISO/IEC 14496-15 ++ * ++ * Since: 1.22 ++ */ ++struct _GstH264DecoderConfigRecord ++{ ++ /** ++ * GstH264DecoderConfigRecord.configuration_version: ++ * ++ * Indicates configurationVersion, must be 1 ++ */ ++ guint8 configuration_version; ++ ++ /** ++ * GstH264DecoderConfigRecord.profile_indication: ++ * ++ * H.264 profile indication ++ */ ++ guint8 profile_indication; ++ ++ /** ++ * GstH264DecoderConfigRecord.profile_compatibility: ++ * ++ * H.264 profile compatibility ++ */ ++ guint8 profile_compatibility; ++ ++ /** ++ * GstH264DecoderConfigRecord.level_indication: ++ * ++ * H.264 level indiction ++ */ ++ guint8 level_indication; ++ ++ /** ++ * GstH264DecoderConfigRecord.length_size_minus_one: ++ * ++ * Indicates the length in bytes of the NAL unit length field ++ */ ++ guint8 length_size_minus_one; ++ ++ /** ++ * GstH264DecoderConfigRecord.sps ++ * ++ * Array of identified #GstH264NalUnit from sequenceParameterSetNALUnit. 
++ * This array may contain non-SPS nal units such as SEI message ++ */ ++ GArray *sps; ++ ++ /** ++ * GstH264DecoderConfigRecord.pps ++ * ++ * Array of identified #GstH264NalUnit from pictureParameterSetNALUnit. ++ * This array may contain non-PPS nal units such as SEI message ++ */ ++ GArray *pps; ++ ++ /** ++ * GstH264DecoderConfigRecord.chroma_format_present ++ * ++ * %TRUE if chroma information is present. Otherwise below values ++ * have no meaning ++ */ ++ gboolean chroma_format_present; ++ ++ /** ++ * GstH264DecoderConfigRecord.chroma_format ++ * ++ * chroma_format_idc defined in ISO/IEC 14496-10 ++ */ ++ guint8 chroma_format; ++ ++ /** ++ * GstH264DecoderConfigRecord.bit_depth_luma_minus8 ++ * ++ * Indicates bit depth of luma component ++ */ ++ guint8 bit_depth_luma_minus8; ++ ++ /** ++ * GstH264DecoderConfigRecord.bit_depth_chroma_minus8 ++ * ++ * Indicates bit depth of chroma component ++ */ ++ guint8 bit_depth_chroma_minus8; ++ ++ /** ++ * GstH264DecoderConfigRecord.sps_ext ++ * ++ * Array of identified #GstH264NalUnit from sequenceParameterSetExtNALUnit. ++ */ ++ GArray *sps_ext; ++ ++ /*< private >*/ ++ gpointer _gst_reserved[GST_PADDING]; ++}; ++ + /** + * GstH264NalParser: + * +@@ -1235,6 +1381,15 @@ GstH264ParserResult gst_h264_parser_identify_nalu_avc (GstH264NalParser *nalpars + guint offset, gsize size, guint8 nal_length_size, + GstH264NalUnit *nalu); + ++GST_CODEC_PARSERS_API ++GstH264ParserResult gst_h264_parser_identify_and_split_nalu_avc (GstH264NalParser *nalparser, ++ const guint8 *data, ++ guint offset, ++ gsize size, ++ guint8 nal_length_size, ++ GArray * nalus, ++ gsize * consumed); ++ + GST_CODEC_PARSERS_API + GstH264ParserResult gst_h264_parser_parse_nal (GstH264NalParser *nalparser, + GstH264NalUnit *nalu); +@@ -1331,6 +1486,15 @@ GstBuffer * gst_h264_parser_insert_sei_avc (GstH264NalParser * nalparser, + GstBuffer * au, + GstMemory * sei); + ++GST_CODEC_PARSERS_API ++void gst_h264_decoder_config_record_free (GstH264DecoderConfigRecord * config); ++ ++GST_CODEC_PARSERS_API ++GstH264ParserResult gst_h264_parser_parse_decoder_config_record (GstH264NalParser * nalparser, ++ const guint8 * data, ++ gsize size, ++ GstH264DecoderConfigRecord ** config); ++ + G_END_DECLS + + #endif +diff --git a/gst-libs/gst/codecparsers/gsth265bitwriter.c b/gst-libs/gst/codecparsers/gsth265bitwriter.c +new file mode 100644 +index 000000000..52efe4ab5 +--- /dev/null ++++ b/gst-libs/gst/codecparsers/gsth265bitwriter.c +@@ -0,0 +1,2307 @@ ++/* GStreamer ++ * Copyright (C) 2021 Intel Corporation ++ * Author: He Junyan ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the0 ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include "gsth265bitwriter.h" ++#include ++#include ++#include ++ ++/******************************** Utils ********************************/ ++#define SIGNED(val) (2 * ABS(val) - ((val) > 0)) ++ ++/* Write an unsigned integer Exp-Golomb-coded syntax element. i.e. ue(v) */ ++static gboolean ++_bs_write_ue (GstBitWriter * bs, guint32 value) ++{ ++ guint32 size_in_bits = 0; ++ guint32 tmp_value = ++value; ++ ++ while (tmp_value) { ++ ++size_in_bits; ++ tmp_value >>= 1; ++ } ++ if (size_in_bits > 1 ++ && !gst_bit_writer_put_bits_uint32 (bs, 0, size_in_bits - 1)) ++ return FALSE; ++ if (!gst_bit_writer_put_bits_uint32 (bs, value, size_in_bits)) ++ return FALSE; ++ return TRUE; ++} ++ ++#define WRITE_BITS_UNCHECK(bw, val, nbits) \ ++ (nbits <= 8 ? gst_bit_writer_put_bits_uint8 (bw, val, nbits) : \ ++ (nbits <= 16 ? gst_bit_writer_put_bits_uint16 (bw, val, nbits) : \ ++ (nbits <= 32 ? gst_bit_writer_put_bits_uint32 (bw, val, nbits) : \ ++ FALSE))) ++ ++#define WRITE_BITS(bw, val, nbits) \ ++ if (!WRITE_BITS_UNCHECK (bw, val, nbits)) { \ ++ g_warning ("Unsupported bit size: %u", nbits); \ ++ have_space = FALSE; \ ++ goto error; \ ++ } ++ ++#define WRITE_UE_UNCHECK(bw, val) _bs_write_ue (bw, val) ++ ++#ifdef WRITE_UE ++#undef WRITE_UE ++#endif ++#define WRITE_UE(bw, val) \ ++ if (!(have_space = WRITE_UE_UNCHECK (bw, val))) \ ++ goto error; \ ++ ++#define WRITE_UE_MAX(bw, val, max) \ ++ if ((guint32) val > (max) || !(have_space = WRITE_UE_UNCHECK (bw, val))) \ ++ goto error; ++ ++#define WRITE_SE(bw, val) WRITE_UE (bw, SIGNED (val)) ++ ++#define WRITE_SE_RANGE(bw, val, min, max) \ ++ if (val > max || val < min || \ ++ !(have_space = WRITE_UE_UNCHECK (bw, SIGNED (val)))) \ ++ goto error; ++ ++#define WRITE_BYTES_UNCHECK(bw, ptr, nbytes) \ ++ gst_bit_writer_put_bytes(bw, ptr, nbytes) ++ ++#ifdef WRITE_BYTES ++#undef WRITE_BYTES ++#endif ++#define WRITE_BYTES(bw, ptr, nbytes) \ ++ if (!(have_space = WRITE_BYTES_UNCHECK (bw, ptr, nbytes))) \ ++ goto error; ++ ++/***************************** End of Utils ****************************/ ++ ++#define EXTENDED_SAR 255 ++ ++/**** Default scaling_lists according to Table 7-5 and 7-6 *****/ ++/* Table 7-5 */ ++static const guint8 default_scaling_list0[16] = { ++ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, ++ 16, 16, 16, 16 ++}; ++ ++/* Combined the values in Table 7-6 to make the calculation easier ++ * Default scaling list of 8x8 and 16x16 matrices for matrixId = 0, 1 and 2 ++ * Default scaling list of 32x32 matrix for matrixId = 0 ++ */ ++static const guint8 default_scaling_list1[64] = { ++ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 17, 16, ++ 17, 16, 17, 18, 17, 18, 18, 17, 18, 21, 19, 20, ++ 21, 20, 19, 21, 24, 22, 22, 24, 24, 22, 22, 24, ++ 25, 25, 27, 30, 27, 25, 25, 29, 31, 35, 35, 31, ++ 29, 36, 41, 44, 41, 36, 47, 54, 54, 47, 65, 70, ++ 65, 88, 88, 115 ++}; ++ ++/* Combined the values in Table 7-6 to make the calculation easier ++ * Default scaling list of 8x8 and 16x16 matrices for matrixId = 3, 4 and 5 ++ * Default scaling list of 32x32 matrix for matrixId = 1 ++ */ ++static const guint8 default_scaling_list2[64] = { ++ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, ++ 17, 17, 17, 18, 18, 18, 18, 18, 18, 20, 20, 20, ++ 20, 20, 20, 20, 24, 24, 24, 24, 24, 24, 24, 24, ++ 25, 25, 25, 25, 25, 25, 25, 28, 28, 28, 28, 28, ++ 28, 33, 33, 33, 33, 33, 41, 41, 41, 41, 54, 54, ++ 54, 71, 71, 91 ++}; ++ ++static gboolean ++_h265_bit_writer_profile_tier_level (const GstH265ProfileTierLevel * 
ptl, ++ guint8 maxNumSubLayersMinus1, GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ guint i, j; ++ ++ GST_DEBUG ("writing profile_tier_level"); ++ ++ WRITE_BITS (bw, ptl->profile_space, 2); ++ WRITE_BITS (bw, ptl->tier_flag, 1); ++ WRITE_BITS (bw, ptl->profile_idc, 5); ++ ++ for (j = 0; j < 32; j++) ++ WRITE_BITS (bw, ptl->profile_compatibility_flag[j], 1); ++ ++ WRITE_BITS (bw, ptl->progressive_source_flag, 1); ++ WRITE_BITS (bw, ptl->interlaced_source_flag, 1); ++ WRITE_BITS (bw, ptl->non_packed_constraint_flag, 1); ++ WRITE_BITS (bw, ptl->frame_only_constraint_flag, 1); ++ ++ if (ptl->profile_idc == 4 || ptl->profile_compatibility_flag[4] || ++ ptl->profile_idc == 5 || ptl->profile_compatibility_flag[5] || ++ ptl->profile_idc == 6 || ptl->profile_compatibility_flag[6] || ++ ptl->profile_idc == 7 || ptl->profile_compatibility_flag[7] || ++ ptl->profile_idc == 8 || ptl->profile_compatibility_flag[8] || ++ ptl->profile_idc == 9 || ptl->profile_compatibility_flag[9] || ++ ptl->profile_idc == 10 || ptl->profile_compatibility_flag[10] || ++ ptl->profile_idc == 11 || ptl->profile_compatibility_flag[11]) { ++ WRITE_BITS (bw, ptl->max_12bit_constraint_flag, 1); ++ WRITE_BITS (bw, ptl->max_10bit_constraint_flag, 1); ++ WRITE_BITS (bw, ptl->max_8bit_constraint_flag, 1); ++ WRITE_BITS (bw, ptl->max_422chroma_constraint_flag, 1); ++ WRITE_BITS (bw, ptl->max_420chroma_constraint_flag, 1); ++ WRITE_BITS (bw, ptl->max_monochrome_constraint_flag, 1); ++ WRITE_BITS (bw, ptl->intra_constraint_flag, 1); ++ WRITE_BITS (bw, ptl->one_picture_only_constraint_flag, 1); ++ WRITE_BITS (bw, ptl->lower_bit_rate_constraint_flag, 1); ++ ++ if (ptl->profile_idc == 5 || ptl->profile_compatibility_flag[5] || ++ ptl->profile_idc == 9 || ptl->profile_compatibility_flag[9] || ++ ptl->profile_idc == 10 || ptl->profile_compatibility_flag[10] || ++ ptl->profile_idc == 11 || ptl->profile_compatibility_flag[11]) { ++ WRITE_BITS (bw, ptl->max_14bit_constraint_flag, 1); ++ /* general_reserved_zero_33bits */ ++ WRITE_BITS (bw, 0, 32); ++ WRITE_BITS (bw, 0, 1); ++ } else { ++ /* general_reserved_zero_34bits */ ++ WRITE_BITS (bw, 0, 32); ++ WRITE_BITS (bw, 0, 2); ++ } ++ } else if (ptl->profile_idc == 2 || ptl->profile_compatibility_flag[2]) { ++ /* general_reserved_zero_7bits */ ++ WRITE_BITS (bw, 0, 7); ++ WRITE_BITS (bw, ptl->one_picture_only_constraint_flag, 1); ++ /* general_reserved_zero_35bits */ ++ WRITE_BITS (bw, 0, 32); ++ WRITE_BITS (bw, 0, 3); ++ } else { ++ /* general_reserved_zero_43bits */ ++ WRITE_BITS (bw, 0, 32); ++ WRITE_BITS (bw, 0, 11); ++ } ++ ++ /* general_inbld_flag, just set to 0 */ ++ WRITE_BITS (bw, 0, 1); ++ ++ WRITE_BITS (bw, ptl->level_idc, 8); ++ ++ for (j = 0; j < maxNumSubLayersMinus1; j++) { ++ if (ptl->sub_layer_profile_present_flag[j]) { ++ GST_WARNING ("sub layer profile does not supported now"); ++ goto error; ++ } ++ WRITE_BITS (bw, ptl->sub_layer_profile_present_flag[j], 1); ++ ++ if (ptl->sub_layer_level_present_flag[j]) { ++ GST_WARNING ("sub layer level does not supported now"); ++ goto error; ++ } ++ WRITE_BITS (bw, ptl->sub_layer_level_present_flag[j], 1); ++ } ++ ++ if (maxNumSubLayersMinus1 > 0) { ++ for (i = maxNumSubLayersMinus1; i < 8; i++) ++ /* reserved_zero_2bits */ ++ WRITE_BITS (bw, 0, 2); ++ } ++ ++ /* TODO: Add sub layers profiles. 
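++ * Until that is implemented, sub_layer_profile_present_flag and
++ * sub_layer_level_present_flag must be 0 and only the (zero) flag bits
++ * themselves are emitted for each sub-layer above.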
*/ ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write profile_tier_level"); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h265_bit_writer_sub_layer_hrd_parameters (const GstH265SubLayerHRDParams * ++ sub_hrd, guint8 CpbCnt, guint8 sub_pic_hrd_params_present_flag, ++ GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ guint i; ++ ++ GST_DEBUG ("writing \"subLayer HRD Parameters\""); ++ ++ for (i = 0; i <= CpbCnt; i++) { ++ WRITE_UE_MAX (bw, sub_hrd->bit_rate_value_minus1[i], G_MAXUINT32 - 1); ++ WRITE_UE_MAX (bw, sub_hrd->cpb_size_value_minus1[i], G_MAXUINT32 - 1); ++ ++ if (sub_pic_hrd_params_present_flag) { ++ WRITE_UE_MAX (bw, sub_hrd->cpb_size_du_value_minus1[i], G_MAXUINT32 - 1); ++ WRITE_UE_MAX (bw, sub_hrd->bit_rate_du_value_minus1[i], G_MAXUINT32 - 1); ++ } ++ ++ WRITE_BITS (bw, sub_hrd->cbr_flag[i], 1); ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write \"subLayer HRD Parameters \""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h265_bit_writer_hrd_parameters (const GstH265HRDParams * hrd, ++ guint8 commonInfPresentFlag, guint8 maxNumSubLayersMinus1, ++ GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ guint i; ++ ++ GST_DEBUG ("writing \"HRD Parameters\""); ++ ++ if (commonInfPresentFlag) { ++ WRITE_BITS (bw, hrd->nal_hrd_parameters_present_flag, 1); ++ WRITE_BITS (bw, hrd->vcl_hrd_parameters_present_flag, 1); ++ ++ if (hrd->nal_hrd_parameters_present_flag ++ || hrd->vcl_hrd_parameters_present_flag) { ++ WRITE_BITS (bw, hrd->sub_pic_hrd_params_present_flag, 1); ++ ++ if (hrd->sub_pic_hrd_params_present_flag) { ++ WRITE_BITS (bw, hrd->tick_divisor_minus2, 8); ++ WRITE_BITS (bw, hrd->du_cpb_removal_delay_increment_length_minus1, 5); ++ WRITE_BITS (bw, hrd->sub_pic_cpb_params_in_pic_timing_sei_flag, 1); ++ WRITE_BITS (bw, hrd->dpb_output_delay_du_length_minus1, 5); ++ } ++ ++ WRITE_BITS (bw, hrd->bit_rate_scale, 4); ++ WRITE_BITS (bw, hrd->cpb_size_scale, 4); ++ ++ if (hrd->sub_pic_hrd_params_present_flag) ++ WRITE_BITS (bw, hrd->cpb_size_du_scale, 4); ++ ++ WRITE_BITS (bw, hrd->initial_cpb_removal_delay_length_minus1, 5); ++ WRITE_BITS (bw, hrd->au_cpb_removal_delay_length_minus1, 5); ++ WRITE_BITS (bw, hrd->dpb_output_delay_length_minus1, 5); ++ } ++ } ++ ++ for (i = 0; i <= maxNumSubLayersMinus1; i++) { ++ WRITE_BITS (bw, hrd->fixed_pic_rate_general_flag[i], 1); ++ ++ if (!hrd->fixed_pic_rate_general_flag[i]) { ++ WRITE_BITS (bw, hrd->fixed_pic_rate_within_cvs_flag[i], 1); ++ } else { ++ if (hrd->fixed_pic_rate_within_cvs_flag[i] == 0) ++ goto error; ++ } ++ ++ if (hrd->fixed_pic_rate_within_cvs_flag[i]) { ++ WRITE_UE_MAX (bw, hrd->elemental_duration_in_tc_minus1[i], 2047); ++ } else { ++ WRITE_BITS (bw, hrd->low_delay_hrd_flag[i], 1); ++ } ++ ++ if (!hrd->low_delay_hrd_flag[i]) ++ WRITE_UE_MAX (bw, hrd->cpb_cnt_minus1[i], 31); ++ ++ if (hrd->nal_hrd_parameters_present_flag) ++ if (!_h265_bit_writer_sub_layer_hrd_parameters ++ (&hrd->sublayer_hrd_params[i], hrd->cpb_cnt_minus1[i], ++ hrd->sub_pic_hrd_params_present_flag, bw, &have_space)) ++ goto error; ++ ++ /* TODO: need to separate nal and vcl from hrd_parameters. 
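++ * Until then the same sublayer_hrd_params[i] entry is written again for
++ * the VCL case below.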
*/ ++ if (hrd->vcl_hrd_parameters_present_flag) ++ if (!_h265_bit_writer_sub_layer_hrd_parameters ++ (&hrd->sublayer_hrd_params[i], hrd->cpb_cnt_minus1[i], ++ hrd->sub_pic_hrd_params_present_flag, bw, &have_space)) ++ goto error; ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write \"HRD Parameters\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h265_bit_writer_vps (const GstH265VPS * vps, GstBitWriter * bw, ++ gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ guint i, j; ++ ++ GST_DEBUG ("writing VPS"); ++ ++ WRITE_BITS (bw, vps->id, 4); ++ ++ WRITE_BITS (bw, vps->base_layer_internal_flag, 1); ++ WRITE_BITS (bw, vps->base_layer_available_flag, 1); ++ ++ WRITE_BITS (bw, vps->max_layers_minus1, 6); ++ WRITE_BITS (bw, vps->max_sub_layers_minus1, 3); ++ WRITE_BITS (bw, vps->temporal_id_nesting_flag, 1); ++ ++ /* reserved_0xffff_16bits */ ++ WRITE_BITS (bw, 0xffff, 16); ++ ++ if (!_h265_bit_writer_profile_tier_level (&vps->profile_tier_level, ++ vps->max_sub_layers_minus1, bw, &have_space)) ++ goto error; ++ ++ WRITE_BITS (bw, vps->sub_layer_ordering_info_present_flag, 1); ++ ++ for (i = (vps->sub_layer_ordering_info_present_flag ? 0 : ++ vps->max_sub_layers_minus1); i <= vps->max_sub_layers_minus1; i++) { ++ WRITE_UE (bw, vps->max_dec_pic_buffering_minus1[i]); ++ WRITE_UE_MAX (bw, vps->max_num_reorder_pics[i], ++ vps->max_dec_pic_buffering_minus1[i]); ++ WRITE_UE_MAX (bw, vps->max_latency_increase_plus1[i], G_MAXUINT32 - 1); ++ } ++ ++ /* max_layer_id should be <63, but only support 1 layer now. */ ++ if (vps->max_layer_id > 1) { ++ GST_WARNING ("multi layers are not supported now"); ++ goto error; ++ } ++ ++ WRITE_BITS (bw, vps->max_layer_id, 6); ++ ++ if (vps->num_layer_sets_minus1 >= 1) { ++ GST_WARNING ("layer set is not supported now"); ++ goto error; ++ } ++ WRITE_UE_MAX (bw, vps->num_layer_sets_minus1, 1023); ++ ++ /* TODO: support multi-layer. */ ++ for (i = 1; i <= vps->num_layer_sets_minus1; i++) { ++ for (j = 0; j <= vps->max_layer_id; j++) { ++ /* layer_id_included_flag[i][j] */ ++ WRITE_BITS (bw, 0, 1); ++ } ++ } ++ ++ WRITE_BITS (bw, vps->timing_info_present_flag, 1); ++ if (vps->timing_info_present_flag) { ++ WRITE_BITS (bw, vps->num_units_in_tick, 32); ++ WRITE_BITS (bw, vps->time_scale, 32); ++ WRITE_BITS (bw, vps->poc_proportional_to_timing_flag, 1); ++ ++ if (vps->poc_proportional_to_timing_flag) ++ WRITE_UE_MAX (bw, vps->num_ticks_poc_diff_one_minus1, G_MAXUINT32 - 1); ++ ++ /* TODO: VPS can have multiple hrd parameters, and therefore hrd_params ++ * should be an array (like Garray). Just support 1 hdr parameter now. 
++ */ ++ if (vps->num_hrd_parameters > 1) { ++ GST_WARNING ("HRD parameters > 1 is not supported now"); ++ goto error; ++ } ++ WRITE_UE_MAX (bw, vps->num_hrd_parameters, vps->num_layer_sets_minus1 + 1); ++ ++ if (vps->num_hrd_parameters) { ++ WRITE_UE_MAX (bw, vps->hrd_layer_set_idx, vps->num_layer_sets_minus1); ++ ++ if (!_h265_bit_writer_hrd_parameters (&vps->hrd_params, ++ vps->cprms_present_flag, vps->max_sub_layers_minus1, ++ bw, &have_space)) ++ goto error; ++ } ++ ++ } ++ ++ if (vps->vps_extension) { ++ GST_WARNING ("vps extension is not supported now"); ++ goto error; ++ } ++ WRITE_BITS (bw, 0, 1); ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("failed to write VPS"); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++/** ++ * gst_h265_bit_writer_vps: ++ * @vps: the vps of #GstH265VPS to write ++ * @start_code: whether adding the nal start code ++ * @data: (out): the bit stream generated by the sps ++ * @size: (inout): the size in bytes of the input and output ++ * ++ * Generating the according h265 bit stream by providing the vps. ++ * ++ * Returns: a #GstH265BitWriterResult ++ * ++ * Since: 1.22 ++ **/ ++GstH265BitWriterResult ++gst_h265_bit_writer_vps (const GstH265VPS * vps, gboolean start_code, ++ guint8 * data, guint * size) ++{ ++ gboolean have_space = TRUE; ++ GstBitWriter bw; ++ ++ g_return_val_if_fail (vps != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (data != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (size != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (*size > 0, GST_H265_BIT_WRITER_ERROR); ++ ++ gst_bit_writer_init_with_data (&bw, data, *size, FALSE); ++ ++ if (start_code) ++ WRITE_BITS (&bw, 0x00000001, 32); ++ ++ /* NAL unit header */ ++ /* forbidden_zero_bit */ ++ WRITE_BITS (&bw, 0, 1); ++ /* nal_unit_type */ ++ WRITE_BITS (&bw, GST_H265_NAL_VPS, 6); ++ /* nuh_layer_id, only support 0 now */ ++ WRITE_BITS (&bw, 0, 6); ++ /* nuh_temporal_id_plus1, only support 1 now */ ++ WRITE_BITS (&bw, 1, 3); ++ ++ if (!_h265_bit_writer_vps (vps, &bw, &have_space)) ++ goto error; ++ ++ /* Add trailings. 
*/ ++ WRITE_BITS (&bw, 1, 1); ++ if (!gst_bit_writer_align_bytes (&bw, 0)) { ++ have_space = FALSE; ++ goto error; ++ } ++ ++ *size = gst_bit_writer_get_size (&bw) / 8; ++ gst_bit_writer_reset (&bw); ++ ++ return GST_H265_BIT_WRITER_OK; ++ ++error: ++ gst_bit_writer_reset (&bw); ++ *size = 0; ++ ++ if (!have_space) ++ return GST_H265_BIT_WRITER_NO_MORE_SPACE; ++ return GST_H265_BIT_WRITER_INVALID_DATA; ++} ++ ++static gboolean ++_get_scaling_list_params (const GstH265ScalingList * dest_scaling_list, ++ guint8 sizeId, guint8 matrixId, const guint8 ** sl, guint8 * size, ++ gint16 * scaling_list_dc_coef_minus8) ++{ ++ switch (sizeId) { ++ case GST_H265_QUANT_MATIX_4X4: ++ *sl = dest_scaling_list->scaling_lists_4x4[matrixId]; ++ if (size) ++ *size = 16; ++ break; ++ case GST_H265_QUANT_MATIX_8X8: ++ *sl = dest_scaling_list->scaling_lists_8x8[matrixId]; ++ if (size) ++ *size = 64; ++ break; ++ case GST_H265_QUANT_MATIX_16X16: ++ *sl = dest_scaling_list->scaling_lists_16x16[matrixId]; ++ if (size) ++ *size = 64; ++ if (scaling_list_dc_coef_minus8) ++ *scaling_list_dc_coef_minus8 = ++ dest_scaling_list->scaling_list_dc_coef_minus8_16x16[matrixId]; ++ break; ++ case GST_H265_QUANT_MATIX_32X32: ++ *sl = dest_scaling_list->scaling_lists_32x32[matrixId]; ++ if (size) ++ *size = 64; ++ if (scaling_list_dc_coef_minus8) ++ *scaling_list_dc_coef_minus8 = ++ dest_scaling_list->scaling_list_dc_coef_minus8_32x32[matrixId]; ++ break; ++ default: ++ g_assert_not_reached (); ++ return FALSE; ++ } ++ ++ return TRUE; ++} ++ ++static const guint8 * ++_get_default_scaling_lists (GstH265QuantMatrixSize sizeId, guint8 matrixId) ++{ ++ const guint8 *sl; ++ ++ switch (sizeId) { ++ case GST_H265_QUANT_MATIX_4X4: ++ sl = default_scaling_list0; ++ break; ++ ++ case GST_H265_QUANT_MATIX_8X8: ++ case GST_H265_QUANT_MATIX_16X16: ++ if (matrixId <= 2) { ++ sl = default_scaling_list1; ++ } else { ++ sl = default_scaling_list2; ++ } ++ break; ++ ++ case GST_H265_QUANT_MATIX_32X32: ++ if (matrixId == 0) { ++ sl = default_scaling_list1; ++ } else { ++ sl = default_scaling_list2; ++ } ++ break; ++ ++ default: ++ g_assert_not_reached (); ++ return NULL; ++ } ++ ++ return sl; ++} ++ ++static gboolean ++_compare_scaling_list_matrix (GstH265QuantMatrixSize sizeId, ++ const guint8 * sl0, const guint8 * sl1, ++ gint16 dc_coef_minus8_0, gint16 dc_coef_minus8_1) ++{ ++ guint size = sizeId == GST_H265_QUANT_MATIX_4X4 ? 16 : 64; ++ ++ if (memcmp (sl0, sl1, size * sizeof (guint8))) ++ return FALSE; ++ ++ if (sizeId <= GST_H265_QUANT_MATIX_8X8) ++ return TRUE; ++ ++ return dc_coef_minus8_0 == dc_coef_minus8_1; ++} ++ ++static gboolean ++_h265_bit_writer_scaling_lists (const GstH265ScalingList * src_scaling_list, ++ GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ GstH265QuantMatrixSize sizeId; ++ guint8 matrixId; ++ guint8 scaling_list_pred_mode_flag = 0; ++ guint8 size, i, j; ++ ++ GST_DEBUG ("writing scaling lists"); ++ ++ for (sizeId = 0; sizeId <= GST_H265_QUANT_MATIX_32X32; sizeId++) { ++ for (matrixId = 0; ++ matrixId < ((sizeId == GST_H265_QUANT_MATIX_32X32) ? 2 : 6); ++ matrixId++) { ++ gint16 scaling_list_dc_coef_minus8 = 8; ++ const guint8 *sl; ++ const guint8 *default_sl; ++ guint8 nextCoef; ++ gint8 coef_val; ++ guint8 scaling_list_pred_matrix_id_delta; ++ ++ if (!_get_scaling_list_params (src_scaling_list, sizeId, matrixId, ++ &sl, &size, &scaling_list_dc_coef_minus8)) ++ goto error; ++ ++ /* Check whether it is the default matrix. 
*/ ++ default_sl = _get_default_scaling_lists (sizeId, matrixId); ++ if (_compare_scaling_list_matrix (sizeId, sl, default_sl, ++ scaling_list_dc_coef_minus8, 8)) { ++ scaling_list_pred_mode_flag = 0; ++ WRITE_BITS (bw, scaling_list_pred_mode_flag, 1); ++ scaling_list_pred_matrix_id_delta = 0; ++ WRITE_UE_MAX (bw, scaling_list_pred_matrix_id_delta, matrixId); ++ continue; ++ } ++ ++ /* If some previous matrix is the same, just ref it. */ ++ scaling_list_pred_matrix_id_delta = 0; ++ for (j = 0; j < matrixId; j++) { ++ gboolean ret; ++ guint8 size2; ++ const guint8 *sl2; ++ gint16 scaling_list_dc_coef_minus8_2 = 8; ++ ++ ret = _get_scaling_list_params (src_scaling_list, sizeId, j, ++ &sl2, &size2, &scaling_list_dc_coef_minus8_2); ++ g_assert (ret); ++ g_assert (size == size2); ++ ++ if (_compare_scaling_list_matrix (sizeId, sl, sl2, ++ scaling_list_dc_coef_minus8, scaling_list_dc_coef_minus8_2)) { ++ scaling_list_pred_matrix_id_delta = matrixId - j; ++ break; ++ } ++ } ++ ++ if (scaling_list_pred_matrix_id_delta > 0) { ++ scaling_list_pred_mode_flag = 0; ++ WRITE_BITS (bw, scaling_list_pred_mode_flag, 1); ++ WRITE_UE_MAX (bw, scaling_list_pred_matrix_id_delta, matrixId); ++ continue; ++ } ++ ++ /* Just explicitly signal all matrix coef. */ ++ scaling_list_pred_mode_flag = 1; ++ WRITE_BITS (bw, scaling_list_pred_mode_flag, 1); ++ ++ nextCoef = 8; ++ ++ if (sizeId > 1) { ++ WRITE_SE_RANGE (bw, scaling_list_dc_coef_minus8, -7, 247); ++ nextCoef = scaling_list_dc_coef_minus8 + 8; ++ } ++ ++ for (i = 0; i < size; i++) { ++ coef_val = sl[i] - nextCoef; ++ nextCoef = sl[i]; ++ ++ if (coef_val > 127) { ++ coef_val = coef_val - 256; ++ } ++ if (coef_val < -128) { ++ coef_val = coef_val + 256; ++ } ++ ++ WRITE_SE_RANGE (bw, coef_val, -128, 127); ++ } ++ } ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write scaling lists"); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h265_bit_writer_short_term_ref_pic_set (const GstH265ShortTermRefPicSet * ++ stRPS, guint8 stRpsIdx, const GstH265SPS * sps, ++ GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ gint32 prev; ++ gint i = 0; ++ ++ GST_DEBUG ("writing \"ShortTermRefPicSetParameter\""); ++ ++ if (stRPS->inter_ref_pic_set_prediction_flag) { ++ /* TODO */ ++ GST_WARNING ("inter_ref_pic_set_prediction_flag mode not supported"); ++ goto error; ++ } ++ ++ if (stRpsIdx != 0) ++ WRITE_BITS (bw, stRPS->inter_ref_pic_set_prediction_flag, 1); ++ ++ if (stRPS->NumNegativePics + stRPS->NumPositivePics != stRPS->NumDeltaPocs) ++ goto error; ++ ++ /* 7-49 */ ++ WRITE_UE_MAX (bw, stRPS->NumNegativePics, ++ sps->max_dec_pic_buffering_minus1[sps->max_sub_layers_minus1]); ++ /* 7-50 */ ++ WRITE_UE_MAX (bw, stRPS->NumPositivePics, ++ (sps->max_dec_pic_buffering_minus1[sps->max_sub_layers_minus1] - ++ stRPS->NumNegativePics)); ++ ++ prev = 0; ++ for (i = 0; i < stRPS->NumNegativePics; i++) { ++ WRITE_UE_MAX (bw, prev - stRPS->DeltaPocS0[i] - 1, 32767); ++ prev = stRPS->DeltaPocS0[i]; ++ /* 7-51 */ ++ WRITE_BITS (bw, stRPS->UsedByCurrPicS0[i], 1); ++ } ++ ++ prev = 0; ++ for (i = 0; i < stRPS->NumPositivePics; i++) { ++ WRITE_UE_MAX (bw, stRPS->DeltaPocS1[i] - prev - 1, 32767); ++ prev = stRPS->DeltaPocS1[i]; ++ /* 7-52 */ ++ WRITE_BITS (bw, stRPS->UsedByCurrPicS1[i], 1); ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write \"ShortTermRefPicSet Parameters\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h265_bit_writer_vui_parameters 
(const GstH265SPS * sps, ++ GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ const GstH265VUIParams *vui = &sps->vui_params; ++ ++ GST_DEBUG ("writing \"VUI Parameters\""); ++ ++ WRITE_BITS (bw, vui->aspect_ratio_info_present_flag, 1); ++ if (vui->aspect_ratio_info_present_flag) { ++ WRITE_BITS (bw, vui->aspect_ratio_idc, 8); ++ if (vui->aspect_ratio_idc == EXTENDED_SAR) { ++ WRITE_BITS (bw, vui->sar_width, 16); ++ WRITE_BITS (bw, vui->sar_height, 16); ++ } ++ } ++ ++ WRITE_BITS (bw, vui->overscan_info_present_flag, 1); ++ if (vui->overscan_info_present_flag) ++ WRITE_BITS (bw, vui->overscan_appropriate_flag, 1); ++ ++ WRITE_BITS (bw, vui->video_signal_type_present_flag, 1); ++ if (vui->video_signal_type_present_flag) { ++ WRITE_BITS (bw, vui->video_format, 3); ++ WRITE_BITS (bw, vui->video_full_range_flag, 1); ++ WRITE_BITS (bw, vui->colour_description_present_flag, 1); ++ if (vui->colour_description_present_flag) { ++ WRITE_BITS (bw, vui->colour_primaries, 8); ++ WRITE_BITS (bw, vui->transfer_characteristics, 8); ++ WRITE_BITS (bw, vui->matrix_coefficients, 8); ++ } ++ } ++ ++ WRITE_BITS (bw, vui->chroma_loc_info_present_flag, 1); ++ if (vui->chroma_loc_info_present_flag) { ++ WRITE_UE_MAX (bw, vui->chroma_sample_loc_type_top_field, 5); ++ WRITE_UE_MAX (bw, vui->chroma_sample_loc_type_bottom_field, 5); ++ } ++ ++ WRITE_BITS (bw, vui->neutral_chroma_indication_flag, 1); ++ WRITE_BITS (bw, vui->field_seq_flag, 1); ++ WRITE_BITS (bw, vui->frame_field_info_present_flag, 1); ++ ++ WRITE_BITS (bw, vui->default_display_window_flag, 1); ++ if (vui->default_display_window_flag) { ++ WRITE_UE (bw, vui->def_disp_win_left_offset); ++ WRITE_UE (bw, vui->def_disp_win_right_offset); ++ WRITE_UE (bw, vui->def_disp_win_top_offset); ++ WRITE_UE (bw, vui->def_disp_win_bottom_offset); ++ } ++ ++ WRITE_BITS (bw, vui->timing_info_present_flag, 1); ++ if (vui->timing_info_present_flag) { ++ if (vui->num_units_in_tick == 0) ++ GST_WARNING ("num_units_in_tick = 0 (incompliant to H.265 E.2.1)."); ++ WRITE_BITS (bw, vui->num_units_in_tick, 32); ++ ++ if (vui->time_scale == 0) ++ GST_WARNING ("time_scale = 0 (incompliant to H.265 E.2.1)."); ++ WRITE_BITS (bw, vui->time_scale, 32); ++ ++ WRITE_BITS (bw, vui->poc_proportional_to_timing_flag, 1); ++ if (vui->poc_proportional_to_timing_flag) ++ WRITE_UE_MAX (bw, vui->num_ticks_poc_diff_one_minus1, G_MAXUINT32 - 1); ++ ++ WRITE_BITS (bw, vui->hrd_parameters_present_flag, 1); ++ if (vui->hrd_parameters_present_flag) ++ if (!_h265_bit_writer_hrd_parameters (&vui->hrd_params, 1, ++ sps->max_sub_layers_minus1, bw, &have_space)) ++ goto error; ++ } ++ ++ WRITE_BITS (bw, vui->bitstream_restriction_flag, 1); ++ if (vui->bitstream_restriction_flag) { ++ WRITE_BITS (bw, vui->tiles_fixed_structure_flag, 1); ++ WRITE_BITS (bw, vui->motion_vectors_over_pic_boundaries_flag, 1); ++ WRITE_BITS (bw, vui->restricted_ref_pic_lists_flag, 1); ++ WRITE_UE_MAX (bw, vui->min_spatial_segmentation_idc, 4096); ++ WRITE_UE_MAX (bw, vui->max_bytes_per_pic_denom, 16); ++ WRITE_UE_MAX (bw, vui->max_bits_per_min_cu_denom, 16); ++ WRITE_UE_MAX (bw, vui->log2_max_mv_length_horizontal, 16); ++ WRITE_UE_MAX (bw, vui->log2_max_mv_length_vertical, 15); ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write \"VUI Parameters\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h265_bit_writer_sps (const GstH265SPS * sps, ++ GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ guint i; ++ ++ GST_DEBUG 
("writing SPS"); ++ ++ WRITE_BITS (bw, sps->vps->id, 4); ++ ++ WRITE_BITS (bw, sps->max_sub_layers_minus1, 3); ++ WRITE_BITS (bw, sps->temporal_id_nesting_flag, 1); ++ ++ if (!_h265_bit_writer_profile_tier_level (&sps->profile_tier_level, ++ sps->max_sub_layers_minus1, bw, &have_space)) ++ goto error; ++ ++ WRITE_UE_MAX (bw, sps->id, GST_H265_MAX_SPS_COUNT - 1); ++ ++ WRITE_UE_MAX (bw, sps->chroma_format_idc, 3); ++ if (sps->chroma_format_idc == 3) ++ WRITE_BITS (bw, sps->separate_colour_plane_flag, 1); ++ ++ if (sps->pic_width_in_luma_samples < 1) ++ goto error; ++ WRITE_UE_MAX (bw, sps->pic_width_in_luma_samples, 16888); ++ ++ if (sps->pic_height_in_luma_samples < 1) ++ goto error; ++ WRITE_UE_MAX (bw, sps->pic_height_in_luma_samples, 16888); ++ ++ WRITE_BITS (bw, sps->conformance_window_flag, 1); ++ if (sps->conformance_window_flag) { ++ WRITE_UE (bw, sps->conf_win_left_offset); ++ WRITE_UE (bw, sps->conf_win_right_offset); ++ WRITE_UE (bw, sps->conf_win_top_offset); ++ WRITE_UE (bw, sps->conf_win_bottom_offset); ++ } ++ ++ WRITE_UE_MAX (bw, sps->bit_depth_luma_minus8, 6); ++ WRITE_UE_MAX (bw, sps->bit_depth_chroma_minus8, 6); ++ WRITE_UE_MAX (bw, sps->log2_max_pic_order_cnt_lsb_minus4, 12); ++ ++ WRITE_BITS (bw, sps->sub_layer_ordering_info_present_flag, 1); ++ for (i = (sps->sub_layer_ordering_info_present_flag ? 0 : ++ sps->max_sub_layers_minus1); i <= sps->max_sub_layers_minus1; i++) { ++ WRITE_UE_MAX (bw, sps->max_dec_pic_buffering_minus1[i], 16); ++ WRITE_UE_MAX (bw, sps->max_num_reorder_pics[i], ++ sps->max_dec_pic_buffering_minus1[i]); ++ WRITE_UE (bw, sps->max_latency_increase_plus1[i]); ++ } ++ ++ /* The limits are calculted based on the profile_tier_level constraint ++ * in Annex-A: CtbLog2SizeY = 4 to 6 */ ++ WRITE_UE_MAX (bw, sps->log2_min_luma_coding_block_size_minus3, 3); ++ WRITE_UE_MAX (bw, sps->log2_diff_max_min_luma_coding_block_size, 6); ++ WRITE_UE_MAX (bw, sps->log2_min_transform_block_size_minus2, 3); ++ WRITE_UE_MAX (bw, sps->log2_diff_max_min_transform_block_size, 3); ++ WRITE_UE_MAX (bw, sps->max_transform_hierarchy_depth_inter, 4); ++ WRITE_UE_MAX (bw, sps->max_transform_hierarchy_depth_intra, 4); ++ ++ WRITE_BITS (bw, sps->scaling_list_enabled_flag, 1); ++ if (sps->scaling_list_enabled_flag) { ++ WRITE_BITS (bw, sps->scaling_list_data_present_flag, 1); ++ ++ if (sps->scaling_list_data_present_flag) ++ if (!_h265_bit_writer_scaling_lists (&sps->scaling_list, bw, &have_space)) ++ goto error; ++ } ++ ++ WRITE_BITS (bw, sps->amp_enabled_flag, 1); ++ WRITE_BITS (bw, sps->sample_adaptive_offset_enabled_flag, 1); ++ WRITE_BITS (bw, sps->pcm_enabled_flag, 1); ++ ++ if (sps->pcm_enabled_flag) { ++ WRITE_BITS (bw, sps->pcm_sample_bit_depth_luma_minus1, 4); ++ WRITE_BITS (bw, sps->pcm_sample_bit_depth_chroma_minus1, 4); ++ WRITE_UE_MAX (bw, sps->log2_min_pcm_luma_coding_block_size_minus3, 2); ++ WRITE_UE_MAX (bw, sps->log2_diff_max_min_pcm_luma_coding_block_size, 2); ++ WRITE_BITS (bw, sps->pcm_loop_filter_disabled_flag, 1); ++ } ++ ++ WRITE_UE_MAX (bw, sps->num_short_term_ref_pic_sets, 64); ++ for (i = 0; i < sps->num_short_term_ref_pic_sets; i++) { ++ if (!_h265_bit_writer_short_term_ref_pic_set ++ (&sps->short_term_ref_pic_set[i], i, sps, bw, &have_space)) ++ goto error; ++ } ++ ++ WRITE_BITS (bw, sps->long_term_ref_pics_present_flag, 1); ++ if (sps->long_term_ref_pics_present_flag) { ++ WRITE_UE_MAX (bw, sps->num_long_term_ref_pics_sps, 32); ++ for (i = 0; i < sps->num_long_term_ref_pics_sps; i++) { ++ WRITE_BITS (bw, sps->lt_ref_pic_poc_lsb_sps[i], ++ 
sps->log2_max_pic_order_cnt_lsb_minus4 + 4); ++ WRITE_BITS (bw, sps->used_by_curr_pic_lt_sps_flag[i], 1); ++ } ++ } ++ ++ WRITE_BITS (bw, sps->temporal_mvp_enabled_flag, 1); ++ WRITE_BITS (bw, sps->strong_intra_smoothing_enabled_flag, 1); ++ WRITE_BITS (bw, sps->vui_parameters_present_flag, 1); ++ ++ if (sps->vui_parameters_present_flag) { ++ if (!_h265_bit_writer_vui_parameters (sps, bw, &have_space)) ++ goto error; ++ } ++ ++ WRITE_BITS (bw, sps->sps_extension_flag, 1); ++ ++ if (sps->sps_extension_flag) { ++ WRITE_BITS (bw, sps->sps_range_extension_flag, 1); ++ WRITE_BITS (bw, sps->sps_multilayer_extension_flag, 1); ++ WRITE_BITS (bw, sps->sps_3d_extension_flag, 1); ++ WRITE_BITS (bw, sps->sps_scc_extension_flag, 1); ++ WRITE_BITS (bw, sps->sps_extension_4bits, 4); ++ } ++ ++ if (sps->sps_range_extension_flag) { ++ WRITE_BITS (bw, ++ sps->sps_extension_params.transform_skip_rotation_enabled_flag, 1); ++ WRITE_BITS (bw, ++ sps->sps_extension_params.transform_skip_context_enabled_flag, 1); ++ WRITE_BITS (bw, sps->sps_extension_params.implicit_rdpcm_enabled_flag, 1); ++ WRITE_BITS (bw, sps->sps_extension_params.explicit_rdpcm_enabled_flag, 1); ++ WRITE_BITS (bw, ++ sps->sps_extension_params.extended_precision_processing_flag, 1); ++ WRITE_BITS (bw, sps->sps_extension_params.intra_smoothing_disabled_flag, 1); ++ WRITE_BITS (bw, ++ sps->sps_extension_params.high_precision_offsets_enabled_flag, 1); ++ WRITE_BITS (bw, ++ sps->sps_extension_params.persistent_rice_adaptation_enabled_flag, 1); ++ WRITE_BITS (bw, ++ sps->sps_extension_params.cabac_bypass_alignment_enabled_flag, 1); ++ } ++ ++ if (sps->sps_multilayer_extension_flag) { ++ GST_WARNING ("do not support multilayer extension"); ++ goto error; ++ } ++ if (sps->sps_3d_extension_flag) { ++ GST_WARNING ("do not support 3d extension"); ++ goto error; ++ } ++ ++ if (sps->sps_scc_extension_flag) { ++ const GstH265SPSSccExtensionParams *scc_params = ++ &sps->sps_scc_extension_params; ++ ++ WRITE_BITS (bw, scc_params->sps_curr_pic_ref_enabled_flag, 1); ++ WRITE_BITS (bw, scc_params->palette_mode_enabled_flag, 1); ++ if (scc_params->palette_mode_enabled_flag) { ++ WRITE_UE_MAX (bw, scc_params->palette_max_size, 64); ++ WRITE_UE_MAX (bw, scc_params->delta_palette_max_predictor_size, ++ 128 - scc_params->palette_max_size); ++ ++ WRITE_BITS (bw, ++ scc_params->sps_palette_predictor_initializers_present_flag, 1); ++ if (scc_params->sps_palette_predictor_initializers_present_flag) { ++ guint comp; ++ WRITE_UE_MAX (bw, ++ scc_params->sps_num_palette_predictor_initializer_minus1, ++ scc_params->palette_max_size + ++ scc_params->delta_palette_max_predictor_size - 1); ++ ++ for (comp = 0; comp < (sps->chroma_format_idc == 0 ? 1 : 3); comp++) { ++ guint num_bits; ++ guint num = ++ scc_params->sps_num_palette_predictor_initializer_minus1 + 1; ++ ++ num_bits = (comp == 0 ? 
sps->bit_depth_luma_minus8 + 8 : ++ sps->bit_depth_chroma_minus8 + 8); ++ for (i = 0; i < num; i++) ++ WRITE_BITS (bw, ++ scc_params->sps_palette_predictor_initializer[comp][i], ++ num_bits); ++ } ++ } ++ } ++ ++ WRITE_BITS (bw, scc_params->motion_vector_resolution_control_idc, 2); ++ WRITE_BITS (bw, scc_params->intra_boundary_filtering_disabled_flag, 1); ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("failed to write SPS"); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++/** ++ * gst_h265_bit_writer_sps: ++ * @sps: the sps of #GstH265SPS to write ++ * @start_code: whether adding the nal start code ++ * @data: (out): the bit stream generated by the sps ++ * @size: (inout): the size in bytes of the input and output ++ * ++ * Generating the according h265 bit stream by providing the sps. ++ * ++ * Returns: a #GstH265BitWriterResult ++ * ++ * Since: 1.22 ++ **/ ++GstH265BitWriterResult ++gst_h265_bit_writer_sps (const GstH265SPS * sps, gboolean start_code, ++ guint8 * data, guint * size) ++{ ++ gboolean have_space = TRUE; ++ GstBitWriter bw; ++ ++ g_return_val_if_fail (sps != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (sps->vps != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (data != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (size != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (*size > 0, GST_H265_BIT_WRITER_ERROR); ++ ++ gst_bit_writer_init_with_data (&bw, data, *size, FALSE); ++ ++ if (start_code) ++ WRITE_BITS (&bw, 0x00000001, 32); ++ ++ /* NAL unit header */ ++ /* forbidden_zero_bit */ ++ WRITE_BITS (&bw, 0, 1); ++ /* nal_unit_type */ ++ WRITE_BITS (&bw, GST_H265_NAL_SPS, 6); ++ /* nuh_layer_id, only support 0 now */ ++ WRITE_BITS (&bw, 0, 6); ++ /* nuh_temporal_id_plus1, only support 1 now */ ++ WRITE_BITS (&bw, 1, 3); ++ ++ if (!_h265_bit_writer_sps (sps, &bw, &have_space)) ++ goto error; ++ ++ /* Add trailings. 
*/ ++ WRITE_BITS (&bw, 1, 1); ++ if (!gst_bit_writer_align_bytes (&bw, 0)) { ++ have_space = FALSE; ++ goto error; ++ } ++ ++ *size = gst_bit_writer_get_size (&bw) / 8; ++ gst_bit_writer_reset (&bw); ++ ++ return GST_H265_BIT_WRITER_OK; ++ ++error: ++ gst_bit_writer_reset (&bw); ++ *size = 0; ++ ++ if (!have_space) ++ return GST_H265_BIT_WRITER_NO_MORE_SPACE; ++ return GST_H265_BIT_WRITER_INVALID_DATA; ++} ++ ++static gboolean ++_h265_bit_writer_pps (const GstH265PPS * pps, GstBitWriter * bw, ++ gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ ++ GST_DEBUG ("writing PPS"); ++ ++ WRITE_UE_MAX (bw, pps->id, GST_H265_MAX_PPS_COUNT - 1); ++ WRITE_UE_MAX (bw, pps->sps->id, GST_H265_MAX_SPS_COUNT - 1); ++ ++ WRITE_BITS (bw, pps->dependent_slice_segments_enabled_flag, 1); ++ WRITE_BITS (bw, pps->output_flag_present_flag, 1); ++ WRITE_BITS (bw, pps->num_extra_slice_header_bits, 3); ++ WRITE_BITS (bw, pps->sign_data_hiding_enabled_flag, 1); ++ WRITE_BITS (bw, pps->cabac_init_present_flag, 1); ++ ++ WRITE_UE_MAX (bw, pps->num_ref_idx_l0_default_active_minus1, 14); ++ WRITE_UE_MAX (bw, pps->num_ref_idx_l1_default_active_minus1, 14); ++ WRITE_SE_RANGE (bw, pps->init_qp_minus26, ++ -(26 + 6 * pps->sps->bit_depth_luma_minus8), 25); ++ ++ WRITE_BITS (bw, pps->constrained_intra_pred_flag, 1); ++ WRITE_BITS (bw, pps->transform_skip_enabled_flag, 1); ++ ++ WRITE_BITS (bw, pps->cu_qp_delta_enabled_flag, 1); ++ if (pps->cu_qp_delta_enabled_flag) ++ WRITE_UE_MAX (bw, pps->diff_cu_qp_delta_depth, ++ pps->sps->log2_diff_max_min_luma_coding_block_size); ++ ++ WRITE_SE_RANGE (bw, pps->cb_qp_offset, -12, 12); ++ WRITE_SE_RANGE (bw, pps->cr_qp_offset, -12, 12); ++ ++ WRITE_BITS (bw, pps->slice_chroma_qp_offsets_present_flag, 1); ++ WRITE_BITS (bw, pps->weighted_pred_flag, 1); ++ WRITE_BITS (bw, pps->weighted_bipred_flag, 1); ++ WRITE_BITS (bw, pps->transquant_bypass_enabled_flag, 1); ++ WRITE_BITS (bw, pps->tiles_enabled_flag, 1); ++ WRITE_BITS (bw, pps->entropy_coding_sync_enabled_flag, 1); ++ ++ if (pps->tiles_enabled_flag) { ++ if (pps->num_tile_columns_minus1 + 1 > ++ G_N_ELEMENTS (pps->column_width_minus1)) { ++ GST_WARNING ("Invalid \"num_tile_columns_minus1\" %d", ++ pps->num_tile_columns_minus1); ++ goto error; ++ } ++ ++ if (pps->num_tile_rows_minus1 + 1 > G_N_ELEMENTS (pps->row_height_minus1)) { ++ GST_WARNING ("Invalid \"num_tile_rows_minus1\" %d", ++ pps->num_tile_rows_minus1); ++ goto error; ++ } ++ ++ WRITE_UE_MAX (bw, pps->num_tile_columns_minus1, pps->PicWidthInCtbsY - 1); ++ WRITE_UE_MAX (bw, pps->num_tile_rows_minus1, pps->PicHeightInCtbsY - 1); ++ ++ WRITE_BITS (bw, pps->uniform_spacing_flag, 1); ++ ++ /* 6.5.1, 6-4, 6-5, 7.4.3.3.1 */ ++ if (!pps->uniform_spacing_flag) { ++ guint i; ++ ++ for (i = 0; i < pps->num_tile_columns_minus1; i++) ++ WRITE_UE (bw, pps->column_width_minus1[i]); ++ ++ for (i = 0; i < pps->num_tile_rows_minus1; i++) ++ WRITE_UE (bw, pps->row_height_minus1[i]); ++ } ++ WRITE_BITS (bw, pps->loop_filter_across_tiles_enabled_flag, 1); ++ } ++ ++ WRITE_BITS (bw, pps->loop_filter_across_slices_enabled_flag, 1); ++ ++ WRITE_BITS (bw, pps->deblocking_filter_control_present_flag, 1); ++ if (pps->deblocking_filter_control_present_flag) { ++ WRITE_BITS (bw, pps->deblocking_filter_override_enabled_flag, 1); ++ ++ WRITE_BITS (bw, pps->deblocking_filter_disabled_flag, 1); ++ if (!pps->deblocking_filter_disabled_flag) { ++ WRITE_SE_RANGE (bw, pps->beta_offset_div2, -6, 6); ++ WRITE_SE_RANGE (bw, pps->tc_offset_div2, -6, +6); ++ } ++ } ++ ++ WRITE_BITS (bw, 
pps->scaling_list_data_present_flag, 1); ++ if (pps->scaling_list_data_present_flag) ++ if (!_h265_bit_writer_scaling_lists (&pps->scaling_list, bw, &have_space)) ++ goto error; ++ ++ WRITE_BITS (bw, pps->lists_modification_present_flag, 1); ++ WRITE_UE_MAX (bw, pps->log2_parallel_merge_level_minus2, 4); ++ ++ /* TODO: slice_segment_header */ ++ if (pps->slice_segment_header_extension_present_flag) { ++ GST_WARNING ++ ("slice_segment_header_extension_present_flag is not supported"); ++ goto error; ++ } ++ WRITE_BITS (bw, pps->slice_segment_header_extension_present_flag, 1); ++ ++ WRITE_BITS (bw, pps->pps_extension_flag, 1); ++ ++ if (pps->pps_extension_flag) { ++ WRITE_BITS (bw, pps->pps_range_extension_flag, 1); ++ WRITE_BITS (bw, pps->pps_multilayer_extension_flag, 1); ++ WRITE_BITS (bw, pps->pps_3d_extension_flag, 1); ++ WRITE_BITS (bw, pps->pps_scc_extension_flag, 1); ++ WRITE_BITS (bw, pps->pps_extension_4bits, 4); ++ } ++ ++ if (pps->pps_range_extension_flag) { ++ guint i; ++ guint32 MaxBitDepthY, MaxBitDepthC; ++ ++ if (pps->transform_skip_enabled_flag) ++ WRITE_UE (bw, ++ pps->pps_extension_params.log2_max_transform_skip_block_size_minus2); ++ ++ WRITE_BITS (bw, ++ pps->pps_extension_params.cross_component_prediction_enabled_flag, 1); ++ WRITE_BITS (bw, ++ pps->pps_extension_params.chroma_qp_offset_list_enabled_flag, 1); ++ ++ if (pps->pps_extension_params.chroma_qp_offset_list_enabled_flag) { ++ WRITE_UE_MAX (bw, ++ pps->pps_extension_params.diff_cu_chroma_qp_offset_depth, ++ pps->sps->log2_diff_max_min_luma_coding_block_size); ++ ++ WRITE_UE_MAX (bw, ++ pps->pps_extension_params.chroma_qp_offset_list_len_minus1, 5); ++ for (i = 0; ++ i <= pps->pps_extension_params.chroma_qp_offset_list_len_minus1; ++ i++) { ++ WRITE_SE_RANGE (bw, pps->pps_extension_params.cb_qp_offset_list[i], ++ -12, 12); ++ WRITE_SE_RANGE (bw, pps->pps_extension_params.cr_qp_offset_list[i], ++ -12, 12); ++ } ++ } ++ ++ MaxBitDepthY = pps->sps->bit_depth_luma_minus8 > 2 ? ++ pps->sps->bit_depth_luma_minus8 - 2 : 0; ++ MaxBitDepthC = pps->sps->bit_depth_chroma_minus8 > 2 ? 
++ pps->sps->bit_depth_chroma_minus8 - 2 : 0; ++ WRITE_UE_MAX (bw, pps->pps_extension_params.log2_sao_offset_scale_luma, ++ MaxBitDepthY); ++ WRITE_UE_MAX (bw, pps->pps_extension_params.log2_sao_offset_scale_chroma, ++ MaxBitDepthC); ++ } ++ ++ if (pps->pps_multilayer_extension_flag) { ++ GST_WARNING ("do not support multilayer extension"); ++ goto error; ++ } ++ ++ if (pps->pps_3d_extension_flag) { ++ GST_WARNING ("do not support 3d extension"); ++ goto error; ++ } ++ ++ if (pps->pps_scc_extension_flag) { ++ const GstH265PPSSccExtensionParams *pps_scc = ++ &pps->pps_scc_extension_params; ++ ++ WRITE_BITS (bw, pps_scc->pps_curr_pic_ref_enabled_flag, 1); ++ WRITE_BITS (bw, ++ pps_scc->residual_adaptive_colour_transform_enabled_flag, 1); ++ if (pps_scc->residual_adaptive_colour_transform_enabled_flag) { ++ WRITE_BITS (bw, pps_scc->pps_slice_act_qp_offsets_present_flag, 1); ++ WRITE_SE_RANGE (bw, (gint8) pps_scc->pps_act_y_qp_offset_plus5, -7, 17); ++ WRITE_SE_RANGE (bw, (gint8) pps_scc->pps_act_cb_qp_offset_plus5, -7, 17); ++ WRITE_SE_RANGE (bw, (gint8) pps_scc->pps_act_cr_qp_offset_plus3, -9, 15); ++ } ++ ++ WRITE_BITS (bw, ++ pps_scc->pps_palette_predictor_initializers_present_flag, 1); ++ if (pps_scc->pps_palette_predictor_initializers_present_flag) { ++ guint i; ++ ++ WRITE_UE_MAX (bw, ++ pps_scc->pps_num_palette_predictor_initializer, ++ pps->sps->sps_scc_extension_params.palette_max_size + ++ pps->sps->sps_scc_extension_params.delta_palette_max_predictor_size); ++ if (pps_scc->pps_num_palette_predictor_initializer > 0) { ++ guint comp; ++ ++ WRITE_BITS (bw, pps_scc->monochrome_palette_flag, 1); ++ /* It is a requirement of bitstream conformance that the value of ++ luma_bit_depth_entry_minus8 shall be equal to the value of ++ bit_depth_luma_minus8 */ ++ WRITE_UE_MAX (bw, pps_scc->luma_bit_depth_entry_minus8, ++ pps->sps->bit_depth_luma_minus8); ++ if (!pps_scc->monochrome_palette_flag) { ++ /* It is a requirement of bitstream conformance that the value ++ of chroma_bit_depth_entry_minus8 shall be equal to the value ++ of bit_depth_chroma_minus8. */ ++ WRITE_UE_MAX (bw, pps_scc->chroma_bit_depth_entry_minus8, ++ pps->sps->bit_depth_chroma_minus8); ++ } ++ ++ for (comp = 0; comp < (pps_scc->monochrome_palette_flag ? 1 : 3); ++ comp++) { ++ guint num_bits; ++ guint num = pps_scc->pps_num_palette_predictor_initializer; ++ ++ num_bits = (comp == 0 ? ++ pps_scc->luma_bit_depth_entry_minus8 + 8 : ++ pps_scc->chroma_bit_depth_entry_minus8 + 8); ++ for (i = 0; i < num; i++) ++ WRITE_BITS (bw, ++ pps_scc->pps_palette_predictor_initializer[comp][i], num_bits); ++ } ++ } ++ } ++ } ++ ++ return TRUE; ++ ++error: ++ GST_WARNING ("failed to write PPS"); ++ return FALSE; ++} ++ ++/** ++ * gst_h265_bit_writer_pps: ++ * @pps: the pps of #GstH265PPS to write ++ * @start_code: whether adding the nal start code ++ * @data: (out): the bit stream generated by the pps ++ * @size: (inout): the size in bytes of the input and output ++ * ++ * Generating the according h265 bit stream by providing the pps. 
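++ *
++ * A minimal usage sketch, assuming a fully populated #GstH265PPS named pps
++ * whose sps field points to a valid #GstH265SPS; the buffer size is
++ * arbitrary and, on success, len is updated to the number of bytes written:
++ *
++ * |[
++ *   guint8 buf[128];
++ *   guint len = sizeof (buf);
++ *
++ *   if (gst_h265_bit_writer_pps (&pps, TRUE, buf, &len) ==
++ *       GST_H265_BIT_WRITER_OK)
++ *     g_print ("wrote %u bytes of PPS\n", len);
++ * ]|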
++ * ++ * Returns: a #GstH265BitWriterResult ++ * ++ * Since: 1.22 ++ **/ ++GstH265BitWriterResult ++gst_h265_bit_writer_pps (const GstH265PPS * pps, gboolean start_code, ++ guint8 * data, guint * size) ++{ ++ gboolean have_space = TRUE; ++ GstBitWriter bw; ++ ++ g_return_val_if_fail (pps != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (pps->sps != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (data != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (size != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (*size > 0, GST_H265_BIT_WRITER_ERROR); ++ ++ gst_bit_writer_init_with_data (&bw, data, *size, FALSE); ++ ++ if (start_code) ++ WRITE_BITS (&bw, 0x00000001, 32); ++ ++ /* NAL unit header */ ++ /* forbidden_zero_bit */ ++ WRITE_BITS (&bw, 0, 1); ++ /* nal_unit_type */ ++ WRITE_BITS (&bw, GST_H265_NAL_PPS, 6); ++ /* nuh_layer_id, only support 0 now */ ++ WRITE_BITS (&bw, 0, 6); ++ /* nuh_temporal_id_plus1, only support 1 now */ ++ WRITE_BITS (&bw, 1, 3); ++ ++ if (!_h265_bit_writer_pps (pps, &bw, &have_space)) ++ goto error; ++ ++ /* Add trailings. */ ++ WRITE_BITS (&bw, 1, 1); ++ if (!gst_bit_writer_align_bytes (&bw, 0)) { ++ have_space = FALSE; ++ goto error; ++ } ++ ++ *size = gst_bit_writer_get_size (&bw) / 8; ++ gst_bit_writer_reset (&bw); ++ ++ return GST_H265_BIT_WRITER_OK; ++ ++error: ++ gst_bit_writer_reset (&bw); ++ *size = 0; ++ ++ if (!have_space) ++ return GST_H265_BIT_WRITER_NO_MORE_SPACE; ++ return GST_H265_BIT_WRITER_INVALID_DATA; ++} ++ ++static gboolean ++_h265_slice_bit_writer_ref_pic_list_modification (const GstH265SliceHdr * ++ slice, gint NumPocTotalCurr, GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ guint i; ++ const GstH265RefPicListModification *rpl_mod = ++ &slice->ref_pic_list_modification; ++ const guint n = ceil_log2 (NumPocTotalCurr); ++ ++ WRITE_BITS (bw, rpl_mod->ref_pic_list_modification_flag_l0, 1); ++ ++ if (rpl_mod->ref_pic_list_modification_flag_l0) { ++ for (i = 0; i <= slice->num_ref_idx_l0_active_minus1; i++) { ++ WRITE_BITS (bw, rpl_mod->list_entry_l0[i], n); ++ } ++ } ++ ++ if (GST_H265_IS_B_SLICE (slice)) { ++ WRITE_BITS (bw, rpl_mod->ref_pic_list_modification_flag_l1, 1); ++ ++ if (rpl_mod->ref_pic_list_modification_flag_l1) ++ for (i = 0; i <= slice->num_ref_idx_l1_active_minus1; i++) { ++ WRITE_BITS (bw, rpl_mod->list_entry_l1[i], n); ++ } ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write \"Reference picture list modifications\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h265_slice_bit_writer_pred_weight_table (const GstH265SliceHdr * slice, ++ GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ const GstH265PredWeightTable *p; ++ const GstH265PPS *pps = slice->pps; ++ const GstH265SPS *sps = pps->sps; ++ gint i, j; ++ ++ GST_DEBUG ("writing \"Prediction weight table\""); ++ ++ p = &slice->pred_weight_table; ++ ++ WRITE_UE_MAX (bw, p->luma_log2_weight_denom, 7); ++ ++ if (sps->chroma_format_idc != 0) { ++ WRITE_SE_RANGE (bw, p->delta_chroma_log2_weight_denom, ++ (0 - p->luma_log2_weight_denom), (7 - p->luma_log2_weight_denom)); ++ } ++ ++ for (i = 0; i <= slice->num_ref_idx_l0_active_minus1; i++) ++ WRITE_BITS (bw, p->luma_weight_l0_flag[i], 1); ++ ++ if (sps->chroma_format_idc != 0) ++ for (i = 0; i <= slice->num_ref_idx_l0_active_minus1; i++) ++ WRITE_BITS (bw, p->chroma_weight_l0_flag[i], 1); ++ ++ for (i = 0; i <= slice->num_ref_idx_l0_active_minus1; i++) { ++ if 
(p->luma_weight_l0_flag[i]) { ++ WRITE_SE_RANGE (bw, p->delta_luma_weight_l0[i], -128, 127); ++ WRITE_SE_RANGE (bw, p->luma_offset_l0[i], -128, 127); ++ } ++ if (p->chroma_weight_l0_flag[i]) ++ for (j = 0; j < 2; j++) { ++ WRITE_SE_RANGE (bw, p->delta_chroma_weight_l0[i][j], -128, 127); ++ WRITE_SE_RANGE (bw, p->delta_chroma_offset_l0[i][j], -512, 511); ++ } ++ } ++ ++ if (GST_H265_IS_B_SLICE (slice)) { ++ for (i = 0; i <= slice->num_ref_idx_l1_active_minus1; i++) ++ WRITE_BITS (bw, p->luma_weight_l1_flag[i], 1); ++ ++ if (sps->chroma_format_idc != 0) ++ for (i = 0; i <= slice->num_ref_idx_l1_active_minus1; i++) ++ WRITE_BITS (bw, p->chroma_weight_l1_flag[i], 1); ++ ++ for (i = 0; i <= slice->num_ref_idx_l1_active_minus1; i++) { ++ if (p->luma_weight_l1_flag[i]) { ++ WRITE_SE_RANGE (bw, p->delta_luma_weight_l1[i], -128, 127); ++ WRITE_SE_RANGE (bw, p->luma_offset_l1[i], -128, 127); ++ } ++ if (p->chroma_weight_l1_flag[i]) ++ for (j = 0; j < 2; j++) { ++ WRITE_SE_RANGE (bw, p->delta_chroma_weight_l1[i][j], -128, 127); ++ WRITE_SE_RANGE (bw, p->delta_chroma_offset_l1[i][j], -512, 511); ++ } ++ } ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write \"Prediction weight table\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h265_bit_writer_slice_header (const GstH265SliceHdr * slice, ++ guint32 nal_type, GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ guint i; ++ const GstH265SPS *sps = slice->pps->sps; ++ const GstH265PPSSccExtensionParams *pps_scc_extension_params = ++ &slice->pps->pps_scc_extension_params; ++ const GstH265PPSExtensionParams *pps_extension_params = ++ &slice->pps->pps_extension_params; ++ ++ GST_DEBUG ("writing slice header"); ++ ++ WRITE_BITS (bw, slice->first_slice_segment_in_pic_flag, 1); ++ ++ if (GST_H265_IS_NAL_TYPE_IRAP (nal_type)) ++ WRITE_BITS (bw, slice->no_output_of_prior_pics_flag, 1); ++ ++ WRITE_UE_MAX (bw, slice->pps->id, GST_H265_MAX_PPS_COUNT - 1); ++ ++ if (!slice->first_slice_segment_in_pic_flag) { ++ guint32 PicSizeInCtbsY; ++ guint32 PicWidthInCtbsY; ++ guint32 PicHeightInCtbsY; ++ guint32 CtbSizeY, MinCbLog2SizeY, CtbLog2SizeY; ++ guint n; ++ ++ /* We can not directly use slice->pps->PicWidthInCtbsY/PicHeightInCtbsY, ++ they are calculated value when parsing but may not have value here. 
*/ ++ MinCbLog2SizeY = sps->log2_min_luma_coding_block_size_minus3 + 3; ++ CtbLog2SizeY = ++ MinCbLog2SizeY + sps->log2_diff_max_min_luma_coding_block_size; ++ CtbSizeY = 1 << CtbLog2SizeY; ++ PicHeightInCtbsY = ++ ceil ((gdouble) sps->pic_height_in_luma_samples / (gdouble) CtbSizeY); ++ PicWidthInCtbsY = ++ ceil ((gdouble) sps->pic_width_in_luma_samples / (gdouble) CtbSizeY); ++ PicSizeInCtbsY = PicWidthInCtbsY * PicHeightInCtbsY; ++ ++ n = ceil_log2 (PicSizeInCtbsY); ++ ++ if (slice->pps->dependent_slice_segments_enabled_flag) ++ WRITE_BITS (bw, slice->dependent_slice_segment_flag, 1); ++ /* sice_segment_address parsing */ ++ WRITE_BITS (bw, slice->segment_address, n); ++ } ++ ++ if (!slice->dependent_slice_segment_flag) { ++ for (i = 0; i < slice->pps->num_extra_slice_header_bits; i++) { ++ /* slice_reserved_flag */ ++ WRITE_BITS (bw, 0, 1); ++ } ++ ++ WRITE_UE_MAX (bw, slice->type, 63); ++ ++ if (slice->pps->output_flag_present_flag) ++ WRITE_BITS (bw, slice->pic_output_flag, 1); ++ ++ if (sps->separate_colour_plane_flag == 1) ++ WRITE_BITS (bw, slice->colour_plane_id, 2); ++ ++ if (!GST_H265_IS_NAL_TYPE_IDR (nal_type)) { ++ WRITE_BITS (bw, slice->pic_order_cnt_lsb, ++ (sps->log2_max_pic_order_cnt_lsb_minus4 + 4)); ++ ++ WRITE_BITS (bw, slice->short_term_ref_pic_set_sps_flag, 1); ++ if (!slice->short_term_ref_pic_set_sps_flag) { ++ if (!_h265_bit_writer_short_term_ref_pic_set ++ (&slice->short_term_ref_pic_sets, sps->num_short_term_ref_pic_sets, ++ slice->pps->sps, bw, &have_space)) ++ goto error; ++ } else if (sps->num_short_term_ref_pic_sets > 1) { ++ const guint n = ceil_log2 (sps->num_short_term_ref_pic_sets); ++ ++ if (slice->short_term_ref_pic_set_idx > ++ sps->num_short_term_ref_pic_sets - 1) ++ goto error; ++ ++ WRITE_BITS (bw, slice->short_term_ref_pic_set_idx, n); ++ } ++ ++ if (sps->long_term_ref_pics_present_flag) { ++ guint32 limit; ++ ++ if (sps->num_long_term_ref_pics_sps > 0) ++ WRITE_UE_MAX (bw, slice->num_long_term_sps, ++ sps->num_long_term_ref_pics_sps); ++ ++ WRITE_UE_MAX (bw, slice->num_long_term_pics, 16); ++ limit = slice->num_long_term_sps + slice->num_long_term_pics; ++ for (i = 0; i < limit; i++) { ++ if (i < slice->num_long_term_sps) { ++ if (sps->num_long_term_ref_pics_sps > 1) { ++ const guint n = ceil_log2 (sps->num_long_term_ref_pics_sps); ++ WRITE_BITS (bw, slice->lt_idx_sps[i], n); ++ } ++ } else { ++ WRITE_BITS (bw, slice->poc_lsb_lt[i], ++ (sps->log2_max_pic_order_cnt_lsb_minus4 + 4)); ++ WRITE_BITS (bw, slice->used_by_curr_pic_lt_flag[i], 1); ++ } ++ ++ WRITE_BITS (bw, slice->delta_poc_msb_present_flag[i], 1); ++ if (slice->delta_poc_msb_present_flag[i]) ++ WRITE_UE (bw, slice->delta_poc_msb_cycle_lt[i]); ++ } ++ } ++ ++ if (sps->temporal_mvp_enabled_flag) ++ WRITE_BITS (bw, slice->temporal_mvp_enabled_flag, 1); ++ } ++ ++ if (sps->sample_adaptive_offset_enabled_flag) { ++ gboolean ChromaArrayType = ++ sps->separate_colour_plane_flag == 0 ? 
sps->chroma_format_idc : 0; ++ ++ WRITE_BITS (bw, slice->sao_luma_flag, 1); ++ if (ChromaArrayType) ++ WRITE_BITS (bw, slice->sao_chroma_flag, 1); ++ } ++ ++ if (GST_H265_IS_B_SLICE (slice) || GST_H265_IS_P_SLICE (slice)) { ++ WRITE_BITS (bw, slice->num_ref_idx_active_override_flag, 1); ++ ++ if (slice->num_ref_idx_active_override_flag) { ++ WRITE_UE_MAX (bw, slice->num_ref_idx_l0_active_minus1, 14); ++ if (GST_H265_IS_B_SLICE (slice)) ++ WRITE_UE_MAX (bw, slice->num_ref_idx_l1_active_minus1, 14); ++ } ++ ++ if (slice->pps->lists_modification_present_flag ++ && slice->NumPocTotalCurr > 1) { ++ if (!_h265_slice_bit_writer_ref_pic_list_modification (slice, ++ slice->NumPocTotalCurr, bw, &have_space)) ++ goto error; ++ } ++ ++ if (GST_H265_IS_B_SLICE (slice)) ++ WRITE_BITS (bw, slice->mvd_l1_zero_flag, 1); ++ ++ if (slice->pps->cabac_init_present_flag) ++ WRITE_BITS (bw, slice->cabac_init_flag, 1); ++ ++ if (slice->temporal_mvp_enabled_flag) { ++ if (GST_H265_IS_B_SLICE (slice)) ++ WRITE_BITS (bw, slice->collocated_from_l0_flag, 1); ++ ++ if ((slice->collocated_from_l0_flag ++ && slice->num_ref_idx_l0_active_minus1 > 0) ++ || (!slice->collocated_from_l0_flag ++ && slice->num_ref_idx_l1_active_minus1 > 0)) { ++ if ((GST_H265_IS_P_SLICE (slice)) ++ || ((GST_H265_IS_B_SLICE (slice)) ++ && (slice->collocated_from_l0_flag))) { ++ WRITE_UE_MAX (bw, slice->collocated_ref_idx, ++ slice->num_ref_idx_l0_active_minus1); ++ } else if ((GST_H265_IS_B_SLICE (slice)) ++ && (!slice->collocated_from_l0_flag)) { ++ WRITE_UE_MAX (bw, slice->collocated_ref_idx, ++ slice->num_ref_idx_l1_active_minus1); ++ } ++ } ++ } ++ ++ if ((slice->pps->weighted_pred_flag && GST_H265_IS_P_SLICE (slice)) || ++ (slice->pps->weighted_bipred_flag && GST_H265_IS_B_SLICE (slice))) ++ if (!_h265_slice_bit_writer_pred_weight_table (slice, bw, &have_space)) ++ goto error; ++ ++ WRITE_UE_MAX (bw, slice->five_minus_max_num_merge_cand, 4); ++ ++ if (sps->sps_scc_extension_params.motion_vector_resolution_control_idc ++ == 2) ++ WRITE_BITS (bw, slice->use_integer_mv_flag, 1); ++ } ++ ++ WRITE_SE_RANGE (bw, slice->qp_delta, -87, 77); ++ if (slice->pps->slice_chroma_qp_offsets_present_flag) { ++ WRITE_SE_RANGE (bw, slice->cb_qp_offset, -12, 12); ++ WRITE_SE_RANGE (bw, slice->cr_qp_offset, -12, 12); ++ } ++ ++ if (pps_scc_extension_params->pps_slice_act_qp_offsets_present_flag) { ++ WRITE_SE_RANGE (bw, slice->slice_act_y_qp_offset, -12, 12); ++ WRITE_SE_RANGE (bw, slice->slice_act_cb_qp_offset, -12, 12); ++ WRITE_SE_RANGE (bw, slice->slice_act_cr_qp_offset, -12, 12); ++ } ++ ++ if (pps_extension_params->chroma_qp_offset_list_enabled_flag) ++ WRITE_BITS (bw, slice->cu_chroma_qp_offset_enabled_flag, 1); ++ ++ if (slice->pps->deblocking_filter_override_enabled_flag) ++ WRITE_BITS (bw, slice->deblocking_filter_override_flag, 1); ++ ++ if (slice->deblocking_filter_override_flag) { ++ WRITE_BITS (bw, slice->deblocking_filter_disabled_flag, 1); ++ ++ if (!slice->deblocking_filter_disabled_flag) { ++ WRITE_SE_RANGE (bw, slice->beta_offset_div2, -6, 6); ++ WRITE_SE_RANGE (bw, slice->tc_offset_div2, -6, 6); ++ } ++ } ++ ++ if (slice->pps->loop_filter_across_slices_enabled_flag && ++ (slice->sao_luma_flag || slice->sao_chroma_flag || ++ !slice->deblocking_filter_disabled_flag)) ++ WRITE_BITS (bw, slice->loop_filter_across_slices_enabled_flag, 1); ++ } ++ ++ if (slice->pps->tiles_enabled_flag ++ || slice->pps->entropy_coding_sync_enabled_flag) { ++ guint32 offset_max; ++ ++ if (!slice->pps->tiles_enabled_flag ++ && 
slice->pps->entropy_coding_sync_enabled_flag) { ++ offset_max = slice->pps->PicHeightInCtbsY - 1; ++ } else if (slice->pps->tiles_enabled_flag ++ && !slice->pps->entropy_coding_sync_enabled_flag) { ++ offset_max = ++ (slice->pps->num_tile_columns_minus1 + ++ 1) * (slice->pps->num_tile_rows_minus1 + 1) - 1; ++ } else { ++ offset_max = ++ (slice->pps->num_tile_columns_minus1 + ++ 1) * slice->pps->PicHeightInCtbsY - 1; ++ } ++ ++ WRITE_UE_MAX (bw, slice->num_entry_point_offsets, offset_max); ++ if (slice->num_entry_point_offsets > 0) { ++ WRITE_UE_MAX (bw, slice->offset_len_minus1, 31); ++ for (i = 0; i < slice->num_entry_point_offsets; i++) ++ WRITE_BITS (bw, slice->entry_point_offset_minus1[i], ++ (slice->offset_len_minus1 + 1)); ++ } ++ } ++ ++ /* TODO */ ++ if (slice->pps->slice_segment_header_extension_present_flag) { ++ GST_WARNING ++ ("slice_segment_header_extension_present_flag is not supported"); ++ goto error; ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write slice header"); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++/** ++ * gst_h265_bit_writer_slice_hdr: ++ * @slice: the slice header of #GstH265SliceHdr to write ++ * @start_code: whether adding the nal start code ++ * @nal_type: the slice's nal type of #GstH265NalUnitType ++ * @data: (out): the bit stream generated by the slice header ++ * @size: (inout): the size in bytes of the input and output ++ * ++ * Generating the according h265 bit stream by providing the slice header. ++ * ++ * Returns: a #GstH265BitWriterResult ++ * ++ * Since: 1.22 ++ **/ ++GstH265BitWriterResult ++gst_h265_bit_writer_slice_hdr (const GstH265SliceHdr * slice, ++ gboolean start_code, guint32 nal_type, guint8 * data, guint * size) ++{ ++ gboolean have_space = TRUE; ++ GstBitWriter bw; ++ ++ g_return_val_if_fail (slice != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (slice->pps != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (slice->pps->sps != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (data != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (size != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (*size > 0, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (nal_type <= GST_H265_NAL_SLICE_CRA_NUT, ++ GST_H265_BIT_WRITER_ERROR); ++ ++ gst_bit_writer_init_with_data (&bw, data, *size, FALSE); ++ ++ if (start_code) ++ WRITE_BITS (&bw, 0x00000001, 32); ++ ++ /* NAL unit header */ ++ /* forbidden_zero_bit */ ++ WRITE_BITS (&bw, 0, 1); ++ /* nal_unit_type */ ++ WRITE_BITS (&bw, nal_type, 6); ++ /* nuh_layer_id, only support 0 now */ ++ WRITE_BITS (&bw, 0, 6); ++ /* nuh_temporal_id_plus1, only support 1 now */ ++ WRITE_BITS (&bw, 1, 3); ++ ++ if (!_h265_bit_writer_slice_header (slice, nal_type, &bw, &have_space)) ++ goto error; ++ ++ /* Add trailings. 
*/ ++ WRITE_BITS (&bw, 1, 1); ++ if (!gst_bit_writer_align_bytes (&bw, 0)) { ++ have_space = FALSE; ++ goto error; ++ } ++ ++ *size = gst_bit_writer_get_size (&bw) / 8; ++ gst_bit_writer_reset (&bw); ++ return GST_H265_BIT_WRITER_OK; ++ ++error: ++ gst_bit_writer_reset (&bw); ++ *size = 0; ++ ++ if (!have_space) ++ return GST_H265_BIT_WRITER_NO_MORE_SPACE; ++ return GST_H265_BIT_WRITER_INVALID_DATA; ++} ++ ++ ++static gboolean ++_h265_bit_writer_sei_registered_user_data (const GstH265RegisteredUserData * ++ rud, GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ ++ GST_DEBUG ("Writing \"Registered user data\""); ++ ++ WRITE_BITS (bw, rud->country_code, 8); ++ if (rud->country_code == 0xff) ++ WRITE_BITS (bw, rud->country_code_extension, 8); ++ ++ WRITE_BYTES (bw, rud->data, rud->size); ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_DEBUG ("Failed to write \"Registered user data\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h265_bit_writer_sei_time_code (const GstH265TimeCode * tc, ++ GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ gint i; ++ ++ GST_DEBUG ("Wrtiting \"Time code\""); ++ ++ WRITE_BITS (bw, tc->num_clock_ts, 2); ++ ++ for (i = 0; i < tc->num_clock_ts; i++) { ++ WRITE_BITS (bw, tc->clock_timestamp_flag[i], 1); ++ if (tc->clock_timestamp_flag[i]) { ++ WRITE_BITS (bw, tc->units_field_based_flag[i], 1); ++ WRITE_BITS (bw, tc->counting_type[i], 5); ++ WRITE_BITS (bw, tc->full_timestamp_flag[i], 1); ++ WRITE_BITS (bw, tc->discontinuity_flag[i], 1); ++ WRITE_BITS (bw, tc->cnt_dropped_flag[i], 1); ++ WRITE_BITS (bw, tc->n_frames[i], 9); ++ ++ if (tc->full_timestamp_flag[i]) { ++ WRITE_BITS (bw, tc->seconds_value[i], 6); ++ WRITE_BITS (bw, tc->minutes_value[i], 6); ++ WRITE_BITS (bw, tc->hours_value[i], 5); ++ } else { ++ WRITE_BITS (bw, tc->seconds_flag[i], 1); ++ if (tc->seconds_flag[i]) { ++ WRITE_BITS (bw, tc->seconds_value[i], 6); ++ WRITE_BITS (bw, tc->minutes_flag[i], 1); ++ if (tc->minutes_flag[i]) { ++ WRITE_BITS (bw, tc->minutes_value[i], 6); ++ WRITE_BITS (bw, tc->hours_flag[i], 1); ++ if (tc->hours_flag[i]) { ++ WRITE_BITS (bw, tc->hours_value[i], 5); ++ } ++ } ++ } ++ } ++ } ++ ++ WRITE_BITS (bw, tc->time_offset_length[i], 5); ++ ++ if (tc->time_offset_length[i] > 0) ++ WRITE_BITS (bw, tc->time_offset_value[i], tc->time_offset_length[i]); ++ } ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("Failed to write \"Time code\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h265_bit_writer_sei_mastering_display_colour_volume (const ++ GstH265MasteringDisplayColourVolume * mdcv, GstBitWriter * bw, ++ gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ gint i; ++ ++ GST_DEBUG ("Wrtiting \"Mastering display colour volume\""); ++ ++ for (i = 0; i < 3; i++) { ++ WRITE_BITS (bw, mdcv->display_primaries_x[i], 16); ++ WRITE_BITS (bw, mdcv->display_primaries_y[i], 16); ++ } ++ ++ WRITE_BITS (bw, mdcv->white_point_x, 16); ++ WRITE_BITS (bw, mdcv->white_point_y, 16); ++ WRITE_BITS (bw, mdcv->max_display_mastering_luminance, 32); ++ WRITE_BITS (bw, mdcv->min_display_mastering_luminance, 32); ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("Failed to write \"Mastering display colour volume\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h265_bit_writer_sei_content_light_level_info (const ++ GstH265ContentLightLevel * cll, GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ ++ GST_DEBUG 
("Writing \"Content light level\""); ++ ++ WRITE_BITS (bw, cll->max_content_light_level, 16); ++ WRITE_BITS (bw, cll->max_pic_average_light_level, 16); ++ ++ *space = TRUE; ++ return TRUE; ++ ++error: ++ GST_WARNING ("Failed to write \"Content light level\""); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++static gboolean ++_h265_bit_writer_sei_message (const GstH265SEIMessage * msg, ++ GstBitWriter * bw, gboolean * space) ++{ ++ gboolean have_space = TRUE; ++ ++ GST_DEBUG ("writing SEI message"); ++ ++ switch (msg->payloadType) { ++ case GST_H265_SEI_REGISTERED_USER_DATA: ++ if (!_h265_bit_writer_sei_registered_user_data ++ (&msg->payload.registered_user_data, bw, &have_space)) ++ goto error; ++ break; ++ case GST_H265_SEI_TIME_CODE: ++ if (!_h265_bit_writer_sei_time_code ++ (&msg->payload.time_code, bw, &have_space)) ++ goto error; ++ break; ++ case GST_H265_SEI_MASTERING_DISPLAY_COLOUR_VOLUME: ++ if (!_h265_bit_writer_sei_mastering_display_colour_volume ++ (&msg->payload.mastering_display_colour_volume, bw, &have_space)) ++ goto error; ++ break; ++ case GST_H265_SEI_CONTENT_LIGHT_LEVEL: ++ if (!_h265_bit_writer_sei_content_light_level_info ++ (&msg->payload.content_light_level, bw, &have_space)) ++ goto error; ++ break; ++ default: ++ break; ++ } ++ ++ /* Add trailings. */ ++ WRITE_BITS (bw, 1, 1); ++ gst_bit_writer_align_bytes_unchecked (bw, 0); ++ ++ *space = TRUE; ++ ++ return TRUE; ++ ++error: ++ GST_WARNING ("error to write SEI message"); ++ ++ *space = have_space; ++ return FALSE; ++} ++ ++/** ++ * gst_h265_bit_writer_sei: ++ * @sei_messages: An array of #GstH265SEIMessage to write ++ * @start_code: whether adding the nal start code ++ * @data: (out): the bit stream generated by the sei messages ++ * @size: (inout): the size in bytes of the input and output ++ * ++ * Generating the according h265 bit stream by providing sei messages. 
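++ *
++ * A minimal usage sketch (the values are illustrative; note that only
++ * %GST_H265_NAL_SUFFIX_SEI is currently accepted as @nal_type):
++ *
++ * |[
++ *   GArray *msgs = g_array_new (FALSE, TRUE, sizeof (GstH265SEIMessage));
++ *   GstH265SEIMessage sei = { 0, };
++ *   guint8 buf[256];
++ *   guint len = sizeof (buf);
++ *
++ *   sei.payloadType = GST_H265_SEI_CONTENT_LIGHT_LEVEL;
++ *   sei.payload.content_light_level.max_content_light_level = 1000;
++ *   sei.payload.content_light_level.max_pic_average_light_level = 400;
++ *   g_array_append_val (msgs, sei);
++ *
++ *   gst_h265_bit_writer_sei (msgs, GST_H265_NAL_SUFFIX_SEI, TRUE, buf, &len);
++ *   g_array_unref (msgs);
++ * ]|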
++ * ++ * Returns: a #GstH265BitWriterResult ++ * ++ * Since: 1.22 ++ **/ ++GstH265BitWriterResult ++gst_h265_bit_writer_sei (GArray * sei_messages, ++ GstH265NalUnitType nal_type, gboolean start_code, guint8 * data, ++ guint * size) ++{ ++ gboolean have_space = TRUE; ++ GstBitWriter bw; ++ GstH265SEIMessage *sei; ++ gboolean have_written_data = FALSE; ++ guint i; ++ ++ g_return_val_if_fail (sei_messages != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (nal_type == GST_H265_NAL_PREFIX_SEI ++ || nal_type == GST_H265_NAL_SUFFIX_SEI, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (data != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (size != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (*size > 0, GST_H265_BIT_WRITER_ERROR); ++ ++ if (nal_type == GST_H265_NAL_PREFIX_SEI) { ++ GST_WARNING ("prefix sei is not supported"); ++ return GST_H265_BIT_WRITER_ERROR; ++ } ++ ++ gst_bit_writer_init_with_data (&bw, data, *size, FALSE); ++ ++ if (start_code) ++ WRITE_BITS (&bw, 0x00000001, 32); ++ ++ /* NAL unit header */ ++ /* forbidden_zero_bit */ ++ WRITE_BITS (&bw, 0, 1); ++ /* nal_unit_type */ ++ WRITE_BITS (&bw, nal_type, 6); ++ /* nuh_layer_id, only support 0 now */ ++ WRITE_BITS (&bw, 0, 6); ++ /* nuh_temporal_id_plus1, only support 1 now */ ++ WRITE_BITS (&bw, 1, 3); ++ ++ for (i = 0; i < sei_messages->len; i++) { ++ guint32 payload_size_data; ++ guint32 payload_type_data; ++ ++ gst_bit_writer_init (&bw); ++ ++ sei = &g_array_index (sei_messages, GstH265SEIMessage, i); ++ if (!_h265_bit_writer_sei_message (sei, &bw, &have_space)) ++ goto error; ++ ++ if (gst_bit_writer_get_size (&bw) == 0) { ++ GST_FIXME ("Unsupported SEI type %d", sei->payloadType); ++ continue; ++ } ++ ++ have_written_data = TRUE; ++ ++ g_assert (gst_bit_writer_get_size (&bw) % 8 == 0); ++ payload_size_data = (gst_bit_writer_get_size (&bw) + 7) / 8; ++ payload_type_data = sei->payloadType; ++ ++ /* write payload type bytes */ ++ while (payload_type_data >= 0xff) { ++ WRITE_BITS (&bw, 0xff, 8); ++ payload_type_data -= -0xff; ++ } ++ WRITE_BITS (&bw, payload_type_data, 8); ++ ++ /* write payload size bytes */ ++ while (payload_size_data >= 0xff) { ++ WRITE_BITS (&bw, 0xff, 8); ++ payload_size_data -= -0xff; ++ } ++ WRITE_BITS (&bw, payload_size_data, 8); ++ ++ if (gst_bit_writer_get_size (&bw) / 8) ++ WRITE_BYTES (&bw, gst_bit_writer_get_data (&bw), ++ gst_bit_writer_get_size (&bw) / 8); ++ ++ gst_bit_writer_reset (&bw); ++ } ++ ++ if (!have_written_data) { ++ GST_WARNING ("No written sei data"); ++ goto error; ++ } ++ ++ /* Add trailings. */ ++ WRITE_BITS (&bw, 1, 1); ++ if (!gst_bit_writer_align_bytes (&bw, 0)) { ++ have_space = FALSE; ++ goto error; ++ } ++ ++ *size = gst_bit_writer_get_size (&bw) / 8; ++ gst_bit_writer_reset (&bw); ++ return GST_H265_BIT_WRITER_OK; ++ ++error: ++ gst_bit_writer_reset (&bw); ++ *size = 0; ++ ++ if (!have_space) ++ return GST_H265_BIT_WRITER_NO_MORE_SPACE; ++ return GST_H265_BIT_WRITER_INVALID_DATA; ++} ++ ++/** ++ * gst_h265_bit_writer_aud: ++ * @pic_type: indicate the possible slice types list just ++ * as the H265 spec Table 7-2 defines ++ * @start_code: whether adding the nal start code ++ * @data: (out): the bit stream generated by the aud ++ * @size: (inout): the size in bytes of the input and output ++ * ++ * Generating the according h265 bit stream of an aud. 
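++ *
++ * A minimal usage sketch; a @pic_type of 2 means the access unit may contain
++ * I, P and B slices (see Table 7-2):
++ *
++ * |[
++ *   guint8 buf[16];
++ *   guint len = sizeof (buf);
++ *
++ *   gst_h265_bit_writer_aud (2, TRUE, buf, &len);
++ * ]|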
++ * ++ * Returns: a #GstH265BitWriterResult ++ * ++ * Since: 1.22 ++ **/ ++GstH265BitWriterResult ++gst_h265_bit_writer_aud (guint8 pic_type, gboolean start_code, ++ guint8 * data, guint * size) ++{ ++ gboolean have_space = TRUE; ++ GstBitWriter bw; ++ ++ g_return_val_if_fail (pic_type <= 2, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (data != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (size != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (*size > 0, GST_H265_BIT_WRITER_ERROR); ++ ++ gst_bit_writer_init_with_data (&bw, data, *size, FALSE); ++ ++ if (start_code) ++ WRITE_BITS (&bw, 0x00000001, 32); ++ ++ /* NAL unit header */ ++ /* forbidden_zero_bit */ ++ WRITE_BITS (&bw, 0, 1); ++ /* nal_unit_type */ ++ WRITE_BITS (&bw, GST_H265_NAL_AUD, 6); ++ /* nuh_layer_id, only support 0 now */ ++ WRITE_BITS (&bw, 0, 6); ++ /* nuh_temporal_id_plus1, only support 1 now */ ++ WRITE_BITS (&bw, 1, 3); ++ ++ WRITE_BITS (&bw, pic_type, 3); ++ ++ /* Add trailings. */ ++ WRITE_BITS (&bw, 1, 1); ++ if (!gst_bit_writer_align_bytes (&bw, 0)) { ++ goto error; ++ } ++ ++ *size = gst_bit_writer_get_size (&bw) / 8; ++ gst_bit_writer_reset (&bw); ++ ++ return GST_H265_BIT_WRITER_OK; ++ ++error: ++ gst_bit_writer_reset (&bw); ++ *size = 0; ++ ++ if (!have_space) ++ return GST_H265_BIT_WRITER_NO_MORE_SPACE; ++ return GST_H265_BIT_WRITER_INVALID_DATA; ++} ++ ++/** ++ * gst_h265_bit_writer_convert_to_nal: ++ * @nal_prefix_size: the size in bytes for the prefix of a nal, may ++ * be 2, 3 or 4 ++ * @packetized: whether to write the bit stream in packetized format, ++ * which does not have the start code but has a @nal_prefix_size bytes' ++ * size prepending to the real nal data ++ * @has_startcode: whether the input already has a start code ++ * @add_trailings: whether to add rbsp trailing bits to make the output ++ * aligned to byte ++ * @raw_data: the input bit stream ++ * @raw_size: the size in bits of the input bit stream ++ * @nal_data: (out): the output bit stream converted to a real nal ++ * @nal_size: (inout): the size in bytes of the output ++ * ++ * Converting a bit stream into a real nal packet. If the bit stream already ++ * has a start code, it will be replaced by the new one specified by the ++ * @nal_prefix_size and @packetized. It is assured that the output aligns to ++ * the byte and the all the emulations are inserted. ++ * ++ * Returns: a #GstH265BitWriterResult ++ * ++ * Since: 1.22 ++ **/ ++GstH265BitWriterResult ++gst_h265_bit_writer_convert_to_nal (guint nal_prefix_size, ++ gboolean packetized, gboolean has_startcode, gboolean add_trailings, ++ const guint8 * raw_data, gsize raw_size, guint8 * nal_data, ++ guint * nal_size) ++{ ++ NalWriter nw; ++ guint8 *data; ++ guint32 size = 0; ++ gboolean need_more_space = FALSE; ++ ++ g_return_val_if_fail ( ++ (packetized && nal_prefix_size > 1 && nal_prefix_size < 5) || ++ (!packetized && (nal_prefix_size == 3 || nal_prefix_size == 4)), ++ GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (raw_data != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (raw_size > 0, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (nal_data != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (nal_size != NULL, GST_H265_BIT_WRITER_ERROR); ++ g_return_val_if_fail (*nal_size > 0, GST_H265_BIT_WRITER_ERROR); ++ ++ if (has_startcode) { ++ /* Skip the start code, the NalWriter will add it automatically. 
*/
++    if (raw_size >= 4 && raw_data[0] == 0
++        && raw_data[1] == 0 && raw_data[2] == 0 && raw_data[3] == 0x01) {
++      raw_data += 4;
++      raw_size -= 4 * 8;
++    } else if (raw_size >= 3 && raw_data[0] == 0 && raw_data[1] == 0
++        && raw_data[2] == 0x01) {
++      raw_data += 3;
++      raw_size -= 3 * 8;
++    } else {
++      /* Fail to find the start code. */
++      g_return_val_if_reached (GST_H265_BIT_WRITER_ERROR);
++    }
++  }
++
++  /* If no RBSP trailing needed, it must align to byte. We assume
++     that the rbsp trailing bits are already added. */
++  if (!add_trailings)
++    g_return_val_if_fail (raw_size % 8 == 0, GST_H265_BIT_WRITER_ERROR);
++
++  nal_writer_init (&nw, nal_prefix_size, packetized);
++
++  if (!nal_writer_put_bytes (&nw, raw_data, raw_size / 8))
++    goto error;
++
++  if (raw_size % 8) {
++    guint8 data = *(raw_data + raw_size / 8);
++
++    if (!nal_writer_put_bits_uint8 (&nw,
++            data >> (8 - raw_size % 8), raw_size % 8))
++      goto error;
++  }
++
++  if (add_trailings) {
++    if (!nal_writer_do_rbsp_trailing_bits (&nw))
++      goto error;
++  }
++
++  data = nal_writer_reset_and_get_data (&nw, &size);
++  if (!data)
++    goto error;
++
++  if (size > *nal_size) {
++    need_more_space = TRUE;
++    g_free (data);
++    goto error;
++  }
++
++  memcpy (nal_data, data, size);
++  *nal_size = size;
++  g_free (data);
++  nal_writer_reset (&nw);
++  return GST_H265_BIT_WRITER_OK;
++
++error:
++  nal_writer_reset (&nw);
++  *nal_size = 0;
++
++  GST_WARNING ("Failed to convert nal data");
++
++  if (need_more_space)
++    return GST_H265_BIT_WRITER_NO_MORE_SPACE;
++  return GST_H265_BIT_WRITER_INVALID_DATA;
++}
+diff --git a/gst-libs/gst/codecparsers/gsth265bitwriter.h b/gst-libs/gst/codecparsers/gsth265bitwriter.h
+new file mode 100644
+index 000000000..88da8104c
+--- /dev/null
++++ b/gst-libs/gst/codecparsers/gsth265bitwriter.h
+@@ -0,0 +1,93 @@
++/* GStreamer
++ * Copyright (C) 2021 Intel Corporation
++ *   Author: He Junyan
++ *
++ * This library is free software; you can redistribute it and/or
++ * modify it under the terms of the GNU Library General Public
++ * License as published by the Free Software Foundation; either
++ * version 2 of the License, or (at your option) any later version.
++ *
++ * This library is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ * Library General Public License for more details.
++ *
++ * You should have received a copy of the GNU Library General Public
++ * License along with this library; if not, write to the
++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
++ * Boston, MA 02110-1301, USA.
++ */
++
++#ifndef __GST_H265_BIT_WRITER_H__
++#define __GST_H265_BIT_WRITER_H__
++
++#include <gst/codecparsers/codecparsers-prelude.h>
++#include <gst/codecparsers/gsth265parser.h>
++
++G_BEGIN_DECLS
++
++/**
++ * GstH265BitWriterResult:
++ * @GST_H265_BIT_WRITER_OK: The writing succeeded
++ * @GST_H265_BIT_WRITER_INVALID_DATA: The input data to write is invalid
++ * @GST_H265_BIT_WRITER_NO_MORE_SPACE: The output does not have enough size
++ * @GST_H265_BIT_WRITER_ERROR: A general error occurred when writing
++ *
++ * The result of writing H265 data into bit stream.
++ * ++ * Since: 1.22 ++ */ ++typedef enum ++{ ++ GST_H265_BIT_WRITER_OK, ++ GST_H265_BIT_WRITER_INVALID_DATA, ++ GST_H265_BIT_WRITER_NO_MORE_SPACE, ++ GST_H265_BIT_WRITER_ERROR ++} GstH265BitWriterResult; ++ ++GST_CODEC_PARSERS_API ++GstH265BitWriterResult gst_h265_bit_writer_vps (const GstH265VPS * vps, ++ gboolean start_code, ++ guint8 * data, ++ guint * size); ++GST_CODEC_PARSERS_API ++GstH265BitWriterResult gst_h265_bit_writer_sps (const GstH265SPS * sps, ++ gboolean start_code, ++ guint8 * data, ++ guint * size); ++GST_CODEC_PARSERS_API ++GstH265BitWriterResult gst_h265_bit_writer_pps (const GstH265PPS * pps, ++ gboolean start_code, ++ guint8 * data, ++ guint * size); ++GST_CODEC_PARSERS_API ++GstH265BitWriterResult gst_h265_bit_writer_slice_hdr (const GstH265SliceHdr * slice, ++ gboolean start_code, ++ guint32 nal_type, ++ guint8 * data, ++ guint * size); ++GST_CODEC_PARSERS_API ++GstH265BitWriterResult gst_h265_bit_writer_sei (GArray * sei_messages, ++ GstH265NalUnitType nal_type, ++ gboolean start_code, ++ guint8 * data, ++ guint * size); ++ ++GST_CODEC_PARSERS_API ++GstH265BitWriterResult gst_h265_bit_writer_aud (guint8 pic_type, ++ gboolean start_code, ++ guint8 * data, ++ guint * size); ++ ++GST_CODEC_PARSERS_API ++GstH265BitWriterResult gst_h265_bit_writer_convert_to_nal (guint nal_prefix_size, ++ gboolean packetized, ++ gboolean has_startcode, ++ gboolean add_trailings, ++ const guint8 * raw_data, ++ gsize raw_size, ++ guint8 * nal_data, ++ guint * nal_size); ++G_END_DECLS ++ ++#endif /* __GST_H265_BIT_WRITER_H__ */ +diff --git a/gst-libs/gst/codecparsers/gsth265parser.c b/gst-libs/gst/codecparsers/gsth265parser.c +index dc7f27aa9..44b723737 100644 +--- a/gst-libs/gst/codecparsers/gsth265parser.c ++++ b/gst-libs/gst/codecparsers/gsth265parser.c +@@ -597,10 +597,12 @@ gst_h265_parse_vui_parameters (GstH265SPS * sps, NalReader * nr) + READ_UE_MAX (nr, vui->log2_max_mv_length_vertical, 15); + } + ++ vui->parsed = TRUE; + return TRUE; + + error: + GST_WARNING ("error parsing \"VUI Parameters\""); ++ vui->parsed = FALSE; + return FALSE; + } + +@@ -940,7 +942,7 @@ gst_h265_slice_parse_ref_pic_list_modification (GstH265SliceHdr * slice, + return TRUE; + + error: +- GST_WARNING ("error parsing \"Prediction weight table\""); ++ GST_WARNING ("error parsing \"Reference picture list modifications\""); + return FALSE; + } + +@@ -1581,6 +1583,179 @@ gst_h265_parser_identify_nalu_hevc (GstH265Parser * parser, + return GST_H265_PARSER_OK; + } + ++/** ++ * gst_h265_parser_identify_and_split_nalu_hevc: ++ * @parser: a #GstH265Parser ++ * @data: The data to parse, must be the beging of the Nal unit ++ * @offset: the offset from which to parse @data ++ * @size: the size of @data ++ * @nal_length_size: the size in bytes of the HEVC nal length prefix. ++ * @nalus: a caller allocated GArray of #GstH265NalUnit where to store parsed nal headers ++ * @consumed: the size of consumed bytes ++ * ++ * Parses @data for packetized (e.g., hvc1/hev1) bitstream and ++ * sets @nalus. In addition to nal identifying process, ++ * this method scans start-code prefix to split malformed packet into ++ * actual nal chunks. 
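/* Editor's note: illustrative sketch only, not part of the upstream patch.
 * It shows one way the splitting helper documented above could be driven
 * over a packetized (hvc1-style) access unit with 4-byte length prefixes.
 * The helper resets and refills @nalus on every call, so each chunk is
 * processed before advancing by @consumed bytes; the surrounding loop and
 * all names here are assumptions of the example. */
#include <gst/codecparsers/gsth265parser.h>

static void
split_access_unit_example (GstH265Parser * parser, const guint8 * au,
    gsize au_size)
{
  GArray *nalus = g_array_new (FALSE, FALSE, sizeof (GstH265NalUnit));
  guint pos = 0;

  while (pos < au_size) {
    gsize consumed = 0;
    guint i;

    if (gst_h265_parser_identify_and_split_nalu_hevc (parser, au, pos,
            au_size, 4, nalus, &consumed) != GST_H265_PARSER_OK)
      break;

    for (i = 0; i < nalus->len; i++) {
      GstH265NalUnit *nalu = &g_array_index (nalus, GstH265NalUnit, i);

      /* each entry is ready for gst_h265_parser_parse_nal() and friends */
      gst_h265_parser_parse_nal (parser, nalu);
    }

    pos += consumed;
  }

  g_array_unref (nalus);
}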
++ * ++ * Returns: a #GstH265ParserResult ++ * ++ * Since: 1.22 ++ */ ++GstH265ParserResult ++gst_h265_parser_identify_and_split_nalu_hevc (GstH265Parser * parser, ++ const guint8 * data, guint offset, gsize size, guint8 nal_length_size, ++ GArray * nalus, gsize * consumed) ++{ ++ GstBitReader br; ++ guint nalu_size; ++ guint remaining; ++ guint off; ++ guint sc_size; ++ ++ g_return_val_if_fail (data != NULL, GST_H265_PARSER_ERROR); ++ g_return_val_if_fail (nalus != NULL, GST_H265_PARSER_ERROR); ++ g_return_val_if_fail (nal_length_size > 0 && nal_length_size < 5, ++ GST_H265_PARSER_ERROR); ++ ++ g_array_set_size (nalus, 0); ++ ++ if (consumed) ++ *consumed = 0; ++ ++ /* Would overflow guint below otherwise: the callers needs to ensure that ++ * this never happens */ ++ if (offset > G_MAXUINT32 - nal_length_size) { ++ GST_WARNING ("offset + nal_length_size overflow"); ++ return GST_H265_PARSER_BROKEN_DATA; ++ } ++ ++ if (size < offset + nal_length_size) { ++ GST_DEBUG ("Can't parse, buffer has too small size %" G_GSIZE_FORMAT ++ ", offset %u", size, offset); ++ return GST_H265_PARSER_ERROR; ++ } ++ ++ /* Read nal unit size and unwrap the size field */ ++ gst_bit_reader_init (&br, data + offset, size - offset); ++ nalu_size = gst_bit_reader_get_bits_uint32_unchecked (&br, ++ nal_length_size * 8); ++ ++ if (nalu_size < 2) { ++ GST_WARNING ("too small nal size %d", nalu_size); ++ return GST_H265_PARSER_BROKEN_DATA; ++ } ++ ++ if (size < (gsize) nalu_size + nal_length_size) { ++ GST_WARNING ("larger nalu size %d than data size %" G_GSIZE_FORMAT, ++ nalu_size + nal_length_size, size); ++ return GST_H265_PARSER_BROKEN_DATA; ++ } ++ ++ if (consumed) ++ *consumed = nalu_size + nal_length_size; ++ ++ off = offset + nal_length_size; ++ remaining = nalu_size; ++ sc_size = nal_length_size; ++ ++ /* Drop trailing start-code since it will not be scanned */ ++ if (remaining >= 3) { ++ if (data[off + remaining - 1] == 0x01 && data[off + remaining - 2] == 0x00 ++ && data[off + remaining - 3] == 0x00) { ++ remaining -= 3; ++ ++ /* 4 bytes start-code */ ++ if (remaining > 0 && data[off + remaining - 1] == 0x00) ++ remaining--; ++ } ++ } ++ ++ /* Looping to split malformed nal units. nal-length field was dropped above ++ * so expected bitstream structure are: ++ * ++ * ++ * | nalu | ++ * sc scan result will be -1 and handled in CONDITION-A ++ * ++ * ++ * | SC | nalu | ++ * Hit CONDITION-C first then terminated in CONDITION-A ++ * ++ * ++ * | nalu | SC | nalu | ... 
++ * CONDITION-B handles those cases ++ */ ++ do { ++ GstH265NalUnit nalu; ++ gint sc_offset = -1; ++ guint skip_size = 0; ++ ++ memset (&nalu, 0, sizeof (GstH265NalUnit)); ++ ++ /* startcode 3 bytes + minimum nal size 2 */ ++ if (remaining >= 5) ++ sc_offset = scan_for_start_codes (data + off, remaining); ++ ++ if (sc_offset < 0) { ++ if (remaining >= 2) { ++ /* CONDITION-A */ ++ /* Last chunk */ ++ nalu.size = remaining; ++ nalu.sc_offset = off - sc_size; ++ nalu.offset = off; ++ nalu.data = (guint8 *) data; ++ nalu.valid = TRUE; ++ ++ gst_h265_parse_nalu_header (&nalu); ++ g_array_append_val (nalus, nalu); ++ } ++ break; ++ } else if ((sc_offset == 2 && data[off + sc_offset - 1] != 0) ++ || sc_offset > 2) { ++ /* CONDITION-B */ ++ /* Found trailing startcode prefix */ ++ ++ nalu.size = sc_offset; ++ if (data[off + sc_offset - 1] == 0) { ++ /* 4 bytes start code */ ++ nalu.size--; ++ } ++ ++ nalu.sc_offset = off - sc_size; ++ nalu.offset = off; ++ nalu.data = (guint8 *) data; ++ nalu.valid = TRUE; ++ ++ gst_h265_parse_nalu_header (&nalu); ++ g_array_append_val (nalus, nalu); ++ } else { ++ /* CONDITION-C */ ++ /* startcode located at beginning of this chunk without actual nal data. ++ * skip this start code */ ++ } ++ ++ skip_size = sc_offset + 3; ++ if (skip_size >= remaining) ++ break; ++ ++ /* no more nal-length bytes but 3bytes startcode */ ++ sc_size = 3; ++ if (sc_offset > 0 && data[off + sc_offset - 1] == 0) ++ sc_size++; ++ ++ remaining -= skip_size; ++ off += skip_size; ++ } while (remaining >= 2); ++ ++ if (nalus->len > 0) ++ return GST_H265_PARSER_OK; ++ ++ GST_WARNING ("No nal found"); ++ ++ return GST_H265_PARSER_BROKEN_DATA; ++} ++ + /** + * gst_h265_parser_parse_nal: + * @parser: a #GstH265Parser +@@ -1670,6 +1845,7 @@ gst_h265_parse_vps (GstH265NalUnit * nalu, GstH265VPS * vps) + + READ_UINT8 (&nr, vps->max_layers_minus1, 6); + READ_UINT8 (&nr, vps->max_sub_layers_minus1, 3); ++ CHECK_ALLOWED (vps->max_sub_layers_minus1, 0, 6); + READ_UINT8 (&nr, vps->temporal_id_nesting_flag, 1); + + /* skip reserved_0xffff_16bits */ +@@ -1714,7 +1890,8 @@ gst_h265_parse_vps (GstH265NalUnit * nalu, GstH265VPS * vps) + for (j = 0; j <= vps->max_layer_id; j++) { + /* layer_id_included_flag[i][j] */ + /* FIXME: need to parse this when we can support parsing multi-layer info. 
*/ +- nal_reader_skip (&nr, 1); ++ if (!nal_reader_skip (&nr, 1)) ++ goto error; + } + } + +@@ -1825,12 +2002,9 @@ gst_h265_parse_sps (GstH265Parser * parser, GstH265NalUnit * nalu, + GstH265SPS * sps, gboolean parse_vui_params) + { + NalReader nr; +- GstH265VPS *vps; +- guint8 vps_id; + guint i; + guint subwc[] = { 1, 2, 2, 1, 1 }; + guint subhc[] = { 1, 2, 1, 1, 1 }; +- GstH265VUIParams *vui = NULL; + + GST_DEBUG ("parsing SPS"); + +@@ -1839,15 +2013,10 @@ gst_h265_parse_sps (GstH265Parser * parser, GstH265NalUnit * nalu, + + memset (sps, 0, sizeof (*sps)); + +- READ_UINT8 (&nr, vps_id, 4); +- vps = gst_h265_parser_get_vps (parser, vps_id); +- if (!vps) { +- GST_DEBUG ("couldn't find associated video parameter set with id: %d", +- vps_id); +- } +- sps->vps = vps; ++ READ_UINT8 (&nr, sps->vps_id, 4); + + READ_UINT8 (&nr, sps->max_sub_layers_minus1, 3); ++ CHECK_ALLOWED (sps->max_sub_layers_minus1, 0, 6); + READ_UINT8 (&nr, sps->temporal_id_nesting_flag, 1); + + if (!gst_h265_parse_profile_tier_level (&sps->profile_tier_level, &nr, +@@ -1946,11 +2115,9 @@ gst_h265_parse_sps (GstH265Parser * parser, GstH265NalUnit * nalu, + READ_UINT8 (&nr, sps->strong_intra_smoothing_enabled_flag, 1); + READ_UINT8 (&nr, sps->vui_parameters_present_flag, 1); + +- if (sps->vui_parameters_present_flag && parse_vui_params) { ++ if (sps->vui_parameters_present_flag && parse_vui_params) + if (!gst_h265_parse_vui_parameters (sps, &nr)) + goto error; +- vui = &sps->vui_params; +- } + + READ_UINT8 (&nr, sps->sps_extension_flag, 1); + +@@ -1964,20 +2131,21 @@ gst_h265_parse_sps (GstH265Parser * parser, GstH265NalUnit * nalu, + + if (sps->sps_range_extension_flag) { + READ_UINT8 (&nr, +- sps->sps_extnsion_params.transform_skip_rotation_enabled_flag, 1); ++ sps->sps_extension_params.transform_skip_rotation_enabled_flag, 1); + READ_UINT8 (&nr, +- sps->sps_extnsion_params.transform_skip_context_enabled_flag, 1); +- READ_UINT8 (&nr, sps->sps_extnsion_params.implicit_rdpcm_enabled_flag, 1); +- READ_UINT8 (&nr, sps->sps_extnsion_params.explicit_rdpcm_enabled_flag, 1); ++ sps->sps_extension_params.transform_skip_context_enabled_flag, 1); ++ READ_UINT8 (&nr, sps->sps_extension_params.implicit_rdpcm_enabled_flag, 1); ++ READ_UINT8 (&nr, sps->sps_extension_params.explicit_rdpcm_enabled_flag, 1); + READ_UINT8 (&nr, +- sps->sps_extnsion_params.extended_precision_processing_flag, 1); +- READ_UINT8 (&nr, sps->sps_extnsion_params.intra_smoothing_disabled_flag, 1); ++ sps->sps_extension_params.extended_precision_processing_flag, 1); ++ READ_UINT8 (&nr, sps->sps_extension_params.intra_smoothing_disabled_flag, ++ 1); + READ_UINT8 (&nr, +- sps->sps_extnsion_params.high_precision_offsets_enabled_flag, 1); ++ sps->sps_extension_params.high_precision_offsets_enabled_flag, 1); + READ_UINT8 (&nr, +- sps->sps_extnsion_params.persistent_rice_adaptation_enabled_flag, 1); ++ sps->sps_extension_params.persistent_rice_adaptation_enabled_flag, 1); + READ_UINT8 (&nr, +- sps->sps_extnsion_params.cabac_bypass_alignment_enabled_flag, 1); ++ sps->sps_extension_params.cabac_bypass_alignment_enabled_flag, 1); + } + + if (sps->sps_multilayer_extension_flag) { +@@ -2002,22 +2170,25 @@ gst_h265_parse_sps (GstH265Parser * parser, GstH265NalUnit * nalu, + 128 - sps->sps_scc_extension_params.palette_max_size); + + READ_UINT8 (&nr, +- sps->sps_scc_extension_params. +- sps_palette_predictor_initializers_present_flag, 1); +- if (sps->sps_scc_extension_params. 
+- sps_palette_predictor_initializers_present_flag) { ++ sps-> ++ sps_scc_extension_params.sps_palette_predictor_initializers_present_flag, ++ 1); ++ if (sps-> ++ sps_scc_extension_params.sps_palette_predictor_initializers_present_flag) ++ { + guint comp; + READ_UE_MAX (&nr, +- sps->sps_scc_extension_params. +- sps_num_palette_predictor_initializer_minus1, ++ sps-> ++ sps_scc_extension_params.sps_num_palette_predictor_initializer_minus1, + sps->sps_scc_extension_params.palette_max_size + + sps->sps_scc_extension_params.delta_palette_max_predictor_size - 1); + + for (comp = 0; comp < (sps->chroma_format_idc == 0 ? 1 : 3); comp++) { + guint num_bits; + guint num = +- sps->sps_scc_extension_params. +- sps_num_palette_predictor_initializer_minus1 + 1; ++ sps-> ++ sps_scc_extension_params.sps_num_palette_predictor_initializer_minus1 ++ + 1; + + num_bits = (comp == 0 ? sps->bit_depth_luma_minus8 + 8 : + sps->bit_depth_chroma_minus8 + 8); +@@ -2068,24 +2239,6 @@ done: + sps->fps_num = 0; + sps->fps_den = 1; + +- if (vui && vui->timing_info_present_flag) { +- /* derive framerate for progressive stream if the pic_struct +- * syntax element is not present in picture timing SEI messages */ +- /* Fixme: handle other cases also */ +- if (parse_vui_params && vui->timing_info_present_flag +- && !vui->field_seq_flag && !vui->frame_field_info_present_flag) { +- sps->fps_num = vui->time_scale; +- sps->fps_den = vui->num_units_in_tick; +- GST_LOG ("framerate %d/%d in VUI", sps->fps_num, sps->fps_den); +- } +- } else if (vps && vps->timing_info_present_flag) { +- sps->fps_num = vps->time_scale; +- sps->fps_den = vps->num_units_in_tick; +- GST_LOG ("framerate %d/%d in VPS", sps->fps_num, sps->fps_den); +- } else { +- GST_LOG ("No VUI, unknown framerate"); +- } +- + sps->valid = TRUE; + + return GST_H265_PARSER_OK; +@@ -2110,11 +2263,8 @@ GstH265ParserResult + gst_h265_parse_pps (GstH265Parser * parser, GstH265NalUnit * nalu, + GstH265PPS * pps) + { ++ guint32 MaxBitDepthY, MaxBitDepthC; + NalReader nr; +- GstH265SPS *sps; +- gint sps_id; +- gint qp_bd_offset; +- guint32 CtbSizeY, MinCbLog2SizeY, CtbLog2SizeY, MaxBitDepthY, MaxBitDepthC; + guint8 i; + + GST_DEBUG ("parsing PPS"); +@@ -2125,24 +2275,7 @@ gst_h265_parse_pps (GstH265Parser * parser, GstH265NalUnit * nalu, + memset (pps, 0, sizeof (*pps)); + + READ_UE_MAX (&nr, pps->id, GST_H265_MAX_PPS_COUNT - 1); +- READ_UE_MAX (&nr, sps_id, GST_H265_MAX_SPS_COUNT - 1); +- +- sps = gst_h265_parser_get_sps (parser, sps_id); +- if (!sps) { +- GST_WARNING ("couldn't find associated sequence parameter set with id: %d", +- sps_id); +- return GST_H265_PARSER_BROKEN_LINK; +- } +- pps->sps = sps; +- qp_bd_offset = 6 * sps->bit_depth_luma_minus8; +- +- MinCbLog2SizeY = sps->log2_min_luma_coding_block_size_minus3 + 3; +- CtbLog2SizeY = MinCbLog2SizeY + sps->log2_diff_max_min_luma_coding_block_size; +- CtbSizeY = 1 << CtbLog2SizeY; +- pps->PicHeightInCtbsY = +- ceil ((gdouble) sps->pic_height_in_luma_samples / (gdouble) CtbSizeY); +- pps->PicWidthInCtbsY = +- ceil ((gdouble) sps->pic_width_in_luma_samples / (gdouble) CtbSizeY); ++ READ_UE_MAX (&nr, pps->sps_id, GST_H265_MAX_SPS_COUNT - 1); + + /* set default values for fields that might not be present in the bitstream + and have valid defaults */ +@@ -2157,15 +2290,21 @@ gst_h265_parse_pps (GstH265Parser * parser, GstH265NalUnit * nalu, + + READ_UE_MAX (&nr, pps->num_ref_idx_l0_default_active_minus1, 14); + READ_UE_MAX (&nr, pps->num_ref_idx_l1_default_active_minus1, 14); +- READ_SE_ALLOWED (&nr, pps->init_qp_minus26, -(26 
+ qp_bd_offset), 25); ++ ++ /* The value of init_qp_minus26 shall be in the range of ++ * −( 26 + QpBdOffsetY ) to +25, inclusive. ++ * QpBdOffsetY = 6 * bit_depth_luma_minus8 (7-5) ++ * and bit_depth_luma_minus8 shall be in the range of 0 to 8, inclusive. ++ * so the minimum possible value of init_qp_minus26 is -(26 + 6*8) */ ++ READ_SE_ALLOWED (&nr, pps->init_qp_minus26, -(26 + 6 * 8), 25); + + READ_UINT8 (&nr, pps->constrained_intra_pred_flag, 1); + READ_UINT8 (&nr, pps->transform_skip_enabled_flag, 1); + + READ_UINT8 (&nr, pps->cu_qp_delta_enabled_flag, 1); +- if (pps->cu_qp_delta_enabled_flag) +- READ_UE_MAX (&nr, pps->diff_cu_qp_delta_depth, +- sps->log2_diff_max_min_luma_coding_block_size); ++ if (pps->cu_qp_delta_enabled_flag) { ++ READ_UE_MAX (&nr, pps->diff_cu_qp_delta_depth, 6); ++ } + + READ_SE_ALLOWED (&nr, pps->cb_qp_offset, -12, 12); + READ_SE_ALLOWED (&nr, pps->cr_qp_offset, -12, 12); +@@ -2178,6 +2317,26 @@ gst_h265_parse_pps (GstH265Parser * parser, GstH265NalUnit * nalu, + READ_UINT8 (&nr, pps->entropy_coding_sync_enabled_flag, 1); + + if (pps->tiles_enabled_flag) { ++ GstH265SPS *sps; ++ guint32 CtbSizeY, MinCbLog2SizeY, CtbLog2SizeY; ++ ++ sps = gst_h265_parser_get_sps (parser, pps->sps_id); ++ if (!sps) { ++ GST_WARNING ++ ("couldn't find associated sequence parameter set with id: %d", ++ pps->sps_id); ++ return GST_H265_PARSER_BROKEN_LINK; ++ } ++ ++ MinCbLog2SizeY = sps->log2_min_luma_coding_block_size_minus3 + 3; ++ CtbLog2SizeY = ++ MinCbLog2SizeY + sps->log2_diff_max_min_luma_coding_block_size; ++ CtbSizeY = 1 << CtbLog2SizeY; ++ pps->PicHeightInCtbsY = ++ ceil ((gdouble) sps->pic_height_in_luma_samples / (gdouble) CtbSizeY); ++ pps->PicWidthInCtbsY = ++ ceil ((gdouble) sps->pic_width_in_luma_samples / (gdouble) CtbSizeY); ++ + READ_UE_ALLOWED (&nr, + pps->num_tile_columns_minus1, 0, pps->PicWidthInCtbsY - 1); + READ_UE_ALLOWED (&nr, +@@ -2248,10 +2407,6 @@ gst_h265_parse_pps (GstH265Parser * parser, GstH265NalUnit * nalu, + if (pps->scaling_list_data_present_flag) + if (!gst_h265_parser_parse_scaling_lists (&nr, &pps->scaling_list, FALSE)) + goto error; +- if (sps->scaling_list_enabled_flag && !sps->scaling_list_data_present_flag +- && !pps->scaling_list_data_present_flag) +- if (!gst_h265_parser_parse_scaling_lists (&nr, &pps->scaling_list, TRUE)) +- goto error; + + READ_UINT8 (&nr, pps->lists_modification_present_flag, 1); + READ_UE_MAX (&nr, pps->log2_parallel_merge_level_minus2, 4); +@@ -2267,6 +2422,16 @@ gst_h265_parse_pps (GstH265Parser * parser, GstH265NalUnit * nalu, + } + + if (pps->pps_range_extension_flag) { ++ GstH265SPS *sps; ++ ++ sps = gst_h265_parser_get_sps (parser, pps->sps_id); ++ if (!sps) { ++ GST_WARNING ++ ("couldn't find associated sequence parameter set with id: %d", ++ pps->sps_id); ++ return GST_H265_PARSER_BROKEN_LINK; ++ } ++ + if (pps->transform_skip_enabled_flag) + READ_UE (&nr, + pps->pps_extension_params.log2_max_transform_skip_block_size_minus2); +@@ -2311,13 +2476,25 @@ gst_h265_parse_pps (GstH265Parser * parser, GstH265NalUnit * nalu, + } + + if (pps->pps_scc_extension_flag) { ++ GstH265SPS *sps; ++ ++ sps = gst_h265_parser_get_sps (parser, pps->sps_id); ++ if (!sps) { ++ GST_WARNING ++ ("couldn't find associated sequence parameter set with id: %d", ++ pps->sps_id); ++ return GST_H265_PARSER_BROKEN_LINK; ++ } ++ + READ_UINT8 (&nr, + pps->pps_scc_extension_params.pps_curr_pic_ref_enabled_flag, 1); + READ_UINT8 (&nr, +- pps->pps_scc_extension_params. 
+- residual_adaptive_colour_transform_enabled_flag, 1); +- if (pps->pps_scc_extension_params. +- residual_adaptive_colour_transform_enabled_flag) { ++ pps-> ++ pps_scc_extension_params.residual_adaptive_colour_transform_enabled_flag, ++ 1); ++ if (pps-> ++ pps_scc_extension_params.residual_adaptive_colour_transform_enabled_flag) ++ { + READ_UINT8 (&nr, + pps->pps_scc_extension_params.pps_slice_act_qp_offsets_present_flag, + 1); +@@ -2330,10 +2507,12 @@ gst_h265_parse_pps (GstH265Parser * parser, GstH265NalUnit * nalu, + } + + READ_UINT8 (&nr, +- pps->pps_scc_extension_params. +- pps_palette_predictor_initializers_present_flag, 1); +- if (pps->pps_scc_extension_params. +- pps_palette_predictor_initializers_present_flag) { ++ pps-> ++ pps_scc_extension_params.pps_palette_predictor_initializers_present_flag, ++ 1); ++ if (pps-> ++ pps_scc_extension_params.pps_palette_predictor_initializers_present_flag) ++ { + READ_UE_MAX (&nr, + pps->pps_scc_extension_params.pps_num_palette_predictor_initializer, + sps->sps_scc_extension_params.palette_max_size + +@@ -2364,8 +2543,8 @@ gst_h265_parse_pps (GstH265Parser * parser, GstH265NalUnit * nalu, + comp++) { + guint num_bits; + guint num = +- pps->pps_scc_extension_params. +- pps_num_palette_predictor_initializer; ++ pps-> ++ pps_scc_extension_params.pps_num_palette_predictor_initializer; + + num_bits = (comp == 0 ? + pps->pps_scc_extension_params.luma_bit_depth_entry_minus8 + 8 : +@@ -2414,6 +2593,90 @@ gst_h265_parser_parse_pps (GstH265Parser * parser, + return res; + } + ++static GstH265ParserResult ++gst_h265_parser_fill_sps (GstH265Parser * parser, GstH265SPS * sps) ++{ ++ GstH265VPS *vps; ++ GstH265VUIParams *vui = &sps->vui_params; ++ GstH265ParserResult ret = GST_H265_PARSER_OK; ++ ++ vps = gst_h265_parser_get_vps (parser, sps->vps_id); ++ if (!vps) { ++ GST_DEBUG ("couldn't find associated video parameter set with id: %d", ++ sps->vps_id); ++ return GST_H265_PARSER_BROKEN_LINK; ++ } ++ sps->vps = vps; ++ ++ if (vui && vui->timing_info_present_flag) { ++ /* derive framerate for progressive stream if the pic_struct ++ * syntax element is not present in picture timing SEI messages */ ++ /* Fixme: handle other cases also */ ++ if (vui->parsed && vui->timing_info_present_flag ++ && !vui->field_seq_flag && !vui->frame_field_info_present_flag) { ++ sps->fps_num = vui->time_scale; ++ sps->fps_den = vui->num_units_in_tick; ++ GST_LOG ("framerate %d/%d in VUI", sps->fps_num, sps->fps_den); ++ } ++ } else if (vps && vps->timing_info_present_flag) { ++ sps->fps_num = vps->time_scale; ++ sps->fps_den = vps->num_units_in_tick; ++ GST_LOG ("framerate %d/%d in VPS", sps->fps_num, sps->fps_den); ++ } else { ++ GST_LOG ("No VUI, unknown framerate"); ++ } ++ ++ return ret; ++} ++ ++static GstH265ParserResult ++gst_h265_parser_fill_pps (GstH265Parser * parser, GstH265PPS * pps) ++{ ++ GstH265SPS *sps; ++ gint qp_bd_offset; ++ guint32 CtbSizeY, MinCbLog2SizeY, CtbLog2SizeY; ++ GstH265ParserResult ret = GST_H265_PARSER_OK; ++ ++ sps = gst_h265_parser_get_sps (parser, pps->sps_id); ++ if (!sps) { ++ GST_WARNING ("couldn't find associated sequence parameter set with id: %d", ++ pps->sps_id); ++ return GST_H265_PARSER_BROKEN_LINK; ++ } ++ ++ ret = gst_h265_parser_fill_sps (parser, sps); ++ if (ret != GST_H265_PARSER_OK) { ++ GST_WARNING ("couldn't fill sps id: %d", pps->sps_id); ++ return ret; ++ } ++ ++ pps->sps = sps; ++ qp_bd_offset = 6 * sps->bit_depth_luma_minus8; ++ ++ MinCbLog2SizeY = sps->log2_min_luma_coding_block_size_minus3 + 3; ++ CtbLog2SizeY = 
MinCbLog2SizeY + sps->log2_diff_max_min_luma_coding_block_size; ++ CtbSizeY = 1 << CtbLog2SizeY; ++ pps->PicHeightInCtbsY = ++ ceil ((gdouble) sps->pic_height_in_luma_samples / (gdouble) CtbSizeY); ++ pps->PicWidthInCtbsY = ++ ceil ((gdouble) sps->pic_width_in_luma_samples / (gdouble) CtbSizeY); ++ ++ if (pps->init_qp_minus26 < -(26 + qp_bd_offset)) ++ return GST_H265_PARSER_BROKEN_LINK; ++ ++ if (sps->scaling_list_enabled_flag && !sps->scaling_list_data_present_flag ++ && !pps->scaling_list_data_present_flag) ++ if (!gst_h265_parser_parse_scaling_lists (NULL, &pps->scaling_list, TRUE)) ++ return GST_H265_PARSER_BROKEN_LINK; ++ ++ if (pps->cu_qp_delta_enabled_flag) ++ if (pps->diff_cu_qp_delta_depth > ++ sps->log2_diff_max_min_luma_coding_block_size) ++ return GST_H265_PARSER_BROKEN_LINK; ++ ++ return ret; ++} ++ + /** + * gst_h265_parser_parse_slice_hdr: + * @parser: a #GstH265Parser +@@ -2439,6 +2702,7 @@ gst_h265_parser_parse_slice_hdr (GstH265Parser * parser, + guint32 UsedByCurrPicLt[16]; + guint32 PicSizeInCtbsY; + gint NumPocTotalCurr = 0; ++ GstH265ParserResult err; + + memset (slice, 0, sizeof (*slice)); + +@@ -2465,6 +2729,12 @@ gst_h265_parser_parse_slice_hdr (GstH265Parser * parser, + return GST_H265_PARSER_BROKEN_LINK; + } + ++ err = gst_h265_parser_fill_pps (parser, pps); ++ if (err != GST_H265_PARSER_OK) { ++ GST_WARNING ("couldn't fill pps id: %d", pps_id); ++ return err; ++ } ++ + slice->pps = pps; + sps = pps->sps; + if (!sps) { +@@ -2495,11 +2765,12 @@ gst_h265_parser_parse_slice_hdr (GstH265Parser * parser, + } + + if (!slice->dependent_slice_segment_flag) { +- for (i = 0; i < pps->num_extra_slice_header_bits; i++) +- nal_reader_skip (&nr, 1); ++ for (i = 0; i < pps->num_extra_slice_header_bits; i++) { ++ if (!nal_reader_skip (&nr, 1)) ++ goto error; ++ } + READ_UE_MAX (&nr, slice->type, 63); + +- + if (pps->output_flag_present_flag) + READ_UINT8 (&nr, slice->pic_output_flag, 1); + if (sps->separate_colour_plane_flag == 1) +@@ -2512,12 +2783,16 @@ gst_h265_parser_parse_slice_hdr (GstH265Parser * parser, + READ_UINT8 (&nr, slice->short_term_ref_pic_set_sps_flag, 1); + if (!slice->short_term_ref_pic_set_sps_flag) { + guint pos = nal_reader_get_pos (&nr); ++ guint epb_pos = nal_reader_get_epb_count (&nr); ++ + if (!gst_h265_parser_parse_short_term_ref_pic_sets + (&slice->short_term_ref_pic_sets, &nr, + sps->num_short_term_ref_pic_sets, sps)) + goto error; + +- slice->short_term_ref_pic_set_size = nal_reader_get_pos (&nr) - pos; ++ slice->short_term_ref_pic_set_size = ++ (nal_reader_get_pos (&nr) - pos) - ++ (8 * (nal_reader_get_epb_count (&nr) - epb_pos)); + } else if (sps->num_short_term_ref_pic_sets > 1) { + const guint n = ceil_log2 (sps->num_short_term_ref_pic_sets); + READ_UINT8 (&nr, slice->short_term_ref_pic_set_idx, n); +@@ -2527,6 +2802,8 @@ gst_h265_parser_parse_slice_hdr (GstH265Parser * parser, + + if (sps->long_term_ref_pics_present_flag) { + guint32 limit; ++ guint pos = nal_reader_get_pos (&nr); ++ guint epb_pos = nal_reader_get_epb_count (&nr); + + if (sps->num_long_term_ref_pics_sps > 0) + READ_UE_MAX (&nr, slice->num_long_term_sps, +@@ -2556,6 +2833,10 @@ gst_h265_parser_parse_slice_hdr (GstH265Parser * parser, + if (slice->delta_poc_msb_present_flag[i]) + READ_UE (&nr, slice->delta_poc_msb_cycle_lt[i]); + } ++ ++ slice->long_term_ref_pic_set_size = ++ (nal_reader_get_pos (&nr) - pos) - ++ (8 * (nal_reader_get_epb_count (&nr) - epb_pos)); + } + if (sps->temporal_mvp_enabled_flag) + READ_UINT8 (&nr, slice->temporal_mvp_enabled_flag, 1); +@@ -2563,7 +2844,8 
@@ gst_h265_parser_parse_slice_hdr (GstH265Parser * parser, + + if (sps->sample_adaptive_offset_enabled_flag) { + READ_UINT8 (&nr, slice->sao_luma_flag, 1); +- READ_UINT8 (&nr, slice->sao_chroma_flag, 1); ++ if (sps->chroma_array_type) ++ READ_UINT8 (&nr, slice->sao_chroma_flag, 1); + } + + if (GST_H265_IS_B_SLICE (slice) || GST_H265_IS_P_SLICE (slice)) { +@@ -3376,7 +3658,7 @@ sort_fre_profile_matches (H265ExtensionProfileMatch * a, + + static GstH265Profile + get_extension_profile (H265ExtensionProfile * profiles, guint num, +- GstH265ProfileTierLevel * ptl) ++ const GstH265ProfileTierLevel * ptl) + { + GstH265Profile result = GST_H265_PROFILE_INVALID; + guint i; +@@ -3479,7 +3761,7 @@ get_extension_profile (H265ExtensionProfile * profiles, guint num, + } + + static GstH265Profile +-get_format_range_extension_profile (GstH265ProfileTierLevel * ptl) ++get_format_range_extension_profile (const GstH265ProfileTierLevel * ptl) + { + /* Profile idc: GST_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSION + See Table A.2 for the definition of those formats */ +@@ -3532,7 +3814,7 @@ get_format_range_extension_profile (GstH265ProfileTierLevel * ptl) + } + + static GstH265Profile +-get_3d_profile (GstH265ProfileTierLevel * ptl) ++get_3d_profile (const GstH265ProfileTierLevel * ptl) + { + /* profile idc: GST_H265_PROFILE_IDC_3D_MAIN */ + static H265ExtensionProfile profiles[] = { +@@ -3544,7 +3826,7 @@ get_3d_profile (GstH265ProfileTierLevel * ptl) + } + + static GstH265Profile +-get_multiview_profile (GstH265ProfileTierLevel * ptl) ++get_multiview_profile (const GstH265ProfileTierLevel * ptl) + { + static H265ExtensionProfile profiles[] = { + {GST_H265_PROFILE_MULTIVIEW_MAIN, +@@ -3555,7 +3837,7 @@ get_multiview_profile (GstH265ProfileTierLevel * ptl) + } + + static GstH265Profile +-get_scalable_profile (GstH265ProfileTierLevel * ptl) ++get_scalable_profile (const GstH265ProfileTierLevel * ptl) + { + static H265ExtensionProfile profiles[] = { + {GST_H265_PROFILE_SCALABLE_MAIN, +@@ -3568,7 +3850,7 @@ get_scalable_profile (GstH265ProfileTierLevel * ptl) + } + + static GstH265Profile +-get_high_throughput_profile (GstH265ProfileTierLevel * ptl) ++get_high_throughput_profile (const GstH265ProfileTierLevel * ptl) + { + static H265ExtensionProfile profiles[] = { + {GST_H265_PROFILE_HIGH_THROUGHPUT_444, +@@ -3585,7 +3867,8 @@ get_high_throughput_profile (GstH265ProfileTierLevel * ptl) + } + + static GstH265Profile +-get_screen_content_coding_extensions_profile (GstH265ProfileTierLevel * ptl) ++get_screen_content_coding_extensions_profile (const GstH265ProfileTierLevel * ++ ptl) + { + static H265ExtensionProfile profiles[] = { + {GST_H265_PROFILE_SCREEN_EXTENDED_MAIN, +@@ -3596,21 +3879,14 @@ get_screen_content_coding_extensions_profile (GstH265ProfileTierLevel * ptl) + 1, 1, 1, 1, 0, 0, 0, 0, 0, TRUE, 2}, + {GST_H265_PROFILE_SCREEN_EXTENDED_MAIN_444_10, + 1, 1, 1, 0, 0, 0, 0, 0, 0, TRUE, 3}, +- /* identical to screen-extended-main-444 */ +- {GST_H265_PROFILE_SCREEN_EXTENDED_HIGH_THROUGHPUT_444, +- 1, 1, 1, 1, 0, 0, 0, 0, 0, TRUE, 4}, +- /* identical to screen-extended-main-444-10 */ +- {GST_H265_PROFILE_SCREEN_EXTENDED_HIGH_THROUGHPUT_444_10, +- 1, 1, 1, 0, 0, 0, 0, 0, 0, TRUE, 5}, +- {GST_H265_PROFILE_SCREEN_EXTENDED_HIGH_THROUGHPUT_444_14, +- 1, 0, 0, 0, 0, 0, 0, 0, 0, TRUE, 6}, + }; + + return get_extension_profile (profiles, G_N_ELEMENTS (profiles), ptl); + } + + static GstH265Profile +-get_scalable_format_range_extensions_profile (GstH265ProfileTierLevel * ptl) ++get_scalable_format_range_extensions_profile 
(const GstH265ProfileTierLevel * ++ ptl) + { + static H265ExtensionProfile profiles[] = { + {GST_H265_PROFILE_SCALABLE_MONOCHROME, +@@ -3626,6 +3902,99 @@ get_scalable_format_range_extensions_profile (GstH265ProfileTierLevel * ptl) + return get_extension_profile (profiles, G_N_ELEMENTS (profiles), ptl); + } + ++static GstH265Profile ++ get_screen_content_coding_extensions_high_throughput_profile ++ (const GstH265ProfileTierLevel * ptl) ++{ ++ static H265ExtensionProfile profiles[] = { ++ {GST_H265_PROFILE_SCREEN_EXTENDED_HIGH_THROUGHPUT_444, ++ 1, 1, 1, 1, 0, 0, 0, 0, 0, TRUE, 0}, ++ {GST_H265_PROFILE_SCREEN_EXTENDED_HIGH_THROUGHPUT_444_10, ++ 1, 1, 1, 0, 0, 0, 0, 0, 0, TRUE, 1}, ++ {GST_H265_PROFILE_SCREEN_EXTENDED_HIGH_THROUGHPUT_444_14, ++ 1, 0, 0, 0, 0, 0, 0, 0, 0, TRUE, 2}, ++ }; ++ ++ return get_extension_profile (profiles, G_N_ELEMENTS (profiles), ptl); ++} ++ ++static inline void ++append_profile (GstH265Profile profiles[GST_H265_PROFILE_MAX], guint * idx, ++ GstH265Profile profile) ++{ ++ if (profile == GST_H265_PROFILE_INVALID) ++ return; ++ profiles[*idx] = profile; ++ (*idx)++; ++} ++ ++/* *INDENT-OFF* */ ++struct h265_profiles_map ++{ ++ GstH265ProfileIDC profile_idc; ++ GstH265Profile (*get_profile) (const GstH265ProfileTierLevel *); ++ GstH265Profile profile; ++}; ++/* *INDENT-ON* */ ++ ++static const struct h265_profiles_map profiles_map[] = { ++ /* keep profile check in asc order */ ++ {GST_H265_PROFILE_IDC_MAIN, NULL, GST_H265_PROFILE_MAIN}, ++ {GST_H265_PROFILE_IDC_MAIN_10, NULL, GST_H265_PROFILE_MAIN_10}, ++ {GST_H265_PROFILE_IDC_MAIN_STILL_PICTURE, NULL, ++ GST_H265_PROFILE_MAIN_STILL_PICTURE}, ++ {GST_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSION, ++ get_format_range_extension_profile, GST_H265_PROFILE_INVALID}, ++ {GST_H265_PROFILE_IDC_HIGH_THROUGHPUT, get_high_throughput_profile, ++ GST_H265_PROFILE_INVALID}, ++ {GST_H265_PROFILE_IDC_MULTIVIEW_MAIN, get_multiview_profile, ++ GST_H265_PROFILE_INVALID}, ++ {GST_H265_PROFILE_IDC_SCALABLE_MAIN, get_scalable_profile, ++ GST_H265_PROFILE_INVALID}, ++ {GST_H265_PROFILE_IDC_3D_MAIN, get_3d_profile, GST_H265_PROFILE_INVALID}, ++ {GST_H265_PROFILE_IDC_SCREEN_CONTENT_CODING, ++ get_screen_content_coding_extensions_profile, ++ GST_H265_PROFILE_INVALID}, ++ {GST_H265_PROFILE_IDC_SCALABLE_FORMAT_RANGE_EXTENSION, ++ get_scalable_format_range_extensions_profile, ++ GST_H265_PROFILE_INVALID}, ++ {GST_H265_PROFILE_IDC_HIGH_THROUGHPUT_SCREEN_CONTENT_CODING_EXTENSION, ++ get_screen_content_coding_extensions_high_throughput_profile, ++ GST_H265_PROFILE_INVALID}, ++}; ++ ++static void ++gst_h265_profile_tier_level_get_profiles (const GstH265ProfileTierLevel * ptl, ++ GstH265Profile profiles[GST_H265_PROFILE_MAX], guint * len) ++{ ++ guint i = 0, j; ++ ++ /* First add profile idc */ ++ for (j = 0; j < G_N_ELEMENTS (profiles_map); j++) { ++ if (ptl->profile_idc == profiles_map[j].profile_idc) { ++ if (profiles_map[j].get_profile) ++ append_profile (profiles, &i, profiles_map[j].get_profile (ptl)); ++ else ++ profiles[i++] = profiles_map[j].profile; ++ break; ++ } ++ } ++ ++ /* Later add compatibility flags */ ++ for (j = 0; j < G_N_ELEMENTS (profiles_map); j++) { ++ if (i > 0 && ptl->profile_idc == profiles_map[j].profile_idc) ++ continue; ++ if (ptl->profile_compatibility_flag[profiles_map[j].profile_idc]) { ++ if (profiles_map[j].get_profile) ++ append_profile (profiles, &i, profiles_map[j].get_profile (ptl)); ++ else ++ profiles[i++] = profiles_map[j].profile; ++ } ++ } ++ ++ *len = i; ++} ++ + /** + * gst_h265_profile_tier_level_get_profile: 
+ * @ptl: a #GstH265ProfileTierLevel +@@ -3636,50 +4005,15 @@ get_scalable_format_range_extensions_profile (GstH265ProfileTierLevel * ptl) + * Since: 1.14 + */ + GstH265Profile +-gst_h265_profile_tier_level_get_profile (GstH265ProfileTierLevel * ptl) ++gst_h265_profile_tier_level_get_profile (const GstH265ProfileTierLevel * ptl) + { +- if (ptl->profile_idc == GST_H265_PROFILE_IDC_MAIN +- || ptl->profile_compatibility_flag[1]) +- return GST_H265_PROFILE_MAIN; +- +- if (ptl->profile_idc == GST_H265_PROFILE_IDC_MAIN_10 +- || ptl->profile_compatibility_flag[2]) +- return GST_H265_PROFILE_MAIN_10; +- +- if (ptl->profile_idc == GST_H265_PROFILE_IDC_MAIN_STILL_PICTURE +- || ptl->profile_compatibility_flag[3]) +- return GST_H265_PROFILE_MAIN_STILL_PICTURE; +- +- if (ptl->profile_idc == GST_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSION +- || ptl->profile_compatibility_flag[4]) +- return get_format_range_extension_profile (ptl); ++ guint len = 0; ++ GstH265Profile profiles[GST_H265_PROFILE_MAX] = { GST_H265_PROFILE_INVALID, }; + +- if (ptl->profile_idc == GST_H265_PROFILE_IDC_HIGH_THROUGHPUT +- || ptl->profile_compatibility_flag[5]) +- return get_high_throughput_profile (ptl); ++ gst_h265_profile_tier_level_get_profiles (ptl, profiles, &len); + +- if (ptl->profile_idc == GST_H265_PROFILE_IDC_MULTIVIEW_MAIN +- || ptl->profile_compatibility_flag[6]) +- return get_multiview_profile (ptl); +- +- if (ptl->profile_idc == GST_H265_PROFILE_IDC_SCALABLE_MAIN +- || ptl->profile_compatibility_flag[7]) +- return get_scalable_profile (ptl); +- +- if (ptl->profile_idc == GST_H265_PROFILE_IDC_3D_MAIN +- || ptl->profile_compatibility_flag[8]) +- return get_3d_profile (ptl); +- +- if (ptl->profile_idc == GST_H265_PROFILE_IDC_SCREEN_CONTENT_CODING +- || ptl->profile_compatibility_flag[9] +- || ptl->profile_idc == +- GST_H265_PROFILE_IDC_HIGH_THROUGHPUT_SCREEN_CONTENT_CODING_EXTENSION +- || ptl->profile_compatibility_flag[11]) +- return get_screen_content_coding_extensions_profile (ptl); +- +- if (ptl->profile_idc == GST_H265_PROFILE_IDC_SCALABLE_FORMAT_RANGE_EXTENSION +- || ptl->profile_compatibility_flag[10]) +- return get_scalable_format_range_extensions_profile (ptl); ++ if (len > 0) ++ return profiles[0]; + + return GST_H265_PROFILE_INVALID; + } +@@ -3975,14 +4309,14 @@ gst_h265_create_sei_memory_internal (guint8 layer_id, guint8 temporal_id_plus1, + /* write payload type bytes */ + while (payload_type_data >= 0xff) { + WRITE_UINT8 (&nw, 0xff, 8); +- payload_type_data -= -0xff; ++ payload_type_data -= 0xff; + } + WRITE_UINT8 (&nw, payload_type_data, 8); + + /* write payload size bytes */ + while (payload_size_data >= 0xff) { + WRITE_UINT8 (&nw, 0xff, 8); +- payload_size_data -= -0xff; ++ payload_size_data -= 0xff; + } + WRITE_UINT8 (&nw, payload_size_data, 8); + +@@ -4293,3 +4627,119 @@ gst_h265_parser_insert_sei_hevc (GstH265Parser * parser, guint8 nal_length_size, + return gst_h265_parser_insert_sei_internal (parser, nal_length_size, TRUE, + au, sei); + } ++ ++/** ++ * gst_h265_get_profile_from_sps: ++ * @sps: a #GstH265SPS ++ * ++ * Return the H265 profile from @sps. 
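/* Editor's note: illustrative sketch only, not part of the upstream patch.
 * It shows how the helper documented above might be combined with the
 * existing SPS parsing API; the parser and the SPS NAL unit are assumed to
 * come from elsewhere, and the function name is made up for the example. */
#include <gst/codecparsers/gsth265parser.h>

static const gchar *
profile_name_from_sps_nalu (GstH265Parser * parser, GstH265NalUnit * nalu)
{
  GstH265SPS sps;
  GstH265Profile profile;

  if (gst_h265_parser_parse_sps (parser, nalu, &sps, TRUE) !=
      GST_H265_PARSER_OK)
    return NULL;

  /* unlike reading profile_idc alone, this also honours the constraint
   * flags and the chroma format / bit depth conformance rules */
  profile = gst_h265_get_profile_from_sps (&sps);
  if (profile == GST_H265_PROFILE_INVALID)
    return NULL;

  return gst_h265_profile_to_string (profile);
}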
++ * ++ * Returns: a #GstH265Profile ++ * Since: 1.20 ++ */ ++GstH265Profile ++gst_h265_get_profile_from_sps (GstH265SPS * sps) ++{ ++ GstH265Profile profiles[GST_H265_PROFILE_MAX] = { GST_H265_PROFILE_INVALID, }; ++ GstH265ProfileTierLevel tmp_ptl; ++ guint i, len = 0; ++ guint chroma_format_idc, bit_depth_luma, bit_depth_chroma; ++ ++ g_return_val_if_fail (sps != NULL, GST_H265_PROFILE_INVALID); ++ ++ tmp_ptl = sps->profile_tier_level; ++ chroma_format_idc = sps->chroma_format_idc; ++ bit_depth_luma = sps->bit_depth_luma_minus8 + 8; ++ bit_depth_chroma = sps->bit_depth_chroma_minus8 + 8; ++ ++ gst_h265_profile_tier_level_get_profiles (&sps->profile_tier_level, profiles, ++ &len); ++ ++ for (i = 0; i < len && i < G_N_ELEMENTS (profiles); i++) { ++ switch (profiles[i]) { ++ case GST_H265_PROFILE_INVALID: ++ break; ++ case GST_H265_PROFILE_MAIN: ++ case GST_H265_PROFILE_MAIN_STILL_PICTURE: ++ /* A.3.2 or A.3.5 */ ++ if (chroma_format_idc == 1 ++ && bit_depth_luma == 8 && bit_depth_chroma == 8) ++ return profiles[i]; ++ break; ++ case GST_H265_PROFILE_MAIN_10: ++ /* A.3.3 */ ++ if (chroma_format_idc == 1 ++ && bit_depth_luma >= 8 && bit_depth_luma <= 10 ++ && bit_depth_chroma >= 8 && bit_depth_chroma <= 10) ++ return profiles[i]; ++ break; ++ default: ++ return profiles[i]; ++ } ++ } ++ ++ /* Invalid profile: */ ++ /* Set the conformance indicators based on chroma_format_idc / bit_depth */ ++ switch (chroma_format_idc) { ++ case 0: ++ tmp_ptl.max_monochrome_constraint_flag = 1; ++ tmp_ptl.max_420chroma_constraint_flag = 1; ++ tmp_ptl.max_422chroma_constraint_flag = 1; ++ break; ++ ++ case 1: ++ tmp_ptl.max_monochrome_constraint_flag = 0; ++ tmp_ptl.max_420chroma_constraint_flag = 1; ++ tmp_ptl.max_422chroma_constraint_flag = 1; ++ break; ++ ++ case 2: ++ tmp_ptl.max_monochrome_constraint_flag = 0; ++ tmp_ptl.max_420chroma_constraint_flag = 0; ++ tmp_ptl.max_422chroma_constraint_flag = 1; ++ break; ++ ++ case 3: ++ tmp_ptl.max_monochrome_constraint_flag = 0; ++ tmp_ptl.max_420chroma_constraint_flag = 0; ++ tmp_ptl.max_422chroma_constraint_flag = 0; ++ break; ++ ++ default: ++ g_assert_not_reached (); ++ break; ++ } ++ ++ tmp_ptl.max_8bit_constraint_flag = 1; ++ tmp_ptl.max_10bit_constraint_flag = 1; ++ tmp_ptl.max_12bit_constraint_flag = 1; ++ tmp_ptl.max_14bit_constraint_flag = 1; ++ ++ if (bit_depth_luma > 8 || bit_depth_chroma > 8) ++ tmp_ptl.max_8bit_constraint_flag = 0; ++ ++ if (bit_depth_luma > 10 || bit_depth_chroma > 10) ++ tmp_ptl.max_10bit_constraint_flag = 0; ++ ++ if (bit_depth_luma > 12 || bit_depth_chroma > 12) ++ tmp_ptl.max_12bit_constraint_flag = 0; ++ ++ if (tmp_ptl.profile_idc == GST_H265_PROFILE_IDC_HIGH_THROUGHPUT ++ || tmp_ptl.profile_idc == GST_H265_PROFILE_IDC_SCREEN_CONTENT_CODING ++ || tmp_ptl.profile_idc == ++ GST_H265_PROFILE_IDC_SCALABLE_FORMAT_RANGE_EXTENSION ++ || tmp_ptl.profile_idc == ++ GST_H265_PROFILE_IDC_HIGH_THROUGHPUT_SCREEN_CONTENT_CODING_EXTENSION ++ || tmp_ptl.profile_compatibility_flag[5] ++ || tmp_ptl.profile_compatibility_flag[9] ++ || tmp_ptl.profile_compatibility_flag[10] ++ || tmp_ptl.profile_compatibility_flag[11]) { ++ if (bit_depth_luma > 14 || bit_depth_chroma > 14) ++ tmp_ptl.max_14bit_constraint_flag = 0; ++ } else { ++ tmp_ptl.max_14bit_constraint_flag = 0; ++ } ++ ++ /* first profile of the synthetic ptl */ ++ return gst_h265_profile_tier_level_get_profile (&tmp_ptl); ++} +diff --git a/gst-libs/gst/codecparsers/gsth265parser.h b/gst-libs/gst/codecparsers/gsth265parser.h +index 073123d7c..679d70c6d 100644 +--- 
a/gst-libs/gst/codecparsers/gsth265parser.h ++++ b/gst-libs/gst/codecparsers/gsth265parser.h +@@ -788,6 +788,7 @@ struct _GstH265ShortTermRefPicSet + + /** + * GstH265VUIParams: ++ * @parsed: %TRUE indicate that VUI parameters have been parsed (Since: 1.22) + * @aspect_ratio_info_present_flag: %TRUE specifies that aspect_ratio_idc is present. + * %FALSE specifies that aspect_ratio_idc is not present + * @aspect_ratio_idc specifies the value of the sample aspect ratio of the luma samples +@@ -856,6 +857,14 @@ struct _GstH265ShortTermRefPicSet + */ + struct _GstH265VUIParams + { ++ /** ++ * _GstH265VUIParams.parsed: ++ * ++ * %TRUE indicate that VUI parameters have been parsed. ++ * ++ * Since: 1.22 ++ */ ++ gboolean parsed; + guint8 aspect_ratio_info_present_flag; + guint8 aspect_ratio_idc; + /* if aspect_ratio_idc == 255 */ +@@ -1102,6 +1111,14 @@ struct _GstH265SPS + { + guint8 id; + ++ /** ++ * _GstH265SPS.vps_id: ++ * ++ * The ID of the VPS. This is used to store the ID until the VPS is ++ * parsed in case its placed after the SPS. ++ * Since: 1.22 ++ */ ++ guint8 vps_id; + GstH265VPS *vps; + + guint8 max_sub_layers_minus1; +@@ -1166,7 +1183,7 @@ struct _GstH265SPS + guint8 strong_intra_smoothing_enabled_flag; + guint8 vui_parameters_present_flag; + +- /* if vui_parameters_present_flat */ ++ /* if vui_parameters_present_flag */ + GstH265VUIParams vui_params; + + guint8 sps_extension_flag; +@@ -1179,7 +1196,12 @@ struct _GstH265SPS + guint8 sps_extension_4bits; + + /* if sps_range_extension_flag */ +- GstH265SPSExtensionParams sps_extnsion_params; ++ /** ++ * _GstH265SPS.sps_extension_params: ++ * ++ * Since: 1.22 ++ */ ++ GstH265SPSExtensionParams sps_extension_params; + /* if sps_scc_extension_flag */ + GstH265SPSSccExtensionParams sps_scc_extension_params; + +@@ -1201,6 +1223,15 @@ struct _GstH265PPS + { + guint id; + ++ /** ++ * _GstH265PPS.sps_id: ++ * ++ * The ID of the SPS. This is used to store the ID until the SPS is ++ * parsed in case its placed after the PPS. ++ * ++ * Since: 1.22 ++ */ ++ guint sps_id; + GstH265SPS *sps; + + guint8 dependent_slice_segments_enabled_flag; +@@ -1384,6 +1415,9 @@ struct _GstH265PredWeightTable + * in this slice_header\() + * @short_term_ref_pic_set_size: the calculated size of short_term_ref_pic_set\() + * in bits. (Since: 1.18) ++ * @long_term_ref_pic_set_size: the calculated size of the branch ++ * `if( long_term_ref_pics_present_flag )` `inside slice_segment_header()` syntax ++ * in bits. (Since: 1.22) + */ + struct _GstH265SliceHdr + { +@@ -1460,8 +1494,19 @@ struct _GstH265SliceHdr + /* Number of emulation prevention bytes (EPB) in this slice_header() */ + guint n_emulation_prevention_bytes; + +- /* Size of short_term_ref_pic_set() in bits */ ++ /* Size of short_term_ref_pic_set() after emulation preventation bytes are ++ * removed, in bits */ + guint short_term_ref_pic_set_size; ++ ++ /** ++ * _GstH265SliceHdr.long_term_ref_pic_set_size: ++ * ++ * The calculated size of the branch `if( long_term_ref_pics_present_flag )` ++ * inside `slice_segment_header()` syntax in bits. 
++ * ++ * Since: 1.22 ++ */ ++ guint long_term_ref_pic_set_size; + }; + + struct _GstH265PicTiming +@@ -1652,6 +1697,15 @@ GstH265ParserResult gst_h265_parser_identify_nalu_hevc (GstH265Parser * parser, + guint8 nal_length_size, + GstH265NalUnit * nalu); + ++GST_CODEC_PARSERS_API ++GstH265ParserResult gst_h265_parser_identify_and_split_nalu_hevc (GstH265Parser * parser, ++ const guint8 * data, ++ guint offset, ++ gsize size, ++ guint8 nal_length_size, ++ GArray * nalus, ++ gsize * consumed); ++ + GST_CODEC_PARSERS_API + GstH265ParserResult gst_h265_parser_parse_nal (GstH265Parser * parser, + GstH265NalUnit * nalu); +@@ -1777,7 +1831,7 @@ void gst_h265_quant_matrix_8x8_get_raster_from_uprightdiagonal (guint8 out_qu + gst_h265_quant_matrix_8x8_get_raster_from_uprightdiagonal + + GST_CODEC_PARSERS_API +-GstH265Profile gst_h265_profile_tier_level_get_profile (GstH265ProfileTierLevel * ptl); ++GstH265Profile gst_h265_profile_tier_level_get_profile (const GstH265ProfileTierLevel * ptl); + + GST_CODEC_PARSERS_API + const gchar * gst_h265_profile_to_string (GstH265Profile profile); +@@ -1808,5 +1862,8 @@ GstBuffer * gst_h265_parser_insert_sei_hevc (GstH265Parser * parser, + GstBuffer * au, + GstMemory * sei); + ++GST_CODEC_PARSERS_API ++GstH265Profile gst_h265_get_profile_from_sps (GstH265SPS * sps); ++ + G_END_DECLS + #endif +diff --git a/gst-libs/gst/codecparsers/gstjpeg2000sampling.c b/gst-libs/gst/codecparsers/gstjpeg2000sampling.c +index 526a7ecce..c1e76f0fc 100644 +--- a/gst-libs/gst/codecparsers/gstjpeg2000sampling.c ++++ b/gst-libs/gst/codecparsers/gstjpeg2000sampling.c +@@ -42,6 +42,7 @@ static const gchar *gst_jpeg2000_sampling_strings[] = { + "YCbCr-4:1:0", + "GRAYSCALE", + "YCbCrA-4:4:4:4", ++ "YCbCr-4:1:1", + }; + + /* convert string to GstJPEG2000Sampling enum */ +@@ -86,8 +87,9 @@ gst_jpeg2000_sampling_is_yuv (GstJPEG2000Sampling sampling) + return sampling == GST_JPEG2000_SAMPLING_YBRA4444_EXT || + sampling == GST_JPEG2000_SAMPLING_YBR444 || + sampling == GST_JPEG2000_SAMPLING_YBR422 || +- sampling == GST_JPEG2000_SAMPLING_YBR420 +- || sampling == GST_JPEG2000_SAMPLING_YBR410; ++ sampling == GST_JPEG2000_SAMPLING_YBR420 || ++ sampling == GST_JPEG2000_SAMPLING_YBR411 || ++ sampling == GST_JPEG2000_SAMPLING_YBR410; + } + + /* check if @sampling is in GRAYSCALE color space */ +diff --git a/gst-libs/gst/codecparsers/gstjpeg2000sampling.h b/gst-libs/gst/codecparsers/gstjpeg2000sampling.h +index 5fa081000..4b5bea5a3 100644 +--- a/gst-libs/gst/codecparsers/gstjpeg2000sampling.h ++++ b/gst-libs/gst/codecparsers/gstjpeg2000sampling.h +@@ -30,17 +30,26 @@ + * Note: sampling extensions that are not listed in the RFC are signified by an _EXT at the end of the enum + * + * @GST_JPEG2000_SAMPLING_NONE: no sampling +- * @GST_JPEG2000_SAMPLING_RGB: standard Red, Green, Blue color space. +- * @GST_JPEG2000_SAMPLING_BGR: standard Blue, Green, Red color space. +- * @GST_JPEG2000_SAMPLING_RGBA: standard Red, Green, Blue, Alpha color space. +- * @GST_JPEG2000_SAMPLING_BGRA: standard Blue, Green, Red, Alpha color space. +- * @GST_JPEG2000_SAMPLING_YCbCr-4:4:4: standard YCbCr color space; no subsampling. +- * @GST_JPEG2000_SAMPLING_YCbCr-4:2:2: standard YCbCr color space; Cb and Cr are subsampled horizontally by 1/2. +- * @GST_JPEG2000_SAMPLING_YCbCr-4:2:0: standard YCbCr color space; Cb and Cr are subsampled horizontally and vertically by 1/2. +- * @GST_JPEG2000_SAMPLING_YCbCr-4:1:1: standard YCbCr color space; Cb and Cr are subsampled vertically by 1/4. 
++ * @GST_JPEG2000_SAMPLING_RGB: standard Red, Green, Blue color space. ++ * @GST_JPEG2000_SAMPLING_BGR: standard Blue, Green, Red color space. ++ * @GST_JPEG2000_SAMPLING_RGBA: standard Red, Green, Blue, Alpha color space. ++ * @GST_JPEG2000_SAMPLING_BGRA: standard Blue, Green, Red, Alpha color space. ++ * @GST_JPEG2000_SAMPLING_YBR444: standard YCbCr color space; no subsampling. ++ * @GST_JPEG2000_SAMPLING_YBR422: standard YCbCr color space; Cb and Cr are subsampled horizontally by 1/2. ++ * @GST_JPEG2000_SAMPLING_YBR420: standard YCbCr color space; Cb and Cr are subsampled horizontally and vertically by 1/2. ++ * @GST_JPEG2000_SAMPLING_YBR411: standard YCbCr color space; Cb and Cr are subsampled vertically by 1/4 (Since: 1.20). ++ * @GST_JPEG2000_SAMPLING_YBR410: standard YCbCr color space; Cb and Cr are subsampled vertically by 1/4 alternating the Cb and Cr component. + * @GST_JPEG2000_SAMPLING_GRAYSCALE: basically, a single component image of just multilevels of grey. + * @GST_JPEG2000_SAMPLING_YBRA4444_EXT: standard YCbCr color space, alpha channel, no subsampling, + */ ++ ++/** ++ * GST_JPEG2000_SAMPLING_YBR411: ++ * ++ * standard YCbCr color space; Cb and Cr are subsampled vertically by 1/4 ++ * ++ * Since: 1.20 ++ */ + typedef enum + { + GST_JPEG2000_SAMPLING_NONE, +@@ -53,11 +62,12 @@ typedef enum + GST_JPEG2000_SAMPLING_YBR420, + GST_JPEG2000_SAMPLING_YBR410, + GST_JPEG2000_SAMPLING_GRAYSCALE, +- GST_JPEG2000_SAMPLING_YBRA4444_EXT ++ GST_JPEG2000_SAMPLING_YBRA4444_EXT, ++ GST_JPEG2000_SAMPLING_YBR411 + } GstJPEG2000Sampling; + + /* GST_JPEG2000_SAMPLING_LIST: sampling strings in list form, for use in caps */ +-#define GST_JPEG2000_SAMPLING_LIST "sampling = (string) {\"RGB\", \"BGR\", \"RGBA\", \"BGRA\", \"YCbCr-4:4:4\", \"YCbCr-4:2:2\", \"YCbCr-4:2:0\", \"YCbCr-4:1:1\", \"GRAYSCALE\" , \"YCbCrA-4:4:4:4\"}" ++#define GST_JPEG2000_SAMPLING_LIST "sampling = (string) {\"RGB\", \"BGR\", \"RGBA\", \"BGRA\", \"YCbCr-4:4:4\", \"YCbCr-4:2:2\", \"YCbCr-4:2:0\", \"YCbCr-4:1:1\", \"YCbCr-4:1:0\", \"GRAYSCALE\" , \"YCbCrA-4:4:4:4\"}" + + GST_CODEC_PARSERS_API + const gchar *gst_jpeg2000_sampling_to_string (GstJPEG2000Sampling sampling); +diff --git a/gst-libs/gst/codecparsers/gstvp8parser.c b/gst-libs/gst/codecparsers/gstvp8parser.c +index 717647661..2de34f0b9 100644 +--- a/gst-libs/gst/codecparsers/gstvp8parser.c ++++ b/gst-libs/gst/codecparsers/gstvp8parser.c +@@ -535,6 +535,8 @@ gst_vp8_parser_parse_frame_header (GstVp8Parser * parser, + g_return_val_if_fail (frame_hdr != NULL, GST_VP8_PARSER_ERROR); + g_return_val_if_fail (parser != NULL, GST_VP8_PARSER_ERROR); + ++ memset (frame_hdr, 0, sizeof (GstVp8FrameHdr)); ++ + /* Uncompressed Data Chunk */ + gst_byte_reader_init (&br, data, size); + +diff --git a/gst-libs/gst/codecparsers/gstvp9parser.c b/gst-libs/gst/codecparsers/gstvp9parser.c +index 33e1f0ebd..cb82a0816 100644 +--- a/gst-libs/gst/codecparsers/gstvp9parser.c ++++ b/gst-libs/gst/codecparsers/gstvp9parser.c +@@ -646,6 +646,7 @@ gst_vp9_parser_new (void) + return NULL; + + parser->priv = priv; ++ parser->subsampling_x = parser->subsampling_y = -1; + + return parser; + } +diff --git a/gst-libs/gst/codecparsers/gstvp9parser.h b/gst-libs/gst/codecparsers/gstvp9parser.h +index 20417d9c6..3a0d67c08 100644 +--- a/gst-libs/gst/codecparsers/gstvp9parser.h ++++ b/gst-libs/gst/codecparsers/gstvp9parser.h +@@ -48,7 +48,6 @@ G_BEGIN_DECLS + + #define GST_VP9_FRAME_CONTEXTS_LOG2 2 + +-#define GST_VP9_MAX_LOOP_FILTER 63 + #define GST_VP9_MAX_SHARPNESS 7 + + #define GST_VP9_MAX_REF_LF_DELTAS 4 
+@@ -461,7 +460,7 @@ struct _GstVp9SuperframeInfo { + */ + struct _GstVp9Segmentation + { +- guint8 filter_level[4][2]; ++ guint8 filter_level[GST_VP9_MAX_REF_LF_DELTAS][GST_VP9_MAX_MODE_LF_DELTAS]; + gint16 luma_ac_quant_scale; + gint16 luma_dc_quant_scale; + gint16 chroma_ac_quant_scale; +diff --git a/gst-libs/gst/codecparsers/meson.build b/gst-libs/gst/codecparsers/meson.build +index 7b76f4001..7d621f6a9 100644 +--- a/gst-libs/gst/codecparsers/meson.build ++++ b/gst-libs/gst/codecparsers/meson.build +@@ -15,7 +15,9 @@ codecparser_sources = files([ + 'dboolhuff.c', + 'vp8utils.c', + 'gstmpegvideometa.c', +- 'gstav1parser.c' ++ 'gstav1parser.c', ++ 'gsth264bitwriter.c', ++ 'gsth265bitwriter.c', + ]) + codecparser_headers = [ + 'codecparsers-prelude.h', +@@ -30,13 +32,14 @@ codecparser_headers = [ + 'gstjpegparser.h', + 'gstmpegvideometa.h', + 'gstvp9parser.h', +- 'gstav1parser.h' ++ 'gstav1parser.h', + ] + install_headers(codecparser_headers, subdir : 'gstreamer-1.0/gst/codecparsers') + + cp_args = [ + '-DGST_USE_UNSTABLE_API', + '-DBUILDING_GST_CODEC_PARSERS', ++ '-DG_LOG_DOMAIN="GStreamer-CodecParsers"', + '-Dvp8_norm=gst_codecparsers_vp8_norm', + '-Dvp8dx_start_decode=gst_codecparsers_vp8dx_start_decode', + '-Dvp8dx_bool_decoder_fill=gst_codecparsers_vp8dx_bool_decoder_fill', +diff --git a/gst-libs/gst/codecparsers/nalutils.c b/gst-libs/gst/codecparsers/nalutils.c +index cd63d8ae2..af802d7d8 100644 +--- a/gst-libs/gst/codecparsers/nalutils.c ++++ b/gst-libs/gst/codecparsers/nalutils.c +@@ -345,29 +345,15 @@ nal_writer_do_rbsp_trailing_bits (NalWriter * nw) + return TRUE; + } + +-GstMemory * +-nal_writer_reset_and_get_memory (NalWriter * nw) ++static gpointer ++nal_writer_create_nal_data (NalWriter * nw, guint32 * ret_size) + { + GstBitWriter bw; + gint i; + guint8 *src, *dst; + gsize size; +- GstMemory *ret = NULL; + gpointer data; + +- g_return_val_if_fail (nw != NULL, NULL); +- +- if ((GST_BIT_WRITER_BIT_SIZE (&nw->bw) >> 3) == 0) { +- GST_WARNING ("No written byte"); +- goto done; +- } +- +- if ((GST_BIT_WRITER_BIT_SIZE (&nw->bw) & 0x7) != 0) { +- GST_WARNING ("Written stream is not byte aligned"); +- if (!nal_writer_do_rbsp_trailing_bits (nw)) +- goto done; +- } +- + /* scan to put emulation_prevention_three_byte */ + size = GST_BIT_WRITER_BIT_SIZE (&nw->bw) >> 3; + src = GST_BIT_WRITER_DATA (&nw->bw); +@@ -388,44 +374,104 @@ nal_writer_reset_and_get_memory (NalWriter * nw) + gst_bit_writer_put_bits_uint8 (&bw, src[i], 8); + } + +- size = bw.bit_size >> 3; ++ *ret_size = bw.bit_size >> 3; + data = gst_bit_writer_reset_and_get_data (&bw); +- ret = gst_memory_new_wrapped (0, data, size, 0, size, data, g_free); + + if (nw->packetized) { +- GstMapInfo info; +- +- gst_memory_map (ret, &info, GST_MAP_WRITE); +- +- size = info.size - nw->nal_prefix_size; ++ size = *ret_size - nw->nal_prefix_size; + + switch (nw->nal_prefix_size) { + case 1: +- GST_WRITE_UINT8 (info.data, size); ++ GST_WRITE_UINT8 (data, size); + break; + case 2: +- GST_WRITE_UINT16_BE (info.data, size); ++ GST_WRITE_UINT16_BE (data, size); + break; + case 3: +- GST_WRITE_UINT24_BE (info.data, size); ++ GST_WRITE_UINT24_BE (data, size); + break; + case 4: +- GST_WRITE_UINT32_BE (info.data, size); ++ GST_WRITE_UINT32_BE (data, size); + break; + default: + g_assert_not_reached (); + break; + } ++ } + +- gst_memory_unmap (ret, &info); ++ return data; ++} ++ ++GstMemory * ++nal_writer_reset_and_get_memory (NalWriter * nw) ++{ ++ guint32 size = 0; ++ GstMemory *ret = NULL; ++ gpointer data; ++ ++ g_return_val_if_fail (nw != 
NULL, NULL); ++ ++ if ((GST_BIT_WRITER_BIT_SIZE (&nw->bw) >> 3) == 0) { ++ GST_WARNING ("No written byte"); ++ goto done; + } + ++ if ((GST_BIT_WRITER_BIT_SIZE (&nw->bw) & 0x7) != 0) { ++ GST_WARNING ("Written stream is not byte aligned"); ++ if (!nal_writer_do_rbsp_trailing_bits (nw)) ++ goto done; ++ } ++ ++ data = nal_writer_create_nal_data (nw, &size); ++ if (!data) { ++ GST_WARNING ("Failed to create nal data"); ++ goto done; ++ } ++ ++ ret = gst_memory_new_wrapped (0, data, size, 0, size, data, g_free); ++ + done: + gst_bit_writer_reset (&nw->bw); + + return ret; + } + ++guint8 * ++nal_writer_reset_and_get_data (NalWriter * nw, guint32 * ret_size) ++{ ++ guint32 size = 0; ++ guint8 *data = NULL; ++ ++ g_return_val_if_fail (nw != NULL, NULL); ++ g_return_val_if_fail (ret_size != NULL, NULL); ++ ++ *ret_size = 0; ++ ++ if ((GST_BIT_WRITER_BIT_SIZE (&nw->bw) >> 3) == 0) { ++ GST_WARNING ("No written byte"); ++ goto done; ++ } ++ ++ if ((GST_BIT_WRITER_BIT_SIZE (&nw->bw) & 0x7) != 0) { ++ GST_WARNING ("Written stream is not byte aligned"); ++ if (!nal_writer_do_rbsp_trailing_bits (nw)) ++ goto done; ++ } ++ ++ data = nal_writer_create_nal_data (nw, &size); ++ if (!data) { ++ GST_WARNING ("Failed to create nal data"); ++ goto done; ++ } ++ ++ *ret_size = size; ++ ++done: ++ gst_bit_writer_reset (&nw->bw); ++ ++ return data; ++} ++ + gboolean + nal_writer_put_bits_uint8 (NalWriter * nw, guint8 value, guint nbits) + { +diff --git a/gst-libs/gst/codecparsers/nalutils.h b/gst-libs/gst/codecparsers/nalutils.h +index fdfc57556..d8c0fbb7b 100644 +--- a/gst-libs/gst/codecparsers/nalutils.h ++++ b/gst-libs/gst/codecparsers/nalutils.h +@@ -208,6 +208,9 @@ gboolean nal_writer_do_rbsp_trailing_bits (NalWriter * nw); + G_GNUC_INTERNAL + GstMemory * nal_writer_reset_and_get_memory (NalWriter * nw); + ++G_GNUC_INTERNAL ++guint8 * nal_writer_reset_and_get_data (NalWriter * nw, guint32 * ret_size); ++ + G_GNUC_INTERNAL + gboolean nal_writer_put_bits_uint8 (NalWriter * nw, guint8 value, guint nbits); + +diff --git a/gst-libs/gst/webrtc/datachannel.c b/gst-libs/gst/webrtc/datachannel.c +index ee0be6030..38cd3ce03 100644 +--- a/gst-libs/gst/webrtc/datachannel.c ++++ b/gst-libs/gst/webrtc/datachannel.c +@@ -22,6 +22,8 @@ + * SECTION:gstwebrtc-datachannel + * @short_description: RTCDataChannel object + * @title: GstWebRTCDataChannel ++ * @symbols: ++ * - GstWebRTCDataChannel + * + * + * +@@ -33,6 +35,7 @@ + #endif + + #include "datachannel.h" ++#include "webrtc-priv.h" + + #define GST_CAT_DEFAULT gst_webrtc_data_channel_debug + GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); +@@ -178,6 +181,8 @@ gst_webrtc_data_channel_finalize (GObject * object) + g_free (channel->protocol); + channel->protocol = NULL; + ++ g_mutex_clear (&channel->lock); ++ + G_OBJECT_CLASS (parent_class)->finalize (object); + } + +@@ -258,7 +263,7 @@ gst_webrtc_data_channel_class_init (GstWebRTCDataChannelClass * klass) + "Ready State", + "The Ready state of this data channel", + GST_TYPE_WEBRTC_DATA_CHANNEL_STATE, +- GST_WEBRTC_DATA_CHANNEL_STATE_NEW, ++ GST_WEBRTC_DATA_CHANNEL_STATE_CONNECTING, + G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, +@@ -334,7 +339,7 @@ gst_webrtc_data_channel_class_init (GstWebRTCDataChannelClass * klass) + */ + gst_webrtc_data_channel_signals[SIGNAL_SEND_DATA] = + g_signal_new_class_handler ("send-data", G_TYPE_FROM_CLASS (klass), +- G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, ++ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION | G_SIGNAL_DEPRECATED, + G_CALLBACK 
(gst_webrtc_data_channel_send_data), NULL, NULL, NULL, + G_TYPE_NONE, 1, G_TYPE_BYTES); + +@@ -519,7 +524,31 @@ gst_webrtc_data_channel_send_data (GstWebRTCDataChannel * channel, + g_return_if_fail (GST_IS_WEBRTC_DATA_CHANNEL (channel)); + + klass = GST_WEBRTC_DATA_CHANNEL_GET_CLASS (channel); +- klass->send_data (channel, data); ++ (void) klass->send_data (channel, data, NULL); ++} ++ ++/** ++ * gst_webrtc_data_channel_send_data_full: ++ * @channel: a #GstWebRTCDataChannel ++ * @data: (nullable): a #GBytes or %NULL ++ * @error: (nullable): location to a #GError or %NULL ++ * ++ * Send @data as a data message over @channel. ++ * ++ * Returns: TRUE if @channel is open and data could be queued ++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_webrtc_data_channel_send_data_full (GstWebRTCDataChannel * channel, ++ GBytes * data, GError ** error) ++{ ++ GstWebRTCDataChannelClass *klass; ++ ++ g_return_val_if_fail (GST_IS_WEBRTC_DATA_CHANNEL (channel), FALSE); ++ ++ klass = GST_WEBRTC_DATA_CHANNEL_GET_CLASS (channel); ++ return klass->send_data (channel, data, error); + } + + /** +@@ -538,7 +567,30 @@ gst_webrtc_data_channel_send_string (GstWebRTCDataChannel * channel, + g_return_if_fail (GST_IS_WEBRTC_DATA_CHANNEL (channel)); + + klass = GST_WEBRTC_DATA_CHANNEL_GET_CLASS (channel); +- klass->send_string (channel, str); ++ (void) klass->send_string (channel, str, NULL); ++} ++ ++/** ++ * gst_webrtc_data_channel_send_string_full: ++ * @channel: a #GstWebRTCDataChannel ++ * @str: (nullable): a string or %NULL ++ * ++ * Send @str as a string message over @channel. ++ * ++ * Returns: TRUE if @channel is open and data could be queued ++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_webrtc_data_channel_send_string_full (GstWebRTCDataChannel * channel, ++ const gchar * str, GError ** error) ++{ ++ GstWebRTCDataChannelClass *klass; ++ ++ g_return_val_if_fail (GST_IS_WEBRTC_DATA_CHANNEL (channel), FALSE); ++ ++ klass = GST_WEBRTC_DATA_CHANNEL_GET_CLASS (channel); ++ return klass->send_string (channel, str, error); + } + + /** +diff --git a/gst-libs/gst/webrtc/datachannel.h b/gst-libs/gst/webrtc/datachannel.h +index 79b536f5b..408872aec 100644 +--- a/gst-libs/gst/webrtc/datachannel.h ++++ b/gst-libs/gst/webrtc/datachannel.h +@@ -36,77 +36,22 @@ GType gst_webrtc_data_channel_get_type(void); + #define GST_IS_WEBRTC_DATA_CHANNEL_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_DATA_CHANNEL)) + #define GST_WEBRTC_DATA_CHANNEL_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_DATA_CHANNEL,GstWebRTCDataChannelClass)) + +-#define GST_WEBRTC_DATA_CHANNEL_LOCK(channel) g_mutex_lock(&((GstWebRTCDataChannel *)(channel))->lock) +-#define GST_WEBRTC_DATA_CHANNEL_UNLOCK(channel) g_mutex_unlock(&((GstWebRTCDataChannel *)(channel))->lock) +- +-/** +- * GstWebRTCDataChannel: +- * +- * Since: 1.18 +- */ +-struct _GstWebRTCDataChannel +-{ +- GObject parent; +- +- GMutex lock; +- +- gchar *label; +- gboolean ordered; +- guint max_packet_lifetime; +- guint max_retransmits; +- gchar *protocol; +- gboolean negotiated; +- gint id; +- GstWebRTCPriorityType priority; +- GstWebRTCDataChannelState ready_state; +- guint64 buffered_amount; +- guint64 buffered_amount_low_threshold; +- +- gpointer _padding[GST_PADDING]; +-}; +- +-/** +- * GstWebRTCDataChannelClass: +- * +- * Since: 1.18 +- */ +-struct _GstWebRTCDataChannelClass +-{ +- GObjectClass parent_class; +- +- void (*send_data) (GstWebRTCDataChannel * channel, GBytes *data); +- void (*send_string) (GstWebRTCDataChannel * channel, const gchar *str); +- void 
(*close) (GstWebRTCDataChannel * channel); +- +- gpointer _padding[GST_PADDING]; +-}; +- + GST_WEBRTC_API +-void gst_webrtc_data_channel_on_open (GstWebRTCDataChannel * channel); ++gboolean gst_webrtc_data_channel_send_data_full (GstWebRTCDataChannel * channel, GBytes * data, GError ** error); + + GST_WEBRTC_API +-void gst_webrtc_data_channel_on_close (GstWebRTCDataChannel * channel); ++gboolean gst_webrtc_data_channel_send_string_full (GstWebRTCDataChannel * channel, const gchar * str, GError ** error); + + GST_WEBRTC_API +-void gst_webrtc_data_channel_on_error (GstWebRTCDataChannel * channel, GError * error); +- +-GST_WEBRTC_API +-void gst_webrtc_data_channel_on_message_data (GstWebRTCDataChannel * channel, GBytes * data); +- +-GST_WEBRTC_API +-void gst_webrtc_data_channel_on_message_string (GstWebRTCDataChannel * channel, const gchar * str); +- +-GST_WEBRTC_API +-void gst_webrtc_data_channel_on_buffered_amount_low (GstWebRTCDataChannel * channel); ++void gst_webrtc_data_channel_close (GstWebRTCDataChannel * channel); + +-GST_WEBRTC_API ++#ifndef GST_REMOVE_DEPRECATED ++GST_WEBRTC_DEPRECATED_FOR(gst_webrtc_data_channel_send_data_full) + void gst_webrtc_data_channel_send_data (GstWebRTCDataChannel * channel, GBytes * data); + +-GST_WEBRTC_API ++GST_WEBRTC_DEPRECATED_FOR(gst_webrtc_data_channel_send_string_full) + void gst_webrtc_data_channel_send_string (GstWebRTCDataChannel * channel, const gchar * str); +- +-GST_WEBRTC_API +-void gst_webrtc_data_channel_close (GstWebRTCDataChannel * channel); ++#endif + + G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstWebRTCDataChannel, g_object_unref) + +diff --git a/gst-libs/gst/webrtc/dtlstransport.c b/gst-libs/gst/webrtc/dtlstransport.c +index 2c7135b1d..bd1a553e7 100644 +--- a/gst-libs/gst/webrtc/dtlstransport.c ++++ b/gst-libs/gst/webrtc/dtlstransport.c +@@ -22,6 +22,8 @@ + * @short_description: RTCDtlsTransport object + * @title: GstWebRTCDTLSTransport + * @see_also: #GstWebRTCRTPSender, #GstWebRTCRTPReceiver, #GstWebRTCICETransport ++ * @symbols: ++ * - GstWebRTCDTLSTransport + * + * + */ +@@ -32,6 +34,8 @@ + + #include "dtlstransport.h" + ++#include "webrtc-priv.h" ++ + #define GST_CAT_DEFAULT gst_webrtc_dtls_transport_debug + GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); + +@@ -55,20 +59,26 @@ enum + PROP_STATE, + PROP_CLIENT, + PROP_CERTIFICATE, +- PROP_REMOTE_CERTIFICATE, +- PROP_RTCP, ++ PROP_REMOTE_CERTIFICATE + }; + + void + gst_webrtc_dtls_transport_set_transport (GstWebRTCDTLSTransport * transport, + GstWebRTCICETransport * ice) + { ++ gboolean notify = FALSE; ++ + g_return_if_fail (GST_IS_WEBRTC_DTLS_TRANSPORT (transport)); + g_return_if_fail (GST_IS_WEBRTC_ICE_TRANSPORT (ice)); + + GST_OBJECT_LOCK (transport); +- gst_object_replace ((GstObject **) & transport->transport, GST_OBJECT (ice)); ++ notify = ++ gst_object_replace ((GstObject **) & transport->transport, ++ GST_OBJECT (ice)); + GST_OBJECT_UNLOCK (transport); ++ ++ if (notify) ++ g_object_notify (G_OBJECT (transport), "transport"); + } + + static void +@@ -88,9 +98,6 @@ gst_webrtc_dtls_transport_set_property (GObject * object, guint prop_id, + case PROP_CERTIFICATE: + g_object_set_property (G_OBJECT (webrtc->dtlssrtpdec), "pem", value); + break; +- case PROP_RTCP: +- webrtc->is_rtcp = g_value_get_boolean (value); +- break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -123,9 +130,6 @@ gst_webrtc_dtls_transport_get_property (GObject * object, guint prop_id, + case PROP_REMOTE_CERTIFICATE: + g_object_get_property (G_OBJECT (webrtc->dtlssrtpdec), 
"peer-pem", value); + break; +- case PROP_RTCP: +- g_value_set_boolean (value, webrtc->is_rtcp); +- break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -184,12 +188,12 @@ gst_webrtc_dtls_transport_constructed (GObject * object) + /* XXX: this may collide with another connection_id however this is only a + * problem if multiple dtls element sets are being used within the same + * process */ +- connection_id = g_strdup_printf ("%s_%u_%u", webrtc->is_rtcp ? "rtcp" : "rtp", +- webrtc->session_id, g_random_int ()); ++ connection_id = g_strdup_printf ("rtp_%u_%u", webrtc->session_id, ++ g_random_int ()); + + webrtc->dtlssrtpenc = gst_element_factory_make ("dtlssrtpenc", NULL); + g_object_set (webrtc->dtlssrtpenc, "connection-id", connection_id, +- "is-client", webrtc->client, "rtp-sync", TRUE, NULL); ++ "is-client", webrtc->client, "rtp-sync", FALSE, NULL); + + webrtc->dtlssrtpdec = gst_element_factory_make ("dtlssrtpdec", NULL); + g_object_set (webrtc->dtlssrtpdec, "connection-id", connection_id, NULL); +@@ -249,12 +253,6 @@ gst_webrtc_dtls_transport_class_init (GstWebRTCDTLSTransportClass * klass) + g_param_spec_string ("remote-certificate", "Remote DTLS certificate", + "Remote DTLS certificate", NULL, + G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); +- +- g_object_class_install_property (gobject_class, +- PROP_RTCP, +- g_param_spec_boolean ("rtcp", "RTCP", +- "The transport is being used solely for RTCP", FALSE, +- G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS)); + } + + static void +@@ -263,8 +261,8 @@ gst_webrtc_dtls_transport_init (GstWebRTCDTLSTransport * webrtc) + } + + GstWebRTCDTLSTransport * +-gst_webrtc_dtls_transport_new (guint session_id, gboolean is_rtcp) ++gst_webrtc_dtls_transport_new (guint session_id) + { + return g_object_new (GST_TYPE_WEBRTC_DTLS_TRANSPORT, "session-id", session_id, +- "rtcp", is_rtcp, NULL); ++ NULL); + } +diff --git a/gst-libs/gst/webrtc/dtlstransport.h b/gst-libs/gst/webrtc/dtlstransport.h +index feb3944bb..019861956 100644 +--- a/gst-libs/gst/webrtc/dtlstransport.h ++++ b/gst-libs/gst/webrtc/dtlstransport.h +@@ -35,39 +35,6 @@ GType gst_webrtc_dtls_transport_get_type(void); + #define GST_IS_WEBRTC_DTLS_TRANSPORT_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_DTLS_TRANSPORT)) + #define GST_WEBRTC_DTLS_TRANSPORT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_DTLS_TRANSPORT,GstWebRTCDTLSTransportClass)) + +-/** +- * GstWebRTCDTLSTransport: +- */ +-struct _GstWebRTCDTLSTransport +-{ +- GstObject parent; +- +- GstWebRTCICETransport *transport; +- GstWebRTCDTLSTransportState state; +- +- gboolean is_rtcp; +- gboolean client; +- guint session_id; +- GstElement *dtlssrtpenc; +- GstElement *dtlssrtpdec; +- +- gpointer _padding[GST_PADDING]; +-}; +- +-struct _GstWebRTCDTLSTransportClass +-{ +- GstObjectClass parent_class; +- +- gpointer _padding[GST_PADDING]; +-}; +- +-GST_WEBRTC_API +-GstWebRTCDTLSTransport * gst_webrtc_dtls_transport_new (guint session_id, gboolean rtcp); +- +-GST_WEBRTC_API +-void gst_webrtc_dtls_transport_set_transport (GstWebRTCDTLSTransport * transport, +- GstWebRTCICETransport * ice); +- + G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstWebRTCDTLSTransport, gst_object_unref) + + G_END_DECLS +diff --git a/gst-libs/gst/webrtc/ice.c b/gst-libs/gst/webrtc/ice.c +new file mode 100644 +index 000000000..2328d0b82 +--- /dev/null ++++ b/gst-libs/gst/webrtc/ice.c +@@ -0,0 +1,622 @@ ++/* GStreamer ++ * Copyright (C) 2017 Matthew Waters ++ * ++ * This library is free 
software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++/** ++ * SECTION:gstwebrtcice ++ * @title: GstWebRTCICE ++ * @short_description: Base class WebRTC ICE handling ++ * @symbols: ++ * - GstWebRTCICE ++ */ ++ ++#ifdef HAVE_CONFIG_H ++# include "config.h" ++#endif ++ ++#include "ice.h" ++#include "icestream.h" ++ ++#include "webrtc-priv.h" ++ ++#define GST_CAT_DEFAULT gst_webrtc_ice_debug ++GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); ++ ++enum ++{ ++ SIGNAL_0, ++ ADD_LOCAL_IP_ADDRESS_SIGNAL, ++ LAST_SIGNAL, ++}; ++ ++enum ++{ ++ PROP_0, ++ PROP_MIN_RTP_PORT, ++ PROP_MAX_RTP_PORT, ++}; ++ ++static guint gst_webrtc_ice_signals[LAST_SIGNAL] = { 0 }; ++ ++#define gst_webrtc_ice_parent_class parent_class ++G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstWebRTCICE, gst_webrtc_ice, ++ GST_TYPE_OBJECT, GST_DEBUG_CATEGORY_INIT (gst_webrtc_ice_debug, ++ "webrtcice", 0, "webrtcice");); ++ ++/** ++ * gst_webrtc_ice_add_stream: ++ * @ice: The #GstWebRTCICE ++ * @session_id: The session id ++ * ++ * Returns: (transfer full) (nullable): The #GstWebRTCICEStream, or %NULL ++ * ++ * Since: 1.22 ++ */ ++GstWebRTCICEStream * ++gst_webrtc_ice_add_stream (GstWebRTCICE * ice, guint session_id) ++{ ++ g_return_val_if_fail (GST_IS_WEBRTC_ICE (ice), NULL); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->add_stream); ++ ++ return GST_WEBRTC_ICE_GET_CLASS (ice)->add_stream (ice, session_id); ++} ++ ++/** ++ * gst_webrtc_ice_find_transport: ++ * @ice: The #GstWebRTCICE ++ * @stream: The #GstWebRTCICEStream ++ * @component: The #GstWebRTCICEComponent ++ * ++ * Returns: (transfer full) (nullable): The #GstWebRTCICETransport, or %NULL ++ * ++ * Since: 1.22 ++ */ ++GstWebRTCICETransport * ++gst_webrtc_ice_find_transport (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, GstWebRTCICEComponent component) ++{ ++ g_return_val_if_fail (GST_IS_WEBRTC_ICE (ice), NULL); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->find_transport); ++ ++ return GST_WEBRTC_ICE_GET_CLASS (ice)->find_transport (ice, stream, ++ component); ++} ++ ++/** ++ * gst_webrtc_ice_add_candidate: ++ * @ice: The #GstWebRTCICE ++ * @stream: The #GstWebRTCICEStream ++ * @candidate: The ICE candidate ++ * ++ * Since: 1.22 ++ */ ++void ++gst_webrtc_ice_add_candidate (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, const gchar * candidate) ++{ ++ g_return_if_fail (GST_IS_WEBRTC_ICE (ice)); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->add_candidate); ++ ++ GST_WEBRTC_ICE_GET_CLASS (ice)->add_candidate (ice, stream, candidate); ++} ++ ++/** ++ * gst_webrtc_ice_set_remote_credentials: ++ * @ice: The #GstWebRTCICE ++ * @stream: The #GstWebRTCICEStream ++ * @ufrag: ICE username ++ * @pwd: ICE password ++ * ++ * Returns: FALSE on error, TRUE otherwise ++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_webrtc_ice_set_remote_credentials (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, const gchar * 
ufrag, const gchar * pwd) ++{ ++ g_return_val_if_fail (GST_IS_WEBRTC_ICE (ice), FALSE); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->set_remote_credentials); ++ ++ return GST_WEBRTC_ICE_GET_CLASS (ice)->set_remote_credentials (ice, stream, ++ ufrag, pwd); ++} ++ ++/** ++ * gst_webrtc_ice_add_turn_server: ++ * @ice: The #GstWebRTCICE ++ * @uri: URI of the TURN server ++ * ++ * Returns: FALSE on error, TRUE otherwise ++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_webrtc_ice_add_turn_server (GstWebRTCICE * ice, const gchar * uri) ++{ ++ g_return_val_if_fail (GST_IS_WEBRTC_ICE (ice), FALSE); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->add_turn_server); ++ ++ return GST_WEBRTC_ICE_GET_CLASS (ice)->add_turn_server (ice, uri); ++} ++ ++/** ++ * gst_webrtc_ice_set_local_credentials: ++ * @ice: The #GstWebRTCICE ++ * @stream: The #GstWebRTCICEStream ++ * @ufrag: ICE username ++ * @pwd: ICE password ++ * ++ * Returns: FALSE on error, TRUE otherwise ++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_webrtc_ice_set_local_credentials (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, const gchar * ufrag, const gchar * pwd) ++{ ++ g_return_val_if_fail (GST_IS_WEBRTC_ICE (ice), FALSE); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->set_local_credentials); ++ ++ return GST_WEBRTC_ICE_GET_CLASS (ice)->set_local_credentials (ice, stream, ++ ufrag, pwd); ++} ++ ++/** ++ * gst_webrtc_ice_gather_candidates: ++ * @ice: The #GstWebRTCICE ++ * @stream: The #GstWebRTCICEStream ++ * Returns: FALSE on error, TRUE otherwise ++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_webrtc_ice_gather_candidates (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream) ++{ ++ g_return_val_if_fail (GST_IS_WEBRTC_ICE (ice), FALSE); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->gather_candidates); ++ ++ return GST_WEBRTC_ICE_GET_CLASS (ice)->gather_candidates (ice, stream); ++} ++ ++/** ++ * gst_webrtc_ice_set_is_controller: ++ * @ice: The #GstWebRTCICE ++ * @controller: TRUE to set as controller ++ * ++ * Since: 1.22 ++ */ ++void ++gst_webrtc_ice_set_is_controller (GstWebRTCICE * ice, gboolean controller) ++{ ++ g_return_if_fail (GST_IS_WEBRTC_ICE (ice)); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->set_is_controller); ++ ++ GST_WEBRTC_ICE_GET_CLASS (ice)->set_is_controller (ice, controller); ++} ++ ++/** ++ * gst_webrtc_ice_get_is_controller: ++ * @ice: The #GstWebRTCICE ++ * Returns: TRUE if set as controller, FALSE otherwise ++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_webrtc_ice_get_is_controller (GstWebRTCICE * ice) ++{ ++ g_return_val_if_fail (GST_IS_WEBRTC_ICE (ice), FALSE); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->get_is_controller); ++ ++ return GST_WEBRTC_ICE_GET_CLASS (ice)->get_is_controller (ice); ++} ++ ++/** ++ * gst_webrtc_ice_set_force_relay: ++ * @ice: The #GstWebRTCICE ++ * @force_relay: TRUE to enable force relay ++ * ++ * Since: 1.22 ++ */ ++void ++gst_webrtc_ice_set_force_relay (GstWebRTCICE * ice, gboolean force_relay) ++{ ++ g_return_if_fail (GST_IS_WEBRTC_ICE (ice)); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->set_force_relay); ++ ++ GST_WEBRTC_ICE_GET_CLASS (ice)->set_force_relay (ice, force_relay); ++} ++ ++/** ++ * gst_webrtc_ice_set_tos: ++ * @ice: The #GstWebRTCICE ++ * @stream: The #GstWebRTCICEStream ++ * @tos: ToS to be set ++ * ++ * Since: 1.22 ++ */ ++void ++gst_webrtc_ice_set_tos (GstWebRTCICE * ice, GstWebRTCICEStream * stream, ++ guint tos) ++{ ++ g_return_if_fail (GST_IS_WEBRTC_ICE (ice)); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->set_tos); ++ ++ GST_WEBRTC_ICE_GET_CLASS (ice)->set_tos (ice, stream, 
tos); ++} ++ ++ ++/** ++ * gst_webrtc_ice_get_local_candidates: ++ * @ice: The #GstWebRTCICE ++ * @stream: The #GstWebRTCICEStream ++ * Returns: (transfer full)(array zero-terminated=1): List of local candidates ++ * ++ * Since: 1.22 ++ */ ++GstWebRTCICECandidateStats ** ++gst_webrtc_ice_get_local_candidates (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream) ++{ ++ g_return_val_if_fail (GST_IS_WEBRTC_ICE (ice), NULL); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->get_local_candidates); ++ ++ return GST_WEBRTC_ICE_GET_CLASS (ice)->get_local_candidates (ice, stream); ++} ++ ++ ++/** ++ * gst_webrtc_ice_get_remote_candidates: ++ * @ice: The #GstWebRTCICE ++ * @stream: The #GstWebRTCICEStream ++ * Returns: (transfer full) (array zero-terminated=1): List of remote candidates ++ * ++ * Since: 1.22 ++ */ ++GstWebRTCICECandidateStats ** ++gst_webrtc_ice_get_remote_candidates (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream) ++{ ++ g_return_val_if_fail (GST_IS_WEBRTC_ICE (ice), NULL); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->get_remote_candidates); ++ ++ return GST_WEBRTC_ICE_GET_CLASS (ice)->get_remote_candidates (ice, stream); ++} ++ ++/** ++ * gst_webrtc_ice_get_selected_pair: ++ * @ice: The #GstWebRTCICE ++ * @stream: The #GstWebRTCICEStream ++ * @local_stats: (out) (transfer full): A pointer to #GstWebRTCICECandidateStats for local candidate ++ * @remote_stats: (out) (transfer full): pointer to #GstWebRTCICECandidateStats for remote candidate ++ * ++ * Returns: FALSE on failure, otherwise @local_stats @remote_stats will be set ++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_webrtc_ice_get_selected_pair (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, GstWebRTCICECandidateStats ** local_stats, ++ GstWebRTCICECandidateStats ** remote_stats) ++{ ++ g_return_val_if_fail (GST_IS_WEBRTC_ICE (ice), FALSE); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->get_selected_pair); ++ ++ return GST_WEBRTC_ICE_GET_CLASS (ice)->get_selected_pair (ice, stream, ++ local_stats, remote_stats); ++} ++ ++/** ++ * gst_webrtc_ice_candidate_stats_free: ++ * @stats: The #GstWebRTCICECandidateStats to be free'd ++ * ++ * Helper function to free #GstWebRTCICECandidateStats ++ * ++ * Since: 1.22 ++ */ ++void ++gst_webrtc_ice_candidate_stats_free (GstWebRTCICECandidateStats * stats) ++{ ++ if (stats) { ++ g_free (stats->ipaddr); ++ g_free (stats->url); ++ } ++ ++ g_free (stats); ++} ++ ++/** ++ * gst_webrtc_ice_candidate_stats_copy: ++ * @stats: The #GstWebRTCICE ++ * ++ * Returns: (transfer full): A copy of @stats ++ * ++ * Since: 1.22 ++ */ ++GstWebRTCICECandidateStats * ++gst_webrtc_ice_candidate_stats_copy (GstWebRTCICECandidateStats * stats) ++{ ++ GstWebRTCICECandidateStats *copy = ++ g_malloc (sizeof (GstWebRTCICECandidateStats)); ++ ++ *copy = *stats; ++ ++ copy->ipaddr = g_strdup (stats->ipaddr); ++ copy->url = g_strdup (stats->url); ++ ++ return copy; ++} ++ ++G_DEFINE_BOXED_TYPE (GstWebRTCICECandidateStats, gst_webrtc_ice_candidate_stats, ++ (GBoxedCopyFunc) gst_webrtc_ice_candidate_stats_copy, ++ (GBoxedFreeFunc) gst_webrtc_ice_candidate_stats_free); ++ ++/** ++ * gst_webrtc_ice_set_on_ice_candidate: ++ * @ice: The #GstWebRTCICE ++ * @func: The #GstWebRTCICEOnCandidateFunc callback function ++ * @user_data: User data passed to the callback function ++ * @notify: a #GDestroyNotify when the candidate is no longer needed ++ * ++ * Since: 1.22 ++ */ ++void ++gst_webrtc_ice_set_on_ice_candidate (GstWebRTCICE * ice, ++ GstWebRTCICEOnCandidateFunc func, gpointer user_data, GDestroyNotify notify) ++{ ++ 
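/* [Editorial aside, not part of the patch] A minimal sketch of how a caller
 * would hook candidate emission through the setter below. `on_candidate_cb`
 * and `app` are hypothetical application names; the callback signature
 * follows GstWebRTCICEOnCandidateFunc as declared in ice.h later in this
 * patch:
 *
 *   static void
 *   on_candidate_cb (GstWebRTCICE * ice, guint stream_id,
 *       const gchar * candidate, gpointer user_data)
 *   {
 *     g_print ("stream %u candidate: %s\n", stream_id, candidate);
 *   }
 *
 *   gst_webrtc_ice_set_on_ice_candidate (ice, on_candidate_cb, app, NULL);
 */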
g_return_if_fail (GST_IS_WEBRTC_ICE (ice)); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->set_on_ice_candidate); ++ ++ GST_WEBRTC_ICE_GET_CLASS (ice)->set_on_ice_candidate (ice, func, user_data, ++ notify); ++} ++ ++/** ++ * gst_webrtc_ice_set_stun_server: ++ * @ice: The #GstWebRTCICE ++ * @uri: (nullable): URI of the STUN server ++ * ++ * Since: 1.22 ++ */ ++void ++gst_webrtc_ice_set_stun_server (GstWebRTCICE * ice, const gchar * uri_s) ++{ ++ g_return_if_fail (GST_IS_WEBRTC_ICE (ice)); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->set_stun_server); ++ ++ GST_WEBRTC_ICE_GET_CLASS (ice)->set_stun_server (ice, uri_s); ++} ++ ++/** ++ * gst_webrtc_ice_get_stun_server: ++ * @ice: The #GstWebRTCICE ++ * ++ * Returns: (nullable): URI of the STUN sever ++ * ++ * Since: 1.22 ++ */ ++gchar * ++gst_webrtc_ice_get_stun_server (GstWebRTCICE * ice) ++{ ++ g_return_val_if_fail (GST_IS_WEBRTC_ICE (ice), NULL); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->get_stun_server); ++ ++ return GST_WEBRTC_ICE_GET_CLASS (ice)->get_stun_server (ice); ++} ++ ++/** ++ * gst_webrtc_ice_set_turn_server: ++ * @ice: The #GstWebRTCICE ++ * @uri: (nullable): URI of the TURN sever ++ * ++ * Since: 1.22 ++ */ ++void ++gst_webrtc_ice_set_turn_server (GstWebRTCICE * ice, const gchar * uri_s) ++{ ++ g_return_if_fail (GST_IS_WEBRTC_ICE (ice)); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->set_turn_server); ++ ++ GST_WEBRTC_ICE_GET_CLASS (ice)->set_turn_server (ice, uri_s); ++} ++ ++/** ++ * gst_webrtc_ice_get_turn_server: ++ * @ice: The #GstWebRTCICE ++ * ++ * Returns: (nullable): URI of the TURN sever ++ * ++ * Since: 1.22 ++ */ ++gchar * ++gst_webrtc_ice_get_turn_server (GstWebRTCICE * ice) ++{ ++ g_return_val_if_fail (GST_IS_WEBRTC_ICE (ice), NULL); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->get_turn_server); ++ ++ return GST_WEBRTC_ICE_GET_CLASS (ice)->get_turn_server (ice); ++} ++ ++/** ++ * gst_webrtc_ice_set_http_proxy: ++ * @ice: The #GstWebRTCICE ++ * @uri: (transfer none): URI of the HTTP proxy of the form ++ * http://[username:password@]hostname[:port] ++ * ++ * Set HTTP Proxy to be used when connecting to TURN server. ++ * ++ * Since: 1.22 ++ */ ++void ++gst_webrtc_ice_set_http_proxy (GstWebRTCICE * ice, const gchar * uri_s) ++{ ++ g_return_if_fail (GST_IS_WEBRTC_ICE (ice)); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->set_http_proxy); ++ ++ GST_WEBRTC_ICE_GET_CLASS (ice)->set_http_proxy (ice, uri_s); ++} ++ ++/** ++ * gst_webrtc_ice_get_http_proxy: ++ * @ice: The #GstWebRTCICE ++ * ++ * Returns: (transfer full): URI of the HTTP proxy of the form ++ * http://[username:password@]hostname[:port] ++ * ++ * Get HTTP Proxy to be used when connecting to TURN server. 
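 *
 * [Editorial aside, not part of the patch] Together with the STUN/TURN
 * setters above, this gives applications one backend-agnostic configuration
 * surface. A hedged usage sketch, with placeholder URIs and assuming `ice`
 * is the #GstWebRTCICE owned by the session:
 *
 *   gst_webrtc_ice_set_stun_server (ice, "stun://stun.example.net:3478");
 *   gst_webrtc_ice_add_turn_server (ice,
 *       "turn://user:pass@turn.example.net:3478?transport=udp");
 *   gst_webrtc_ice_set_http_proxy (ice, "http://proxy.example.net:3128");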
++ * ++ * Since: 1.22 ++ */ ++gchar * ++gst_webrtc_ice_get_http_proxy (GstWebRTCICE * ice) ++{ ++ g_return_val_if_fail (GST_IS_WEBRTC_ICE (ice), NULL); ++ g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->get_http_proxy); ++ ++ return GST_WEBRTC_ICE_GET_CLASS (ice)->get_http_proxy (ice); ++} ++ ++ ++static void ++gst_webrtc_ice_set_property (GObject * object, guint prop_id, ++ const GValue * value, GParamSpec * pspec) ++{ ++ GstWebRTCICE *ice = GST_WEBRTC_ICE (object); ++ ++ switch (prop_id) { ++ case PROP_MIN_RTP_PORT: ++ ice->min_rtp_port = g_value_get_uint (value); ++ if (ice->min_rtp_port > ice->max_rtp_port) ++ g_warning ("Set min-rtp-port to %u which is larger than" ++ " max-rtp-port %u", ice->min_rtp_port, ice->max_rtp_port); ++ break; ++ case PROP_MAX_RTP_PORT: ++ ice->max_rtp_port = g_value_get_uint (value); ++ if (ice->min_rtp_port > ice->max_rtp_port) ++ g_warning ("Set max-rtp-port to %u which is smaller than" ++ " min-rtp-port %u", ice->max_rtp_port, ice->min_rtp_port); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_webrtc_ice_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec) ++{ ++ GstWebRTCICE *ice = GST_WEBRTC_ICE (object); ++ ++ switch (prop_id) { ++ case PROP_MIN_RTP_PORT: ++ g_value_set_uint (value, ice->min_rtp_port); ++ break; ++ case PROP_MAX_RTP_PORT: ++ g_value_set_uint (value, ice->max_rtp_port); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_webrtc_ice_class_init (GstWebRTCICEClass * klass) ++{ ++ GObjectClass *gobject_class = (GObjectClass *) klass; ++ ++ klass->add_stream = NULL; ++ klass->find_transport = NULL; ++ klass->gather_candidates = NULL; ++ klass->add_candidate = NULL; ++ klass->set_local_credentials = NULL; ++ klass->set_remote_credentials = NULL; ++ klass->add_turn_server = NULL; ++ klass->set_is_controller = NULL; ++ klass->get_is_controller = NULL; ++ klass->set_force_relay = NULL; ++ klass->set_stun_server = NULL; ++ klass->get_stun_server = NULL; ++ klass->set_turn_server = NULL; ++ klass->get_turn_server = NULL; ++ klass->get_http_proxy = NULL; ++ klass->set_http_proxy = NULL; ++ klass->set_tos = NULL; ++ klass->set_on_ice_candidate = NULL; ++ klass->get_local_candidates = NULL; ++ klass->get_remote_candidates = NULL; ++ klass->get_selected_pair = NULL; ++ ++ gobject_class->get_property = gst_webrtc_ice_get_property; ++ gobject_class->set_property = gst_webrtc_ice_set_property; ++ ++ /** ++ * GstWebRTCICE:min-rtp-port: ++ * ++ * Minimum port for local rtp port range. ++ * min-rtp-port must be <= max-rtp-port ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_MIN_RTP_PORT, ++ g_param_spec_uint ("min-rtp-port", "ICE RTP candidate min port", ++ "Minimum port for local rtp port range. " ++ "min-rtp-port must be <= max-rtp-port", ++ 0, 65535, 0, ++ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstWebRTCICE:max-rtp-port: ++ * ++ * Maximum port for local rtp port range. ++ * min-rtp-port must be <= max-rtp-port ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_MAX_RTP_PORT, ++ g_param_spec_uint ("max-rtp-port", "ICE RTP candidate max port", ++ "Maximum port for local rtp port range. 
" ++ "max-rtp-port must be >= min-rtp-port", ++ 0, 65535, 65535, ++ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstWebRTCICE::add-local-ip-address: ++ * @object: the #GstWebRTCICE ++ * @address: The local IP address ++ * ++ * Add a local IP address to use for ICE candidate gathering. If none ++ * are supplied, they will be discovered automatically. Calling this signal ++ * stops automatic ICE gathering. ++ * ++ * Returns: whether the address could be added. ++ */ ++ gst_webrtc_ice_signals[ADD_LOCAL_IP_ADDRESS_SIGNAL] = ++ g_signal_new_class_handler ("add-local-ip-address", ++ G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, ++ NULL, NULL, NULL, ++ g_cclosure_marshal_generic, G_TYPE_BOOLEAN, 1, G_TYPE_STRING); ++} ++ ++static void ++gst_webrtc_ice_init (GstWebRTCICE * ice) ++{ ++} +diff --git a/gst-libs/gst/webrtc/ice.h b/gst-libs/gst/webrtc/ice.h +new file mode 100644 +index 000000000..f67889b1f +--- /dev/null ++++ b/gst-libs/gst/webrtc/ice.h +@@ -0,0 +1,261 @@ ++/* GStreamer ++ * Copyright (C) 2017 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++#ifndef __GST_WEBRTC_ICE_H__ ++#define __GST_WEBRTC_ICE_H__ ++ ++#include ++ ++G_BEGIN_DECLS ++ ++GST_WEBRTC_API ++GType gst_webrtc_ice_get_type(void); ++#define GST_TYPE_WEBRTC_ICE (gst_webrtc_ice_get_type()) ++#define GST_WEBRTC_ICE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WEBRTC_ICE,GstWebRTCICE)) ++#define GST_IS_WEBRTC_ICE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WEBRTC_ICE)) ++#define GST_WEBRTC_ICE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_WEBRTC_ICE,GstWebRTCICEClass)) ++#define GST_IS_WEBRTC_ICE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_ICE)) ++#define GST_WEBRTC_ICE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_ICE,GstWebRTCICEClass)) ++ ++struct _GstWebRTCICE ++{ ++ GstObject parent; ++ ++ GstWebRTCICEGatheringState ice_gathering_state; ++ GstWebRTCICEConnectionState ice_connection_state; ++ ++ /*< protected >*/ ++ guint min_rtp_port; ++ guint max_rtp_port; ++ ++ gpointer _gst_reserved[GST_PADDING]; ++}; ++ ++struct _GstWebRTCICECandidateStats ++{ ++ gchar *ipaddr; ++ guint port; ++ guint stream_id; ++ const gchar *type; ++ const gchar *proto; ++ const gchar *relay_proto; ++ guint prio; ++ gchar *url; ++ ++ gpointer _gst_reserved[GST_PADDING_LARGE]; ++}; ++ ++/** ++ * GstWebRTCICEOnCandidateFunc: ++ * @ice: The #GstWebRTCICE ++ * @stream_id: The stream id ++ * @candidate: The discovered candidate ++ * @user_data: User data that was set by #gst_webrtc_ice_set_on_ice_candidate ++ * ++ * Callback function to be triggered on discovery of a new candidate ++ * Since: 1.22 ++ */ ++typedef void (*GstWebRTCICEOnCandidateFunc) (GstWebRTCICE * ice, guint stream_id, const gchar * candidate, gpointer user_data); ++ ++struct _GstWebRTCICEClass { ++ GstObjectClass parent_class; ++ GstWebRTCICEStream * (*add_stream) (GstWebRTCICE * ice, ++ guint session_id); ++ GstWebRTCICETransport * (*find_transport) (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, ++ GstWebRTCICEComponent component); ++ gboolean (*gather_candidates) (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream); ++ void (*add_candidate) (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, ++ const gchar * candidate); ++ gboolean (*set_local_credentials) (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, ++ const gchar * ufrag, ++ const gchar * pwd); ++ gboolean (*set_remote_credentials) (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, ++ const gchar * ufrag, ++ const gchar * pwd); ++ gboolean (*add_turn_server) (GstWebRTCICE * ice, ++ const gchar * uri); ++ void (*set_is_controller) (GstWebRTCICE * ice, ++ gboolean controller); ++ gboolean (*get_is_controller) (GstWebRTCICE * ice); ++ void (*set_force_relay) (GstWebRTCICE * ice, ++ gboolean force_relay); ++ void (*set_stun_server) (GstWebRTCICE * ice, ++ const gchar * uri); ++ gchar * (*get_stun_server) (GstWebRTCICE * ice); ++ void (*set_turn_server) (GstWebRTCICE * ice, ++ const gchar * uri); ++ gchar * (*get_turn_server) (GstWebRTCICE * ice); ++ ++ /** ++ * GstWebRTCICEClass::set_http_proxy: ++ * @ice: a #GstWebRTCICE ++ * @uri: (transfer none): URI of the HTTP proxy of the form ++ * http://[username:password@]hostname[:port] ++ * ++ * Set HTTP Proxy to be used when connecting to TURN server. ++ * ++ * Since: 1.22 ++ */ ++ void (*set_http_proxy) (GstWebRTCICE * ice, ++ const gchar * uri); ++ ++ /** ++ * GstWebRTCICEClass::get_http_proxy: ++ * @ice: a #GstWebRTCICE ++ * ++ * Get HTTP Proxy to be used when connecting to TURN server. 
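 *
 * [Editorial aside, not part of the patch] This class structure is the
 * contract an alternative ICE backend fills in; the gst_webrtc_ice_*
 * wrappers in ice.c g_assert() that the corresponding vfunc is set before
 * dispatching to it. A minimal, hypothetical subclass sketch (the MyIce*
 * names are illustrative, GObject boilerplate omitted):
 *
 *   static void
 *   my_ice_class_init (MyIceClass * klass)
 *   {
 *     GstWebRTCICEClass *ice_class = GST_WEBRTC_ICE_CLASS (klass);
 *
 *     ice_class->add_stream = my_ice_add_stream;
 *     ice_class->find_transport = my_ice_find_transport;
 *     ice_class->gather_candidates = my_ice_gather_candidates;
 *     ice_class->add_candidate = my_ice_add_candidate;
 *   }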
++ * ++ * Returns: (transfer full): URI of the HTTP proxy of the form ++ * http://[username:password@]hostname[:port] ++ * ++ * Since: 1.22 ++ */ ++ gchar * (*get_http_proxy) (GstWebRTCICE * ice); ++ ++ void (*set_tos) (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, ++ guint tos); ++ void (*set_on_ice_candidate) (GstWebRTCICE * ice, ++ GstWebRTCICEOnCandidateFunc func, ++ gpointer user_data, ++ GDestroyNotify notify); ++ GstWebRTCICECandidateStats** (*get_local_candidates)(GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream); ++ GstWebRTCICECandidateStats**(*get_remote_candidates)(GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream); ++ gboolean (*get_selected_pair) (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, ++ GstWebRTCICECandidateStats ** local_stats, ++ GstWebRTCICECandidateStats ** remote_stats); ++ gpointer _gst_reserved[GST_PADDING]; ++}; ++ ++GST_WEBRTC_API ++GstWebRTCICEStream * gst_webrtc_ice_add_stream (GstWebRTCICE * ice, ++ guint session_id); ++ ++GST_WEBRTC_API ++GstWebRTCICETransport * gst_webrtc_ice_find_transport (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, ++ GstWebRTCICEComponent component); ++ ++ ++GST_WEBRTC_API ++gboolean gst_webrtc_ice_gather_candidates (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream); ++ ++/* FIXME: GstStructure-ize the candidate */ ++GST_WEBRTC_API ++void gst_webrtc_ice_add_candidate (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, ++ const gchar * candidate); ++ ++GST_WEBRTC_API ++gboolean gst_webrtc_ice_set_local_credentials (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, ++ const gchar * ufrag, ++ const gchar * pwd); ++ ++GST_WEBRTC_API ++gboolean gst_webrtc_ice_set_remote_credentials (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, ++ const gchar * ufrag, ++ const gchar * pwd); ++ ++GST_WEBRTC_API ++gboolean gst_webrtc_ice_add_turn_server (GstWebRTCICE * ice, ++ const gchar * uri); ++ ++ ++GST_WEBRTC_API ++void gst_webrtc_ice_set_is_controller (GstWebRTCICE * ice, ++ gboolean controller); ++ ++GST_WEBRTC_API ++gboolean gst_webrtc_ice_get_is_controller (GstWebRTCICE * ice); ++ ++GST_WEBRTC_API ++void gst_webrtc_ice_set_force_relay (GstWebRTCICE * ice, ++ gboolean force_relay); ++ ++GST_WEBRTC_API ++void gst_webrtc_ice_set_stun_server (GstWebRTCICE * ice, ++ const gchar * uri); ++ ++GST_WEBRTC_API ++gchar * gst_webrtc_ice_get_stun_server (GstWebRTCICE * ice); ++ ++GST_WEBRTC_API ++void gst_webrtc_ice_set_turn_server (GstWebRTCICE * ice, ++ const gchar * uri); ++ ++GST_WEBRTC_API ++gchar * gst_webrtc_ice_get_turn_server (GstWebRTCICE * ice); ++ ++GST_WEBRTC_API ++void gst_webrtc_ice_set_http_proxy (GstWebRTCICE * ice, ++ const gchar * uri); ++ ++GST_WEBRTC_API ++gchar * gst_webrtc_ice_get_http_proxy (GstWebRTCICE * ice); ++ ++GST_WEBRTC_API ++void gst_webrtc_ice_set_on_ice_candidate (GstWebRTCICE * ice, ++ GstWebRTCICEOnCandidateFunc func, ++ gpointer user_data, ++ GDestroyNotify notify); ++ ++GST_WEBRTC_API ++void gst_webrtc_ice_set_tos (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, ++ guint tos); ++ ++GST_WEBRTC_API ++GstWebRTCICECandidateStats** gst_webrtc_ice_get_local_candidates (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream); ++ ++GST_WEBRTC_API ++GstWebRTCICECandidateStats** gst_webrtc_ice_get_remote_candidates (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream); ++ ++GST_WEBRTC_API ++gboolean gst_webrtc_ice_get_selected_pair (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, ++ GstWebRTCICECandidateStats ** local_stats, ++ GstWebRTCICECandidateStats ** remote_stats); ++ ++GST_WEBRTC_API 
++void gst_webrtc_ice_candidate_stats_free (GstWebRTCICECandidateStats * stats); ++ ++GST_WEBRTC_API ++GType gst_webrtc_ice_candidate_stats_get_type (void); ++ ++G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstWebRTCICE, gst_object_unref) ++ ++GST_WEBRTC_API ++GstWebRTCICECandidateStats * gst_webrtc_ice_candidate_stats_copy (GstWebRTCICECandidateStats *stats); ++ ++G_END_DECLS ++ ++#endif /* __GST_WEBRTC_ICE_H__ */ +diff --git a/gst-libs/gst/webrtc/icestream.c b/gst-libs/gst/webrtc/icestream.c +new file mode 100644 +index 000000000..4d0055f52 +--- /dev/null ++++ b/gst-libs/gst/webrtc/icestream.c +@@ -0,0 +1,137 @@ ++/* GStreamer ++ * Copyright (C) 2017 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++/** ++ * SECTION: icestream ++ * @short_description: IceStream object ++ * @title: GstIceStream ++ * @symbols: ++ * - GstWebRTCICEStream ++ */ ++ ++#ifdef HAVE_CONFIG_H ++# include "config.h" ++#endif ++ ++#include "icestream.h" ++ ++#include "webrtc-priv.h" ++ ++#define GST_CAT_DEFAULT gst_webrtc_ice_stream_debug ++GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); ++ ++enum ++{ ++ PROP_0, ++ PROP_STREAM_ID, ++}; ++ ++#define gst_webrtc_ice_stream_parent_class parent_class ++G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstWebRTCICEStream, gst_webrtc_ice_stream, ++ GST_TYPE_OBJECT, GST_DEBUG_CATEGORY_INIT (gst_webrtc_ice_stream_debug, ++ "webrtcicestream", 0, "webrtcicestream");); ++ ++static void ++gst_webrtc_ice_stream_set_property (GObject * object, guint prop_id, ++ const GValue * value, GParamSpec * pspec) ++{ ++ GstWebRTCICEStream *stream = GST_WEBRTC_ICE_STREAM (object); ++ ++ switch (prop_id) { ++ case PROP_STREAM_ID: ++ stream->stream_id = g_value_get_uint (value); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_webrtc_ice_stream_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec) ++{ ++ GstWebRTCICEStream *stream = GST_WEBRTC_ICE_STREAM (object); ++ ++ switch (prop_id) { ++ case PROP_STREAM_ID: ++ g_value_set_uint (value, stream->stream_id); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++/** ++ * gst_webrtc_ice_stream_find_transport: ++ * @stream: the #GstWebRTCICEStream ++ * @component: The #GstWebRTCICEComponent ++ * ++ * Returns: (transfer full) (nullable): the #GstWebRTCICETransport, or %NULL ++ * Since: 1.22 ++ */ ++GstWebRTCICETransport * ++gst_webrtc_ice_stream_find_transport (GstWebRTCICEStream * stream, ++ GstWebRTCICEComponent component) ++{ ++ g_return_val_if_fail (GST_IS_WEBRTC_ICE_STREAM (stream), NULL); ++ g_assert (GST_WEBRTC_ICE_STREAM_GET_CLASS (stream)->find_transport); ++ ++ return GST_WEBRTC_ICE_STREAM_GET_CLASS (stream)->find_transport (stream, ++ component); ++} ++ ++/** ++ * 
gst_webrtc_ice_stream_gather_candidates: ++ * @ice: the #GstWebRTCICEStream ++ * Returns: FALSE on error, TRUE otherwise ++ * Since: 1.22 ++ */ ++gboolean ++gst_webrtc_ice_stream_gather_candidates (GstWebRTCICEStream * stream) ++{ ++ g_return_val_if_fail (GST_IS_WEBRTC_ICE_STREAM (stream), FALSE); ++ g_assert (GST_WEBRTC_ICE_STREAM_GET_CLASS (stream)->gather_candidates); ++ ++ return GST_WEBRTC_ICE_STREAM_GET_CLASS (stream)->gather_candidates (stream); ++} ++ ++static void ++gst_webrtc_ice_stream_class_init (GstWebRTCICEStreamClass * klass) ++{ ++ GObjectClass *gobject_class = (GObjectClass *) klass; ++ ++ klass->find_transport = NULL; ++ klass->gather_candidates = NULL; ++ ++ gobject_class->get_property = gst_webrtc_ice_stream_get_property; ++ gobject_class->set_property = gst_webrtc_ice_stream_set_property; ++ ++ g_object_class_install_property (gobject_class, ++ PROP_STREAM_ID, ++ g_param_spec_uint ("stream-id", ++ "ICE stream id", "ICE stream id associated with this stream", ++ 0, G_MAXUINT, 0, ++ G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS)); ++} ++ ++static void ++gst_webrtc_ice_stream_init (GstWebRTCICEStream * stream) ++{ ++} +diff --git a/gst-libs/gst/webrtc/icestream.h b/gst-libs/gst/webrtc/icestream.h +new file mode 100644 +index 000000000..361d0b76c +--- /dev/null ++++ b/gst-libs/gst/webrtc/icestream.h +@@ -0,0 +1,61 @@ ++/* GStreamer ++ * Copyright (C) 2017 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++#ifndef __GST_WEBRTC_ICE_STREAM_H__ ++#define __GST_WEBRTC_ICE_STREAM_H__ ++ ++#include "ice.h" ++ ++G_BEGIN_DECLS ++ ++GST_WEBRTC_API ++GType gst_webrtc_ice_stream_get_type(void); ++#define GST_TYPE_WEBRTC_ICE_STREAM (gst_webrtc_ice_stream_get_type()) ++#define GST_WEBRTC_ICE_STREAM(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WEBRTC_ICE_STREAM,GstWebRTCICEStream)) ++#define GST_IS_WEBRTC_ICE_STREAM(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WEBRTC_ICE_STREAM)) ++#define GST_WEBRTC_ICE_STREAM_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_WEBRTC_ICE_STREAM,GstWebRTCICEStreamClass)) ++#define GST_IS_WEBRTC_ICE_STREAM_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_ICE_STREAM)) ++#define GST_WEBRTC_ICE_STREAM_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_ICE_STREAM,GstWebRTCICEStreamClass)) ++ ++struct _GstWebRTCICEStream ++{ ++ GstObject parent; ++ guint stream_id; ++}; ++ ++struct _GstWebRTCICEStreamClass ++{ ++ GstObjectClass parent_class; ++ GstWebRTCICETransport * (*find_transport) (GstWebRTCICEStream * stream, ++ GstWebRTCICEComponent component); ++ gboolean (*gather_candidates) (GstWebRTCICEStream * ice); ++}; ++ ++ ++GST_WEBRTC_API ++GstWebRTCICETransport * gst_webrtc_ice_stream_find_transport (GstWebRTCICEStream * stream, ++ GstWebRTCICEComponent component); ++GST_WEBRTC_API ++gboolean gst_webrtc_ice_stream_gather_candidates (GstWebRTCICEStream * ice); ++ ++G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstWebRTCICEStream, gst_object_unref) ++ ++G_END_DECLS ++ ++#endif /* __GST_WEBRTC_ICE_STREAM_H__ */ +diff --git a/gst-libs/gst/webrtc/icetransport.c b/gst-libs/gst/webrtc/icetransport.c +index 21e2cbe9b..30040c6ac 100644 +--- a/gst-libs/gst/webrtc/icetransport.c ++++ b/gst-libs/gst/webrtc/icetransport.c +@@ -22,8 +22,10 @@ + * @short_description: RTCIceTransport object + * @title: GstWebRTCICETransport + * @see_also: #GstWebRTCRTPSender, #GstWebRTCRTPReceiver, #GstWebRTCDTLSTransport ++ * @symbols: ++ * - GstWebRTCICETransport + * +- * ++ * See the [specification](https://www.w3.org/TR/webrtc/#rtcicetransport) + */ + + #ifdef HAVE_CONFIG_H +@@ -33,6 +35,8 @@ + #include "icetransport.h" + #include "webrtc-enumtypes.h" + ++#include "webrtc-priv.h" ++ + #define GST_CAT_DEFAULT gst_webrtc_ice_transport_debug + GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); + +@@ -92,7 +96,7 @@ gst_webrtc_ice_transport_selected_pair_change (GstWebRTCICETransport * ice) + + void + gst_webrtc_ice_transport_new_candidate (GstWebRTCICETransport * ice, +- guint stream_id, GstWebRTCICEComponent component, gchar * attr) ++ guint stream_id, GstWebRTCICEComponent component, const gchar * attr) + { + g_signal_emit (ice, gst_webrtc_ice_transport_signals[ON_NEW_CANDIDATE_SIGNAL], + stream_id, component, attr); +diff --git a/gst-libs/gst/webrtc/icetransport.h b/gst-libs/gst/webrtc/icetransport.h +index c1e56d41e..d605d63ff 100644 +--- a/gst-libs/gst/webrtc/icetransport.h ++++ b/gst-libs/gst/webrtc/icetransport.h +@@ -20,7 +20,6 @@ + #ifndef __GST_WEBRTC_ICE_TRANSPORT_H__ + #define __GST_WEBRTC_ICE_TRANSPORT_H__ + +-#include + #include + + G_BEGIN_DECLS +@@ -33,14 +32,10 @@ GType gst_webrtc_ice_transport_get_type(void); + #define GST_WEBRTC_ICE_TRANSPORT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_WEBRTC_ICE_TRANSPORT,GstWebRTCICETransportClass)) + #define GST_IS_WEBRTC_ICE_TRANSPORT_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_ICE_TRANSPORT)) + #define GST_WEBRTC_ICE_TRANSPORT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) 
,GST_TYPE_WEBRTC_ICE_TRANSPORT,GstWebRTCICETransportClass)) +- +-/** +- * GstWebRTCICETransport: +- */ + struct _GstWebRTCICETransport + { + GstObject parent; +- ++ /* */ + GstWebRTCICERole role; + GstWebRTCICEComponent component; + +@@ -72,7 +67,7 @@ void gst_webrtc_ice_transport_gathering_state_change (GstWebRTCIC + GST_WEBRTC_API + void gst_webrtc_ice_transport_selected_pair_change (GstWebRTCICETransport * ice); + GST_WEBRTC_API +-void gst_webrtc_ice_transport_new_candidate (GstWebRTCICETransport * ice, guint stream_id, GstWebRTCICEComponent component, gchar * attr); ++void gst_webrtc_ice_transport_new_candidate (GstWebRTCICETransport * ice, guint stream_id, GstWebRTCICEComponent component, const gchar * attr); + + G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstWebRTCICETransport, gst_object_unref) + +diff --git a/gst-libs/gst/webrtc/meson.build b/gst-libs/gst/webrtc/meson.build +index 981083c2e..5614d4cf4 100644 +--- a/gst-libs/gst/webrtc/meson.build ++++ b/gst-libs/gst/webrtc/meson.build +@@ -1,15 +1,21 @@ +-webrtc_sources = [ ++webrtc_sources = files([ + 'dtlstransport.c', ++ 'ice.c', ++ 'icestream.c', + 'icetransport.c', + 'rtcsessiondescription.c', + 'rtpreceiver.c', + 'rtpsender.c', + 'rtptransceiver.c', + 'datachannel.c', +-] ++ 'sctptransport.c', ++ 'webrtc.c', ++]) + +-webrtc_headers = [ ++webrtc_headers = files([ + 'dtlstransport.h', ++ 'ice.h', ++ 'icestream.h', + 'icetransport.h', + 'rtcsessiondescription.h', + 'rtpreceiver.h', +@@ -18,14 +24,17 @@ webrtc_headers = [ + 'datachannel.h', + 'webrtc_fwd.h', + 'webrtc.h', +-] ++ 'sctptransport.h', ++]) + +-webrtc_enumtypes_headers = [ ++webrtc_enumtypes_headers = files([ + 'dtlstransport.h', ++ 'ice.h', ++ 'icestream.h', + 'icetransport.h', + 'rtptransceiver.h', + 'webrtc_fwd.h', +-] ++]) + + webrtc_enums = gnome.mkenums_simple('webrtc-enumtypes', + sources : webrtc_enumtypes_headers, +@@ -44,7 +53,7 @@ gstwebrtc_dependencies = [gstbase_dep, gstsdp_dep] + + gstwebrtc = library('gstwebrtc-' + api_version, + webrtc_sources, gstwebrtc_c, gstwebrtc_h, +- c_args : gst_plugins_bad_args + ['-DGST_USE_UNSTABLE_API', '-DBUILDING_GST_WEBRTC'], ++ c_args : gst_plugins_bad_args + ['-DGST_USE_UNSTABLE_API', '-DBUILDING_GST_WEBRTC', '-DG_LOG_DOMAIN="GStreamer-WebRTC"'], + include_directories : [configinc, libsinc], + version : libversion, + soversion : soversion, +@@ -75,3 +84,6 @@ gstwebrtc_dep = declare_dependency(link_with: gstwebrtc, + include_directories : libsinc, + sources: webrtc_gen_sources, + dependencies: gstwebrtc_dependencies) ++ ++ ++subdir('nice') +\ No newline at end of file +diff --git a/gst-libs/gst/webrtc/nice/meson.build b/gst-libs/gst/webrtc/nice/meson.build +new file mode 100644 +index 000000000..007e7b23b +--- /dev/null ++++ b/gst-libs/gst/webrtc/nice/meson.build +@@ -0,0 +1,48 @@ ++libgstwebrtcnice_sources = files([ ++ 'nice.c', ++ 'nicestream.c', ++ 'nicetransport.c', ++]) ++ ++libgstwebrtcnice_headers = files([ ++ 'nice_fwd.h', ++ 'nice.h', ++ 'nicestream.h', ++ 'nicetransport.h', ++]) ++ ++libgstwebrtcnice_dep = dependency('', required : false) ++ ++libnice_dep = dependency('nice', version : '>=0.1.20', required : get_option('webrtc'), ++ fallback : ['libnice', 'libnice_dep'], ++ default_options: ['tests=disabled']) ++ ++deps = [gstwebrtc_dep, libnice_dep] ++ ++if libnice_dep.found() ++ libnice_version = libnice_dep.version() ++ libnice_c_args = [] ++ libgstwebrtcnice = library('gstwebrtcnice-' + api_version, ++ libgstwebrtcnice_sources, libgstwebrtcnice_headers, ++ c_args : gst_plugins_bad_args + ['-DGST_USE_UNSTABLE_API', 
'-DBUILDING_GST_WEBRTCNICE', '-DG_LOG_DOMAIN="GStreamer-webrtcnice"'] + libnice_c_args, ++ include_directories: [configinc], ++ version : libversion, ++ soversion : soversion, ++ darwin_versions : osxversion, ++ dependencies: deps, ++ install: true, ++ ) ++ ++ pkg_name = 'gstreamer-webrtc-nice-1.0' ++ pkgconfig.generate(libgstwebrtcnice, ++ libraries : [deps], ++ name : pkg_name, ++ description : 'libnice based implementaion for GstWebRTCICE', ++ ) ++ ++ libgstwebrtcnice_dep = declare_dependency(link_with: libgstwebrtcnice, ++ dependencies: [deps]) ++ ++ install_headers(libgstwebrtcnice_headers, subdir : 'gstreamer-1.0/gst/webrtc/nice') ++ meson.override_dependency(pkg_name, libgstwebrtcnice_dep) ++endif +diff --git a/gst-libs/gst/webrtc/nice/nice.c b/gst-libs/gst/webrtc/nice/nice.c +new file mode 100644 +index 000000000..d7feae194 +--- /dev/null ++++ b/gst-libs/gst/webrtc/nice/nice.c +@@ -0,0 +1,1677 @@ ++/* GStreamer ++ * Copyright (C) 2017 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifdef HAVE_CONFIG_H ++# include "config.h" ++#endif ++ ++#include "nice.h" ++#include "nicestream.h" ++/* libnice */ ++#include ++ ++#define HTTP_PROXY_PORT_DEFAULT 3128 ++ ++/* XXX: ++ * ++ * - are locally generated remote candidates meant to be readded to libnice? 
++ */ ++ ++static GstUri *_validate_turn_server (GstWebRTCNice * ice, const gchar * s); ++ ++#define GST_CAT_DEFAULT gst_webrtc_nice_debug ++GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); ++ ++enum ++{ ++ PROP_0, ++ PROP_AGENT, ++ PROP_ICE_TCP, ++ PROP_ICE_UDP, ++ PROP_MIN_RTP_PORT, ++ PROP_MAX_RTP_PORT, ++}; ++ ++struct _GstWebRTCNicePrivate ++{ ++ NiceAgent *nice_agent; ++ ++ GArray *nice_stream_map; ++ ++ GThread *thread; ++ GMainContext *main_context; ++ GMainLoop *loop; ++ GMutex lock; ++ GCond cond; ++ ++ GstWebRTCICEOnCandidateFunc on_candidate; ++ gpointer on_candidate_data; ++ GDestroyNotify on_candidate_notify; ++ ++ GstUri *stun_server; ++ GstUri *turn_server; ++ ++ GHashTable *turn_servers; ++ ++ GstUri *http_proxy; ++}; ++ ++#define gst_webrtc_nice_parent_class parent_class ++G_DEFINE_TYPE_WITH_CODE (GstWebRTCNice, gst_webrtc_nice, ++ GST_TYPE_WEBRTC_ICE, G_ADD_PRIVATE (GstWebRTCNice) ++ GST_DEBUG_CATEGORY_INIT (gst_webrtc_nice_debug, "webrtcnice", 0, ++ "webrtcnice");); ++ ++static gboolean ++_unlock_pc_thread (GMutex * lock) ++{ ++ g_mutex_unlock (lock); ++ return G_SOURCE_REMOVE; ++} ++ ++static gpointer ++_gst_nice_thread (GstWebRTCNice * ice) ++{ ++ g_mutex_lock (&ice->priv->lock); ++ ice->priv->main_context = g_main_context_new (); ++ ice->priv->loop = g_main_loop_new (ice->priv->main_context, FALSE); ++ ++ g_cond_broadcast (&ice->priv->cond); ++ g_main_context_invoke (ice->priv->main_context, ++ (GSourceFunc) _unlock_pc_thread, &ice->priv->lock); ++ ++ g_main_loop_run (ice->priv->loop); ++ ++ g_mutex_lock (&ice->priv->lock); ++ g_main_context_unref (ice->priv->main_context); ++ ice->priv->main_context = NULL; ++ g_main_loop_unref (ice->priv->loop); ++ ice->priv->loop = NULL; ++ g_cond_broadcast (&ice->priv->cond); ++ g_mutex_unlock (&ice->priv->lock); ++ ++ return NULL; ++} ++ ++static void ++_start_thread (GstWebRTCNice * ice) ++{ ++ g_mutex_lock (&ice->priv->lock); ++ ice->priv->thread = g_thread_new (GST_OBJECT_NAME (ice), ++ (GThreadFunc) _gst_nice_thread, ice); ++ ++ while (!ice->priv->loop) ++ g_cond_wait (&ice->priv->cond, &ice->priv->lock); ++ g_mutex_unlock (&ice->priv->lock); ++} ++ ++static void ++_stop_thread (GstWebRTCNice * ice) ++{ ++ g_mutex_lock (&ice->priv->lock); ++ g_main_loop_quit (ice->priv->loop); ++ while (ice->priv->loop) ++ g_cond_wait (&ice->priv->cond, &ice->priv->lock); ++ g_mutex_unlock (&ice->priv->lock); ++ ++ g_thread_unref (ice->priv->thread); ++} ++ ++struct NiceStreamItem ++{ ++ guint session_id; ++ guint nice_stream_id; ++ GstWebRTCICEStream *stream; ++}; ++ ++/* TRUE to continue, FALSE to stop */ ++typedef gboolean (*NiceStreamItemForeachFunc) (struct NiceStreamItem * item, ++ gpointer user_data); ++ ++static void ++_nice_stream_item_foreach (GstWebRTCNice * ice, NiceStreamItemForeachFunc func, ++ gpointer data) ++{ ++ int i, len; ++ ++ len = ice->priv->nice_stream_map->len; ++ for (i = 0; i < len; i++) { ++ struct NiceStreamItem *item = ++ &g_array_index (ice->priv->nice_stream_map, struct NiceStreamItem, ++ i); ++ ++ if (!func (item, data)) ++ break; ++ } ++} ++ ++/* TRUE for match, FALSE otherwise */ ++typedef gboolean (*NiceStreamItemFindFunc) (struct NiceStreamItem * item, ++ gpointer user_data); ++ ++struct nice_find ++{ ++ NiceStreamItemFindFunc func; ++ gpointer data; ++ struct NiceStreamItem *ret; ++}; ++ ++static gboolean ++_find_nice_item (struct NiceStreamItem *item, gpointer user_data) ++{ ++ struct nice_find *f = user_data; ++ if (f->func (item, f->data)) { ++ f->ret = item; ++ return FALSE; ++ } ++ return TRUE; ++} ++ 
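/* [Editorial aside, not part of the patch] The helpers above and below form a
 * small template-match lookup over the (session_id, nice_stream_id, stream)
 * triples kept in nice_stream_map: a field left at -1 (or NULL for the
 * stream pointer) acts as a wildcard. Later code in this file relies on
 * that, e.g. looking up an item by its libnice stream id only:
 *
 *   item = _find_item (ice, -1, candidate->stream_id, NULL);
 *
 * or by its GstWebRTCICEStream only:
 *
 *   item = _find_item (nice, -1, -1, stream);
 */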
++static struct NiceStreamItem * ++_nice_stream_item_find (GstWebRTCNice * ice, NiceStreamItemFindFunc func, ++ gpointer data) ++{ ++ struct nice_find f; ++ ++ f.func = func; ++ f.data = data; ++ f.ret = NULL; ++ ++ _nice_stream_item_foreach (ice, _find_nice_item, &f); ++ ++ return f.ret; ++} ++ ++#define NICE_MATCH_INIT { -1, -1, NULL } ++ ++static gboolean ++_match (struct NiceStreamItem *item, struct NiceStreamItem *m) ++{ ++ if (m->session_id != -1 && m->session_id != item->session_id) ++ return FALSE; ++ if (m->nice_stream_id != -1 && m->nice_stream_id != item->nice_stream_id) ++ return FALSE; ++ if (m->stream != NULL && m->stream != item->stream) ++ return FALSE; ++ ++ return TRUE; ++} ++ ++static struct NiceStreamItem * ++_find_item (GstWebRTCNice * ice, guint session_id, guint nice_stream_id, ++ GstWebRTCICEStream * stream) ++{ ++ struct NiceStreamItem m = NICE_MATCH_INIT; ++ ++ m.session_id = session_id; ++ m.nice_stream_id = nice_stream_id; ++ m.stream = stream; ++ ++ return _nice_stream_item_find (ice, (NiceStreamItemFindFunc) _match, &m); ++} ++ ++static struct NiceStreamItem * ++_create_nice_stream_item (GstWebRTCNice * ice, guint session_id) ++{ ++ struct NiceStreamItem item; ++ ++ item.session_id = session_id; ++ item.nice_stream_id = nice_agent_add_stream (ice->priv->nice_agent, 1); ++ item.stream = ++ GST_WEBRTC_ICE_STREAM (gst_webrtc_nice_stream_new (GST_WEBRTC_ICE (ice), ++ item.nice_stream_id) ++ ); ++ ++ g_array_append_val (ice->priv->nice_stream_map, item); ++ ++ return _find_item (ice, item.session_id, item.nice_stream_id, item.stream); ++} ++ ++static void ++_parse_userinfo (const gchar * userinfo, gchar ** user, gchar ** pass) ++{ ++ const gchar *colon; ++ ++ if (!userinfo) { ++ *user = NULL; ++ *pass = NULL; ++ return; ++ } ++ ++ colon = g_strstr_len (userinfo, -1, ":"); ++ if (!colon) { ++ *user = g_uri_unescape_string (userinfo, NULL); ++ *pass = NULL; ++ return; ++ } ++ ++ /* Check that the first occurence is also the last occurence */ ++ if (colon != g_strrstr (userinfo, ":")) ++ GST_WARNING ("userinfo %s contains more than one ':', will assume that the " ++ "first ':' delineates user:pass. 
You should escape the user and pass " ++ "before adding to the URI.", userinfo); ++ ++ *user = g_uri_unescape_segment (userinfo, colon, NULL); ++ *pass = g_uri_unescape_string (&colon[1], NULL); ++} ++ ++struct resolve_host_data ++{ ++ GstWebRTCNice *ice; ++ char *host; ++ gboolean main_context_handled; ++ gpointer user_data; ++ GDestroyNotify notify; ++}; ++ ++static void ++on_resolve_host (GResolver * resolver, GAsyncResult * res, gpointer user_data) ++{ ++ GTask *task = user_data; ++ struct resolve_host_data *rh; ++ GError *error = NULL; ++ GList *addresses; ++ ++ rh = g_task_get_task_data (task); ++ ++ if (!(addresses = g_resolver_lookup_by_name_finish (resolver, res, &error))) { ++ GST_ERROR ("failed to resolve: %s", error->message); ++ g_task_return_error (task, error); ++ g_object_unref (task); ++ return; ++ } ++ ++ GST_DEBUG_OBJECT (rh->ice, "Resolved %d addresses for host %s with data %p", ++ g_list_length (addresses), rh->host, rh); ++ ++ g_task_return_pointer (task, addresses, ++ (GDestroyNotify) g_resolver_free_addresses); ++ g_object_unref (task); ++} ++ ++static void ++free_resolve_host_data (struct resolve_host_data *rh) ++{ ++ GST_TRACE_OBJECT (rh->ice, "Freeing data %p for resolving host %s", rh, ++ rh->host); ++ ++ if (rh->notify) ++ rh->notify (rh->user_data); ++ ++ g_free (rh->host); ++ g_free (rh); ++} ++ ++static struct resolve_host_data * ++resolve_host_data_new (GstWebRTCNice * ice, const char *host) ++{ ++ struct resolve_host_data *rh = g_new0 (struct resolve_host_data, 1); ++ ++ rh->ice = ice; ++ rh->host = g_strdup (host); ++ ++ return rh; ++} ++ ++static gboolean ++resolve_host_main_cb (gpointer user_data) ++{ ++ GResolver *resolver = g_resolver_get_default (); ++ GTask *task = user_data; ++ struct resolve_host_data *rh; ++ ++ rh = g_task_get_task_data (task); ++ /* no need to error anymore if the main context disappears and this task is ++ * not run */ ++ rh->main_context_handled = TRUE; ++ ++ GST_DEBUG_OBJECT (rh->ice, "Resolving host %s", rh->host); ++ g_resolver_lookup_by_name_async (resolver, rh->host, NULL, ++ (GAsyncReadyCallback) on_resolve_host, g_object_ref (task)); ++ ++ return G_SOURCE_REMOVE; ++} ++ ++static void ++error_task_if_unhandled (GTask * task) ++{ ++ struct resolve_host_data *rh; ++ ++ rh = g_task_get_task_data (task); ++ ++ if (!rh->main_context_handled) { ++ GST_DEBUG_OBJECT (rh->ice, "host resolve for %s with data %p was never " ++ "executed, main context quit?", rh->host, rh); ++ g_task_return_new_error (task, G_IO_ERROR, G_IO_ERROR_CANCELLED, "%s", ++ "Cancelled"); ++ } ++ ++ g_object_unref (task); ++} ++ ++static void ++resolve_host_async (GstWebRTCNice * ice, const gchar * host, ++ GAsyncReadyCallback cb, gpointer user_data, GDestroyNotify notify) ++{ ++ struct resolve_host_data *rh = resolve_host_data_new (ice, host); ++ GTask *task; ++ ++ rh->user_data = user_data; ++ rh->notify = notify; ++ task = g_task_new (rh->ice, NULL, cb, user_data); ++ ++ g_task_set_task_data (task, rh, (GDestroyNotify) free_resolve_host_data); ++ ++ GST_TRACE_OBJECT (rh->ice, "invoking main context for resolving host %s " ++ "with data %p", host, rh); ++ g_main_context_invoke_full (ice->priv->main_context, G_PRIORITY_DEFAULT, ++ resolve_host_main_cb, task, (GDestroyNotify) error_task_if_unhandled); ++} ++ ++static GList * ++resolve_host_finish (GstWebRTCNice * ice, GAsyncResult * res, GError ** error) ++{ ++ g_return_val_if_fail (g_task_is_valid (res, ice), NULL); ++ ++ return g_task_propagate_pointer (G_TASK (res), error); ++} ++ ++static void 
++_add_turn_server (GstWebRTCNice * ice, struct NiceStreamItem *item,
++    GstUri * turn_server)
++{
++  GstWebRTCNice *nice = GST_WEBRTC_NICE (ice);
++  const gchar *host;
++  NiceRelayType relays[4] = { 0, };
++  gchar *user, *pass;
++  const gchar *userinfo, *transport, *scheme;
++  int i, relay_n = 0;
++
++  host = gst_uri_get_host (turn_server);
++  if (!host) {
++    GST_ERROR_OBJECT (ice, "Turn server has no host");
++    return;
++  }
++
++  scheme = gst_uri_get_scheme (turn_server);
++  transport = gst_uri_get_query_value (turn_server, "transport");
++  userinfo = gst_uri_get_userinfo (turn_server);
++  _parse_userinfo (userinfo, &user, &pass);
++
++  if (g_strcmp0 (scheme, "turns") == 0) {
++    relays[relay_n++] = NICE_RELAY_TYPE_TURN_TLS;
++  } else if (g_strcmp0 (scheme, "turn") == 0) {
++    if (!transport || g_strcmp0 (transport, "udp") == 0)
++      relays[relay_n++] = NICE_RELAY_TYPE_TURN_UDP;
++    if (!transport || g_strcmp0 (transport, "tcp") == 0)
++      relays[relay_n++] = NICE_RELAY_TYPE_TURN_TCP;
++  }
++  g_assert (relay_n < G_N_ELEMENTS (relays));
++
++  for (i = 0; i < relay_n; i++) {
++    if (!nice_agent_set_relay_info (nice->priv->nice_agent,
++            item->nice_stream_id, NICE_COMPONENT_TYPE_RTP,
++            gst_uri_get_host (turn_server), gst_uri_get_port (turn_server),
++            user, pass, relays[i])) {
++      gchar *uri_str = gst_uri_to_string (turn_server);
++      GST_ERROR_OBJECT (ice, "Could not set TURN server %s on libnice",
++          uri_str);
++      g_free (uri_str);
++    }
++  }
++
++  g_free (user);
++  g_free (pass);
++
++}
++
++typedef struct
++{
++  GstWebRTCNice *ice;
++  struct NiceStreamItem *item;
++} AddTurnServerData;
++
++static void
++_add_turn_server_func (const gchar * uri, GstUri * turn_server,
++    AddTurnServerData * data)
++{
++  _add_turn_server (data->ice, data->item, turn_server);
++}
++
++static void
++_add_stun_server (GstWebRTCNice * ice, GstUri * stun_server)
++{
++  const gchar *msg = "must be of the form stun://<host>:<port>";
++  const gchar *host;
++  gchar *s = NULL;
++  guint port;
++
++  s = gst_uri_to_string (stun_server);
++  GST_DEBUG_OBJECT (ice, "adding stun server, %s", s);
++
++  host = gst_uri_get_host (stun_server);
++  if (!host) {
++    GST_ERROR_OBJECT (ice, "Stun server '%s' has no host, %s", s, msg);
++    goto out;
++  }
++
++  port = gst_uri_get_port (stun_server);
++  if (port == GST_URI_NO_PORT) {
++    GST_INFO_OBJECT (ice, "Stun server '%s' has no port, assuming 3478", s);
++    port = 3478;
++    gst_uri_set_port (stun_server, port);
++  }
++
++  g_object_set (ice->priv->nice_agent, "stun-server", host,
++      "stun-server-port", port, NULL);
++
++out:
++  g_free (s);
++}
++
++static GstWebRTCICEStream *
++gst_webrtc_nice_add_stream (GstWebRTCICE * ice, guint session_id)
++{
++  struct NiceStreamItem m = NICE_MATCH_INIT;
++  struct NiceStreamItem *item;
++  AddTurnServerData add_data;
++  GstWebRTCNice *nice = GST_WEBRTC_NICE (ice);
++
++  m.session_id = session_id;
++  item = _nice_stream_item_find (nice, (NiceStreamItemFindFunc) _match, &m);
++  if (item) {
++    GST_ERROR_OBJECT (nice, "stream already added with session_id=%u",
++        session_id);
++    return 0;
++  }
++
++  if (nice->priv->stun_server) {
++    _add_stun_server (nice, nice->priv->stun_server);
++  }
++
++  item = _create_nice_stream_item (nice, session_id);
++
++  if (nice->priv->turn_server) {
++    _add_turn_server (nice, item, nice->priv->turn_server);
++  }
++
++  add_data.ice = nice;
++  add_data.item = item;
++
++  g_hash_table_foreach (nice->priv->turn_servers,
++      (GHFunc) _add_turn_server_func, &add_data);
++
++  return item->stream;
++}
++
++static void
++_on_new_candidate (NiceAgent *
agent, NiceCandidate * candidate, ++ GstWebRTCNice * ice) ++{ ++ struct NiceStreamItem *item; ++ gchar *attr; ++ ++ item = _find_item (ice, -1, candidate->stream_id, NULL); ++ if (!item) { ++ GST_WARNING_OBJECT (ice, "received signal for non-existent stream %u", ++ candidate->stream_id); ++ return; ++ } ++ ++ if (!candidate->username || !candidate->password) { ++ gboolean got_credentials; ++ gchar *ufrag, *password; ++ ++ got_credentials = nice_agent_get_local_credentials (ice->priv->nice_agent, ++ candidate->stream_id, &ufrag, &password); ++ g_warn_if_fail (got_credentials); ++ ++ if (!candidate->username) ++ candidate->username = ufrag; ++ else ++ g_free (ufrag); ++ ++ if (!candidate->password) ++ candidate->password = password; ++ else ++ g_free (password); ++ } ++ ++ attr = nice_agent_generate_local_candidate_sdp (agent, candidate); ++ ++ if (ice->priv->on_candidate) ++ ice->priv->on_candidate (GST_WEBRTC_ICE (ice), item->session_id, attr, ++ ice->priv->on_candidate_data); ++ ++ g_free (attr); ++} ++ ++static GstWebRTCICETransport * ++gst_webrtc_nice_find_transport (GstWebRTCICE * ice, GstWebRTCICEStream * stream, ++ GstWebRTCICEComponent component) ++{ ++ struct NiceStreamItem *item; ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ ++ item = _find_item (nice, -1, -1, stream); ++ g_return_val_if_fail (item != NULL, NULL); ++ ++ return gst_webrtc_ice_stream_find_transport (item->stream, component); ++} ++ ++#if 0 ++/* TODO don't rely on libnice to (de)serialize candidates */ ++static NiceCandidateType ++_candidate_type_from_string (const gchar * s) ++{ ++ if (g_strcmp0 (s, "host") == 0) { ++ return NICE_CANDIDATE_TYPE_HOST; ++ } else if (g_strcmp0 (s, "srflx") == 0) { ++ return NICE_CANDIDATE_TYPE_SERVER_REFLEXIVE; ++ } else if (g_strcmp0 (s, "prflx") == 0) { /* FIXME: is the right string? */ ++ return NICE_CANDIDATE_TYPE_PEER_REFLEXIVE; ++ } else if (g_strcmp0 (s, "relay") == 0) { ++ return NICE_CANDIDATE_TYPE_RELAY; ++ } else { ++ g_assert_not_reached (); ++ return 0; ++ } ++} ++ ++static const gchar * ++_candidate_type_to_string (NiceCandidateType type) ++{ ++ switch (type) { ++ case NICE_CANDIDATE_TYPE_HOST: ++ return "host"; ++ case NICE_CANDIDATE_TYPE_SERVER_REFLEXIVE: ++ return "srflx"; ++ case NICE_CANDIDATE_TYPE_PEER_REFLEXIVE: ++ return "prflx"; ++ case NICE_CANDIDATE_TYPE_RELAY: ++ return "relay"; ++ default: ++ g_assert_not_reached (); ++ return NULL; ++ } ++} ++ ++static NiceCandidateTransport ++_candidate_transport_from_string (const gchar * s) ++{ ++ if (g_strcmp0 (s, "UDP") == 0) { ++ return NICE_CANDIDATE_TRANSPORT_UDP; ++ } else if (g_strcmp0 (s, "TCP tcptype") == 0) { ++ return NICE_CANDIDATE_TRANSPORT_TCP_ACTIVE; ++ } else if (g_strcmp0 (s, "tcp-passive") == 0) { /* FIXME: is the right string? 
*/ ++ return NICE_CANDIDATE_TRANSPORT_TCP_PASSIVE; ++ } else if (g_strcmp0 (s, "tcp-so") == 0) { ++ return NICE_CANDIDATE_TRANSPORT_TCP_SO; ++ } else { ++ g_assert_not_reached (); ++ return 0; ++ } ++} ++ ++static const gchar * ++_candidate_type_to_string (NiceCandidateType type) ++{ ++ switch (type) { ++ case NICE_CANDIDATE_TYPE_HOST: ++ return "host"; ++ case NICE_CANDIDATE_TYPE_SERVER_REFLEXIVE: ++ return "srflx"; ++ case NICE_CANDIDATE_TYPE_PEER_REFLEXIVE: ++ return "prflx"; ++ case NICE_CANDIDATE_TYPE_RELAY: ++ return "relay"; ++ default: ++ g_assert_not_reached (); ++ return NULL; ++ } ++} ++#endif ++ ++/* parse the address for possible resolution */ ++static gboolean ++get_candidate_address (const gchar * candidate, gchar ** prefix, ++ gchar ** address, gchar ** postfix) ++{ ++ char **tokens = NULL; ++ char *tmp_address = NULL; ++ ++ if (!g_str_has_prefix (candidate, "a=candidate:")) { ++ GST_ERROR ("candidate \"%s\" does not start with \"a=candidate:\"", ++ candidate); ++ goto failure; ++ } ++ ++ if (!(tokens = g_strsplit (candidate, " ", 6))) { ++ GST_ERROR ("candidate \"%s\" could not be tokenized", candidate); ++ goto failure; ++ } ++ ++ if (g_strv_length (tokens) < 6) { ++ GST_ERROR ("candidate \"%s\" tokenization resulted in not enough tokens", ++ candidate); ++ goto failure; ++ } ++ ++ tmp_address = tokens[4]; ++ if (address) ++ *address = g_strdup (tmp_address); ++ tokens[4] = NULL; ++ ++ if (prefix) ++ *prefix = g_strjoinv (" ", tokens); ++ if (postfix) ++ *postfix = g_strdup (tokens[5]); ++ ++ tokens[4] = tmp_address; ++ ++ g_strfreev (tokens); ++ return TRUE; ++ ++failure: ++ if (tokens) ++ g_strfreev (tokens); ++ return FALSE; ++} ++ ++struct resolve_candidate_data ++{ ++ guint nice_stream_id; ++ char *prefix; ++ char *postfix; ++}; ++ ++static void ++free_resolve_candidate_data (struct resolve_candidate_data *rc) ++{ ++ g_free (rc->prefix); ++ g_free (rc->postfix); ++ g_free (rc); ++} ++ ++static void ++add_ice_candidate_to_libnice (GstWebRTCICE * ice, guint nice_stream_id, ++ NiceCandidate * cand) ++{ ++ GSList *candidates = NULL; ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ ++ if (cand->component_id == 2) { ++ /* we only support rtcp-mux so rtcp candidates are useless for us */ ++ GST_INFO_OBJECT (ice, "Dropping RTCP candidate"); ++ return; ++ } ++ ++ candidates = g_slist_append (candidates, cand); ++ ++ nice_agent_set_remote_candidates (nice->priv->nice_agent, nice_stream_id, ++ cand->component_id, candidates); ++ ++ g_slist_free (candidates); ++} ++ ++static void ++on_candidate_resolved (GstWebRTCICE * ice, GAsyncResult * res, ++ gpointer user_data) ++{ ++ struct resolve_candidate_data *rc = user_data; ++ GError *error = NULL; ++ GList *addresses; ++ char *new_candv[4] = { NULL, }; ++ char *new_addr, *new_candidate; ++ NiceCandidate *cand; ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ ++ if (!(addresses = resolve_host_finish (nice, res, &error))) { ++ GST_WARNING_OBJECT (ice, "Could not resolve candidate address: %s", ++ error->message); ++ g_clear_error (&error); ++ return; ++ } ++ ++ new_addr = g_inet_address_to_string (addresses->data); ++ ++ new_candv[0] = rc->prefix; ++ new_candv[1] = new_addr; ++ new_candv[2] = rc->postfix; ++ new_candv[3] = NULL; ++ new_candidate = g_strjoinv (" ", new_candv); ++ ++ GST_DEBUG_OBJECT (ice, "resolved to candidate %s", new_candidate); ++ ++ cand = ++ nice_agent_parse_remote_candidate_sdp (nice->priv->nice_agent, ++ rc->nice_stream_id, new_candidate); ++ g_free (new_candidate); ++ if (!cand) { ++ GST_WARNING_OBJECT 
(ice, "Could not parse candidate \'%s\'", new_candidate); ++ return; ++ } ++ ++ g_free (new_addr); ++ ++ add_ice_candidate_to_libnice (ice, rc->nice_stream_id, cand); ++ nice_candidate_free (cand); ++} ++ ++/* candidate must start with "a=candidate:" or be NULL*/ ++static void ++gst_webrtc_nice_add_candidate (GstWebRTCICE * ice, GstWebRTCICEStream * stream, ++ const gchar * candidate) ++{ ++ struct NiceStreamItem *item; ++ NiceCandidate *cand; ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ ++ item = _find_item (nice, -1, -1, stream); ++ g_return_if_fail (item != NULL); ++ ++ if (candidate == NULL) { ++ nice_agent_peer_candidate_gathering_done (nice->priv->nice_agent, ++ item->nice_stream_id); ++ return; ++ } ++ ++ cand = ++ nice_agent_parse_remote_candidate_sdp (nice->priv->nice_agent, ++ item->nice_stream_id, candidate); ++ if (!cand) { ++ /* might be a .local candidate */ ++ char *prefix = NULL, *address = NULL, *postfix = NULL; ++ struct resolve_candidate_data *rc; ++ ++ if (!get_candidate_address (candidate, &prefix, &address, &postfix)) { ++ GST_WARNING_OBJECT (nice, "Failed to retrieve address from candidate %s", ++ candidate); ++ goto done; ++ } ++ ++ if (!g_str_has_suffix (address, ".local")) { ++ GST_WARNING_OBJECT (nice, "candidate address \'%s\' does not end " ++ "with \'.local\'", address); ++ goto done; ++ } ++ ++ rc = g_new0 (struct resolve_candidate_data, 1); ++ rc->nice_stream_id = item->nice_stream_id; ++ rc->prefix = prefix; ++ rc->postfix = postfix; ++ resolve_host_async (nice, address, ++ (GAsyncReadyCallback) on_candidate_resolved, rc, ++ (GDestroyNotify) free_resolve_candidate_data); ++ ++ prefix = NULL; ++ postfix = NULL; ++ ++ done: ++ g_clear_pointer (&address, g_free); ++ g_clear_pointer (&prefix, g_free); ++ g_clear_pointer (&postfix, g_free); ++ ++ return; ++ } ++ ++ add_ice_candidate_to_libnice (ice, item->nice_stream_id, cand); ++ nice_candidate_free (cand); ++} ++ ++static gboolean ++gst_webrtc_nice_set_remote_credentials (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, const gchar * ufrag, const gchar * pwd) ++{ ++ struct NiceStreamItem *item; ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ ++ g_return_val_if_fail (ufrag != NULL, FALSE); ++ g_return_val_if_fail (pwd != NULL, FALSE); ++ item = _find_item (nice, -1, -1, stream); ++ g_return_val_if_fail (item != NULL, FALSE); ++ ++ GST_DEBUG_OBJECT (nice, "Setting remote ICE credentials on " ++ "ICE stream %u ufrag:%s pwd:%s", item->nice_stream_id, ufrag, pwd); ++ ++ nice_agent_set_remote_credentials (nice->priv->nice_agent, ++ item->nice_stream_id, ufrag, pwd); ++ ++ return TRUE; ++} ++ ++typedef struct ++{ ++ GstWebRTCNice *ice; ++ GstUri *turn_server; ++} AddTurnServerToStreamData; ++ ++static gboolean ++_add_turn_server_foreach_stream_func (struct NiceStreamItem *item, ++ gpointer data) ++{ ++ AddTurnServerToStreamData *add_data = (AddTurnServerToStreamData *) data; ++ _add_turn_server (add_data->ice, item, add_data->turn_server); ++ return TRUE; ++} ++ ++static gboolean ++gst_webrtc_nice_add_turn_server (GstWebRTCICE * ice, const gchar * uri) ++{ ++ gboolean ret = FALSE; ++ GstUri *valid_uri; ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ gboolean inserted; ++ AddTurnServerToStreamData add_data; ++ ++ if (!(valid_uri = _validate_turn_server (nice, uri))) ++ goto done; ++ ++ inserted = ++ g_hash_table_insert (nice->priv->turn_servers, g_strdup (uri), valid_uri); ++ ++ /* add the turn server to any streams that were already created */ ++ if (inserted) { ++ add_data.ice = nice; ++ 
add_data.turn_server = valid_uri; ++ _nice_stream_item_foreach (nice, _add_turn_server_foreach_stream_func, ++ &add_data); ++ } ++ ++ ret = TRUE; ++ ++done: ++ return ret; ++} ++ ++static gboolean ++gst_webrtc_nice_add_local_ip_address (GstWebRTCNice * ice, ++ const gchar * address) ++{ ++ gboolean ret = FALSE; ++ NiceAddress nice_addr; ++ ++ nice_address_init (&nice_addr); ++ ++ ret = nice_address_set_from_string (&nice_addr, address); ++ ++ if (ret) { ++ ret = nice_agent_add_local_address (ice->priv->nice_agent, &nice_addr); ++ if (!ret) { ++ GST_ERROR_OBJECT (ice, "Failed to add local address to NiceAgent"); ++ } ++ } else { ++ GST_ERROR_OBJECT (ice, "Failed to initialize NiceAddress [%s]", address); ++ } ++ ++ return ret; ++} ++ ++static gboolean ++gst_webrtc_nice_set_local_credentials (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, const gchar * ufrag, const gchar * pwd) ++{ ++ struct NiceStreamItem *item; ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ ++ g_return_val_if_fail (ufrag != NULL, FALSE); ++ g_return_val_if_fail (pwd != NULL, FALSE); ++ item = _find_item (nice, -1, -1, stream); ++ g_return_val_if_fail (item != NULL, FALSE); ++ ++ GST_DEBUG_OBJECT (nice, "Setting local ICE credentials on " ++ "ICE stream %u ufrag:%s pwd:%s", item->nice_stream_id, ufrag, pwd); ++ ++ nice_agent_set_local_credentials (nice->priv->nice_agent, ++ item->nice_stream_id, ufrag, pwd); ++ ++ return TRUE; ++} ++ ++static gboolean ++gst_webrtc_nice_gather_candidates (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream) ++{ ++ struct NiceStreamItem *item; ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ ++ item = _find_item (nice, -1, -1, stream); ++ g_return_val_if_fail (item != NULL, FALSE); ++ ++ GST_DEBUG_OBJECT (nice, "gather candidates for stream %u", ++ item->nice_stream_id); ++ ++ return gst_webrtc_ice_stream_gather_candidates (stream); ++} ++ ++static void ++gst_webrtc_nice_set_is_controller (GstWebRTCICE * ice, gboolean controller) ++{ ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ g_object_set (G_OBJECT (nice->priv->nice_agent), "controlling-mode", ++ controller, NULL); ++} ++ ++static gboolean ++gst_webrtc_nice_get_is_controller (GstWebRTCICE * ice) ++{ ++ gboolean ret; ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ g_object_get (G_OBJECT (nice->priv->nice_agent), "controlling-mode", ++ &ret, NULL); ++ return ret; ++} ++ ++static void ++gst_webrtc_nice_set_force_relay (GstWebRTCICE * ice, gboolean force_relay) ++{ ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ g_object_set (G_OBJECT (nice->priv->nice_agent), "force-relay", force_relay, ++ NULL); ++} ++ ++static void ++gst_webrtc_nice_set_on_ice_candidate (GstWebRTCICE * ice, ++ GstWebRTCICEOnCandidateFunc func, gpointer user_data, GDestroyNotify notify) ++{ ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ if (nice->priv->on_candidate_notify) ++ nice->priv->on_candidate_notify (nice->priv->on_candidate_data); ++ nice->priv->on_candidate = NULL; ++ ++ nice->priv->on_candidate = func; ++ nice->priv->on_candidate_data = user_data; ++ nice->priv->on_candidate_notify = notify; ++} ++ ++static void ++gst_webrtc_nice_set_tos (GstWebRTCICE * ice, GstWebRTCICEStream * stream, ++ guint tos) ++{ ++ struct NiceStreamItem *item; ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ ++ item = _find_item (nice, -1, -1, stream); ++ g_return_if_fail (item != NULL); ++ ++ nice_agent_set_stream_tos (nice->priv->nice_agent, item->nice_stream_id, tos); ++} ++ ++static const gchar * ++_relay_type_to_string (GstUri * turn_server) ++{ ++ const 
gchar *scheme; ++ const gchar *transport; ++ ++ if (!turn_server) ++ return "none"; ++ ++ scheme = gst_uri_get_scheme (turn_server); ++ transport = gst_uri_get_query_value (turn_server, "transport"); ++ ++ if (g_strcmp0 (scheme, "turns") == 0) { ++ return "tls"; ++ } else if (g_strcmp0 (scheme, "turn") == 0) { ++ if (!transport || g_strcmp0 (transport, "udp") == 0) ++ return "udp"; ++ if (!transport || g_strcmp0 (transport, "tcp") == 0) ++ return "tcp"; ++ } ++ ++ return "none"; ++} ++ ++static gchar * ++_get_server_url (GstWebRTCNice * ice, NiceCandidate * cand) ++{ ++ switch (cand->type) { ++ case NICE_CANDIDATE_TYPE_RELAYED:{ ++ NiceAddress addr; ++ gchar ipaddr[NICE_ADDRESS_STRING_LEN]; ++ nice_candidate_relay_address (cand, &addr); ++ nice_address_to_string (&addr, ipaddr); ++ return g_strdup (ipaddr); ++ } ++ case NICE_CANDIDATE_TYPE_SERVER_REFLEXIVE:{ ++ NiceAddress addr; ++ gchar ipaddr[NICE_ADDRESS_STRING_LEN]; ++ if (nice_candidate_stun_server_address (cand, &addr)) { ++ nice_address_to_string (&addr, ipaddr); ++ return g_strdup (ipaddr); ++ } else { ++ return g_strdup (gst_uri_get_host (ice->priv->stun_server)); ++ } ++ return g_strdup (gst_uri_get_host (ice->priv->stun_server)); ++ } ++ default: ++ return g_strdup (""); ++ } ++} ++ ++/* TODO: replace it with nice_candidate_type_to_string() ++ * when it's ready for use ++ * https://libnice.freedesktop.org/libnice/NiceCandidate.html#nice-candidate-type-to-string ++ */ ++static const gchar * ++_candidate_type_to_string (NiceCandidateType type) ++{ ++ switch (type) { ++ case NICE_CANDIDATE_TYPE_HOST: ++ return "host"; ++ case NICE_CANDIDATE_TYPE_SERVER_REFLEXIVE: ++ return "srflx"; ++ case NICE_CANDIDATE_TYPE_PEER_REFLEXIVE: ++ return "prflx"; ++ case NICE_CANDIDATE_TYPE_RELAYED: ++ return "relay"; ++ default: ++ g_assert_not_reached (); ++ return NULL; ++ } ++} ++ ++static void ++_populate_candidate_stats (GstWebRTCNice * ice, NiceCandidate * cand, ++ GstWebRTCICEStream * stream, GstWebRTCICECandidateStats * stats, ++ gboolean is_local) ++{ ++ gchar ipaddr[INET6_ADDRSTRLEN]; ++ ++ g_assert (cand != NULL); ++ ++ nice_address_to_string (&cand->addr, ipaddr); ++ stats->port = nice_address_get_port (&cand->addr); ++ stats->ipaddr = g_strdup (ipaddr); ++ stats->stream_id = stream->stream_id; ++ stats->type = _candidate_type_to_string (cand->type); ++ stats->prio = cand->priority; ++ stats->proto = ++ cand->transport == NICE_CANDIDATE_TRANSPORT_UDP ? 
"udp" : "tcp"; ++ if (is_local) { ++ if (cand->type == NICE_CANDIDATE_TYPE_RELAYED) ++ stats->relay_proto = _relay_type_to_string (ice->priv->turn_server); ++ stats->url = _get_server_url (ice, cand); ++ } ++} ++ ++static void ++_populate_candidate_list_stats (GstWebRTCNice * ice, GSList * cands, ++ GstWebRTCICEStream * stream, GPtrArray * result, gboolean is_local) ++{ ++ GSList *item; ++ ++ for (item = cands; item != NULL; item = item->next) { ++ GstWebRTCICECandidateStats *stats = ++ g_malloc0 (sizeof (GstWebRTCICECandidateStats)); ++ NiceCandidate *c = item->data; ++ _populate_candidate_stats (ice, c, stream, stats, is_local); ++ g_ptr_array_add (result, stats); ++ } ++ ++ g_ptr_array_add (result, NULL); ++} ++ ++static GstWebRTCICECandidateStats ** ++gst_webrtc_nice_get_local_candidates (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream) ++{ ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ GSList *cands = NULL; ++ ++ /* TODO: Use a g_ptr_array_new_null_terminated once when we depend on GLib 2.74 */ ++ GPtrArray *result = g_ptr_array_new (); ++ ++ cands = nice_agent_get_local_candidates (nice->priv->nice_agent, ++ stream->stream_id, NICE_COMPONENT_TYPE_RTP); ++ ++ _populate_candidate_list_stats (nice, cands, stream, result, TRUE); ++ g_slist_free_full (cands, (GDestroyNotify) nice_candidate_free); ++ ++ return (GstWebRTCICECandidateStats **) g_ptr_array_free (result, FALSE); ++} ++ ++static GstWebRTCICECandidateStats ** ++gst_webrtc_nice_get_remote_candidates (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream) ++{ ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ GSList *cands = NULL; ++ ++ /* TODO: Use a g_ptr_array_new_null_terminated once when we depend on GLib 2.74 */ ++ GPtrArray *result = g_ptr_array_new (); ++ ++ cands = nice_agent_get_remote_candidates (nice->priv->nice_agent, ++ stream->stream_id, NICE_COMPONENT_TYPE_RTP); ++ ++ _populate_candidate_list_stats (nice, cands, stream, result, FALSE); ++ g_slist_free_full (cands, (GDestroyNotify) nice_candidate_free); ++ ++ return (GstWebRTCICECandidateStats **) g_ptr_array_free (result, FALSE); ++} ++ ++static gboolean ++gst_webrtc_nice_get_selected_pair (GstWebRTCICE * ice, ++ GstWebRTCICEStream * stream, GstWebRTCICECandidateStats ** local_stats, ++ GstWebRTCICECandidateStats ** remote_stats) ++{ ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); ++ NiceCandidate *local_cand = NULL; ++ NiceCandidate *remote_cand = NULL; ++ ++ ++ if (stream) { ++ if (nice_agent_get_selected_pair (nice->priv->nice_agent, stream->stream_id, ++ NICE_COMPONENT_TYPE_RTP, &local_cand, &remote_cand)) { ++ *local_stats = g_new0 (GstWebRTCICECandidateStats, 1); ++ _populate_candidate_stats (nice, local_cand, stream, *local_stats, TRUE); ++ ++ *remote_stats = g_new0 (GstWebRTCICECandidateStats, 1); ++ _populate_candidate_stats (nice, remote_cand, stream, *remote_stats, ++ FALSE); ++ ++ return TRUE; ++ } ++ } ++ ++ return FALSE; ++} ++ ++static void ++_clear_ice_stream (struct NiceStreamItem *item) ++{ ++ GstWebRTCNice *ice = NULL; ++ ++ if (!item) ++ return; ++ ++ if (item->stream) { ++ g_object_get (item->stream, "ice", &ice, NULL); ++ ++ if (ice != NULL) { ++ g_signal_handlers_disconnect_by_data (ice->priv->nice_agent, ++ item->stream); ++ gst_object_unref (ice); ++ } ++ gst_object_unref (item->stream); ++ } ++} ++ ++static GstUri * ++_validate_turn_server (GstWebRTCNice * ice, const gchar * s) ++{ ++ GstUri *uri = gst_uri_from_string_escaped (s); ++ const gchar *userinfo, *scheme; ++ GList *keys = NULL, *l; ++ gchar *user = NULL, *pass = NULL; ++ 
gboolean turn_tls = FALSE;
++  guint port;
++
++  GST_DEBUG_OBJECT (ice, "validating turn server, %s", s);
++
++  if (!uri) {
++    GST_ERROR_OBJECT (ice, "Could not parse turn server '%s'", s);
++    return NULL;
++  }
++
++  scheme = gst_uri_get_scheme (uri);
++  if (g_strcmp0 (scheme, "turn") == 0) {
++  } else if (g_strcmp0 (scheme, "turns") == 0) {
++    turn_tls = TRUE;
++  } else {
++    GST_ERROR_OBJECT (ice, "unknown scheme '%s'", scheme);
++    goto out;
++  }
++
++  keys = gst_uri_get_query_keys (uri);
++  for (l = keys; l; l = l->next) {
++    gchar *key = l->data;
++
++    if (g_strcmp0 (key, "transport") == 0) {
++      const gchar *transport = gst_uri_get_query_value (uri, "transport");
++      if (!transport) {
++      } else if (g_strcmp0 (transport, "udp") == 0) {
++      } else if (g_strcmp0 (transport, "tcp") == 0) {
++      } else {
++        GST_ERROR_OBJECT (ice, "unknown transport value, '%s'", transport);
++        goto out;
++      }
++    } else {
++      GST_ERROR_OBJECT (ice, "unknown query key, '%s'", key);
++      goto out;
++    }
++  }
++
++  /* TODO: Implement error checking similar to the stun server below */
++  userinfo = gst_uri_get_userinfo (uri);
++  _parse_userinfo (userinfo, &user, &pass);
++  if (!user) {
++    GST_ERROR_OBJECT (ice, "No username specified in '%s'", s);
++    goto out;
++  }
++  if (!pass) {
++    GST_ERROR_OBJECT (ice, "No password specified in '%s'", s);
++    goto out;
++  }
++
++  port = gst_uri_get_port (uri);
++
++  if (port == GST_URI_NO_PORT) {
++    if (turn_tls) {
++      gst_uri_set_port (uri, 5349);
++    } else {
++      gst_uri_set_port (uri, 3478);
++    }
++  }
++
++  g_list_free (keys);
++  g_free (user);
++  g_free (pass);
++
++  return uri;
++
++out:
++  g_list_free (keys);
++  g_free (user);
++  g_free (pass);
++  gst_uri_unref (uri);
++
++  return NULL;
++}
++
++static void
++on_http_proxy_resolved (GstWebRTCICE * ice, GAsyncResult * res,
++    gpointer user_data)
++{
++  GstWebRTCNice *nice = GST_WEBRTC_NICE (ice);
++  GstUri *uri = user_data;
++  GList *addresses;
++  GError *error = NULL;
++  const gchar *userinfo;
++  gchar *user = NULL;
++  gchar *pass = NULL;
++  gchar *ip = NULL;
++  guint port = GST_URI_NO_PORT;
++
++  if (!(addresses = resolve_host_finish (nice, res, &error))) {
++    GST_WARNING_OBJECT (ice, "Failed to resolve http proxy: %s",
++        error->message);
++    g_clear_error (&error);
++    return;
++  }
++
++  /* XXX: only the first IP is used */
++  ip = g_inet_address_to_string (addresses->data);
++
++  if (!ip) {
++    GST_ERROR_OBJECT (ice, "failed to resolve host for proxy");
++    gst_uri_unref (uri);
++    return;
++  }
++
++  port = gst_uri_get_port (uri);
++  if (port == GST_URI_NO_PORT) {
++    port = HTTP_PROXY_PORT_DEFAULT;
++    GST_DEBUG_OBJECT (ice, "Proxy server has no port, assuming %u",
++        HTTP_PROXY_PORT_DEFAULT);
++  }
++
++  userinfo = gst_uri_get_userinfo (uri);
++  _parse_userinfo (userinfo, &user, &pass);
++
++  g_object_set (nice->priv->nice_agent,
++      "proxy-ip", ip, "proxy-port", port, "proxy-type", NICE_PROXY_TYPE_HTTP,
++      "proxy-username", user, "proxy-password", pass, NULL);
++
++  g_free (ip);
++  g_free (user);
++  g_free (pass);
++}
++
++static GstUri *
++_set_http_proxy (GstWebRTCICE * ice, const gchar * s)
++{
++  GstWebRTCNice *nice = GST_WEBRTC_NICE (ice);
++  GstUri *uri = gst_uri_from_string_escaped (s);
++  const gchar *msg =
++      "must be of the form http://[username:password@]<host>[:<port>]";
++  const gchar *host = NULL;
++  const gchar *userinfo;
++  gchar *user = NULL, *pass = NULL;
++
++  GST_DEBUG_OBJECT (ice, "setting http proxy %s", s);
++
++  if (!uri) {
++    GST_ERROR_OBJECT (ice, "Couldn't parse http proxy uri '%s', %s", s, msg);
++    return NULL;
++  }
++
++  if (g_strcmp0 (gst_uri_get_scheme (uri), "http") != 0) {
++    GST_ERROR_OBJECT (ice,
++        "Couldn't parse uri scheme for http proxy server '%s', %s", s, msg);
++    gst_uri_unref (uri);
++    return NULL;
++  }
++
++  host = gst_uri_get_host (uri);
++  if (!host) {
++    GST_ERROR_OBJECT (ice, "http proxy server '%s' has no host, %s", s, msg);
++    gst_uri_unref (uri);
++    return NULL;
++  }
++
++  userinfo = gst_uri_get_userinfo (uri);
++  _parse_userinfo (userinfo, &user, &pass);
++  if ((pass && pass[0] != '\0') && (!user || user[0] == '\0')) {
++    GST_ERROR_OBJECT (ice,
++        "Password specified without user for http proxy '%s', %s", s, msg);
++    uri = NULL;
++    goto out;
++  }
++
++  resolve_host_async (nice, host, (GAsyncReadyCallback) on_http_proxy_resolved,
++      gst_uri_ref (uri), (GDestroyNotify) gst_uri_unref);
++
++out:
++  g_free (user);
++  g_free (pass);
++
++  return uri;
++}
++
++static void
++gst_webrtc_nice_set_stun_server (GstWebRTCICE * ice, const gchar * uri_s)
++{
++  GstUri *uri = gst_uri_from_string_escaped (uri_s);
++  const gchar *msg = "must be of the form stun://<host>:<port>";
++  GstWebRTCNice *nice = GST_WEBRTC_NICE (ice);
++
++  GST_DEBUG_OBJECT (nice, "setting stun server, %s", uri_s);
++
++  if (!uri) {
++    GST_ERROR_OBJECT (nice, "Couldn't parse stun server '%s', %s", uri_s, msg);
++    return;
++  }
++
++  if (nice->priv->stun_server)
++    gst_uri_unref (nice->priv->stun_server);
++  nice->priv->stun_server = uri;
++}
++
++static gchar *
++gst_webrtc_nice_get_stun_server (GstWebRTCICE * ice)
++{
++  GstWebRTCNice *nice = GST_WEBRTC_NICE (ice);
++  if (nice->priv->stun_server)
++    return gst_uri_to_string (nice->priv->stun_server);
++  else
++    return NULL;
++}
++
++static void
++gst_webrtc_nice_set_turn_server (GstWebRTCICE * ice, const gchar * uri_s)
++{
++  GstUri *uri;
++  GstWebRTCNice *nice = GST_WEBRTC_NICE (ice);
++  uri = _validate_turn_server (nice, uri_s);
++
++  if (uri) {
++    if (nice->priv->turn_server)
++      gst_uri_unref (nice->priv->turn_server);
++    nice->priv->turn_server = uri;
++  }
++}
++
++static gchar *
++gst_webrtc_nice_get_turn_server (GstWebRTCICE * ice)
++{
++  GstWebRTCNice *nice = GST_WEBRTC_NICE (ice);
++  if (nice->priv->turn_server)
++    return gst_uri_to_string (nice->priv->turn_server);
++  else
++    return NULL;
++}
++
++static void
++gst_webrtc_nice_set_http_proxy (GstWebRTCICE * ice, const gchar * http_proxy)
++{
++  GstWebRTCNice *nice = GST_WEBRTC_NICE (ice);
++  GstUri *uri = _set_http_proxy (ice, http_proxy);
++
++  if (uri) {
++    if (nice->priv->http_proxy)
++      gst_uri_unref (nice->priv->http_proxy);
++    nice->priv->http_proxy = uri;
++  }
++}
++
++static gchar *
++gst_webrtc_nice_get_http_proxy (GstWebRTCICE * ice)
++{
++  GstWebRTCNice *nice = GST_WEBRTC_NICE (ice);
++
++  if (nice->priv->http_proxy)
++    return gst_uri_to_string (nice->priv->http_proxy);
++  else
++    return NULL;
++}
++
++static void
++gst_webrtc_nice_set_property (GObject * object, guint prop_id,
++    const GValue * value, GParamSpec * pspec)
++{
++  GstWebRTCICE *ice = GST_WEBRTC_ICE (object);
++  GstWebRTCNice *nice = GST_WEBRTC_NICE (object);
++
++  switch (prop_id) {
++    case PROP_ICE_TCP:
++      g_object_set_property (G_OBJECT (nice->priv->nice_agent),
++          "ice-tcp", value);
++      break;
++    case PROP_ICE_UDP:
++      g_object_set_property (G_OBJECT (nice->priv->nice_agent),
++          "ice-udp", value);
++      break;
++    case PROP_MIN_RTP_PORT:
++      ice->min_rtp_port = g_value_get_uint (value);
++      if (ice->min_rtp_port > ice->max_rtp_port)
++        g_warning ("Set min-rtp-port to %u which is larger than"
++            " max-rtp-port %u", ice->min_rtp_port, ice->max_rtp_port);
++
break; ++ case PROP_MAX_RTP_PORT: ++ ice->max_rtp_port = g_value_get_uint (value); ++ if (ice->min_rtp_port > ice->max_rtp_port) ++ g_warning ("Set max-rtp-port to %u which is smaller than" ++ " min-rtp-port %u", ice->max_rtp_port, ice->min_rtp_port); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_webrtc_nice_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec) ++{ ++ GstWebRTCICE *ice = GST_WEBRTC_ICE (object); ++ GstWebRTCNice *nice = GST_WEBRTC_NICE (object); ++ ++ switch (prop_id) { ++ case PROP_AGENT: ++ g_value_set_object (value, nice->priv->nice_agent); ++ break; ++ case PROP_ICE_TCP: ++ g_object_get_property (G_OBJECT (nice->priv->nice_agent), ++ "ice-tcp", value); ++ break; ++ case PROP_ICE_UDP: ++ g_object_get_property (G_OBJECT (nice->priv->nice_agent), ++ "ice-udp", value); ++ break; ++ case PROP_MIN_RTP_PORT: ++ g_value_set_uint (value, ice->min_rtp_port); ++ break; ++ case PROP_MAX_RTP_PORT: ++ g_value_set_uint (value, ice->max_rtp_port); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_webrtc_nice_finalize (GObject * object) ++{ ++ GstWebRTCNice *ice = GST_WEBRTC_NICE (object); ++ ++ g_signal_handlers_disconnect_by_data (ice->priv->nice_agent, ice); ++ ++ _stop_thread (ice); ++ ++ if (ice->priv->on_candidate_notify) ++ ice->priv->on_candidate_notify (ice->priv->on_candidate_data); ++ ice->priv->on_candidate = NULL; ++ ice->priv->on_candidate_notify = NULL; ++ ++ if (ice->priv->turn_server) ++ gst_uri_unref (ice->priv->turn_server); ++ if (ice->priv->stun_server) ++ gst_uri_unref (ice->priv->stun_server); ++ if (ice->priv->http_proxy) ++ gst_uri_unref (ice->priv->http_proxy); ++ ++ g_mutex_clear (&ice->priv->lock); ++ g_cond_clear (&ice->priv->cond); ++ ++ g_array_free (ice->priv->nice_stream_map, TRUE); ++ ++ g_object_unref (ice->priv->nice_agent); ++ ++ g_hash_table_unref (ice->priv->turn_servers); ++ ++ G_OBJECT_CLASS (parent_class)->finalize (object); ++} ++ ++static void ++gst_webrtc_nice_constructed (GObject * object) ++{ ++ GstWebRTCNice *ice = GST_WEBRTC_NICE (object); ++ NiceAgentOption options = 0; ++ ++ _start_thread (ice); ++ ++ options |= NICE_AGENT_OPTION_ICE_TRICKLE; ++ options |= NICE_AGENT_OPTION_REGULAR_NOMINATION; ++ ++ ice->priv->nice_agent = nice_agent_new_full (ice->priv->main_context, ++ NICE_COMPATIBILITY_RFC5245, options); ++ g_signal_connect (ice->priv->nice_agent, "new-candidate-full", ++ G_CALLBACK (_on_new_candidate), ice); ++ ++ G_OBJECT_CLASS (parent_class)->constructed (object); ++} ++ ++static void ++gst_webrtc_nice_class_init (GstWebRTCNiceClass * klass) ++{ ++ GstWebRTCICEClass *gst_webrtc_ice_class = GST_WEBRTC_ICE_CLASS (klass); ++ GObjectClass *gobject_class = (GObjectClass *) klass; ++ ++ // override virtual functions ++ gst_webrtc_ice_class->add_candidate = gst_webrtc_nice_add_candidate; ++ gst_webrtc_ice_class->add_stream = gst_webrtc_nice_add_stream; ++ gst_webrtc_ice_class->add_turn_server = gst_webrtc_nice_add_turn_server; ++ gst_webrtc_ice_class->find_transport = gst_webrtc_nice_find_transport; ++ gst_webrtc_ice_class->gather_candidates = gst_webrtc_nice_gather_candidates; ++ gst_webrtc_ice_class->get_is_controller = gst_webrtc_nice_get_is_controller; ++ gst_webrtc_ice_class->get_stun_server = gst_webrtc_nice_get_stun_server; ++ gst_webrtc_ice_class->get_turn_server = gst_webrtc_nice_get_turn_server; ++ gst_webrtc_ice_class->get_http_proxy = 
gst_webrtc_nice_get_http_proxy; ++ gst_webrtc_ice_class->set_force_relay = gst_webrtc_nice_set_force_relay; ++ gst_webrtc_ice_class->set_is_controller = gst_webrtc_nice_set_is_controller; ++ gst_webrtc_ice_class->set_local_credentials = ++ gst_webrtc_nice_set_local_credentials; ++ gst_webrtc_ice_class->set_remote_credentials = ++ gst_webrtc_nice_set_remote_credentials; ++ gst_webrtc_ice_class->set_stun_server = gst_webrtc_nice_set_stun_server; ++ gst_webrtc_ice_class->set_tos = gst_webrtc_nice_set_tos; ++ gst_webrtc_ice_class->set_turn_server = gst_webrtc_nice_set_turn_server; ++ gst_webrtc_ice_class->set_http_proxy = gst_webrtc_nice_set_http_proxy; ++ gst_webrtc_ice_class->set_on_ice_candidate = ++ gst_webrtc_nice_set_on_ice_candidate; ++ gst_webrtc_ice_class->get_local_candidates = ++ gst_webrtc_nice_get_local_candidates; ++ gst_webrtc_ice_class->get_remote_candidates = ++ gst_webrtc_nice_get_remote_candidates; ++ gst_webrtc_ice_class->get_selected_pair = gst_webrtc_nice_get_selected_pair; ++ ++ gobject_class->constructed = gst_webrtc_nice_constructed; ++ gobject_class->get_property = gst_webrtc_nice_get_property; ++ gobject_class->set_property = gst_webrtc_nice_set_property; ++ gobject_class->finalize = gst_webrtc_nice_finalize; ++ ++ g_object_class_install_property (gobject_class, ++ PROP_AGENT, ++ g_param_spec_object ("agent", "ICE agent", ++ "ICE agent in use by this object. WARNING! Accessing this property " ++ "may have disastrous consequences for the operation of webrtcbin. " ++ "Other ICE implementations may not have the same interface.", ++ NICE_TYPE_AGENT, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ ++ g_object_class_install_property (gobject_class, ++ PROP_ICE_TCP, ++ g_param_spec_boolean ("ice-tcp", "ICE TCP", ++ "Whether the agent should use ICE-TCP when gathering candidates", ++ TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ g_object_class_install_property (gobject_class, ++ PROP_ICE_UDP, ++ g_param_spec_boolean ("ice-udp", "ICE UDP", ++ "Whether the agent should use ICE-UDP when gathering candidates", ++ TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ g_signal_override_class_handler ("add-local-ip-address", ++ G_TYPE_FROM_CLASS (klass), ++ G_CALLBACK (gst_webrtc_nice_add_local_ip_address)); ++} ++ ++static void ++gst_webrtc_nice_init (GstWebRTCNice * ice) ++{ ++ ice->priv = gst_webrtc_nice_get_instance_private (ice); ++ ++ g_mutex_init (&ice->priv->lock); ++ g_cond_init (&ice->priv->cond); ++ ++ ice->priv->turn_servers = ++ g_hash_table_new_full (g_str_hash, g_str_equal, g_free, ++ (GDestroyNotify) gst_uri_unref); ++ ++ ice->priv->nice_stream_map = ++ g_array_new (FALSE, TRUE, sizeof (struct NiceStreamItem)); ++ g_array_set_clear_func (ice->priv->nice_stream_map, ++ (GDestroyNotify) _clear_ice_stream); ++} ++ ++GstWebRTCNice * ++gst_webrtc_nice_new (const gchar * name) ++{ ++ return g_object_new (GST_TYPE_WEBRTC_NICE, "name", name, NULL); ++} +diff --git a/gst-libs/gst/webrtc/nice/nice.h b/gst-libs/gst/webrtc/nice/nice.h +new file mode 100644 +index 000000000..a4d8a94a5 +--- /dev/null ++++ b/gst-libs/gst/webrtc/nice/nice.h +@@ -0,0 +1,67 @@ ++/* GStreamer ++ * Copyright (C) 2017 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. 
++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifndef __GST_WEBRTC_NICE_H__ ++#define __GST_WEBRTC_NICE_H__ ++ ++#include "gst/webrtc/ice.h" ++ ++#include "nicestream.h" ++#include "nicetransport.h" ++ ++#include "nice_fwd.h" ++ ++G_BEGIN_DECLS ++ ++GST_WEBRTCNICE_API ++GType gst_webrtc_nice_get_type(void); ++#define GST_TYPE_WEBRTC_NICE (gst_webrtc_nice_get_type()) ++#define GST_WEBRTC_NICE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WEBRTC_NICE,GstWebRTCNice)) ++#define GST_IS_WEBRTC_NICE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WEBRTC_NICE)) ++#define GST_WEBRTC_NICE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_WEBRTC_NICE,GstWebRTCNiceClass)) ++#define GST_IS_WEBRTC_NICE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_NICE)) ++#define GST_WEBRTC_NICE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_NICE,GstWebRTCNiceClass)) ++ ++/** ++ * GstWebRTCNice: ++ */ ++typedef struct _GstWebRTCNice GstWebRTCNice; ++typedef struct _GstWebRTCNiceClass GstWebRTCNiceClass; ++typedef struct _GstWebRTCNicePrivate GstWebRTCNicePrivate; ++ ++struct _GstWebRTCNice ++{ ++ GstWebRTCICE parent; ++ GstWebRTCNicePrivate *priv; ++ ++}; ++ ++struct _GstWebRTCNiceClass ++{ ++ GstWebRTCICEClass parent_class; ++}; ++ ++GST_WEBRTCNICE_API ++GstWebRTCNice * gst_webrtc_nice_new (const gchar * name); ++ ++G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstWebRTCNice, gst_object_unref) ++ ++G_END_DECLS ++ ++#endif /* __GST_WEBRTC_NICE_H__ */ +\ No newline at end of file +diff --git a/gst-libs/gst/webrtc/nice/nice_fwd.h b/gst-libs/gst/webrtc/nice/nice_fwd.h +new file mode 100644 +index 000000000..4d51c36c5 +--- /dev/null ++++ b/gst-libs/gst/webrtc/nice/nice_fwd.h +@@ -0,0 +1,17 @@ ++#ifndef __GST_WEBRTCNICE_FWD_H__ ++#define __GST_WEBRTCNICE_FWD_H__ ++ ++#ifndef GST_USE_UNSTABLE_API ++#warning "The GstWebRTCNice library from gst-plugins-bad is unstable API and may change in future." ++#warning "You can define GST_USE_UNSTABLE_API to avoid this warning." ++#endif ++ ++#ifndef GST_WEBRTCNICE_API ++# ifdef BUILDING_GST_WEBRTCNICE ++# define GST_WEBRTCNICE_API GST_API_EXPORT /* from config.h */ ++# else ++# define GST_WEBRTCNICE_API GST_API_IMPORT ++# endif ++#endif ++ ++#endif /* __GST_WEBRTCNICE_FWD_H__ */ +\ No newline at end of file +diff --git a/gst-libs/gst/webrtc/nice/nicestream.c b/gst-libs/gst/webrtc/nice/nicestream.c +new file mode 100644 +index 000000000..cda1c133f +--- /dev/null ++++ b/gst-libs/gst/webrtc/nice/nicestream.c +@@ -0,0 +1,334 @@ ++/* GStreamer ++ * Copyright (C) 2017 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifdef HAVE_CONFIG_H ++# include "config.h" ++#endif ++ ++#include "nicestream.h" ++#include "nicetransport.h" ++ ++#define GST_CAT_DEFAULT gst_webrtc_nice_stream_debug ++GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); ++ ++enum ++{ ++ PROP_0, ++ PROP_ICE, ++}; ++ ++struct _GstWebRTCNiceStreamPrivate ++{ ++ gboolean gathered; ++ GList *transports; ++ gboolean gathering_started; ++ gulong candidate_gathering_done_id; ++ GWeakRef ice_weak; ++}; ++ ++#define gst_webrtc_nice_stream_parent_class parent_class ++G_DEFINE_TYPE_WITH_CODE (GstWebRTCNiceStream, gst_webrtc_nice_stream, ++ GST_TYPE_WEBRTC_ICE_STREAM, G_ADD_PRIVATE (GstWebRTCNiceStream) ++ GST_DEBUG_CATEGORY_INIT (gst_webrtc_nice_stream_debug, ++ "webrtcnicestream", 0, "webrtcnicestream");); ++ ++static void ++gst_webrtc_nice_stream_set_property (GObject * object, guint prop_id, ++ const GValue * value, GParamSpec * pspec) ++{ ++ GstWebRTCNiceStream *stream = GST_WEBRTC_NICE_STREAM (object); ++ ++ switch (prop_id) { ++ case PROP_ICE: ++ g_weak_ref_set (&stream->priv->ice_weak, g_value_get_object (value)); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_webrtc_nice_stream_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec) ++{ ++ GstWebRTCNiceStream *stream = GST_WEBRTC_NICE_STREAM (object); ++ ++ switch (prop_id) { ++ case PROP_ICE: ++ g_value_take_object (value, g_weak_ref_get (&stream->priv->ice_weak)); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static GWeakRef * ++weak_new (gpointer object) ++{ ++ GWeakRef *weak = g_new0 (GWeakRef, 1); ++ g_weak_ref_init (weak, object); ++ return weak; ++} ++ ++static void ++weak_free (GWeakRef * weak) ++{ ++ g_weak_ref_clear (weak); ++ g_free (weak); ++} ++ ++static void ++gst_webrtc_nice_stream_finalize (GObject * object) ++{ ++ GstWebRTCNiceStream *stream = GST_WEBRTC_NICE_STREAM (object); ++ GstWebRTCNice *ice = g_weak_ref_get (&stream->priv->ice_weak); ++ ++ if (ice) { ++ NiceAgent *agent; ++ g_object_get (ice, "agent", &agent, NULL); ++ ++ if (stream->priv->candidate_gathering_done_id != 0) { ++ g_signal_handler_disconnect (agent, ++ stream->priv->candidate_gathering_done_id); ++ } ++ ++ g_object_unref (agent); ++ gst_object_unref (ice); ++ } ++ ++ g_list_foreach (stream->priv->transports, (GFunc) weak_free, NULL); ++ g_list_free (stream->priv->transports); ++ stream->priv->transports = NULL; ++ ++ g_weak_ref_clear (&stream->priv->ice_weak); ++ ++ G_OBJECT_CLASS (parent_class)->finalize (object); ++} ++ ++static GList * ++_delete_transport (GList ** transports, GList * link) ++{ ++ GList *next = link->next; ++ weak_free (link->data); ++ *transports = g_list_delete_link (*transports, link); ++ return next; ++} ++ ++static void ++_on_candidate_gathering_done (NiceAgent * agent, guint stream_id, ++ GWeakRef * ice_weak) ++{ ++ GstWebRTCNiceStream *ice = g_weak_ref_get (ice_weak); ++ GList *l; ++ ++ if (!ice) ++ return; ++ ++ if (stream_id != GST_WEBRTC_ICE_STREAM (ice)->stream_id) ++ goto cleanup; ++ ++ GST_DEBUG_OBJECT (ice, "%u gathering done", stream_id); ++ ++ ice->priv->gathered = TRUE; ++ ++ for (l = 
ice->priv->transports; l; l = l->next) { ++ GstWebRTCICETransport *trans = g_weak_ref_get (l->data); ++ ++ if (trans) { ++ gst_webrtc_ice_transport_gathering_state_change (trans, ++ GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE); ++ g_object_unref (trans); ++ } else { ++ l = _delete_transport (&ice->priv->transports, l); ++ } ++ } ++ ++cleanup: ++ gst_object_unref (ice); ++} ++ ++static GstWebRTCICETransport * ++gst_webrtc_nice_stream_find_transport (GstWebRTCICEStream * stream, ++ GstWebRTCICEComponent component) ++{ ++ GstWebRTCICEComponent trans_comp; ++ GstWebRTCICETransport *ret; ++ GList *l; ++ GstWebRTCNiceStream *nice_stream = GST_WEBRTC_NICE_STREAM (stream); ++ ++ for (l = nice_stream->priv->transports; l; l = l->next) { ++ GstWebRTCICETransport *trans = g_weak_ref_get (l->data); ++ if (trans) { ++ g_object_get (trans, "component", &trans_comp, NULL); ++ ++ if (component == trans_comp) ++ return trans; ++ else ++ gst_object_unref (trans); ++ } else { ++ l = _delete_transport (&nice_stream->priv->transports, l); ++ } ++ } ++ ++ ret = ++ GST_WEBRTC_ICE_TRANSPORT (gst_webrtc_nice_transport_new (nice_stream, ++ component)); ++ nice_stream->priv->transports = ++ g_list_prepend (nice_stream->priv->transports, weak_new (ret)); ++ ++ return ret; ++} ++ ++static void ++gst_webrtc_nice_stream_constructed (GObject * object) ++{ ++ GstWebRTCNiceStream *stream; ++ NiceAgent *agent; ++ GstWebRTCNice *ice; ++ ++ G_OBJECT_CLASS (parent_class)->constructed (object); ++ ++ stream = GST_WEBRTC_NICE_STREAM (object); ++ ice = g_weak_ref_get (&stream->priv->ice_weak); ++ ++ ++ g_assert (ice != NULL); ++ g_object_get (ice, "agent", &agent, NULL); ++ stream->priv->candidate_gathering_done_id = g_signal_connect_data (agent, ++ "candidate-gathering-done", G_CALLBACK (_on_candidate_gathering_done), ++ weak_new (stream), (GClosureNotify) weak_free, (GConnectFlags) 0); ++ ++ g_object_unref (agent); ++ gst_object_unref (ice); ++} ++ ++static gboolean ++gst_webrtc_nice_stream_gather_candidates (GstWebRTCICEStream * stream) ++{ ++ NiceAgent *agent; ++ GList *l; ++ GstWebRTCICE *ice; ++ gboolean ret = TRUE; ++ GstWebRTCNiceStream *nice_stream = GST_WEBRTC_NICE_STREAM (stream); ++ ++ GST_DEBUG_OBJECT (nice_stream, "start gathering candidates"); ++ ++ if (nice_stream->priv->gathered) ++ return TRUE; ++ ++ for (l = nice_stream->priv->transports; l; l = l->next) { ++ GstWebRTCICETransport *trans = g_weak_ref_get (l->data); ++ ++ if (trans) { ++ gst_webrtc_ice_transport_gathering_state_change (trans, ++ GST_WEBRTC_ICE_GATHERING_STATE_GATHERING); ++ g_object_unref (trans); ++ } else { ++ l = _delete_transport (&nice_stream->priv->transports, l); ++ } ++ } ++ ++ ice = GST_WEBRTC_ICE (g_weak_ref_get (&nice_stream->priv->ice_weak)); ++ g_assert (ice != NULL); ++ ++ g_object_get (ice, "agent", &agent, NULL); ++ ++ if (!nice_stream->priv->gathering_started) { ++ if (ice->min_rtp_port != 0 || ice->max_rtp_port != 65535) { ++ if (ice->min_rtp_port > ice->max_rtp_port) { ++ GST_ERROR_OBJECT (ice, ++ "invalid port range: min-rtp-port %d must be <= max-rtp-port %d", ++ ice->min_rtp_port, ice->max_rtp_port); ++ ret = FALSE; ++ goto cleanup; ++ } ++ ++ nice_agent_set_port_range (agent, stream->stream_id, ++ NICE_COMPONENT_TYPE_RTP, ice->min_rtp_port, ice->max_rtp_port); ++ } ++ /* mark as gathering started to prevent changing ports again */ ++ nice_stream->priv->gathering_started = TRUE; ++ } ++ ++ if (!nice_agent_gather_candidates (agent, stream->stream_id)) { ++ ret = FALSE; ++ goto cleanup; ++ } ++ ++ for (l = 
nice_stream->priv->transports; l; l = l->next) { ++ GstWebRTCNiceTransport *trans = g_weak_ref_get (l->data); ++ ++ if (trans) { ++ gst_webrtc_nice_transport_update_buffer_size (trans); ++ g_object_unref (trans); ++ } else { ++ l = _delete_transport (&nice_stream->priv->transports, l); ++ } ++ } ++ ++cleanup: ++ if (agent) ++ g_object_unref (agent); ++ if (ice) ++ gst_object_unref (ice); ++ ++ return ret; ++} ++ ++static void ++gst_webrtc_nice_stream_class_init (GstWebRTCNiceStreamClass * klass) ++{ ++ GObjectClass *gobject_class = (GObjectClass *) klass; ++ GstWebRTCICEStreamClass *gst_webrtc_ice_stream_class = ++ GST_WEBRTC_ICE_STREAM_CLASS (klass); ++ ++ gst_webrtc_ice_stream_class->find_transport = ++ gst_webrtc_nice_stream_find_transport; ++ gst_webrtc_ice_stream_class->gather_candidates = ++ gst_webrtc_nice_stream_gather_candidates; ++ ++ gobject_class->constructed = gst_webrtc_nice_stream_constructed; ++ gobject_class->get_property = gst_webrtc_nice_stream_get_property; ++ gobject_class->set_property = gst_webrtc_nice_stream_set_property; ++ gobject_class->finalize = gst_webrtc_nice_stream_finalize; ++ ++ g_object_class_install_property (gobject_class, ++ PROP_ICE, ++ g_param_spec_object ("ice", ++ "ICE", "ICE agent associated with this stream", ++ GST_TYPE_WEBRTC_ICE, ++ G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS)); ++} ++ ++static void ++gst_webrtc_nice_stream_init (GstWebRTCNiceStream * stream) ++{ ++ stream->priv = gst_webrtc_nice_stream_get_instance_private (stream); ++ ++ g_weak_ref_init (&stream->priv->ice_weak, NULL); ++} ++ ++GstWebRTCNiceStream * ++gst_webrtc_nice_stream_new (GstWebRTCICE * ice, guint stream_id) ++{ ++ return g_object_new (GST_TYPE_WEBRTC_NICE_STREAM, "ice", ice, ++ "stream-id", stream_id, NULL); ++} +diff --git a/gst-libs/gst/webrtc/nice/nicestream.h b/gst-libs/gst/webrtc/nice/nicestream.h +new file mode 100644 +index 000000000..ef68153c6 +--- /dev/null ++++ b/gst-libs/gst/webrtc/nice/nicestream.h +@@ -0,0 +1,63 @@ ++/* GStreamer ++ * Copyright (C) 2017 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++#ifndef __GST_WEBRTC_NICE_STREAM_H__ ++#define __GST_WEBRTC_NICE_STREAM_H__ ++ ++#include "gst/webrtc/icestream.h" ++ ++#include "nice_fwd.h" ++ ++G_BEGIN_DECLS ++ ++GST_WEBRTCNICE_API ++GType gst_webrtc_nice_stream_get_type(void); ++#define GST_TYPE_WEBRTC_NICE_STREAM (gst_webrtc_nice_stream_get_type()) ++#define GST_WEBRTC_NICE_STREAM(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WEBRTC_NICE_STREAM,GstWebRTCNiceStream)) ++#define GST_IS_WEBRTC_NICE_STREAM(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WEBRTC_NICE_STREAM)) ++#define GST_WEBRTC_NICE_STREAM_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_WEBRTC_NICE_STREAM,GstWebRTCNiceStreamClass)) ++#define GST_IS_WEBRTC_NICE_STREAM_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_NICE_STREAM)) ++#define GST_WEBRTC_NICE_STREAM_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_NICE_STREAM,GstWebRTCNiceStreamClass)) ++ ++/** ++ * GstWebRTCNiceStream: ++ */ ++typedef struct _GstWebRTCNiceStream GstWebRTCNiceStream; ++typedef struct _GstWebRTCNiceStreamClass GstWebRTCNiceStreamClass; ++typedef struct _GstWebRTCNiceStreamPrivate GstWebRTCNiceStreamPrivate; ++ ++struct _GstWebRTCNiceStream ++{ ++ GstWebRTCICEStream parent; ++ GstWebRTCNiceStreamPrivate *priv; ++}; ++ ++struct _GstWebRTCNiceStreamClass ++{ ++ GstWebRTCICEStreamClass parent_class; ++}; ++ ++GstWebRTCNiceStream * gst_webrtc_nice_stream_new (GstWebRTCICE * ice, ++ guint stream_id); ++ ++G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstWebRTCNiceStream, gst_object_unref) ++ ++G_END_DECLS ++ ++#endif /* __GST_WEBRTC_NICE_STREAM_H__ */ +diff --git a/gst-libs/gst/webrtc/nice/nicetransport.c b/gst-libs/gst/webrtc/nice/nicetransport.c +new file mode 100644 +index 000000000..13030d0fb +--- /dev/null ++++ b/gst-libs/gst/webrtc/nice/nicetransport.c +@@ -0,0 +1,426 @@ ++/* GStreamer ++ * Copyright (C) 2017 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++#ifdef HAVE_CONFIG_H ++# include "config.h" ++#endif ++ ++#include "nicestream.h" ++#include "nicetransport.h" ++ ++#define GST_CAT_DEFAULT gst_webrtc_nice_transport_debug ++GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); ++ ++enum ++{ ++ SIGNAL_0, ++ LAST_SIGNAL, ++}; ++ ++enum ++{ ++ PROP_0, ++ PROP_STREAM, ++ PROP_SEND_BUFFER_SIZE, ++ PROP_RECEIVE_BUFFER_SIZE ++}; ++ ++//static guint gst_webrtc_nice_transport_signals[LAST_SIGNAL] = { 0 }; ++ ++struct _GstWebRTCNiceTransportPrivate ++{ ++ gboolean running; ++ ++ gint send_buffer_size; ++ gint receive_buffer_size; ++ gulong on_new_selected_pair_id; ++ gulong on_component_state_changed_id; ++}; ++ ++#define gst_webrtc_nice_transport_parent_class parent_class ++G_DEFINE_TYPE_WITH_CODE (GstWebRTCNiceTransport, gst_webrtc_nice_transport, ++ GST_TYPE_WEBRTC_ICE_TRANSPORT, G_ADD_PRIVATE (GstWebRTCNiceTransport) ++ GST_DEBUG_CATEGORY_INIT (gst_webrtc_nice_transport_debug, ++ "webrtcnicetransport", 0, "webrtcnicetransport"); ++ ); ++ ++static NiceComponentType ++_gst_component_to_nice (GstWebRTCICEComponent component) ++{ ++ switch (component) { ++ case GST_WEBRTC_ICE_COMPONENT_RTP: ++ return NICE_COMPONENT_TYPE_RTP; ++ case GST_WEBRTC_ICE_COMPONENT_RTCP: ++ return NICE_COMPONENT_TYPE_RTCP; ++ default: ++ g_assert_not_reached (); ++ return 0; ++ } ++} ++ ++static GstWebRTCICEComponent ++_nice_component_to_gst (NiceComponentType component) ++{ ++ switch (component) { ++ case NICE_COMPONENT_TYPE_RTP: ++ return GST_WEBRTC_ICE_COMPONENT_RTP; ++ case NICE_COMPONENT_TYPE_RTCP: ++ return GST_WEBRTC_ICE_COMPONENT_RTCP; ++ default: ++ g_assert_not_reached (); ++ return 0; ++ } ++} ++ ++static GstWebRTCICEConnectionState ++_nice_component_state_to_gst (NiceComponentState state) ++{ ++ switch (state) { ++ case NICE_COMPONENT_STATE_DISCONNECTED: ++ return GST_WEBRTC_ICE_CONNECTION_STATE_DISCONNECTED; ++ case NICE_COMPONENT_STATE_GATHERING: ++ return GST_WEBRTC_ICE_CONNECTION_STATE_NEW; ++ case NICE_COMPONENT_STATE_CONNECTING: ++ return GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING; ++ case NICE_COMPONENT_STATE_CONNECTED: ++ return GST_WEBRTC_ICE_CONNECTION_STATE_CONNECTED; ++ case NICE_COMPONENT_STATE_READY: ++ return GST_WEBRTC_ICE_CONNECTION_STATE_COMPLETED; ++ case NICE_COMPONENT_STATE_FAILED: ++ return GST_WEBRTC_ICE_CONNECTION_STATE_FAILED; ++ default: ++ g_assert_not_reached (); ++ return 0; ++ } ++} ++ ++static void ++gst_webrtc_nice_transport_set_property (GObject * object, guint prop_id, ++ const GValue * value, GParamSpec * pspec) ++{ ++ GstWebRTCNiceTransport *nice = GST_WEBRTC_NICE_TRANSPORT (object); ++ ++ switch (prop_id) { ++ case PROP_STREAM: ++ if (nice->stream) ++ gst_object_unref (nice->stream); ++ nice->stream = g_value_dup_object (value); ++ break; ++ case PROP_SEND_BUFFER_SIZE: ++ nice->priv->send_buffer_size = g_value_get_int (value); ++ gst_webrtc_nice_transport_update_buffer_size (nice); ++ break; ++ case PROP_RECEIVE_BUFFER_SIZE: ++ nice->priv->receive_buffer_size = g_value_get_int (value); ++ gst_webrtc_nice_transport_update_buffer_size (nice); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_webrtc_nice_transport_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec) ++{ ++ GstWebRTCNiceTransport *nice = GST_WEBRTC_NICE_TRANSPORT (object); ++ ++ switch (prop_id) { ++ case PROP_STREAM: ++ g_value_set_object (value, nice->stream); ++ break; ++ case PROP_SEND_BUFFER_SIZE: ++ g_value_set_int (value, 
nice->priv->send_buffer_size); ++ break; ++ case PROP_RECEIVE_BUFFER_SIZE: ++ g_value_set_int (value, nice->priv->receive_buffer_size); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_webrtc_nice_transport_finalize (GObject * object) ++{ ++ GstWebRTCNiceTransport *nice = GST_WEBRTC_NICE_TRANSPORT (object); ++ NiceAgent *agent; ++ GstWebRTCNice *webrtc_ice = NULL; ++ ++ g_object_get (nice->stream, "ice", &webrtc_ice, NULL); ++ ++ if (webrtc_ice) { ++ g_object_get (webrtc_ice, "agent", &agent, NULL); ++ ++ if (nice->priv->on_component_state_changed_id != 0) { ++ g_signal_handler_disconnect (agent, ++ nice->priv->on_component_state_changed_id); ++ } ++ ++ if (nice->priv->on_new_selected_pair_id != 0) { ++ g_signal_handler_disconnect (agent, nice->priv->on_new_selected_pair_id); ++ } ++ ++ g_object_unref (agent); ++ gst_object_unref (webrtc_ice); ++ } ++ ++ gst_object_unref (nice->stream); ++ ++ G_OBJECT_CLASS (parent_class)->finalize (object); ++} ++ ++void ++gst_webrtc_nice_transport_update_buffer_size (GstWebRTCNiceTransport * nice) ++{ ++ NiceAgent *agent = NULL; ++ GPtrArray *sockets; ++ guint i; ++ GstWebRTCNice *webrtc_ice = NULL; ++ ++ g_object_get (nice->stream, "ice", &webrtc_ice, NULL); ++ ++ g_assert (webrtc_ice != NULL); ++ ++ g_object_get (webrtc_ice, "agent", &agent, NULL); ++ g_assert (agent != NULL); ++ ++ sockets = ++ nice_agent_get_sockets (agent, ++ GST_WEBRTC_ICE_STREAM (nice->stream)->stream_id, 1); ++ if (sockets == NULL) { ++ g_object_unref (agent); ++ gst_object_unref (webrtc_ice); ++ return; ++ } ++ ++ for (i = 0; i < sockets->len; i++) { ++ GSocket *gsocket = g_ptr_array_index (sockets, i); ++#ifdef SO_SNDBUF ++ if (nice->priv->send_buffer_size != 0) { ++ GError *gerror = NULL; ++ if (!g_socket_set_option (gsocket, SOL_SOCKET, SO_SNDBUF, ++ nice->priv->send_buffer_size, &gerror)) ++ GST_WARNING_OBJECT (nice, "Could not set send buffer size : %s", ++ gerror->message); ++ g_clear_error (&gerror); ++ } ++#endif ++#ifdef SO_RCVBUF ++ if (nice->priv->receive_buffer_size != 0) { ++ GError *gerror = NULL; ++ if (!g_socket_set_option (gsocket, SOL_SOCKET, SO_RCVBUF, ++ nice->priv->receive_buffer_size, &gerror)) ++ GST_WARNING_OBJECT (nice, "Could not set send receive size : %s", ++ gerror->message); ++ g_clear_error (&gerror); ++ } ++#endif ++ } ++ g_ptr_array_unref (sockets); ++ g_object_unref (agent); ++ gst_object_unref (webrtc_ice); ++} ++ ++ ++static void ++_on_new_selected_pair (NiceAgent * agent, guint stream_id, ++ NiceComponentType component, NiceCandidate * lcandidate, ++ NiceCandidate * rcandidate, GWeakRef * nice_weak) ++{ ++ GstWebRTCNiceTransport *nice = g_weak_ref_get (nice_weak); ++ GstWebRTCICETransport *ice; ++ GstWebRTCICEComponent comp = _nice_component_to_gst (component); ++ guint our_stream_id; ++ ++ if (!nice) ++ return; ++ ++ ice = GST_WEBRTC_ICE_TRANSPORT (nice); ++ ++ g_object_get (nice->stream, "stream-id", &our_stream_id, NULL); ++ ++ if (stream_id != our_stream_id) ++ goto cleanup; ++ if (comp != ice->component) ++ goto cleanup; ++ ++ gst_webrtc_ice_transport_selected_pair_change (ice); ++ ++cleanup: ++ gst_object_unref (nice); ++} ++ ++static void ++_on_component_state_changed (NiceAgent * agent, guint stream_id, ++ NiceComponentType component, NiceComponentState state, GWeakRef * nice_weak) ++{ ++ GstWebRTCNiceTransport *nice = g_weak_ref_get (nice_weak); ++ GstWebRTCICETransport *ice; ++ GstWebRTCICEComponent comp = _nice_component_to_gst (component); ++ guint 
our_stream_id; ++ ++ if (!nice) ++ return; ++ ++ ice = GST_WEBRTC_ICE_TRANSPORT (nice); ++ ++ g_object_get (nice->stream, "stream-id", &our_stream_id, NULL); ++ ++ if (stream_id != our_stream_id) ++ goto cleanup; ++ if (comp != ice->component) ++ goto cleanup; ++ ++ GST_DEBUG_OBJECT (ice, "%u %u %s", stream_id, component, ++ nice_component_state_to_string (state)); ++ ++ gst_webrtc_ice_transport_connection_state_change (ice, ++ _nice_component_state_to_gst (state)); ++ ++cleanup: ++ gst_object_unref (nice); ++} ++ ++static GWeakRef * ++weak_new (GstWebRTCNiceTransport * nice) ++{ ++ GWeakRef *weak = g_new0 (GWeakRef, 1); ++ g_weak_ref_init (weak, nice); ++ return weak; ++} ++ ++static void ++weak_free (GWeakRef * weak) ++{ ++ g_weak_ref_clear (weak); ++ g_free (weak); ++} ++ ++static void ++gst_webrtc_nice_transport_constructed (GObject * object) ++{ ++ GstWebRTCNiceTransport *nice; ++ GstWebRTCICETransport *ice; ++ NiceComponentType component; ++ gboolean controlling_mode; ++ guint our_stream_id; ++ NiceAgent *agent; ++ GstWebRTCNice *webrtc_ice = NULL; ++ ++ G_OBJECT_CLASS (parent_class)->constructed (object); ++ ++ nice = GST_WEBRTC_NICE_TRANSPORT (object); ++ ice = GST_WEBRTC_ICE_TRANSPORT (object); ++ component = _gst_component_to_nice (ice->component); ++ ++ g_object_get (nice->stream, "ice", &webrtc_ice, "stream-id", &our_stream_id, ++ NULL); ++ g_assert (webrtc_ice != NULL); ++ g_object_get (webrtc_ice, "agent", &agent, NULL); ++ ++ g_object_get (agent, "controlling-mode", &controlling_mode, NULL); ++ ice->role = ++ controlling_mode ? GST_WEBRTC_ICE_ROLE_CONTROLLING : ++ GST_WEBRTC_ICE_ROLE_CONTROLLED; ++ ++ nice->priv->on_component_state_changed_id = g_signal_connect_data (agent, ++ "component-state-changed", G_CALLBACK (_on_component_state_changed), ++ weak_new (nice), (GClosureNotify) weak_free, (GConnectFlags) 0); ++ nice->priv->on_new_selected_pair_id = g_signal_connect_data (agent, ++ "new-selected-pair-full", G_CALLBACK (_on_new_selected_pair), ++ weak_new (nice), (GClosureNotify) weak_free, (GConnectFlags) 0); ++ ++ ice->src = gst_element_factory_make ("nicesrc", NULL); ++ if (ice->src) { ++ g_object_set (ice->src, "agent", agent, "stream", our_stream_id, ++ "component", component, NULL); ++ } ++ ice->sink = gst_element_factory_make ("nicesink", NULL); ++ if (ice->sink) { ++ g_object_set (ice->sink, "agent", agent, "stream", our_stream_id, ++ "component", component, "async", FALSE, "enable-last-sample", FALSE, ++ "sync", FALSE, NULL); ++ } ++ ++ g_object_unref (agent); ++ gst_object_unref (webrtc_ice); ++} ++ ++static void ++gst_webrtc_nice_transport_class_init (GstWebRTCNiceTransportClass * klass) ++{ ++ GObjectClass *gobject_class = (GObjectClass *) klass; ++ ++ gobject_class->constructed = gst_webrtc_nice_transport_constructed; ++ gobject_class->get_property = gst_webrtc_nice_transport_get_property; ++ gobject_class->set_property = gst_webrtc_nice_transport_set_property; ++ gobject_class->finalize = gst_webrtc_nice_transport_finalize; ++ ++ g_object_class_install_property (gobject_class, ++ PROP_STREAM, ++ g_param_spec_object ("stream", ++ "WebRTC ICE Stream", "ICE stream associated with this transport", ++ GST_TYPE_WEBRTC_NICE_STREAM, ++ G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstWebRTCNiceTransport:send-buffer-size: ++ * ++ * Size of the kernel send buffer in bytes, 0=default ++ * ++ * Since: 1.20 ++ */ ++ ++ g_object_class_install_property (G_OBJECT_CLASS (klass), ++ PROP_SEND_BUFFER_SIZE, g_param_spec_int 
("send-buffer-size", ++ "Send Buffer Size", ++ "Size of the kernel send buffer in bytes, 0=default", 0, G_MAXINT, 0, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstWebRTCNiceTransport:receive-buffer-size: ++ * ++ * Size of the kernel receive buffer in bytes, 0=default ++ * ++ * Since: 1.20 ++ */ ++ ++ g_object_class_install_property (G_OBJECT_CLASS (klass), ++ PROP_RECEIVE_BUFFER_SIZE, g_param_spec_int ("receive-buffer-size", ++ "Receive Buffer Size", ++ "Size of the kernel receive buffer in bytes, 0=default", 0, G_MAXINT, ++ 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++} ++ ++static void ++gst_webrtc_nice_transport_init (GstWebRTCNiceTransport * nice) ++{ ++ nice->priv = gst_webrtc_nice_transport_get_instance_private (nice); ++} ++ ++GstWebRTCNiceTransport * ++gst_webrtc_nice_transport_new (GstWebRTCNiceStream * stream, ++ GstWebRTCICEComponent component) ++{ ++ return g_object_new (GST_TYPE_WEBRTC_NICE_TRANSPORT, "stream", stream, ++ "component", component, NULL); ++} +diff --git a/gst-libs/gst/webrtc/nice/nicetransport.h b/gst-libs/gst/webrtc/nice/nicetransport.h +new file mode 100644 +index 000000000..93263a033 +--- /dev/null ++++ b/gst-libs/gst/webrtc/nice/nicetransport.h +@@ -0,0 +1,71 @@ ++/* GStreamer ++ * Copyright (C) 2017 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++#ifndef __GST_WEBRTC_NICE_TRANSPORT_H__ ++#define __GST_WEBRTC_NICE_TRANSPORT_H__ ++ ++#include "nice.h" ++#include "gst/webrtc/icetransport.h" ++/* libnice */ ++#include ++ ++#include "nice_fwd.h" ++ ++G_BEGIN_DECLS ++ ++GST_WEBRTCNICE_API ++GType gst_webrtc_nice_transport_get_type(void); ++#define GST_TYPE_WEBRTC_NICE_TRANSPORT (gst_webrtc_nice_transport_get_type()) ++#define GST_WEBRTC_NICE_TRANSPORT(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WEBRTC_NICE_TRANSPORT,GstWebRTCNiceTransport)) ++#define GST_IS_WEBRTC_NICE_TRANSPORT(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WEBRTC_NICE_TRANSPORT)) ++#define GST_WEBRTC_NICE_TRANSPORT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_WEBRTC_NICE_TRANSPORT,GstWebRTCNiceTransportClass)) ++#define GST_IS_WEBRTC_NICE_TRANSPORT_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_NICE_TRANSPORT)) ++#define GST_WEBRTC_NICE_TRANSPORT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_NICE_TRANSPORT,GstWebRTCNiceTransportClass)) ++ ++/** ++ * GstWebRTCNiceTransport: ++ */ ++typedef struct _GstWebRTCNiceTransport GstWebRTCNiceTransport; ++typedef struct _GstWebRTCNiceTransportClass GstWebRTCNiceTransportClass; ++typedef struct _GstWebRTCNiceTransportPrivate GstWebRTCNiceTransportPrivate; ++ ++struct _GstWebRTCNiceTransport ++{ ++ GstWebRTCICETransport parent; ++ ++ GstWebRTCNiceStream *stream; ++ ++ GstWebRTCNiceTransportPrivate *priv; ++}; ++ ++struct _GstWebRTCNiceTransportClass ++{ ++ GstWebRTCICETransportClass parent_class; ++}; ++ ++GstWebRTCNiceTransport * gst_webrtc_nice_transport_new (GstWebRTCNiceStream * stream, ++ GstWebRTCICEComponent component); ++ ++void gst_webrtc_nice_transport_update_buffer_size (GstWebRTCNiceTransport * nice); ++ ++G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstWebRTCNiceTransport, gst_object_unref) ++ ++G_END_DECLS ++ ++#endif /* __GST_WEBRTC_NICE_TRANSPORT_H__ */ +diff --git a/gst-libs/gst/webrtc/rtpreceiver.c b/gst-libs/gst/webrtc/rtpreceiver.c +index 768e9876d..4a397ff37 100644 +--- a/gst-libs/gst/webrtc/rtpreceiver.c ++++ b/gst-libs/gst/webrtc/rtpreceiver.c +@@ -22,6 +22,8 @@ + * @short_description: RTCRtpReceiver object + * @title: GstWebRTCRTPReceiver + * @see_also: #GstWebRTCRTPSender, #GstWebRTCRTPTransceiver ++ * @symbols: ++ * - GstWebRTCRTPReceiver + * + * + */ +@@ -31,6 +33,7 @@ + #endif + + #include "rtpreceiver.h" ++#include "webrtc-priv.h" + + #define GST_CAT_DEFAULT gst_webrtc_rtp_receiver_debug + GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); +@@ -49,36 +52,11 @@ enum + enum + { + PROP_0, ++ PROP_TRANSPORT, + }; + + //static guint gst_webrtc_rtp_receiver_signals[LAST_SIGNAL] = { 0 }; + +-void +-gst_webrtc_rtp_receiver_set_transport (GstWebRTCRTPReceiver * receiver, +- GstWebRTCDTLSTransport * transport) +-{ +- g_return_if_fail (GST_IS_WEBRTC_RTP_RECEIVER (receiver)); +- g_return_if_fail (GST_IS_WEBRTC_DTLS_TRANSPORT (transport)); +- +- GST_OBJECT_LOCK (receiver); +- gst_object_replace ((GstObject **) & receiver->transport, +- GST_OBJECT (transport)); +- GST_OBJECT_UNLOCK (receiver); +-} +- +-void +-gst_webrtc_rtp_receiver_set_rtcp_transport (GstWebRTCRTPReceiver * receiver, +- GstWebRTCDTLSTransport * transport) +-{ +- g_return_if_fail (GST_IS_WEBRTC_RTP_RECEIVER (receiver)); +- g_return_if_fail (GST_IS_WEBRTC_DTLS_TRANSPORT (transport)); +- +- GST_OBJECT_LOCK (receiver); +- gst_object_replace ((GstObject **) & receiver->rtcp_transport, +- GST_OBJECT (transport)); +- GST_OBJECT_UNLOCK (receiver); +-} +- + static void + gst_webrtc_rtp_receiver_set_property (GObject * 
object, guint prop_id, + const GValue * value, GParamSpec * pspec) +@@ -94,7 +72,13 @@ static void + gst_webrtc_rtp_receiver_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec) + { ++ GstWebRTCRTPReceiver *receiver = GST_WEBRTC_RTP_RECEIVER (object); + switch (prop_id) { ++ case PROP_TRANSPORT: ++ GST_OBJECT_LOCK (receiver); ++ g_value_set_object (value, receiver->transport); ++ GST_OBJECT_UNLOCK (receiver); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -110,10 +94,6 @@ gst_webrtc_rtp_receiver_finalize (GObject * object) + gst_object_unref (webrtc->transport); + webrtc->transport = NULL; + +- if (webrtc->rtcp_transport) +- gst_object_unref (webrtc->rtcp_transport); +- webrtc->rtcp_transport = NULL; +- + G_OBJECT_CLASS (parent_class)->finalize (object); + } + +@@ -125,6 +105,20 @@ gst_webrtc_rtp_receiver_class_init (GstWebRTCRTPReceiverClass * klass) + gobject_class->get_property = gst_webrtc_rtp_receiver_get_property; + gobject_class->set_property = gst_webrtc_rtp_receiver_set_property; + gobject_class->finalize = gst_webrtc_rtp_receiver_finalize; ++ ++ /** ++ * GstWebRTCRTPReceiver:transport: ++ * ++ * The DTLS transport for this receiver ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_TRANSPORT, ++ g_param_spec_object ("transport", "Transport", ++ "The DTLS transport for this receiver", ++ GST_TYPE_WEBRTC_DTLS_TRANSPORT, ++ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); + } + + static void +diff --git a/gst-libs/gst/webrtc/rtpreceiver.h b/gst-libs/gst/webrtc/rtpreceiver.h +index 55a9a86fd..5f02fda81 100644 +--- a/gst-libs/gst/webrtc/rtpreceiver.h ++++ b/gst-libs/gst/webrtc/rtpreceiver.h +@@ -35,36 +35,6 @@ GType gst_webrtc_rtp_receiver_get_type(void); + #define GST_IS_WEBRTC_RTP_RECEIVER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_RTP_RECEIVER)) + #define GST_WEBRTC_RTP_RECEIVER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_RTP_RECEIVER,GstWebRTCRTPReceiverClass)) + +-/** +- * GstWebRTCRTPReceiver: +- */ +-struct _GstWebRTCRTPReceiver +-{ +- GstObject parent; +- +- /* The MediStreamTrack is represented by the stream and is output into @transport/@rtcp_transport as necessary */ +- GstWebRTCDTLSTransport *transport; +- GstWebRTCDTLSTransport *rtcp_transport; +- +- gpointer _padding[GST_PADDING]; +-}; +- +-struct _GstWebRTCRTPReceiverClass +-{ +- GstObjectClass parent_class; +- +- gpointer _padding[GST_PADDING]; +-}; +- +-GST_WEBRTC_API +-GstWebRTCRTPReceiver * gst_webrtc_rtp_receiver_new (void); +-GST_WEBRTC_API +-void gst_webrtc_rtp_receiver_set_transport (GstWebRTCRTPReceiver * receiver, +- GstWebRTCDTLSTransport * transport); +-GST_WEBRTC_API +-void gst_webrtc_rtp_receiver_set_rtcp_transport (GstWebRTCRTPReceiver * receiver, +- GstWebRTCDTLSTransport * transport); +- + G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstWebRTCRTPReceiver, gst_object_unref) + + G_END_DECLS +diff --git a/gst-libs/gst/webrtc/rtpsender.c b/gst-libs/gst/webrtc/rtpsender.c +index 3a8a9044f..2a3a96969 100644 +--- a/gst-libs/gst/webrtc/rtpsender.c ++++ b/gst-libs/gst/webrtc/rtpsender.c +@@ -22,6 +22,8 @@ + * @short_description: RTCRtpSender object + * @title: GstWebRTCRTPSender + * @see_also: #GstWebRTCRTPReceiver, #GstWebRTCRTPTransceiver ++ * @symbols: ++ * - GstWebRTCRTPSender + * + * + */ +@@ -32,6 +34,7 @@ + + #include "rtpsender.h" + #include "rtptransceiver.h" ++#include "webrtc-priv.h" + + #define GST_CAT_DEFAULT gst_webrtc_rtp_sender_debug + 
GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); +@@ -51,45 +54,44 @@ enum + enum + { + PROP_0, +- PROP_MID, +- PROP_SENDER, +- PROP_STOPPED, +- PROP_DIRECTION, ++ PROP_PRIORITY, ++ PROP_TRANSPORT, + }; + + //static guint gst_webrtc_rtp_sender_signals[LAST_SIGNAL] = { 0 }; + +-void +-gst_webrtc_rtp_sender_set_transport (GstWebRTCRTPSender * sender, +- GstWebRTCDTLSTransport * transport) +-{ +- g_return_if_fail (GST_IS_WEBRTC_RTP_SENDER (sender)); +- g_return_if_fail (GST_IS_WEBRTC_DTLS_TRANSPORT (transport)); +- +- GST_OBJECT_LOCK (sender); +- gst_object_replace ((GstObject **) & sender->transport, +- GST_OBJECT (transport)); +- GST_OBJECT_UNLOCK (sender); +-} ++/** ++ * gst_webrtc_rtp_sender_set_priority: ++ * @sender: a #GstWebRTCRTPSender ++ * @priority: The priority of this sender ++ * ++ * Sets the content of the IPv4 Type of Service (ToS), also known as DSCP ++ * (Differentiated Services Code Point). ++ * This also sets the Traffic Class field of IPv6. ++ * ++ * Since: 1.20 ++ */ + + void +-gst_webrtc_rtp_sender_set_rtcp_transport (GstWebRTCRTPSender * sender, +- GstWebRTCDTLSTransport * transport) ++gst_webrtc_rtp_sender_set_priority (GstWebRTCRTPSender * sender, ++ GstWebRTCPriorityType priority) + { +- g_return_if_fail (GST_IS_WEBRTC_RTP_SENDER (sender)); +- g_return_if_fail (GST_IS_WEBRTC_DTLS_TRANSPORT (transport)); +- + GST_OBJECT_LOCK (sender); +- gst_object_replace ((GstObject **) & sender->rtcp_transport, +- GST_OBJECT (transport)); ++ sender->priority = priority; + GST_OBJECT_UNLOCK (sender); ++ g_object_notify (G_OBJECT (sender), "priority"); + } + + static void + gst_webrtc_rtp_sender_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) + { ++ GstWebRTCRTPSender *sender = GST_WEBRTC_RTP_SENDER (object); ++ + switch (prop_id) { ++ case PROP_PRIORITY: ++ gst_webrtc_rtp_sender_set_priority (sender, g_value_get_uint (value)); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -100,7 +102,19 @@ static void + gst_webrtc_rtp_sender_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec) + { ++ GstWebRTCRTPSender *sender = GST_WEBRTC_RTP_SENDER (object); ++ + switch (prop_id) { ++ case PROP_PRIORITY: ++ GST_OBJECT_LOCK (sender); ++ g_value_set_uint (value, sender->priority); ++ GST_OBJECT_UNLOCK (sender); ++ break; ++ case PROP_TRANSPORT: ++ GST_OBJECT_LOCK (sender); ++ g_value_set_object (value, sender->transport); ++ GST_OBJECT_UNLOCK (sender); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -110,15 +124,11 @@ gst_webrtc_rtp_sender_get_property (GObject * object, guint prop_id, + static void + gst_webrtc_rtp_sender_finalize (GObject * object) + { +- GstWebRTCRTPSender *webrtc = GST_WEBRTC_RTP_SENDER (object); ++ GstWebRTCRTPSender *sender = GST_WEBRTC_RTP_SENDER (object); + +- if (webrtc->transport) +- gst_object_unref (webrtc->transport); +- webrtc->transport = NULL; +- +- if (webrtc->rtcp_transport) +- gst_object_unref (webrtc->rtcp_transport); +- webrtc->rtcp_transport = NULL; ++ if (sender->transport) ++ gst_object_unref (sender->transport); ++ sender->transport = NULL; + + G_OBJECT_CLASS (parent_class)->finalize (object); + } +@@ -131,6 +141,35 @@ gst_webrtc_rtp_sender_class_init (GstWebRTCRTPSenderClass * klass) + gobject_class->get_property = gst_webrtc_rtp_sender_get_property; + gobject_class->set_property = gst_webrtc_rtp_sender_set_property; + gobject_class->finalize = gst_webrtc_rtp_sender_finalize; ++ ++ /** ++ * 
GstWebRTCRTPSender:priority: ++ * ++ * The priority from which to set the DSCP field on packets ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_PRIORITY, ++ g_param_spec_enum ("priority", ++ "Priority", ++ "The priority from which to set the DSCP field on packets", ++ GST_TYPE_WEBRTC_PRIORITY_TYPE, GST_WEBRTC_PRIORITY_TYPE_LOW, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstWebRTCRTPSender:transport: ++ * ++ * The DTLS transport for this sender ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_TRANSPORT, ++ g_param_spec_object ("transport", "Transport", ++ "The DTLS transport for this sender", ++ GST_TYPE_WEBRTC_DTLS_TRANSPORT, ++ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); + } + + static void +diff --git a/gst-libs/gst/webrtc/rtpsender.h b/gst-libs/gst/webrtc/rtpsender.h +index bcaf93c60..b3ca9a010 100644 +--- a/gst-libs/gst/webrtc/rtpsender.h ++++ b/gst-libs/gst/webrtc/rtpsender.h +@@ -35,39 +35,9 @@ GType gst_webrtc_rtp_sender_get_type(void); + #define GST_IS_WEBRTC_RTP_SENDER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_RTP_SENDER)) + #define GST_WEBRTC_RTP_SENDER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_RTP_SENDER,GstWebRTCRTPSenderClass)) + +-/** +- * GstWebRTCRTPSender: +- */ +-struct _GstWebRTCRTPSender +-{ +- GstObject parent; +- +- /* The MediStreamTrack is represented by the stream and is output into @transport/@rtcp_transport as necessary */ +- GstWebRTCDTLSTransport *transport; +- GstWebRTCDTLSTransport *rtcp_transport; +- +- GArray *send_encodings; +- +- gpointer _padding[GST_PADDING]; +-}; +- +-struct _GstWebRTCRTPSenderClass +-{ +- GstObjectClass parent_class; +- +- gpointer _padding[GST_PADDING]; +-}; +- +-GST_WEBRTC_API +-GstWebRTCRTPSender * gst_webrtc_rtp_sender_new (void); +- + GST_WEBRTC_API +-void gst_webrtc_rtp_sender_set_transport (GstWebRTCRTPSender * sender, +- GstWebRTCDTLSTransport * transport); +-GST_WEBRTC_API +-void gst_webrtc_rtp_sender_set_rtcp_transport (GstWebRTCRTPSender * sender, +- GstWebRTCDTLSTransport * transport); +- ++void gst_webrtc_rtp_sender_set_priority (GstWebRTCRTPSender *sender, ++ GstWebRTCPriorityType priority); + + G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstWebRTCRTPSender, gst_object_unref) + +diff --git a/gst-libs/gst/webrtc/rtptransceiver.c b/gst-libs/gst/webrtc/rtptransceiver.c +index 08019462a..2db2abd4d 100644 +--- a/gst-libs/gst/webrtc/rtptransceiver.c ++++ b/gst-libs/gst/webrtc/rtptransceiver.c +@@ -22,6 +22,8 @@ + * @short_description: RTCRtpTransceiver object + * @title: GstWebRTCRTPTransceiver + * @see_also: #GstWebRTCRTPSender, #GstWebRTCRTPReceiver ++ * @symbols: ++ * - GstWebRTCRTPTransceiver + * + * + */ +@@ -32,6 +34,8 @@ + + #include "rtptransceiver.h" + ++#include "webrtc-priv.h" ++ + #define GST_CAT_DEFAULT gst_webrtc_rtp_transceiver_debug + GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); + +@@ -51,11 +55,14 @@ enum + enum + { + PROP_0, +- PROP_MID, + PROP_SENDER, + PROP_RECEIVER, + PROP_DIRECTION, + PROP_MLINE, ++ PROP_MID, ++ PROP_CURRENT_DIRECTION, ++ PROP_KIND, ++ PROP_CODEC_PREFERENCES, + PROP_STOPPED, // FIXME + }; + +@@ -78,7 +85,14 @@ gst_webrtc_rtp_transceiver_set_property (GObject * object, guint prop_id, + webrtc->mline = g_value_get_uint (value); + break; + case PROP_DIRECTION: ++ GST_OBJECT_LOCK (webrtc); + webrtc->direction = g_value_get_enum (value); ++ GST_OBJECT_UNLOCK (webrtc); ++ break; ++ case PROP_CODEC_PREFERENCES: ++ GST_OBJECT_LOCK (webrtc); ++ gst_caps_replace 
(&webrtc->codec_preferences, g_value_get_boxed (value)); ++ GST_OBJECT_UNLOCK (webrtc); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); +@@ -93,6 +107,9 @@ gst_webrtc_rtp_transceiver_get_property (GObject * object, guint prop_id, + GstWebRTCRTPTransceiver *webrtc = GST_WEBRTC_RTP_TRANSCEIVER (object); + + switch (prop_id) { ++ case PROP_MID: ++ g_value_set_string (value, webrtc->mid); ++ break; + case PROP_SENDER: + g_value_set_object (value, webrtc->sender); + break; +@@ -103,7 +120,20 @@ gst_webrtc_rtp_transceiver_get_property (GObject * object, guint prop_id, + g_value_set_uint (value, webrtc->mline); + break; + case PROP_DIRECTION: ++ GST_OBJECT_LOCK (webrtc); + g_value_set_enum (value, webrtc->direction); ++ GST_OBJECT_UNLOCK (webrtc); ++ break; ++ case PROP_CURRENT_DIRECTION: ++ g_value_set_enum (value, webrtc->current_direction); ++ break; ++ case PROP_KIND: ++ g_value_set_enum (value, webrtc->kind); ++ break; ++ case PROP_CODEC_PREFERENCES: ++ GST_OBJECT_LOCK (webrtc); ++ gst_value_set_caps (value, webrtc->codec_preferences); ++ GST_OBJECT_UNLOCK (webrtc); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); +@@ -199,6 +229,73 @@ gst_webrtc_rtp_transceiver_class_init (GstWebRTCRTPTransceiverClass * klass) + GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, + GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstWebRTCRTPTransceiver:mid: ++ * ++ * The media ID of the m-line associated with this transceiver. This ++ * association is established, when possible, whenever either a ++ * local or remote description is applied. This field is null if ++ * neither a local or remote description has been applied, or if its ++ * associated m-line is rejected by either a remote offer or any ++ * answer. ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_MID, ++ g_param_spec_string ("mid", "Media ID", ++ "The media ID of the m-line associated with this transceiver. This " ++ " association is established, when possible, whenever either a local" ++ " or remote description is applied. This field is null if neither a" ++ " local or remote description has been applied, or if its associated" ++ " m-line is rejected by either a remote offer or any answer.", ++ NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstWebRTCRTPTransceiver:current-direction: ++ * ++ * The transceiver's current directionality, or none if the ++ * transceiver is stopped or has never participated in an exchange ++ * of offers and answers. To change the transceiver's ++ * directionality, set the value of the direction property. ++ * ++ * Since: 1.20 ++ **/ ++ g_object_class_install_property (gobject_class, ++ PROP_DIRECTION, ++ g_param_spec_enum ("current-direction", "Current Direction", ++ "Transceiver current direction", ++ GST_TYPE_WEBRTC_RTP_TRANSCEIVER_DIRECTION, ++ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE, ++ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstWebRTCRTPTransceiver:kind: ++ * ++ * The kind of media this transceiver transports ++ * ++ * Since: 1.20 ++ **/ ++ g_object_class_install_property (gobject_class, ++ PROP_KIND, ++ g_param_spec_enum ("kind", "Media Kind", ++ "Kind of media this transceiver transports", ++ GST_TYPE_WEBRTC_KIND, GST_WEBRTC_KIND_UNKNOWN, ++ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstWebRTCRTPTransceiver:codec-preferences: ++ * ++ * Caps representing the codec preferences. 
++ * ++ * Since: 1.20 ++ **/ ++ g_object_class_install_property (gobject_class, ++ PROP_CODEC_PREFERENCES, ++ g_param_spec_boxed ("codec-preferences", "Codec Preferences", ++ "Caps representing the codec preferences.", ++ GST_TYPE_CAPS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + } + + static void +diff --git a/gst-libs/gst/webrtc/rtptransceiver.h b/gst-libs/gst/webrtc/rtptransceiver.h +index 4b2e6e30c..569a39a46 100644 +--- a/gst-libs/gst/webrtc/rtptransceiver.h ++++ b/gst-libs/gst/webrtc/rtptransceiver.h +@@ -22,8 +22,6 @@ + + #include + #include +-#include +-#include + + G_BEGIN_DECLS + +@@ -36,35 +34,6 @@ GType gst_webrtc_rtp_transceiver_get_type(void); + #define GST_IS_WEBRTC_RTP_TRANSCEIVER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_RTP_TRANSCEIVER)) + #define GST_WEBRTC_RTP_TRANSCEIVER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_RTP_TRANSCEIVER,GstWebRTCRTPTransceiverClass)) + +-/** +- * GstWebRTCRTPTransceiver: +- */ +-struct _GstWebRTCRTPTransceiver +-{ +- GstObject parent; +- guint mline; +- gchar *mid; +- gboolean stopped; +- +- GstWebRTCRTPSender *sender; +- GstWebRTCRTPReceiver *receiver; +- +- GstWebRTCRTPTransceiverDirection direction; +- GstWebRTCRTPTransceiverDirection current_direction; +- +- GstCaps *codec_preferences; +- +- gpointer _padding[GST_PADDING]; +-}; +- +-struct _GstWebRTCRTPTransceiverClass +-{ +- GstObjectClass parent_class; +- +- /* FIXME; reset */ +- gpointer _padding[GST_PADDING]; +-}; +- + G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstWebRTCRTPTransceiver, gst_object_unref) + + G_END_DECLS +diff --git a/gst-libs/gst/webrtc/sctptransport.c b/gst-libs/gst/webrtc/sctptransport.c +new file mode 100644 +index 000000000..4d0495a46 +--- /dev/null ++++ b/gst-libs/gst/webrtc/sctptransport.c +@@ -0,0 +1,79 @@ ++/* GStreamer ++ * Copyright (C) 2018 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++#ifdef HAVE_CONFIG_H ++# include "config.h" ++#endif ++ ++#include "sctptransport.h" ++#include "webrtc-priv.h" ++ ++G_DEFINE_ABSTRACT_TYPE (GstWebRTCSCTPTransport, gst_webrtc_sctp_transport, ++ GST_TYPE_OBJECT); ++ ++static void ++gst_webrtc_sctp_transport_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec) ++{ ++ /* all properties should by handled by the plugin class */ ++ g_assert_not_reached (); ++} ++ ++static void ++gst_webrtc_sctp_transport_class_init (GstWebRTCSCTPTransportClass * klass) ++{ ++ GObjectClass *gobject_class = (GObjectClass *) klass; ++ guint property_id_dummy = 0; ++ ++ gobject_class->get_property = gst_webrtc_sctp_transport_get_property; ++ ++ g_object_class_install_property (gobject_class, ++ ++property_id_dummy, ++ g_param_spec_object ("transport", ++ "WebRTC DTLS Transport", ++ "DTLS transport used for this SCTP transport", ++ GST_TYPE_WEBRTC_DTLS_TRANSPORT, ++ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ ++ g_object_class_install_property (gobject_class, ++ ++property_id_dummy, ++ g_param_spec_enum ("state", ++ "WebRTC SCTP Transport state", "WebRTC SCTP Transport state", ++ GST_TYPE_WEBRTC_SCTP_TRANSPORT_STATE, ++ GST_WEBRTC_SCTP_TRANSPORT_STATE_NEW, ++ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ ++ g_object_class_install_property (gobject_class, ++ ++property_id_dummy, ++ g_param_spec_uint64 ("max-message-size", ++ "Maximum message size", ++ "Maximum message size as reported by the transport", 0, G_MAXUINT64, ++ 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ ++ g_object_class_install_property (gobject_class, ++ ++property_id_dummy, ++ g_param_spec_uint ("max-channels", ++ "Maximum number of channels", "Maximum number of channels", ++ 0, G_MAXUINT16, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++} ++ ++static void ++gst_webrtc_sctp_transport_init (GstWebRTCSCTPTransport * nice) ++{ ++} +diff --git a/gst-libs/gst/webrtc/sctptransport.h b/gst-libs/gst/webrtc/sctptransport.h +new file mode 100644 +index 000000000..99a46eede +--- /dev/null ++++ b/gst-libs/gst/webrtc/sctptransport.h +@@ -0,0 +1,42 @@ ++/* GStreamer ++ * Copyright (C) 2018 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
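The sctptransport.c hunk above only installs read-only GObject properties ("transport", "state", "max-message-size", "max-channels") on the abstract GstWebRTCSCTPTransport; the concrete implementation is provided by the webrtc plugin. A minimal sketch, not part of this patch, of how application code could read those properties once it holds such a transport object (how the object is obtained from the webrtc element is an assumption left out here):

    /* Hypothetical helper: dump the read-only properties installed above. */
    static void
    dump_sctp_transport (GstWebRTCSCTPTransport * sctp)
    {
      GstWebRTCDTLSTransport *dtls = NULL;
      GstWebRTCSCTPTransportState state;
      guint64 max_message_size;
      guint max_channels;

      g_object_get (sctp,
          "transport", &dtls,
          "state", &state,
          "max-message-size", &max_message_size,
          "max-channels", &max_channels, NULL);

      g_print ("SCTP state %d, max message size %" G_GUINT64_FORMAT
          ", max channels %u\n", state, max_message_size, max_channels);
      g_clear_object (&dtls);
    }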
++ */ ++ ++#ifndef __GST_WEBRTC_SCTP_TRANSPORT_H__ ++#define __GST_WEBRTC_SCTP_TRANSPORT_H__ ++ ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++GST_WEBRTC_API ++GType gst_webrtc_sctp_transport_get_type(void); ++ ++#define GST_TYPE_WEBRTC_SCTP_TRANSPORT (gst_webrtc_sctp_transport_get_type()) ++#define GST_WEBRTC_SCTP_TRANSPORT(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WEBRTC_SCTP_TRANSPORT,GstWebRTCSCTPTransport)) ++#define GST_IS_WEBRTC_SCTP_TRANSPORT(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WEBRTC_SCTP_TRANSPORT)) ++#define GST_WEBRTC_SCTP_TRANSPORT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_WEBRTC_SCTP_TRANSPORT,GstWebRTCSCTPTransportClass)) ++#define GST_IS_WEBRTC_SCTP_TRANSPORT_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_SCTP_TRANSPORT)) ++#define GST_WEBRTC_SCTP_TRANSPORT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_SCTP_TRANSPORT,GstWebRTCSCTPTransportClass)) ++ ++G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstWebRTCSCTPTransport, gst_object_unref) ++ ++G_END_DECLS ++ ++#endif /* __GST_WEBRTC_SCTP_TRANSPORT_H__ */ +diff --git a/gst-libs/gst/webrtc/webrtc-priv.h b/gst-libs/gst/webrtc/webrtc-priv.h +new file mode 100644 +index 000000000..67676a38e +--- /dev/null ++++ b/gst-libs/gst/webrtc/webrtc-priv.h +@@ -0,0 +1,274 @@ ++/* GStreamer ++ * Copyright (C) 2017 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifndef __GST_WEBRTC_PRIV_H__ ++#define __GST_WEBRTC_PRIV_H__ ++ ++#include ++#include ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++/** ++ * GstWebRTCRTPTransceiver: ++ * @mline: the mline number this transceiver corresponds to ++ * @mid: The media ID of the m-line associated with this ++ * transceiver. This association is established, when possible, ++ * whenever either a local or remote description is applied. This ++ * field is NULL if neither a local or remote description has been ++ * applied, or if its associated m-line is rejected by either a remote ++ * offer or any answer. ++ * @stopped: Indicates whether or not sending and receiving using the paired ++ * #GstWebRTCRTPSender and #GstWebRTCRTPReceiver has been permanently disabled, ++ * either due to SDP offer/answer ++ * @sender: The #GstWebRTCRTPSender object responsible sending data to the ++ * remote peer ++ * @receiver: The #GstWebRTCRTPReceiver object responsible for receiver data from ++ * the remote peer. ++ * @direction: The transceiver's desired direction. ++ * @current_direction: The transceiver's current direction (read-only) ++ * @codec_preferences: A caps representing the codec preferences (read-only) ++ * @kind: Type of media (Since: 1.20) ++ * ++ * Mostly matches the WebRTC RTCRtpTransceiver interface. 
++ */ ++/** ++ * GstWebRTCRTPTransceiver.kind: ++ * ++ * Type of media ++ * ++ * Since: 1.20 ++ */ ++struct _GstWebRTCRTPTransceiver ++{ ++ GstObject parent; ++ guint mline; ++ gchar *mid; ++ gboolean stopped; ++ ++ GstWebRTCRTPSender *sender; ++ GstWebRTCRTPReceiver *receiver; ++ ++ GstWebRTCRTPTransceiverDirection direction; ++ GstWebRTCRTPTransceiverDirection current_direction; ++ ++ GstCaps *codec_preferences; ++ GstWebRTCKind kind; ++ ++ gpointer _padding[GST_PADDING]; ++}; ++ ++struct _GstWebRTCRTPTransceiverClass ++{ ++ GstObjectClass parent_class; ++ ++ /* FIXME; reset */ ++ gpointer _padding[GST_PADDING]; ++}; ++ ++/** ++ * GstWebRTCRTPSender: ++ * @transport: The transport for RTP packets ++ * @send_encodings: Unused ++ * @priority: The priority of the stream (Since: 1.20) ++ * ++ * An object to track the sending aspect of the stream ++ * ++ * Mostly matches the WebRTC RTCRtpSender interface. ++ */ ++/** ++ * GstWebRTCRTPSender.priority: ++ * ++ * The priority of the stream ++ * ++ * Since: 1.20 ++ */ ++struct _GstWebRTCRTPSender ++{ ++ GstObject parent; ++ ++ /* The MediStreamTrack is represented by the stream and is output into @transport as necessary */ ++ GstWebRTCDTLSTransport *transport; ++ ++ GArray *send_encodings; ++ GstWebRTCPriorityType priority; ++ ++ gpointer _padding[GST_PADDING]; ++}; ++ ++struct _GstWebRTCRTPSenderClass ++{ ++ GstObjectClass parent_class; ++ ++ gpointer _padding[GST_PADDING]; ++}; ++ ++GST_WEBRTC_API ++GstWebRTCRTPSender * gst_webrtc_rtp_sender_new (void); ++ ++/** ++ * GstWebRTCRTPReceiver: ++ * @transport: The transport for RTP packets ++ * ++ * An object to track the receiving aspect of the stream ++ * ++ * Mostly matches the WebRTC RTCRtpReceiver interface. ++ */ ++struct _GstWebRTCRTPReceiver ++{ ++ GstObject parent; ++ ++ /* The MediStreamTrack is represented by the stream and is output into @transport as necessary */ ++ GstWebRTCDTLSTransport *transport; ++ ++ gpointer _padding[GST_PADDING]; ++}; ++ ++struct _GstWebRTCRTPReceiverClass ++{ ++ GstObjectClass parent_class; ++ ++ gpointer _padding[GST_PADDING]; ++}; ++ ++GST_WEBRTC_API ++GstWebRTCRTPReceiver * gst_webrtc_rtp_receiver_new (void); ++ ++/** ++ * GstWebRTCDTLSTransport: ++ */ ++struct _GstWebRTCDTLSTransport ++{ ++ GstObject parent; ++ ++ GstWebRTCICETransport *transport; ++ GstWebRTCDTLSTransportState state; ++ ++ gboolean client; ++ guint session_id; ++ GstElement *dtlssrtpenc; ++ GstElement *dtlssrtpdec; ++ ++ gpointer _padding[GST_PADDING]; ++}; ++ ++struct _GstWebRTCDTLSTransportClass ++{ ++ GstObjectClass parent_class; ++ ++ gpointer _padding[GST_PADDING]; ++}; ++ ++GST_WEBRTC_API ++GstWebRTCDTLSTransport * gst_webrtc_dtls_transport_new (guint session_id); ++ ++GST_WEBRTC_API ++void gst_webrtc_dtls_transport_set_transport (GstWebRTCDTLSTransport * transport, ++ GstWebRTCICETransport * ice); ++ ++#define GST_WEBRTC_DATA_CHANNEL_LOCK(channel) g_mutex_lock(&((GstWebRTCDataChannel *)(channel))->lock) ++#define GST_WEBRTC_DATA_CHANNEL_UNLOCK(channel) g_mutex_unlock(&((GstWebRTCDataChannel *)(channel))->lock) ++ ++/** ++ * GstWebRTCDataChannel: ++ * ++ * Since: 1.18 ++ */ ++struct _GstWebRTCDataChannel ++{ ++ GObject parent; ++ ++ GMutex lock; ++ ++ gchar *label; ++ gboolean ordered; ++ guint max_packet_lifetime; ++ guint max_retransmits; ++ gchar *protocol; ++ gboolean negotiated; ++ gint id; ++ GstWebRTCPriorityType priority; ++ GstWebRTCDataChannelState ready_state; ++ guint64 buffered_amount; ++ guint64 buffered_amount_low_threshold; ++ ++ gpointer _padding[GST_PADDING]; ++}; 
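With the data-channel fields now kept in this private header, applications are expected to go through the GObject properties and signals of the public GstWebRTCDataChannel rather than the struct members listed above. A hedged sketch of that usage, assuming the conventional "label"/"ready-state" properties and the "on-message-string" signal exposed by the library (none of which are defined by this hunk itself):

    /* Hypothetical callback for incoming text messages. */
    static void
    on_message_string (GstWebRTCDataChannel * channel, gchar * str,
        gpointer user_data)
    {
      g_print ("received: %s\n", str);
    }

    /* Inspect a channel, e.g. one delivered by the webrtc element. */
    static void
    inspect_channel (GstWebRTCDataChannel * channel)
    {
      gchar *label = NULL;
      GstWebRTCDataChannelState state;

      g_object_get (channel, "label", &label, "ready-state", &state, NULL);
      g_print ("channel %s, state %d\n", label, state);
      g_free (label);

      g_signal_connect (channel, "on-message-string",
          G_CALLBACK (on_message_string), NULL);
    }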
++ ++/** ++ * GstWebRTCDataChannelClass: ++ * ++ * Since: 1.18 ++ */ ++struct _GstWebRTCDataChannelClass ++{ ++ GObjectClass parent_class; ++ ++ gboolean (*send_data) (GstWebRTCDataChannel * channel, GBytes *data, GError ** error); ++ gboolean (*send_string) (GstWebRTCDataChannel * channel, const gchar *str, GError ** error); ++ void (*close) (GstWebRTCDataChannel * channel); ++ ++ gpointer _padding[GST_PADDING]; ++}; ++ ++GST_WEBRTC_API ++void gst_webrtc_data_channel_on_open (GstWebRTCDataChannel * channel); ++ ++GST_WEBRTC_API ++void gst_webrtc_data_channel_on_close (GstWebRTCDataChannel * channel); ++ ++GST_WEBRTC_API ++void gst_webrtc_data_channel_on_error (GstWebRTCDataChannel * channel, GError * error); ++ ++GST_WEBRTC_API ++void gst_webrtc_data_channel_on_message_data (GstWebRTCDataChannel * channel, GBytes * data); ++ ++GST_WEBRTC_API ++void gst_webrtc_data_channel_on_message_string (GstWebRTCDataChannel * channel, const gchar * str); ++ ++GST_WEBRTC_API ++void gst_webrtc_data_channel_on_buffered_amount_low (GstWebRTCDataChannel * channel); ++ ++ ++/** ++ * GstWebRTCSCTPTransport: ++ * ++ * Since: 1.20 ++ */ ++struct _GstWebRTCSCTPTransport ++{ ++ GstObject parent; ++}; ++ ++/** ++ * GstWebRTCSCTPTransportClass: ++ * ++ * Since: 1.20 ++ */ ++struct _GstWebRTCSCTPTransportClass ++{ ++ GstObjectClass parent_class; ++}; ++ ++ ++G_END_DECLS ++ ++#endif /* __GST_WEBRTC_PRIV_H__ */ +diff --git a/gst-libs/gst/webrtc/webrtc.c b/gst-libs/gst/webrtc/webrtc.c +new file mode 100644 +index 000000000..8040e3832 +--- /dev/null ++++ b/gst-libs/gst/webrtc/webrtc.c +@@ -0,0 +1,35 @@ ++/* GStreamer ++ * Copyright (C) 2017 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include ++ ++/** ++ * gst_webrtc_error_quark: ++ * ++ * Since: 1.20 ++ */ ++GQuark ++gst_webrtc_error_quark (void) ++{ ++ return g_quark_from_static_string ("gst-webrtc-error-quark"); ++} +diff --git a/gst-libs/gst/webrtc/webrtc.h b/gst-libs/gst/webrtc/webrtc.h +index e68a9dba8..a467bdb92 100644 +--- a/gst-libs/gst/webrtc/webrtc.h ++++ b/gst-libs/gst/webrtc/webrtc.h +@@ -24,6 +24,8 @@ + #include + #include + #include ++#include ++#include + #include + #include + #include +diff --git a/gst-libs/gst/webrtc/webrtc_fwd.h b/gst-libs/gst/webrtc/webrtc_fwd.h +index 5c727d234..d3556400a 100644 +--- a/gst-libs/gst/webrtc/webrtc_fwd.h ++++ b/gst-libs/gst/webrtc/webrtc_fwd.h +@@ -40,28 +40,98 @@ + # endif + #endif + ++/** ++ * GST_WEBRTC_DEPRECATED: (attributes doc.skip=true) ++ */ ++/** ++ * GST_WEBRTC_DEPRECATED_FOR: (attributes doc.skip=true) ++ */ ++#ifndef GST_DISABLE_DEPRECATED ++#define GST_WEBRTC_DEPRECATED GST_WEBRTC_API ++#define GST_WEBRTC_DEPRECATED_FOR(f) GST_WEBRTC_API ++#else ++#define GST_WEBRTC_DEPRECATED G_DEPRECATED GST_WEBRTC_API ++#define GST_WEBRTC_DEPRECATED_FOR(f) G_DEPRECATED_FOR(f) GST_WEBRTC_API ++#endif ++ + #include + ++/** ++ * GstWebRTCDTLSTransport: ++ */ + typedef struct _GstWebRTCDTLSTransport GstWebRTCDTLSTransport; + typedef struct _GstWebRTCDTLSTransportClass GstWebRTCDTLSTransportClass; + ++/** ++ * GstWebRTCICE: ++ * ++ * Since: 1.22 ++ */ ++typedef struct _GstWebRTCICE GstWebRTCICE; ++typedef struct _GstWebRTCICEClass GstWebRTCICEClass; ++ ++/** ++ * GstWebRTCICECandidateStats: ++ * ++ * Since: 1.22 ++ */ ++typedef struct _GstWebRTCICECandidateStats GstWebRTCICECandidateStats; ++ ++/** ++ * GstWebRTCICEStream: ++ * ++ * Since: 1.22 ++ */ ++typedef struct _GstWebRTCICEStream GstWebRTCICEStream; ++typedef struct _GstWebRTCICEStreamClass GstWebRTCICEStreamClass; ++ ++/** ++ * GstWebRTCICETransport: ++ */ + typedef struct _GstWebRTCICETransport GstWebRTCICETransport; + typedef struct _GstWebRTCICETransportClass GstWebRTCICETransportClass; + ++/** ++ * GstWebRTCRTPReceiver: ++ * ++ * An object to track the receiving aspect of the stream ++ * ++ * Mostly matches the WebRTC RTCRtpReceiver interface. ++ */ + typedef struct _GstWebRTCRTPReceiver GstWebRTCRTPReceiver; + typedef struct _GstWebRTCRTPReceiverClass GstWebRTCRTPReceiverClass; + ++/** ++ * GstWebRTCRTPSender: ++ * ++ * An object to track the sending aspect of the stream ++ * ++ * Mostly matches the WebRTC RTCRtpSender interface. ++ */ + typedef struct _GstWebRTCRTPSender GstWebRTCRTPSender; + typedef struct _GstWebRTCRTPSenderClass GstWebRTCRTPSenderClass; + + typedef struct _GstWebRTCSessionDescription GstWebRTCSessionDescription; + ++/** ++ * GstWebRTCRTPTransceiver: ++ * ++ * Mostly matches the WebRTC RTCRtpTransceiver interface. 
++ */ + typedef struct _GstWebRTCRTPTransceiver GstWebRTCRTPTransceiver; + typedef struct _GstWebRTCRTPTransceiverClass GstWebRTCRTPTransceiverClass; + ++/** ++ * GstWebRTCDataChannel: ++ * ++ * Since: 1.18 ++ */ + typedef struct _GstWebRTCDataChannel GstWebRTCDataChannel; + typedef struct _GstWebRTCDataChannelClass GstWebRTCDataChannelClass; + ++typedef struct _GstWebRTCSCTPTransport GstWebRTCSCTPTransport; ++typedef struct _GstWebRTCSCTPTransportClass GstWebRTCSCTPTransportClass; ++ + /** + * GstWebRTCDTLSTransportState: + * @GST_WEBRTC_DTLS_TRANSPORT_STATE_NEW: new +@@ -238,7 +308,7 @@ typedef enum /*< underscore_name=gst_webrtc_dtls_setup >*/ + * @GST_WEBRTC_STATS_REMOTE_INBOUND_RTP: remote-inbound-rtp + * @GST_WEBRTC_STATS_REMOTE_OUTBOUND_RTP: remote-outbound-rtp + * @GST_WEBRTC_STATS_CSRC: csrc +- * @GST_WEBRTC_STATS_PEER_CONNECTION: peer-connectiion ++ * @GST_WEBRTC_STATS_PEER_CONNECTION: peer-connection + * @GST_WEBRTC_STATS_DATA_CHANNEL: data-channel + * @GST_WEBRTC_STATS_STREAM: stream + * @GST_WEBRTC_STATS_TRANSPORT: transport +@@ -246,6 +316,8 @@ typedef enum /*< underscore_name=gst_webrtc_dtls_setup >*/ + * @GST_WEBRTC_STATS_LOCAL_CANDIDATE: local-candidate + * @GST_WEBRTC_STATS_REMOTE_CANDIDATE: remote-candidate + * @GST_WEBRTC_STATS_CERTIFICATE: certificate ++ * ++ * See + */ + typedef enum /*< underscore_name=gst_webrtc_stats_type >*/ + { +@@ -280,10 +352,10 @@ typedef enum /*< underscore_name=gst_webrtc_fec_type >*/ + + /** + * GstWebRTCSCTPTransportState: +- * GST_WEBRTC_SCTP_TRANSPORT_STATE_NEW: new +- * GST_WEBRTC_SCTP_TRANSPORT_STATE_CONNECTING: connecting +- * GST_WEBRTC_SCTP_TRANSPORT_STATE_CONNECTED: connected +- * GST_WEBRTC_SCTP_TRANSPORT_STATE_CLOSED: closed ++ * @GST_WEBRTC_SCTP_TRANSPORT_STATE_NEW: new ++ * @GST_WEBRTC_SCTP_TRANSPORT_STATE_CONNECTING: connecting ++ * @GST_WEBRTC_SCTP_TRANSPORT_STATE_CONNECTED: connected ++ * @GST_WEBRTC_SCTP_TRANSPORT_STATE_CLOSED: closed + * + * See + * +@@ -299,10 +371,10 @@ typedef enum /*< underscore_name=gst_webrtc_sctp_transport_state >*/ + + /** + * GstWebRTCPriorityType: +- * GST_WEBRTC_PRIORITY_TYPE_VERY_LOW: very-low +- * GST_WEBRTC_PRIORITY_TYPE_LOW: low +- * GST_WEBRTC_PRIORITY_TYPE_MEDIUM: medium +- * GST_WEBRTC_PRIORITY_TYPE_HIGH: high ++ * @GST_WEBRTC_PRIORITY_TYPE_VERY_LOW: very-low ++ * @GST_WEBRTC_PRIORITY_TYPE_LOW: low ++ * @GST_WEBRTC_PRIORITY_TYPE_MEDIUM: medium ++ * @GST_WEBRTC_PRIORITY_TYPE_HIGH: high + * + * See + * +@@ -318,11 +390,10 @@ typedef enum /*< underscore_name=gst_webrtc_priority_type >*/ + + /** + * GstWebRTCDataChannelState: +- * GST_WEBRTC_DATA_CHANNEL_STATE_NEW: new +- * GST_WEBRTC_DATA_CHANNEL_STATE_CONNECTING: connection +- * GST_WEBRTC_DATA_CHANNEL_STATE_OPEN: open +- * GST_WEBRTC_DATA_CHANNEL_STATE_CLOSING: closing +- * GST_WEBRTC_DATA_CHANNEL_STATE_CLOSED: closed ++ * @GST_WEBRTC_DATA_CHANNEL_STATE_CONNECTING: connecting ++ * @GST_WEBRTC_DATA_CHANNEL_STATE_OPEN: open ++ * @GST_WEBRTC_DATA_CHANNEL_STATE_CLOSING: closing ++ * @GST_WEBRTC_DATA_CHANNEL_STATE_CLOSED: closed + * + * See + * +@@ -330,8 +401,7 @@ typedef enum /*< underscore_name=gst_webrtc_priority_type >*/ + */ + typedef enum /*< underscore_name=gst_webrtc_data_channel_state >*/ + { +- GST_WEBRTC_DATA_CHANNEL_STATE_NEW, +- GST_WEBRTC_DATA_CHANNEL_STATE_CONNECTING, ++ GST_WEBRTC_DATA_CHANNEL_STATE_CONNECTING = 1, + GST_WEBRTC_DATA_CHANNEL_STATE_OPEN, + GST_WEBRTC_DATA_CHANNEL_STATE_CLOSING, + GST_WEBRTC_DATA_CHANNEL_STATE_CLOSED, +@@ -339,10 +409,10 @@ typedef enum /*< underscore_name=gst_webrtc_data_channel_state >*/ + + /** 
+ * GstWebRTCBundlePolicy: +- * GST_WEBRTC_BUNDLE_POLICY_NONE: none +- * GST_WEBRTC_BUNDLE_POLICY_BALANCED: balanced +- * GST_WEBRTC_BUNDLE_POLICY_MAX_COMPAT: max-compat +- * GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE: max-bundle ++ * @GST_WEBRTC_BUNDLE_POLICY_NONE: none ++ * @GST_WEBRTC_BUNDLE_POLICY_BALANCED: balanced ++ * @GST_WEBRTC_BUNDLE_POLICY_MAX_COMPAT: max-compat ++ * @GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE: max-bundle + * + * See https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24#section-4.1.1 + * for more information. +@@ -359,8 +429,8 @@ typedef enum /**/ + + /** + * GstWebRTCICETransportPolicy: +- * GST_WEBRTC_ICE_TRANSPORT_POLICY_ALL: all +- * GST_WEBRTC_ICE_TRANSPORT_POLICY_RELAY: relay ++ * @GST_WEBRTC_ICE_TRANSPORT_POLICY_ALL: all ++ * @GST_WEBRTC_ICE_TRANSPORT_POLICY_RELAY: relay + * + * See https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24#section-4.1.1 + * for more information. +@@ -373,4 +443,77 @@ typedef enum /**/ + GST_WEBRTC_ICE_TRANSPORT_POLICY_RELAY, + } GstWebRTCICETransportPolicy; + ++/** ++ * GstWebRTCKind: ++ * @GST_WEBRTC_KIND_UNKNOWN: Kind has not yet been set ++ * @GST_WEBRTC_KIND_AUDIO: Kind is audio ++ * @GST_WEBRTC_KIND_VIDEO: Kind is audio ++ * ++ * https://w3c.github.io/mediacapture-main/#dom-mediastreamtrack-kind ++ * ++ * Since: 1.20 ++ */ ++typedef enum /**/ ++{ ++ GST_WEBRTC_KIND_UNKNOWN, ++ GST_WEBRTC_KIND_AUDIO, ++ GST_WEBRTC_KIND_VIDEO, ++} GstWebRTCKind; ++ ++ ++GST_WEBRTC_API ++GQuark gst_webrtc_error_quark (void); ++ ++/** ++ * GST_WEBRTC_ERROR: ++ * ++ * Since: 1.20 ++ */ ++#define GST_WEBRTC_ERROR gst_webrtc_error_quark () ++ ++/** ++ * GstWebRTCError: ++ * @GST_WEBRTC_ERROR_DATA_CHANNEL_FAILURE: data-channel-failure ++ * @GST_WEBRTC_ERROR_DTLS_FAILURE: dtls-failure ++ * @GST_WEBRTC_ERROR_FINGERPRINT_FAILURE: fingerprint-failure ++ * @GST_WEBRTC_ERROR_SCTP_FAILURE: sctp-failure ++ * @GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR: sdp-syntax-error ++ * @GST_WEBRTC_ERROR_HARDWARE_ENCODER_NOT_AVAILABLE: hardware-encoder-not-available ++ * @GST_WEBRTC_ERROR_ENCODER_ERROR: encoder-error ++ * @GST_WEBRTC_ERROR_INVALID_STATE: invalid-state (part of WebIDL specification) ++ * @GST_WEBRTC_ERROR_INTERNAL_FAILURE: GStreamer-specific failure, not matching any other value from the specification ++ * ++ * See for more information. 
++ * ++ * Since: 1.20 ++ */ ++/** ++ * GST_WEBRTC_ERROR_INVALID_MODIFICATION: ++ * ++ * invalid-modification (part of WebIDL specification) ++ * ++ * Since: 1.22 ++ */ ++/** ++ * GST_WEBRTC_ERROR_TYPE_ERROR: ++ * ++ * type-error (maps to JavaScript TypeError) ++ * ++ * Since: 1.22 ++ */ ++typedef enum /**/ ++{ ++ GST_WEBRTC_ERROR_DATA_CHANNEL_FAILURE, ++ GST_WEBRTC_ERROR_DTLS_FAILURE, ++ GST_WEBRTC_ERROR_FINGERPRINT_FAILURE, ++ GST_WEBRTC_ERROR_SCTP_FAILURE, ++ GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ GST_WEBRTC_ERROR_HARDWARE_ENCODER_NOT_AVAILABLE, ++ GST_WEBRTC_ERROR_ENCODER_ERROR, ++ GST_WEBRTC_ERROR_INVALID_STATE, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ GST_WEBRTC_ERROR_INVALID_MODIFICATION, ++ GST_WEBRTC_ERROR_TYPE_ERROR, ++} GstWebRTCError; ++ + #endif /* __GST_WEBRTC_FWD_H__ */ +diff --git a/gst/videoparsers/gstav1parse.c b/gst/videoparsers/gstav1parse.c +new file mode 100644 +index 000000000..9eaa1f47d +--- /dev/null ++++ b/gst/videoparsers/gstav1parse.c +@@ -0,0 +1,2135 @@ ++/* GStreamer ++ * Copyright (C) 2020 He Junyan ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++/* ++ * SECTION:element-av1parse ++ * @title: av1parse ++ * @short_description: An AV1 stream parse. ++ * ++ * The minimal unit should be the BYTE. ++ * There are four types of AV1 alignment in the AV1 stream. ++ * ++ * alignment: byte, obu, frame, tu ++ * ++ * 1. Aligned to byte. The basic and default one for input. ++ * 2. Aligned to obu(Open Bitstream Units). ++ * 3. Aligned to frame. The default one for output. This ensures that ++ * each buffer contains only one frame or frame header with the ++ * show_existing flag for the base or sub layer. It is useful for ++ * the decoder. ++ * 4. Aligned to tu(Temporal Unit). A temporal unit consists of all the ++ * OBUs that are associated with a specific, distinct time instant. ++ * When scalability is disabled, it contains just exact one showing ++ * frame(may contain several unshowing frames). When scalability is ++ * enabled, it contains frames depending on the layer number. It should ++ * begin with a temporal delimiter obu. It may be useful for mux/demux ++ * to index the data of some timestamp. ++ * ++ * The annex B define a special format for the temporal unit. The size of ++ * each temporal unit is extract out to the header of the buffer, and no ++ * size field inside the each obu. There are two stream formats: ++ * ++ * stream-format: obu-stream, annexb ++ * ++ * 1. obu-stream. The basic and default one. ++ * 2. annexb. A special stream of temporal unit. It also implies that the ++ * alignment should be TU. ++ * ++ * This AV1 parse implements the conversion between the alignments and the ++ * stream-formats. If the input and output have the same alignment and the ++ * same stream-format, it will check and bypass the data. 
++ * ++ * ## Example launch line to generate annex B format AV1 stream: ++ * ``` ++ * gst-launch-1.0 filesrc location=sample.av1 ! ivfparse ! av1parse ! \ ++ * video/x-av1,alignment=\(string\)tu,stream-format=\(string\)annexb ! \ ++ * filesink location=matroskamux ! filesink location=trans.mkv ++ * ``` ++ * ++ * Since: 1.20 ++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include ++#include ++#include ++#include ++#include "gstvideoparserselements.h" ++#include "gstav1parse.h" ++ ++#include ++ ++#define GST_AV1_MAX_LEB_128_SIZE 8 ++ ++GST_DEBUG_CATEGORY (av1_parse_debug); ++#define GST_CAT_DEFAULT av1_parse_debug ++ ++/* We combine the stream format and the alignment ++ together. When stream format is annexb, the ++ alignment must be TU. */ ++typedef enum ++{ ++ GST_AV1_PARSE_ALIGN_ERROR = -1, ++ GST_AV1_PARSE_ALIGN_NONE = 0, ++ GST_AV1_PARSE_ALIGN_BYTE, ++ GST_AV1_PARSE_ALIGN_OBU, ++ GST_AV1_PARSE_ALIGN_FRAME, ++ GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT, ++ GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B, ++} GstAV1ParseAligment; ++ ++struct _GstAV1Parse ++{ ++ GstBaseParse parent; ++ ++ gint width; ++ gint height; ++ gint subsampling_x; ++ gint subsampling_y; ++ gboolean mono_chrome; ++ guint8 bit_depth; ++ gchar *colorimetry; ++ GstAV1Profile profile; ++ ++ GstAV1ParseAligment in_align; ++ gboolean detect_annex_b; ++ GstAV1ParseAligment align; ++ ++ GstAV1Parser *parser; ++ GstAdapter *cache_out; ++ guint last_parsed_offset; ++ GstAdapter *frame_cache; ++ guint highest_spatial_id; ++ gint last_shown_frame_temporal_id; ++ gint last_shown_frame_spatial_id; ++ gboolean within_one_frame; ++ gboolean update_caps; ++ gboolean discont; ++ gboolean header; ++ gboolean keyframe; ++ gboolean show_frame; ++ ++ GstClockTime buffer_pts; ++ GstClockTime buffer_dts; ++ GstClockTime buffer_duration; ++}; ++ ++static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink", ++ GST_PAD_SINK, ++ GST_PAD_ALWAYS, ++ GST_STATIC_CAPS ("video/x-av1")); ++ ++static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src", ++ GST_PAD_SRC, ++ GST_PAD_ALWAYS, ++ GST_STATIC_CAPS ("video/x-av1, parsed = (boolean) true, " ++ "stream-format=(string) { obu-stream, annexb }, " ++ "alignment=(string) { obu, tu, frame }")); ++ ++#define parent_class gst_av1_parse_parent_class ++G_DEFINE_TYPE (GstAV1Parse, gst_av1_parse, GST_TYPE_BASE_PARSE); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (av1parse, "av1parse", GST_RANK_SECONDARY, ++ GST_TYPE_AV1_PARSE, videoparsers_element_init (plugin)); ++ ++static void ++remove_fields (GstCaps * caps, gboolean all) ++{ ++ guint i, n; ++ ++ n = gst_caps_get_size (caps); ++ for (i = 0; i < n; i++) { ++ GstStructure *s = gst_caps_get_structure (caps, i); ++ ++ if (all) { ++ gst_structure_remove_field (s, "alignment"); ++ gst_structure_remove_field (s, "stream-format"); ++ } ++ gst_structure_remove_field (s, "parsed"); ++ } ++} ++ ++static const gchar * ++_obu_name (GstAV1OBUType type) ++{ ++ switch (type) { ++ case GST_AV1_OBU_SEQUENCE_HEADER: ++ return "sequence header"; ++ case GST_AV1_OBU_TEMPORAL_DELIMITER: ++ return "temporal delimiter"; ++ case GST_AV1_OBU_FRAME_HEADER: ++ return "frame header"; ++ case GST_AV1_OBU_TILE_GROUP: ++ return "tile group"; ++ case GST_AV1_OBU_METADATA: ++ return "metadata"; ++ case GST_AV1_OBU_FRAME: ++ return "frame"; ++ case GST_AV1_OBU_REDUNDANT_FRAME_HEADER: ++ return "redundant frame header"; ++ case GST_AV1_OBU_TILE_LIST: ++ return "tile list"; ++ case GST_AV1_OBU_PADDING: ++ return "padding"; ++ default: ++ return "unknown"; ++ 
} ++ ++ return NULL; ++} ++ ++static guint32 ++_read_leb128 (guint8 * data, GstAV1ParserResult * retval, guint32 * comsumed) ++{ ++ guint8 leb128_byte = 0; ++ guint64 value = 0; ++ gint i; ++ gboolean result; ++ GstBitReader br; ++ guint32 cur_pos; ++ ++ gst_bit_reader_init (&br, data, 8); ++ ++ cur_pos = gst_bit_reader_get_pos (&br); ++ for (i = 0; i < 8; i++) { ++ leb128_byte = 0; ++ result = gst_bit_reader_get_bits_uint8 (&br, &leb128_byte, 8); ++ if (result == FALSE) { ++ *retval = GST_AV1_PARSER_BITSTREAM_ERROR; ++ return 0; ++ } ++ ++ value |= (((gint) leb128_byte & 0x7f) << (i * 7)); ++ if (!(leb128_byte & 0x80)) ++ break; ++ } ++ ++ *comsumed = (gst_bit_reader_get_pos (&br) - cur_pos) / 8; ++ /* check for bitstream conformance see chapter4.10.5 */ ++ if (value < G_MAXUINT32) { ++ *retval = GST_AV1_PARSER_OK; ++ return (guint32) value; ++ } else { ++ GST_WARNING ("invalid leb128"); ++ *retval = GST_AV1_PARSER_BITSTREAM_ERROR; ++ return 0; ++ } ++} ++ ++static gsize ++_leb_size_in_bytes (guint64 value) ++{ ++ gsize size = 0; ++ do { ++ ++size; ++ } while ((value >>= 7) != 0); ++ ++ return size; ++} ++ ++static gboolean ++_write_leb128 (guint8 * data, guint * len, guint64 value) ++{ ++ guint leb_size = _leb_size_in_bytes (value); ++ guint i; ++ ++ if (value > G_MAXUINT32 || leb_size > GST_AV1_MAX_LEB_128_SIZE) ++ return FALSE; ++ ++ for (i = 0; i < leb_size; ++i) { ++ guint8 byte = value & 0x7f; ++ value >>= 7; ++ ++ /* Signal that more bytes follow. */ ++ if (value != 0) ++ byte |= 0x80; ++ ++ *(data + i) = byte; ++ } ++ ++ *len = leb_size; ++ return TRUE; ++} ++ ++static gboolean gst_av1_parse_start (GstBaseParse * parse); ++static gboolean gst_av1_parse_stop (GstBaseParse * parse); ++static GstFlowReturn gst_av1_parse_handle_frame (GstBaseParse * parse, ++ GstBaseParseFrame * frame, gint * skipsize); ++static gboolean gst_av1_parse_set_sink_caps (GstBaseParse * parse, ++ GstCaps * caps); ++static GstCaps *gst_av1_parse_get_sink_caps (GstBaseParse * parse, ++ GstCaps * filter); ++static GstFlowReturn gst_av1_parse_pre_push_frame (GstBaseParse * parse, ++ GstBaseParseFrame * frame); ++ ++/* Clear the parse state related to data kind OBUs. 
*/ ++static void ++gst_av1_parse_reset_obu_data_state (GstAV1Parse * self) ++{ ++ self->last_shown_frame_temporal_id = -1; ++ self->last_shown_frame_spatial_id = -1; ++ self->within_one_frame = FALSE; ++} ++ ++static void ++gst_av1_parse_reset_tu_timestamp (GstAV1Parse * self) ++{ ++ self->buffer_pts = GST_CLOCK_TIME_NONE; ++ self->buffer_dts = GST_CLOCK_TIME_NONE; ++ self->buffer_duration = GST_CLOCK_TIME_NONE; ++} ++ ++static void ++gst_av1_parse_reset (GstAV1Parse * self) ++{ ++ self->width = 0; ++ self->height = 0; ++ self->subsampling_x = -1; ++ self->subsampling_y = -1; ++ self->mono_chrome = FALSE; ++ self->profile = GST_AV1_PROFILE_UNDEFINED; ++ self->bit_depth = 0; ++ self->align = GST_AV1_PARSE_ALIGN_NONE; ++ self->in_align = GST_AV1_PARSE_ALIGN_NONE; ++ self->detect_annex_b = FALSE; ++ self->discont = TRUE; ++ self->header = FALSE; ++ self->keyframe = FALSE; ++ self->show_frame = FALSE; ++ self->last_parsed_offset = 0; ++ self->highest_spatial_id = 0; ++ gst_av1_parse_reset_obu_data_state (self); ++ g_clear_pointer (&self->colorimetry, g_free); ++ g_clear_pointer (&self->parser, gst_av1_parser_free); ++ gst_adapter_clear (self->cache_out); ++ gst_adapter_clear (self->frame_cache); ++ gst_av1_parse_reset_tu_timestamp (self); ++} ++ ++static void ++gst_av1_parse_init (GstAV1Parse * self) ++{ ++ gst_base_parse_set_pts_interpolation (GST_BASE_PARSE (self), FALSE); ++ gst_base_parse_set_infer_ts (GST_BASE_PARSE (self), FALSE); ++ ++ GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (self)); ++ GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (self)); ++ ++ self->cache_out = gst_adapter_new (); ++ self->frame_cache = gst_adapter_new (); ++} ++ ++static void ++gst_av1_parse_finalize (GObject * object) ++{ ++ GstAV1Parse *self = GST_AV1_PARSE (object); ++ ++ gst_av1_parse_reset (self); ++ g_object_unref (self->cache_out); ++ g_object_unref (self->frame_cache); ++ ++ G_OBJECT_CLASS (parent_class)->finalize (object); ++} ++ ++static void ++gst_av1_parse_class_init (GstAV1ParseClass * klass) ++{ ++ GObjectClass *gobject_class = (GObjectClass *) klass; ++ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass); ++ GstElementClass *element_class = GST_ELEMENT_CLASS (klass); ++ ++ gobject_class->finalize = gst_av1_parse_finalize; ++ parse_class->start = GST_DEBUG_FUNCPTR (gst_av1_parse_start); ++ parse_class->stop = GST_DEBUG_FUNCPTR (gst_av1_parse_stop); ++ parse_class->handle_frame = GST_DEBUG_FUNCPTR (gst_av1_parse_handle_frame); ++ parse_class->pre_push_frame = ++ GST_DEBUG_FUNCPTR (gst_av1_parse_pre_push_frame); ++ parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_av1_parse_set_sink_caps); ++ parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_av1_parse_get_sink_caps); ++ ++ gst_element_class_add_static_pad_template (element_class, &srctemplate); ++ gst_element_class_add_static_pad_template (element_class, &sinktemplate); ++ ++ gst_element_class_set_static_metadata (element_class, "AV1 parser", ++ "Codec/Parser/Converter/Video", ++ "Parses AV1 streams", "He Junyan "); ++ ++ GST_DEBUG_CATEGORY_INIT (av1_parse_debug, "av1parse", 0, "av1 parser"); ++} ++ ++static gboolean ++gst_av1_parse_start (GstBaseParse * parse) ++{ ++ GstAV1Parse *self = GST_AV1_PARSE (parse); ++ ++ GST_DEBUG_OBJECT (self, "start"); ++ ++ gst_av1_parse_reset (self); ++ self->parser = gst_av1_parser_new (); ++ ++ /* At least the OBU header. 
*/ ++ gst_base_parse_set_min_frame_size (parse, 1); ++ ++ return TRUE; ++} ++ ++static gboolean ++gst_av1_parse_stop (GstBaseParse * parse) ++{ ++ GstAV1Parse *self = GST_AV1_PARSE (parse); ++ ++ GST_DEBUG_OBJECT (self, "stop"); ++ g_clear_pointer (&self->parser, gst_av1_parser_free); ++ ++ return TRUE; ++} ++ ++static const gchar * ++gst_av1_parse_profile_to_string (GstAV1Profile profile) ++{ ++ switch (profile) { ++ case GST_AV1_PROFILE_0: ++ return "main"; ++ case GST_AV1_PROFILE_1: ++ return "high"; ++ case GST_AV1_PROFILE_2: ++ return "professional"; ++ default: ++ break; ++ } ++ ++ return NULL; ++} ++ ++static GstAV1Profile ++gst_av1_parse_profile_from_string (const gchar * profile) ++{ ++ if (!profile) ++ return GST_AV1_PROFILE_UNDEFINED; ++ ++ if (g_strcmp0 (profile, "main") == 0) ++ return GST_AV1_PROFILE_0; ++ else if (g_strcmp0 (profile, "high") == 0) ++ return GST_AV1_PROFILE_1; ++ else if (g_strcmp0 (profile, "professional") == 0) ++ return GST_AV1_PROFILE_2; ++ ++ return GST_AV1_PROFILE_UNDEFINED; ++} ++ ++static const gchar * ++gst_av1_parse_alignment_to_steam_format_string (GstAV1ParseAligment align) ++{ ++ switch (align) { ++ case GST_AV1_PARSE_ALIGN_BYTE: ++ return "obu-stream"; ++ case GST_AV1_PARSE_ALIGN_OBU: ++ case GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT: ++ case GST_AV1_PARSE_ALIGN_FRAME: ++ return "obu-stream"; ++ case GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B: ++ return "annexb"; ++ default: ++ GST_WARNING ("Unrecognized steam format"); ++ break; ++ } ++ ++ return NULL; ++} ++ ++static const gchar * ++gst_av1_parse_alignment_to_string (GstAV1ParseAligment align) ++{ ++ switch (align) { ++ case GST_AV1_PARSE_ALIGN_BYTE: ++ return "byte"; ++ case GST_AV1_PARSE_ALIGN_OBU: ++ return "obu"; ++ case GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT: ++ case GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B: ++ return "tu"; ++ case GST_AV1_PARSE_ALIGN_FRAME: ++ return "frame"; ++ default: ++ GST_WARNING ("Unrecognized alignment"); ++ break; ++ } ++ ++ return NULL; ++} ++ ++static GstAV1ParseAligment ++gst_av1_parse_alignment_from_string (const gchar * align, ++ const gchar * stream_format) ++{ ++ if (!align && !stream_format) ++ return GST_AV1_PARSE_ALIGN_NONE; ++ ++ if (stream_format) { ++ if (g_strcmp0 (stream_format, "annexb") == 0) { ++ if (align && g_strcmp0 (align, "tu") != 0) { ++ /* annex b stream must align to TU. 
*/ ++ return GST_AV1_PARSE_ALIGN_ERROR; ++ } else { ++ return GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B; ++ } ++ } else if (g_strcmp0 (stream_format, "obu-stream") != 0) { ++ /* unrecognized */ ++ return GST_AV1_PARSE_ALIGN_NONE; ++ } ++ ++ /* stream-format is obu-stream, depends on align */ ++ } ++ ++ if (align) { ++ if (g_strcmp0 (align, "byte") == 0) { ++ return GST_AV1_PARSE_ALIGN_BYTE; ++ } else if (g_strcmp0 (align, "obu") == 0) { ++ return GST_AV1_PARSE_ALIGN_OBU; ++ } else if (g_strcmp0 (align, "tu") == 0) { ++ return GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT; ++ } else if (g_strcmp0 (align, "frame") == 0) { ++ return GST_AV1_PARSE_ALIGN_FRAME; ++ } else { ++ /* unrecognized */ ++ return GST_AV1_PARSE_ALIGN_NONE; ++ } ++ } ++ ++ return GST_AV1_PARSE_ALIGN_NONE; ++} ++ ++static gboolean ++gst_av1_parse_caps_has_alignment (GstCaps * caps, GstAV1ParseAligment alignment) ++{ ++ guint i, j, caps_size; ++ const gchar *cmp_align_str = NULL; ++ const gchar *cmp_stream_str = NULL; ++ ++ GST_DEBUG ("Try to find alignment %d in caps: %" GST_PTR_FORMAT, ++ alignment, caps); ++ ++ caps_size = gst_caps_get_size (caps); ++ if (caps_size == 0) ++ return FALSE; ++ ++ switch (alignment) { ++ case GST_AV1_PARSE_ALIGN_BYTE: ++ cmp_align_str = "byte"; ++ cmp_stream_str = "obu-stream"; ++ break; ++ case GST_AV1_PARSE_ALIGN_OBU: ++ cmp_align_str = "obu"; ++ cmp_stream_str = "obu-stream"; ++ break; ++ case GST_AV1_PARSE_ALIGN_FRAME: ++ cmp_align_str = "frame"; ++ cmp_stream_str = "obu-stream"; ++ break; ++ case GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT: ++ cmp_align_str = "tu"; ++ cmp_stream_str = "obu-stream"; ++ break; ++ case GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B: ++ cmp_align_str = "tu"; ++ cmp_stream_str = "annexb"; ++ break; ++ default: ++ return FALSE; ++ } ++ ++ for (i = 0; i < caps_size; i++) { ++ GstStructure *s = gst_caps_get_structure (caps, i); ++ const GValue *alignment_value = gst_structure_get_value (s, "alignment"); ++ const GValue *stream_value = gst_structure_get_value (s, "stream-format"); ++ ++ if (!alignment_value || !stream_value) ++ continue; ++ ++ if (G_VALUE_HOLDS_STRING (alignment_value)) { ++ const gchar *align_str = g_value_get_string (alignment_value); ++ ++ if (g_strcmp0 (align_str, cmp_align_str) != 0) ++ continue; ++ } else if (GST_VALUE_HOLDS_LIST (alignment_value)) { ++ guint num_values = gst_value_list_get_size (alignment_value); ++ ++ for (j = 0; j < num_values; j++) { ++ const GValue *v = gst_value_list_get_value (alignment_value, j); ++ const gchar *align_str = g_value_get_string (v); ++ ++ if (g_strcmp0 (align_str, cmp_align_str) == 0) ++ break; ++ } ++ ++ if (j == num_values) ++ continue; ++ } ++ ++ if (G_VALUE_HOLDS_STRING (stream_value)) { ++ const gchar *stream_str = g_value_get_string (stream_value); ++ ++ if (g_strcmp0 (stream_str, cmp_stream_str) != 0) ++ continue; ++ } else if (GST_VALUE_HOLDS_LIST (stream_value)) { ++ guint num_values = gst_value_list_get_size (stream_value); ++ ++ for (j = 0; j < num_values; j++) { ++ const GValue *v = gst_value_list_get_value (stream_value, j); ++ const gchar *stream_str = g_value_get_string (v); ++ ++ if (g_strcmp0 (stream_str, cmp_stream_str) == 0) ++ break; ++ } ++ ++ if (j == num_values) ++ continue; ++ } ++ ++ return TRUE; ++ } ++ ++ return FALSE; ++} ++ ++static GstAV1ParseAligment ++gst_av1_parse_alignment_from_caps (GstCaps * caps) ++{ ++ GstAV1ParseAligment align; ++ ++ align = GST_AV1_PARSE_ALIGN_NONE; ++ ++ GST_DEBUG ("parsing caps: %" GST_PTR_FORMAT, caps); ++ ++ if (caps && gst_caps_get_size (caps) > 0) { ++ GstStructure 
*s = gst_caps_get_structure (caps, 0); ++ const gchar *str_align = NULL; ++ const gchar *str_stream = NULL; ++ ++ str_align = gst_structure_get_string (s, "alignment"); ++ str_stream = gst_structure_get_string (s, "stream-format"); ++ ++ align = gst_av1_parse_alignment_from_string (str_align, str_stream); ++ } ++ ++ return align; ++} ++ ++static void ++gst_av1_parse_update_src_caps (GstAV1Parse * self, GstCaps * caps) ++{ ++ GstCaps *sink_caps, *src_caps; ++ GstCaps *final_caps = NULL; ++ GstStructure *s = NULL; ++ gint width, height; ++ gint par_n = 0, par_d = 0; ++ gint fps_n = 0, fps_d = 0; ++ const gchar *profile = NULL; ++ ++ if (G_UNLIKELY (!gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD (self)))) ++ self->update_caps = TRUE; ++ ++ if (!self->update_caps) ++ return; ++ ++ /* if this is being called from the first _setcaps call, caps on the sinkpad ++ * aren't set yet and so they need to be passed as an argument */ ++ if (caps) ++ sink_caps = gst_caps_ref (caps); ++ else ++ sink_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (self)); ++ ++ /* carry over input caps as much as possible; override with our own stuff */ ++ if (!sink_caps) ++ sink_caps = gst_caps_new_empty_simple ("video/x-av1"); ++ else ++ s = gst_caps_get_structure (sink_caps, 0); ++ ++ final_caps = gst_caps_copy (sink_caps); ++ ++ if (s && gst_structure_has_field (s, "width") && ++ gst_structure_has_field (s, "height")) { ++ gst_structure_get_int (s, "width", &width); ++ gst_structure_get_int (s, "height", &height); ++ } else { ++ width = self->width; ++ height = self->height; ++ } ++ ++ if (width > 0 && height > 0) ++ gst_caps_set_simple (final_caps, "width", G_TYPE_INT, width, ++ "height", G_TYPE_INT, height, NULL); ++ ++ if (s && gst_structure_get_fraction (s, "pixel-aspect-ratio", &par_n, &par_d)) { ++ if (par_n != 0 && par_d != 0) { ++ gst_caps_set_simple (final_caps, "pixel-aspect-ratio", ++ GST_TYPE_FRACTION, par_n, par_d, NULL); ++ } ++ } ++ ++ if (s && gst_structure_has_field (s, "framerate")) { ++ gst_structure_get_fraction (s, "framerate", &fps_n, &fps_d); ++ } ++ ++ if (fps_n > 0 && fps_d > 0) { ++ gst_caps_set_simple (final_caps, "framerate", ++ GST_TYPE_FRACTION, fps_n, fps_d, NULL); ++ gst_base_parse_set_frame_rate (GST_BASE_PARSE (self), fps_n, fps_d, 0, 0); ++ } ++ ++ /* When not RGB, the chroma format is needed. 
*/ ++ if (self->colorimetry == NULL || ++ (g_strcmp0 (self->colorimetry, GST_VIDEO_COLORIMETRY_SRGB) != 0)) { ++ const gchar *chroma_format = NULL; ++ ++ if (self->subsampling_x == 1 && self->subsampling_y == 1) { ++ if (!self->mono_chrome) { ++ chroma_format = "4:2:0"; ++ } else { ++ chroma_format = "4:0:0"; ++ } ++ } else if (self->subsampling_x == 1 && self->subsampling_y == 0) { ++ chroma_format = "4:2:2"; ++ } else if (self->subsampling_x == 0 && self->subsampling_y == 0) { ++ chroma_format = "4:4:4"; ++ } ++ ++ if (chroma_format) ++ gst_caps_set_simple (final_caps, ++ "chroma-format", G_TYPE_STRING, chroma_format, NULL); ++ } ++ ++ if (self->bit_depth) ++ gst_caps_set_simple (final_caps, ++ "bit-depth-luma", G_TYPE_UINT, self->bit_depth, ++ "bit-depth-chroma", G_TYPE_UINT, self->bit_depth, NULL); ++ ++ if (self->colorimetry && (!s || !gst_structure_has_field (s, "colorimetry"))) ++ gst_caps_set_simple (final_caps, ++ "colorimetry", G_TYPE_STRING, self->colorimetry, NULL); ++ ++ g_assert (self->align > GST_AV1_PARSE_ALIGN_NONE); ++ gst_caps_set_simple (final_caps, "parsed", G_TYPE_BOOLEAN, TRUE, ++ "stream-format", G_TYPE_STRING, ++ gst_av1_parse_alignment_to_steam_format_string (self->align), ++ "alignment", G_TYPE_STRING, ++ gst_av1_parse_alignment_to_string (self->align), NULL); ++ ++ profile = gst_av1_parse_profile_to_string (self->profile); ++ if (profile) ++ gst_caps_set_simple (final_caps, "profile", G_TYPE_STRING, profile, NULL); ++ ++ src_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (self)); ++ ++ if (!(src_caps && gst_caps_is_strictly_equal (src_caps, final_caps))) { ++ GST_DEBUG_OBJECT (self, "Update src caps %" GST_PTR_FORMAT, final_caps); ++ gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (self), final_caps); ++ } ++ ++ gst_clear_caps (&src_caps); ++ gst_caps_unref (final_caps); ++ gst_caps_unref (sink_caps); ++ ++ self->update_caps = FALSE; ++} ++ ++/* check downstream caps to configure format and alignment */ ++static void ++gst_av1_parse_negotiate (GstAV1Parse * self, GstCaps * in_caps) ++{ ++ GstCaps *caps; ++ GstAV1ParseAligment align; ++ ++ caps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (self)); ++ GST_DEBUG_OBJECT (self, "allowed caps: %" GST_PTR_FORMAT, caps); ++ ++ /* concentrate on leading structure, since decodebin parser ++ * capsfilter always includes parser template caps */ ++ if (caps) { ++ caps = gst_caps_truncate (caps); ++ GST_DEBUG_OBJECT (self, "negotiating with caps: %" GST_PTR_FORMAT, caps); ++ } ++ ++ /* prefer TU as default */ ++ if (gst_av1_parse_caps_has_alignment (caps, ++ GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT)) { ++ self->align = GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT; ++ goto done; ++ } ++ ++ /* Both upsteam and downstream support, best */ ++ if (in_caps && caps) { ++ if (gst_caps_can_intersect (in_caps, caps)) { ++ GstCaps *common_caps = NULL; ++ ++ common_caps = gst_caps_intersect (in_caps, caps); ++ align = gst_av1_parse_alignment_from_caps (common_caps); ++ gst_clear_caps (&common_caps); ++ ++ if (align != GST_AV1_PARSE_ALIGN_NONE ++ && align != GST_AV1_PARSE_ALIGN_ERROR) { ++ self->align = align; ++ goto done; ++ } ++ } ++ } ++ ++ /* Select first one of downstream support */ ++ if (caps && !gst_caps_is_empty (caps)) { ++ /* fixate to avoid ambiguity with lists when parsing */ ++ caps = gst_caps_fixate (caps); ++ align = gst_av1_parse_alignment_from_caps (caps); ++ ++ if (align != GST_AV1_PARSE_ALIGN_NONE && align != GST_AV1_PARSE_ALIGN_ERROR) { ++ self->align = align; ++ goto done; ++ } ++ } ++ ++ /* default */ ++ self->align = 
GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT; ++ ++done: ++ GST_INFO_OBJECT (self, "selected alignment %s", ++ gst_av1_parse_alignment_to_string (self->align)); ++ ++ gst_clear_caps (&caps); ++} ++ ++static GstCaps * ++gst_av1_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter) ++{ ++ GstCaps *peercaps, *templ; ++ GstCaps *res, *tmp, *pcopy; ++ ++ templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse)); ++ if (filter) { ++ GstCaps *fcopy = gst_caps_copy (filter); ++ /* Remove the fields we convert */ ++ remove_fields (fcopy, TRUE); ++ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), fcopy); ++ gst_caps_unref (fcopy); ++ } else { ++ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), NULL); ++ } ++ ++ pcopy = gst_caps_copy (peercaps); ++ remove_fields (pcopy, TRUE); ++ ++ res = gst_caps_intersect_full (pcopy, templ, GST_CAPS_INTERSECT_FIRST); ++ gst_caps_unref (pcopy); ++ gst_caps_unref (templ); ++ ++ if (filter) { ++ GstCaps *tmp = gst_caps_intersect_full (res, filter, ++ GST_CAPS_INTERSECT_FIRST); ++ gst_caps_unref (res); ++ res = tmp; ++ } ++ ++ /* Try if we can put the downstream caps first */ ++ pcopy = gst_caps_copy (peercaps); ++ remove_fields (pcopy, FALSE); ++ tmp = gst_caps_intersect_full (pcopy, res, GST_CAPS_INTERSECT_FIRST); ++ gst_caps_unref (pcopy); ++ if (!gst_caps_is_empty (tmp)) ++ res = gst_caps_merge (tmp, res); ++ else ++ gst_caps_unref (tmp); ++ ++ gst_caps_unref (peercaps); ++ ++ return res; ++} ++ ++static gboolean ++gst_av1_parse_set_sink_caps (GstBaseParse * parse, GstCaps * caps) ++{ ++ GstAV1Parse *self = GST_AV1_PARSE (parse); ++ GstStructure *str; ++ GstAV1ParseAligment align; ++ GstCaps *in_caps = NULL; ++ const gchar *profile; ++ ++ str = gst_caps_get_structure (caps, 0); ++ ++ /* accept upstream info if provided */ ++ gst_structure_get_int (str, "width", &self->width); ++ gst_structure_get_int (str, "height", &self->height); ++ profile = gst_structure_get_string (str, "profile"); ++ if (profile) ++ self->profile = gst_av1_parse_profile_from_string (profile); ++ ++ /* get upstream align from caps */ ++ align = gst_av1_parse_alignment_from_caps (caps); ++ if (align == GST_AV1_PARSE_ALIGN_ERROR) { ++ GST_ERROR_OBJECT (self, "Sink caps %" GST_PTR_FORMAT " set stream-format" ++ " and alignment conflict.", caps); ++ return FALSE; ++ } ++ ++ in_caps = gst_caps_copy (caps); ++ /* default */ ++ if (align == GST_AV1_PARSE_ALIGN_NONE) { ++ align = GST_AV1_PARSE_ALIGN_BYTE; ++ gst_caps_set_simple (in_caps, "alignment", G_TYPE_STRING, ++ gst_av1_parse_alignment_to_string (align), ++ "stream-format", G_TYPE_STRING, "obu-stream", NULL); ++ } ++ ++ /* negotiate with downstream, set output align */ ++ gst_av1_parse_negotiate (self, in_caps); ++ ++ self->update_caps = TRUE; ++ ++ /* if all of decoder's capability related values are provided ++ * by upstream, update src caps now */ ++ if (self->width > 0 && self->height > 0 && profile) ++ gst_av1_parse_update_src_caps (self, in_caps); ++ ++ gst_caps_unref (in_caps); ++ ++ self->in_align = align; ++ ++ if (self->in_align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT) ++ self->detect_annex_b = TRUE; ++ ++ if (self->in_align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B) { ++ gst_av1_parser_reset (self->parser, TRUE); ++ } else { ++ gst_av1_parser_reset (self->parser, FALSE); ++ } ++ ++ return TRUE; ++} ++ ++static GstFlowReturn ++gst_av1_parse_push_data (GstAV1Parse * self, GstBaseParseFrame * frame, ++ guint32 finish_sz, gboolean frame_finished) ++{ ++ gsize sz; ++ GstBuffer *buf, 
*header_buf; ++ GstBuffer *buffer = frame->buffer; ++ GstFlowReturn ret = GST_FLOW_OK; ++ ++ /* Need to generate the final TU annex-b format */ ++ if (self->align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B) { ++ guint8 size_data[GST_AV1_MAX_LEB_128_SIZE]; ++ guint size_len = 0; ++ guint len; ++ ++ /* When push a TU, it must also be a frame end. */ ++ g_assert (frame_finished); ++ ++ /* Still some left in the frame cache */ ++ len = gst_adapter_available (self->frame_cache); ++ if (len) { ++ buf = gst_adapter_take_buffer (self->frame_cache, len); ++ ++ /* frame_unit_size */ ++ _write_leb128 (size_data, &size_len, len); ++ header_buf = gst_buffer_new_memdup (size_data, size_len); ++ GST_BUFFER_PTS (header_buf) = GST_BUFFER_PTS (buf); ++ GST_BUFFER_DTS (header_buf) = GST_BUFFER_DTS (buf); ++ GST_BUFFER_DURATION (header_buf) = GST_BUFFER_DURATION (buf); ++ ++ gst_adapter_push (self->cache_out, header_buf); ++ gst_adapter_push (self->cache_out, buf); ++ } ++ ++ len = gst_adapter_available (self->cache_out); ++ if (len) { ++ buf = gst_adapter_take_buffer (self->cache_out, len); ++ ++ /* temporal_unit_size */ ++ _write_leb128 (size_data, &size_len, len); ++ header_buf = gst_buffer_new_memdup (size_data, size_len); ++ GST_BUFFER_PTS (header_buf) = GST_BUFFER_PTS (buf); ++ GST_BUFFER_DTS (header_buf) = GST_BUFFER_DTS (buf); ++ GST_BUFFER_DURATION (header_buf) = GST_BUFFER_DURATION (buf); ++ ++ gst_adapter_push (self->cache_out, header_buf); ++ gst_adapter_push (self->cache_out, buf); ++ } ++ } ++ ++ sz = gst_adapter_available (self->cache_out); ++ if (sz) { ++ buf = gst_adapter_take_buffer (self->cache_out, sz); ++ gst_buffer_copy_into (buf, buffer, GST_BUFFER_COPY_METADATA, 0, -1); ++ if (self->discont) { ++ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT); ++ self->discont = FALSE; ++ } else { ++ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT); ++ } ++ ++ if (self->header) { ++ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER); ++ self->header = FALSE; ++ } else { ++ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_HEADER); ++ } ++ ++ if (self->keyframe) { ++ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT); ++ self->keyframe = FALSE; ++ } else { ++ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT); ++ } ++ ++ if (frame_finished) { ++ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_MARKER); ++ } else { ++ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_MARKER); ++ } ++ ++ if (self->align == GST_AV1_PARSE_ALIGN_FRAME) { ++ if (!self->show_frame) { ++ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DECODE_ONLY); ++ } else { ++ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DECODE_ONLY); ++ } ++ } else { ++ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DECODE_ONLY); ++ } ++ ++ gst_buffer_replace (&frame->out_buffer, buf); ++ gst_buffer_unref (buf); ++ ++ gst_av1_parse_update_src_caps (self, NULL); ++ GST_LOG_OBJECT (self, "comsumed %d, output one buffer with size %" ++ G_GSSIZE_FORMAT, finish_sz, sz); ++ ret = gst_base_parse_finish_frame (GST_BASE_PARSE (self), frame, finish_sz); ++ } ++ ++ return ret; ++} ++ ++static void ++gst_av1_parse_convert_to_annexb (GstAV1Parse * self, GstBuffer * buffer, ++ GstAV1OBU * obu, gboolean frame_complete) ++{ ++ guint8 size_data[GST_AV1_MAX_LEB_128_SIZE]; ++ guint size_len = 0; ++ GstBitWriter bs; ++ GstBuffer *buf, *buf2; ++ guint8 *data; ++ guint len, len2, offset; ++ ++ /* obu_length */ ++ _write_leb128 (size_data, &size_len, ++ obu->obu_size + 1 + obu->header.obu_extention_flag); ++ ++ gst_bit_writer_init_with_size (&bs, 128, FALSE); ++ /* obu_forbidden_bit */ ++ 
gst_bit_writer_put_bits_uint8 (&bs, 0, 1); ++ /* obu_type */ ++ gst_bit_writer_put_bits_uint8 (&bs, obu->obu_type, 4); ++ /* obu_extension_flag */ ++ gst_bit_writer_put_bits_uint8 (&bs, obu->header.obu_extention_flag, 1); ++ /* obu_has_size_field */ ++ gst_bit_writer_put_bits_uint8 (&bs, 0, 1); ++ /* obu_reserved_1bit */ ++ gst_bit_writer_put_bits_uint8 (&bs, 0, 1); ++ if (obu->header.obu_extention_flag) { ++ /* temporal_id */ ++ gst_bit_writer_put_bits_uint8 (&bs, obu->header.obu_temporal_id, 3); ++ /* spatial_id */ ++ gst_bit_writer_put_bits_uint8 (&bs, obu->header.obu_spatial_id, 2); ++ /* extension_header_reserved_3bits */ ++ gst_bit_writer_put_bits_uint8 (&bs, 0, 3); ++ } ++ g_assert (GST_BIT_WRITER_BIT_SIZE (&bs) % 8 == 0); ++ ++ len = size_len; ++ len += GST_BIT_WRITER_BIT_SIZE (&bs) / 8; ++ len += obu->obu_size; ++ ++ data = g_malloc (len); ++ offset = 0; ++ ++ memcpy (data + offset, size_data, size_len); ++ offset += size_len; ++ ++ memcpy (data + offset, GST_BIT_WRITER_DATA (&bs), ++ GST_BIT_WRITER_BIT_SIZE (&bs) / 8); ++ offset += GST_BIT_WRITER_BIT_SIZE (&bs) / 8; ++ ++ memcpy (data + offset, obu->data, obu->obu_size); ++ ++ /* The buf of this OBU */ ++ buf = gst_buffer_new_wrapped (data, len); ++ GST_BUFFER_PTS (buf) = GST_BUFFER_PTS (buffer); ++ GST_BUFFER_DTS (buf) = GST_BUFFER_DTS (buffer); ++ GST_BUFFER_DURATION (buf) = GST_BUFFER_DURATION (buffer); ++ ++ gst_adapter_push (self->frame_cache, buf); ++ ++ if (frame_complete) { ++ len2 = gst_adapter_available (self->frame_cache); ++ buf2 = gst_adapter_take_buffer (self->frame_cache, len2); ++ ++ /* frame_unit_size */ ++ _write_leb128 (size_data, &size_len, len2); ++ buf = gst_buffer_new_memdup (size_data, size_len); ++ GST_BUFFER_PTS (buf) = GST_BUFFER_PTS (buf2); ++ GST_BUFFER_DTS (buf) = GST_BUFFER_DTS (buf2); ++ GST_BUFFER_DURATION (buf) = GST_BUFFER_DURATION (buf2); ++ ++ gst_adapter_push (self->cache_out, buf); ++ gst_adapter_push (self->cache_out, buf2); ++ } ++ ++ gst_bit_writer_reset (&bs); ++} ++ ++static void ++gst_av1_parse_convert_from_annexb (GstAV1Parse * self, GstBuffer * buffer, ++ GstAV1OBU * obu) ++{ ++ guint8 size_data[GST_AV1_MAX_LEB_128_SIZE]; ++ guint size_len = 0; ++ GstBuffer *buf; ++ guint len, offset; ++ guint8 *data; ++ GstBitWriter bs; ++ ++ _write_leb128 (size_data, &size_len, obu->obu_size); ++ ++ /* obu_header */ ++ len = 1; ++ if (obu->header.obu_extention_flag) ++ len += 1; ++ len += size_len; ++ len += obu->obu_size; ++ ++ gst_bit_writer_init_with_size (&bs, 128, FALSE); ++ /* obu_forbidden_bit */ ++ gst_bit_writer_put_bits_uint8 (&bs, 0, 1); ++ /* obu_type */ ++ gst_bit_writer_put_bits_uint8 (&bs, obu->obu_type, 4); ++ /* obu_extension_flag */ ++ gst_bit_writer_put_bits_uint8 (&bs, obu->header.obu_extention_flag, 1); ++ /* obu_has_size_field */ ++ gst_bit_writer_put_bits_uint8 (&bs, 1, 1); ++ /* obu_reserved_1bit */ ++ gst_bit_writer_put_bits_uint8 (&bs, 0, 1); ++ if (obu->header.obu_extention_flag) { ++ /* temporal_id */ ++ gst_bit_writer_put_bits_uint8 (&bs, obu->header.obu_temporal_id, 3); ++ /* spatial_id */ ++ gst_bit_writer_put_bits_uint8 (&bs, obu->header.obu_spatial_id, 2); ++ /* extension_header_reserved_3bits */ ++ gst_bit_writer_put_bits_uint8 (&bs, 0, 3); ++ } ++ g_assert (GST_BIT_WRITER_BIT_SIZE (&bs) % 8 == 0); ++ ++ data = g_malloc (len); ++ offset = 0; ++ memcpy (data + offset, GST_BIT_WRITER_DATA (&bs), ++ GST_BIT_WRITER_BIT_SIZE (&bs) / 8); ++ offset += GST_BIT_WRITER_BIT_SIZE (&bs) / 8; ++ ++ memcpy (data + offset, size_data, size_len); ++ offset += size_len; ++ ++ memcpy 
(data + offset, obu->data, obu->obu_size); ++ ++ buf = gst_buffer_new_wrapped (data, len); ++ GST_BUFFER_PTS (buf) = GST_BUFFER_PTS (buffer); ++ GST_BUFFER_DTS (buf) = GST_BUFFER_DTS (buffer); ++ GST_BUFFER_DURATION (buf) = GST_BUFFER_DURATION (buffer); ++ ++ gst_adapter_push (self->cache_out, buf); ++ ++ gst_bit_writer_reset (&bs); ++} ++ ++static void ++gst_av1_parse_cache_one_obu (GstAV1Parse * self, GstBuffer * buffer, ++ GstAV1OBU * obu, guint8 * data, guint32 size, gboolean frame_complete) ++{ ++ gboolean need_convert = FALSE; ++ GstBuffer *buf; ++ ++ if (self->in_align != self->align ++ && (self->in_align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B ++ || self->align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B)) ++ need_convert = TRUE; ++ ++ if (need_convert) { ++ if (self->in_align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B) { ++ gst_av1_parse_convert_from_annexb (self, buffer, obu); ++ } else { ++ gst_av1_parse_convert_to_annexb (self, buffer, obu, frame_complete); ++ } ++ } else if (self->align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B) { ++ g_assert (self->in_align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B); ++ gst_av1_parse_convert_to_annexb (self, buffer, obu, frame_complete); ++ } else { ++ buf = gst_buffer_new_memdup (data, size); ++ GST_BUFFER_PTS (buf) = GST_BUFFER_PTS (buffer); ++ GST_BUFFER_DTS (buf) = GST_BUFFER_DTS (buffer); ++ GST_BUFFER_DURATION (buf) = GST_BUFFER_DURATION (buffer); ++ ++ gst_adapter_push (self->cache_out, buf); ++ } ++} ++ ++static GstAV1ParserResult ++gst_av1_parse_handle_sequence_obu (GstAV1Parse * self, GstAV1OBU * obu) ++{ ++ GstAV1SequenceHeaderOBU seq_header; ++ GstAV1ParserResult res; ++ guint i; ++ guint val; ++ ++ res = gst_av1_parser_parse_sequence_header_obu (self->parser, ++ obu, &seq_header); ++ if (res != GST_AV1_PARSER_OK) ++ return res; ++ ++ if (self->width != seq_header.max_frame_width_minus_1 + 1) { ++ self->width = seq_header.max_frame_width_minus_1 + 1; ++ self->update_caps = TRUE; ++ } ++ if (self->height != seq_header.max_frame_height_minus_1 + 1) { ++ self->height = seq_header.max_frame_height_minus_1 + 1; ++ self->update_caps = TRUE; ++ } ++ ++ if (seq_header.color_config.color_description_present_flag) { ++ GstVideoColorimetry cinfo; ++ gchar *colorimetry = NULL; ++ ++ if (seq_header.color_config.color_range) ++ cinfo.range = GST_VIDEO_COLOR_RANGE_0_255; ++ else ++ cinfo.range = GST_VIDEO_COLOR_RANGE_16_235; ++ ++ cinfo.matrix = gst_video_color_matrix_from_iso ++ (seq_header.color_config.matrix_coefficients); ++ cinfo.transfer = gst_video_transfer_function_from_iso ++ (seq_header.color_config.transfer_characteristics); ++ cinfo.primaries = gst_video_color_primaries_from_iso ++ (seq_header.color_config.color_primaries); ++ ++ colorimetry = gst_video_colorimetry_to_string (&cinfo); ++ ++ if (g_strcmp0 (colorimetry, self->colorimetry) != 0) { ++ g_free (self->colorimetry); ++ self->colorimetry = colorimetry; ++ colorimetry = NULL; ++ self->update_caps = TRUE; ++ } ++ ++ g_clear_pointer (&colorimetry, g_free); ++ } ++ ++ if (self->subsampling_x != seq_header.color_config.subsampling_x) { ++ self->subsampling_x = seq_header.color_config.subsampling_x; ++ self->update_caps = TRUE; ++ } ++ ++ if (self->subsampling_y != seq_header.color_config.subsampling_y) { ++ self->subsampling_y = seq_header.color_config.subsampling_y; ++ self->update_caps = TRUE; ++ } ++ ++ if (self->mono_chrome != seq_header.color_config.mono_chrome) { ++ self->mono_chrome = seq_header.color_config.mono_chrome; ++ self->update_caps = TRUE; ++ } ++ ++ 
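/* Track bit-depth and profile changes signalled in the sequence header so the source caps are renegotiated, and derive highest_spatial_id from the spatial-layer bits of operating_point_idc. */ ++ 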
if (self->bit_depth != seq_header.bit_depth) { ++ self->bit_depth = seq_header.bit_depth; ++ self->update_caps = TRUE; ++ } ++ ++ if (self->profile != seq_header.seq_profile) { ++ self->profile = seq_header.seq_profile; ++ self->update_caps = TRUE; ++ } ++ ++ val = (self->parser->state.operating_point_idc >> 8) & 0x0f; ++ for (i = 0; i < GST_AV1_MAX_NUM_SPATIAL_LAYERS; i++) { ++ if (val & (1 << i)) ++ self->highest_spatial_id = i; ++ } ++ ++ return GST_AV1_PARSER_OK; ++} ++ ++/* Check whether the frame start a new TU. ++ The obu here should be a shown frame/frame header. */ ++static gboolean ++gst_av1_parse_frame_start_new_temporal_unit (GstAV1Parse * self, ++ GstAV1OBU * obu) ++{ ++ gboolean ret = FALSE; ++ ++ g_assert (obu->obu_type == GST_AV1_OBU_FRAME_HEADER ++ || obu->obu_type == GST_AV1_OBU_FRAME); ++ ++ /* 7.5.Ordering of OBUs: The value of temporal_id must be the same in all ++ OBU extension headers that are contained in the same temporal unit. */ ++ if (self->last_shown_frame_temporal_id >= 0 && ++ obu->header.obu_temporal_id != self->last_shown_frame_temporal_id) { ++ ret = TRUE; ++ goto new_tu; ++ } ++ ++ /* If scalability is not being used, only one shown frame for each ++ temporal unit. So the new frame belongs to a new temporal unit. */ ++ if (!self->within_one_frame && self->last_shown_frame_temporal_id >= 0 && ++ self->parser->state.operating_point_idc == 0) { ++ ret = TRUE; ++ goto new_tu; ++ } ++ ++ /* The new frame has the same layer IDs with the last shown frame, ++ it should belong to a new temporal unit. */ ++ if (!self->within_one_frame && ++ obu->header.obu_temporal_id == self->last_shown_frame_temporal_id && ++ obu->header.obu_spatial_id == self->last_shown_frame_spatial_id) { ++ ret = TRUE; ++ goto new_tu; ++ } ++ ++new_tu: ++ if (ret) { ++ if (self->within_one_frame) ++ GST_WARNING_OBJECT (self, ++ "Start a new temporal unit with incompleted frame."); ++ ++ gst_av1_parse_reset_obu_data_state (self); ++ } ++ ++ return ret; ++} ++ ++/* frame_complete will be set true if it is the frame edge. 
*/ ++static GstAV1ParserResult ++gst_av1_parse_handle_one_obu (GstAV1Parse * self, GstAV1OBU * obu, ++ gboolean * frame_complete, gboolean * check_new_tu) ++{ ++ GstAV1ParserResult res = GST_AV1_PARSER_OK; ++ GstAV1MetadataOBU metadata; ++ GstAV1FrameHeaderOBU frame_header; ++ GstAV1TileListOBU tile_list; ++ GstAV1TileGroupOBU tile_group; ++ GstAV1FrameOBU frame; ++ ++ *frame_complete = FALSE; ++ ++ switch (obu->obu_type) { ++ case GST_AV1_OBU_TEMPORAL_DELIMITER: ++ res = gst_av1_parser_parse_temporal_delimiter_obu (self->parser, obu); ++ break; ++ case GST_AV1_OBU_SEQUENCE_HEADER: ++ res = gst_av1_parse_handle_sequence_obu (self, obu); ++ break; ++ case GST_AV1_OBU_REDUNDANT_FRAME_HEADER: ++ res = gst_av1_parser_parse_frame_header_obu (self->parser, obu, ++ &frame_header); ++ break; ++ case GST_AV1_OBU_FRAME_HEADER: ++ res = gst_av1_parser_parse_frame_header_obu (self->parser, obu, ++ &frame_header); ++ break; ++ case GST_AV1_OBU_FRAME: ++ res = gst_av1_parser_parse_frame_obu (self->parser, obu, &frame); ++ break; ++ case GST_AV1_OBU_METADATA: ++ res = gst_av1_parser_parse_metadata_obu (self->parser, obu, &metadata); ++ break; ++ case GST_AV1_OBU_TILE_GROUP: ++ res = ++ gst_av1_parser_parse_tile_group_obu (self->parser, obu, &tile_group); ++ break; ++ case GST_AV1_OBU_TILE_LIST: ++ res = gst_av1_parser_parse_tile_list_obu (self->parser, obu, &tile_list); ++ break; ++ case GST_AV1_OBU_PADDING: ++ break; ++ default: ++ GST_WARNING_OBJECT (self, "an unrecognized obu type %d", obu->obu_type); ++ res = GST_AV1_PARSER_BITSTREAM_ERROR; ++ break; ++ } ++ ++ GST_LOG_OBJECT (self, "parsing the obu %s, result is %d", ++ _obu_name (obu->obu_type), res); ++ if (res != GST_AV1_PARSER_OK) ++ goto out; ++ ++ /* 7.5: ++ All OBU extension headers that are contained in the same temporal ++ unit and have the same spatial_id value must have the same temporal_id ++ value. ++ And ++ OBUs with spatial level IDs (spatial_id) greater than 0 must ++ appear within a temporal unit in increasing order of the spatial ++ level ID values. */ ++ if (obu->header.obu_spatial_id > self->highest_spatial_id) { ++ GST_WARNING_OBJECT (self, ++ "spatial_id %d is bigger than highest_spatial_id %d", ++ obu->header.obu_spatial_id, self->highest_spatial_id); ++ res = GST_AV1_PARSER_BITSTREAM_ERROR; ++ goto out; ++ } ++ ++ /* If to check a new temporal starts, return early. ++ In 7.5.Ordering of OBUs: Sequence header OBUs may appear in any order ++ within a coded video sequence. So it is allowed to repeat the sequence ++ header within one temporal unit, and sequence header does not definitely ++ start a TU. We only check TD here. */ ++ if (obu->obu_type == GST_AV1_OBU_TEMPORAL_DELIMITER) { ++ gst_av1_parse_reset_obu_data_state (self); ++ ++ if (check_new_tu) { ++ *check_new_tu = TRUE; ++ res = GST_AV1_PARSER_OK; ++ goto out; ++ } ++ } ++ ++ if (obu->obu_type == GST_AV1_OBU_SEQUENCE_HEADER) ++ self->header = TRUE; ++ ++ if (obu->obu_type == GST_AV1_OBU_FRAME_HEADER ++ || obu->obu_type == GST_AV1_OBU_FRAME ++ || obu->obu_type == GST_AV1_OBU_REDUNDANT_FRAME_HEADER) { ++ GstAV1FrameHeaderOBU *fh = &frame_header; ++ ++ if (obu->obu_type == GST_AV1_OBU_FRAME) ++ fh = &frame.frame_header; ++ ++ self->show_frame = fh->show_frame || fh->show_existing_frame; ++ if (self->show_frame) { ++ /* Check whether a new temporal starts, and return early. 
*/ ++ if (check_new_tu && obu->obu_type != GST_AV1_OBU_REDUNDANT_FRAME_HEADER ++ && gst_av1_parse_frame_start_new_temporal_unit (self, obu)) { ++ *check_new_tu = TRUE; ++ res = GST_AV1_PARSER_OK; ++ goto out; ++ } ++ ++ self->last_shown_frame_temporal_id = obu->header.obu_temporal_id; ++ self->last_shown_frame_spatial_id = obu->header.obu_spatial_id; ++ } ++ ++ self->within_one_frame = TRUE; ++ ++ /* if a show_existing_frame case, only update key frame. ++ otherwise, update all type of frame. */ ++ if (!fh->show_existing_frame || fh->frame_type == GST_AV1_KEY_FRAME) ++ res = gst_av1_parser_reference_frame_update (self->parser, fh); ++ ++ if (res != GST_AV1_PARSER_OK) ++ GST_WARNING_OBJECT (self, "update frame get result %d", res); ++ ++ if (fh->show_existing_frame) { ++ *frame_complete = TRUE; ++ self->within_one_frame = FALSE; ++ } ++ ++ if (fh->frame_type == GST_AV1_KEY_FRAME) ++ self->keyframe = TRUE; ++ } ++ ++ if (obu->obu_type == GST_AV1_OBU_TILE_GROUP ++ || obu->obu_type == GST_AV1_OBU_FRAME) { ++ GstAV1TileGroupOBU *tg = &tile_group; ++ ++ self->within_one_frame = TRUE; ++ ++ if (obu->obu_type == GST_AV1_OBU_FRAME) ++ tg = &frame.tile_group; ++ ++ if (tg->tg_end == tg->num_tiles - 1) { ++ *frame_complete = TRUE; ++ self->within_one_frame = FALSE; ++ } ++ } ++ ++out: ++ if (res != GST_AV1_PARSER_OK) { ++ /* Some verbose OBU can be skip */ ++ if (obu->obu_type == GST_AV1_OBU_REDUNDANT_FRAME_HEADER) { ++ GST_WARNING_OBJECT (self, "Ignore a verbose %s OBU parsing error", ++ _obu_name (obu->obu_type)); ++ gst_av1_parse_reset_obu_data_state (self); ++ res = GST_AV1_PARSER_OK; ++ } ++ } ++ ++ return res; ++} ++ ++static GstFlowReturn ++gst_av1_parse_handle_obu_to_obu (GstBaseParse * parse, ++ GstBaseParseFrame * frame, gint * skipsize) ++{ ++ GstAV1Parse *self = GST_AV1_PARSE (parse); ++ GstMapInfo map_info; ++ GstAV1OBU obu; ++ GstFlowReturn ret = GST_FLOW_OK; ++ GstAV1ParserResult res; ++ GstBuffer *buffer = gst_buffer_ref (frame->buffer); ++ guint32 consumed; ++ gboolean frame_complete; ++ ++ if (!gst_buffer_map (buffer, &map_info, GST_MAP_READ)) { ++ *skipsize = 0; ++ GST_ERROR_OBJECT (parse, "Couldn't map incoming buffer"); ++ return GST_FLOW_ERROR; ++ } ++ ++ consumed = 0; ++ frame_complete = FALSE; ++ res = gst_av1_parser_identify_one_obu (self->parser, map_info.data, ++ map_info.size, &obu, &consumed); ++ if (res == GST_AV1_PARSER_OK) ++ res = gst_av1_parse_handle_one_obu (self, &obu, &frame_complete, NULL); ++ ++ g_assert (consumed <= map_info.size); ++ ++ if (res == GST_AV1_PARSER_BITSTREAM_ERROR || ++ res == GST_AV1_PARSER_MISSING_OBU_REFERENCE) { ++ if (consumed) { ++ *skipsize = consumed; ++ } else { ++ *skipsize = map_info.size; ++ } ++ GST_WARNING_OBJECT (parse, "Parse obu error, discard %d.", *skipsize); ++ gst_av1_parse_reset_obu_data_state (self); ++ ret = GST_FLOW_OK; ++ goto out; ++ } else if (res == GST_AV1_PARSER_NO_MORE_DATA) { ++ *skipsize = 0; ++ ++ if (self->in_align == GST_AV1_PARSE_ALIGN_OBU) { ++ /* The buffer is already aligned to OBU, should not happen. 
*/ ++ if (consumed) { ++ *skipsize = consumed; ++ } else { ++ *skipsize = map_info.size; ++ } ++ GST_WARNING_OBJECT (parse, "Parse obu need more data, discard %d.", ++ *skipsize); ++ gst_av1_parse_reset_obu_data_state (self); ++ } ++ ret = GST_FLOW_OK; ++ goto out; ++ } else if (res == GST_AV1_PARSER_DROP) { ++ GST_DEBUG_OBJECT (parse, "Drop %d data", consumed); ++ *skipsize = consumed; ++ gst_av1_parse_reset_obu_data_state (self); ++ ret = GST_FLOW_OK; ++ goto out; ++ } else if (res != GST_AV1_PARSER_OK) { ++ GST_ERROR_OBJECT (parse, "Parse obu get unexpect error %d", res); ++ *skipsize = 0; ++ ret = GST_FLOW_ERROR; ++ goto out; ++ } ++ ++ g_assert (consumed); ++ ++ gst_av1_parse_update_src_caps (self, NULL); ++ if (self->discont) { ++ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); ++ self->discont = FALSE; ++ } ++ if (self->header) { ++ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_HEADER); ++ self->header = FALSE; ++ } ++ /* happen to be a frame boundary */ ++ if (frame_complete) ++ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_MARKER); ++ ++ GST_LOG_OBJECT (self, "Output one buffer with size %d", consumed); ++ ret = gst_base_parse_finish_frame (parse, frame, consumed); ++ *skipsize = 0; ++ ++out: ++ gst_buffer_unmap (buffer, &map_info); ++ gst_buffer_unref (buffer); ++ return ret; ++} ++ ++static void ++gst_av1_parse_create_subframe (GstBaseParseFrame * frame, ++ GstBaseParseFrame * subframe, GstBuffer * buffer) ++{ ++ gst_base_parse_frame_init (subframe); ++ subframe->flags |= frame->flags; ++ subframe->offset = frame->offset; ++ subframe->overhead = frame->overhead; ++ /* Just ref the input buffer. The base parse will check that ++ pointer, and it will be replaced by its out_buffer later. */ ++ subframe->buffer = gst_buffer_ref (buffer); ++} ++ ++static GstFlowReturn ++gst_av1_parse_handle_to_small_and_equal_align (GstBaseParse * parse, ++ GstBaseParseFrame * frame, gint * skipsize) ++{ ++ GstAV1Parse *self = GST_AV1_PARSE (parse); ++ GstMapInfo map_info; ++ GstAV1OBU obu; ++ GstFlowReturn ret = GST_FLOW_OK; ++ GstAV1ParserResult res = GST_AV1_PARSER_INVALID_OPERATION; ++ GstBuffer *buffer = gst_buffer_ref (frame->buffer); ++ guint32 offset, consumed_before_push, consumed; ++ gboolean frame_complete; ++ GstBaseParseFrame subframe; ++ ++ if (!gst_buffer_map (buffer, &map_info, GST_MAP_READ)) { ++ GST_ERROR_OBJECT (parse, "Couldn't map incoming buffer"); ++ return GST_FLOW_ERROR; ++ } ++ ++ self->buffer_pts = GST_BUFFER_PTS (buffer); ++ self->buffer_dts = GST_BUFFER_DTS (buffer); ++ self->buffer_duration = GST_BUFFER_DURATION (buffer); ++ ++ consumed_before_push = 0; ++ offset = 0; ++ frame_complete = FALSE; ++again: ++ while (offset < map_info.size) { ++ GST_BUFFER_OFFSET (buffer) = offset; ++ ++ res = gst_av1_parser_identify_one_obu (self->parser, ++ map_info.data + offset, map_info.size - offset, &obu, &consumed); ++ if (res == GST_AV1_PARSER_OK) ++ res = gst_av1_parse_handle_one_obu (self, &obu, &frame_complete, NULL); ++ if (res != GST_AV1_PARSER_OK) ++ break; ++ ++ if (obu.obu_type == GST_AV1_OBU_TEMPORAL_DELIMITER ++ && consumed_before_push > 0) { ++ GST_DEBUG_OBJECT (self, "Encounter TD inside one %s aligned" ++ " buffer, should not happen normally.", ++ gst_av1_parse_alignment_to_string (self->in_align)); ++ ++ if (self->in_align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B) ++ gst_av1_parser_reset_annex_b (self->parser); ++ ++ /* Not include this TD obu, it should belong to the next TU or frame, ++ we push all the data we already got. 
*/ ++ gst_av1_parse_create_subframe (frame, &subframe, buffer); ++ ret = gst_av1_parse_push_data (self, &subframe, ++ consumed_before_push, TRUE); ++ if (ret != GST_FLOW_OK) ++ goto out; ++ ++ /* Begin to find the next. */ ++ frame_complete = FALSE; ++ consumed_before_push = 0; ++ continue; ++ } ++ ++ gst_av1_parse_cache_one_obu (self, buffer, &obu, ++ map_info.data + offset, consumed, frame_complete); ++ ++ offset += consumed; ++ consumed_before_push += consumed; ++ ++ if ((self->align == GST_AV1_PARSE_ALIGN_OBU) || ++ (self->align == GST_AV1_PARSE_ALIGN_FRAME && frame_complete)) { ++ gst_av1_parse_create_subframe (frame, &subframe, buffer); ++ ret = gst_av1_parse_push_data (self, &subframe, ++ consumed_before_push, frame_complete); ++ if (ret != GST_FLOW_OK) ++ goto out; ++ ++ /* Begin to find the next. */ ++ frame_complete = FALSE; ++ consumed_before_push = 0; ++ continue; ++ } ++ } ++ ++ if (res == GST_AV1_PARSER_BITSTREAM_ERROR || ++ res == GST_AV1_PARSER_MISSING_OBU_REFERENCE) { ++ /* Discard the whole frame */ ++ *skipsize = map_info.size; ++ GST_WARNING_OBJECT (parse, "Parse obu error, discard %d", *skipsize); ++ if (self->in_align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B) ++ gst_av1_parser_reset_annex_b (self->parser); ++ gst_av1_parse_reset_obu_data_state (self); ++ ret = GST_FLOW_OK; ++ goto out; ++ } else if (res == GST_AV1_PARSER_NO_MORE_DATA) { ++ /* Discard the whole buffer */ ++ *skipsize = map_info.size; ++ GST_WARNING_OBJECT (parse, "Parse obu need more data, discard %d.", ++ *skipsize); ++ if (self->in_align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B) ++ gst_av1_parser_reset_annex_b (self->parser); ++ ++ gst_av1_parse_reset_obu_data_state (self); ++ ret = GST_FLOW_OK; ++ goto out; ++ } else if (res == GST_AV1_PARSER_DROP) { ++ GST_DEBUG_OBJECT (parse, "Drop %d data", consumed); ++ offset += consumed; ++ gst_av1_parse_reset_obu_data_state (self); ++ res = GST_AV1_PARSER_OK; ++ goto again; ++ } else if (res != GST_AV1_PARSER_OK) { ++ GST_ERROR_OBJECT (parse, "Parse obu get unexpect error %d", res); ++ *skipsize = 0; ++ ret = GST_FLOW_ERROR; ++ goto out; ++ } ++ ++ /* If the total buffer exhausted but frame is not complete, we just ++ push the left data and consider it as a frame. 
*/ ++ if (consumed_before_push > 0 && !frame_complete ++ && self->align == GST_AV1_PARSE_ALIGN_FRAME) { ++ g_assert (offset >= map_info.size); ++ /* Warning and still consider the frame is complete */ ++ GST_WARNING_OBJECT (self, "Exhaust the buffer but still incomplete frame," ++ " should not happend in %s alignment", ++ gst_av1_parse_alignment_to_string (self->in_align)); ++ } ++ ++ ret = gst_av1_parse_push_data (self, frame, consumed_before_push, TRUE); ++ ++out: ++ gst_buffer_unmap (buffer, &map_info); ++ gst_buffer_unref (buffer); ++ gst_av1_parse_reset_tu_timestamp (self); ++ return ret; ++} ++ ++static GstFlowReturn ++gst_av1_parse_handle_to_big_align (GstBaseParse * parse, ++ GstBaseParseFrame * frame, gint * skipsize) ++{ ++ GstAV1Parse *self = GST_AV1_PARSE (parse); ++ GstMapInfo map_info; ++ GstAV1OBU obu; ++ GstFlowReturn ret = GST_FLOW_OK; ++ GstAV1ParserResult res = GST_AV1_PARSER_OK; ++ GstBuffer *buffer = gst_buffer_ref (frame->buffer); ++ guint32 consumed; ++ gboolean frame_complete; ++ gboolean check_new_tu; ++ gboolean complete; ++ ++ g_assert (self->in_align <= GST_AV1_PARSE_ALIGN_FRAME); ++ ++ if (!gst_buffer_map (buffer, &map_info, GST_MAP_READ)) { ++ *skipsize = 0; ++ GST_ERROR_OBJECT (parse, "Couldn't map incoming buffer"); ++ return GST_FLOW_ERROR; ++ } ++ ++ complete = FALSE; ++again: ++ while (self->last_parsed_offset < map_info.size) { ++ res = gst_av1_parser_identify_one_obu (self->parser, ++ map_info.data + self->last_parsed_offset, ++ map_info.size - self->last_parsed_offset, &obu, &consumed); ++ if (res != GST_AV1_PARSER_OK) ++ break; ++ ++ check_new_tu = FALSE; ++ if (self->align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT ++ || self->align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B) { ++ res = gst_av1_parse_handle_one_obu (self, &obu, &frame_complete, ++ &check_new_tu); ++ } else { ++ res = gst_av1_parse_handle_one_obu (self, &obu, &frame_complete, NULL); ++ } ++ if (res != GST_AV1_PARSER_OK) ++ break; ++ ++ if (check_new_tu && (gst_adapter_available (self->cache_out) || ++ gst_adapter_available (self->frame_cache))) { ++ complete = TRUE; ++ break; ++ } ++ ++ if (self->align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT || ++ self->align == GST_AV1_PARSE_ALIGN_FRAME) { ++ GstBuffer *buf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, ++ self->last_parsed_offset, consumed); ++ gst_adapter_push (self->cache_out, buf); ++ } else if (self->align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B) { ++ gst_av1_parse_convert_to_annexb (self, buffer, &obu, frame_complete); ++ } else { ++ g_assert_not_reached (); ++ } ++ self->last_parsed_offset += consumed; ++ ++ if (self->align == GST_AV1_PARSE_ALIGN_FRAME && frame_complete) ++ complete = TRUE; ++ ++ if (complete) ++ break; ++ } ++ ++ /* Finish a complete frame anyway */ ++ if (complete || GST_BASE_PARSE_DRAINING (parse)) { ++ *skipsize = 0; ++ ++ /* push the left anyway if no error */ ++ if (res == GST_AV1_PARSER_OK) ++ ret = gst_av1_parse_push_data (self, frame, ++ self->last_parsed_offset, TRUE); ++ ++ self->last_parsed_offset = 0; ++ ++ goto out; ++ } ++ ++ if (res == GST_AV1_PARSER_BITSTREAM_ERROR || ++ res == GST_AV1_PARSER_MISSING_OBU_REFERENCE) { ++ *skipsize = map_info.size; ++ GST_WARNING_OBJECT (parse, "Parse obu error, discard whole buffer %d.", ++ *skipsize); ++ /* The adapter will be cleared in next loop because of ++ GST_BASE_PARSE_FRAME_FLAG_NEW_FRAME flag */ ++ gst_av1_parse_reset_obu_data_state (self); ++ ret = GST_FLOW_OK; ++ } else if (res == GST_AV1_PARSER_NO_MORE_DATA) { ++ *skipsize = 0; ++ ++ if 
(self->in_align >= GST_AV1_PARSE_ALIGN_OBU) { ++ /* The buffer is already aligned to OBU, should not happen. ++ The adapter will be cleared in next loop because of ++ GST_BASE_PARSE_FRAME_FLAG_NEW_FRAME flag */ ++ *skipsize = map_info.size; ++ gst_av1_parse_reset_obu_data_state (self); ++ GST_WARNING_OBJECT (parse, ++ "Parse obu need more data, discard whole buffer %d.", *skipsize); ++ } ++ ret = GST_FLOW_OK; ++ } else if (res == GST_AV1_PARSER_DROP) { ++ GST_DEBUG_OBJECT (parse, "Drop %d data", consumed); ++ self->last_parsed_offset += consumed; ++ gst_av1_parse_reset_obu_data_state (self); ++ res = GST_AV1_PARSER_OK; ++ goto again; ++ } else if (res == GST_AV1_PARSER_OK) { ++ /* Everything is correct but still not get a frame or tu, ++ need more data */ ++ GST_DEBUG_OBJECT (parse, "Need more data"); ++ *skipsize = 0; ++ ret = GST_FLOW_OK; ++ } else { ++ GST_ERROR_OBJECT (parse, "Parse obu get unexpect error %d", res); ++ *skipsize = 0; ++ ret = GST_FLOW_ERROR; ++ } ++ ++out: ++ gst_buffer_unmap (buffer, &map_info); ++ gst_buffer_unref (buffer); ++ return ret; ++} ++ ++/* Try to recognize whether the input is annex-b format. ++ return TRUE if we decide, FALSE if we can not decide or ++ encounter some error. */ ++static gboolean ++gst_av1_parse_detect_stream_format (GstBaseParse * parse, ++ GstBaseParseFrame * frame) ++{ ++ GstAV1Parse *self = GST_AV1_PARSE (parse); ++ GstMapInfo map_info; ++ GstAV1OBU obu; ++ GstAV1ParserResult res = GST_AV1_PARSER_INVALID_OPERATION; ++ GstBuffer *buffer = gst_buffer_ref (frame->buffer); ++ gboolean got_seq, got_frame; ++ gboolean frame_complete; ++ guint32 consumed; ++ guint32 total_consumed; ++ guint32 tu_sz; ++ gboolean ret = FALSE; ++ ++ g_assert (self->in_align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT); ++ g_assert (self->detect_annex_b == TRUE); ++ ++ if (!gst_buffer_map (buffer, &map_info, GST_MAP_READ)) { ++ GST_ERROR_OBJECT (parse, "Couldn't map incoming buffer"); ++ return FALSE; ++ } ++ ++ gst_av1_parser_reset (self->parser, FALSE); ++ ++ got_seq = FALSE; ++ got_frame = FALSE; ++ total_consumed = 0; ++ ++again: ++ while (total_consumed < map_info.size) { ++ res = gst_av1_parser_identify_one_obu (self->parser, ++ map_info.data + total_consumed, map_info.size - total_consumed, ++ &obu, &consumed); ++ if (res == GST_AV1_PARSER_OK) { ++ total_consumed += consumed; ++ res = gst_av1_parse_handle_one_obu (self, &obu, &frame_complete, NULL); ++ } ++ ++ if (res != GST_AV1_PARSER_OK) ++ break; ++ ++ if (obu.obu_type == GST_AV1_OBU_SEQUENCE_HEADER) ++ got_seq = TRUE; ++ ++ if (obu.obu_type == GST_AV1_OBU_REDUNDANT_FRAME_HEADER || ++ obu.obu_type == GST_AV1_OBU_FRAME || ++ obu.obu_type == GST_AV1_OBU_FRAME_HEADER) ++ got_frame = TRUE; ++ ++ if (got_seq || got_frame) ++ break; ++ } ++ ++ gst_av1_parser_reset (self->parser, FALSE); ++ ++ /* If succeed recognize seq or frame, it's done. ++ otherwise, just need to get more data. */ ++ if (got_seq || got_frame) { ++ ret = TRUE; ++ self->detect_annex_b = FALSE; ++ goto out; ++ } ++ ++ if (res == GST_AV1_PARSER_DROP) { ++ total_consumed += consumed; ++ res = GST_AV1_PARSER_OK; ++ gst_av1_parse_reset_obu_data_state (self); ++ goto again; ++ } ++ ++ /* Try the annex b format. The buffer should contain the whole TU, ++ and the buffer start with the TU size in leb128() format. */ ++ if (map_info.size < 8) { ++ /* Too small. */ ++ goto out; ++ } ++ ++ tu_sz = _read_leb128 (map_info.data, &res, &consumed); ++ if (tu_sz == 0 || res != GST_AV1_PARSER_OK) { ++ /* error to get the TU size, should not be annex b. 
*/ ++ goto out; ++ } ++ ++ if (tu_sz + consumed != map_info.size) { ++ GST_DEBUG_OBJECT (self, "Buffer size %" G_GSSIZE_FORMAT ", TU size %d," ++ " do not match.", map_info.size, tu_sz); ++ goto out; ++ } ++ ++ GST_INFO_OBJECT (self, "Detect the annex-b format"); ++ self->in_align = GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B; ++ self->detect_annex_b = FALSE; ++ gst_av1_parser_reset (self->parser, TRUE); ++ ret = TRUE; ++ ++out: ++ gst_av1_parse_reset_obu_data_state (self); ++ gst_buffer_unmap (buffer, &map_info); ++ gst_buffer_unref (buffer); ++ return ret; ++} ++ ++static GstFlowReturn ++gst_av1_parse_handle_frame (GstBaseParse * parse, ++ GstBaseParseFrame * frame, gint * skipsize) ++{ ++ GstAV1Parse *self = GST_AV1_PARSE (parse); ++ GstFlowReturn ret = GST_FLOW_OK; ++ guint in_level, out_level; ++ ++ if (GST_BUFFER_FLAG_IS_SET (frame->buffer, GST_BUFFER_FLAG_DISCONT)) { ++ self->discont = TRUE; ++ ++ if (frame->flags & GST_BASE_PARSE_FRAME_FLAG_NEW_FRAME) ++ gst_av1_parse_reset_obu_data_state (self); ++ } else { ++ self->discont = FALSE; ++ } ++ ++ GST_LOG_OBJECT (self, "Input frame size %" G_GSSIZE_FORMAT, ++ gst_buffer_get_size (frame->buffer)); ++ ++ /* avoid stale cached parsing state */ ++ if (frame->flags & GST_BASE_PARSE_FRAME_FLAG_NEW_FRAME) { ++ GST_LOG_OBJECT (self, "parsing new frame"); ++ gst_adapter_clear (self->cache_out); ++ gst_adapter_clear (self->frame_cache); ++ self->last_parsed_offset = 0; ++ self->header = FALSE; ++ self->keyframe = FALSE; ++ self->show_frame = FALSE; ++ } else { ++ GST_LOG_OBJECT (self, "resuming frame parsing"); ++ } ++ ++ /* When in pull mode, the sink pad has no caps, we may get the ++ caps by query the upstream element */ ++ if (self->in_align == GST_AV1_PARSE_ALIGN_NONE) { ++ GstCaps *upstream_caps; ++ ++ upstream_caps = ++ gst_pad_peer_query_caps (GST_BASE_PARSE_SINK_PAD (self), NULL); ++ if (upstream_caps) { ++ if (!gst_caps_is_empty (upstream_caps) ++ && !gst_caps_is_any (upstream_caps)) { ++ GstAV1ParseAligment align; ++ ++ GST_LOG_OBJECT (self, "upstream caps: %" GST_PTR_FORMAT, upstream_caps); ++ ++ /* fixate to avoid ambiguity with lists when parsing */ ++ upstream_caps = gst_caps_fixate (upstream_caps); ++ align = gst_av1_parse_alignment_from_caps (upstream_caps); ++ if (align == GST_AV1_PARSE_ALIGN_ERROR) { ++ GST_ERROR_OBJECT (self, "upstream caps %" GST_PTR_FORMAT ++ " set stream-format and alignment conflict.", upstream_caps); ++ ++ gst_caps_unref (upstream_caps); ++ return GST_FLOW_ERROR; ++ } ++ ++ self->in_align = align; ++ } ++ ++ gst_caps_unref (upstream_caps); ++ ++ gst_av1_parser_reset (self->parser, ++ self->in_align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B); ++ } ++ ++ if (self->in_align != GST_AV1_PARSE_ALIGN_NONE) { ++ GST_LOG_OBJECT (self, "Query the upstream get the alignment %s", ++ gst_av1_parse_alignment_to_string (self->in_align)); ++ } else { ++ self->in_align = GST_AV1_PARSE_ALIGN_BYTE; ++ GST_DEBUG_OBJECT (self, "alignment set to default %s", ++ gst_av1_parse_alignment_to_string (GST_AV1_PARSE_ALIGN_BYTE)); ++ } ++ } ++ ++ if (self->in_align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT ++ && self->detect_annex_b) { ++ /* Only happend at the first time of handle_frame, try to ++ recognize the annex b stream format. */ ++ if (gst_av1_parse_detect_stream_format (parse, frame)) { ++ GST_INFO_OBJECT (self, "Input alignment %s", ++ gst_av1_parse_alignment_to_string (self->in_align)); ++ } else { ++ /* Because the input is already TU aligned, we should skip ++ the whole problematic TU and check the next one. 
*/ ++ *skipsize = gst_buffer_get_size (frame->buffer); ++ GST_WARNING_OBJECT (self, "Fail to detect the stream format for TU," ++ " skip the whole TU %d", *skipsize); ++ return GST_FLOW_OK; ++ } ++ } ++ ++ /* We may in pull mode and no caps is set */ ++ if (self->align == GST_AV1_PARSE_ALIGN_NONE) ++ gst_av1_parse_negotiate (self, NULL); ++ ++ in_level = self->in_align; ++ if (self->in_align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B) ++ in_level = GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT; ++ out_level = self->align; ++ if (self->align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B) ++ out_level = GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT; ++ ++ if (self->in_align <= GST_AV1_PARSE_ALIGN_OBU ++ && self->align == GST_AV1_PARSE_ALIGN_OBU) { ++ ret = gst_av1_parse_handle_obu_to_obu (parse, frame, skipsize); ++ } else if (in_level < out_level) { ++ ret = gst_av1_parse_handle_to_big_align (parse, frame, skipsize); ++ } else { ++ ret = gst_av1_parse_handle_to_small_and_equal_align (parse, ++ frame, skipsize); ++ } ++ ++ return ret; ++} ++ ++static GstFlowReturn ++gst_av1_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame) ++{ ++ GstAV1Parse *self = GST_AV1_PARSE (parse); ++ ++ frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP; ++ ++ if (!frame->buffer) ++ return GST_FLOW_OK; ++ ++ if (self->align == GST_AV1_PARSE_ALIGN_FRAME) { ++ /* When the input align to TU, it may may contain more than one frames ++ inside its buffer. When splitting a TU into frames, the base parse ++ class only assign the PTS to the first frame and leave the others' ++ PTS invalid. But in fact, all decode only frames should have invalid ++ PTS while showable frames should have correct PTS setting. */ ++ if (self->in_align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT ++ || self->in_align == GST_AV1_PARSE_ALIGN_TEMPORAL_UNIT_ANNEX_B) { ++ if (GST_BUFFER_FLAG_IS_SET (frame->buffer, GST_BUFFER_FLAG_DECODE_ONLY)) { ++ GST_BUFFER_PTS (frame->buffer) = GST_CLOCK_TIME_NONE; ++ GST_BUFFER_DURATION (frame->buffer) = GST_CLOCK_TIME_NONE; ++ } else { ++ GST_BUFFER_PTS (frame->buffer) = self->buffer_pts; ++ GST_BUFFER_DURATION (frame->buffer) = self->buffer_duration; ++ } ++ ++ GST_BUFFER_DTS (frame->buffer) = self->buffer_dts; ++ } else { ++ if (GST_BUFFER_FLAG_IS_SET (frame->buffer, GST_BUFFER_FLAG_DECODE_ONLY)) { ++ GST_BUFFER_PTS (frame->buffer) = GST_CLOCK_TIME_NONE; ++ GST_BUFFER_DURATION (frame->buffer) = GST_CLOCK_TIME_NONE; ++ } ++ } ++ } else if (self->align == GST_AV1_PARSE_ALIGN_OBU) { ++ /* When we split a big frame or TU into OBUs, all OBUs should have the ++ same PTS and DTS of the input buffer, and should not have duration. */ ++ if (self->in_align >= GST_AV1_PARSE_ALIGN_FRAME) { ++ GST_BUFFER_PTS (frame->buffer) = self->buffer_pts; ++ GST_BUFFER_DTS (frame->buffer) = self->buffer_dts; ++ GST_BUFFER_DURATION (frame->buffer) = GST_CLOCK_TIME_NONE; ++ } ++ } ++ ++ GST_LOG_OBJECT (parse, "Adjust the frame buffer PTS/DTS/duration." 
++ " The buffer of size %" G_GSIZE_FORMAT " now with dts %" ++ GST_TIME_FORMAT ", pts %" GST_TIME_FORMAT ", duration %" ++ GST_TIME_FORMAT, gst_buffer_get_size (frame->buffer), ++ GST_TIME_ARGS (GST_BUFFER_DTS (frame->buffer)), ++ GST_TIME_ARGS (GST_BUFFER_PTS (frame->buffer)), ++ GST_TIME_ARGS (GST_BUFFER_DURATION (frame->buffer))); ++ ++ return GST_FLOW_OK; ++} +diff --git a/gst/videoparsers/gstav1parse.h b/gst/videoparsers/gstav1parse.h +new file mode 100644 +index 000000000..464658e1b +--- /dev/null ++++ b/gst/videoparsers/gstav1parse.h +@@ -0,0 +1,34 @@ ++/* GStreamer ++ * Copyright (C) 2020 He Junyan ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifndef __GST_AV1_PARSE_H__ ++#define __GST_AV1_PARSE_H__ ++ ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++#define GST_TYPE_AV1_PARSE (gst_av1_parse_get_type()) ++G_DECLARE_FINAL_TYPE (GstAV1Parse, ++ gst_av1_parse, GST, AV1_PARSE, GstBaseParse); ++ ++G_END_DECLS ++ ++#endif /* __GST_AV1_PARSE_H__ */ +diff --git a/gst/videoparsers/gstdiracparse.c b/gst/videoparsers/gstdiracparse.c +index 105f4f499..d7a0c92fd 100644 +--- a/gst/videoparsers/gstdiracparse.c ++++ b/gst/videoparsers/gstdiracparse.c +@@ -38,6 +38,7 @@ + #include + #include + #include ++#include "gstvideoparserselements.h" + #include "gstdiracparse.h" + #include "dirac_parse.h" + +@@ -96,6 +97,8 @@ GST_STATIC_PAD_TEMPLATE ("src", + + #define parent_class gst_dirac_parse_parent_class + G_DEFINE_TYPE (GstDiracParse, gst_dirac_parse, GST_TYPE_BASE_PARSE); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (diracparse, "diracparse", GST_RANK_NONE, ++ GST_TYPE_DIRAC_PARSE, videoparsers_element_init (plugin)); + + static void + gst_dirac_parse_class_init (GstDiracParseClass * klass) +diff --git a/gst/videoparsers/gsth263parse.c b/gst/videoparsers/gsth263parse.c +index 0f4d42ee7..d2c1e4954 100644 +--- a/gst/videoparsers/gsth263parse.c ++++ b/gst/videoparsers/gsth263parse.c +@@ -31,6 +31,7 @@ + + #include + #include ++#include "gstvideoparserselements.h" + #include "gsth263parse.h" + + #include +@@ -53,6 +54,9 @@ GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, + + #define parent_class gst_h263_parse_parent_class + G_DEFINE_TYPE (GstH263Parse, gst_h263_parse, GST_TYPE_BASE_PARSE); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (h263parse, "h263parse", ++ GST_RANK_PRIMARY + 1, GST_TYPE_H263_PARSE, ++ videoparsers_element_init (plugin)); + + static gboolean gst_h263_parse_start (GstBaseParse * parse); + static gboolean gst_h263_parse_stop (GstBaseParse * parse); +diff --git a/gst/videoparsers/gsth264parse.c b/gst/videoparsers/gsth264parse.c +index 6260a5f23..8bb47f9db 100644 +--- a/gst/videoparsers/gsth264parse.c ++++ b/gst/videoparsers/gsth264parse.c +@@ -29,6 +29,7 @@ + #include + #include + #include ++#include "gstvideoparserselements.h" + #include "gsth264parse.h" + + #include +@@ 
-98,6 +99,9 @@ static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src", + + #define parent_class gst_h264_parse_parent_class + G_DEFINE_TYPE (GstH264Parse, gst_h264_parse, GST_TYPE_BASE_PARSE); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (h264parse, "h264parse", ++ GST_RANK_PRIMARY + 1, GST_TYPE_H264_PARSE, ++ videoparsers_element_init (plugin)); + + static void gst_h264_parse_finalize (GObject * object); + +@@ -167,7 +171,7 @@ gst_h264_parse_class_init (GstH264ParseClass * klass) + "is attached to incoming buffer and also Picture Timing SEI exists " + "in the bitstream. To make this property work, SPS must contain " + "VUI and pic_struct_present_flag of VUI must be non-zero", +- DEFAULT_CONFIG_INTERVAL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ DEFAULT_UPDATE_TIMECODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + /* Override BaseParse vfuncs */ + parse_class->start = GST_DEBUG_FUNCPTR (gst_h264_parse_start); +@@ -200,7 +204,7 @@ gst_h264_parse_init (GstH264Parse * h264parse) + + h264parse->aud_needed = TRUE; + h264parse->aud_insert = TRUE; +- h264parse->update_timecode = FALSE; ++ h264parse->update_timecode = DEFAULT_UPDATE_TIMECODE; + } + + static void +@@ -208,6 +212,8 @@ gst_h264_parse_finalize (GObject * object) + { + GstH264Parse *h264parse = GST_H264_PARSE (object); + ++ gst_video_user_data_unregistered_clear (&h264parse->user_data_unregistered); ++ + g_object_unref (h264parse->frame_out); + + G_OBJECT_CLASS (parent_class)->finalize (object); +@@ -233,6 +239,7 @@ gst_h264_parse_reset_frame (GstH264Parse * h264parse) + h264parse->frame_start = FALSE; + h264parse->have_sps_in_frame = FALSE; + h264parse->have_pps_in_frame = FALSE; ++ h264parse->have_aud_in_frame = FALSE; + gst_adapter_clear (h264parse->frame_out); + } + +@@ -269,6 +276,7 @@ gst_h264_parse_reset_stream_info (GstH264Parse * h264parse) + h264parse->packetized = FALSE; + h264parse->push_codec = FALSE; + h264parse->first_frame = TRUE; ++ h264parse->ignore_vui_fps = FALSE; + + gst_buffer_replace (&h264parse->codec_data, NULL); + gst_buffer_replace (&h264parse->codec_data_in, NULL); +@@ -601,6 +609,21 @@ gst_h264_parse_process_sei_user_data (GstH264Parse * h264parse, + + } + ++static void ++gst_h264_parse_process_sei_user_data_unregistered (GstH264Parse * h264parse, ++ GstH264UserDataUnregistered * urud) ++{ ++ GstByteReader br; ++ ++ if (urud->data == NULL || urud->size < 1) ++ return; ++ ++ gst_byte_reader_init (&br, urud->data, urud->size); ++ ++ gst_video_parse_user_data_unregistered ((GstElement *) h264parse, ++ &h264parse->user_data_unregistered, &br, urud->uuid); ++} ++ + static void + gst_h264_parse_process_sei (GstH264Parse * h264parse, GstH264NalUnit * nalu) + { +@@ -659,6 +682,10 @@ gst_h264_parse_process_sei (GstH264Parse * h264parse, GstH264NalUnit * nalu) + gst_h264_parse_process_sei_user_data (h264parse, + &sei.payload.registered_user_data); + break; ++ case GST_H264_SEI_USER_DATA_UNREGISTERED: ++ gst_h264_parse_process_sei_user_data_unregistered (h264parse, ++ &sei.payload.user_data_unregistered); ++ break; + case GST_H264_SEI_BUF_PERIOD: + if (h264parse->ts_trn_nb == GST_CLOCK_TIME_NONE || + h264parse->dts == GST_CLOCK_TIME_NONE) +@@ -1126,6 +1153,7 @@ gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu) + if (pres != GST_H264_PARSER_OK) + return FALSE; + h264parse->aud_needed = FALSE; ++ h264parse->have_aud_in_frame = TRUE; + break; + default: + /* drop anything before the initial SPS */ +@@ -1227,8 +1255,9 @@ gst_h264_parse_handle_frame_packetized 
(GstBaseParse * parse, + parse_res = gst_h264_parser_identify_nalu_avc (h264parse->nalparser, + map.data, 0, map.size, nl, &nalu); + +- /* there is no AUD in AVC, always enable insertion, the pre_push function +- * will only add it once, and will only add it for byte-stream output. */ ++ /* Always enable AUD insertion per frame here. The pre_push function ++ * will only add it once, and will only add it for byte-stream output ++ * if AUD doesn't exist in the current frame */ + h264parse->aud_insert = TRUE; + + while (parse_res == GST_H264_PARSER_OK) { +@@ -1247,6 +1276,10 @@ gst_h264_parse_handle_frame_packetized (GstBaseParse * parse, + tmp_frame.overhead = frame->overhead; + tmp_frame.buffer = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, + nalu.offset, nalu.size); ++ /* Don't lose timestamp when offset is not 0. */ ++ GST_BUFFER_PTS (tmp_frame.buffer) = GST_BUFFER_PTS (buffer); ++ GST_BUFFER_DTS (tmp_frame.buffer) = GST_BUFFER_DTS (buffer); ++ GST_BUFFER_DURATION (tmp_frame.buffer) = GST_BUFFER_DURATION (buffer); + + /* Set marker on last packet */ + if (nl + nalu.size == left) { +@@ -2007,6 +2040,82 @@ get_level_string (GstH264SPS * sps) + } + } + ++typedef enum ++{ ++ GST_H264_LEVEL_L1 = 10, ++ GST_H264_LEVEL_L1B = 9, ++ GST_H264_LEVEL_L1_1 = 11, ++ GST_H264_LEVEL_L1_2 = 12, ++ GST_H264_LEVEL_L1_3 = 13, ++ GST_H264_LEVEL_L2_0 = 20, ++ GST_H264_LEVEL_L2_1 = 21, ++ GST_H264_LEVEL_L2_2 = 22, ++ GST_H264_LEVEL_L3 = 30, ++ GST_H264_LEVEL_L3_1 = 31, ++ GST_H264_LEVEL_L3_2 = 32, ++ GST_H264_LEVEL_L4 = 40, ++ GST_H264_LEVEL_L4_1 = 41, ++ GST_H264_LEVEL_L4_2 = 42, ++ GST_H264_LEVEL_L5 = 50, ++ GST_H264_LEVEL_L5_1 = 51, ++ GST_H264_LEVEL_L5_2 = 52, ++ GST_H264_LEVEL_L6 = 60, ++ GST_H264_LEVEL_L6_1 = 61, ++ GST_H264_LEVEL_L6_2 = 62, ++} GstH264Level; ++ ++typedef struct ++{ ++ GstH264Level level; ++ guint max_sample_per_sec; ++} GstH264LevelLimit; ++ ++static const GstH264LevelLimit level_limits_map[] = { ++ {GST_H264_LEVEL_L1, 380160}, ++ {GST_H264_LEVEL_L1B, 380160}, ++ {GST_H264_LEVEL_L1_1, 768000}, ++ {GST_H264_LEVEL_L1_2, 1536000}, ++ {GST_H264_LEVEL_L1_3, 3041280}, ++ {GST_H264_LEVEL_L2_0, 3041280}, ++ {GST_H264_LEVEL_L2_1, 5068800}, ++ {GST_H264_LEVEL_L2_2, 5184000}, ++ {GST_H264_LEVEL_L3, 10368000}, ++ {GST_H264_LEVEL_L3_1, 27648000}, ++ {GST_H264_LEVEL_L3_2, 55296000}, ++ {GST_H264_LEVEL_L4, 62914560}, ++ {GST_H264_LEVEL_L4_1, 62914560}, ++ {GST_H264_LEVEL_L4_2, 62914560}, ++ {GST_H264_LEVEL_L5, 150994944}, ++ {GST_H264_LEVEL_L5_1, 251658240}, ++ {GST_H264_LEVEL_L5_2, 530841600}, ++ {GST_H264_LEVEL_L6, 1069547520}, ++ {GST_H264_LEVEL_L6_1, 2139095040}, ++ {GST_H264_LEVEL_L6_2, 4278190080}, ++}; ++ ++/* A.3.4 Effect of level limits on frame rate (informative) */ ++static guint ++get_max_samples_per_second (const GstH264SPS * sps) ++{ ++ guint i; ++ guint n_levels = G_N_ELEMENTS (level_limits_map); ++ GstH264Level level = (GstH264Level) sps->level_idc; ++ ++ if (level == GST_H264_LEVEL_L1_1 && ++ (sps->profile_idc == 66 || sps->profile_idc == 77) && ++ sps->constraint_set3_flag) { ++ /* Level 1b */ ++ level = GST_H264_LEVEL_L1B; ++ } ++ ++ for (i = 0; i < n_levels; i++) { ++ if (level == level_limits_map[i].level) ++ return level_limits_map[i].max_sample_per_sec; ++ } ++ ++ return level_limits_map[n_levels - 1].max_sample_per_sec; ++} ++ + static void + gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps) + { +@@ -2090,6 +2199,32 @@ gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps) + * it in case we have no info */ + 
gst_h264_video_calculate_framerate (sps, h264parse->field_pic_flag, + h264parse->sei_pic_struct, &fps_num, &fps_den); ++ ++ /* Checks whether given framerate makes sense or not ++ * See also A.3.4 Effect of level limits on frame rate (informative) ++ */ ++ h264parse->ignore_vui_fps = FALSE; ++ if (fps_num > 0 && fps_den > 0 && sps->width > 0 && sps->height > 0 && ++ sps->vui_parameters_present_flag && ++ sps->vui_parameters.timing_info_present_flag) { ++ guint luma_samples = sps->width * sps->height; ++ guint max_samples = get_max_samples_per_second (sps); ++ gdouble max_fps, cur_fps; ++ ++ cur_fps = (gdouble) fps_num / fps_den; ++ max_fps = (gdouble) max_samples / luma_samples; ++ ++ /* XXX: allows up to 2x higher framerate */ ++ if (max_fps * 2 < cur_fps) { ++ GST_WARNING_OBJECT (h264parse, ++ "VUI framerate %.1f exceeds allowed maximum %.1f", ++ cur_fps, max_fps); ++ fps_num = 0; ++ fps_den = 1; ++ h264parse->ignore_vui_fps = TRUE; ++ } ++ } ++ + if (G_UNLIKELY (h264parse->fps_num != fps_num + || h264parse->fps_den != fps_den)) { + GST_DEBUG_OBJECT (h264parse, "framerate changed %d/%d", fps_num, fps_den); +@@ -2114,16 +2249,20 @@ gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps) + GstVideoColorimetry ci = { 0, }; + gchar *old_colorimetry = NULL; + +- if (vui->video_full_range_flag) +- ci.range = GST_VIDEO_COLOR_RANGE_0_255; +- else +- ci.range = GST_VIDEO_COLOR_RANGE_16_235; +- + ci.matrix = gst_video_color_matrix_from_iso (vui->matrix_coefficients); + ci.transfer = + gst_video_transfer_function_from_iso (vui->transfer_characteristics); + ci.primaries = gst_video_color_primaries_from_iso (vui->colour_primaries); + ++ if (ci.matrix != GST_VIDEO_COLOR_MATRIX_UNKNOWN ++ && ci.transfer != GST_VIDEO_TRANSFER_UNKNOWN ++ && ci.primaries != GST_VIDEO_COLOR_PRIMARIES_UNKNOWN) { ++ if (vui->video_full_range_flag) ++ ci.range = GST_VIDEO_COLOR_RANGE_0_255; ++ else ++ ci.range = GST_VIDEO_COLOR_RANGE_16_235; ++ } ++ + old_colorimetry = + gst_video_colorimetry_to_string (&h264parse->parsed_colorimetry); + colorimetry = gst_video_colorimetry_to_string (&ci); +@@ -2148,6 +2287,7 @@ gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps) + GstVideoMultiviewFlags mview_flags = h264parse->multiview_flags; + const gchar *chroma_format = NULL; + guint bit_depth_chroma; ++ const gchar *coded_picture_structure; + + fps_num = h264parse->fps_num; + fps_den = h264parse->fps_den; +@@ -2213,8 +2353,6 @@ gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps) + s2 = gst_caps_get_structure (caps, 0); + gst_structure_get_fraction (s2, "framerate", &h264parse->parsed_fps_n, + &h264parse->parsed_fps_d); +- gst_base_parse_set_frame_rate (GST_BASE_PARSE (h264parse), fps_num, +- fps_den, 0, 0); + + /* If we know the frame duration, and if we are not in one of the zero + * latency pattern, add one frame of latency */ +@@ -2228,6 +2366,15 @@ gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps) + latency); + } + ++ if (sps->frame_mbs_only_flag == 1) { ++ coded_picture_structure = "frame"; ++ } else { ++ coded_picture_structure = "field"; ++ } ++ ++ gst_caps_set_simple (caps, "coded-picture-structure", G_TYPE_STRING, ++ coded_picture_structure, NULL); ++ + bit_depth_chroma = sps->bit_depth_chroma_minus8 + 8; + + switch (sps->chroma_format_idc) { +@@ -2401,6 +2548,99 @@ gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps) + gst_buffer_unref (buf); + } + ++static GstClockTime ++gst_h264_parse_get_duration (GstH264Parse * 
h264parse, gboolean frame) ++{ ++ GstClockTime ret = GST_CLOCK_TIME_NONE; ++ GstH264SPS *sps = h264parse->nalparser->last_sps; ++ gint duration = 1; ++ ++ if (!frame) { ++ GST_LOG_OBJECT (h264parse, "no frame data -> 0 duration"); ++ ret = 0; ++ goto done; ++ } ++ ++ if (!sps) { ++ GST_DEBUG_OBJECT (h264parse, "referred SPS invalid"); ++ goto fps_duration; ++ } else if (h264parse->ignore_vui_fps) { ++ GST_DEBUG_OBJECT (h264parse, "VUI framerate is not reliable"); ++ goto fps_duration; ++ } else if (!sps->vui_parameters_present_flag) { ++ GST_DEBUG_OBJECT (h264parse, "unable to compute duration: VUI not present"); ++ goto fps_duration; ++ } else if (!sps->vui_parameters.timing_info_present_flag) { ++ GST_DEBUG_OBJECT (h264parse, ++ "unable to compute duration: timing info not present"); ++ goto fps_duration; ++ } else if (sps->vui_parameters.time_scale == 0) { ++ GST_DEBUG_OBJECT (h264parse, ++ "unable to compute duration: time_scale = 0 " ++ "(this is forbidden in spec; bitstream probably contains error)"); ++ goto fps_duration; ++ } ++ ++ if (h264parse->sei_pic_struct_pres_flag && ++ h264parse->sei_pic_struct != (guint8) - 1) { ++ /* Note that when h264parse->sei_pic_struct == -1 (unspecified), there ++ * are ways to infer its value. This is related to computing the ++ * TopFieldOrderCnt and BottomFieldOrderCnt, which looks ++ * complicated and thus not implemented for the time being. Yet ++ * the value we have here is correct for many applications ++ */ ++ switch (h264parse->sei_pic_struct) { ++ case GST_H264_SEI_PIC_STRUCT_TOP_FIELD: ++ case GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD: ++ duration = 1; ++ break; ++ case GST_H264_SEI_PIC_STRUCT_FRAME: ++ case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM: ++ case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP: ++ duration = 2; ++ break; ++ case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP: ++ case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM: ++ duration = 3; ++ break; ++ case GST_H264_SEI_PIC_STRUCT_FRAME_DOUBLING: ++ duration = 4; ++ break; ++ case GST_H264_SEI_PIC_STRUCT_FRAME_TRIPLING: ++ duration = 6; ++ break; ++ default: ++ GST_DEBUG_OBJECT (h264parse, ++ "h264parse->sei_pic_struct of unknown value %d. Not parsed", ++ h264parse->sei_pic_struct); ++ break; ++ } ++ } else { ++ duration = h264parse->field_pic_flag ? 
1 : 2; ++ } ++ ++ GST_LOG_OBJECT (h264parse, "frame tick duration %d", duration); ++ ++ ret = gst_util_uint64_scale (duration * GST_SECOND, ++ sps->vui_parameters.num_units_in_tick, sps->vui_parameters.time_scale); ++ /* sanity check */ ++ if (ret < GST_MSECOND) { ++ GST_DEBUG_OBJECT (h264parse, "discarding dur %" GST_TIME_FORMAT, ++ GST_TIME_ARGS (ret)); ++ goto fps_duration; ++ } ++ ++done: ++ return ret; ++ ++fps_duration: ++ if (h264parse->parsed_fps_d > 0 && h264parse->parsed_fps_n > 0) ++ ret = ++ gst_util_uint64_scale (GST_SECOND, h264parse->parsed_fps_d, ++ h264parse->parsed_fps_n); ++ goto done; ++} ++ + static void + gst_h264_parse_get_timestamp (GstH264Parse * h264parse, + GstClockTime * out_ts, GstClockTime * out_dur, gboolean frame) +@@ -2547,10 +2787,19 @@ gst_h264_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame) + + /* don't mess with timestamps if provided by upstream, + * particularly since our ts not that good they handle seeking etc */ +- if (h264parse->do_ts) ++ if (h264parse->do_ts) { + gst_h264_parse_get_timestamp (h264parse, + &GST_BUFFER_DTS (buffer), &GST_BUFFER_DURATION (buffer), + h264parse->frame_start); ++ } ++ ++ /* We don't want to let baseparse select a duration itself based ++ * solely on the framerate, as we have more per-frame information ++ * available */ ++ if (!GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buffer))) { ++ GST_BUFFER_DURATION (buffer) = ++ gst_h264_parse_get_duration (h264parse, h264parse->frame_start); ++ } + + if (h264parse->keyframe) + GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT); +@@ -2841,16 +3090,16 @@ gst_h264_parse_create_pic_timing_sei (GstH264Parse * h264parse, + + num_clock_ts = num_clock_ts_table[h264parse->sei_pic_struct]; + +- if (num_meta != num_clock_ts) { ++ if (num_meta > num_clock_ts) { + GST_LOG_OBJECT (h264parse, +- "The number of timecode meta %d is not equal to required %d", ++ "The number of timecode meta %d is superior to required %d", + num_meta, num_clock_ts); + + return NULL; + } + + GST_LOG_OBJECT (h264parse, +- "The number of timecode meta %d is equal", num_meta); ++ "The number of timecode meta %d is compatible", num_meta); + + memset (&sei, 0, sizeof (GstH264SEIMessage)); + sei.payloadType = GST_H264_SEI_PIC_TIMING; +@@ -3014,7 +3263,8 @@ gst_h264_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame) + + /* In case of byte-stream, insert au delimiter by default + * if it doesn't exist */ +- if (h264parse->aud_insert && h264parse->format == GST_H264_PARSE_FORMAT_BYTE) { ++ if (h264parse->aud_insert && !h264parse->have_aud_in_frame && ++ h264parse->format == GST_H264_PARSE_FORMAT_BYTE) { + GST_DEBUG_OBJECT (h264parse, "Inserting AUD into the stream."); + if (h264parse->align == GST_H264_PARSE_ALIGN_AU) { + GstMemory *mem = +@@ -3134,7 +3384,14 @@ gst_h264_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame) + } + #endif + +- if (!gst_buffer_get_video_time_code_meta (buffer)) { ++ if (frame->out_buffer) { ++ parse_buffer = frame->out_buffer = ++ gst_buffer_make_writable (frame->out_buffer); ++ } else { ++ parse_buffer = frame->buffer = gst_buffer_make_writable (frame->buffer); ++ } ++ ++ if (!gst_buffer_get_video_time_code_meta (parse_buffer)) { + guint i = 0; + + for (i = 0; i < 3 && h264parse->num_clock_timestamp; i++) { +@@ -3197,7 +3454,7 @@ gst_h264_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame) + "Add time code meta %02u:%02u:%02u:%02u", + tim->hours_value, tim->minutes_value, tim->seconds_value, n_frames); + +- 
gst_buffer_add_video_time_code_meta_full (buffer, ++ gst_buffer_add_video_time_code_meta_full (parse_buffer, + h264parse->parsed_fps_n, + h264parse->parsed_fps_d, + NULL, +@@ -3210,13 +3467,6 @@ gst_h264_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame) + h264parse->num_clock_timestamp = 0; + } + +- if (frame->out_buffer) { +- parse_buffer = frame->out_buffer = +- gst_buffer_make_writable (frame->out_buffer); +- } else { +- parse_buffer = frame->buffer = gst_buffer_make_writable (frame->buffer); +- } +- + if (is_interlaced) { + GST_BUFFER_FLAG_SET (parse_buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED); + if (h264parse->sei_pic_struct == GST_H264_SEI_PIC_STRUCT_TOP_FIELD) +@@ -3226,6 +3476,9 @@ gst_h264_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame) + gst_video_push_user_data ((GstElement *) h264parse, &h264parse->user_data, + parse_buffer); + ++ gst_video_push_user_data_unregistered ((GstElement *) h264parse, ++ &h264parse->user_data_unregistered, parse_buffer); ++ + gst_h264_parse_reset_frame (h264parse); + + return GST_FLOW_OK; +@@ -3238,11 +3491,11 @@ gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps) + GstStructure *str; + const GValue *codec_data_value; + GstBuffer *codec_data = NULL; +- gsize size; +- guint format, align, off; +- GstH264NalUnit nalu; ++ guint format, align; ++ GstH264NalUnit *nalu; + GstH264ParserResult parseres; + GstCaps *old_caps; ++ GstH264DecoderConfigRecord *config = NULL; + + h264parse = GST_H264_PARSE (parse); + +@@ -3307,12 +3560,7 @@ gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps) + /* packetized video has codec_data (required for AVC, optional for AVC3) */ + if (codec_data_value != NULL) { + GstMapInfo map; +- guint8 *data; +- guint num_sps, num_pps; +-#ifndef GST_DISABLE_GST_DEBUG +- guint profile; +-#endif +- gint i; ++ guint i; + + GST_DEBUG_OBJECT (h264parse, "have packetized h264"); + /* make note for optional split processing */ +@@ -3326,67 +3574,36 @@ gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps) + if (!codec_data) + goto avc_caps_codec_data_missing; + gst_buffer_map (codec_data, &map, GST_MAP_READ); +- data = map.data; +- size = map.size; + +- /* parse the avcC data */ +- if (size < 7) { /* when numSPS==0 and numPPS==0, length is 7 bytes */ +- gst_buffer_unmap (codec_data, &map); +- goto avcc_too_small; +- } +- /* parse the version, this must be 1 */ +- if (data[0] != 1) { ++ parseres = ++ gst_h264_parser_parse_decoder_config_record (h264parse->nalparser, ++ map.data, map.size, &config); ++ if (parseres != GST_H264_PARSER_OK) { + gst_buffer_unmap (codec_data, &map); +- goto wrong_version; ++ goto avcC_failed; + } +-#ifndef GST_DISABLE_GST_DEBUG +- /* AVCProfileIndication */ +- /* profile_compat */ +- /* AVCLevelIndication */ +- profile = (data[1] << 16) | (data[2] << 8) | data[3]; +- GST_DEBUG_OBJECT (h264parse, "profile %06x", profile); +-#endif + +- /* 6 bits reserved | 2 bits lengthSizeMinusOne */ +- /* this is the number of bytes in front of the NAL units to mark their +- * length */ +- h264parse->nal_length_size = (data[4] & 0x03) + 1; ++ h264parse->nal_length_size = config->length_size_minus_one + 1; + GST_DEBUG_OBJECT (h264parse, "nal length size %u", + h264parse->nal_length_size); ++ GST_DEBUG_OBJECT (h264parse, "AVCProfileIndication %d", ++ config->profile_indication); ++ GST_DEBUG_OBJECT (h264parse, "profile_compatibility %d", ++ config->profile_compatibility); ++ GST_DEBUG_OBJECT (h264parse, "AVCLevelIndication %d", ++ config->level_indication); + +- 
num_sps = data[5] & 0x1f; +- off = 6; +- for (i = 0; i < num_sps; i++) { +- parseres = gst_h264_parser_identify_nalu_avc (h264parse->nalparser, +- data, off, size, 2, &nalu); +- if (parseres != GST_H264_PARSER_OK) { +- gst_buffer_unmap (codec_data, &map); +- goto avcc_too_small; +- } +- +- gst_h264_parse_process_nal (h264parse, &nalu); +- off = nalu.offset + nalu.size; ++ for (i = 0; i < config->sps->len; i++) { ++ nalu = &g_array_index (config->sps, GstH264NalUnit, i); ++ gst_h264_parse_process_nal (h264parse, nalu); + } + +- if (off >= size) { +- gst_buffer_unmap (codec_data, &map); +- goto avcc_too_small; +- } +- num_pps = data[off]; +- off++; +- +- for (i = 0; i < num_pps; i++) { +- parseres = gst_h264_parser_identify_nalu_avc (h264parse->nalparser, +- data, off, size, 2, &nalu); +- if (parseres != GST_H264_PARSER_OK) { +- gst_buffer_unmap (codec_data, &map); +- goto avcc_too_small; +- } +- +- gst_h264_parse_process_nal (h264parse, &nalu); +- off = nalu.offset + nalu.size; ++ for (i = 0; i < config->pps->len; i++) { ++ nalu = &g_array_index (config->pps, GstH264NalUnit, i); ++ gst_h264_parse_process_nal (h264parse, nalu); + } + ++ gst_h264_decoder_config_record_free (config); + gst_buffer_unmap (codec_data, &map); + + gst_buffer_replace (&h264parse->codec_data_in, codec_data); +@@ -3461,14 +3678,9 @@ bytestream_caps_with_codec_data: + "expected, send SPS/PPS in-band with data or in streamheader field"); + goto refuse_caps; + } +-avcc_too_small: +- { +- GST_DEBUG_OBJECT (h264parse, "avcC size %" G_GSIZE_FORMAT " < 8", size); +- goto refuse_caps; +- } +-wrong_version: ++avcC_failed: + { +- GST_DEBUG_OBJECT (h264parse, "wrong avcC version"); ++ GST_DEBUG_OBJECT (h264parse, "Failed to parse avcC data"); + goto refuse_caps; + } + refuse_caps: +diff --git a/gst/videoparsers/gsth264parse.h b/gst/videoparsers/gsth264parse.h +index c526defdd..1b6939de3 100644 +--- a/gst/videoparsers/gsth264parse.h ++++ b/gst/videoparsers/gsth264parse.h +@@ -92,6 +92,16 @@ struct _GstH264Parse + gboolean have_sps_in_frame; + gboolean have_pps_in_frame; + ++ /* per frame AU Delimiter check used when in_format == avc or avc3 */ ++ gboolean have_aud_in_frame; ++ ++ /* tracing state whether h264parse needs to insert AUD or not. 
++ * Used when in_format == byte-stream */ ++ gboolean aud_needed; ++ ++ /* For insertion of AU Delimiter */ ++ gboolean aud_insert; ++ + gboolean first_frame; + + /* collected SPS and PPS NALUs */ +@@ -107,6 +117,7 @@ struct _GstH264Parse + guint8 sei_pic_struct; + guint8 sei_pic_struct_pres_flag; + guint field_pic_flag; ++ gboolean ignore_vui_fps; + + /* cached timestamps */ + /* (trying to) track upstream dts and interpolate */ +@@ -146,11 +157,8 @@ struct _GstH264Parse + GstVideoMultiviewFlags multiview_flags; + gboolean first_in_bundle; + +- /* For insertion of AU Delimiter */ +- gboolean aud_needed; +- gboolean aud_insert; +- + GstVideoParseUserData user_data; ++ GstVideoParseUserDataUnregistered user_data_unregistered; + + GstVideoMasteringDisplayInfo mastering_display_info; + guint mastering_display_info_state; +diff --git a/gst/videoparsers/gsth265parse.c b/gst/videoparsers/gsth265parse.c +index 32f23d875..0fb4652f0 100644 +--- a/gst/videoparsers/gsth265parse.c ++++ b/gst/videoparsers/gsth265parse.c +@@ -24,6 +24,7 @@ + + #include + #include ++#include "gstvideoparserselements.h" + #include "gsth265parse.h" + + #include +@@ -91,6 +92,9 @@ static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src", + + #define parent_class gst_h265_parse_parent_class + G_DEFINE_TYPE (GstH265Parse, gst_h265_parse, GST_TYPE_BASE_PARSE); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (h265parse, "h265parse", ++ GST_RANK_SECONDARY, GST_TYPE_H265_PARSE, ++ videoparsers_element_init (plugin)); + + static void gst_h265_parse_finalize (GObject * object); + +@@ -793,14 +797,8 @@ gst_h265_parse_process_nal (GstH265Parse * h265parse, GstH265NalUnit * nalu) + h265parse->state |= GST_H265_PARSE_STATE_GOT_SPS; + break; + case GST_H265_NAL_PPS: +- /* expected state: got-sps */ +- h265parse->state &= GST_H265_PARSE_STATE_GOT_SPS; +- if (!GST_H265_PARSE_STATE_VALID (h265parse, GST_H265_PARSE_STATE_GOT_SPS)) +- return FALSE; +- + pres = gst_h265_parser_parse_pps (nalparser, nalu, &pps); + +- + /* arranged for a fallback pps.id, so use that one and only warn */ + if (pres != GST_H265_PARSER_OK) { + GST_WARNING_OBJECT (h265parse, "failed to parse PPS:"); +@@ -956,16 +954,8 @@ gst_h265_parse_process_nal (GstH265Parse * h265parse, GstH265NalUnit * nalu) + break; + } + case GST_H265_NAL_AUD: +- /* Just accumulate AU Delimiter, whether it's before SPS or not */ +- pres = gst_h265_parser_parse_nal (nalparser, nalu); +- if (pres != GST_H265_PARSER_OK) +- return FALSE; +- break; + default: +- /* drop anything before the initial SPS */ +- if (!GST_H265_PARSE_STATE_VALID (h265parse, GST_H265_PARSE_STATE_GOT_SPS)) +- return FALSE; +- ++ /* Just accumulate AU Delimiter, whether it's before SPS or not */ + pres = gst_h265_parser_parse_nal (nalparser, nalu); + if (pres != GST_H265_PARSER_OK) + return FALSE; +@@ -1071,6 +1061,10 @@ gst_h265_parse_handle_frame_packetized (GstBaseParse * parse, + tmp_frame.overhead = frame->overhead; + tmp_frame.buffer = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, + nalu.offset, nalu.size); ++ /* Don't lose timestamp when offset is not 0. 
*/ ++ GST_BUFFER_PTS (tmp_frame.buffer) = GST_BUFFER_PTS (buffer); ++ GST_BUFFER_DTS (tmp_frame.buffer) = GST_BUFFER_DTS (buffer); ++ GST_BUFFER_DURATION (tmp_frame.buffer) = GST_BUFFER_DURATION (buffer); + + /* Set marker on last packet */ + if (nl + nalu.size == left) { +@@ -1846,6 +1840,106 @@ get_compatible_profile_caps (GstH265SPS * sps, GstH265Profile profile) + (GST_H265_PROFILE_SCREEN_EXTENDED_HIGH_THROUGHPUT_444_14); + break; + } ++ /* All the -intra profiles can map to non-intra profiles, except ++ the monochrome case for main and main-10. */ ++ case GST_H265_PROFILE_MAIN_INTRA: ++ { ++ if (sps->chroma_format_idc == 1) { ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN); ++ ++ /* Add all main compatible profiles without monochrome. */ ++ /* A.3.3 */ ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_10); ++ ++ /* A.3.5 */ ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_444_10); ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_444_12); ++ ++ /* A.3.7 */ ++ profiles |= profile_to_flag (GST_H265_PROFILE_SCREEN_EXTENDED_MAIN); ++ profiles |= profile_to_flag (GST_H265_PROFILE_SCREEN_EXTENDED_MAIN_10); ++ profiles |= ++ profile_to_flag ++ (GST_H265_PROFILE_SCREEN_EXTENDED_HIGH_THROUGHPUT_444); ++ profiles |= ++ profile_to_flag ++ (GST_H265_PROFILE_SCREEN_EXTENDED_HIGH_THROUGHPUT_444_10); ++ profiles |= ++ profile_to_flag ++ (GST_H265_PROFILE_SCREEN_EXTENDED_HIGH_THROUGHPUT_444_14); ++ ++ /* G.11.1.1 */ ++ profiles |= profile_to_flag (GST_H265_PROFILE_MULTIVIEW_MAIN); ++ ++ /* H.11.1.1 */ ++ profiles |= profile_to_flag (GST_H265_PROFILE_SCALABLE_MAIN); ++ profiles |= profile_to_flag (GST_H265_PROFILE_SCALABLE_MAIN_10); ++ ++ /* I.11.1.1 */ ++ profiles |= profile_to_flag (GST_H265_PROFILE_3D_MAIN); ++ } ++ ++ /* Add all main compatible profiles with monochrome. */ ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_12); ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_422_10); ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_422_12); ++ profiles |= profile_to_flag (GST_H265_PROFILE_SCREEN_EXTENDED_MAIN_444); ++ profiles |= ++ profile_to_flag (GST_H265_PROFILE_SCREEN_EXTENDED_MAIN_444_10); ++ break; ++ } ++ case GST_H265_PROFILE_MAIN_10_INTRA: ++ { ++ if (sps->chroma_format_idc == 1) { ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_10); ++ ++ /* Add all main-10 compatible profiles without monochrome. */ ++ /* A.3.5 */ ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_444_10); ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_444_12); ++ ++ /* A.3.7 */ ++ profiles |= profile_to_flag (GST_H265_PROFILE_SCREEN_EXTENDED_MAIN_10); ++ ++ /* H.11.1.1 */ ++ profiles |= profile_to_flag (GST_H265_PROFILE_SCALABLE_MAIN_10); ++ } ++ ++ /* Add all main-10 compatible profiles with monochrome. */ ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_12); ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_422_10); ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_422_12); ++ break; ++ } ++ case GST_H265_PROFILE_MAIN_12_INTRA: ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_12); ++ break; ++ case GST_H265_PROFILE_MAIN_422_10_INTRA: ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_422_10); ++ break; ++ case GST_H265_PROFILE_MAIN_422_12_INTRA: ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_422_12); ++ break; ++ case GST_H265_PROFILE_MAIN_444_INTRA: ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_444); ++ ++ /* Add all main444 compatible profiles. 
*/ ++ /* A.3.7 */ ++ profiles |= profile_to_flag (GST_H265_PROFILE_SCREEN_EXTENDED_MAIN_444); ++ profiles |= ++ profile_to_flag (GST_H265_PROFILE_SCREEN_EXTENDED_MAIN_444_10); ++ break; ++ case GST_H265_PROFILE_MAIN_444_10_INTRA: ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_444_10); ++ ++ /* Add all main444-10 compatible profiles. */ ++ /* A.3.7 */ ++ profiles |= ++ profile_to_flag (GST_H265_PROFILE_SCREEN_EXTENDED_MAIN_444_10); ++ break; ++ case GST_H265_PROFILE_MAIN_444_12_INTRA: ++ profiles |= profile_to_flag (GST_H265_PROFILE_MAIN_444_12); ++ break; + default: + break; + } +@@ -1878,6 +1972,22 @@ get_compatible_profile_caps (GstH265SPS * sps, GstH265Profile profile) + return caps; + } + ++static void ++fix_invalid_profile (GstH265Parse * h265parse, GstCaps * caps, GstH265SPS * sps) ++{ ++ /* HACK: This is a work-around to identify some main profile streams ++ * having wrong profile_idc. There are some wrongly encoded main profile ++ * streams which doesn't have any of the profile_idc values mentioned in ++ * Annex-A. Just assuming them as MAIN profile for now if they meet the ++ * A.3.2 requirement. */ ++ if (sps->chroma_format_idc == 1 && sps->bit_depth_luma_minus8 == 0 && ++ sps->bit_depth_chroma_minus8 == 0 && sps->sps_extension_flag == 0) { ++ gst_caps_set_simple (caps, "profile", G_TYPE_STRING, "main", NULL); ++ GST_WARNING_OBJECT (h265parse, ++ "Wrong profile_idc = 0, setting it as main profile !!"); ++ } ++} ++ + /* if downstream didn't support the exact profile indicated in sps header, + * check for the compatible profiles also */ + static void +@@ -1886,6 +1996,9 @@ ensure_caps_profile (GstH265Parse * h265parse, GstCaps * caps, GstH265SPS * sps, + { + GstCaps *peer_caps, *compat_caps; + ++ if (profile == GST_H265_PROFILE_INVALID) ++ fix_invalid_profile (h265parse, caps, sps); ++ + peer_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (h265parse)); + if (!peer_caps || !gst_caps_can_intersect (caps, peer_caps)) { + GstCaps *filter_caps = gst_caps_new_empty_simple ("video/x-h265"); +@@ -1930,6 +2043,31 @@ ensure_caps_profile (GstH265Parse * h265parse, GstCaps * caps, GstH265SPS * sps, + gst_caps_unref (peer_caps); + } + ++static gboolean ++gst_h265_parse_is_field_interlaced (GstH265Parse * h265parse) ++{ ++ /* FIXME: The SEI is optional, so theoretically there could be files with ++ * the interlaced_source_flag set to TRUE but no SEI present, or SEI present ++ * but no pic_struct. 
Haven't seen any such files in practice, and we don't ++ * know how to interpret the data without the pic_struct, so we'll treat ++ * them as progressive */ ++ ++ switch (h265parse->sei_pic_struct) { ++ case GST_H265_SEI_PIC_STRUCT_TOP_FIELD: ++ case GST_H265_SEI_PIC_STRUCT_TOP_PAIRED_PREVIOUS_BOTTOM: ++ case GST_H265_SEI_PIC_STRUCT_TOP_PAIRED_NEXT_BOTTOM: ++ case GST_H265_SEI_PIC_STRUCT_BOTTOM_FIELD: ++ case GST_H265_SEI_PIC_STRUCT_BOTTOM_PAIRED_PREVIOUS_TOP: ++ case GST_H265_SEI_PIC_STRUCT_BOTTOM_PAIRED_NEXT_TOP: ++ return TRUE; ++ break; ++ default: ++ break; ++ } ++ ++ return FALSE; ++} ++ + static void + gst_h265_parse_update_src_caps (GstH265Parse * h265parse, GstCaps * caps) + { +@@ -1938,6 +2076,7 @@ gst_h265_parse_update_src_caps (GstH265Parse * h265parse, GstCaps * caps) + gboolean modified = FALSE; + GstBuffer *buf = NULL; + GstStructure *s = NULL; ++ gint width, height; + + if (G_UNLIKELY (!gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD + (h265parse)))) +@@ -2002,19 +2141,21 @@ gst_h265_parse_update_src_caps (GstH265Parse * h265parse, GstCaps * caps) + crop_width = sps->width; + crop_height = sps->height; + } ++ if (gst_h265_parse_is_field_interlaced (h265parse)) { ++ crop_height *= 2; ++ } + + if (G_UNLIKELY (h265parse->width != crop_width || + h265parse->height != crop_height)) { + h265parse->width = crop_width; +- h265parse->height = sps->profile_tier_level.interlaced_source_flag ? +- crop_height * 2 : crop_height; ++ h265parse->height = crop_height; + GST_INFO_OBJECT (h265parse, "resolution changed %dx%d", + h265parse->width, h265parse->height); + modified = TRUE; + } + + /* 0/1 is set as the default in the codec parser */ +- if (vui->timing_info_present_flag) { ++ if (vui->timing_info_present_flag && !h265parse->framerate_from_caps) { + gint fps_num = 0, fps_den = 1; + + if (!(sps->fps_num == 0 && sps->fps_den == 1)) { +@@ -2025,8 +2166,20 @@ gst_h265_parse_update_src_caps (GstH265Parse * h265parse, GstCaps * caps) + fps_num = sps->vui_params.time_scale; + fps_den = sps->vui_params.num_units_in_tick; + +- if (sps->profile_tier_level.interlaced_source_flag) +- fps_num /= 2; ++ if (gst_h265_parse_is_field_interlaced (h265parse)) { ++ gint new_fps_num, new_fps_den; ++ ++ if (!gst_util_fraction_multiply (fps_num, fps_den, 1, 2, &new_fps_num, ++ &new_fps_den)) { ++ GST_WARNING_OBJECT (h265parse, "Error calculating the new framerate" ++ " - integer overflow; setting it to 0/1"); ++ fps_num = 0; ++ fps_den = 1; ++ } else { ++ fps_num = new_fps_num; ++ fps_den = new_fps_den; ++ } ++ } + } + + if (G_UNLIKELY (h265parse->fps_num != fps_num +@@ -2048,7 +2201,6 @@ gst_h265_parse_update_src_caps (GstH265Parse * h265parse, GstCaps * caps) + h265parse->parsed_par_n, h265parse->parsed_par_d); + modified = TRUE; + } +- + } + + if (vui->video_signal_type_present_flag && +@@ -2084,7 +2236,6 @@ gst_h265_parse_update_src_caps (GstH265Parse * h265parse, GstCaps * caps) + if (G_UNLIKELY (modified || h265parse->update_caps)) { + gint fps_num = h265parse->fps_num; + gint fps_den = h265parse->fps_den; +- gint width, height; + GstClockTime latency = 0; + + caps = gst_caps_copy (sink_caps); +@@ -2103,6 +2254,7 @@ gst_h265_parse_update_src_caps (GstH265Parse * h265parse, GstCaps * caps) + gst_caps_set_simple (caps, "width", G_TYPE_INT, width, + "height", G_TYPE_INT, height, NULL); + ++ h265parse->framerate_from_caps = FALSE; + /* upstream overrides */ + if (s && gst_structure_has_field (s, "framerate")) + gst_structure_get_fraction (s, "framerate", &fps_num, &fps_den); +@@ -2120,8 +2272,9 @@ 
gst_h265_parse_update_src_caps (GstH265Parse * h265parse, GstCaps * caps) + &h265parse->parsed_fps_d); + gst_base_parse_set_frame_rate (GST_BASE_PARSE (h265parse), + fps_num, fps_den, 0, 0); +- val = sps->profile_tier_level.interlaced_source_flag ? GST_SECOND / 2 : ++ val = gst_h265_parse_is_field_interlaced (h265parse) ? GST_SECOND / 2 : + GST_SECOND; ++ h265parse->framerate_from_caps = TRUE; + + /* If we know the frame duration, and if we are not in one of the zero + * latency pattern, add one frame of latency */ +@@ -2175,6 +2328,7 @@ gst_h265_parse_update_src_caps (GstH265Parse * h265parse, GstCaps * caps) + const gchar *mdi_str = NULL; + const gchar *cll_str = NULL; + gboolean codec_data_modified = FALSE; ++ GstStructure *st; + + gst_caps_set_simple (caps, "parsed", G_TYPE_BOOLEAN, TRUE, + "stream-format", G_TYPE_STRING, +@@ -2183,11 +2337,32 @@ gst_h265_parse_update_src_caps (GstH265Parse * h265parse, GstCaps * caps) + gst_h265_parse_get_string (h265parse, FALSE, h265parse->align), NULL); + + gst_h265_parse_get_par (h265parse, &par_n, &par_d); +- if (par_n != 0 && par_d != 0 && ++ ++ width = 0; ++ height = 0; ++ st = gst_caps_get_structure (caps, 0); ++ gst_structure_get_int (st, "width", &width); ++ gst_structure_get_int (st, "height", &height); ++ ++ /* If no resolution info, do not consider aspect ratio */ ++ if (par_n != 0 && par_d != 0 && width > 0 && height > 0 && + (!s || !gst_structure_has_field (s, "pixel-aspect-ratio"))) { +- GST_INFO_OBJECT (h265parse, "PAR %d/%d", par_n, par_d); ++ gint new_par_d = par_d; ++ /* Special case for some encoders which provide an 1:2 pixel aspect ratio ++ * for HEVC interlaced content, possibly to work around decoders that don't ++ * support field-based interlacing. Add some defensive checks to check for ++ * a "common" aspect ratio. */ ++ if (par_n == 1 && par_d == 2 ++ && gst_h265_parse_is_field_interlaced (h265parse) ++ && !gst_video_is_common_aspect_ratio (width, height, par_n, par_d) ++ && gst_video_is_common_aspect_ratio (width, height, 1, 1)) { ++ GST_WARNING_OBJECT (h265parse, "PAR 1/2 makes the aspect ratio of " ++ "a %d x %d frame uncommon. Switching to 1/1", width, height); ++ new_par_d = 1; ++ } ++ GST_INFO_OBJECT (h265parse, "PAR %d/%d", par_n, new_par_d); + gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION, +- par_n, par_d, NULL); ++ par_n, new_par_d, NULL); + } + + /* set profile and level in caps */ +@@ -2195,15 +2370,82 @@ gst_h265_parse_update_src_caps (GstH265Parse * h265parse, GstCaps * caps) + const gchar *profile, *tier, *level; + GstH265Profile p; + +- p = gst_h265_profile_tier_level_get_profile (&sps->profile_tier_level); ++ p = gst_h265_get_profile_from_sps (sps); ++ /* gst_h265_get_profile_from_sps() method will determine profile ++ * as defined in spec, with allowing slightly broken profile-tier-level ++ * bits, then it might not be able to cover all cases. 
++ * If it's still unknown, do guess again */ ++ if (p == GST_H265_PROFILE_INVALID) { ++ GST_WARNING_OBJECT (h265parse, "Unknown profile, guessing"); ++ switch (sps->chroma_format_idc) { ++ case 0: ++ if (sps->bit_depth_luma_minus8 == 0) { ++ p = GST_H265_PROFILE_MONOCHROME; ++ } else if (sps->bit_depth_luma_minus8 <= 2) { ++ p = GST_H265_PROFILE_MONOCHROME_10; ++ } else if (sps->bit_depth_luma_minus8 <= 4) { ++ p = GST_H265_PROFILE_MONOCHROME_12; ++ } else { ++ p = GST_H265_PROFILE_MONOCHROME_16; ++ } ++ break; ++ case 1: ++ if (sps->bit_depth_luma_minus8 == 0) { ++ p = GST_H265_PROFILE_MAIN; ++ } else if (sps->bit_depth_luma_minus8 <= 2) { ++ p = GST_H265_PROFILE_MAIN_10; ++ } else if (sps->bit_depth_luma_minus8 <= 4) { ++ p = GST_H265_PROFILE_MAIN_12; ++ } else { ++ p = GST_H265_PROFILE_MAIN_444_16_INTRA; ++ } ++ break; ++ case 2: ++ if (sps->bit_depth_luma_minus8 <= 2) { ++ p = GST_H265_PROFILE_MAIN_422_10; ++ } else if (sps->bit_depth_luma_minus8 <= 4) { ++ p = GST_H265_PROFILE_MAIN_422_12; ++ } else { ++ p = GST_H265_PROFILE_MAIN_444_16_INTRA; ++ } ++ break; ++ case 3: ++ if (sps->bit_depth_luma_minus8 == 0) { ++ p = GST_H265_PROFILE_MAIN_444; ++ } else if (sps->bit_depth_luma_minus8 <= 2) { ++ p = GST_H265_PROFILE_MAIN_444_10; ++ } else if (sps->bit_depth_luma_minus8 <= 4) { ++ p = GST_H265_PROFILE_MAIN_444_12; ++ } else { ++ p = GST_H265_PROFILE_MAIN_444_16_INTRA; ++ } ++ break; ++ default: ++ break; ++ } ++ } ++ + profile = gst_h265_profile_to_string (p); ++ ++ if (s && gst_structure_has_field (s, "profile")) { ++ const gchar *profile_sink = gst_structure_get_string (s, "profile"); ++ GstH265Profile p_sink = gst_h265_profile_from_string (profile_sink); ++ ++ if (p != p_sink) { ++ const gchar *profile_src; ++ ++ p = MAX (p, p_sink); ++ profile_src = (p == p_sink) ? profile_sink : profile; ++ GST_INFO_OBJECT (h265parse, ++ "Upstream profile (%s) is different than in SPS (%s). " ++ "Using %s.", profile_sink, profile, profile_src); ++ profile = profile_src; ++ } ++ } ++ + if (profile != NULL) + gst_caps_set_simple (caps, "profile", G_TYPE_STRING, profile, NULL); + +- if (sps->profile_tier_level.interlaced_source_flag) +- gst_caps_set_simple (caps, "interlace-mode", G_TYPE_STRING, +- "interleaved", NULL); +- + tier = get_tier_string (sps->profile_tier_level.tier_flag); + if (tier != NULL) + gst_caps_set_simple (caps, "tier", G_TYPE_STRING, tier, NULL); +@@ -2342,11 +2584,10 @@ gst_h265_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame) + gst_h265_parse_update_src_caps (h265parse, NULL); + + if (h265parse->fps_num > 0 && h265parse->fps_den > 0) { +- GstH265SPS *sps = h265parse->nalparser->last_sps; +- GstClockTime val; ++ GstClockTime val = ++ gst_h265_parse_is_field_interlaced (h265parse) ? GST_SECOND / ++ 2 : GST_SECOND; + +- val = (sps != NULL && sps->profile_tier_level.interlaced_source_flag) ? 
+- GST_SECOND / 2 : GST_SECOND; + GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (val, + h265parse->fps_den, h265parse->fps_num); + } +@@ -2748,6 +2989,37 @@ gst_h265_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame) + } + } + ++ if (frame->out_buffer) { ++ parse_buffer = frame->out_buffer = ++ gst_buffer_make_writable (frame->out_buffer); ++ } else { ++ parse_buffer = frame->buffer = gst_buffer_make_writable (frame->buffer); ++ } ++ ++ /* see section D.3.3 of the spec */ ++ switch (h265parse->sei_pic_struct) { ++ case GST_H265_SEI_PIC_STRUCT_TOP_BOTTOM: ++ case GST_H265_SEI_PIC_STRUCT_BOTTOM_TOP: ++ case GST_H265_SEI_PIC_STRUCT_TOP_BOTTOM_TOP: ++ case GST_H265_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM: ++ GST_BUFFER_FLAG_SET (parse_buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED); ++ break; ++ case GST_H265_SEI_PIC_STRUCT_TOP_FIELD: ++ case GST_H265_SEI_PIC_STRUCT_TOP_PAIRED_NEXT_BOTTOM: ++ case GST_H265_SEI_PIC_STRUCT_TOP_PAIRED_PREVIOUS_BOTTOM: ++ GST_BUFFER_FLAG_SET (parse_buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED); ++ GST_BUFFER_FLAG_SET (parse_buffer, GST_VIDEO_BUFFER_FLAG_TOP_FIELD); ++ break; ++ case GST_H265_SEI_PIC_STRUCT_BOTTOM_FIELD: ++ case GST_H265_SEI_PIC_STRUCT_BOTTOM_PAIRED_PREVIOUS_TOP: ++ case GST_H265_SEI_PIC_STRUCT_BOTTOM_PAIRED_NEXT_TOP: ++ GST_BUFFER_FLAG_SET (parse_buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED); ++ GST_BUFFER_FLAG_SET (parse_buffer, GST_VIDEO_BUFFER_FLAG_BOTTOM_FIELD); ++ break; ++ default: ++ break; ++ } ++ + { + guint i = 0; + +@@ -2809,7 +3081,7 @@ gst_h265_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame) + gst_util_uint64_scale_int (h265parse->time_code.n_frames[i], 1, + 2 - h265parse->time_code.units_field_based_flag[i]); + +- gst_buffer_add_video_time_code_meta_full (buffer, ++ gst_buffer_add_video_time_code_meta_full (parse_buffer, + h265parse->parsed_fps_n, + h265parse->parsed_fps_d, + NULL, +@@ -2823,19 +3095,6 @@ gst_h265_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame) + } + } + +- if (frame->out_buffer) { +- parse_buffer = frame->out_buffer = +- gst_buffer_make_writable (frame->out_buffer); +- } else { +- parse_buffer = frame->buffer = gst_buffer_make_writable (frame->buffer); +- } +- +- if (h265parse->sei_pic_struct != GST_H265_SEI_PIC_STRUCT_FRAME) { +- GST_BUFFER_FLAG_SET (parse_buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED); +- if (h265parse->sei_pic_struct == GST_H265_SEI_PIC_STRUCT_TOP_FIELD) +- GST_BUFFER_FLAG_SET (parse_buffer, GST_VIDEO_BUFFER_FLAG_TFF); +- } +- + gst_video_push_user_data ((GstElement *) h265parse, &h265parse->user_data, + parse_buffer); + +@@ -2921,11 +3180,14 @@ gst_h265_parse_set_caps (GstBaseParse * parse, GstCaps * caps) + off = 23; + + for (i = 0; i < num_nal_arrays; i++) { ++ guint8 nalu_type; ++ + if (off + 3 >= size) { + gst_buffer_unmap (codec_data, &map); + goto hvcc_too_small; + } + ++ nalu_type = data[off] & 0x3f; + num_nals = GST_READ_UINT16_BE (data + off + 1); + off += 3; + for (j = 0; j < num_nals; j++) { +@@ -2933,6 +3195,15 @@ gst_h265_parse_set_caps (GstBaseParse * parse, GstCaps * caps) + data, off, size, 2, &nalu); + + if (parseres != GST_H265_PARSER_OK) { ++ if (i + 1 == num_nal_arrays && j + 1 == num_nals && ++ nalu_type != GST_H265_NAL_VPS && nalu_type != GST_H265_NAL_SPS && ++ nalu_type != GST_H265_NAL_PPS) { ++ GST_WARNING_OBJECT (h265parse, ++ "Couldn't parse the last nalu, type %d at array %d / %d", ++ nalu_type, i, j); ++ goto codec_data_done; ++ } ++ GST_ERROR_OBJECT (h265parse, "Failed to parse nalu, type %d", nalu_type); + gst_buffer_unmap (codec_data, &map); + goto 
hvcc_too_small; + } +@@ -2941,6 +3212,7 @@ gst_h265_parse_set_caps (GstBaseParse * parse, GstCaps * caps) + off = nalu.offset + nalu.size; + } + } ++ codec_data_done: + gst_buffer_unmap (codec_data, &map); + + /* don't confuse codec_data with inband vps/sps/pps */ +diff --git a/gst/videoparsers/gsth265parse.h b/gst/videoparsers/gsth265parse.h +index d3f588c20..e49d22cdb 100644 +--- a/gst/videoparsers/gsth265parse.h ++++ b/gst/videoparsers/gsth265parse.h +@@ -110,6 +110,7 @@ struct _GstH265Parse + gboolean predicted; + gboolean bidirectional; + gboolean header; ++ gboolean framerate_from_caps; + /* AU state */ + gboolean picture_start; + +diff --git a/gst/videoparsers/gstmpeg4videoparse.c b/gst/videoparsers/gstmpeg4videoparse.c +index 50413d0e0..1214a2655 100644 +--- a/gst/videoparsers/gstmpeg4videoparse.c ++++ b/gst/videoparsers/gstmpeg4videoparse.c +@@ -33,6 +33,7 @@ + #include + #include + ++#include "gstvideoparserselements.h" + #include "gstmpeg4videoparse.h" + + GST_DEBUG_CATEGORY (mpeg4v_parse_debug); +@@ -71,6 +72,9 @@ enum + + #define gst_mpeg4vparse_parent_class parent_class + G_DEFINE_TYPE (GstMpeg4VParse, gst_mpeg4vparse, GST_TYPE_BASE_PARSE); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (mpeg4videoparse, "mpeg4videoparse", ++ GST_RANK_PRIMARY + 1, GST_TYPE_MPEG4VIDEO_PARSE, ++ videoparsers_element_init (plugin)); + + static gboolean gst_mpeg4vparse_start (GstBaseParse * parse); + static gboolean gst_mpeg4vparse_stop (GstBaseParse * parse); +@@ -300,7 +304,7 @@ gst_mpeg4vparse_process_config (GstMpeg4VParse * mp4vparse, + if (mp4vparse->config != NULL) + gst_buffer_unref (mp4vparse->config); + +- mp4vparse->config = gst_buffer_new_wrapped (g_memdup (data, size), size); ++ mp4vparse->config = gst_buffer_new_memdup (data, size); + + /* trigger src caps update */ + mp4vparse->update_caps = TRUE; +diff --git a/gst/videoparsers/gstmpegvideoparse.c b/gst/videoparsers/gstmpegvideoparse.c +index 6e27deec2..f8ef31a1b 100644 +--- a/gst/videoparsers/gstmpegvideoparse.c ++++ b/gst/videoparsers/gstmpegvideoparse.c +@@ -31,6 +31,7 @@ + #include + #include + ++#include "gstvideoparserselements.h" + #include "gstmpegvideoparse.h" + + GST_DEBUG_CATEGORY (mpegv_parse_debug); +@@ -64,6 +65,9 @@ enum + + #define parent_class gst_mpegv_parse_parent_class + G_DEFINE_TYPE (GstMpegvParse, gst_mpegv_parse, GST_TYPE_BASE_PARSE); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (mpegvideoparse, "mpegvideoparse", ++ GST_RANK_PRIMARY + 1, GST_TYPE_MPEGVIDEO_PARSE, ++ videoparsers_element_init (plugin)); + + static gboolean gst_mpegv_parse_start (GstBaseParse * parse); + static gboolean gst_mpegv_parse_stop (GstBaseParse * parse); +diff --git a/gst/videoparsers/gstpngparse.c b/gst/videoparsers/gstpngparse.c +index 6df53bd51..81621d993 100644 +--- a/gst/videoparsers/gstpngparse.c ++++ b/gst/videoparsers/gstpngparse.c +@@ -22,6 +22,7 @@ + # include "config.h" + #endif + ++#include "gstvideoparserselements.h" + #include "gstpngparse.h" + + #include +@@ -47,6 +48,8 @@ GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, + + #define parent_class gst_png_parse_parent_class + G_DEFINE_TYPE (GstPngParse, gst_png_parse, GST_TYPE_BASE_PARSE); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (pngparse, "pngparse", GST_RANK_PRIMARY, ++ GST_TYPE_PNG_PARSE, videoparsers_element_init (plugin)); + + static gboolean gst_png_parse_start (GstBaseParse * parse); + static gboolean gst_png_parse_event (GstBaseParse * parse, GstEvent * event); +diff --git a/gst/videoparsers/gstvc1parse.c b/gst/videoparsers/gstvc1parse.c +index 68033dfd9..74f3b961e 100644 +--- 
a/gst/videoparsers/gstvc1parse.c ++++ b/gst/videoparsers/gstvc1parse.c +@@ -79,6 +79,7 @@ + #include "config.h" + #endif + ++#include "gstvideoparserselements.h" + #include "gstvc1parse.h" + + #include +@@ -186,6 +187,8 @@ static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src", + + #define parent_class gst_vc1_parse_parent_class + G_DEFINE_TYPE (GstVC1Parse, gst_vc1_parse, GST_TYPE_BASE_PARSE); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (vc1parse, "vc1parse", GST_RANK_NONE, ++ GST_TYPE_VC1_PARSE, videoparsers_element_init (plugin)); + + static void gst_vc1_parse_finalize (GObject * object); + +diff --git a/gst/videoparsers/gstvideoparserselement.c b/gst/videoparsers/gstvideoparserselement.c +new file mode 100644 +index 000000000..d55564165 +--- /dev/null ++++ b/gst/videoparsers/gstvideoparserselement.c +@@ -0,0 +1,39 @@ ++/* GStreamer video parsers ++ * Copyright (C) 2011 Mark Nauwelaerts ++ * Copyright (C) 2009 Tim-Philipp Müller ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include "gstvideoparserselements.h" ++ ++GST_DEBUG_CATEGORY (videoparseutils_debug); ++ ++void ++videoparsers_element_init (GstPlugin * plugin) ++{ ++ static gsize res = FALSE; ++ ++ if (g_once_init_enter (&res)) { ++ GST_DEBUG_CATEGORY_INIT (videoparseutils_debug, "videoparseutils", 0, ++ "video parse utilities"); ++ g_once_init_leave (&res, TRUE); ++ } ++} +diff --git a/gst/videoparsers/gstvideoparserselements.h b/gst/videoparsers/gstvideoparserselements.h +new file mode 100644 +index 000000000..a8d40c91f +--- /dev/null ++++ b/gst/videoparsers/gstvideoparserselements.h +@@ -0,0 +1,46 @@ ++/* GStreamer ++ * Copyright (C) 2020 Huawei Technologies Co., Ltd. ++ * @Author: Stéphane Cerveau ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++ ++#ifndef __GST_VIDEOPARSERS_ELEMENTS_H__ ++#define __GST_VIDEOPARSERS_ELEMENTS_H__ ++ ++#ifdef HAVE_CONFIG_H ++#include ++#endif ++ ++#include ++ ++ ++void videoparsers_element_init (GstPlugin * plugin); ++ ++GST_ELEMENT_REGISTER_DECLARE (av1parse); ++GST_ELEMENT_REGISTER_DECLARE (diracparse); ++GST_ELEMENT_REGISTER_DECLARE (h263parse); ++GST_ELEMENT_REGISTER_DECLARE (h264parse); ++GST_ELEMENT_REGISTER_DECLARE (h265parse); ++GST_ELEMENT_REGISTER_DECLARE (jpeg2000parse); ++GST_ELEMENT_REGISTER_DECLARE (mpeg4videoparse); ++GST_ELEMENT_REGISTER_DECLARE (mpegvideoparse); ++GST_ELEMENT_REGISTER_DECLARE (pngparse); ++GST_ELEMENT_REGISTER_DECLARE (vc1parse); ++GST_ELEMENT_REGISTER_DECLARE (vp9parse); ++ ++#endif /* __GST_VIDEOPARSERS_ELEMENTS_H__ */ +diff --git a/gst/videoparsers/gstvideoparseutils.c b/gst/videoparsers/gstvideoparseutils.c +index 4e94d21b5..729fe054c 100644 +--- a/gst/videoparsers/gstvideoparseutils.c ++++ b/gst/videoparsers/gstvideoparseutils.c +@@ -26,6 +26,7 @@ + #include + #include + #include ++#include + #include + #include + +@@ -436,3 +437,58 @@ gst_video_parse_utils_parse_afd (const guint8 data, GstVideoAFD * afd, + afd->afd = (GstVideoAFDValue) afd_data; + return TRUE; + } ++ ++/* ++ * gst_video_parse_user_data_unregistered: ++ * @elt: #GstElement that is parsing user data ++ * @user_data: #GstVideoParseUserDataUnregistered struct to hold parsed data ++ * @br: #GstByteReader attached to buffer of user data ++ * @uuid: User Data Unregistered UUID ++ * ++ * Parse user data and store in @user_data ++ */ ++void ++gst_video_parse_user_data_unregistered (GstElement * elt, ++ GstVideoParseUserDataUnregistered * user_data, ++ GstByteReader * br, guint8 uuid[16]) ++{ ++ gst_video_user_data_unregistered_clear (user_data); ++ ++ memcpy (&user_data->uuid, uuid, 16); ++ user_data->size = gst_byte_reader_get_size (br); ++ gst_byte_reader_dup_data (br, user_data->size, &user_data->data); ++} ++ ++/* ++ * gst_video_user_data_unregistered_clear: ++ * @user_data: #GstVideoParseUserDataUnregistered holding SEI User Data Unregistered ++ * ++ * Clears the user data unregistered ++ */ ++void ++gst_video_user_data_unregistered_clear (GstVideoParseUserDataUnregistered * ++ user_data) ++{ ++ g_free (user_data->data); ++ user_data->data = NULL; ++ user_data->size = 0; ++} ++ ++/* ++ * gst_video_push_user_data_unregistered: ++ * @elt: #GstElement that is pushing user data ++ * @user_data: #GstVideoParseUserDataUnregistered holding SEI User Data Unregistered ++ * @buf: #GstBuffer that receives the parsed data ++ * ++ * After user data has been parsed, add the data to @buf ++ */ ++void ++gst_video_push_user_data_unregistered (GstElement * elt, ++ GstVideoParseUserDataUnregistered * user_data, GstBuffer * buf) ++{ ++ if (user_data->data != NULL) { ++ gst_buffer_add_video_sei_user_data_unregistered_meta (buf, user_data->uuid, ++ user_data->data, user_data->size); ++ gst_video_user_data_unregistered_clear (user_data); ++ } ++} +diff --git a/gst/videoparsers/gstvideoparseutils.h b/gst/videoparsers/gstvideoparseutils.h +index 603cc7170..c2d14dc57 100644 +--- a/gst/videoparsers/gstvideoparseutils.h ++++ b/gst/videoparsers/gstvideoparseutils.h +@@ -174,13 +174,33 @@ typedef struct + + } GstVideoParseUserData; + ++/* ++ * GstVideoParseUserDataUnregistered ++ * ++ * Holds unparsed User Data Unregistered. 
++ */ ++typedef struct ++{ ++ guint8 uuid[16]; ++ guint8 *data; ++ gsize size; ++} GstVideoParseUserDataUnregistered; ++ + G_BEGIN_DECLS + + void gst_video_parse_user_data(GstElement * elt, GstVideoParseUserData * user_data, + GstByteReader * br, guint8 field, guint16 provider_code); + ++void gst_video_parse_user_data_unregistered(GstElement * elt, GstVideoParseUserDataUnregistered * user_data, ++ GstByteReader * br, guint8 uuid[16]); ++ ++void gst_video_user_data_unregistered_clear(GstVideoParseUserDataUnregistered * user_data); ++ + void gst_video_push_user_data(GstElement * elt, GstVideoParseUserData * user_data, + GstBuffer * buf); + ++void gst_video_push_user_data_unregistered(GstElement * elt, GstVideoParseUserDataUnregistered * user_data, ++ GstBuffer * buf); ++ + G_END_DECLS + #endif /* __VIDEO_PARSE_UTILS_H__ */ +diff --git a/gst/videoparsers/gstvp9parse.c b/gst/videoparsers/gstvp9parse.c +new file mode 100644 +index 000000000..f5ed34ce7 +--- /dev/null ++++ b/gst/videoparsers/gstvp9parse.c +@@ -0,0 +1,897 @@ ++/* GStreamer ++ * Copyright (C) 2020 Seungha Yang ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include ++#include ++#include "gstvideoparserselements.h" ++#include "gstvp9parse.h" ++ ++#include ++ ++GST_DEBUG_CATEGORY (vp9_parse_debug); ++#define GST_CAT_DEFAULT vp9_parse_debug ++ ++typedef enum ++{ ++ GST_VP9_PARSE_ALIGN_NONE = 0, ++ GST_VP9_PARSE_ALIGN_SUPER_FRAME, ++ GST_VP9_PARSE_ALIGN_FRAME, ++} GstVp9ParseAligment; ++ ++struct _GstVp9Parse ++{ ++ GstBaseParse parent; ++ ++ /* parsed from the last keyframe */ ++ gint width; ++ gint height; ++ gint subsampling_x; ++ gint subsampling_y; ++ GstVp9ColorSpace color_space; ++ GstVp9ColorRange color_range; ++ GstVP9Profile profile; ++ GstVp9BitDepth bit_depth; ++ gboolean codec_alpha; ++ ++ GstVp9ParseAligment in_align; ++ GstVp9ParseAligment align; ++ ++ GstVp9Parser *parser; ++ gboolean update_caps; ++ ++ /* per frame status */ ++ gboolean discont; ++ ++ GstClockTime super_frame_pts; ++ GstClockTime super_frame_dts; ++ GstClockTime super_frame_duration; ++}; ++ ++static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink", ++ GST_PAD_SINK, ++ GST_PAD_ALWAYS, ++ GST_STATIC_CAPS ("video/x-vp9")); ++ ++static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src", ++ GST_PAD_SRC, ++ GST_PAD_ALWAYS, ++ GST_STATIC_CAPS ("video/x-vp9, parsed = (boolean) true, " ++ "alignment=(string) { super-frame, frame }")); ++ ++#define parent_class gst_vp9_parse_parent_class ++G_DEFINE_TYPE (GstVp9Parse, gst_vp9_parse, GST_TYPE_BASE_PARSE); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (vp9parse, "vp9parse", GST_RANK_SECONDARY, ++ GST_TYPE_VP9_PARSE, videoparsers_element_init (plugin)); ++ ++static gboolean gst_vp9_parse_start (GstBaseParse * parse); ++static gboolean gst_vp9_parse_stop (GstBaseParse * parse); ++static GstFlowReturn gst_vp9_parse_handle_frame (GstBaseParse * parse, ++ GstBaseParseFrame * frame, gint * skipsize); ++static gboolean gst_vp9_parse_set_sink_caps (GstBaseParse * parse, ++ GstCaps * caps); ++static GstCaps *gst_vp9_parse_get_sink_caps (GstBaseParse * parse, ++ GstCaps * filter); ++static void gst_vp9_parse_update_src_caps (GstVp9Parse * self, GstCaps * caps); ++static GstFlowReturn gst_vp9_parse_parse_frame (GstVp9Parse * self, ++ GstBaseParseFrame * frame, GstVp9FrameHdr * frame_hdr); ++static GstFlowReturn gst_vp9_parse_pre_push_frame (GstBaseParse * parse, ++ GstBaseParseFrame * frame); ++ ++static void ++gst_vp9_parse_class_init (GstVp9ParseClass * klass) ++{ ++ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass); ++ GstElementClass *element_class = GST_ELEMENT_CLASS (klass); ++ ++ parse_class->start = GST_DEBUG_FUNCPTR (gst_vp9_parse_start); ++ parse_class->stop = GST_DEBUG_FUNCPTR (gst_vp9_parse_stop); ++ parse_class->handle_frame = GST_DEBUG_FUNCPTR (gst_vp9_parse_handle_frame); ++ parse_class->pre_push_frame = ++ GST_DEBUG_FUNCPTR (gst_vp9_parse_pre_push_frame); ++ parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_vp9_parse_set_sink_caps); ++ parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_vp9_parse_get_sink_caps); ++ ++ gst_element_class_add_static_pad_template (element_class, &srctemplate); ++ gst_element_class_add_static_pad_template (element_class, &sinktemplate); ++ ++ gst_element_class_set_static_metadata (element_class, "VP9 parser", ++ "Codec/Parser/Converter/Video", ++ "Parses VP9 streams", "Seungha Yang "); ++ ++ GST_DEBUG_CATEGORY_INIT (vp9_parse_debug, "vp9parse", 0, "vp9 parser"); ++} ++ ++static void ++gst_vp9_parse_init (GstVp9Parse * self) ++{ ++ gst_base_parse_set_pts_interpolation (GST_BASE_PARSE 
(self), FALSE); ++ gst_base_parse_set_infer_ts (GST_BASE_PARSE (self), FALSE); ++ ++ GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (self)); ++ GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (self)); ++} ++ ++static void ++gst_vp9_parse_reset_super_frame (GstVp9Parse * self) ++{ ++ self->super_frame_pts = GST_CLOCK_TIME_NONE; ++ self->super_frame_dts = GST_CLOCK_TIME_NONE; ++ self->super_frame_duration = GST_CLOCK_TIME_NONE; ++} ++ ++static void ++gst_vp9_parse_reset (GstVp9Parse * self) ++{ ++ self->width = 0; ++ self->height = 0; ++ self->subsampling_x = -1; ++ self->subsampling_y = -1; ++ self->color_space = GST_VP9_CS_UNKNOWN; ++ self->color_range = GST_VP9_CR_LIMITED; ++ self->profile = GST_VP9_PROFILE_UNDEFINED; ++ self->bit_depth = (GstVp9BitDepth) 0; ++ self->codec_alpha = FALSE; ++ gst_vp9_parse_reset_super_frame (self); ++} ++ ++static gboolean ++gst_vp9_parse_start (GstBaseParse * parse) ++{ ++ GstVp9Parse *self = GST_VP9_PARSE (parse); ++ ++ GST_DEBUG_OBJECT (self, "start"); ++ ++ self->parser = gst_vp9_parser_new (); ++ gst_vp9_parse_reset (self); ++ ++ /* short frame header with one byte */ ++ gst_base_parse_set_min_frame_size (parse, 1); ++ ++ return TRUE; ++} ++ ++static gboolean ++gst_vp9_parse_stop (GstBaseParse * parse) ++{ ++ GstVp9Parse *self = GST_VP9_PARSE (parse); ++ ++ GST_DEBUG_OBJECT (self, "stop"); ++ g_clear_pointer (&self->parser, gst_vp9_parser_free); ++ ++ return TRUE; ++} ++ ++static const gchar * ++gst_vp9_parse_profile_to_string (GstVP9Profile profile) ++{ ++ switch (profile) { ++ case GST_VP9_PROFILE_0: ++ return "0"; ++ case GST_VP9_PROFILE_1: ++ return "1"; ++ case GST_VP9_PROFILE_2: ++ return "2"; ++ case GST_VP9_PROFILE_3: ++ return "3"; ++ default: ++ break; ++ } ++ ++ return NULL; ++} ++ ++static GstVP9Profile ++gst_vp9_parse_profile_from_string (const gchar * profile) ++{ ++ if (!profile) ++ return GST_VP9_PROFILE_UNDEFINED; ++ ++ if (g_strcmp0 (profile, "0") == 0) ++ return GST_VP9_PROFILE_0; ++ else if (g_strcmp0 (profile, "1") == 0) ++ return GST_VP9_PROFILE_1; ++ else if (g_strcmp0 (profile, "2") == 0) ++ return GST_VP9_PROFILE_2; ++ else if (g_strcmp0 (profile, "3") == 0) ++ return GST_VP9_PROFILE_3; ++ ++ return GST_VP9_PROFILE_UNDEFINED; ++} ++ ++static const gchar * ++gst_vp9_parse_alignment_to_string (GstVp9ParseAligment align) ++{ ++ switch (align) { ++ case GST_VP9_PARSE_ALIGN_SUPER_FRAME: ++ return "super-frame"; ++ case GST_VP9_PARSE_ALIGN_FRAME: ++ return "frame"; ++ default: ++ break; ++ } ++ ++ return NULL; ++} ++ ++static GstVp9ParseAligment ++gst_vp9_parse_alignment_from_string (const gchar * align) ++{ ++ if (!align) ++ return GST_VP9_PARSE_ALIGN_NONE; ++ ++ if (g_strcmp0 (align, "super-frame") == 0) ++ return GST_VP9_PARSE_ALIGN_SUPER_FRAME; ++ else if (g_strcmp0 (align, "frame") == 0) ++ return GST_VP9_PARSE_ALIGN_FRAME; ++ ++ return GST_VP9_PARSE_ALIGN_NONE; ++} ++ ++static void ++gst_vp9_parse_alignment_from_caps (GstCaps * caps, GstVp9ParseAligment * align) ++{ ++ *align = GST_VP9_PARSE_ALIGN_NONE; ++ ++ GST_DEBUG ("parsing caps: %" GST_PTR_FORMAT, caps); ++ ++ if (caps && gst_caps_get_size (caps) > 0) { ++ GstStructure *s = gst_caps_get_structure (caps, 0); ++ const gchar *str = NULL; ++ ++ if ((str = gst_structure_get_string (s, "alignment"))) { ++ *align = gst_vp9_parse_alignment_from_string (str); ++ } ++ } ++} ++ ++/* implement custom semantic for codec-alpha */ ++static gboolean ++gst_vp9_parse_check_codec_alpha (GstStructure * s, gboolean codec_alpha) ++{ ++ gboolean value; ++ ++ if 
(gst_structure_get_boolean (s, "codec-alpha", &value)) ++ return value == codec_alpha; ++ ++ return codec_alpha == FALSE; ++} ++ ++/* check downstream caps to configure format and alignment */ ++static void ++gst_vp9_parse_negotiate (GstVp9Parse * self, GstVp9ParseAligment in_align, ++ GstCaps * in_caps) ++{ ++ GstCaps *caps; ++ GstVp9ParseAligment align = self->align; ++ ++ caps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (self)); ++ GST_DEBUG_OBJECT (self, "allowed caps: %" GST_PTR_FORMAT, caps); ++ ++ /* concentrate on leading structure, since decodebin parser ++ * capsfilter always includes parser template caps */ ++ if (caps) { ++ caps = gst_caps_make_writable (caps); ++ while (gst_caps_get_size (caps) > 0) { ++ GstStructure *s = gst_caps_get_structure (caps, 0); ++ ++ if (gst_vp9_parse_check_codec_alpha (s, self->codec_alpha)) ++ break; ++ ++ gst_caps_remove_structure (caps, 0); ++ } ++ ++ /* this may happen if there is simply no codec alpha decoder in the ++ * gstreamer installation, in this case, pick the first non-alpha decoder. ++ */ ++ if (gst_caps_is_empty (caps)) { ++ gst_caps_unref (caps); ++ caps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (self)); ++ } ++ ++ caps = gst_caps_truncate (caps); ++ GST_DEBUG_OBJECT (self, "negotiating with caps: %" GST_PTR_FORMAT, caps); ++ } ++ ++ if (in_caps && caps) { ++ if (gst_caps_can_intersect (in_caps, caps)) { ++ GST_DEBUG_OBJECT (self, "downstream accepts upstream caps"); ++ gst_vp9_parse_alignment_from_caps (in_caps, &align); ++ gst_clear_caps (&caps); ++ } ++ } ++ ++ /* FIXME We could fail the negotiation immediately if caps are empty */ ++ if (caps && !gst_caps_is_empty (caps)) { ++ /* fixate to avoid ambiguity with lists when parsing */ ++ caps = gst_caps_fixate (caps); ++ gst_vp9_parse_alignment_from_caps (caps, &align); ++ } ++ ++ /* default */ ++ if (align == GST_VP9_PARSE_ALIGN_NONE) ++ align = GST_VP9_PARSE_ALIGN_SUPER_FRAME; ++ ++ GST_DEBUG_OBJECT (self, "selected alignment %s", ++ gst_vp9_parse_alignment_to_string (align)); ++ ++ self->align = align; ++ ++ gst_clear_caps (&caps); ++} ++ ++static gboolean ++gst_vp9_parse_is_info_valid (GstVp9Parse * self) ++{ ++ if (self->width <= 0 || self->height <= 0) ++ return FALSE; ++ ++ if (self->subsampling_x < 0 || self->subsampling_y < 0) ++ return FALSE; ++ ++ if (self->profile == GST_VP9_PROFILE_UNDEFINED) ++ return FALSE; ++ ++ if (self->bit_depth < (GstVp9BitDepth) GST_VP9_BIT_DEPTH_8) ++ return FALSE; ++ ++ return TRUE; ++} ++ ++static gboolean ++gst_vp9_parse_process_frame (GstVp9Parse * self, GstVp9FrameHdr * frame_hdr) ++{ ++ GstVp9Parser *parser = self->parser; ++ gint width, height; ++ ++ /* the resolution might be varying. Update our status per key frame */ ++ if (frame_hdr->frame_type != GST_VP9_KEY_FRAME || ++ frame_hdr->show_existing_frame) { ++ /* Need to continue to get some valid info. 
*/ ++ if (gst_vp9_parse_is_info_valid (self)) ++ return TRUE; ++ } ++ ++ width = frame_hdr->width; ++ height = frame_hdr->height; ++ if (frame_hdr->display_size_enabled && ++ frame_hdr->display_width > 0 && frame_hdr->display_height) { ++ width = frame_hdr->display_width; ++ height = frame_hdr->display_height; ++ } ++ ++ if (width != self->width || height != self->height) { ++ GST_DEBUG_OBJECT (self, "resolution change from %dx%d to %dx%d", ++ self->width, self->height, width, height); ++ self->width = width; ++ self->height = height; ++ self->update_caps = TRUE; ++ } ++ ++ if (self->subsampling_x != parser->subsampling_x || ++ self->subsampling_y != parser->subsampling_y) { ++ GST_DEBUG_OBJECT (self, ++ "subsampling changed from x: %d, y: %d to x: %d, y: %d", ++ self->subsampling_x, self->subsampling_y, ++ parser->subsampling_x, parser->subsampling_y); ++ self->subsampling_x = parser->subsampling_x; ++ self->subsampling_y = parser->subsampling_y; ++ self->update_caps = TRUE; ++ } ++ ++ if (parser->color_space != GST_VP9_CS_UNKNOWN && ++ parser->color_space != GST_VP9_CS_RESERVED_2 && ++ parser->color_space != self->color_space) { ++ GST_DEBUG_OBJECT (self, "colorspace changed from %d to %d", ++ self->color_space, parser->color_space); ++ self->color_space = parser->color_space; ++ self->update_caps = TRUE; ++ } ++ ++ if (parser->color_range != self->color_range) { ++ GST_DEBUG_OBJECT (self, "color range changed from %d to %d", ++ self->color_range, parser->color_range); ++ self->color_range = parser->color_range; ++ self->update_caps = TRUE; ++ } ++ ++ if (frame_hdr->profile != GST_VP9_PROFILE_UNDEFINED && ++ frame_hdr->profile != self->profile) { ++ GST_DEBUG_OBJECT (self, "profile changed from %d to %d", self->profile, ++ frame_hdr->profile); ++ self->profile = frame_hdr->profile; ++ self->update_caps = TRUE; ++ } ++ ++ if (parser->bit_depth != self->bit_depth) { ++ GST_DEBUG_OBJECT (self, "bit-depth changed from %d to %d", ++ self->bit_depth, parser->bit_depth); ++ self->bit_depth = parser->bit_depth; ++ self->update_caps = TRUE; ++ } ++ ++ return TRUE; ++} ++ ++static GstFlowReturn ++gst_vp9_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame) ++{ ++ GstVp9Parse *self = GST_VP9_PARSE (parse); ++ ++ frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP; ++ ++ if (!frame->buffer) ++ return GST_FLOW_OK; ++ ++ /* The super frame may contain more than one frames inside its buffer. ++ When splitting a super frame into frames, the base parse class only ++ assign the PTS to the first frame and leave the others' PTS invalid. ++ But in fact, all decode only frames should have invalid PTS while ++ showable frames should have correct PTS setting. 
*/ ++ if (self->align != GST_VP9_PARSE_ALIGN_FRAME) ++ return GST_FLOW_OK; ++ ++ if (GST_BUFFER_FLAG_IS_SET (frame->buffer, GST_BUFFER_FLAG_DECODE_ONLY)) { ++ GST_BUFFER_PTS (frame->buffer) = GST_CLOCK_TIME_NONE; ++ GST_BUFFER_DURATION (frame->buffer) = GST_CLOCK_TIME_NONE; ++ } else { ++ GST_BUFFER_PTS (frame->buffer) = self->super_frame_pts; ++ GST_BUFFER_DURATION (frame->buffer) = self->super_frame_duration; ++ } ++ GST_BUFFER_DTS (frame->buffer) = self->super_frame_dts; ++ ++ return GST_FLOW_OK; ++} ++ ++static GstFlowReturn ++gst_vp9_parse_handle_frame (GstBaseParse * parse, GstBaseParseFrame * frame, ++ gint * skipsize) ++{ ++ GstVp9Parse *self = GST_VP9_PARSE (parse); ++ GstBuffer *buffer = frame->buffer; ++ GstFlowReturn ret = GST_FLOW_OK; ++ GstVp9ParserResult parse_res = GST_VP9_PARSER_ERROR; ++ GstMapInfo map; ++ gsize offset = 0; ++ GstVp9SuperframeInfo superframe_info; ++ guint i; ++ GstVp9FrameHdr frame_hdr; ++ ++ if (GST_BUFFER_FLAG_IS_SET (frame->buffer, GST_BUFFER_FLAG_DISCONT)) ++ self->discont = TRUE; ++ else ++ self->discont = FALSE; ++ ++ /* need to save buffer from invalidation upon _finish_frame */ ++ if (self->align == GST_VP9_PARSE_ALIGN_FRAME) ++ buffer = gst_buffer_copy (frame->buffer); ++ ++ if (!gst_buffer_map (buffer, &map, GST_MAP_READ)) { ++ GST_ELEMENT_ERROR (parse, CORE, NOT_IMPLEMENTED, (NULL), ++ ("Couldn't map incoming buffer")); ++ ++ return GST_FLOW_ERROR; ++ } ++ ++ GST_TRACE_OBJECT (self, "processing buffer of size %" G_GSIZE_FORMAT, ++ map.size); ++ ++ /* superframe_info will be zero initialized by GstVp9Parser */ ++ parse_res = gst_vp9_parser_parse_superframe_info (self->parser, ++ &superframe_info, map.data, map.size); ++ ++ if (parse_res != GST_VP9_PARSER_OK) { ++ /* just finish this frame anyway, so that we don't too strict ++ * regarding parsing vp9 stream. ++ * Downstream might be able to handle this stream even though ++ * it's very unlikely */ ++ GST_WARNING_OBJECT (self, "Couldn't parse superframe res: %d", parse_res); ++ goto done; ++ } ++ ++ self->super_frame_pts = GST_BUFFER_PTS (buffer); ++ self->super_frame_dts = GST_BUFFER_DTS (buffer); ++ self->super_frame_duration = GST_BUFFER_DURATION (buffer); ++ ++ for (i = 0; i < superframe_info.frames_in_superframe; i++) { ++ guint32 frame_size; ++ ++ frame_size = superframe_info.frame_sizes[i]; ++ parse_res = gst_vp9_parser_parse_frame_header (self->parser, ++ &frame_hdr, map.data + offset, frame_size); ++ ++ if (parse_res != GST_VP9_PARSER_OK) { ++ GST_WARNING_OBJECT (self, "Parsing error %d", parse_res); ++ break; ++ } ++ ++ gst_vp9_parse_process_frame (self, &frame_hdr); ++ ++ if (self->align == GST_VP9_PARSE_ALIGN_FRAME) { ++ GstBaseParseFrame subframe; ++ ++ gst_base_parse_frame_init (&subframe); ++ subframe.flags |= frame->flags; ++ subframe.offset = frame->offset; ++ subframe.overhead = frame->overhead; ++ subframe.buffer = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, ++ offset, frame_size); ++ ++ /* note we don't need to come up with a sub-buffer, since ++ * subsequent code only considers input buffer's metadata. ++ * Real data is either taken from input by baseclass or ++ * a replacement output buffer is provided anyway. */ ++ gst_vp9_parse_parse_frame (self, &subframe, &frame_hdr); ++ ++ ret = gst_base_parse_finish_frame (parse, &subframe, frame_size); ++ } else { ++ /* FIXME: need to parse all frames belong to this superframe? 
*/ ++ break; ++ } ++ ++ offset += frame_size; ++ } ++ ++ gst_vp9_parse_reset_super_frame (self); ++ ++done: ++ gst_buffer_unmap (buffer, &map); ++ ++ if (self->align != GST_VP9_PARSE_ALIGN_FRAME) { ++ if (parse_res == GST_VP9_PARSER_OK) ++ gst_vp9_parse_parse_frame (self, frame, &frame_hdr); ++ ret = gst_base_parse_finish_frame (parse, frame, map.size); ++ } else { ++ gst_buffer_unref (buffer); ++ if (offset != map.size) { ++ gsize left = map.size - offset; ++ if (left != superframe_info.superframe_index_size) { ++ GST_WARNING_OBJECT (parse, ++ "Skipping leftover frame data %" G_GSIZE_FORMAT, left); ++ } ++ frame->flags |= GST_BASE_PARSE_FRAME_FLAG_DROP; ++ ret = gst_base_parse_finish_frame (parse, frame, left); ++ } ++ } ++ ++ return ret; ++} ++ ++static void ++gst_vp9_parse_update_src_caps (GstVp9Parse * self, GstCaps * caps) ++{ ++ GstCaps *sink_caps, *src_caps; ++ GstCaps *final_caps = NULL; ++ GstStructure *s = NULL; ++ gint width, height; ++ gint par_n = 0, par_d = 0; ++ gint fps_n = 0, fps_d = 0; ++ gint bitdepth = 0; ++ gchar *colorimetry = NULL; ++ const gchar *chroma_format = NULL; ++ const gchar *profile = NULL; ++ ++ if (!self->update_caps) ++ return; ++ ++ /* if this is being called from the first _setcaps call, caps on the sinkpad ++ * aren't set yet and so they need to be passed as an argument */ ++ if (caps) ++ sink_caps = gst_caps_ref (caps); ++ else ++ sink_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (self)); ++ ++ /* carry over input caps as much as possible; override with our own stuff */ ++ if (!sink_caps) ++ sink_caps = gst_caps_new_empty_simple ("video/x-vp9"); ++ else ++ s = gst_caps_get_structure (sink_caps, 0); ++ ++ final_caps = gst_caps_copy (sink_caps); ++ ++ /* frame header should give this but upstream overrides */ ++ if (s && gst_structure_has_field (s, "width") && ++ gst_structure_has_field (s, "height")) { ++ gst_structure_get_int (s, "width", &width); ++ gst_structure_get_int (s, "height", &height); ++ } else { ++ width = self->width; ++ height = self->height; ++ } ++ ++ if (width > 0 && height > 0) ++ gst_caps_set_simple (final_caps, "width", G_TYPE_INT, width, ++ "height", G_TYPE_INT, height, NULL); ++ ++ if (s && gst_structure_get_fraction (s, "pixel-aspect-ratio", &par_n, &par_d)) { ++ if (par_n != 0 && par_d != 0) { ++ gst_caps_set_simple (final_caps, "pixel-aspect-ratio", ++ GST_TYPE_FRACTION, par_n, par_d, NULL); ++ } ++ } ++ ++ if (s && gst_structure_has_field (s, "framerate")) { ++ gst_structure_get_fraction (s, "framerate", &fps_n, &fps_d); ++ } ++ ++ if (fps_n > 0 && fps_d > 0) { ++ gst_caps_set_simple (final_caps, "framerate", ++ GST_TYPE_FRACTION, fps_n, fps_d, NULL); ++ gst_base_parse_set_frame_rate (GST_BASE_PARSE (self), fps_n, fps_d, 0, 0); ++ } ++ ++ if (self->color_space != GST_VP9_CS_UNKNOWN && ++ self->color_space != GST_VP9_CS_RESERVED_2) { ++ GstVideoColorimetry cinfo; ++ gboolean have_cinfo = TRUE; ++ ++ memset (&cinfo, 0, sizeof (GstVideoColorimetry)); ++ ++ switch (self->parser->color_space) { ++ case GST_VP9_CS_BT_601: ++ gst_video_colorimetry_from_string (&cinfo, GST_VIDEO_COLORIMETRY_BT601); ++ break; ++ case GST_VP9_CS_BT_709: ++ gst_video_colorimetry_from_string (&cinfo, GST_VIDEO_COLORIMETRY_BT709); ++ break; ++ case GST_VP9_CS_SMPTE_170: ++ gst_video_colorimetry_from_string (&cinfo, GST_VIDEO_COLORIMETRY_BT601); ++ break; ++ case GST_VP9_CS_SMPTE_240: ++ gst_video_colorimetry_from_string (&cinfo, ++ GST_VIDEO_COLORIMETRY_SMPTE240M); ++ break; ++ case GST_VP9_CS_BT_2020: ++ if (self->parser->bit_depth == 
GST_VP9_BIT_DEPTH_12) { ++ gst_video_colorimetry_from_string (&cinfo, ++ GST_VIDEO_COLORIMETRY_BT2020); ++ } else { ++ gst_video_colorimetry_from_string (&cinfo, ++ GST_VIDEO_COLORIMETRY_BT2020_10); ++ } ++ break; ++ case GST_VP9_CS_SRGB: ++ gst_video_colorimetry_from_string (&cinfo, GST_VIDEO_COLORIMETRY_SRGB); ++ break; ++ default: ++ have_cinfo = FALSE; ++ break; ++ } ++ ++ if (have_cinfo) { ++ if (self->parser->color_range == GST_VP9_CR_LIMITED) ++ cinfo.range = GST_VIDEO_COLOR_RANGE_16_235; ++ else ++ cinfo.range = GST_VIDEO_COLOR_RANGE_0_255; ++ ++ colorimetry = gst_video_colorimetry_to_string (&cinfo); ++ } ++ } ++ ++ if (self->parser->subsampling_x == 1 && self->parser->subsampling_y == 1) ++ chroma_format = "4:2:0"; ++ else if (self->parser->subsampling_x == 1 && self->parser->subsampling_y == 0) ++ chroma_format = "4:2:2"; ++ else if (self->parser->subsampling_x == 0 && self->parser->subsampling_y == 1) ++ chroma_format = "4:4:0"; ++ else if (self->parser->subsampling_x == 0 && self->parser->subsampling_y == 0) ++ chroma_format = "4:4:4"; ++ ++ if (chroma_format) ++ gst_caps_set_simple (final_caps, ++ "chroma-format", G_TYPE_STRING, chroma_format, NULL); ++ ++ switch (self->bit_depth) { ++ case GST_VP9_BIT_DEPTH_8: ++ bitdepth = 8; ++ break; ++ case GST_VP9_BIT_DEPTH_10: ++ bitdepth = 10; ++ break; ++ case GST_VP9_BIT_DEPTH_12: ++ bitdepth = 12; ++ break; ++ default: ++ break; ++ } ++ ++ if (bitdepth) { ++ gst_caps_set_simple (final_caps, ++ "bit-depth-luma", G_TYPE_UINT, bitdepth, ++ "bit-depth-chroma", G_TYPE_UINT, bitdepth, NULL); ++ } ++ ++ if (colorimetry && (!s || !gst_structure_has_field (s, "colorimetry"))) { ++ gst_caps_set_simple (final_caps, ++ "colorimetry", G_TYPE_STRING, colorimetry, NULL); ++ } ++ ++ g_free (colorimetry); ++ ++ gst_caps_set_simple (final_caps, "parsed", G_TYPE_BOOLEAN, TRUE, ++ "alignment", G_TYPE_STRING, ++ gst_vp9_parse_alignment_to_string (self->align), NULL); ++ ++ profile = gst_vp9_parse_profile_to_string (self->profile); ++ if (profile) ++ gst_caps_set_simple (final_caps, "profile", G_TYPE_STRING, profile, NULL); ++ ++ gst_caps_set_simple (final_caps, "codec-alpha", G_TYPE_BOOLEAN, ++ self->codec_alpha, NULL); ++ ++ src_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (self)); ++ ++ if (!(src_caps && gst_caps_is_strictly_equal (src_caps, final_caps))) { ++ GST_DEBUG_OBJECT (self, "Update src caps %" GST_PTR_FORMAT, final_caps); ++ gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (self), final_caps); ++ } ++ ++ gst_clear_caps (&src_caps); ++ gst_caps_unref (final_caps); ++ gst_caps_unref (sink_caps); ++ ++ self->update_caps = FALSE; ++} ++ ++static GstFlowReturn ++gst_vp9_parse_parse_frame (GstVp9Parse * self, GstBaseParseFrame * frame, ++ GstVp9FrameHdr * frame_hdr) ++{ ++ GstBuffer *buffer; ++ ++ buffer = frame->buffer; ++ ++ gst_vp9_parse_update_src_caps (self, NULL); ++ ++ if (frame_hdr->frame_type == GST_VP9_KEY_FRAME) ++ GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT); ++ else ++ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT); ++ ++ if (self->align == GST_VP9_PARSE_ALIGN_FRAME) { ++ if (!frame_hdr->show_frame && !frame_hdr->show_existing_frame) ++ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DECODE_ONLY); ++ else ++ GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DECODE_ONLY); ++ } ++ ++ if (self->discont) { ++ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); ++ self->discont = FALSE; ++ } ++ ++ return GST_FLOW_OK; ++} ++ ++static gboolean ++gst_vp9_parse_set_sink_caps (GstBaseParse * parse, GstCaps * caps) ++{ 
++ GstVp9Parse *self = GST_VP9_PARSE (parse); ++ GstStructure *str; ++ GstVp9ParseAligment align; ++ GstCaps *in_caps = NULL; ++ const gchar *profile; ++ ++ str = gst_caps_get_structure (caps, 0); ++ ++ /* accept upstream info if provided */ ++ gst_structure_get_int (str, "width", &self->width); ++ gst_structure_get_int (str, "height", &self->height); ++ profile = gst_structure_get_string (str, "profile"); ++ if (profile) ++ self->profile = gst_vp9_parse_profile_from_string (profile); ++ gst_structure_get_boolean (str, "codec-alpha", &self->codec_alpha); ++ ++ /* get upstream align from caps */ ++ gst_vp9_parse_alignment_from_caps (caps, &align); ++ ++ /* default */ ++ if (align == GST_VP9_PARSE_ALIGN_NONE) ++ align = GST_VP9_PARSE_ALIGN_SUPER_FRAME; ++ ++ /* prefer alignment type determined above */ ++ in_caps = gst_caps_copy (caps); ++ gst_caps_set_simple (in_caps, "alignment", G_TYPE_STRING, ++ gst_vp9_parse_alignment_to_string (align), NULL); ++ ++ /* negotiate with downstream, set output align */ ++ gst_vp9_parse_negotiate (self, align, in_caps); ++ ++ self->update_caps = TRUE; ++ ++ /* if all of decoder's capability related values are provided ++ * by upstream, update src caps now */ ++ if (self->width > 0 && self->height > 0 && profile && ++ /* Other profiles defines multiple bitdepth/subsampling ++ * Delaying src caps update for non profile-0 streams */ ++ self->profile == GST_VP9_PROFILE_0) { ++ gst_vp9_parse_update_src_caps (self, in_caps); ++ } ++ ++ gst_caps_unref (in_caps); ++ ++ self->in_align = align; ++ ++ return TRUE; ++} ++ ++static void ++remove_fields (GstCaps * caps, gboolean all) ++{ ++ guint i, n; ++ ++ n = gst_caps_get_size (caps); ++ for (i = 0; i < n; i++) { ++ GstStructure *s = gst_caps_get_structure (caps, i); ++ ++ if (all) { ++ gst_structure_remove_field (s, "alignment"); ++ } ++ gst_structure_remove_field (s, "parsed"); ++ } ++} ++ ++static GstCaps * ++gst_vp9_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter) ++{ ++ GstCaps *peercaps, *templ; ++ GstCaps *res, *tmp, *pcopy; ++ ++ templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse)); ++ if (filter) { ++ GstCaps *fcopy = gst_caps_copy (filter); ++ /* Remove the fields we convert */ ++ remove_fields (fcopy, TRUE); ++ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), fcopy); ++ gst_caps_unref (fcopy); ++ } else { ++ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), NULL); ++ } ++ ++ pcopy = gst_caps_copy (peercaps); ++ remove_fields (pcopy, TRUE); ++ ++ res = gst_caps_intersect_full (pcopy, templ, GST_CAPS_INTERSECT_FIRST); ++ gst_caps_unref (pcopy); ++ gst_caps_unref (templ); ++ ++ if (filter) { ++ GstCaps *tmp = gst_caps_intersect_full (res, filter, ++ GST_CAPS_INTERSECT_FIRST); ++ gst_caps_unref (res); ++ res = tmp; ++ } ++ ++ /* Try if we can put the downstream caps first */ ++ pcopy = gst_caps_copy (peercaps); ++ remove_fields (pcopy, FALSE); ++ tmp = gst_caps_intersect_full (pcopy, res, GST_CAPS_INTERSECT_FIRST); ++ gst_caps_unref (pcopy); ++ if (!gst_caps_is_empty (tmp)) ++ res = gst_caps_merge (tmp, res); ++ else ++ gst_caps_unref (tmp); ++ ++ gst_caps_unref (peercaps); ++ ++ return res; ++} +diff --git a/gst/videoparsers/gstvp9parse.h b/gst/videoparsers/gstvp9parse.h +new file mode 100644 +index 000000000..3ec4d356a +--- /dev/null ++++ b/gst/videoparsers/gstvp9parse.h +@@ -0,0 +1,34 @@ ++/* GStreamer ++ * Copyright (C) 2020 Seungha Yang ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of 
the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifndef __GST_VP9_PARSE_H__ ++#define __GST_VP9_PARSE_H__ ++ ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++#define GST_TYPE_VP9_PARSE (gst_vp9_parse_get_type()) ++G_DECLARE_FINAL_TYPE (GstVp9Parse, ++ gst_vp9_parse, GST, VP9_PARSE, GstBaseParse); ++ ++G_END_DECLS ++ ++#endif /* __GST_VP9_PARSE_H__ */ +diff --git a/gst/videoparsers/meson.build b/gst/videoparsers/meson.build +index 2fd164fc3..37b783346 100644 +--- a/gst/videoparsers/meson.build ++++ b/gst/videoparsers/meson.build +@@ -1,5 +1,6 @@ + vparse_sources = [ + 'plugin.c', ++ 'gstvideoparserselement.c', + 'h263parse.c', + 'gsth263parse.c', + 'gstdiracparse.c', +@@ -12,6 +13,8 @@ vparse_sources = [ + 'gsth265parse.c', + 'gstvideoparseutils.c', + 'gstjpeg2000parse.c', ++ 'gstvp9parse.c', ++ 'gstav1parse.c', + ] + + gstvideoparsersbad = library('gstvideoparsersbad', +@@ -22,5 +25,4 @@ gstvideoparsersbad = library('gstvideoparsersbad', + install : true, + install_dir : plugins_install_dir, + ) +-pkgconfig.generate(gstvideoparsersbad, install_dir : plugins_pkgconfig_install_dir) + plugins += [gstvideoparsersbad] +diff --git a/gst/videoparsers/plugin.c b/gst/videoparsers/plugin.c +index f4690c466..256fd94a1 100644 +--- a/gst/videoparsers/plugin.c ++++ b/gst/videoparsers/plugin.c +@@ -22,44 +22,33 @@ + #include "config.h" + #endif + +-#include "gsth263parse.h" +-#include "gsth264parse.h" +-#include "gstdiracparse.h" +-#include "gstmpegvideoparse.h" +-#include "gstmpeg4videoparse.h" +-#include "gstpngparse.h" +-#include "gstjpeg2000parse.h" +-#include "gstvc1parse.h" +-#include "gsth265parse.h" +- +-GST_DEBUG_CATEGORY (videoparseutils_debug); ++#include "gstvideoparserselements.h" + + static gboolean + plugin_init (GstPlugin * plugin) + { + gboolean ret = FALSE; + +- GST_DEBUG_CATEGORY_INIT (videoparseutils_debug, "videoparseutils", 0, +- "video parse utilities"); +- +- ret |= gst_element_register (plugin, "h263parse", +- GST_RANK_PRIMARY + 1, GST_TYPE_H263_PARSE); +- ret |= gst_element_register (plugin, "h264parse", +- GST_RANK_PRIMARY + 1, GST_TYPE_H264_PARSE); +- ret |= gst_element_register (plugin, "diracparse", +- GST_RANK_NONE, GST_TYPE_DIRAC_PARSE); +- ret |= gst_element_register (plugin, "mpegvideoparse", +- GST_RANK_PRIMARY + 1, GST_TYPE_MPEGVIDEO_PARSE); +- ret |= gst_element_register (plugin, "mpeg4videoparse", +- GST_RANK_PRIMARY + 1, GST_TYPE_MPEG4VIDEO_PARSE); +- ret |= gst_element_register (plugin, "pngparse", +- GST_RANK_PRIMARY, GST_TYPE_PNG_PARSE); +- ret |= gst_element_register (plugin, "jpeg2000parse", +- GST_RANK_PRIMARY, GST_TYPE_JPEG2000_PARSE); +- ret |= gst_element_register (plugin, "h265parse", +- GST_RANK_SECONDARY, GST_TYPE_H265_PARSE); +- ret |= gst_element_register (plugin, "vc1parse", +- GST_RANK_NONE, GST_TYPE_VC1_PARSE); ++ ret |= GST_ELEMENT_REGISTER (h263parse, plugin); ++ ret |= GST_ELEMENT_REGISTER (h264parse, plugin); ++ ret |= 
GST_ELEMENT_REGISTER (diracparse, plugin); ++ ret |= GST_ELEMENT_REGISTER (mpegvideoparse, plugin); ++ ret |= GST_ELEMENT_REGISTER (mpeg4videoparse, plugin); ++ ret |= GST_ELEMENT_REGISTER (pngparse, plugin); ++ ret |= GST_ELEMENT_REGISTER (h265parse, plugin); ++ ret |= GST_ELEMENT_REGISTER (vc1parse, plugin); ++ /** ++ * element-vp9parse: ++ * ++ * Since: 1.20 ++ */ ++ ret |= GST_ELEMENT_REGISTER (vp9parse, plugin); ++ /** ++ * element-av1parse: ++ * ++ * Since: 1.20 ++ */ ++ ret |= GST_ELEMENT_REGISTER (av1parse, plugin); + + return ret; + } +-- +2.47.1 + diff --git a/package/gstreamer1/gst1-plugins-bad/1.18.6-gstwebrtc/0002-WebRTC-related-backports-from-GStreamer-1.24.patch b/package/gstreamer1/gst1-plugins-bad/1.18.6-gstwebrtc/0002-WebRTC-related-backports-from-GStreamer-1.24.patch new file mode 100644 index 000000000000..f5eaf309c0d9 --- /dev/null +++ b/package/gstreamer1/gst1-plugins-bad/1.18.6-gstwebrtc/0002-WebRTC-related-backports-from-GStreamer-1.24.patch @@ -0,0 +1,4285 @@ +From 755147ec3b7d2e95e68b596902600453f0e17949 Mon Sep 17 00:00:00 2001 +From: Carlos Bentzen +Date: Fri, 12 Jul 2024 10:46:14 +0200 +Subject: [PATCH 2/5] WebRTC-related backports from GStreamer 1.24 + +--- + ext/webrtc/gstwebrtcbin.c | 323 +++++-- + ext/webrtc/gstwebrtcbin.h | 10 +- + ext/webrtc/gstwebrtcice.c | 1161 ------------------------- + ext/webrtc/gstwebrtcice.h | 106 --- + ext/webrtc/gstwebrtcstats.c | 32 +- + ext/webrtc/icestream.c | 235 ----- + ext/webrtc/icestream.h | 63 -- + ext/webrtc/nicetransport.c | 262 ------ + ext/webrtc/nicetransport.h | 58 -- + ext/webrtc/sctptransport.c | 270 ------ + ext/webrtc/sctptransport.h | 66 -- + ext/webrtc/utils.c | 25 + + ext/webrtc/utils.h | 2 + + ext/webrtc/webrtcdatachannel.c | 54 +- + ext/webrtc/webrtcdatachannel.h | 6 +- + ext/webrtc/webrtcsdp.c | 76 +- + ext/webrtc/webrtcsdp.h | 2 + + gst-libs/gst/webrtc/dtlstransport.c | 32 +- + gst-libs/gst/webrtc/ice.c | 10 +- + gst-libs/gst/webrtc/ice.h | 6 +- + gst-libs/gst/webrtc/meson.build | 5 +- + gst-libs/gst/webrtc/nice/meson.build | 3 + + gst-libs/gst/webrtc/nice/nice.c | 296 +++++-- + gst-libs/gst/webrtc/nice/nicestream.c | 12 +- + gst-libs/gst/webrtc/webrtc_fwd.h | 4 + + 25 files changed, 682 insertions(+), 2437 deletions(-) + delete mode 100644 ext/webrtc/gstwebrtcice.c + delete mode 100644 ext/webrtc/gstwebrtcice.h + delete mode 100644 ext/webrtc/icestream.c + delete mode 100644 ext/webrtc/icestream.h + delete mode 100644 ext/webrtc/nicetransport.c + delete mode 100644 ext/webrtc/nicetransport.h + delete mode 100644 ext/webrtc/sctptransport.c + delete mode 100644 ext/webrtc/sctptransport.h + +diff --git a/ext/webrtc/gstwebrtcbin.c b/ext/webrtc/gstwebrtcbin.c +index 6a8f72236..bf0bc5b3f 100644 +--- a/ext/webrtc/gstwebrtcbin.c ++++ b/ext/webrtc/gstwebrtcbin.c +@@ -70,6 +70,10 @@ + #define RTPHDREXT_STREAM_ID GST_RTP_HDREXT_BASE "sdes:rtp-stream-id" + #define RTPHDREXT_REPAIRED_STREAM_ID GST_RTP_HDREXT_BASE "sdes:repaired-rtp-stream-id" + ++#if !GLIB_CHECK_VERSION(2, 74, 0) ++#define G_CONNECT_DEFAULT 0 ++#endif ++ + /** + * SECTION: element-webrtcbin + * title: webrtcbin +@@ -650,6 +654,7 @@ enum + ON_DATA_CHANNEL_SIGNAL, + PREPARE_DATA_CHANNEL_SIGNAL, + REQUEST_AUX_SENDER, ++ ADD_ICE_CANDIDATE_FULL_SIGNAL, + LAST_SIGNAL, + }; + +@@ -820,6 +825,30 @@ _find_transport_for_session (GstWebRTCBin * webrtc, guint session_id) + return stream; + } + ++static gboolean ++match_stream_for_ice_transport (TransportStream * trans, ++ GstWebRTCICETransport * transport) ++{ ++ return trans->transport && 
trans->transport->transport == transport; ++} ++ ++static TransportStream * ++_find_transport_for_ice_transport (GstWebRTCBin * webrtc, ++ GstWebRTCICETransport * transport) ++{ ++ TransportStream *stream; ++ ++ stream = _find_transport (webrtc, transport, ++ (FindTransportFunc) match_stream_for_ice_transport); ++ ++ GST_TRACE_OBJECT (webrtc, ++ "Found transport %" GST_PTR_FORMAT " for ice transport %" GST_PTR_FORMAT, ++ stream, transport); ++ ++ return stream; ++} ++ ++ + typedef gboolean (*FindPadFunc) (GstWebRTCBinPad * p1, gconstpointer data); + + static GstWebRTCBinPad * +@@ -1081,10 +1110,9 @@ _gst_pc_thread (GstWebRTCBin * webrtc) + g_main_context_invoke (webrtc->priv->main_context, + (GSourceFunc) _unlock_pc_thread, PC_GET_LOCK (webrtc)); + +- /* Having the thread be the thread default GMainContext will break the +- * required queue-like ordering (from W3's peerconnection spec) of re-entrant +- * tasks */ ++ g_main_context_push_thread_default (webrtc->priv->main_context); + g_main_loop_run (webrtc->priv->loop); ++ g_main_context_pop_thread_default (webrtc->priv->main_context); + + GST_OBJECT_LOCK (webrtc); + g_main_context_unref (webrtc->priv->main_context); +@@ -1227,6 +1255,20 @@ gst_webrtc_bin_enqueue_task (GstWebRTCBin * webrtc, GstWebRTCBinFunc func, + return TRUE; + } + ++void ++gst_webrtc_bin_get_peer_connection_stats (GstWebRTCBin * webrtc, ++ guint * data_channels_opened, guint * data_channels_closed) ++{ ++ DC_LOCK (webrtc); ++ if (data_channels_opened) { ++ *data_channels_opened = webrtc->priv->data_channels_opened; ++ } ++ if (data_channels_closed) { ++ *data_channels_closed = webrtc->priv->data_channels_closed; ++ } ++ DC_UNLOCK (webrtc); ++} ++ + /* https://www.w3.org/TR/webrtc/#dom-rtciceconnectionstate */ + static GstWebRTCICEConnectionState + _collate_ice_connection_states (GstWebRTCBin * webrtc) +@@ -1600,13 +1642,6 @@ _update_ice_gathering_state_task (GstWebRTCBin * webrtc, gpointer data) + return NULL; + } + +-static void +-_update_ice_gathering_state (GstWebRTCBin * webrtc) +-{ +- gst_webrtc_bin_enqueue_task (webrtc, _update_ice_gathering_state_task, NULL, +- NULL, NULL); +-} +- + static GstStructure * + _update_ice_connection_state_task (GstWebRTCBin * webrtc, gpointer data) + { +@@ -2115,11 +2150,27 @@ _on_ice_transport_notify_state (GstWebRTCICETransport * transport, + _update_peer_connection_state (webrtc); + } + ++static void ++_on_local_ice_candidate_cb (GstWebRTCICE * ice, guint session_id, ++ gchar * candidate, GstWebRTCBin * webrtc); ++ + static void + _on_ice_transport_notify_gathering_state (GstWebRTCICETransport * transport, + GParamSpec * pspec, GstWebRTCBin * webrtc) + { +- _update_ice_gathering_state (webrtc); ++ GstWebRTCICEGatheringState ice_state; ++ ++ g_object_get (transport, "gathering-state", &ice_state, NULL); ++ if (ice_state == GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE) { ++ TransportStream *stream = ++ _find_transport_for_ice_transport (webrtc, transport); ++ /* signal end-of-candidates */ ++ _on_local_ice_candidate_cb (webrtc->priv->ice, stream->session_id, ++ (char *) "", webrtc); ++ } ++ ++ gst_webrtc_bin_enqueue_task (webrtc, _update_ice_gathering_state_task, NULL, ++ NULL, NULL); + } + + static void +@@ -2386,6 +2437,15 @@ gst_webrtc_bin_attach_tos (GstWebRTCBin * webrtc) + gst_webrtc_bin_update_sctp_priority (webrtc); + } + ++static void ++on_transceiver_notify_direction (GstWebRTCRTPTransceiver * transceiver, ++ GParamSpec * pspec, GstWebRTCBin * webrtc) ++{ ++ PC_LOCK (webrtc); ++ _update_need_negotiation (webrtc); ++ PC_UNLOCK 
(webrtc); ++} ++ + static WebRTCTransceiver * + _create_webrtc_transceiver (GstWebRTCBin * webrtc, + GstWebRTCRTPTransceiverDirection direction, guint mline, GstWebRTCKind kind, +@@ -2415,15 +2475,14 @@ _create_webrtc_transceiver (GstWebRTCBin * webrtc, + + g_signal_connect_object (sender, "notify::priority", + G_CALLBACK (gst_webrtc_bin_attach_tos), webrtc, G_CONNECT_SWAPPED); ++ g_signal_connect_object (trans, "notify::direction", ++ G_CALLBACK (on_transceiver_notify_direction), webrtc, G_CONNECT_DEFAULT); + + g_ptr_array_add (webrtc->priv->transceivers, trans); + + gst_object_unref (sender); + gst_object_unref (receiver); + +- g_signal_emit (webrtc, gst_webrtc_bin_signals[ON_NEW_TRANSCEIVER_SIGNAL], +- 0, trans); +- + return trans; + } + +@@ -2507,6 +2566,7 @@ _on_data_channel_ready_state (WebRTCDataChannel * channel, + } + + g_ptr_array_add (webrtc->priv->data_channels, gst_object_ref (channel)); ++ webrtc->priv->data_channels_opened++; + DC_UNLOCK (webrtc); + + gst_webrtc_bin_update_sctp_priority (webrtc); +@@ -2514,14 +2574,30 @@ _on_data_channel_ready_state (WebRTCDataChannel * channel, + g_signal_emit (webrtc, gst_webrtc_bin_signals[ON_DATA_CHANNEL_SIGNAL], 0, + channel); + } else if (ready_state == GST_WEBRTC_DATA_CHANNEL_STATE_CLOSED) { ++ gboolean found_pending; + gboolean found; + ++ /* Change state on bins outside dc_lock to avoid deadlocks */ ++ gst_element_set_locked_state (channel->src_bin, TRUE); ++ gst_element_set_state (channel->src_bin, GST_STATE_NULL); ++ gst_element_set_locked_state (channel->sink_bin, TRUE); ++ gst_element_set_state (channel->sink_bin, GST_STATE_NULL); ++ + DC_LOCK (webrtc); +- found = g_ptr_array_remove (webrtc->priv->pending_data_channels, channel) ++ found_pending = ++ g_ptr_array_remove (webrtc->priv->pending_data_channels, channel); ++ found = found_pending + || g_ptr_array_remove (webrtc->priv->data_channels, channel); + + if (found == FALSE) { + GST_FIXME_OBJECT (webrtc, "Received close for unknown data channel"); ++ } else { ++ gst_bin_remove (GST_BIN (webrtc), channel->src_bin); ++ gst_bin_remove (GST_BIN (webrtc), channel->sink_bin); ++ ++ if (found_pending == FALSE) { ++ webrtc->priv->data_channels_closed++; ++ } + } + DC_UNLOCK (webrtc); + } +@@ -2543,7 +2619,7 @@ _on_sctpdec_pad_added (GstElement * sctpdec, GstPad * pad, + if (!channel) { + channel = g_object_new (WEBRTC_TYPE_DATA_CHANNEL, NULL); + channel->parent.id = stream_id; +- channel->webrtcbin = webrtc; ++ webrtc_data_channel_set_webrtcbin (channel, webrtc); + + g_signal_emit (webrtc, gst_webrtc_bin_signals[PREPARE_DATA_CHANNEL_SIGNAL], + 0, channel, FALSE); +@@ -4476,6 +4552,8 @@ _create_answer_task (GstWebRTCBin * webrtc, const GstStructure * options, + gst_sdp_media_set_proto (media, "UDP/TLS/RTP/SAVPF"); + offer_caps = _rtp_caps_from_media (offer_media); + ++ _remove_optional_offer_fields (offer_caps); ++ + if (last_answer && i < gst_sdp_message_medias_len (last_answer) + && (rtp_trans = _find_transceiver_for_mid (webrtc, mid))) { + const GstSDPMedia *last_media = +@@ -4586,6 +4664,11 @@ _create_answer_task (GstWebRTCBin * webrtc, const GstStructure * options, + trans = _create_webrtc_transceiver (webrtc, answer_dir, i, kind, NULL); + rtp_trans = GST_WEBRTC_RTP_TRANSCEIVER (trans); + ++ PC_UNLOCK (webrtc); ++ g_signal_emit (webrtc, ++ gst_webrtc_bin_signals[ON_NEW_TRANSCEIVER_SIGNAL], 0, rtp_trans); ++ PC_LOCK (webrtc); ++ + GST_LOG_OBJECT (webrtc, "Created new transceiver %" GST_PTR_FORMAT + " for mline %u with media kind %d", trans, i, kind); + +@@ -4748,7 +4831,7 @@ out: + 
webrtc->priv->last_generated_offer = NULL; + if (webrtc->priv->last_generated_answer) + gst_webrtc_session_description_free (webrtc->priv->last_generated_answer); +- { ++ if (ret) { + GstSDPMessage *copy; + gst_sdp_message_copy (ret, ©); + webrtc->priv->last_generated_answer = +@@ -5302,12 +5385,15 @@ typedef struct + { + guint mlineindex; + gchar *candidate; ++ GstPromise *promise; + } IceCandidateItem; + + static void + _clear_ice_candidate_item (IceCandidateItem * item) + { + g_free (item->candidate); ++ if (item->promise) ++ gst_promise_unref (item->promise); + } + + static void +@@ -5319,12 +5405,23 @@ _add_ice_candidate (GstWebRTCBin * webrtc, IceCandidateItem * item, + stream = _find_ice_stream_for_session (webrtc, item->mlineindex); + if (stream == NULL) { + if (drop_invalid) { +- GST_WARNING_OBJECT (webrtc, "Unknown mline %u, dropping", +- item->mlineindex); ++ if (item->promise) { ++ GError *error = ++ g_error_new (GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "Unknown mline %u, dropping", item->mlineindex); ++ GstStructure *s = gst_structure_new ("application/x-gst-promise", ++ "error", G_TYPE_ERROR, error, NULL); ++ gst_promise_reply (item->promise, s); ++ g_clear_error (&error); ++ } else { ++ GST_WARNING_OBJECT (webrtc, "Unknown mline %u, dropping", ++ item->mlineindex); ++ } + } else { + IceCandidateItem new; + new.mlineindex = item->mlineindex; + new.candidate = g_strdup (item->candidate); ++ new.promise = NULL; + GST_INFO_OBJECT (webrtc, "Unknown mline %u, deferring", item->mlineindex); + + ICE_LOCK (webrtc); +@@ -5337,7 +5434,8 @@ _add_ice_candidate (GstWebRTCBin * webrtc, IceCandidateItem * item, + GST_LOG_OBJECT (webrtc, "adding ICE candidate with mline:%u, %s", + item->mlineindex, item->candidate); + +- gst_webrtc_ice_add_candidate (webrtc->priv->ice, stream, item->candidate); ++ gst_webrtc_ice_add_candidate (webrtc->priv->ice, stream, item->candidate, ++ item->promise); + } + + static void +@@ -5363,7 +5461,7 @@ _add_ice_candidates_from_sdp (GstWebRTCBin * webrtc, gint mlineindex, + candidate = g_strdup_printf ("a=candidate:%s", attr->value); + GST_LOG_OBJECT (webrtc, "adding ICE candidate with mline:%u, %s", + mlineindex, candidate); +- gst_webrtc_ice_add_candidate (webrtc->priv->ice, stream, candidate); ++ gst_webrtc_ice_add_candidate (webrtc->priv->ice, stream, candidate, NULL); + g_free (candidate); + } + } +@@ -5395,6 +5493,24 @@ _add_ice_candidate_to_sdp (GstWebRTCBin * webrtc, + gst_sdp_media_add_attribute (media, "candidate", candidate + 10); + } + ++static void ++_add_end_of_candidate_to_sdp (GstWebRTCBin * webrtc, ++ GstSDPMessage * sdp, gint mline_index) ++{ ++ GstSDPMedia *media = NULL; ++ ++ if (mline_index < sdp->medias->len) { ++ media = &g_array_index (sdp->medias, GstSDPMedia, mline_index); ++ } ++ ++ if (media == NULL) { ++ GST_WARNING_OBJECT (webrtc, "Couldn't find mline %d to merge ICE candidate", ++ mline_index); ++ return; ++ } ++ gst_sdp_media_add_attribute (media, "end-of-candidates", ""); ++} ++ + static gboolean + _filter_sdp_fields (GQuark field_id, const GValue * value, + GstStructure * new_structure) +@@ -5632,7 +5748,15 @@ _update_transceiver_from_sdp_media (GstWebRTCBin * webrtc, + GstWebRTCDTLSSetup local_setup, remote_setup; + + local_setup = _get_dtls_setup_from_media (local_media); ++ if (local_setup == GST_WEBRTC_DTLS_SETUP_NONE) ++ local_setup = ++ _get_dtls_setup_from_session (webrtc->current_local_description->sdp); ++ + remote_setup = _get_dtls_setup_from_media (remote_media); ++ if (remote_setup == 
GST_WEBRTC_DTLS_SETUP_NONE) ++ remote_setup = ++ _get_dtls_setup_from_session (webrtc-> ++ current_remote_description->sdp); + new_setup = _get_final_setup (local_setup, remote_setup); + if (new_setup == GST_WEBRTC_DTLS_SETUP_NONE) { + g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, +@@ -5648,7 +5772,7 @@ _update_transceiver_from_sdp_media (GstWebRTCBin * webrtc, + "Cannot intersect dtls setup attributes for media %u", media_idx); + return; + } +- ++#if 0 + if (prev_dir != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE + && new_dir != GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE + && prev_dir != new_dir) { +@@ -5658,7 +5782,7 @@ _update_transceiver_from_sdp_media (GstWebRTCBin * webrtc, + media_idx); + return; + } +- ++#endif + if (!bundled || bundle_idx == media_idx) { + new_rtcp_rsize = _media_has_attribute_key (local_media, "rtcp-rsize") + && _media_has_attribute_key (remote_media, "rtcp-rsize"); +@@ -5974,15 +6098,19 @@ _find_compatible_unassociated_transceiver (GstWebRTCRTPTransceiver * p1, + return TRUE; + } + +-static void +-_connect_rtpfunnel (GstWebRTCBin * webrtc, guint session_id) ++static gboolean ++_connect_rtpfunnel (GstWebRTCBin * webrtc, guint session_id, GError ** error) + { + gchar *pad_name; + GstPad *srcpad; + GstPad *rtp_sink; + TransportStream *stream = _find_transport_for_session (webrtc, session_id); + +- g_assert (stream); ++ if (!stream) { ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ "Invalid bundle id %u, no session found", session_id); ++ return FALSE; ++ } + + if (webrtc->rtpfunnel) + goto done; +@@ -6008,7 +6136,7 @@ _connect_rtpfunnel (GstWebRTCBin * webrtc, guint session_id) + g_free (pad_name); + + done: +- return; ++ return TRUE; + } + + static gboolean +@@ -6050,7 +6178,9 @@ _update_transceivers_from_sdp (GstWebRTCBin * webrtc, SDPSource source, + } + ensure_rtx_hdr_ext (bundle_stream); + +- _connect_rtpfunnel (webrtc, bundle_idx); ++ if (!_connect_rtpfunnel (webrtc, bundle_idx, error)) { ++ goto done; ++ } + } + + for (i = 0; i < gst_sdp_message_medias_len (sdp->sdp); i++) { +@@ -6116,6 +6246,10 @@ _update_transceivers_from_sdp (GstWebRTCBin * webrtc, SDPSource source, + _get_direction_from_media (media), i, kind, NULL); + webrtc_transceiver_set_transport (t, stream); + trans = GST_WEBRTC_RTP_TRANSCEIVER (t); ++ PC_UNLOCK (webrtc); ++ g_signal_emit (webrtc, ++ gst_webrtc_bin_signals[ON_NEW_TRANSCEIVER_SIGNAL], 0, trans); ++ PC_LOCK (webrtc); + } + + _update_transceiver_from_sdp_media (webrtc, sdp->sdp, i, stream, +@@ -6284,6 +6418,7 @@ get_last_generated_description (GstWebRTCBin * webrtc, SDPSource source, + static GstStructure * + _set_description_task (GstWebRTCBin * webrtc, struct set_description *sd) + { ++ GstWebRTCSignalingState old_signaling_state = webrtc->signaling_state; + GstWebRTCSignalingState new_signaling_state = webrtc->signaling_state; + gboolean signalling_state_changed = FALSE; + GError *error = NULL; +@@ -6638,7 +6773,7 @@ _set_description_task (GstWebRTCBin * webrtc, struct set_description *sd) + */ + if (signalling_state_changed) { + const gchar *from = _enum_value_to_string (GST_TYPE_WEBRTC_SIGNALING_STATE, +- webrtc->signaling_state); ++ old_signaling_state); + const gchar *to = _enum_value_to_string (GST_TYPE_WEBRTC_SIGNALING_STATE, + new_signaling_state); + GST_TRACE_OBJECT (webrtc, "notify signaling-state from %s " +@@ -6767,6 +6902,7 @@ _add_ice_candidate_task (GstWebRTCBin * webrtc, IceCandidateItem * item) + IceCandidateItem new; + new.mlineindex = item->mlineindex; + 
new.candidate = g_steal_pointer (&item->candidate); ++ new.promise = NULL; + + ICE_LOCK (webrtc); + g_array_append_val (webrtc->priv->pending_remote_ice_candidates, new); +@@ -6787,21 +6923,32 @@ _free_ice_candidate_item (IceCandidateItem * item) + + static void + gst_webrtc_bin_add_ice_candidate (GstWebRTCBin * webrtc, guint mline, +- const gchar * attr) ++ const gchar * attr, GstPromise * promise) + { + IceCandidateItem *item; + + item = g_new0 (IceCandidateItem, 1); + item->mlineindex = mline; ++ item->promise = promise ? gst_promise_ref (promise) : NULL; + if (attr && attr[0] != 0) { + if (!g_ascii_strncasecmp (attr, "a=candidate:", 12)) + item->candidate = g_strdup (attr); + else if (!g_ascii_strncasecmp (attr, "candidate:", 10)) + item->candidate = g_strdup_printf ("a=%s", attr); + } +- gst_webrtc_bin_enqueue_task (webrtc, +- (GstWebRTCBinFunc) _add_ice_candidate_task, item, +- (GDestroyNotify) _free_ice_candidate_item, NULL); ++ if (!gst_webrtc_bin_enqueue_task (webrtc, ++ (GstWebRTCBinFunc) _add_ice_candidate_task, item, ++ (GDestroyNotify) _free_ice_candidate_item, promise)) { ++ GError *error = ++ g_error_new (GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_INVALID_STATE, ++ "Could not add ICE candidate. webrtcbin is closed"); ++ GstStructure *s = gst_structure_new ("application/x-gst-promise", "error", ++ G_TYPE_ERROR, error, NULL); ++ ++ gst_promise_reply (promise, s); ++ ++ g_clear_error (&error); ++ } + } + + static GstStructure * +@@ -6832,7 +6979,7 @@ _on_local_ice_candidate_task (GstWebRTCBin * webrtc) + IceCandidateItem *item = &g_array_index (items, IceCandidateItem, i); + const gchar *cand = item->candidate; + +- if (!g_ascii_strncasecmp (cand, "a=candidate:", 12)) { ++ if (cand && !g_ascii_strncasecmp (cand, "a=candidate:", 12)) { + /* stripping away "a=" */ + cand += 2; + } +@@ -6847,12 +6994,24 @@ _on_local_ice_candidate_task (GstWebRTCBin * webrtc) + * FIXME: This ICE candidate should be stored somewhere with + * the associated mid and also merged back into any subsequent + * local descriptions on renegotiation */ +- if (webrtc->current_local_description) +- _add_ice_candidate_to_sdp (webrtc, webrtc->current_local_description->sdp, +- item->mlineindex, cand); +- if (webrtc->pending_local_description) +- _add_ice_candidate_to_sdp (webrtc, webrtc->pending_local_description->sdp, +- item->mlineindex, cand); ++ if (webrtc->current_local_description) { ++ if (cand && cand[0] != '\0') { ++ _add_ice_candidate_to_sdp (webrtc, ++ webrtc->current_local_description->sdp, item->mlineindex, cand); ++ } else { ++ _add_end_of_candidate_to_sdp (webrtc, ++ webrtc->current_local_description->sdp, item->mlineindex); ++ } ++ } ++ if (webrtc->pending_local_description) { ++ if (cand && cand[0] != '\0') { ++ _add_ice_candidate_to_sdp (webrtc, ++ webrtc->pending_local_description->sdp, item->mlineindex, cand); ++ } else { ++ _add_end_of_candidate_to_sdp (webrtc, ++ webrtc->pending_local_description->sdp, item->mlineindex); ++ } ++ } + + PC_UNLOCK (webrtc); + g_signal_emit (webrtc, gst_webrtc_bin_signals[ON_ICE_CANDIDATE_SIGNAL], +@@ -6874,6 +7033,7 @@ _on_local_ice_candidate_cb (GstWebRTCICE * ice, guint session_id, + + item.mlineindex = session_id; + item.candidate = g_strdup (candidate); ++ item.promise = NULL; + + ICE_LOCK (webrtc); + g_array_append_val (webrtc->priv->pending_local_ice_candidates, item); +@@ -6966,6 +7126,9 @@ gst_webrtc_bin_add_transceiver (GstWebRTCBin * webrtc, + + PC_UNLOCK (webrtc); + ++ g_signal_emit (webrtc, gst_webrtc_bin_signals[ON_NEW_TRANSCEIVER_SIGNAL], 0, ++ trans); ++ 
+ return gst_object_ref (trans); + } + +@@ -7110,6 +7273,10 @@ gst_webrtc_bin_create_data_channel (GstWebRTCBin * webrtc, const gchar * label, + g_object_get (webrtc->priv->sctp_transport, "max-channels", &max_channels, + NULL); + ++ if (max_channels <= 0) { ++ max_channels = 65534; ++ } ++ + g_return_val_if_fail (id <= max_channels, NULL); + } + +@@ -7166,8 +7333,9 @@ gst_webrtc_bin_create_data_channel (GstWebRTCBin * webrtc, const gchar * label, + gst_element_sync_state_with_parent (ret->sink_bin); + + ret = gst_object_ref (ret); +- ret->webrtcbin = webrtc; ++ webrtc_data_channel_set_webrtcbin (ret, webrtc); + g_ptr_array_add (webrtc->priv->data_channels, ret); ++ webrtc->priv->data_channels_opened++; + DC_UNLOCK (webrtc); + + gst_webrtc_bin_update_sctp_priority (webrtc); +@@ -7361,7 +7529,8 @@ on_rtpbin_request_aux_sender (GstElement * rtpbin, guint session_id, + GstPad *sinkpad = gst_element_get_static_pad (aux_sender, "sink"); + GstPad *srcpad = gst_element_get_static_pad (aux_sender, "src"); + +- gst_object_ref_sink (aux_sender); ++ if (g_object_is_floating (aux_sender)) ++ aux_sender = gst_object_ref_sink (aux_sender); + + if (!sinkpad || !srcpad) { + GST_ERROR_OBJECT (webrtc, +@@ -7723,6 +7892,7 @@ jitter_buffer_set_retransmission (SsrcMapItem * item, + { + GstWebRTCRTPTransceiver *trans; + gboolean do_nack; ++ GObjectClass *jb_class; + + if (item->media_idx == -1) + return TRUE; +@@ -7733,13 +7903,23 @@ jitter_buffer_set_retransmission (SsrcMapItem * item, + return TRUE; + } + ++ jb_class = G_OBJECT_GET_CLASS (G_OBJECT (data->jitterbuffer)); + do_nack = WEBRTC_TRANSCEIVER (trans)->do_nack; +- /* We don't set do-retransmission on rtpbin as we want per-session control */ +- GST_LOG_OBJECT (data->webrtc, "setting do-nack=%s for transceiver %" +- GST_PTR_FORMAT " with transport %" GST_PTR_FORMAT +- " rtp session %u ssrc %u", do_nack ? "true" : "false", trans, +- data->stream, data->stream->session_id, data->ssrc); +- g_object_set (data->jitterbuffer, "do-retransmission", do_nack, NULL); ++ if (g_object_class_find_property (jb_class, "do-retransmission")) { ++ /* We don't set do-retransmission on rtpbin as we want per-session control */ ++ GST_LOG_OBJECT (data->webrtc, "setting do-nack=%s for transceiver %" ++ GST_PTR_FORMAT " with transport %" GST_PTR_FORMAT ++ " rtp session %u ssrc %u", do_nack ? 
"true" : "false", trans, ++ data->stream, data->stream->session_id, data->ssrc); ++ g_object_set (data->jitterbuffer, "do-retransmission", do_nack, NULL); ++ } else if (do_nack) { ++ GST_WARNING_OBJECT (data->webrtc, "Not setting do-nack for transceiver %" ++ GST_PTR_FORMAT " with transport %" GST_PTR_FORMAT ++ " rtp session %u ssrc %u" ++ " as its jitterbuffer does not have a do-retransmission property", ++ trans, data->stream, data->stream->session_id, data->ssrc); ++ } ++ + + g_weak_ref_set (&item->rtpjitterbuffer, data->jitterbuffer); + +@@ -7990,7 +8170,7 @@ gst_webrtc_bin_request_new_pad (GstElement * element, GstPadTemplate * templ, + const gchar * name, const GstCaps * caps) + { + GstWebRTCBin *webrtc = GST_WEBRTC_BIN (element); +- GstWebRTCRTPTransceiver *trans = NULL; ++ GstWebRTCRTPTransceiver *trans = NULL, *created_trans = NULL; + GstWebRTCBinPad *pad = NULL; + guint serial; + gboolean lock_mline = FALSE; +@@ -8118,7 +8298,8 @@ gst_webrtc_bin_request_new_pad (GstElement * element, GstPadTemplate * templ, + } + + if (!trans) { +- trans = GST_WEBRTC_RTP_TRANSCEIVER (_create_webrtc_transceiver (webrtc, ++ trans = created_trans = ++ GST_WEBRTC_RTP_TRANSCEIVER (_create_webrtc_transceiver (webrtc, + GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV, -1, + webrtc_kind_from_caps (caps), NULL)); + GST_LOG_OBJECT (webrtc, "Created new transceiver %" GST_PTR_FORMAT, trans); +@@ -8158,6 +8339,10 @@ gst_webrtc_bin_request_new_pad (GstElement * element, GstPadTemplate * templ, + + PC_UNLOCK (webrtc); + ++ if (created_trans) ++ g_signal_emit (webrtc, gst_webrtc_bin_signals[ON_NEW_TRANSCEIVER_SIGNAL], ++ 0, created_trans); ++ + _add_pad (webrtc, pad); + + return GST_PAD (pad); +@@ -8184,6 +8369,11 @@ gst_webrtc_bin_release_pad (GstElement * element, GstPad * pad) + gst_caps_replace (&webrtc_pad->received_caps, NULL); + PC_UNLOCK (webrtc); + ++ if (webrtc_pad->block_id) { ++ gst_pad_remove_probe (GST_PAD (pad), webrtc_pad->block_id); ++ webrtc_pad->block_id = 0; ++ } ++ + _remove_pad (webrtc, webrtc_pad); + + PC_LOCK (webrtc); +@@ -8624,7 +8814,7 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass) + * GstWebRTCBin:http-proxy: + * + * A HTTP proxy for use with TURN/TCP of the form +- * http://[username:password@]hostname[:port] ++ * http://[username:password@]hostname[:port][?alpn=] + * + * Since: 1.22 + */ +@@ -8632,7 +8822,7 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass) + PROP_HTTP_PROXY, + g_param_spec_string ("http-proxy", "HTTP Proxy", + "A HTTP proxy for use with TURN/TCP of the form " +- "http://[username:password@]hostname[:port]", ++ "http://[username:password@]hostname[:port][?alpn=]", + NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + /** +@@ -8710,6 +8900,26 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass) + G_CALLBACK (gst_webrtc_bin_add_ice_candidate), NULL, NULL, NULL, + G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_STRING); + ++ /** ++ * GstWebRTCBin::add-ice-candidate-full: ++ * @object: the #webrtcbin ++ * @mline_index: the index of the media description in the SDP ++ * @ice-candidate: an ice candidate or NULL/"" to mark that no more candidates ++ * will arrive ++ * @promise: (nullable): a #GstPromise to be notified when the task is ++ * complete ++ * ++ * Variant of the `add-ice-candidate` signal, allowing the call site to be ++ * notified using a #GstPromise when the task has completed. 
++ * ++ * Since: 1.24 ++ */ ++ gst_webrtc_bin_signals[ADD_ICE_CANDIDATE_FULL_SIGNAL] = ++ g_signal_new_class_handler ("add-ice-candidate-full", ++ G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, ++ G_CALLBACK (gst_webrtc_bin_add_ice_candidate), NULL, NULL, NULL, ++ G_TYPE_NONE, 3, G_TYPE_UINT, G_TYPE_STRING, GST_TYPE_PROMISE); ++ + /** + * GstWebRTCBin::get-stats: + * @object: the #webrtcbin +@@ -8788,6 +8998,11 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass) + * "local-id" G_TYPE_STRING identifier for the associated RTCInboundRTPSTreamStats + * "remote-timestamp" G_TYPE_DOUBLE remote timestamp the statistics were sent by the remote + * ++ * RTCPeerConnectionStats supported fields (https://w3c.github.io/webrtc-stats/#pcstats-dict*) (Since: 1.24) ++ * ++ * "data-channels-opened" G_TYPE_UINT number of unique data channels that have entered the 'open' state ++ * "data-channels-closed" G_TYPE_UINT number of unique data channels that have left the 'open' state ++ * + * RTCIceCandidateStats supported fields (https://www.w3.org/TR/webrtc-stats/#icecandidate-dict*) (Since: 1.22) + * + * "transport-id" G_TYPE_STRING identifier for the associated RTCTransportStats for this stream +@@ -8886,7 +9101,7 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass) + * GstWebRTCBin::add-transceiver: + * @object: the #webrtcbin + * @direction: the direction of the new transceiver +- * @caps: (allow none): the codec preferences for this transceiver ++ * @caps: (nullable): the codec preferences for this transceiver + * + * Returns: the new #GstWebRTCRTPTransceiver + */ +@@ -8936,7 +9151,7 @@ gst_webrtc_bin_class_init (GstWebRTCBinClass * klass) + G_CALLBACK (gst_webrtc_bin_add_turn_server), NULL, NULL, NULL, + G_TYPE_BOOLEAN, 1, G_TYPE_STRING); + +- /* ++ /** + * GstWebRTCBin::create-data-channel: + * @object: the #GstWebRTCBin + * @label: the label for the data channel +diff --git a/ext/webrtc/gstwebrtcbin.h b/ext/webrtc/gstwebrtcbin.h +index 9445d9e5a..59ed787f2 100644 +--- a/ext/webrtc/gstwebrtcbin.h ++++ b/ext/webrtc/gstwebrtcbin.h +@@ -108,11 +108,15 @@ struct _GstWebRTCBinPrivate + gboolean bundle; + GPtrArray *transceivers; + GPtrArray *transports; ++ /* stats according to https://www.w3.org/TR/webrtc-stats/#dictionary-rtcpeerconnectionstats-members */ ++ guint data_channels_opened; ++ guint data_channels_closed; + GPtrArray *data_channels; + /* list of data channels we've received a sctp stream for but no data + * channel protocol for */ + GPtrArray *pending_data_channels; +- /* dc_lock protects data_channels and pending_data_channels */ ++ /* dc_lock protects data_channels and pending_data_channels ++ * and data_channels_opened and data_channels_closed */ + /* lock ordering is pc_lock first, then dc_lock */ + GMutex dc_lock; + +@@ -172,6 +176,10 @@ gboolean gst_webrtc_bin_enqueue_task (GstWebRTCBin * pc, + GDestroyNotify notify, + GstPromise *promise); + ++void gst_webrtc_bin_get_peer_connection_stats(GstWebRTCBin * pc, ++ guint * data_channels_opened, ++ guint * data_channels_closed); ++ + G_END_DECLS + + #endif /* __GST_WEBRTC_BIN_H__ */ +diff --git a/ext/webrtc/gstwebrtcice.c b/ext/webrtc/gstwebrtcice.c +deleted file mode 100644 +index 7dd716ff7..000000000 +--- a/ext/webrtc/gstwebrtcice.c ++++ /dev/null +@@ -1,1161 +0,0 @@ +-/* GStreamer +- * Copyright (C) 2017 Matthew Waters +- * +- * This library is free software; you can redistribute it and/or +- * modify it under the terms of the GNU Library General Public +- * License as published by the Free Software Foundation; either 
+- * version 2 of the License, or (at your option) any later version. +- * +- * This library is distributed in the hope that it will be useful, +- * but WITHOUT ANY WARRANTY; without even the implied warranty of +- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +- * Library General Public License for more details. +- * +- * You should have received a copy of the GNU Library General Public +- * License along with this library; if not, write to the +- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, +- * Boston, MA 02110-1301, USA. +- */ +- +-#ifdef HAVE_CONFIG_H +-# include "config.h" +-#endif +- +-#include "gstwebrtcice.h" +-/* libnice */ +-#include +-#include "icestream.h" +-#include "nicetransport.h" +- +-/* XXX: +- * +- * - are locally generated remote candidates meant to be readded to libnice? +- */ +- +-static GstUri *_validate_turn_server (GstWebRTCICE * ice, const gchar * s); +- +-#define GST_CAT_DEFAULT gst_webrtc_ice_debug +-GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); +- +-GQuark +-gst_webrtc_ice_error_quark (void) +-{ +- return g_quark_from_static_string ("gst-webrtc-ice-error-quark"); +-} +- +-enum +-{ +- SIGNAL_0, +- ADD_LOCAL_IP_ADDRESS_SIGNAL, +- LAST_SIGNAL, +-}; +- +-enum +-{ +- PROP_0, +- PROP_AGENT, +- PROP_ICE_TCP, +- PROP_ICE_UDP, +-}; +- +-static guint gst_webrtc_ice_signals[LAST_SIGNAL] = { 0 }; +- +-struct _GstWebRTCICEPrivate +-{ +- NiceAgent *nice_agent; +- +- GArray *nice_stream_map; +- +- GThread *thread; +- GMainContext *main_context; +- GMainLoop *loop; +- GMutex lock; +- GCond cond; +- +- GstWebRTCIceOnCandidateFunc on_candidate; +- gpointer on_candidate_data; +- GDestroyNotify on_candidate_notify; +-}; +- +-#define gst_webrtc_ice_parent_class parent_class +-G_DEFINE_TYPE_WITH_CODE (GstWebRTCICE, gst_webrtc_ice, +- GST_TYPE_OBJECT, G_ADD_PRIVATE (GstWebRTCICE) +- GST_DEBUG_CATEGORY_INIT (gst_webrtc_ice_debug, "webrtcice", 0, +- "webrtcice");); +- +-static gboolean +-_unlock_pc_thread (GMutex * lock) +-{ +- g_mutex_unlock (lock); +- return G_SOURCE_REMOVE; +-} +- +-static gpointer +-_gst_nice_thread (GstWebRTCICE * ice) +-{ +- g_mutex_lock (&ice->priv->lock); +- ice->priv->main_context = g_main_context_new (); +- ice->priv->loop = g_main_loop_new (ice->priv->main_context, FALSE); +- +- g_cond_broadcast (&ice->priv->cond); +- g_main_context_invoke (ice->priv->main_context, +- (GSourceFunc) _unlock_pc_thread, &ice->priv->lock); +- +- g_main_loop_run (ice->priv->loop); +- +- g_mutex_lock (&ice->priv->lock); +- g_main_context_unref (ice->priv->main_context); +- ice->priv->main_context = NULL; +- g_main_loop_unref (ice->priv->loop); +- ice->priv->loop = NULL; +- g_cond_broadcast (&ice->priv->cond); +- g_mutex_unlock (&ice->priv->lock); +- +- return NULL; +-} +- +-static void +-_start_thread (GstWebRTCICE * ice) +-{ +- g_mutex_lock (&ice->priv->lock); +- ice->priv->thread = g_thread_new (GST_OBJECT_NAME (ice), +- (GThreadFunc) _gst_nice_thread, ice); +- +- while (!ice->priv->loop) +- g_cond_wait (&ice->priv->cond, &ice->priv->lock); +- g_mutex_unlock (&ice->priv->lock); +-} +- +-static void +-_stop_thread (GstWebRTCICE * ice) +-{ +- g_mutex_lock (&ice->priv->lock); +- g_main_loop_quit (ice->priv->loop); +- while (ice->priv->loop) +- g_cond_wait (&ice->priv->cond, &ice->priv->lock); +- g_mutex_unlock (&ice->priv->lock); +- +- g_thread_unref (ice->priv->thread); +-} +- +-#if 0 +-static NiceComponentType +-_webrtc_component_to_nice (GstWebRTCICEComponent comp) +-{ +- switch (comp) { +- case GST_WEBRTC_ICE_COMPONENT_RTP: +- return 
NICE_COMPONENT_TYPE_RTP; +- case GST_WEBRTC_ICE_COMPONENT_RTCP: +- return NICE_COMPONENT_TYPE_RTCP; +- default: +- g_assert_not_reached (); +- return 0; +- } +-} +- +-static GstWebRTCICEComponent +-_nice_component_to_webrtc (NiceComponentType comp) +-{ +- switch (comp) { +- case NICE_COMPONENT_TYPE_RTP: +- return GST_WEBRTC_ICE_COMPONENT_RTP; +- case NICE_COMPONENT_TYPE_RTCP: +- return GST_WEBRTC_ICE_COMPONENT_RTCP; +- default: +- g_assert_not_reached (); +- return 0; +- } +-} +-#endif +-struct NiceStreamItem +-{ +- guint session_id; +- guint nice_stream_id; +- GstWebRTCICEStream *stream; +-}; +- +-/* TRUE to continue, FALSE to stop */ +-typedef gboolean (*NiceStreamItemForeachFunc) (struct NiceStreamItem * item, +- gpointer user_data); +- +-static void +-_nice_stream_item_foreach (GstWebRTCICE * ice, NiceStreamItemForeachFunc func, +- gpointer data) +-{ +- int i, len; +- +- len = ice->priv->nice_stream_map->len; +- for (i = 0; i < len; i++) { +- struct NiceStreamItem *item = +- &g_array_index (ice->priv->nice_stream_map, struct NiceStreamItem, +- i); +- +- if (!func (item, data)) +- break; +- } +-} +- +-/* TRUE for match, FALSE otherwise */ +-typedef gboolean (*NiceStreamItemFindFunc) (struct NiceStreamItem * item, +- gpointer user_data); +- +-struct nice_find +-{ +- NiceStreamItemFindFunc func; +- gpointer data; +- struct NiceStreamItem *ret; +-}; +- +-static gboolean +-_find_nice_item (struct NiceStreamItem *item, gpointer user_data) +-{ +- struct nice_find *f = user_data; +- if (f->func (item, f->data)) { +- f->ret = item; +- return FALSE; +- } +- return TRUE; +-} +- +-static struct NiceStreamItem * +-_nice_stream_item_find (GstWebRTCICE * ice, NiceStreamItemFindFunc func, +- gpointer data) +-{ +- struct nice_find f; +- +- f.func = func; +- f.data = data; +- f.ret = NULL; +- +- _nice_stream_item_foreach (ice, _find_nice_item, &f); +- +- return f.ret; +-} +- +-#define NICE_MATCH_INIT { -1, -1, NULL } +- +-static gboolean +-_match (struct NiceStreamItem *item, struct NiceStreamItem *m) +-{ +- if (m->session_id != -1 && m->session_id != item->session_id) +- return FALSE; +- if (m->nice_stream_id != -1 && m->nice_stream_id != item->nice_stream_id) +- return FALSE; +- if (m->stream != NULL && m->stream != item->stream) +- return FALSE; +- +- return TRUE; +-} +- +-static struct NiceStreamItem * +-_find_item (GstWebRTCICE * ice, guint session_id, guint nice_stream_id, +- GstWebRTCICEStream * stream) +-{ +- struct NiceStreamItem m = NICE_MATCH_INIT; +- +- m.session_id = session_id; +- m.nice_stream_id = nice_stream_id; +- m.stream = stream; +- +- return _nice_stream_item_find (ice, (NiceStreamItemFindFunc) _match, &m); +-} +- +-static struct NiceStreamItem * +-_create_nice_stream_item (GstWebRTCICE * ice, guint session_id) +-{ +- struct NiceStreamItem item; +- +- item.session_id = session_id; +- item.nice_stream_id = nice_agent_add_stream (ice->priv->nice_agent, 2); +- item.stream = gst_webrtc_ice_stream_new (ice, item.nice_stream_id); +- g_array_append_val (ice->priv->nice_stream_map, item); +- +- return _find_item (ice, item.session_id, item.nice_stream_id, item.stream); +-} +- +-static void +-_parse_userinfo (const gchar * userinfo, gchar ** user, gchar ** pass) +-{ +- const gchar *colon; +- +- if (!userinfo) { +- *user = NULL; +- *pass = NULL; +- return; +- } +- +- colon = g_strstr_len (userinfo, -1, ":"); +- if (!colon) { +- *user = g_uri_unescape_string (userinfo, NULL); +- *pass = NULL; +- return; +- } +- +- /* Check that the first occurence is also the last occurence */ +- if (colon != 
g_strrstr (userinfo, ":")) +- GST_WARNING ("userinfo %s contains more than one ':', will assume that the " +- "first ':' delineates user:pass. You should escape the user and pass " +- "before adding to the URI.", userinfo); +- +- *user = g_uri_unescape_segment (userinfo, colon, NULL); +- *pass = g_uri_unescape_string (&colon[1], NULL); +-} +- +-static gchar * +-_resolve_host (GstWebRTCICE * ice, const gchar * host) +-{ +- GResolver *resolver = g_resolver_get_default (); +- GError *error = NULL; +- GInetAddress *addr; +- GList *addresses; +- gchar *address; +- +- GST_DEBUG_OBJECT (ice, "Resolving host %s", host); +- +- if (!(addresses = g_resolver_lookup_by_name (resolver, host, NULL, &error))) { +- GST_ERROR ("%s", error->message); +- g_clear_error (&error); +- return NULL; +- } +- +- GST_DEBUG_OBJECT (ice, "Resolved %d addresses for host %s", +- g_list_length (addresses), host); +- +- /* XXX: only the first address is used */ +- addr = addresses->data; +- address = g_inet_address_to_string (addr); +- g_resolver_free_addresses (addresses); +- +- return address; +-} +- +-static void +-_add_turn_server (GstWebRTCICE * ice, struct NiceStreamItem *item, +- GstUri * turn_server) +-{ +- gboolean ret; +- gchar *user, *pass; +- const gchar *host, *userinfo, *transport, *scheme; +- NiceRelayType relays[4] = { 0, }; +- int i, relay_n = 0; +- gchar *ip = NULL; +- +- host = gst_uri_get_host (turn_server); +- if (!host) { +- GST_ERROR_OBJECT (ice, "Turn server has no host"); +- goto out; +- } +- ip = _resolve_host (ice, host); +- if (!ip) { +- GST_ERROR_OBJECT (ice, "Failed to resolve turn server '%s'", host); +- goto out; +- } +- +- /* Set the resolved IP as the host since that's what libnice wants */ +- gst_uri_set_host (turn_server, ip); +- +- scheme = gst_uri_get_scheme (turn_server); +- transport = gst_uri_get_query_value (turn_server, "transport"); +- userinfo = gst_uri_get_userinfo (turn_server); +- _parse_userinfo (userinfo, &user, &pass); +- +- if (g_strcmp0 (scheme, "turns") == 0) { +- relays[relay_n++] = NICE_RELAY_TYPE_TURN_TLS; +- } else if (g_strcmp0 (scheme, "turn") == 0) { +- if (!transport || g_strcmp0 (transport, "udp") == 0) +- relays[relay_n++] = NICE_RELAY_TYPE_TURN_UDP; +- if (!transport || g_strcmp0 (transport, "tcp") == 0) +- relays[relay_n++] = NICE_RELAY_TYPE_TURN_TCP; +- } +- g_assert (relay_n < G_N_ELEMENTS (relays)); +- +- for (i = 0; i < relay_n; i++) { +- ret = nice_agent_set_relay_info (ice->priv->nice_agent, +- item->nice_stream_id, NICE_COMPONENT_TYPE_RTP, +- gst_uri_get_host (turn_server), gst_uri_get_port (turn_server), +- user, pass, relays[i]); +- if (!ret) { +- gchar *uri = gst_uri_to_string (turn_server); +- GST_ERROR_OBJECT (ice, "Failed to set TURN server '%s'", uri); +- g_free (uri); +- break; +- } +- ret = nice_agent_set_relay_info (ice->priv->nice_agent, +- item->nice_stream_id, NICE_COMPONENT_TYPE_RTCP, +- gst_uri_get_host (turn_server), gst_uri_get_port (turn_server), +- user, pass, relays[i]); +- if (!ret) { +- gchar *uri = gst_uri_to_string (turn_server); +- GST_ERROR_OBJECT (ice, "Failed to set TURN server '%s'", uri); +- g_free (uri); +- break; +- } +- } +- g_free (user); +- g_free (pass); +- +-out: +- g_free (ip); +-} +- +-typedef struct +-{ +- GstWebRTCICE *ice; +- struct NiceStreamItem *item; +-} AddTurnServerData; +- +-static void +-_add_turn_server_func (const gchar * uri, GstUri * turn_server, +- AddTurnServerData * data) +-{ +- _add_turn_server (data->ice, data->item, turn_server); +-} +- +-static void +-_add_stun_server (GstWebRTCICE * ice, 
GstUri * stun_server) +-{ +- const gchar *msg = "must be of the form stun://:"; +- const gchar *host; +- gchar *s = NULL; +- gchar *ip = NULL; +- guint port; +- +- s = gst_uri_to_string (stun_server); +- GST_DEBUG_OBJECT (ice, "adding stun server, %s", s); +- +- host = gst_uri_get_host (stun_server); +- if (!host) { +- GST_ERROR_OBJECT (ice, "Stun server '%s' has no host, %s", s, msg); +- goto out; +- } +- +- port = gst_uri_get_port (stun_server); +- if (port == GST_URI_NO_PORT) { +- GST_INFO_OBJECT (ice, "Stun server '%s' has no port, assuming 3478", s); +- port = 3478; +- gst_uri_set_port (stun_server, port); +- } +- +- ip = _resolve_host (ice, host); +- if (!ip) { +- GST_ERROR_OBJECT (ice, "Failed to resolve stun server '%s'", host); +- goto out; +- } +- +- g_object_set (ice->priv->nice_agent, "stun-server", ip, +- "stun-server-port", port, NULL); +- +-out: +- g_free (s); +- g_free (ip); +-} +- +-GstWebRTCICEStream * +-gst_webrtc_ice_add_stream (GstWebRTCICE * ice, guint session_id) +-{ +- struct NiceStreamItem m = NICE_MATCH_INIT; +- struct NiceStreamItem *item; +- AddTurnServerData add_data; +- +- m.session_id = session_id; +- item = _nice_stream_item_find (ice, (NiceStreamItemFindFunc) _match, &m); +- if (item) { +- GST_ERROR_OBJECT (ice, "stream already added with session_id=%u", +- session_id); +- return 0; +- } +- +- if (ice->stun_server) { +- _add_stun_server (ice, ice->stun_server); +- } +- +- item = _create_nice_stream_item (ice, session_id); +- +- if (ice->turn_server) { +- _add_turn_server (ice, item, ice->turn_server); +- } +- +- add_data.ice = ice; +- add_data.item = item; +- +- g_hash_table_foreach (ice->turn_servers, (GHFunc) _add_turn_server_func, +- &add_data); +- +- return item->stream; +-} +- +-static void +-_on_new_candidate (NiceAgent * agent, NiceCandidate * candidate, +- GstWebRTCICE * ice) +-{ +- struct NiceStreamItem *item; +- gchar *attr; +- +- item = _find_item (ice, -1, candidate->stream_id, NULL); +- if (!item) { +- GST_WARNING_OBJECT (ice, "received signal for non-existent stream %u", +- candidate->stream_id); +- return; +- } +- +- if (!candidate->username || !candidate->password) { +- gboolean got_credentials; +- gchar *ufrag, *password; +- +- got_credentials = nice_agent_get_local_credentials (ice->priv->nice_agent, +- candidate->stream_id, &ufrag, &password); +- g_warn_if_fail (got_credentials); +- +- if (!candidate->username) +- candidate->username = ufrag; +- else +- g_free (ufrag); +- +- if (!candidate->password) +- candidate->password = password; +- else +- g_free (password); +- } +- +- attr = nice_agent_generate_local_candidate_sdp (agent, candidate); +- +- if (ice->priv->on_candidate) +- ice->priv->on_candidate (ice, item->session_id, attr, +- ice->priv->on_candidate_data); +- +- g_free (attr); +-} +- +-GstWebRTCICETransport * +-gst_webrtc_ice_find_transport (GstWebRTCICE * ice, GstWebRTCICEStream * stream, +- GstWebRTCICEComponent component) +-{ +- struct NiceStreamItem *item; +- +- item = _find_item (ice, -1, -1, stream); +- g_return_val_if_fail (item != NULL, NULL); +- +- return gst_webrtc_ice_stream_find_transport (item->stream, component); +-} +- +-#if 0 +-/* TODO don't rely on libnice to (de)serialize candidates */ +-static NiceCandidateType +-_candidate_type_from_string (const gchar * s) +-{ +- if (g_strcmp0 (s, "host") == 0) { +- return NICE_CANDIDATE_TYPE_HOST; +- } else if (g_strcmp0 (s, "srflx") == 0) { +- return NICE_CANDIDATE_TYPE_SERVER_REFLEXIVE; +- } else if (g_strcmp0 (s, "prflx") == 0) { /* FIXME: is the right string? 
*/ +- return NICE_CANDIDATE_TYPE_PEER_REFLEXIVE; +- } else if (g_strcmp0 (s, "relay") == 0) { +- return NICE_CANDIDATE_TYPE_RELAY; +- } else { +- g_assert_not_reached (); +- return 0; +- } +-} +- +-static const gchar * +-_candidate_type_to_string (NiceCandidateType type) +-{ +- switch (type) { +- case NICE_CANDIDATE_TYPE_HOST: +- return "host"; +- case NICE_CANDIDATE_TYPE_SERVER_REFLEXIVE: +- return "srflx"; +- case NICE_CANDIDATE_TYPE_PEER_REFLEXIVE: +- return "prflx"; +- case NICE_CANDIDATE_TYPE_RELAY: +- return "relay"; +- default: +- g_assert_not_reached (); +- return NULL; +- } +-} +- +-static NiceCandidateTransport +-_candidate_transport_from_string (const gchar * s) +-{ +- if (g_strcmp0 (s, "UDP") == 0) { +- return NICE_CANDIDATE_TRANSPORT_UDP; +- } else if (g_strcmp0 (s, "TCP tcptype") == 0) { +- return NICE_CANDIDATE_TRANSPORT_TCP_ACTIVE; +- } else if (g_strcmp0 (s, "tcp-passive") == 0) { /* FIXME: is the right string? */ +- return NICE_CANDIDATE_TRANSPORT_TCP_PASSIVE; +- } else if (g_strcmp0 (s, "tcp-so") == 0) { +- return NICE_CANDIDATE_TRANSPORT_TCP_SO; +- } else { +- g_assert_not_reached (); +- return 0; +- } +-} +- +-static const gchar * +-_candidate_type_to_string (NiceCandidateType type) +-{ +- switch (type) { +- case NICE_CANDIDATE_TYPE_HOST: +- return "host"; +- case NICE_CANDIDATE_TYPE_SERVER_REFLEXIVE: +- return "srflx"; +- case NICE_CANDIDATE_TYPE_PEER_REFLEXIVE: +- return "prflx"; +- case NICE_CANDIDATE_TYPE_RELAY: +- return "relay"; +- default: +- g_assert_not_reached (); +- return NULL; +- } +-} +-#endif +- +-/* parse the address for possible resolution */ +-static gboolean +-get_candidate_address (const gchar * candidate, gchar ** prefix, +- gchar ** address, gchar ** postfix) +-{ +- char **tokens = NULL; +- +- if (!g_str_has_prefix (candidate, "a=candidate:")) { +- GST_ERROR ("candidate \"%s\" does not start with \"a=candidate:\"", +- candidate); +- goto failure; +- } +- +- if (!(tokens = g_strsplit (candidate, " ", 6))) { +- GST_ERROR ("candidate \"%s\" could not be tokenized", candidate); +- goto failure; +- } +- +- if (g_strv_length (tokens) < 6) { +- GST_ERROR ("candidate \"%s\" tokenization resulted in not enough tokens", +- candidate); +- goto failure; +- } +- +- if (address) +- *address = g_strdup (tokens[4]); +- tokens[4] = NULL; +- if (prefix) +- *prefix = g_strjoinv (" ", tokens); +- if (postfix) +- *postfix = g_strdup (tokens[5]); +- +- g_strfreev (tokens); +- return TRUE; +- +-failure: +- if (tokens) +- g_strfreev (tokens); +- return FALSE; +-} +- +-/* must start with "a=candidate:" */ +-void +-gst_webrtc_ice_add_candidate (GstWebRTCICE * ice, GstWebRTCICEStream * stream, +- const gchar * candidate) +-{ +- struct NiceStreamItem *item; +- NiceCandidate *cand; +- GSList *candidates = NULL; +- +- item = _find_item (ice, -1, -1, stream); +- g_return_if_fail (item != NULL); +- +- cand = +- nice_agent_parse_remote_candidate_sdp (ice->priv->nice_agent, +- item->nice_stream_id, candidate); +- if (!cand) { +- /* might be a .local candidate */ +- char *prefix = NULL, *address = NULL, *postfix = NULL; +- char *new_addr, *new_candidate; +- char *new_candv[4] = { NULL, }; +- +- if (!get_candidate_address (candidate, &prefix, &address, &postfix)) { +- GST_WARNING_OBJECT (ice, "Failed to retrieve address from candidate %s", +- candidate); +- goto fail; +- } +- +- if (!g_str_has_suffix (address, ".local")) { +- GST_WARNING_OBJECT (ice, "candidate address \'%s\' does not end " +- "with \'.local\'", address); +- goto fail; +- } +- +- /* FIXME: async */ +- if (!(new_addr 
= _resolve_host (ice, address))) { +- GST_WARNING_OBJECT (ice, "Failed to resolve %s", address); +- goto fail; +- } +- +- new_candv[0] = prefix; +- new_candv[1] = new_addr; +- new_candv[2] = postfix; +- new_candv[3] = NULL; +- new_candidate = g_strjoinv (" ", new_candv); +- +- GST_DEBUG_OBJECT (ice, "resolved to candidate %s", new_candidate); +- +- cand = +- nice_agent_parse_remote_candidate_sdp (ice->priv->nice_agent, +- item->nice_stream_id, new_candidate); +- g_free (new_candidate); +- if (!cand) { +- GST_WARNING_OBJECT (ice, "Could not parse candidate \'%s\'", +- new_candidate); +- goto fail; +- } +- +- g_free (prefix); +- g_free (new_addr); +- g_free (postfix); +- +- if (0) { +- fail: +- g_free (prefix); +- g_free (address); +- g_free (postfix); +- return; +- } +- } +- +- candidates = g_slist_append (candidates, cand); +- +- nice_agent_set_remote_candidates (ice->priv->nice_agent, item->nice_stream_id, +- cand->component_id, candidates); +- +- g_slist_free (candidates); +- nice_candidate_free (cand); +-} +- +-gboolean +-gst_webrtc_ice_set_remote_credentials (GstWebRTCICE * ice, +- GstWebRTCICEStream * stream, gchar * ufrag, gchar * pwd) +-{ +- struct NiceStreamItem *item; +- +- g_return_val_if_fail (ufrag != NULL, FALSE); +- g_return_val_if_fail (pwd != NULL, FALSE); +- item = _find_item (ice, -1, -1, stream); +- g_return_val_if_fail (item != NULL, FALSE); +- +- GST_DEBUG_OBJECT (ice, "Setting remote ICE credentials on " +- "ICE stream %u ufrag:%s pwd:%s", item->nice_stream_id, ufrag, pwd); +- +- nice_agent_set_remote_credentials (ice->priv->nice_agent, +- item->nice_stream_id, ufrag, pwd); +- +- return TRUE; +-} +- +-gboolean +-gst_webrtc_ice_add_turn_server (GstWebRTCICE * ice, const gchar * uri) +-{ +- gboolean ret = FALSE; +- GstUri *valid_uri; +- +- if (!(valid_uri = _validate_turn_server (ice, uri))) +- goto done; +- +- g_hash_table_insert (ice->turn_servers, g_strdup (uri), valid_uri); +- +- ret = TRUE; +- +-done: +- return ret; +-} +- +-static gboolean +-gst_webrtc_ice_add_local_ip_address (GstWebRTCICE * ice, const gchar * address) +-{ +- gboolean ret = FALSE; +- NiceAddress nice_addr; +- +- nice_address_init (&nice_addr); +- +- ret = nice_address_set_from_string (&nice_addr, address); +- +- if (ret) { +- ret = nice_agent_add_local_address (ice->priv->nice_agent, &nice_addr); +- if (!ret) { +- GST_ERROR_OBJECT (ice, "Failed to add local address to NiceAgent"); +- } +- } else { +- GST_ERROR_OBJECT (ice, "Failed to initialize NiceAddress [%s]", address); +- } +- +- return ret; +-} +- +-gboolean +-gst_webrtc_ice_set_local_credentials (GstWebRTCICE * ice, +- GstWebRTCICEStream * stream, gchar * ufrag, gchar * pwd) +-{ +- struct NiceStreamItem *item; +- +- g_return_val_if_fail (ufrag != NULL, FALSE); +- g_return_val_if_fail (pwd != NULL, FALSE); +- item = _find_item (ice, -1, -1, stream); +- g_return_val_if_fail (item != NULL, FALSE); +- +- GST_DEBUG_OBJECT (ice, "Setting local ICE credentials on " +- "ICE stream %u ufrag:%s pwd:%s", item->nice_stream_id, ufrag, pwd); +- +- nice_agent_set_local_credentials (ice->priv->nice_agent, item->nice_stream_id, +- ufrag, pwd); +- +- return TRUE; +-} +- +-gboolean +-gst_webrtc_ice_gather_candidates (GstWebRTCICE * ice, +- GstWebRTCICEStream * stream) +-{ +- struct NiceStreamItem *item; +- +- item = _find_item (ice, -1, -1, stream); +- g_return_val_if_fail (item != NULL, FALSE); +- +- GST_DEBUG_OBJECT (ice, "gather candidates for stream %u", +- item->nice_stream_id); +- +- return gst_webrtc_ice_stream_gather_candidates (stream); +-} +- +-void 
+-gst_webrtc_ice_set_is_controller (GstWebRTCICE * ice, gboolean controller) +-{ +- g_object_set (G_OBJECT (ice->priv->nice_agent), "controlling-mode", +- controller, NULL); +-} +- +-gboolean +-gst_webrtc_ice_get_is_controller (GstWebRTCICE * ice) +-{ +- gboolean ret; +- g_object_get (G_OBJECT (ice->priv->nice_agent), "controlling-mode", +- &ret, NULL); +- return ret; +-} +- +-void +-gst_webrtc_ice_set_force_relay (GstWebRTCICE * ice, gboolean force_relay) +-{ +- g_object_set (G_OBJECT (ice->priv->nice_agent), "force-relay", force_relay, +- NULL); +-} +- +-void +-gst_webrtc_ice_set_on_ice_candidate (GstWebRTCICE * ice, +- GstWebRTCIceOnCandidateFunc func, gpointer user_data, GDestroyNotify notify) +-{ +- if (ice->priv->on_candidate_notify) +- ice->priv->on_candidate_notify (ice->priv->on_candidate_data); +- ice->priv->on_candidate = NULL; +- +- ice->priv->on_candidate = func; +- ice->priv->on_candidate_data = user_data; +- ice->priv->on_candidate_notify = notify; +-} +- +-static void +-_clear_ice_stream (struct NiceStreamItem *item) +-{ +- if (!item) +- return; +- +- if (item->stream) { +- g_signal_handlers_disconnect_by_data (item->stream->ice->priv->nice_agent, +- item->stream); +- gst_object_unref (item->stream); +- } +-} +- +-static GstUri * +-_validate_turn_server (GstWebRTCICE * ice, const gchar * s) +-{ +- GstUri *uri = gst_uri_from_string_escaped (s); +- const gchar *userinfo, *scheme; +- GList *keys = NULL, *l; +- gchar *user = NULL, *pass = NULL; +- gboolean turn_tls = FALSE; +- guint port; +- +- GST_DEBUG_OBJECT (ice, "validating turn server, %s", s); +- +- if (!uri) { +- GST_ERROR_OBJECT (ice, "Could not parse turn server '%s'", s); +- return NULL; +- } +- +- scheme = gst_uri_get_scheme (uri); +- if (g_strcmp0 (scheme, "turn") == 0) { +- } else if (g_strcmp0 (scheme, "turns") == 0) { +- turn_tls = TRUE; +- } else { +- GST_ERROR_OBJECT (ice, "unknown scheme '%s'", scheme); +- goto out; +- } +- +- keys = gst_uri_get_query_keys (uri); +- for (l = keys; l; l = l->next) { +- gchar *key = l->data; +- +- if (g_strcmp0 (key, "transport") == 0) { +- const gchar *transport = gst_uri_get_query_value (uri, "transport"); +- if (!transport) { +- } else if (g_strcmp0 (transport, "udp") == 0) { +- } else if (g_strcmp0 (transport, "tcp") == 0) { +- } else { +- GST_ERROR_OBJECT (ice, "unknown transport value, '%s'", transport); +- goto out; +- } +- } else { +- GST_ERROR_OBJECT (ice, "unknown query key, '%s'", key); +- goto out; +- } +- } +- +- /* TODO: Implement error checking similar to the stun server below */ +- userinfo = gst_uri_get_userinfo (uri); +- _parse_userinfo (userinfo, &user, &pass); +- if (!user) { +- GST_ERROR_OBJECT (ice, "No username specified in '%s'", s); +- goto out; +- } +- if (!pass) { +- GST_ERROR_OBJECT (ice, "No password specified in '%s'", s); +- goto out; +- } +- +- port = gst_uri_get_port (uri); +- +- if (port == GST_URI_NO_PORT) { +- if (turn_tls) { +- gst_uri_set_port (uri, 5349); +- } else { +- gst_uri_set_port (uri, 3478); +- } +- } +- +-out: +- g_list_free (keys); +- g_free (user); +- g_free (pass); +- +- return uri; +-} +- +-void +-gst_webrtc_ice_set_stun_server (GstWebRTCICE * ice, const gchar * uri_s) +-{ +- GstUri *uri = gst_uri_from_string_escaped (uri_s); +- const gchar *msg = "must be of the form stun://:"; +- +- GST_DEBUG_OBJECT (ice, "setting stun server, %s", uri_s); +- +- if (!uri) { +- GST_ERROR_OBJECT (ice, "Couldn't parse stun server '%s', %s", uri_s, msg); +- return; +- } +- +- if (ice->stun_server) +- gst_uri_unref (ice->stun_server); +- 
ice->stun_server = uri; +-} +- +-gchar * +-gst_webrtc_ice_get_stun_server (GstWebRTCICE * ice) +-{ +- if (ice->stun_server) +- return gst_uri_to_string (ice->stun_server); +- else +- return NULL; +-} +- +-void +-gst_webrtc_ice_set_turn_server (GstWebRTCICE * ice, const gchar * uri_s) +-{ +- GstUri *uri = _validate_turn_server (ice, uri_s); +- +- if (uri) { +- if (ice->turn_server) +- gst_uri_unref (ice->turn_server); +- ice->turn_server = uri; +- } +-} +- +-gchar * +-gst_webrtc_ice_get_turn_server (GstWebRTCICE * ice) +-{ +- if (ice->turn_server) +- return gst_uri_to_string (ice->turn_server); +- else +- return NULL; +-} +- +-static void +-gst_webrtc_ice_set_property (GObject * object, guint prop_id, +- const GValue * value, GParamSpec * pspec) +-{ +- GstWebRTCICE *ice = GST_WEBRTC_ICE (object); +- +- switch (prop_id) { +- case PROP_ICE_TCP: +- g_object_set_property (G_OBJECT (ice->priv->nice_agent), +- "ice-tcp", value); +- break; +- case PROP_ICE_UDP: +- g_object_set_property (G_OBJECT (ice->priv->nice_agent), +- "ice-udp", value); +- break; +- default: +- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); +- break; +- } +-} +- +-static void +-gst_webrtc_ice_get_property (GObject * object, guint prop_id, +- GValue * value, GParamSpec * pspec) +-{ +- GstWebRTCICE *ice = GST_WEBRTC_ICE (object); +- +- switch (prop_id) { +- case PROP_AGENT: +- g_value_set_object (value, ice->priv->nice_agent); +- break; +- case PROP_ICE_TCP: +- g_object_get_property (G_OBJECT (ice->priv->nice_agent), +- "ice-tcp", value); +- break; +- case PROP_ICE_UDP: +- g_object_get_property (G_OBJECT (ice->priv->nice_agent), +- "ice-udp", value); +- break; +- default: +- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); +- break; +- } +-} +- +-static void +-gst_webrtc_ice_finalize (GObject * object) +-{ +- GstWebRTCICE *ice = GST_WEBRTC_ICE (object); +- +- g_signal_handlers_disconnect_by_data (ice->priv->nice_agent, ice); +- +- _stop_thread (ice); +- +- if (ice->priv->on_candidate_notify) +- ice->priv->on_candidate_notify (ice->priv->on_candidate_data); +- ice->priv->on_candidate = NULL; +- ice->priv->on_candidate_notify = NULL; +- +- if (ice->turn_server) +- gst_uri_unref (ice->turn_server); +- if (ice->stun_server) +- gst_uri_unref (ice->stun_server); +- +- g_mutex_clear (&ice->priv->lock); +- g_cond_clear (&ice->priv->cond); +- +- g_array_free (ice->priv->nice_stream_map, TRUE); +- +- g_object_unref (ice->priv->nice_agent); +- +- g_hash_table_unref (ice->turn_servers); +- +- G_OBJECT_CLASS (parent_class)->finalize (object); +-} +- +-static void +-gst_webrtc_ice_constructed (GObject * object) +-{ +- GstWebRTCICE *ice = GST_WEBRTC_ICE (object); +- +- _start_thread (ice); +- +- ice->priv->nice_agent = nice_agent_new (ice->priv->main_context, +- NICE_COMPATIBILITY_RFC5245); +- g_signal_connect (ice->priv->nice_agent, "new-candidate-full", +- G_CALLBACK (_on_new_candidate), ice); +- +- G_OBJECT_CLASS (parent_class)->constructed (object); +-} +- +-static void +-gst_webrtc_ice_class_init (GstWebRTCICEClass * klass) +-{ +- GObjectClass *gobject_class = (GObjectClass *) klass; +- +- gobject_class->constructed = gst_webrtc_ice_constructed; +- gobject_class->get_property = gst_webrtc_ice_get_property; +- gobject_class->set_property = gst_webrtc_ice_set_property; +- gobject_class->finalize = gst_webrtc_ice_finalize; +- +- g_object_class_install_property (gobject_class, +- PROP_AGENT, +- g_param_spec_object ("agent", "ICE agent", +- "ICE agent in use by this object. WARNING! 
Accessing this property " +- "may have disastrous consequences for the operation of webrtcbin. " +- "Other ICE implementations may not have the same interface.", +- NICE_TYPE_AGENT, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); +- +- g_object_class_install_property (gobject_class, +- PROP_ICE_TCP, +- g_param_spec_boolean ("ice-tcp", "ICE TCP", +- "Whether the agent should use ICE-TCP when gathering candidates", +- TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); +- +- g_object_class_install_property (gobject_class, +- PROP_ICE_UDP, +- g_param_spec_boolean ("ice-udp", "ICE UDP", +- "Whether the agent should use ICE-UDP when gathering candidates", +- TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); +- +- /** +- * GstWebRTCICE::add-local-ip-address: +- * @object: the #GstWebRTCICE +- * @address: The local IP address +- * +- * Add a local IP address to use for ICE candidate gathering. If none +- * are supplied, they will be discovered automatically. Calling this signal +- * stops automatic ICE gathering. +- * +- * Returns: whether the address could be added. +- */ +- gst_webrtc_ice_signals[ADD_LOCAL_IP_ADDRESS_SIGNAL] = +- g_signal_new_class_handler ("add-local-ip-address", +- G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, +- G_CALLBACK (gst_webrtc_ice_add_local_ip_address), NULL, NULL, +- g_cclosure_marshal_generic, G_TYPE_BOOLEAN, 1, G_TYPE_STRING); +-} +- +-static void +-gst_webrtc_ice_init (GstWebRTCICE * ice) +-{ +- ice->priv = gst_webrtc_ice_get_instance_private (ice); +- +- g_mutex_init (&ice->priv->lock); +- g_cond_init (&ice->priv->cond); +- +- ice->turn_servers = +- g_hash_table_new_full (g_str_hash, g_str_equal, g_free, +- (GDestroyNotify) gst_uri_unref); +- +- ice->priv->nice_stream_map = +- g_array_new (FALSE, TRUE, sizeof (struct NiceStreamItem)); +- g_array_set_clear_func (ice->priv->nice_stream_map, +- (GDestroyNotify) _clear_ice_stream); +-} +- +-GstWebRTCICE * +-gst_webrtc_ice_new (const gchar * name) +-{ +- return g_object_new (GST_TYPE_WEBRTC_ICE, "name", name, NULL); +-} +diff --git a/ext/webrtc/gstwebrtcice.h b/ext/webrtc/gstwebrtcice.h +deleted file mode 100644 +index d7c096550..000000000 +--- a/ext/webrtc/gstwebrtcice.h ++++ /dev/null +@@ -1,106 +0,0 @@ +-/* GStreamer +- * Copyright (C) 2017 Matthew Waters +- * +- * This library is free software; you can redistribute it and/or +- * modify it under the terms of the GNU Library General Public +- * License as published by the Free Software Foundation; either +- * version 2 of the License, or (at your option) any later version. +- * +- * This library is distributed in the hope that it will be useful, +- * but WITHOUT ANY WARRANTY; without even the implied warranty of +- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +- * Library General Public License for more details. +- * +- * You should have received a copy of the GNU Library General Public +- * License along with this library; if not, write to the +- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, +- * Boston, MA 02110-1301, USA. 
+- */ +- +-#ifndef __GST_WEBRTC_ICE_H__ +-#define __GST_WEBRTC_ICE_H__ +- +-#include +-#include +-#include +-#include "fwd.h" +- +-G_BEGIN_DECLS +- +-#define GST_WEBRTC_ICE_ERROR gst_webrtc_ice_error_quark () +-GQuark gst_webrtc_ice_error_quark (void); +- +-GType gst_webrtc_ice_get_type(void); +-#define GST_TYPE_WEBRTC_ICE (gst_webrtc_ice_get_type()) +-#define GST_WEBRTC_ICE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WEBRTC_ICE,GstWebRTCICE)) +-#define GST_IS_WEBRTC_ICE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WEBRTC_ICE)) +-#define GST_WEBRTC_ICE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_WEBRTC_ICE,GstWebRTCICEClass)) +-#define GST_IS_WEBRTC_ICE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_ICE)) +-#define GST_WEBRTC_ICE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_ICE,GstWebRTCICEClass)) +- +-struct _GstWebRTCICE +-{ +- GstObject parent; +- +- GstWebRTCICEGatheringState ice_gathering_state; +- GstWebRTCICEConnectionState ice_connection_state; +- +- GstUri *stun_server; +- GstUri *turn_server; +- +- GHashTable *turn_servers; +- +- GstWebRTCICEPrivate *priv; +-}; +- +-struct _GstWebRTCICEClass +-{ +- GstObjectClass parent_class; +-}; +- +-GstWebRTCICE * gst_webrtc_ice_new (const gchar * name); +-GstWebRTCICEStream * gst_webrtc_ice_add_stream (GstWebRTCICE * ice, +- guint session_id); +-GstWebRTCICETransport * gst_webrtc_ice_find_transport (GstWebRTCICE * ice, +- GstWebRTCICEStream * stream, +- GstWebRTCICEComponent component); +- +-gboolean gst_webrtc_ice_gather_candidates (GstWebRTCICE * ice, +- GstWebRTCICEStream * stream); +-/* FIXME: GstStructure-ize the candidate */ +-void gst_webrtc_ice_add_candidate (GstWebRTCICE * ice, +- GstWebRTCICEStream * stream, +- const gchar * candidate); +-gboolean gst_webrtc_ice_set_local_credentials (GstWebRTCICE * ice, +- GstWebRTCICEStream * stream, +- gchar * ufrag, +- gchar * pwd); +-gboolean gst_webrtc_ice_set_remote_credentials (GstWebRTCICE * ice, +- GstWebRTCICEStream * stream, +- gchar * ufrag, +- gchar * pwd); +-gboolean gst_webrtc_ice_add_turn_server (GstWebRTCICE * ice, +- const gchar * uri); +- +-void gst_webrtc_ice_set_is_controller (GstWebRTCICE * ice, +- gboolean controller); +-gboolean gst_webrtc_ice_get_is_controller (GstWebRTCICE * ice); +-void gst_webrtc_ice_set_force_relay (GstWebRTCICE * ice, +- gboolean force_relay); +-void gst_webrtc_ice_set_stun_server (GstWebRTCICE * ice, +- const gchar * uri); +-gchar * gst_webrtc_ice_get_stun_server (GstWebRTCICE * ice); +-void gst_webrtc_ice_set_turn_server (GstWebRTCICE * ice, +- const gchar * uri); +-gchar * gst_webrtc_ice_get_turn_server (GstWebRTCICE * ice); +- +-typedef void (*GstWebRTCIceOnCandidateFunc) (GstWebRTCICE * ice, guint stream_id, gchar * candidate, gpointer user_data); +- +-void gst_webrtc_ice_set_on_ice_candidate (GstWebRTCICE * ice, +- GstWebRTCIceOnCandidateFunc func, +- gpointer user_data, +- GDestroyNotify notify); +- +-G_END_DECLS +- +-#endif /* __GST_WEBRTC_ICE_H__ */ +diff --git a/ext/webrtc/gstwebrtcstats.c b/ext/webrtc/gstwebrtcstats.c +index 5ff2bd6d2..5bc3e2298 100644 +--- a/ext/webrtc/gstwebrtcstats.c ++++ b/ext/webrtc/gstwebrtcstats.c +@@ -71,11 +71,14 @@ _set_base_stats (GstStructure * s, GstWebRTCStatsType type, double ts, + static GstStructure * + _get_peer_connection_stats (GstWebRTCBin * webrtc) + { +- GstStructure *s = gst_structure_new_empty ("unused"); ++ guint opened; ++ guint closed; ++ GstStructure *s = gst_structure_new_empty ("peer-connection-stats"); + +- /* FIXME: datachannel */ +- 
gst_structure_set (s, "data-channels-opened", G_TYPE_UINT, 0,
+- "data-channels-closed", G_TYPE_UINT, 0, "data-channels-requested",
++ gst_webrtc_bin_get_peer_connection_stats (webrtc, &opened, &closed);
++
++ gst_structure_set (s, "data-channels-opened", G_TYPE_UINT, opened,
++ "data-channels-closed", G_TYPE_UINT, closed, "data-channels-requested",
+ G_TYPE_UINT, 0, "data-channels-accepted", G_TYPE_UINT, 0, NULL);
+
+ return s;
+@@ -581,9 +584,9 @@ _get_stats_from_ice_candidates (GstWebRTCBin * webrtc,
+ can->stream_id, can->ipaddr, can->port);
+ stats = gst_structure_new_empty (id);
+
+- if (strcmp (candidate_tag, "local")) {
++ if (g_str_equal (candidate_tag, "local")) {
+ type = GST_WEBRTC_STATS_LOCAL_CANDIDATE;
+- } else if (strcmp (candidate_tag, "remote")) {
++ } else if (g_str_equal (candidate_tag, "remote")) {
+ type = GST_WEBRTC_STATS_REMOTE_CANDIDATE;
+ } else {
+ GST_WARNING_OBJECT (webrtc, "Invalid ice candidate tag: %s", candidate_tag);
+@@ -792,13 +795,16 @@ _get_codec_stats_from_pad (GstWebRTCBin * webrtc, GstPad * pad,
+
+ if (wpad->received_caps)
+ caps = gst_caps_ref (wpad->received_caps);
++ else
++ caps = gst_pad_get_current_caps (pad);
++
+ GST_DEBUG_OBJECT (pad, "Pad caps are: %" GST_PTR_FORMAT, caps);
+ if (caps && gst_caps_is_fixed (caps)) {
+ GstStructure *caps_s = gst_caps_get_structure (caps, 0);
+ gint pt;
+ const gchar *encoding_name, *media, *encoding_params;
+- GstSDPMedia sdp_media = { 0 };
+ guint channels = 0;
++ const gchar *fmtp;
+
+ if (gst_structure_get_int (caps_s, "payload", &pt))
+ gst_structure_set (stats, "payload-type", G_TYPE_UINT, pt, NULL);
+@@ -834,15 +840,10 @@ _get_codec_stats_from_pad (GstWebRTCBin * webrtc, GstPad * pad,
+ else
+ gst_structure_set (stats, "codec-type", G_TYPE_STRING, "encode", NULL);
+
+- gst_sdp_media_init (&sdp_media);
+- if (gst_sdp_media_set_media_from_caps (caps, &sdp_media) == GST_SDP_OK) {
+- const gchar *fmtp = gst_sdp_media_get_attribute_val (&sdp_media, "fmtp");
+-
+- if (fmtp) {
+- gst_structure_set (stats, "sdp-fmtp-line", G_TYPE_STRING, fmtp, NULL);
+- }
++ fmtp = gst_structure_get_string (caps_s, "a-fmtp");
++ if (fmtp) {
++ gst_structure_set (stats, "sdp-fmtp-line", G_TYPE_STRING, fmtp, NULL);
+ }
+- gst_sdp_media_uninit (&sdp_media);
+
+ /* FIXME: transportId */
+ }
+@@ -974,6 +975,7 @@ _get_stats_from_pad (GstWebRTCBin * webrtc, GstPad * pad, GstStructure * s)
+ ts_stats.source_stats->n_values, ts_stats.stream->transport);
+
+ ts_stats.s = s;
++ ts_stats.clock_rate = clock_rate;
+
+ transport_stream_find_ssrc_map_item (ts_stats.stream, &ts_stats,
+ (FindSsrcMapFunc) webrtc_stats_get_from_transport);
+diff --git a/ext/webrtc/icestream.c b/ext/webrtc/icestream.c
+deleted file mode 100644
+index e51af7044..000000000
+--- a/ext/webrtc/icestream.c
++++ /dev/null
+@@ -1,235 +0,0 @@
+-/* GStreamer
+- * Copyright (C) 2017 Matthew Waters
+- *
+- * This library is free software; you can redistribute it and/or
+- * modify it under the terms of the GNU Library General Public
+- * License as published by the Free Software Foundation; either
+- * version 2 of the License, or (at your option) any later version.
+- *
+- * This library is distributed in the hope that it will be useful,
+- * but WITHOUT ANY WARRANTY; without even the implied warranty of
+- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+- * Library General Public License for more details.
+- * +- * You should have received a copy of the GNU Library General Public +- * License along with this library; if not, write to the +- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, +- * Boston, MA 02110-1301, USA. +- */ +- +-#ifdef HAVE_CONFIG_H +-# include "config.h" +-#endif +- +-#include "icestream.h" +-#include "nicetransport.h" +- +-#define GST_CAT_DEFAULT gst_webrtc_ice_stream_debug +-GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); +- +-enum +-{ +- SIGNAL_0, +- LAST_SIGNAL, +-}; +- +-enum +-{ +- PROP_0, +- PROP_ICE, +- PROP_STREAM_ID, +-}; +- +-//static guint gst_webrtc_ice_stream_signals[LAST_SIGNAL] = { 0 }; +- +-struct _GstWebRTCICEStreamPrivate +-{ +- gboolean gathered; +- GList *transports; +-}; +- +-#define gst_webrtc_ice_stream_parent_class parent_class +-G_DEFINE_TYPE_WITH_CODE (GstWebRTCICEStream, gst_webrtc_ice_stream, +- GST_TYPE_OBJECT, G_ADD_PRIVATE (GstWebRTCICEStream) +- GST_DEBUG_CATEGORY_INIT (gst_webrtc_ice_stream_debug, +- "webrtcicestream", 0, "webrtcicestream");); +- +-static void +-gst_webrtc_ice_stream_set_property (GObject * object, guint prop_id, +- const GValue * value, GParamSpec * pspec) +-{ +- GstWebRTCICEStream *stream = GST_WEBRTC_ICE_STREAM (object); +- +- switch (prop_id) { +- case PROP_ICE: +- /* XXX: weak-ref this? */ +- stream->ice = g_value_get_object (value); +- break; +- case PROP_STREAM_ID: +- stream->stream_id = g_value_get_uint (value); +- break; +- default: +- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); +- break; +- } +-} +- +-static void +-gst_webrtc_ice_stream_get_property (GObject * object, guint prop_id, +- GValue * value, GParamSpec * pspec) +-{ +- GstWebRTCICEStream *stream = GST_WEBRTC_ICE_STREAM (object); +- +- switch (prop_id) { +- case PROP_ICE: +- g_value_set_object (value, stream->ice); +- break; +- case PROP_STREAM_ID: +- g_value_set_uint (value, stream->stream_id); +- break; +- default: +- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); +- break; +- } +-} +- +-static void +-gst_webrtc_ice_stream_finalize (GObject * object) +-{ +- GstWebRTCICEStream *stream = GST_WEBRTC_ICE_STREAM (object); +- +- g_list_free (stream->priv->transports); +- stream->priv->transports = NULL; +- +- G_OBJECT_CLASS (parent_class)->finalize (object); +-} +- +-static void +-_on_candidate_gathering_done (NiceAgent * agent, guint stream_id, +- GstWebRTCICEStream * ice) +-{ +- GList *l; +- +- if (stream_id != ice->stream_id) +- return; +- +- GST_DEBUG_OBJECT (ice, "%u gathering done", stream_id); +- +- ice->priv->gathered = TRUE; +- +- for (l = ice->priv->transports; l; l = l->next) { +- GstWebRTCICETransport *ice = l->data; +- +- gst_webrtc_ice_transport_gathering_state_change (ice, +- GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE); +- } +-} +- +-GstWebRTCICETransport * +-gst_webrtc_ice_stream_find_transport (GstWebRTCICEStream * stream, +- GstWebRTCICEComponent component) +-{ +- GstWebRTCICEComponent trans_comp; +- GstWebRTCICETransport *ret; +- GList *l; +- +- g_return_val_if_fail (GST_IS_WEBRTC_ICE_STREAM (stream), NULL); +- +- for (l = stream->priv->transports; l; l = l->next) { +- GstWebRTCICETransport *trans = l->data; +- g_object_get (trans, "component", &trans_comp, NULL); +- +- if (component == trans_comp) +- return gst_object_ref (trans); +- } +- +- ret = +- GST_WEBRTC_ICE_TRANSPORT (gst_webrtc_nice_transport_new (stream, +- component)); +- stream->priv->transports = g_list_prepend (stream->priv->transports, ret); +- +- return ret; +-} +- +-static void +-gst_webrtc_ice_stream_constructed (GObject * object) 
+-{ +- GstWebRTCICEStream *stream = GST_WEBRTC_ICE_STREAM (object); +- NiceAgent *agent; +- +- g_object_get (stream->ice, "agent", &agent, NULL); +- g_signal_connect (agent, "candidate-gathering-done", +- G_CALLBACK (_on_candidate_gathering_done), stream); +- +- g_object_unref (agent); +- +- G_OBJECT_CLASS (parent_class)->constructed (object); +-} +- +-gboolean +-gst_webrtc_ice_stream_gather_candidates (GstWebRTCICEStream * stream) +-{ +- NiceAgent *agent; +- GList *l; +- +- g_return_val_if_fail (GST_IS_WEBRTC_ICE_STREAM (stream), FALSE); +- +- GST_DEBUG_OBJECT (stream, "start gathering candidates"); +- +- if (stream->priv->gathered) +- return TRUE; +- +- for (l = stream->priv->transports; l; l = l->next) { +- GstWebRTCICETransport *trans = l->data; +- +- gst_webrtc_ice_transport_gathering_state_change (trans, +- GST_WEBRTC_ICE_GATHERING_STATE_GATHERING); +- } +- +- g_object_get (stream->ice, "agent", &agent, NULL); +- if (!nice_agent_gather_candidates (agent, stream->stream_id)) { +- g_object_unref (agent); +- return FALSE; +- } +- +- g_object_unref (agent); +- return TRUE; +-} +- +-static void +-gst_webrtc_ice_stream_class_init (GstWebRTCICEStreamClass * klass) +-{ +- GObjectClass *gobject_class = (GObjectClass *) klass; +- +- gobject_class->constructed = gst_webrtc_ice_stream_constructed; +- gobject_class->get_property = gst_webrtc_ice_stream_get_property; +- gobject_class->set_property = gst_webrtc_ice_stream_set_property; +- gobject_class->finalize = gst_webrtc_ice_stream_finalize; +- +- g_object_class_install_property (gobject_class, +- PROP_ICE, +- g_param_spec_object ("ice", +- "ICE", "ICE agent associated with this stream", +- GST_TYPE_WEBRTC_ICE, +- G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS)); +- +- g_object_class_install_property (gobject_class, +- PROP_STREAM_ID, +- g_param_spec_uint ("stream-id", +- "ICE stream id", "ICE stream id associated with this stream", +- 0, G_MAXUINT, 0, +- G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS)); +-} +- +-static void +-gst_webrtc_ice_stream_init (GstWebRTCICEStream * ice) +-{ +- ice->priv = gst_webrtc_ice_stream_get_instance_private (ice); +-} +- +-GstWebRTCICEStream * +-gst_webrtc_ice_stream_new (GstWebRTCICE * ice, guint stream_id) +-{ +- return g_object_new (GST_TYPE_WEBRTC_ICE_STREAM, "ice", ice, +- "stream-id", stream_id, NULL); +-} +diff --git a/ext/webrtc/icestream.h b/ext/webrtc/icestream.h +deleted file mode 100644 +index 6bf67ea78..000000000 +--- a/ext/webrtc/icestream.h ++++ /dev/null +@@ -1,63 +0,0 @@ +-/* GStreamer +- * Copyright (C) 2017 Matthew Waters +- * +- * This library is free software; you can redistribute it and/or +- * modify it under the terms of the GNU Library General Public +- * License as published by the Free Software Foundation; either +- * version 2 of the License, or (at your option) any later version. +- * +- * This library is distributed in the hope that it will be useful, +- * but WITHOUT ANY WARRANTY; without even the implied warranty of +- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +- * Library General Public License for more details. +- * +- * You should have received a copy of the GNU Library General Public +- * License along with this library; if not, write to the +- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, +- * Boston, MA 02110-1301, USA. 
+- */ +- +-#ifndef __GST_WEBRTC_ICE_STREAM_H__ +-#define __GST_WEBRTC_ICE_STREAM_H__ +- +-#include +-/* libice */ +-#include +-#include +-#include "gstwebrtcice.h" +- +-G_BEGIN_DECLS +- +-GType gst_webrtc_ice_stream_get_type(void); +-#define GST_TYPE_WEBRTC_ICE_STREAM (gst_webrtc_ice_stream_get_type()) +-#define GST_WEBRTC_ICE_STREAM(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WEBRTC_ICE_STREAM,GstWebRTCICEStream)) +-#define GST_IS_WEBRTC_ICE_STREAM(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WEBRTC_ICE_STREAM)) +-#define GST_WEBRTC_ICE_STREAM_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_WEBRTC_ICE_STREAM,GstWebRTCICEStreamClass)) +-#define GST_IS_WEBRTC_ICE_STREAM_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_ICE_STREAM)) +-#define GST_WEBRTC_ICE_STREAM_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_ICE_STREAM,GstWebRTCICEStreamClass)) +- +-struct _GstWebRTCICEStream +-{ +- GstObject parent; +- +- GstWebRTCICE *ice; +- +- guint stream_id; +- +- GstWebRTCICEStreamPrivate *priv; +-}; +- +-struct _GstWebRTCICEStreamClass +-{ +- GstObjectClass parent_class; +-}; +- +-GstWebRTCICEStream * gst_webrtc_ice_stream_new (GstWebRTCICE * ice, +- guint stream_id); +-GstWebRTCICETransport * gst_webrtc_ice_stream_find_transport (GstWebRTCICEStream * stream, +- GstWebRTCICEComponent component); +-gboolean gst_webrtc_ice_stream_gather_candidates (GstWebRTCICEStream * ice); +- +-G_END_DECLS +- +-#endif /* __GST_WEBRTC_ICE_STREAM_H__ */ +diff --git a/ext/webrtc/nicetransport.c b/ext/webrtc/nicetransport.c +deleted file mode 100644 +index bbe910f80..000000000 +--- a/ext/webrtc/nicetransport.c ++++ /dev/null +@@ -1,262 +0,0 @@ +-/* GStreamer +- * Copyright (C) 2017 Matthew Waters +- * +- * This library is free software; you can redistribute it and/or +- * modify it under the terms of the GNU Library General Public +- * License as published by the Free Software Foundation; either +- * version 2 of the License, or (at your option) any later version. +- * +- * This library is distributed in the hope that it will be useful, +- * but WITHOUT ANY WARRANTY; without even the implied warranty of +- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +- * Library General Public License for more details. +- * +- * You should have received a copy of the GNU Library General Public +- * License along with this library; if not, write to the +- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, +- * Boston, MA 02110-1301, USA. 
+- */ +- +-#ifdef HAVE_CONFIG_H +-# include "config.h" +-#endif +- +-#include "nicetransport.h" +-#include "icestream.h" +- +-#define GST_CAT_DEFAULT gst_webrtc_nice_transport_debug +-GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); +- +-enum +-{ +- SIGNAL_0, +- LAST_SIGNAL, +-}; +- +-enum +-{ +- PROP_0, +- PROP_STREAM, +-}; +- +-//static guint gst_webrtc_nice_transport_signals[LAST_SIGNAL] = { 0 }; +- +-struct _GstWebRTCNiceTransportPrivate +-{ +- gboolean running; +-}; +- +-#define gst_webrtc_nice_transport_parent_class parent_class +-G_DEFINE_TYPE_WITH_CODE (GstWebRTCNiceTransport, gst_webrtc_nice_transport, +- GST_TYPE_WEBRTC_ICE_TRANSPORT, G_ADD_PRIVATE (GstWebRTCNiceTransport) +- GST_DEBUG_CATEGORY_INIT (gst_webrtc_nice_transport_debug, +- "webrtcnicetransport", 0, "webrtcnicetransport"); +- ); +- +-static NiceComponentType +-_gst_component_to_nice (GstWebRTCICEComponent component) +-{ +- switch (component) { +- case GST_WEBRTC_ICE_COMPONENT_RTP: +- return NICE_COMPONENT_TYPE_RTP; +- case GST_WEBRTC_ICE_COMPONENT_RTCP: +- return NICE_COMPONENT_TYPE_RTCP; +- default: +- g_assert_not_reached (); +- return 0; +- } +-} +- +-static GstWebRTCICEComponent +-_nice_component_to_gst (NiceComponentType component) +-{ +- switch (component) { +- case NICE_COMPONENT_TYPE_RTP: +- return GST_WEBRTC_ICE_COMPONENT_RTP; +- case NICE_COMPONENT_TYPE_RTCP: +- return GST_WEBRTC_ICE_COMPONENT_RTCP; +- default: +- g_assert_not_reached (); +- return 0; +- } +-} +- +-static GstWebRTCICEConnectionState +-_nice_component_state_to_gst (NiceComponentState state) +-{ +- switch (state) { +- case NICE_COMPONENT_STATE_DISCONNECTED: +- return GST_WEBRTC_ICE_CONNECTION_STATE_DISCONNECTED; +- case NICE_COMPONENT_STATE_GATHERING: +- return GST_WEBRTC_ICE_CONNECTION_STATE_NEW; +- case NICE_COMPONENT_STATE_CONNECTING: +- return GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING; +- case NICE_COMPONENT_STATE_CONNECTED: +- return GST_WEBRTC_ICE_CONNECTION_STATE_CONNECTED; +- case NICE_COMPONENT_STATE_READY: +- return GST_WEBRTC_ICE_CONNECTION_STATE_COMPLETED; +- case NICE_COMPONENT_STATE_FAILED: +- return GST_WEBRTC_ICE_CONNECTION_STATE_FAILED; +- default: +- g_assert_not_reached (); +- return 0; +- } +-} +- +-static void +-gst_webrtc_nice_transport_set_property (GObject * object, guint prop_id, +- const GValue * value, GParamSpec * pspec) +-{ +- GstWebRTCNiceTransport *nice = GST_WEBRTC_NICE_TRANSPORT (object); +- +- switch (prop_id) { +- case PROP_STREAM: +- if (nice->stream) +- gst_object_unref (nice->stream); +- nice->stream = g_value_dup_object (value); +- break; +- default: +- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); +- break; +- } +-} +- +-static void +-gst_webrtc_nice_transport_get_property (GObject * object, guint prop_id, +- GValue * value, GParamSpec * pspec) +-{ +- GstWebRTCNiceTransport *nice = GST_WEBRTC_NICE_TRANSPORT (object); +- +- switch (prop_id) { +- case PROP_STREAM: +- g_value_set_object (value, nice->stream); +- break; +- default: +- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); +- break; +- } +-} +- +-static void +-gst_webrtc_nice_transport_finalize (GObject * object) +-{ +- GstWebRTCNiceTransport *nice = GST_WEBRTC_NICE_TRANSPORT (object); +- +- gst_object_unref (nice->stream); +- +- G_OBJECT_CLASS (parent_class)->finalize (object); +-} +- +-static void +-_on_new_selected_pair (NiceAgent * agent, guint stream_id, +- NiceComponentType component, NiceCandidate * lcandidate, +- NiceCandidate * rcandidate, GstWebRTCNiceTransport * nice) +-{ +- GstWebRTCICETransport *ice = 
GST_WEBRTC_ICE_TRANSPORT (nice); +- GstWebRTCICEComponent comp = _nice_component_to_gst (component); +- guint our_stream_id; +- +- g_object_get (nice->stream, "stream-id", &our_stream_id, NULL); +- +- if (stream_id != our_stream_id) +- return; +- if (comp != ice->component) +- return; +- +- gst_webrtc_ice_transport_selected_pair_change (ice); +-} +- +-static void +-_on_component_state_changed (NiceAgent * agent, guint stream_id, +- NiceComponentType component, NiceComponentState state, +- GstWebRTCNiceTransport * nice) +-{ +- GstWebRTCICETransport *ice = GST_WEBRTC_ICE_TRANSPORT (nice); +- GstWebRTCICEComponent comp = _nice_component_to_gst (component); +- guint our_stream_id; +- +- g_object_get (nice->stream, "stream-id", &our_stream_id, NULL); +- +- if (stream_id != our_stream_id) +- return; +- if (comp != ice->component) +- return; +- +- GST_DEBUG_OBJECT (ice, "%u %u %s", stream_id, component, +- nice_component_state_to_string (state)); +- +- gst_webrtc_ice_transport_connection_state_change (ice, +- _nice_component_state_to_gst (state)); +-} +- +-static void +-gst_webrtc_nice_transport_constructed (GObject * object) +-{ +- GstWebRTCNiceTransport *nice = GST_WEBRTC_NICE_TRANSPORT (object); +- GstWebRTCICETransport *ice = GST_WEBRTC_ICE_TRANSPORT (object); +- NiceComponentType component = _gst_component_to_nice (ice->component); +- gboolean controlling_mode; +- guint our_stream_id; +- NiceAgent *agent; +- +- g_object_get (nice->stream, "stream-id", &our_stream_id, NULL); +- g_object_get (nice->stream->ice, "agent", &agent, NULL); +- +- g_object_get (agent, "controlling-mode", &controlling_mode, NULL); +- ice->role = +- controlling_mode ? GST_WEBRTC_ICE_ROLE_CONTROLLING : +- GST_WEBRTC_ICE_ROLE_CONTROLLED; +- +- g_signal_connect (agent, "component-state-changed", +- G_CALLBACK (_on_component_state_changed), nice); +- g_signal_connect (agent, "new-selected-pair-full", +- G_CALLBACK (_on_new_selected_pair), nice); +- +- ice->src = gst_element_factory_make ("nicesrc", NULL); +- if (ice->src) { +- g_object_set (ice->src, "agent", agent, "stream", our_stream_id, +- "component", component, NULL); +- } +- ice->sink = gst_element_factory_make ("nicesink", NULL); +- if (ice->sink) { +- g_object_set (ice->sink, "agent", agent, "stream", our_stream_id, +- "component", component, "async", FALSE, "enable-last-sample", FALSE, +- "sync", FALSE, NULL); +- } +- +- g_object_unref (agent); +- +- G_OBJECT_CLASS (parent_class)->constructed (object); +-} +- +-static void +-gst_webrtc_nice_transport_class_init (GstWebRTCNiceTransportClass * klass) +-{ +- GObjectClass *gobject_class = (GObjectClass *) klass; +- +- gobject_class->constructed = gst_webrtc_nice_transport_constructed; +- gobject_class->get_property = gst_webrtc_nice_transport_get_property; +- gobject_class->set_property = gst_webrtc_nice_transport_set_property; +- gobject_class->finalize = gst_webrtc_nice_transport_finalize; +- +- g_object_class_install_property (gobject_class, +- PROP_STREAM, +- g_param_spec_object ("stream", +- "WebRTC ICE Stream", "ICE stream associated with this transport", +- GST_TYPE_WEBRTC_ICE_STREAM, +- G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS)); +-} +- +-static void +-gst_webrtc_nice_transport_init (GstWebRTCNiceTransport * nice) +-{ +- nice->priv = gst_webrtc_nice_transport_get_instance_private (nice); +-} +- +-GstWebRTCNiceTransport * +-gst_webrtc_nice_transport_new (GstWebRTCICEStream * stream, +- GstWebRTCICEComponent component) +-{ +- return g_object_new (GST_TYPE_WEBRTC_NICE_TRANSPORT, 
"stream", stream, +- "component", component, NULL); +-} +diff --git a/ext/webrtc/nicetransport.h b/ext/webrtc/nicetransport.h +deleted file mode 100644 +index f36e1ccb9..000000000 +--- a/ext/webrtc/nicetransport.h ++++ /dev/null +@@ -1,58 +0,0 @@ +-/* GStreamer +- * Copyright (C) 2017 Matthew Waters +- * +- * This library is free software; you can redistribute it and/or +- * modify it under the terms of the GNU Library General Public +- * License as published by the Free Software Foundation; either +- * version 2 of the License, or (at your option) any later version. +- * +- * This library is distributed in the hope that it will be useful, +- * but WITHOUT ANY WARRANTY; without even the implied warranty of +- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +- * Library General Public License for more details. +- * +- * You should have received a copy of the GNU Library General Public +- * License along with this library; if not, write to the +- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, +- * Boston, MA 02110-1301, USA. +- */ +- +-#ifndef __GST_WEBRTC_NICE_TRANSPORT_H__ +-#define __GST_WEBRTC_NICE_TRANSPORT_H__ +- +-#include +-/* libnice */ +-#include +-#include +-#include "gstwebrtcice.h" +- +-G_BEGIN_DECLS +- +-GType gst_webrtc_nice_transport_get_type(void); +-#define GST_TYPE_WEBRTC_NICE_TRANSPORT (gst_webrtc_nice_transport_get_type()) +-#define GST_WEBRTC_NICE_TRANSPORT(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WEBRTC_NICE_TRANSPORT,GstWebRTCNiceTransport)) +-#define GST_IS_WEBRTC_NICE_TRANSPORT(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WEBRTC_NICE_TRANSPORT)) +-#define GST_WEBRTC_NICE_TRANSPORT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_WEBRTC_NICE_TRANSPORT,GstWebRTCNiceTransportClass)) +-#define GST_IS_WEBRTC_NICE_TRANSPORT_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_NICE_TRANSPORT)) +-#define GST_WEBRTC_NICE_TRANSPORT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_NICE_TRANSPORT,GstWebRTCNiceTransportClass)) +- +-struct _GstWebRTCNiceTransport +-{ +- GstWebRTCICETransport parent; +- +- GstWebRTCICEStream *stream; +- +- GstWebRTCNiceTransportPrivate *priv; +-}; +- +-struct _GstWebRTCNiceTransportClass +-{ +- GstWebRTCICETransportClass parent_class; +-}; +- +-GstWebRTCNiceTransport * gst_webrtc_nice_transport_new (GstWebRTCICEStream * stream, +- GstWebRTCICEComponent component); +- +-G_END_DECLS +- +-#endif /* __GST_WEBRTC_NICE_TRANSPORT_H__ */ +diff --git a/ext/webrtc/sctptransport.c b/ext/webrtc/sctptransport.c +deleted file mode 100644 +index f5a1e9db4..000000000 +--- a/ext/webrtc/sctptransport.c ++++ /dev/null +@@ -1,270 +0,0 @@ +-/* GStreamer +- * Copyright (C) 2018 Matthew Waters +- * +- * This library is free software; you can redistribute it and/or +- * modify it under the terms of the GNU Library General Public +- * License as published by the Free Software Foundation; either +- * version 2 of the License, or (at your option) any later version. +- * +- * This library is distributed in the hope that it will be useful, +- * but WITHOUT ANY WARRANTY; without even the implied warranty of +- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +- * Library General Public License for more details. +- * +- * You should have received a copy of the GNU Library General Public +- * License along with this library; if not, write to the +- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, +- * Boston, MA 02110-1301, USA. 
+- */ +- +-#ifdef HAVE_CONFIG_H +-# include "config.h" +-#endif +- +-#include +- +-#include "sctptransport.h" +-#include "gstwebrtcbin.h" +- +-#define GST_CAT_DEFAULT gst_webrtc_sctp_transport_debug +-GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); +- +-enum +-{ +- SIGNAL_0, +- ON_RESET_STREAM_SIGNAL, +- LAST_SIGNAL, +-}; +- +-enum +-{ +- PROP_0, +- PROP_TRANSPORT, +- PROP_STATE, +- PROP_MAX_MESSAGE_SIZE, +- PROP_MAX_CHANNELS, +-}; +- +-static guint gst_webrtc_sctp_transport_signals[LAST_SIGNAL] = { 0 }; +- +-#define gst_webrtc_sctp_transport_parent_class parent_class +-G_DEFINE_TYPE_WITH_CODE (GstWebRTCSCTPTransport, gst_webrtc_sctp_transport, +- GST_TYPE_OBJECT, GST_DEBUG_CATEGORY_INIT (gst_webrtc_sctp_transport_debug, +- "webrtcsctptransport", 0, "webrtcsctptransport");); +- +-typedef void (*SCTPTask) (GstWebRTCSCTPTransport * sctp, gpointer user_data); +- +-struct task +-{ +- GstWebRTCSCTPTransport *sctp; +- SCTPTask func; +- gpointer user_data; +- GDestroyNotify notify; +-}; +- +-static void +-_execute_task (GstWebRTCBin * webrtc, struct task *task) +-{ +- if (task->func) +- task->func (task->sctp, task->user_data); +-} +- +-static void +-_free_task (struct task *task) +-{ +- gst_object_unref (task->sctp); +- +- if (task->notify) +- task->notify (task->user_data); +- g_free (task); +-} +- +-static void +-_sctp_enqueue_task (GstWebRTCSCTPTransport * sctp, SCTPTask func, +- gpointer user_data, GDestroyNotify notify) +-{ +- struct task *task = g_new0 (struct task, 1); +- +- task->sctp = gst_object_ref (sctp); +- task->func = func; +- task->user_data = user_data; +- task->notify = notify; +- +- gst_webrtc_bin_enqueue_task (sctp->webrtcbin, +- (GstWebRTCBinFunc) _execute_task, task, (GDestroyNotify) _free_task, +- NULL); +-} +- +-static void +-_emit_stream_reset (GstWebRTCSCTPTransport * sctp, gpointer user_data) +-{ +- guint stream_id = GPOINTER_TO_UINT (user_data); +- +- g_signal_emit (sctp, +- gst_webrtc_sctp_transport_signals[ON_RESET_STREAM_SIGNAL], 0, stream_id); +-} +- +-static void +-_on_sctp_dec_pad_removed (GstElement * sctpdec, GstPad * pad, +- GstWebRTCSCTPTransport * sctp) +-{ +- guint stream_id; +- +- if (sscanf (GST_PAD_NAME (pad), "src_%u", &stream_id) != 1) +- return; +- +- _sctp_enqueue_task (sctp, (SCTPTask) _emit_stream_reset, +- GUINT_TO_POINTER (stream_id), NULL); +-} +- +-static void +-_on_sctp_association_established (GstElement * sctpenc, gboolean established, +- GstWebRTCSCTPTransport * sctp) +-{ +- GST_OBJECT_LOCK (sctp); +- if (established) +- sctp->state = GST_WEBRTC_SCTP_TRANSPORT_STATE_CONNECTED; +- else +- sctp->state = GST_WEBRTC_SCTP_TRANSPORT_STATE_CLOSED; +- sctp->association_established = established; +- GST_OBJECT_UNLOCK (sctp); +- +- g_object_notify (G_OBJECT (sctp), "state"); +-} +- +-static void +-gst_webrtc_sctp_transport_set_property (GObject * object, guint prop_id, +- const GValue * value, GParamSpec * pspec) +-{ +-// GstWebRTCSCTPTransport *sctp = GST_WEBRTC_SCTP_TRANSPORT (object); +- +- switch (prop_id) { +- default: +- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); +- break; +- } +-} +- +-static void +-gst_webrtc_sctp_transport_get_property (GObject * object, guint prop_id, +- GValue * value, GParamSpec * pspec) +-{ +- GstWebRTCSCTPTransport *sctp = GST_WEBRTC_SCTP_TRANSPORT (object); +- +- switch (prop_id) { +- case PROP_TRANSPORT: +- g_value_set_object (value, sctp->transport); +- break; +- case PROP_STATE: +- g_value_set_enum (value, sctp->state); +- break; +- case PROP_MAX_MESSAGE_SIZE: +- g_value_set_uint64 (value, 
sctp->max_message_size); +- break; +- case PROP_MAX_CHANNELS: +- g_value_set_uint (value, sctp->max_channels); +- break; +- default: +- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); +- break; +- } +-} +- +-static void +-gst_webrtc_sctp_transport_finalize (GObject * object) +-{ +- GstWebRTCSCTPTransport *sctp = GST_WEBRTC_SCTP_TRANSPORT (object); +- +- g_signal_handlers_disconnect_by_data (sctp->sctpdec, sctp); +- g_signal_handlers_disconnect_by_data (sctp->sctpenc, sctp); +- +- gst_object_unref (sctp->sctpdec); +- gst_object_unref (sctp->sctpenc); +- +- g_clear_object (&sctp->transport); +- +- G_OBJECT_CLASS (parent_class)->finalize (object); +-} +- +-static void +-gst_webrtc_sctp_transport_constructed (GObject * object) +-{ +- GstWebRTCSCTPTransport *sctp = GST_WEBRTC_SCTP_TRANSPORT (object); +- guint association_id; +- +- association_id = g_random_int_range (0, G_MAXUINT16); +- +- sctp->sctpdec = +- g_object_ref_sink (gst_element_factory_make ("sctpdec", NULL)); +- g_object_set (sctp->sctpdec, "sctp-association-id", association_id, NULL); +- sctp->sctpenc = +- g_object_ref_sink (gst_element_factory_make ("sctpenc", NULL)); +- g_object_set (sctp->sctpenc, "sctp-association-id", association_id, NULL); +- +- g_signal_connect (sctp->sctpdec, "pad-removed", +- G_CALLBACK (_on_sctp_dec_pad_removed), sctp); +- g_signal_connect (sctp->sctpenc, "sctp-association-established", +- G_CALLBACK (_on_sctp_association_established), sctp); +- +- G_OBJECT_CLASS (parent_class)->constructed (object); +-} +- +-static void +-gst_webrtc_sctp_transport_class_init (GstWebRTCSCTPTransportClass * klass) +-{ +- GObjectClass *gobject_class = (GObjectClass *) klass; +- +- gobject_class->constructed = gst_webrtc_sctp_transport_constructed; +- gobject_class->get_property = gst_webrtc_sctp_transport_get_property; +- gobject_class->set_property = gst_webrtc_sctp_transport_set_property; +- gobject_class->finalize = gst_webrtc_sctp_transport_finalize; +- +- g_object_class_install_property (gobject_class, +- PROP_TRANSPORT, +- g_param_spec_object ("transport", +- "WebRTC DTLS Transport", +- "DTLS transport used for this SCTP transport", +- GST_TYPE_WEBRTC_DTLS_TRANSPORT, +- G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); +- +- g_object_class_install_property (gobject_class, +- PROP_STATE, +- g_param_spec_enum ("state", +- "WebRTC SCTP Transport state", "WebRTC SCTP Transport state", +- GST_TYPE_WEBRTC_SCTP_TRANSPORT_STATE, +- GST_WEBRTC_SCTP_TRANSPORT_STATE_NEW, +- G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); +- +- g_object_class_install_property (gobject_class, +- PROP_MAX_MESSAGE_SIZE, +- g_param_spec_uint64 ("max-message-size", +- "Maximum message size", +- "Maximum message size as reported by the transport", 0, G_MAXUINT64, +- 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); +- +- g_object_class_install_property (gobject_class, +- PROP_MAX_CHANNELS, +- g_param_spec_uint ("max-channels", +- "Maximum number of channels", "Maximum number of channels", +- 0, G_MAXUINT16, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); +- +- /** +- * GstWebRTCSCTPTransport::reset-stream: +- * @object: the #GstWebRTCSCTPTransport +- * @stream_id: the SCTP stream that was reset +- */ +- gst_webrtc_sctp_transport_signals[ON_RESET_STREAM_SIGNAL] = +- g_signal_new ("stream-reset", G_TYPE_FROM_CLASS (klass), +- G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT); +-} +- +-static void +-gst_webrtc_sctp_transport_init (GstWebRTCSCTPTransport * nice) +-{ +-} +- +-GstWebRTCSCTPTransport * +-gst_webrtc_sctp_transport_new 
(void) +-{ +- return g_object_new (GST_TYPE_WEBRTC_SCTP_TRANSPORT, NULL); +-} +diff --git a/ext/webrtc/sctptransport.h b/ext/webrtc/sctptransport.h +deleted file mode 100644 +index 212f15eb9..000000000 +--- a/ext/webrtc/sctptransport.h ++++ /dev/null +@@ -1,66 +0,0 @@ +-/* GStreamer +- * Copyright (C) 2018 Matthew Waters +- * +- * This library is free software; you can redistribute it and/or +- * modify it under the terms of the GNU Library General Public +- * License as published by the Free Software Foundation; either +- * version 2 of the License, or (at your option) any later version. +- * +- * This library is distributed in the hope that it will be useful, +- * but WITHOUT ANY WARRANTY; without even the implied warranty of +- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +- * Library General Public License for more details. +- * +- * You should have received a copy of the GNU Library General Public +- * License along with this library; if not, write to the +- * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, +- * Boston, MA 02110-1301, USA. +- */ +- +-#ifndef __GST_WEBRTC_SCTP_TRANSPORT_H__ +-#define __GST_WEBRTC_SCTP_TRANSPORT_H__ +- +-#include +-/* libnice */ +-#include +-#include +-#include "gstwebrtcice.h" +- +-G_BEGIN_DECLS +- +-GType gst_webrtc_sctp_transport_get_type(void); +-#define GST_TYPE_WEBRTC_SCTP_TRANSPORT (gst_webrtc_sctp_transport_get_type()) +-#define GST_WEBRTC_SCTP_TRANSPORT(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WEBRTC_SCTP_TRANSPORT,GstWebRTCSCTPTransport)) +-#define GST_IS_WEBRTC_SCTP_TRANSPORT(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WEBRTC_SCTP_TRANSPORT)) +-#define GST_WEBRTC_SCTP_TRANSPORT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_WEBRTC_SCTP_TRANSPORT,GstWebRTCSCTPTransportClass)) +-#define GST_IS_WEBRTC_SCTP_TRANSPORT_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_WEBRTC_SCTP_TRANSPORT)) +-#define GST_WEBRTC_SCTP_TRANSPORT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_WEBRTC_SCTP_TRANSPORT,GstWebRTCSCTPTransportClass)) +- +-struct _GstWebRTCSCTPTransport +-{ +- GstObject parent; +- +- GstWebRTCDTLSTransport *transport; +- GstWebRTCSCTPTransportState state; +- guint64 max_message_size; +- guint max_channels; +- +- gboolean association_established; +- +- gulong sctpdec_block_id; +- GstElement *sctpdec; +- GstElement *sctpenc; +- +- GstWebRTCBin *webrtcbin; +-}; +- +-struct _GstWebRTCSCTPTransportClass +-{ +- GstObjectClass parent_class; +-}; +- +-GstWebRTCSCTPTransport * gst_webrtc_sctp_transport_new (void); +- +-G_END_DECLS +- +-#endif /* __GST_WEBRTC_SCTP_TRANSPORT_H__ */ +diff --git a/ext/webrtc/utils.c b/ext/webrtc/utils.c +index f0741d1e5..e3552b381 100644 +--- a/ext/webrtc/utils.c ++++ b/ext/webrtc/utils.c +@@ -170,6 +170,31 @@ _g_checksum_to_webrtc_string (GChecksumType type) + } + } + ++void ++_remove_optional_offer_fields (GstCaps * offer_caps) ++{ ++ int i; ++ ++ for (i = 0; i < gst_caps_get_size (offer_caps); i++) { ++ GstStructure *s = gst_caps_get_structure (offer_caps, i); ++ const gchar *mtype = gst_structure_get_string (s, "media"); ++ const gchar *encoding_name = gst_structure_get_string (s, "encoding-name"); ++ ++ if (mtype == NULL || encoding_name == NULL) { ++ continue; ++ } ++ ++ /* Special cases for different codecs - sender-only fields ++ * that we don't need to care about for SDP intersection */ ++ if (g_str_equal (mtype, "audio")) { ++ if (g_str_equal (encoding_name, "OPUS")) { ++ gst_structure_remove_fields (s, "sprop-stereo", 
"sprop-maxcapturerate", ++ NULL); ++ } ++ } ++ } ++} ++ + GstCaps * + _rtp_caps_from_media (const GstSDPMedia * media) + { +diff --git a/ext/webrtc/utils.h b/ext/webrtc/utils.h +index e5d3d124a..bcbe906a7 100644 +--- a/ext/webrtc/utils.h ++++ b/ext/webrtc/utils.h +@@ -63,6 +63,8 @@ const gchar * _enum_value_to_string (GType type, guint val + G_GNUC_INTERNAL + const gchar * _g_checksum_to_webrtc_string (GChecksumType type); + G_GNUC_INTERNAL ++void _remove_optional_offer_fields (GstCaps *offer_caps); ++G_GNUC_INTERNAL + GstCaps * _rtp_caps_from_media (const GstSDPMedia * media); + G_GNUC_INTERNAL + GstWebRTCKind webrtc_kind_from_caps (const GstCaps * caps); +diff --git a/ext/webrtc/webrtcdatachannel.c b/ext/webrtc/webrtcdatachannel.c +index 0260c6172..1305c740a 100644 +--- a/ext/webrtc/webrtcdatachannel.c ++++ b/ext/webrtc/webrtcdatachannel.c +@@ -51,6 +51,7 @@ typedef void (*ChannelTask) (GstWebRTCDataChannel * channel, + + struct task + { ++ GstWebRTCBin *webrtcbin; + GstWebRTCDataChannel *channel; + ChannelTask func; + gpointer user_data; +@@ -69,6 +70,7 @@ _execute_task (GstWebRTCBin * webrtc, struct task *task) + static void + _free_task (struct task *task) + { ++ g_object_unref (task->webrtcbin); + gst_object_unref (task->channel); + + if (task->notify) +@@ -80,14 +82,22 @@ static void + _channel_enqueue_task (WebRTCDataChannel * channel, ChannelTask func, + gpointer user_data, GDestroyNotify notify) + { +- struct task *task = g_new0 (struct task, 1); ++ GstWebRTCBin *webrtcbin = NULL; ++ struct task *task = NULL; + ++ webrtcbin = g_weak_ref_get (&channel->webrtcbin_weak); ++ if (!webrtcbin) ++ return; ++ ++ task = g_new0 (struct task, 1); ++ ++ task->webrtcbin = webrtcbin; + task->channel = gst_object_ref (channel); + task->func = func; + task->user_data = user_data; + task->notify = notify; + +- gst_webrtc_bin_enqueue_task (channel->webrtcbin, ++ gst_webrtc_bin_enqueue_task (task->webrtcbin, + (GstWebRTCBinFunc) _execute_task, task, (GDestroyNotify) _free_task, + NULL); + } +@@ -427,11 +437,15 @@ _close_procedure (WebRTCDataChannel * channel, gpointer user_data) + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); + g_object_notify (G_OBJECT (channel), "ready-state"); + +- GST_WEBRTC_DATA_CHANNEL_LOCK (channel); +- if (channel->parent.buffered_amount <= 0) { +- _channel_enqueue_task (channel, (ChannelTask) _close_sctp_stream, +- NULL, NULL); ++ /* Make sure that all data enqueued gets properly sent before data channel is closed. 
*/ ++ GstFlowReturn ret = ++ gst_app_src_end_of_stream (GST_APP_SRC (WEBRTC_DATA_CHANNEL ++ (channel)->appsrc)); ++ if (ret != GST_FLOW_OK) { ++ GST_WARNING_OBJECT (channel, "Send end of stream returned %i, %s", ret, ++ gst_flow_get_name (ret)); + } ++ return; + } + + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); +@@ -871,6 +885,7 @@ webrtc_data_channel_send_data (GstWebRTCDataChannel * base_channel, + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); + g_set_error (error, GST_WEBRTC_ERROR, + GST_WEBRTC_ERROR_INVALID_STATE, "channel is not open"); ++ gst_buffer_unref (buffer); + return FALSE; + } + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); +@@ -945,6 +960,7 @@ webrtc_data_channel_send_string (GstWebRTCDataChannel * base_channel, + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); + g_set_error (error, GST_WEBRTC_ERROR, + GST_WEBRTC_ERROR_INVALID_STATE, "channel is not open"); ++ gst_buffer_unref (buffer); + return FALSE; + } + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); +@@ -1033,6 +1049,16 @@ on_appsrc_data (GstPad * pad, GstPadProbeInfo * info, gpointer user_data) + } else if (GST_PAD_PROBE_INFO_TYPE (info) & GST_PAD_PROBE_TYPE_BUFFER_LIST) { + GstBufferList *list = GST_PAD_PROBE_INFO_BUFFER_LIST (info); + size = gst_buffer_list_calculate_size (list); ++ } else if (GST_PAD_PROBE_INFO_TYPE (info) & ++ GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM) { ++ GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info); ++ if (GST_EVENT_TYPE (event) == GST_EVENT_EOS ++ && channel->parent.ready_state == ++ GST_WEBRTC_DATA_CHANNEL_STATE_CLOSING) { ++ _channel_enqueue_task (channel, (ChannelTask) _close_sctp_stream, NULL, ++ NULL); ++ return GST_PAD_PROBE_DROP; ++ } + } + + if (size > 0) { +@@ -1051,11 +1077,6 @@ on_appsrc_data (GstPad * pad, GstPadProbeInfo * info, gpointer user_data) + NULL); + } + +- if (channel->parent.ready_state == GST_WEBRTC_DATA_CHANNEL_STATE_CLOSING +- && channel->parent.buffered_amount <= 0) { +- _channel_enqueue_task (channel, (ChannelTask) _close_sctp_stream, NULL, +- NULL); +- } + GST_WEBRTC_DATA_CHANNEL_UNLOCK (channel); + g_object_notify (G_OBJECT (&channel->parent), "buffered-amount"); + } +@@ -1128,6 +1149,8 @@ gst_webrtc_data_channel_finalize (GObject * object) + g_clear_object (&channel->appsrc); + g_clear_object (&channel->appsink); + ++ g_weak_ref_clear (&channel->webrtcbin_weak); ++ + G_OBJECT_CLASS (parent_class)->finalize (object); + } + +@@ -1153,6 +1176,8 @@ webrtc_data_channel_init (WebRTCDataChannel * channel) + G_LOCK (outstanding_channels_lock); + outstanding_channels = g_list_prepend (outstanding_channels, channel); + G_UNLOCK (outstanding_channels_lock); ++ ++ g_weak_ref_init (&channel->webrtcbin_weak, NULL); + } + + static void +@@ -1202,3 +1227,10 @@ webrtc_data_channel_link_to_sctp (WebRTCDataChannel * channel, + } + } + } ++ ++void ++webrtc_data_channel_set_webrtcbin (WebRTCDataChannel * channel, ++ GstWebRTCBin * webrtcbin) ++{ ++ g_weak_ref_set (&channel->webrtcbin_weak, webrtcbin); ++} +diff --git a/ext/webrtc/webrtcdatachannel.h b/ext/webrtc/webrtcdatachannel.h +index dd65a66ae..919949f68 100644 +--- a/ext/webrtc/webrtcdatachannel.h ++++ b/ext/webrtc/webrtcdatachannel.h +@@ -51,7 +51,7 @@ struct _WebRTCDataChannel + GstElement *sink_bin; + GstElement *appsink; + +- GstWebRTCBin *webrtcbin; ++ GWeakRef webrtcbin_weak; + gboolean opened; + gulong src_probe; + GError *stored_error; +@@ -72,6 +72,10 @@ G_GNUC_INTERNAL + void webrtc_data_channel_link_to_sctp (WebRTCDataChannel *channel, + WebRTCSCTPTransport *sctp_transport); + ++G_GNUC_INTERNAL ++void webrtc_data_channel_set_webrtcbin 
(WebRTCDataChannel *channel, ++ GstWebRTCBin *webrtcbin); ++ + G_DECLARE_FINAL_TYPE (WebRTCErrorIgnoreBin, webrtc_error_ignore_bin, WEBRTC, ERROR_IGNORE_BIN, GstBin); + + G_END_DECLS +diff --git a/ext/webrtc/webrtcsdp.c b/ext/webrtc/webrtcsdp.c +index 1abd4b115..67c8143d9 100644 +--- a/ext/webrtc/webrtcsdp.c ++++ b/ext/webrtc/webrtcsdp.c +@@ -101,7 +101,7 @@ static gboolean + _check_sdp_crypto (SDPSource source, GstWebRTCSessionDescription * sdp, + GError ** error) + { +- const gchar *message_fingerprint, *fingerprint; ++ const gchar *message_fingerprint; + const GstSDPKey *key; + int i; + +@@ -112,12 +112,18 @@ _check_sdp_crypto (SDPSource source, GstWebRTCSessionDescription * sdp, + return FALSE; + } + +- message_fingerprint = fingerprint = ++ message_fingerprint = + gst_sdp_message_get_attribute_val (sdp->sdp, "fingerprint"); + for (i = 0; i < gst_sdp_message_medias_len (sdp->sdp); i++) { + const GstSDPMedia *media = gst_sdp_message_get_media (sdp->sdp, i); + const gchar *media_fingerprint = + gst_sdp_media_get_attribute_val (media, "fingerprint"); ++ GstWebRTCRTPTransceiverDirection direction = ++ _get_direction_from_media (media); ++ ++ /* Skip inactive media */ ++ if (direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE) ++ continue; + + if (!IS_EMPTY_SDP_ATTRIBUTE (message_fingerprint) + && !IS_EMPTY_SDP_ATTRIBUTE (media_fingerprint)) { +@@ -126,18 +132,6 @@ _check_sdp_crypto (SDPSource source, GstWebRTCSessionDescription * sdp, + "No fingerprint lines in sdp for media %u", i); + return FALSE; + } +- if (IS_EMPTY_SDP_ATTRIBUTE (fingerprint)) { +- fingerprint = media_fingerprint; +- } +- if (!IS_EMPTY_SDP_ATTRIBUTE (media_fingerprint) +- && g_strcmp0 (fingerprint, media_fingerprint) != 0) { +- g_set_error (error, GST_WEBRTC_ERROR, +- GST_WEBRTC_ERROR_FINGERPRINT_FAILURE, +- "Fingerprint in media %u differs from %s fingerprint. " +- "\'%s\' != \'%s\'", i, message_fingerprint ? 
"global" : "previous", +- fingerprint, media_fingerprint); +- return FALSE; +- } + } + + return TRUE; +@@ -242,9 +236,20 @@ _media_get_ice_pwd (const GstSDPMessage * msg, guint media_idx) + } + + static gboolean +-_media_has_setup (const GstSDPMedia * media, guint media_idx, GError ** error) ++_validate_setup_attribute (const gchar * setup, GError ** error) + { + static const gchar *valid_setups[] = { "actpass", "active", "passive", NULL }; ++ if (!g_strv_contains (valid_setups, setup)) { ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ "SDP contains unknown \'setup\' attribute, \'%s\'", setup); ++ return FALSE; ++ } ++ return TRUE; ++} ++ ++static gboolean ++_media_has_setup (const GstSDPMedia * media, guint media_idx, GError ** error) ++{ + const gchar *setup = gst_sdp_media_get_attribute_val (media, "setup"); + if (IS_EMPTY_SDP_ATTRIBUTE (setup)) { + g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, +@@ -252,13 +257,7 @@ _media_has_setup (const GstSDPMedia * media, guint media_idx, GError ** error) + media_idx); + return FALSE; + } +- if (!g_strv_contains (valid_setups, setup)) { +- g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, +- "media %u contains unknown \'setup\' attribute, \'%s\'", media_idx, +- setup); +- return FALSE; +- } +- return TRUE; ++ return _validate_setup_attribute (setup, error); + } + + #if 0 +@@ -279,9 +278,11 @@ gboolean + validate_sdp (GstWebRTCSignalingState state, SDPSource source, + GstWebRTCSessionDescription * sdp, GError ** error) + { +- const gchar *group, *bundle_ice_ufrag = NULL, *bundle_ice_pwd = NULL; ++ const gchar *group, *bundle_ice_ufrag = NULL, *bundle_ice_pwd = NULL, *setup = ++ NULL; + gchar **group_members = NULL; + gboolean is_bundle = FALSE; ++ gboolean has_session_setup = FALSE; + int i; + + if (!_check_valid_state_for_sdp_change (state, source, sdp->type, error)) +@@ -296,6 +297,13 @@ validate_sdp (GstWebRTCSignalingState state, SDPSource source, + if (is_bundle) + group_members = g_strsplit (&group[6], " ", -1); + ++ setup = gst_sdp_message_get_attribute_val (sdp->sdp, "setup"); ++ if (setup) { ++ if (!_validate_setup_attribute (setup, error)) ++ return FALSE; ++ has_session_setup = TRUE; ++ } ++ + for (i = 0; i < gst_sdp_message_medias_len (sdp->sdp); i++) { + const GstSDPMedia *media = gst_sdp_message_get_media (sdp->sdp, i); + const gchar *mid; +@@ -316,7 +324,7 @@ validate_sdp (GstWebRTCSignalingState state, SDPSource source, + "media %u is missing or contains an empty \'ice-pwd\' attribute", i); + goto fail; + } +- if (!_media_has_setup (media, i, error)) ++ if (!has_session_setup && !_media_has_setup (media, i, error)) + goto fail; + /* check parameters in bundle are the same */ + if (media_in_bundle) { +@@ -533,6 +541,26 @@ _get_dtls_setup_from_media (const GstSDPMedia * media) + return SETUP (NONE); + } + ++GstWebRTCDTLSSetup ++_get_dtls_setup_from_session (const GstSDPMessage * sdp) ++{ ++ const gchar *attr = gst_sdp_message_get_attribute_val (sdp, "setup"); ++ if (!attr) { ++ GST_LOG ("no setup attribute in session"); ++ return SETUP (NONE); ++ } ++ if (g_strcmp0 (attr, "actpass") == 0) { ++ return SETUP (ACTPASS); ++ } else if (g_strcmp0 (attr, "active") == 0) { ++ return SETUP (ACTIVE); ++ } else if (g_strcmp0 (attr, "passive") == 0) { ++ return SETUP (PASSIVE); ++ } ++ ++ GST_ERROR ("unknown setup value %s", attr); ++ return SETUP (NONE); ++} ++ + GstWebRTCDTLSSetup + _intersect_dtls_setup (GstWebRTCDTLSSetup offer) + { +diff --git 
a/ext/webrtc/webrtcsdp.h b/ext/webrtc/webrtcsdp.h +index c55709b50..80d21203c 100644 +--- a/ext/webrtc/webrtcsdp.h ++++ b/ext/webrtc/webrtcsdp.h +@@ -58,6 +58,8 @@ GstWebRTCRTPTransceiverDirection _get_final_direction (Gs + G_GNUC_INTERNAL + GstWebRTCDTLSSetup _get_dtls_setup_from_media (const GstSDPMedia * media); + G_GNUC_INTERNAL ++GstWebRTCDTLSSetup _get_dtls_setup_from_session (const GstSDPMessage * sdp); ++G_GNUC_INTERNAL + GstWebRTCDTLSSetup _intersect_dtls_setup (GstWebRTCDTLSSetup offer); + G_GNUC_INTERNAL + void _media_replace_setup (GstSDPMedia * media, +diff --git a/gst-libs/gst/webrtc/dtlstransport.c b/gst-libs/gst/webrtc/dtlstransport.c +index bd1a553e7..1d5980b27 100644 +--- a/gst-libs/gst/webrtc/dtlstransport.c ++++ b/gst-libs/gst/webrtc/dtlstransport.c +@@ -112,23 +112,35 @@ gst_webrtc_dtls_transport_get_property (GObject * object, guint prop_id, + + switch (prop_id) { + case PROP_SESSION_ID: ++ GST_OBJECT_LOCK (webrtc); + g_value_set_uint (value, webrtc->session_id); ++ GST_OBJECT_UNLOCK (webrtc); + break; + case PROP_TRANSPORT: ++ GST_OBJECT_LOCK (webrtc); + g_value_set_object (value, webrtc->transport); ++ GST_OBJECT_UNLOCK (webrtc); + break; + case PROP_STATE: ++ GST_OBJECT_LOCK (webrtc); + g_value_set_enum (value, webrtc->state); ++ GST_OBJECT_UNLOCK (webrtc); + break; + case PROP_CLIENT: ++ GST_OBJECT_LOCK (webrtc); + g_object_get_property (G_OBJECT (webrtc->dtlssrtpenc), "is-client", + value); ++ GST_OBJECT_UNLOCK (webrtc); + break; + case PROP_CERTIFICATE: ++ GST_OBJECT_LOCK (webrtc); + g_object_get_property (G_OBJECT (webrtc->dtlssrtpdec), "pem", value); ++ GST_OBJECT_UNLOCK (webrtc); + break; + case PROP_REMOTE_CERTIFICATE: ++ GST_OBJECT_LOCK (webrtc); + g_object_get_property (G_OBJECT (webrtc->dtlssrtpdec), "peer-pem", value); ++ GST_OBJECT_UNLOCK (webrtc); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); +@@ -182,9 +194,13 @@ on_connection_state_changed (GObject * obj, GParamSpec * pspec, + static void + gst_webrtc_dtls_transport_constructed (GObject * object) + { +- GstWebRTCDTLSTransport *webrtc = GST_WEBRTC_DTLS_TRANSPORT (object); ++ GstWebRTCDTLSTransport *webrtc = NULL; + gchar *connection_id; + ++ G_OBJECT_CLASS (parent_class)->constructed (object); ++ ++ webrtc = GST_WEBRTC_DTLS_TRANSPORT (object); ++ + /* XXX: this may collide with another connection_id however this is only a + * problem if multiple dtls element sets are being used within the same + * process */ +@@ -192,17 +208,28 @@ gst_webrtc_dtls_transport_constructed (GObject * object) + g_random_int ()); + + webrtc->dtlssrtpenc = gst_element_factory_make ("dtlssrtpenc", NULL); ++ gst_object_ref_sink (webrtc->dtlssrtpenc); + g_object_set (webrtc->dtlssrtpenc, "connection-id", connection_id, + "is-client", webrtc->client, "rtp-sync", FALSE, NULL); + + webrtc->dtlssrtpdec = gst_element_factory_make ("dtlssrtpdec", NULL); ++ gst_object_ref_sink (webrtc->dtlssrtpdec); + g_object_set (webrtc->dtlssrtpdec, "connection-id", connection_id, NULL); + g_free (connection_id); + + g_signal_connect (webrtc->dtlssrtpenc, "notify::connection-state", + G_CALLBACK (on_connection_state_changed), webrtc); ++} + +- G_OBJECT_CLASS (parent_class)->constructed (object); ++static void ++gst_webrtc_dtls_transport_dispose (GObject * object) ++{ ++ GstWebRTCDTLSTransport *webrtc = GST_WEBRTC_DTLS_TRANSPORT (object); ++ ++ gst_clear_object (&webrtc->dtlssrtpdec); ++ gst_clear_object (&webrtc->dtlssrtpenc); ++ ++ G_OBJECT_CLASS (parent_class)->dispose (object); + } + + static void +@@ -211,6 
+238,7 @@ gst_webrtc_dtls_transport_class_init (GstWebRTCDTLSTransportClass * klass) + GObjectClass *gobject_class = (GObjectClass *) klass; + + gobject_class->constructed = gst_webrtc_dtls_transport_constructed; ++ gobject_class->dispose = gst_webrtc_dtls_transport_dispose; + gobject_class->get_property = gst_webrtc_dtls_transport_get_property; + gobject_class->set_property = gst_webrtc_dtls_transport_set_property; + gobject_class->finalize = gst_webrtc_dtls_transport_finalize; +diff --git a/gst-libs/gst/webrtc/ice.c b/gst-libs/gst/webrtc/ice.c +index 2328d0b82..82a685940 100644 +--- a/gst-libs/gst/webrtc/ice.c ++++ b/gst-libs/gst/webrtc/ice.c +@@ -101,17 +101,19 @@ gst_webrtc_ice_find_transport (GstWebRTCICE * ice, + * @ice: The #GstWebRTCICE + * @stream: The #GstWebRTCICEStream + * @candidate: The ICE candidate ++ * @promise: (nullable): A #GstPromise for task notifications (Since: 1.24) + * + * Since: 1.22 + */ + void + gst_webrtc_ice_add_candidate (GstWebRTCICE * ice, +- GstWebRTCICEStream * stream, const gchar * candidate) ++ GstWebRTCICEStream * stream, const gchar * candidate, GstPromise * promise) + { + g_return_if_fail (GST_IS_WEBRTC_ICE (ice)); + g_assert (GST_WEBRTC_ICE_GET_CLASS (ice)->add_candidate); + +- GST_WEBRTC_ICE_GET_CLASS (ice)->add_candidate (ice, stream, candidate); ++ GST_WEBRTC_ICE_GET_CLASS (ice)->add_candidate (ice, stream, candidate, ++ promise); + } + + /** +@@ -456,7 +458,7 @@ gst_webrtc_ice_get_turn_server (GstWebRTCICE * ice) + * gst_webrtc_ice_set_http_proxy: + * @ice: The #GstWebRTCICE + * @uri: (transfer none): URI of the HTTP proxy of the form +- * http://[username:password@]hostname[:port] ++ * http://[username:password@]hostname[:port][?alpn=] + * + * Set HTTP Proxy to be used when connecting to TURN server. + * +@@ -476,7 +478,7 @@ gst_webrtc_ice_set_http_proxy (GstWebRTCICE * ice, const gchar * uri_s) + * @ice: The #GstWebRTCICE + * + * Returns: (transfer full): URI of the HTTP proxy of the form +- * http://[username:password@]hostname[:port] ++ * http://[username:password@]hostname[:port][?alpn=] + * + * Get HTTP Proxy to be used when connecting to TURN server. 
+ * +diff --git a/gst-libs/gst/webrtc/ice.h b/gst-libs/gst/webrtc/ice.h +index f67889b1f..e1422f37a 100644 +--- a/gst-libs/gst/webrtc/ice.h ++++ b/gst-libs/gst/webrtc/ice.h +@@ -84,7 +84,8 @@ struct _GstWebRTCICEClass { + GstWebRTCICEStream * stream); + void (*add_candidate) (GstWebRTCICE * ice, + GstWebRTCICEStream * stream, +- const gchar * candidate); ++ const gchar * candidate, ++ GstPromise * promise); + gboolean (*set_local_credentials) (GstWebRTCICE * ice, + GstWebRTCICEStream * stream, + const gchar * ufrag, +@@ -169,7 +170,8 @@ gboolean gst_webrtc_ice_gather_candidates (GstWebRTCIC + GST_WEBRTC_API + void gst_webrtc_ice_add_candidate (GstWebRTCICE * ice, + GstWebRTCICEStream * stream, +- const gchar * candidate); ++ const gchar * candidate, ++ GstPromise * promise); + + GST_WEBRTC_API + gboolean gst_webrtc_ice_set_local_credentials (GstWebRTCICE * ice, +diff --git a/gst-libs/gst/webrtc/meson.build b/gst-libs/gst/webrtc/meson.build +index 5614d4cf4..4d1e8b17a 100644 +--- a/gst-libs/gst/webrtc/meson.build ++++ b/gst-libs/gst/webrtc/meson.build +@@ -73,7 +73,7 @@ if build_gir + includes : ['Gst-1.0', 'GstSdp-1.0'], + install : true, + extra_args : gir_init_section + ['-DGST_USE_UNSTABLE_API'] + ['--c-include=gst/webrtc/webrtc.h'], +- dependencies : [gstbase_dep, gstsdp_dep] ++ dependencies : gstwebrtc_dependencies + ) + webrtc_gen_sources += webrtc_gir + endif +@@ -85,5 +85,4 @@ gstwebrtc_dep = declare_dependency(link_with: gstwebrtc, + sources: webrtc_gen_sources, + dependencies: gstwebrtc_dependencies) + +- +-subdir('nice') +\ No newline at end of file ++subdir('nice') +diff --git a/gst-libs/gst/webrtc/nice/meson.build b/gst-libs/gst/webrtc/nice/meson.build +index 007e7b23b..72fdfc265 100644 +--- a/gst-libs/gst/webrtc/nice/meson.build ++++ b/gst-libs/gst/webrtc/nice/meson.build +@@ -22,6 +22,9 @@ deps = [gstwebrtc_dep, libnice_dep] + if libnice_dep.found() + libnice_version = libnice_dep.version() + libnice_c_args = [] ++ if libnice_version.version_compare('> 0.1.21.1') ++ libnice_c_args += '-DHAVE_LIBNICE_CONSENT_FIX' ++ endif + libgstwebrtcnice = library('gstwebrtcnice-' + api_version, + libgstwebrtcnice_sources, libgstwebrtcnice_headers, + c_args : gst_plugins_bad_args + ['-DGST_USE_UNSTABLE_API', '-DBUILDING_GST_WEBRTCNICE', '-DG_LOG_DOMAIN="GStreamer-webrtcnice"'] + libnice_c_args, +diff --git a/gst-libs/gst/webrtc/nice/nice.c b/gst-libs/gst/webrtc/nice/nice.c +index d7feae194..d93d742ee 100644 +--- a/gst-libs/gst/webrtc/nice/nice.c ++++ b/gst-libs/gst/webrtc/nice/nice.c +@@ -96,7 +96,9 @@ _gst_nice_thread (GstWebRTCNice * ice) + g_main_context_invoke (ice->priv->main_context, + (GSourceFunc) _unlock_pc_thread, &ice->priv->lock); + ++ g_main_context_push_thread_default (ice->priv->main_context); + g_main_loop_run (ice->priv->loop); ++ g_main_context_pop_thread_default (ice->priv->main_context); + + g_mutex_lock (&ice->priv->lock); + g_main_context_unref (ice->priv->main_context); +@@ -271,125 +273,153 @@ _parse_userinfo (const gchar * userinfo, gchar ** user, gchar ** pass) + *pass = g_uri_unescape_string (&colon[1], NULL); + } + ++typedef void (*GstResolvedCallback) ++ (GstWebRTCNice * nice, GList * addresses, GError * error, gpointer user_data); ++ + struct resolve_host_data + { +- GstWebRTCNice *ice; ++ GWeakRef nice_weak; + char *host; + gboolean main_context_handled; ++ GstResolvedCallback resolved_callback; + gpointer user_data; + GDestroyNotify notify; + }; + +-static void +-on_resolve_host (GResolver * resolver, GAsyncResult * res, gpointer user_data) ++static struct 
resolve_host_data * ++resolve_host_data_new (GstWebRTCNice * ice, const char *host) + { +- GTask *task = user_data; +- struct resolve_host_data *rh; +- GError *error = NULL; +- GList *addresses; +- +- rh = g_task_get_task_data (task); ++ struct resolve_host_data *rh = ++ g_atomic_rc_box_new0 (struct resolve_host_data); + +- if (!(addresses = g_resolver_lookup_by_name_finish (resolver, res, &error))) { +- GST_ERROR ("failed to resolve: %s", error->message); +- g_task_return_error (task, error); +- g_object_unref (task); +- return; +- } ++ g_weak_ref_init (&rh->nice_weak, ice); ++ rh->host = g_strdup (host); + +- GST_DEBUG_OBJECT (rh->ice, "Resolved %d addresses for host %s with data %p", +- g_list_length (addresses), rh->host, rh); ++ return rh; ++} + +- g_task_return_pointer (task, addresses, +- (GDestroyNotify) g_resolver_free_addresses); +- g_object_unref (task); ++static struct resolve_host_data * ++resolve_host_data_ref (struct resolve_host_data *rh) ++{ ++ return (struct resolve_host_data *) g_atomic_rc_box_acquire (rh); + } + + static void +-free_resolve_host_data (struct resolve_host_data *rh) ++resolve_host_data_clear (struct resolve_host_data *rh) + { +- GST_TRACE_OBJECT (rh->ice, "Freeing data %p for resolving host %s", rh, +- rh->host); ++ GST_TRACE ("Freeing data %p for resolving host %s", rh, rh->host); + + if (rh->notify) + rh->notify (rh->user_data); + ++ g_weak_ref_clear (&rh->nice_weak); + g_free (rh->host); +- g_free (rh); + } + +-static struct resolve_host_data * +-resolve_host_data_new (GstWebRTCNice * ice, const char *host) ++static void ++resolve_host_data_unref (struct resolve_host_data *rh) + { +- struct resolve_host_data *rh = g_new0 (struct resolve_host_data, 1); ++ g_atomic_rc_box_release_full (rh, (GDestroyNotify) resolve_host_data_clear); ++} + +- rh->ice = ice; +- rh->host = g_strdup (host); ++static void ++on_resolve_host (GResolver * resolver, GAsyncResult * res, gpointer user_data) ++{ ++ struct resolve_host_data *rh = user_data; ++ GstWebRTCNice *nice = g_weak_ref_get (&rh->nice_weak); ++ GError *error = NULL; ++ GList *addresses; + +- return rh; ++ if (!nice) { ++ error = g_error_new_literal (G_IO_ERROR, G_IO_ERROR_CANCELLED, "Cancelled"); ++ rh->resolved_callback (NULL, NULL, error, rh->user_data); ++ resolve_host_data_unref (rh); ++ g_error_free (error); ++ ++ return; ++ } ++ ++ if (!(addresses = g_resolver_lookup_by_name_finish (resolver, res, &error))) { ++ GST_ERROR ("failed to resolve: %s", error->message); ++ ++ rh->resolved_callback (nice, NULL, error, rh->user_data); ++ gst_object_unref (nice); ++ resolve_host_data_unref (rh); ++ g_error_free (error); ++ ++ return; ++ } ++ ++ GST_DEBUG_OBJECT (nice, "Resolved %d addresses for host %s with data %p", ++ g_list_length (addresses), rh->host, rh); ++ ++ rh->resolved_callback (nice, addresses, error, rh->user_data); ++ gst_object_unref (nice); ++ resolve_host_data_unref (rh); ++ g_resolver_free_addresses (addresses); + } + + static gboolean + resolve_host_main_cb (gpointer user_data) + { + GResolver *resolver = g_resolver_get_default (); +- GTask *task = user_data; +- struct resolve_host_data *rh; +- +- rh = g_task_get_task_data (task); +- /* no need to error anymore if the main context disappears and this task is +- * not run */ +- rh->main_context_handled = TRUE; +- +- GST_DEBUG_OBJECT (rh->ice, "Resolving host %s", rh->host); +- g_resolver_lookup_by_name_async (resolver, rh->host, NULL, +- (GAsyncReadyCallback) on_resolve_host, g_object_ref (task)); ++ struct resolve_host_data *rh = user_data; ++ 
GstWebRTCNice *nice = g_weak_ref_get (&rh->nice_weak); ++ ++ if (nice) { ++ /* no need to error anymore if the main context disappears and this task is ++ * not run */ ++ rh->main_context_handled = TRUE; ++ ++ GST_DEBUG_OBJECT (nice, "Resolving host %s", rh->host); ++ g_resolver_lookup_by_name_async (resolver, rh->host, NULL, ++ (GAsyncReadyCallback) on_resolve_host, resolve_host_data_ref (rh)); ++ gst_object_unref (nice); ++ } + + return G_SOURCE_REMOVE; + } + + static void +-error_task_if_unhandled (GTask * task) ++error_resolve_if_unhandled (struct resolve_host_data *rh) + { +- struct resolve_host_data *rh; +- +- rh = g_task_get_task_data (task); ++ GstWebRTCNice *nice = g_weak_ref_get (&rh->nice_weak); + + if (!rh->main_context_handled) { +- GST_DEBUG_OBJECT (rh->ice, "host resolve for %s with data %p was never " +- "executed, main context quit?", rh->host, rh); +- g_task_return_new_error (task, G_IO_ERROR, G_IO_ERROR_CANCELLED, "%s", +- "Cancelled"); ++ if (nice) { ++ GST_DEBUG_OBJECT (nice, "host resolve for %s with data %p was never " ++ "executed, main context quit?", rh->host, rh); ++ } else { ++ GST_DEBUG ("host resolve for %s with data %p was never " ++ "executed, main context quit?", rh->host, rh); ++ } ++ ++ GError *error = ++ g_error_new_literal (G_IO_ERROR, G_IO_ERROR_CANCELLED, "Cancelled"); ++ rh->resolved_callback (nice, NULL, error, rh->user_data); ++ g_error_free (error); + } + +- g_object_unref (task); ++ if (nice) ++ gst_object_unref (nice); ++ resolve_host_data_unref (rh); + } + + static void +-resolve_host_async (GstWebRTCNice * ice, const gchar * host, +- GAsyncReadyCallback cb, gpointer user_data, GDestroyNotify notify) ++resolve_host_async (GstWebRTCNice * nice, const gchar * host, ++ GstResolvedCallback resolved_callback, gpointer user_data, ++ GDestroyNotify notify) + { +- struct resolve_host_data *rh = resolve_host_data_new (ice, host); +- GTask *task; ++ struct resolve_host_data *rh = resolve_host_data_new (nice, host); + ++ rh->resolved_callback = resolved_callback; + rh->user_data = user_data; + rh->notify = notify; +- task = g_task_new (rh->ice, NULL, cb, user_data); +- +- g_task_set_task_data (task, rh, (GDestroyNotify) free_resolve_host_data); + +- GST_TRACE_OBJECT (rh->ice, "invoking main context for resolving host %s " ++ GST_TRACE_OBJECT (nice, "invoking main context for resolving host %s " + "with data %p", host, rh); +- g_main_context_invoke_full (ice->priv->main_context, G_PRIORITY_DEFAULT, +- resolve_host_main_cb, task, (GDestroyNotify) error_task_if_unhandled); +-} +- +-static GList * +-resolve_host_finish (GstWebRTCNice * ice, GAsyncResult * res, GError ** error) +-{ +- g_return_val_if_fail (g_task_is_valid (res, ice), NULL); +- +- return g_task_propagate_pointer (G_TASK (res), error); ++ g_main_context_invoke_full (nice->priv->main_context, G_PRIORITY_DEFAULT, ++ resolve_host_main_cb, rh, (GDestroyNotify) error_resolve_if_unhandled); + } + + static void +@@ -699,6 +729,7 @@ struct resolve_candidate_data + guint nice_stream_id; + char *prefix; + char *postfix; ++ GstPromise *promise; + }; + + static void +@@ -706,6 +737,8 @@ free_resolve_candidate_data (struct resolve_candidate_data *rc) + { + g_free (rc->prefix); + g_free (rc->postfix); ++ if (rc->promise) ++ gst_promise_unref (rc->promise); + g_free (rc); + } + +@@ -731,24 +764,37 @@ add_ice_candidate_to_libnice (GstWebRTCICE * ice, guint nice_stream_id, + } + + static void +-on_candidate_resolved (GstWebRTCICE * ice, GAsyncResult * res, +- gpointer user_data) ++on_candidate_resolved 
(GstWebRTCNice * nice, GList * addresses, ++ GError * error, gpointer user_data) + { + struct resolve_candidate_data *rc = user_data; +- GError *error = NULL; +- GList *addresses; + char *new_candv[4] = { NULL, }; + char *new_addr, *new_candidate; + NiceCandidate *cand; +- GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); + +- if (!(addresses = resolve_host_finish (nice, res, &error))) { +- GST_WARNING_OBJECT (ice, "Could not resolve candidate address: %s", +- error->message); +- g_clear_error (&error); ++ if (!nice) ++ error = g_error_new_literal (G_IO_ERROR, G_IO_ERROR_CANCELLED, "Cancelled"); ++ ++ if (error) { ++ if (rc->promise) { ++ GstStructure *s = gst_structure_new ("application/x-gst-promise", "error", ++ G_TYPE_ERROR, error, NULL); ++ gst_promise_reply (rc->promise, s); ++ } else if (nice) { ++ GST_WARNING_OBJECT (nice, "Could not resolve candidate address: %s", ++ error->message); ++ } else { ++ GST_WARNING ("Could not resolve candidate address: %s", error->message); ++ } ++ ++ if (!nice) ++ g_clear_error (&error); ++ + return; + } + ++ GstWebRTCICE *ice = GST_WEBRTC_ICE (nice); ++ + new_addr = g_inet_address_to_string (addresses->data); + + new_candv[0] = rc->prefix; +@@ -764,7 +810,18 @@ on_candidate_resolved (GstWebRTCICE * ice, GAsyncResult * res, + rc->nice_stream_id, new_candidate); + g_free (new_candidate); + if (!cand) { +- GST_WARNING_OBJECT (ice, "Could not parse candidate \'%s\'", new_candidate); ++ if (rc->promise) { ++ GError *error = ++ g_error_new (GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "Could not parse candidate \'%s\'", new_candidate); ++ GstStructure *s = gst_structure_new ("application/x-gst-promise", "error", ++ G_TYPE_ERROR, error, NULL); ++ gst_promise_reply (rc->promise, s); ++ g_clear_error (&error); ++ } else { ++ GST_WARNING_OBJECT (ice, "Could not parse candidate \'%s\'", ++ new_candidate); ++ } + return; + } + +@@ -777,7 +834,7 @@ on_candidate_resolved (GstWebRTCICE * ice, GAsyncResult * res, + /* candidate must start with "a=candidate:" or be NULL*/ + static void + gst_webrtc_nice_add_candidate (GstWebRTCICE * ice, GstWebRTCICEStream * stream, +- const gchar * candidate) ++ const gchar * candidate, GstPromise * promise) + { + struct NiceStreamItem *item; + NiceCandidate *cand; +@@ -801,14 +858,37 @@ gst_webrtc_nice_add_candidate (GstWebRTCICE * ice, GstWebRTCICEStream * stream, + struct resolve_candidate_data *rc; + + if (!get_candidate_address (candidate, &prefix, &address, &postfix)) { +- GST_WARNING_OBJECT (nice, "Failed to retrieve address from candidate %s", +- candidate); ++ if (promise) { ++ GError *error = ++ g_error_new (GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "Failed to retrieve address from candidate %s", ++ candidate); ++ GstStructure *s = gst_structure_new ("application/x-gst-promise", ++ "error", G_TYPE_ERROR, error, NULL); ++ gst_promise_reply (promise, s); ++ g_clear_error (&error); ++ } else { ++ GST_WARNING_OBJECT (nice, ++ "Failed to retrieve address from candidate %s", candidate); ++ } + goto done; + } + + if (!g_str_has_suffix (address, ".local")) { +- GST_WARNING_OBJECT (nice, "candidate address \'%s\' does not end " +- "with \'.local\'", address); ++ if (promise) { ++ GError *error = ++ g_error_new (GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "candidate address \'%s\' does not end " "with \'.local\'", ++ address); ++ GstStructure *s = gst_structure_new ("application/x-gst-promise", ++ "error", G_TYPE_ERROR, error, NULL); ++ gst_promise_reply (promise, s); ++ g_clear_error (&error); ++ } 
else { ++ GST_WARNING_OBJECT (nice, ++ "candidate address \'%s\' does not end " ++ "with \'.local\'", address); ++ } + goto done; + } + +@@ -816,8 +896,9 @@ gst_webrtc_nice_add_candidate (GstWebRTCICE * ice, GstWebRTCICEStream * stream, + rc->nice_stream_id = item->nice_stream_id; + rc->prefix = prefix; + rc->postfix = postfix; ++ rc->promise = promise ? gst_promise_ref (promise) : NULL; + resolve_host_async (nice, address, +- (GAsyncReadyCallback) on_candidate_resolved, rc, ++ on_candidate_resolved, rc, + (GDestroyNotify) free_resolve_candidate_data); + + prefix = NULL; +@@ -1295,26 +1376,36 @@ out: + } + + static void +-on_http_proxy_resolved (GstWebRTCICE * ice, GAsyncResult * res, +- gpointer user_data) ++on_http_proxy_resolved (GstWebRTCNice * nice, GList * addresses, ++ GError * error, gpointer user_data) + { +- GstWebRTCNice *nice = GST_WEBRTC_NICE (ice); + GstUri *uri = user_data; +- GList *addresses; +- GError *error = NULL; + const gchar *userinfo; + gchar *user = NULL; + gchar *pass = NULL; ++ const gchar *alpn = NULL; + gchar *ip = NULL; + guint port = GST_URI_NO_PORT; ++ GHashTable *extra_headers; ++ ++ if (error) { ++ if (nice) { ++ GST_WARNING_OBJECT (nice, "Failed to resolve http proxy: %s", ++ error->message); ++ } else { ++ GST_WARNING ("Failed to resolve http proxy: %s", error->message); ++ } ++ ++ return; ++ } + +- if (!(addresses = resolve_host_finish (nice, res, &error))) { +- GST_WARNING_OBJECT (ice, "Failed to resolve http proxy: %s", +- error->message); +- g_clear_error (&error); ++ if (!nice) { ++ GST_WARNING ("Missing GstWebRTCNice instance"); + return; + } + ++ GstWebRTCICE *ice = GST_WEBRTC_ICE (nice); ++ + /* XXX: only the first IP is used */ + ip = g_inet_address_to_string (addresses->data); + +@@ -1334,13 +1425,23 @@ on_http_proxy_resolved (GstWebRTCICE * ice, GAsyncResult * res, + userinfo = gst_uri_get_userinfo (uri); + _parse_userinfo (userinfo, &user, &pass); + ++ alpn = gst_uri_get_query_value (uri, "alpn"); ++ if (!alpn) { ++ alpn = "webrtc"; ++ } ++ extra_headers = g_hash_table_new_full (g_str_hash, ++ g_str_equal, g_free, g_free); ++ g_hash_table_insert (extra_headers, g_strdup ("ALPN"), g_strdup (alpn)); ++ + g_object_set (nice->priv->nice_agent, + "proxy-ip", ip, "proxy-port", port, "proxy-type", NICE_PROXY_TYPE_HTTP, +- "proxy-username", user, "proxy-password", pass, NULL); ++ "proxy-username", user, "proxy-password", pass, "proxy-extra-headers", ++ extra_headers, NULL); + + g_free (ip); + g_free (user); + g_free (pass); ++ g_hash_table_unref (extra_headers); + } + + static GstUri * +@@ -1384,7 +1485,7 @@ _set_http_proxy (GstWebRTCICE * ice, const gchar * s) + goto out; + } + +- resolve_host_async (nice, host, (GAsyncReadyCallback) on_http_proxy_resolved, ++ resolve_host_async (nice, host, on_http_proxy_resolved, + gst_uri_ref (uri), (GDestroyNotify) gst_uri_unref); + + out: +@@ -1580,6 +1681,11 @@ gst_webrtc_nice_constructed (GObject * object) + options |= NICE_AGENT_OPTION_ICE_TRICKLE; + options |= NICE_AGENT_OPTION_REGULAR_NOMINATION; + ++/* https://gitlab.freedesktop.org/libnice/libnice/-/merge_requests/257 */ ++#if HAVE_LIBNICE_CONSENT_FIX ++ options |= NICE_AGENT_OPTION_CONSENT_FRESHNESS; ++#endif ++ + ice->priv->nice_agent = nice_agent_new_full (ice->priv->main_context, + NICE_COMPATIBILITY_RFC5245, options); + g_signal_connect (ice->priv->nice_agent, "new-candidate-full", +diff --git a/gst-libs/gst/webrtc/nice/nicestream.c b/gst-libs/gst/webrtc/nice/nicestream.c +index cda1c133f..44c576835 100644 +--- 
a/gst-libs/gst/webrtc/nice/nicestream.c ++++ b/gst-libs/gst/webrtc/nice/nicestream.c +@@ -149,13 +149,14 @@ _on_candidate_gathering_done (NiceAgent * agent, guint stream_id, + + ice->priv->gathered = TRUE; + +- for (l = ice->priv->transports; l; l = l->next) { ++ for (l = ice->priv->transports; l;) { + GstWebRTCICETransport *trans = g_weak_ref_get (l->data); + + if (trans) { + gst_webrtc_ice_transport_gathering_state_change (trans, + GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE); + g_object_unref (trans); ++ l = l->next; + } else { + l = _delete_transport (&ice->priv->transports, l); + } +@@ -174,7 +175,7 @@ gst_webrtc_nice_stream_find_transport (GstWebRTCICEStream * stream, + GList *l; + GstWebRTCNiceStream *nice_stream = GST_WEBRTC_NICE_STREAM (stream); + +- for (l = nice_stream->priv->transports; l; l = l->next) { ++ for (l = nice_stream->priv->transports; l;) { + GstWebRTCICETransport *trans = g_weak_ref_get (l->data); + if (trans) { + g_object_get (trans, "component", &trans_comp, NULL); +@@ -183,6 +184,7 @@ gst_webrtc_nice_stream_find_transport (GstWebRTCICEStream * stream, + return trans; + else + gst_object_unref (trans); ++ l = l->next; + } else { + l = _delete_transport (&nice_stream->priv->transports, l); + } +@@ -234,13 +236,14 @@ gst_webrtc_nice_stream_gather_candidates (GstWebRTCICEStream * stream) + if (nice_stream->priv->gathered) + return TRUE; + +- for (l = nice_stream->priv->transports; l; l = l->next) { ++ for (l = nice_stream->priv->transports; l;) { + GstWebRTCICETransport *trans = g_weak_ref_get (l->data); + + if (trans) { + gst_webrtc_ice_transport_gathering_state_change (trans, + GST_WEBRTC_ICE_GATHERING_STATE_GATHERING); + g_object_unref (trans); ++ l = l->next; + } else { + l = _delete_transport (&nice_stream->priv->transports, l); + } +@@ -273,12 +276,13 @@ gst_webrtc_nice_stream_gather_candidates (GstWebRTCICEStream * stream) + goto cleanup; + } + +- for (l = nice_stream->priv->transports; l; l = l->next) { ++ for (l = nice_stream->priv->transports; l;) { + GstWebRTCNiceTransport *trans = g_weak_ref_get (l->data); + + if (trans) { + gst_webrtc_nice_transport_update_buffer_size (trans); + g_object_unref (trans); ++ l = l->next; + } else { + l = _delete_transport (&nice_stream->priv->transports, l); + } +diff --git a/gst-libs/gst/webrtc/webrtc_fwd.h b/gst-libs/gst/webrtc/webrtc_fwd.h +index d3556400a..6099ae608 100644 +--- a/gst-libs/gst/webrtc/webrtc_fwd.h ++++ b/gst-libs/gst/webrtc/webrtc_fwd.h +@@ -32,6 +32,8 @@ + * @title: GstWebRTC Enumerations + */ + ++G_BEGIN_DECLS ++ + #ifndef GST_WEBRTC_API + # ifdef BUILDING_GST_WEBRTC + # define GST_WEBRTC_API GST_API_EXPORT /* from config.h */ +@@ -516,4 +518,6 @@ typedef enum /**/ + GST_WEBRTC_ERROR_TYPE_ERROR, + } GstWebRTCError; + ++G_END_DECLS ++ + #endif /* __GST_WEBRTC_FWD_H__ */ +-- +2.47.1 + diff --git a/package/gstreamer1/gst1-plugins-bad/1.18.6-gstwebrtc/0003-WebRTC-Fix-track-events.patch b/package/gstreamer1/gst1-plugins-bad/1.18.6-gstwebrtc/0003-WebRTC-Fix-track-events.patch new file mode 100644 index 000000000000..cde7ba9431b8 --- /dev/null +++ b/package/gstreamer1/gst1-plugins-bad/1.18.6-gstwebrtc/0003-WebRTC-Fix-track-events.patch @@ -0,0 +1,766 @@ +From 3c40bfe18a011788cba18f56badb7fe13cb42dc6 Mon Sep 17 00:00:00 2001 +From: Carlos Bentzen +Date: Thu, 15 Aug 2024 15:53:07 +0200 +Subject: [PATCH 1/3] webrtcbin: create and associate transceivers earlier in + negotation + +According to https://w3c.github.io/webrtc-pc/#set-the-session-description +(steps in 4.6.10.), we should be creating and associating 
transceivers when +setting session descriptions. + +Before this commit, webrtcbin deviated from the spec: +1. Transceivers from sink pads where created when the sink pad was + requested, but not associated after setting local description, only + when signaling is STABLE. +2. Transceivers from remote offers were not created after applying the + the remote description, only when the answer is created, and were then + only associated once signaling is STABLE. + +This commit makes webrtcbin follow the spec more closely with regards to +timing of transceivers creation and association. + +A unit test is added, checking that the transceivers are created and +associated after every session description is set. + +Part-of: +--- + ext/webrtc/gstwebrtcbin.c | 477 ++++++++++++++++++++++---------------- + ext/webrtc/webrtcsdp.c | 11 + + ext/webrtc/webrtcsdp.h | 2 + + 3 files changed, 296 insertions(+), 194 deletions(-) + +diff --git a/ext/webrtc/gstwebrtcbin.c b/ext/webrtc/gstwebrtcbin.c +index bf0bc5b3f..0ee3fb6b2 100644 +--- a/ext/webrtc/gstwebrtcbin.c ++++ b/ext/webrtc/gstwebrtcbin.c +@@ -748,6 +748,13 @@ transceiver_match_for_mid (GstWebRTCRTPTransceiver * trans, const gchar * mid) + return g_strcmp0 (trans->mid, mid) == 0; + } + ++static gboolean ++transceiver_match_for_pending_mid (GstWebRTCRTPTransceiver * trans, ++ const gchar * mid) ++{ ++ return g_strcmp0 (WEBRTC_TRANSCEIVER (trans)->pending_mid, mid) == 0; ++} ++ + static gboolean + transceiver_match_for_mline (GstWebRTCRTPTransceiver * trans, guint * mline) + { +@@ -786,6 +793,20 @@ _find_transceiver_for_mid (GstWebRTCBin * webrtc, const char *mid) + return trans; + } + ++static GstWebRTCRTPTransceiver * ++_find_transceiver_for_pending_mid (GstWebRTCBin * webrtc, const char *mid) ++{ ++ GstWebRTCRTPTransceiver *trans; ++ ++ trans = _find_transceiver (webrtc, mid, ++ (FindTransceiverFunc) transceiver_match_for_pending_mid); ++ ++ GST_TRACE_OBJECT (webrtc, "Found transceiver %" GST_PTR_FORMAT " for " ++ "pending mid %s", trans, mid); ++ ++ return trans; ++} ++ + typedef gboolean (*FindTransportFunc) (TransportStream * p1, + gconstpointer data); + +@@ -4554,146 +4575,51 @@ _create_answer_task (GstWebRTCBin * webrtc, const GstStructure * options, + + _remove_optional_offer_fields (offer_caps); + +- if (last_answer && i < gst_sdp_message_medias_len (last_answer) +- && (rtp_trans = _find_transceiver_for_mid (webrtc, mid))) { ++ rtp_trans = _find_transceiver_for_mid (webrtc, mid); ++ if (!rtp_trans) { ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_INVALID_STATE, ++ "Transceiver for media with mid %s not found", mid); ++ gst_caps_unref (offer_caps); ++ goto rejected; ++ } ++ GstCaps *current_caps = ++ _find_codec_preferences (webrtc, rtp_trans, i, error); ++ if (*error) { ++ gst_caps_unref (offer_caps); ++ goto rejected; ++ } ++ ++ if (last_answer && i < gst_sdp_message_medias_len (last_answer)) { + const GstSDPMedia *last_media = + gst_sdp_message_get_media (last_answer, i); + const gchar *last_mid = + gst_sdp_media_get_attribute_val (last_media, "mid"); +- GstCaps *current_caps; +- + /* FIXME: assumes no shenanigans with recycling transceivers */ + g_assert (g_strcmp0 (mid, last_mid) == 0); +- +- current_caps = _find_codec_preferences (webrtc, rtp_trans, i, error); +- if (*error) { +- gst_caps_unref (offer_caps); +- goto rejected; +- } + if (!current_caps) + current_caps = _rtp_caps_from_media (last_media); +- +- if (current_caps) { +- answer_caps = gst_caps_intersect (offer_caps, current_caps); +- if (gst_caps_is_empty (answer_caps)) { 
+- GST_WARNING_OBJECT (webrtc, "Caps from offer for m-line %d (%" +- GST_PTR_FORMAT ") don't intersect with caps from codec" +- " preferences and transceiver %" GST_PTR_FORMAT, i, offer_caps, +- current_caps); +- gst_caps_unref (current_caps); +- gst_caps_unref (answer_caps); +- gst_caps_unref (offer_caps); +- goto rejected; +- } +- gst_caps_unref (current_caps); +- } +- +- /* XXX: In theory we're meant to use the sendrecv formats for the +- * inactive direction however we don't know what that may be and would +- * require asking outside what it expects to possibly send later */ +- +- GST_LOG_OBJECT (webrtc, "Found existing previously negotiated " +- "transceiver %" GST_PTR_FORMAT " from mid %s for mline %u " +- "using caps %" GST_PTR_FORMAT, rtp_trans, mid, i, answer_caps); +- } else { +- for (j = 0; j < webrtc->priv->transceivers->len; j++) { +- GstCaps *trans_caps; +- +- rtp_trans = g_ptr_array_index (webrtc->priv->transceivers, j); +- +- if (g_list_find (seen_transceivers, rtp_trans)) { +- /* Don't double allocate a transceiver to multiple mlines */ +- rtp_trans = NULL; +- continue; +- } +- +- trans_caps = _find_codec_preferences (webrtc, rtp_trans, j, error); +- if (*error) { +- gst_caps_unref (offer_caps); +- goto rejected; +- } +- +- GST_LOG_OBJECT (webrtc, "trying to compare %" GST_PTR_FORMAT +- " and %" GST_PTR_FORMAT, offer_caps, trans_caps); +- +- /* FIXME: technically this is a little overreaching as some fields we +- * we can deal with not having and/or we may have unrecognized fields +- * that we cannot actually support */ +- if (trans_caps) { +- answer_caps = gst_caps_intersect (offer_caps, trans_caps); +- gst_caps_unref (trans_caps); +- if (answer_caps) { +- if (!gst_caps_is_empty (answer_caps)) { +- GST_LOG_OBJECT (webrtc, +- "found compatible transceiver %" GST_PTR_FORMAT +- " for offer media %u", rtp_trans, i); +- break; +- } +- gst_caps_unref (answer_caps); +- answer_caps = NULL; +- } +- } +- rtp_trans = NULL; +- } +- } +- +- if (rtp_trans) { +- answer_dir = rtp_trans->direction; +- g_assert (answer_caps != NULL); +- } else { +- /* if no transceiver, then we only receive that stream and respond with +- * the intersection with the transceivers codec preferences caps */ +- answer_dir = GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY; +- GST_WARNING_OBJECT (webrtc, "did not find compatible transceiver for " +- "offer caps %" GST_PTR_FORMAT ", will only receive", offer_caps); + } + +- if (!rtp_trans) { +- GstCaps *trans_caps; +- GstWebRTCKind kind = GST_WEBRTC_KIND_UNKNOWN; +- +- if (g_strcmp0 (gst_sdp_media_get_media (offer_media), "audio") == 0) +- kind = GST_WEBRTC_KIND_AUDIO; +- else if (g_strcmp0 (gst_sdp_media_get_media (offer_media), +- "video") == 0) +- kind = GST_WEBRTC_KIND_VIDEO; +- else +- GST_LOG_OBJECT (webrtc, "Unknown media kind %s", +- GST_STR_NULL (gst_sdp_media_get_media (offer_media))); +- +- trans = _create_webrtc_transceiver (webrtc, answer_dir, i, kind, NULL); +- rtp_trans = GST_WEBRTC_RTP_TRANSCEIVER (trans); +- +- PC_UNLOCK (webrtc); +- g_signal_emit (webrtc, +- gst_webrtc_bin_signals[ON_NEW_TRANSCEIVER_SIGNAL], 0, rtp_trans); +- PC_LOCK (webrtc); +- +- GST_LOG_OBJECT (webrtc, "Created new transceiver %" GST_PTR_FORMAT +- " for mline %u with media kind %d", trans, i, kind); +- +- trans_caps = _find_codec_preferences (webrtc, rtp_trans, i, error); +- if (*error) { ++ if (current_caps) { ++ answer_caps = gst_caps_intersect (offer_caps, current_caps); ++ if (gst_caps_is_empty (answer_caps)) { ++ GST_WARNING_OBJECT (webrtc, "Caps from offer for m-line %d 
(%" ++ GST_PTR_FORMAT ") don't intersect with caps from codec" ++ " preferences and transceiver %" GST_PTR_FORMAT, i, offer_caps, ++ current_caps); ++ gst_caps_unref (current_caps); ++ gst_caps_unref (answer_caps); + gst_caps_unref (offer_caps); + goto rejected; + } +- +- GST_TRACE_OBJECT (webrtc, "trying to compare %" GST_PTR_FORMAT +- " and %" GST_PTR_FORMAT, offer_caps, trans_caps); +- +- /* FIXME: technically this is a little overreaching as some fields we +- * we can deal with not having and/or we may have unrecognized fields +- * that we cannot actually support */ +- if (trans_caps) { +- answer_caps = gst_caps_intersect (offer_caps, trans_caps); +- gst_clear_caps (&trans_caps); +- } else { +- answer_caps = gst_caps_ref (offer_caps); +- } ++ gst_caps_unref (current_caps); + } else { +- trans = WEBRTC_TRANSCEIVER (rtp_trans); ++ answer_caps = gst_caps_ref (offer_caps); + } + ++ answer_dir = rtp_trans->direction; ++ trans = WEBRTC_TRANSCEIVER (rtp_trans); ++ + seen_transceivers = g_list_prepend (seen_transceivers, rtp_trans); + + if (gst_caps_is_empty (answer_caps)) { +@@ -5740,6 +5666,7 @@ _update_transceiver_from_sdp_media (GstWebRTCBin * webrtc, + if (g_strcmp0 (attr->key, "mid") == 0) { + g_free (rtp_trans->mid); + rtp_trans->mid = g_strdup (attr->value); ++ g_object_notify (G_OBJECT (rtp_trans), "mid"); + } + } + +@@ -6186,7 +6113,6 @@ _update_transceivers_from_sdp (GstWebRTCBin * webrtc, SDPSource source, + for (i = 0; i < gst_sdp_message_medias_len (sdp->sdp); i++) { + const GstSDPMedia *media = gst_sdp_message_get_media (sdp->sdp, i); + TransportStream *stream; +- GstWebRTCRTPTransceiver *trans; + guint transport_idx; + + /* skip rejected media */ +@@ -6198,8 +6124,6 @@ _update_transceivers_from_sdp (GstWebRTCBin * webrtc, SDPSource source, + else + transport_idx = i; + +- trans = _find_transceiver_for_sdp_media (webrtc, sdp->sdp, i); +- + stream = _get_or_create_transport_stream (webrtc, transport_idx, + _message_media_is_datachannel (sdp->sdp, transport_idx)); + if (!bundled) { +@@ -6210,60 +6134,28 @@ _update_transceivers_from_sdp (GstWebRTCBin * webrtc, SDPSource source, + ensure_rtx_hdr_ext (stream); + } + +- if (trans) +- webrtc_transceiver_set_transport ((WebRTCTransceiver *) trans, stream); +- +- if (source == SDP_LOCAL && sdp->type == GST_WEBRTC_SDP_TYPE_OFFER && !trans) { +- g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, +- "State mismatch. Could not find local transceiver by mline %u", i); +- goto done; +- } else { +- if (g_strcmp0 (gst_sdp_media_get_media (media), "audio") == 0 || +- g_strcmp0 (gst_sdp_media_get_media (media), "video") == 0) { +- GstWebRTCKind kind = GST_WEBRTC_KIND_UNKNOWN; +- +- /* No existing transceiver, find an unused one */ +- if (!trans) { +- if (g_strcmp0 (gst_sdp_media_get_media (media), "audio") == 0) +- kind = GST_WEBRTC_KIND_AUDIO; +- else if (g_strcmp0 (gst_sdp_media_get_media (media), "video") == 0) +- kind = GST_WEBRTC_KIND_VIDEO; +- else +- GST_LOG_OBJECT (webrtc, "Unknown media kind %s", +- GST_STR_NULL (gst_sdp_media_get_media (media))); +- +- trans = _find_transceiver (webrtc, GINT_TO_POINTER (kind), +- (FindTransceiverFunc) _find_compatible_unassociated_transceiver); +- } +- +- /* Still no transceiver? Create one */ +- /* XXX: default to the advertised direction in the sdp for new +- * transceivers. The spec doesn't actually say what happens here, only +- * that calls to setDirection will change the value. 
Nothing about +- * a default value when the transceiver is created internally */ +- if (!trans) { +- WebRTCTransceiver *t = _create_webrtc_transceiver (webrtc, +- _get_direction_from_media (media), i, kind, NULL); +- webrtc_transceiver_set_transport (t, stream); +- trans = GST_WEBRTC_RTP_TRANSCEIVER (t); +- PC_UNLOCK (webrtc); +- g_signal_emit (webrtc, +- gst_webrtc_bin_signals[ON_NEW_TRANSCEIVER_SIGNAL], 0, trans); +- PC_LOCK (webrtc); +- } ++ if (g_strcmp0 (gst_sdp_media_get_media (media), "audio") == 0 || ++ g_strcmp0 (gst_sdp_media_get_media (media), "video") == 0) { ++ GstWebRTCRTPTransceiver *trans; + +- _update_transceiver_from_sdp_media (webrtc, sdp->sdp, i, stream, +- trans, bundled, bundle_idx, error); +- if (error && *error) +- goto done; +- } else if (_message_media_is_datachannel (sdp->sdp, i)) { +- _update_data_channel_from_sdp_media (webrtc, sdp->sdp, i, stream, +- error); +- if (error && *error) +- goto done; +- } else { +- GST_ERROR_OBJECT (webrtc, "Unknown media type in SDP at index %u", i); ++ trans = _find_transceiver_for_sdp_media (webrtc, sdp->sdp, i); ++ if (!trans) { ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_INVALID_STATE, ++ "Transceiver for mline %d not found", i); ++ goto done; + } ++ webrtc_transceiver_set_transport (WEBRTC_TRANSCEIVER (trans), stream); ++ ++ _update_transceiver_from_sdp_media (webrtc, sdp->sdp, i, stream, ++ trans, bundled, bundle_idx, error); ++ if (error && *error) ++ goto done; ++ } else if (_message_media_is_datachannel (sdp->sdp, i)) { ++ _update_data_channel_from_sdp_media (webrtc, sdp->sdp, i, stream, error); ++ if (error && *error) ++ goto done; ++ } else { ++ GST_ERROR_OBJECT (webrtc, "Unknown media type in SDP at index %u", i); + } + } + +@@ -6413,6 +6305,210 @@ get_last_generated_description (GstWebRTCBin * webrtc, SDPSource source, + return NULL; + } + ++/* https://w3c.github.io/webrtc-pc/#set-description (steps in 4.6.10.) */ ++static gboolean ++_create_and_associate_transceivers_from_sdp (GstWebRTCBin * webrtc, ++ struct set_description *sd, GError ** error) ++{ ++ gboolean ret = FALSE; ++ GStrv bundled = NULL; ++ guint bundle_idx = 0; ++ int i; ++ ++ if (sd->sdp->type == GST_WEBRTC_SDP_TYPE_ROLLBACK) { ++ /* FIXME: ++ * If the mid value of an RTCRtpTransceiver was set to a non-null value ++ * by the RTCSessionDescription that is being rolled back, set the mid ++ * value of that transceiver to null, as described by [JSEP] ++ * (section 4.1.7.2.). ++ * If an RTCRtpTransceiver was created by applying the ++ * RTCSessionDescription that is being rolled back, and a track has not ++ * been attached to it via addTrack, remove that transceiver from ++ * connection's set of transceivers, as described by [JSEP] ++ * (section 4.1.7.2.). ++ * Restore the value of connection's [[ sctpTransport]] internal slot ++ * to its value at the last stable signaling state. 
++ */ ++ return ret; ++ } ++ ++ /* FIXME: With some peers, it's possible we could have ++ * multiple bundles to deal with, although I've never seen one yet */ ++ if (webrtc->bundle_policy != GST_WEBRTC_BUNDLE_POLICY_NONE) ++ if (!_parse_bundle (sd->sdp->sdp, &bundled, error)) ++ goto out; ++ ++ if (bundled) { ++ if (!_get_bundle_index (sd->sdp->sdp, bundled, &bundle_idx)) { ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ "Bundle tag is %s but no media found matching", bundled[0]); ++ goto out; ++ } ++ } ++ ++ for (i = 0; i < gst_sdp_message_medias_len (sd->sdp->sdp); i++) { ++ GstWebRTCRTPTransceiver *trans; ++ WebRTCTransceiver *wtrans; ++ const GstSDPMedia *media; ++ const gchar *mid; ++ guint transport_idx; ++ TransportStream *stream; ++ ++ if (_message_media_is_datachannel (sd->sdp->sdp, i)) ++ continue; ++ ++ media = gst_sdp_message_get_media (sd->sdp->sdp, i); ++ mid = gst_sdp_media_get_attribute_val (media, "mid"); ++ ++ /* XXX: not strictly required but a lot of functionality requires a mid */ ++ if (!mid) { ++ g_set_error (error, GST_WEBRTC_ERROR, GST_WEBRTC_ERROR_SDP_SYNTAX_ERROR, ++ "Missing mid attribute in media"); ++ goto out; ++ } ++ ++ if (bundled) ++ transport_idx = bundle_idx; ++ else ++ transport_idx = i; ++ ++ trans = _find_transceiver_for_mid (webrtc, mid); ++ ++ if (sd->source == SDP_LOCAL) { ++ /* If the media description was not yet associated with an RTCRtpTransceiver object then run the following steps: */ ++ if (!trans) { ++ /* Let transceiver be the RTCRtpTransceiver used to create the media description. */ ++ trans = _find_transceiver_for_pending_mid (webrtc, mid); ++ if (!trans) { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INVALID_STATE, ++ "Transceiver used to created media with mid %s not found", mid); ++ goto out; ++ } ++ wtrans = WEBRTC_TRANSCEIVER (trans); ++ if (wtrans->mline_locked && trans->mline != i) { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "Transceiver <%s> with mid %s has mline %d from session description " ++ "but transceiver has locked mline %u", ++ GST_OBJECT_NAME (trans), GST_STR_NULL (trans->mid), i, ++ trans->mline); ++ } ++ trans->mline = i; ++ /* Set transceiver.[[Mid]] to transceiver.[[JsepMid]] */ ++ g_free (trans->mid); ++ trans->mid = g_strdup (mid); ++ g_object_notify (G_OBJECT (trans), "mid"); ++ /* If transceiver.[[Stopped]] is true, abort these sub steps */ ++ if (trans->stopped) ++ continue; ++ /* If the media description is indicated as using an existing media transport according to [RFC8843], let ++ * transport be the RTCDtlsTransport object representing the RTP/RTCP component of that transport. ++ * Otherwise, let transport be a newly created RTCDtlsTransport object with a new underlying RTCIceTransport. ++ */ ++ stream = _get_or_create_transport_stream (webrtc, transport_idx, FALSE); ++ webrtc_transceiver_set_transport (wtrans, stream); ++ } ++ } else { ++ if (!trans) { ++ /* RFC9429: If the "m=" section is "sendrecv" or "recvonly", and there are RtpTransceivers of the same type ++ * that were added to the PeerConnection by addTrack and are not associated with any "m=" section ++ * and are not stopped, find the first (according to the canonical order described in Section 5.2.1) ++ * such RtpTransceiver. 
*/ ++ GstWebRTCRTPTransceiverDirection direction = ++ _get_direction_from_media (media); ++ if (direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV ++ || direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY) { ++ int j; ++ for (j = 0; j < webrtc->priv->transceivers->len; ++j) { ++ trans = g_ptr_array_index (webrtc->priv->transceivers, j); ++ if (trans->mid || trans->stopped) { ++ trans = NULL; ++ continue; ++ } ++ ++ /* FIXME: Here we shouldn't in theory need to match caps, as the spec says only about ++ * "RtpTransceivers of the same type". However, transceivers created by requesting sink ++ * pads (aka addTrack) may still have unknown type at this point. We may be missing updating ++ * the transceiver type early enough during caps negotation. ++ */ ++ GstCaps *trans_caps = ++ _find_codec_preferences (webrtc, trans, i, error); ++ if (error && *error) ++ goto out; ++ ++ if (trans_caps) { ++ GstCaps *offer_caps = _rtp_caps_from_media (media); ++ GstCaps *caps = gst_caps_intersect (offer_caps, trans_caps); ++ gst_caps_unref (offer_caps); ++ gst_caps_unref (trans_caps); ++ if (caps) { ++ if (!gst_caps_is_empty (caps)) { ++ GST_LOG_OBJECT (webrtc, ++ "found compatible transceiver %" GST_PTR_FORMAT ++ " for offer media %u", trans, i); ++ gst_caps_unref (caps); ++ break; ++ } ++ gst_caps_unref (caps); ++ caps = NULL; ++ } ++ } ++ trans = NULL; ++ } ++ } ++ } ++ ++ /* If no RtpTransceiver was found in the previous step, create one with a "recvonly" direction. */ ++ if (!trans) { ++ wtrans = _create_webrtc_transceiver (webrtc, ++ GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY, i, ++ _get_kind_from_media (media), NULL); ++ trans = GST_WEBRTC_RTP_TRANSCEIVER (wtrans); ++ ++ PC_UNLOCK (webrtc); ++ g_signal_emit (webrtc, ++ gst_webrtc_bin_signals[ON_NEW_TRANSCEIVER_SIGNAL], 0, trans); ++ PC_LOCK (webrtc); ++ } ++ ++ /* Associate the found or created RtpTransceiver with the "m=" section by setting the value of ++ * the RtpTransceiver's mid property to the MID of the "m=" section, and establish a mapping ++ * between the transceiver and the index of the "m=" section. */ ++ wtrans = WEBRTC_TRANSCEIVER (trans); ++ if (wtrans->mline_locked && trans->mline != i) { ++ g_set_error (error, GST_WEBRTC_ERROR, ++ GST_WEBRTC_ERROR_INTERNAL_FAILURE, ++ "Transceiver <%s> with mid %s has mline %d from session description " ++ "but transceiver has locked mline %u", ++ GST_OBJECT_NAME (trans), GST_STR_NULL (trans->mid), i, ++ trans->mline); ++ } ++ trans->mline = i; ++ g_free (trans->mid); ++ trans->mid = g_strdup (mid); ++ g_object_notify (G_OBJECT (trans), "mid"); ++ ++ /* If description is of type "answer" or "pranswer", then run the following steps: */ ++ if (sd->sdp->type == GST_WEBRTC_SDP_TYPE_ANSWER ++ || sd->sdp->type == GST_WEBRTC_SDP_TYPE_PRANSWER) { ++ /* Set transceiver.[[CurrentDirection]] to direction. */ ++ trans->current_direction = _get_direction_from_media (media); ++ } ++ /* Let transport be the RTCDtlsTransport object representing the RTP/RTCP component of the media transport ++ * used by transceiver's associated media description, according to [RFC8843]. 
*/ ++ if (!wtrans->stream) { ++ stream = _get_or_create_transport_stream (webrtc, transport_idx, FALSE); ++ webrtc_transceiver_set_transport (wtrans, stream); ++ } ++ } ++ } ++ ++ ret = TRUE; ++out: ++ g_strfreev (bundled); ++ return ret; ++} + + /* http://w3c.github.io/webrtc-pc/#set-description */ + static GstStructure * +@@ -6575,21 +6671,8 @@ _set_description_task (GstWebRTCBin * webrtc, struct set_description *sd) + } + } + +- if (sd->sdp->type == GST_WEBRTC_SDP_TYPE_ROLLBACK) { +- /* FIXME: +- * If the mid value of an RTCRtpTransceiver was set to a non-null value +- * by the RTCSessionDescription that is being rolled back, set the mid +- * value of that transceiver to null, as described by [JSEP] +- * (section 4.1.7.2.). +- * If an RTCRtpTransceiver was created by applying the +- * RTCSessionDescription that is being rolled back, and a track has not +- * been attached to it via addTrack, remove that transceiver from +- * connection's set of transceivers, as described by [JSEP] +- * (section 4.1.7.2.). +- * Restore the value of connection's [[ sctpTransport]] internal slot +- * to its value at the last stable signaling state. +- */ +- } ++ if (!_create_and_associate_transceivers_from_sdp (webrtc, sd, &error)) ++ goto out; + + if (webrtc->signaling_state != new_signaling_state) { + webrtc->signaling_state = new_signaling_state; +@@ -6649,6 +6732,12 @@ _set_description_task (GstWebRTCBin * webrtc, struct set_description *sd) + continue; + } + ++ if (!pad->trans->mid) { ++ GST_DEBUG_OBJECT (pad, "transceiver not associated. Skipping"); ++ tmp = tmp->next; ++ continue; ++ } ++ + media = gst_sdp_message_get_media (sd->sdp->sdp, pad->trans->mline); + /* skip rejected media */ + if (gst_sdp_media_get_port (media) == 0) { +diff --git a/ext/webrtc/webrtcsdp.c b/ext/webrtc/webrtcsdp.c +index 67c8143d9..0ece0c625 100644 +--- a/ext/webrtc/webrtcsdp.c ++++ b/ext/webrtc/webrtcsdp.c +@@ -399,6 +399,17 @@ _get_direction_from_media (const GstSDPMedia * media) + return new_dir; + } + ++GstWebRTCKind ++_get_kind_from_media (const GstSDPMedia * media) ++{ ++ GstWebRTCKind kind = GST_WEBRTC_KIND_UNKNOWN; ++ if (!g_strcmp0 (gst_sdp_media_get_media (media), "audio")) ++ kind = GST_WEBRTC_KIND_AUDIO; ++ else if (!g_strcmp0 (gst_sdp_media_get_media (media), "video")) ++ kind = GST_WEBRTC_KIND_VIDEO; ++ return kind; ++} ++ + #define DIR(val) GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_ ## val + GstWebRTCRTPTransceiverDirection + _intersect_answer_directions (GstWebRTCRTPTransceiverDirection offer, +diff --git a/ext/webrtc/webrtcsdp.h b/ext/webrtc/webrtcsdp.h +index 80d21203c..abeb5dba3 100644 +--- a/ext/webrtc/webrtcsdp.h ++++ b/ext/webrtc/webrtcsdp.h +@@ -46,6 +46,8 @@ gboolean validate_sdp (Gst + G_GNUC_INTERNAL + GstWebRTCRTPTransceiverDirection _get_direction_from_media (const GstSDPMedia * media); + G_GNUC_INTERNAL ++GstWebRTCKind _get_kind_from_media (const GstSDPMedia * media); ++G_GNUC_INTERNAL + GstWebRTCRTPTransceiverDirection _intersect_answer_directions (GstWebRTCRTPTransceiverDirection offer, + GstWebRTCRTPTransceiverDirection answer); + G_GNUC_INTERNAL +-- +2.47.1 + + +From a2cfcaa6abbe17642159ce09c13cd7deaba4c2e7 Mon Sep 17 00:00:00 2001 +From: Carlos Bentzen +Date: Thu, 15 Aug 2024 15:54:24 +0200 +Subject: [PATCH 2/3] webrtcbin: reverse direction from remote media + +This had been overlooked from the spec. We need to reverse +the remote media direction when setting the transceiver direction. 
+ +Part-of: +--- + ext/webrtc/gstwebrtcbin.c | 29 ++++++++++++++++++++++++++--- + 1 file changed, 26 insertions(+), 3 deletions(-) + +diff --git a/ext/webrtc/gstwebrtcbin.c b/ext/webrtc/gstwebrtcbin.c +index 0ee3fb6b2..49694cd6e 100644 +--- a/ext/webrtc/gstwebrtcbin.c ++++ b/ext/webrtc/gstwebrtcbin.c +@@ -6305,6 +6305,22 @@ get_last_generated_description (GstWebRTCBin * webrtc, SDPSource source, + return NULL; + } + ++static GstWebRTCRTPTransceiverDirection ++_reverse_direction (GstWebRTCRTPTransceiverDirection direction) ++{ ++ switch (direction) { ++ case GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE: ++ case GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE: ++ case GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV: ++ return direction; ++ case GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY: ++ return GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY; ++ case GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY: ++ return GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY; ++ } ++ return GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_NONE; ++} ++ + /* https://w3c.github.io/webrtc-pc/#set-description (steps in 4.6.10.) */ + static gboolean + _create_and_associate_transceivers_from_sdp (GstWebRTCBin * webrtc, +@@ -6353,12 +6369,14 @@ _create_and_associate_transceivers_from_sdp (GstWebRTCBin * webrtc, + const gchar *mid; + guint transport_idx; + TransportStream *stream; ++ GstWebRTCRTPTransceiverDirection direction; + + if (_message_media_is_datachannel (sd->sdp->sdp, i)) + continue; + + media = gst_sdp_message_get_media (sd->sdp->sdp, i); + mid = gst_sdp_media_get_attribute_val (media, "mid"); ++ direction = _get_direction_from_media (media); + + /* XXX: not strictly required but a lot of functionality requires a mid */ + if (!mid) { +@@ -6415,8 +6433,6 @@ _create_and_associate_transceivers_from_sdp (GstWebRTCBin * webrtc, + * that were added to the PeerConnection by addTrack and are not associated with any "m=" section + * and are not stopped, find the first (according to the canonical order described in Section 5.2.1) + * such RtpTransceiver. */ +- GstWebRTCRTPTransceiverDirection direction = +- _get_direction_from_media (media); + if (direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV + || direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY) { + int j; +@@ -6489,11 +6505,18 @@ _create_and_associate_transceivers_from_sdp (GstWebRTCBin * webrtc, + trans->mid = g_strdup (mid); + g_object_notify (G_OBJECT (trans), "mid"); + ++ /* Let direction be an RTCRtpTransceiverDirection value representing the direction from the media ++ description, but with the send and receive directions reversed to represent this peer's point of view. */ ++ direction = _reverse_direction (direction); ++ /* If the media description is rejected, set direction to "inactive". */ ++ if (gst_sdp_media_get_port (media) == 0) ++ direction = GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_INACTIVE; ++ + /* If description is of type "answer" or "pranswer", then run the following steps: */ + if (sd->sdp->type == GST_WEBRTC_SDP_TYPE_ANSWER + || sd->sdp->type == GST_WEBRTC_SDP_TYPE_PRANSWER) { + /* Set transceiver.[[CurrentDirection]] to direction. */ +- trans->current_direction = _get_direction_from_media (media); ++ trans->current_direction = direction; + } + /* Let transport be the RTCDtlsTransport object representing the RTP/RTCP component of the media transport + * used by transceiver's associated media description, according to [RFC8843]. 
*/ +-- +2.47.1 + + +From 5a80a89f6cb3100b94f3ec3b7344eb64f2f5ea6b Mon Sep 17 00:00:00 2001 +From: Carlos Bentzen +Date: Thu, 15 Aug 2024 15:55:43 +0200 +Subject: [PATCH 3/3] webrtcbin: connect output stream on recv transceivers + +With MR 7156, transceivers and transports are created earlier, +but for sendrecv media we could get `not-linked` errors due to +transportreceivebin not being connected to rtpbin yet when incoming +data arrives. + +This condition wasn't being tested in elements_webrtcbin, but could be +reproduced in the webrtcbidirectional example. This commit now also +adds a test for this, so that this doesn't regress anymore. + +Part-of: +--- + ext/webrtc/gstwebrtcbin.c | 6 ++++++ + 1 file changed, 6 insertions(+) + +diff --git a/ext/webrtc/gstwebrtcbin.c b/ext/webrtc/gstwebrtcbin.c +index 49694cd6e..d26c814d2 100644 +--- a/ext/webrtc/gstwebrtcbin.c ++++ b/ext/webrtc/gstwebrtcbin.c +@@ -6525,6 +6525,12 @@ _create_and_associate_transceivers_from_sdp (GstWebRTCBin * webrtc, + webrtc_transceiver_set_transport (wtrans, stream); + } + } ++ ++ wtrans = WEBRTC_TRANSCEIVER (trans); ++ if (wtrans->stream ++ && (direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV ++ || direction == GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY)) ++ _connect_output_stream (webrtc, wtrans->stream, transport_idx); + } + + ret = TRUE; +-- +2.47.1 + diff --git a/package/gstreamer1/gst1-plugins-bad/gst1-plugins-bad.mk b/package/gstreamer1/gst1-plugins-bad/gst1-plugins-bad.mk index d95fc00183b4..c2a194267aa2 100644 --- a/package/gstreamer1/gst1-plugins-bad/gst1-plugins-bad.mk +++ b/package/gstreamer1/gst1-plugins-bad/gst1-plugins-bad.mk @@ -78,6 +78,7 @@ GST1_PLUGINS_BAD_CONF_OPTS += \ -Dgme=disabled \ -Dspandsp=disabled \ -Diqa=disabled \ + -Dnvcodec=disabled \ -Dopencv=disabled GST1_PLUGINS_BAD_DEPENDENCIES = gst1-plugins-base gstreamer1 @@ -767,11 +768,18 @@ endif ifeq ($(BR2_PACKAGE_GST1_PLUGINS_BAD_PLUGIN_DASH_MPD_NO_INIT_DATA_XML_PARSING),y) define GST1_PLUGINS_BAD_APPLY_MPD_EXTRA_PATCHES_POST_HOOK - cd $(@D) && { for P in ../../../package/gstreamer1/gst1-plugins-bad/$(GST1_PLUGINS_BAD_VERSION)-mpd-extra/*.patch; do patch -p1 < "$$P" ; done; } + cd $(@D) && { for P in $(TOPDIR)/$(GST1_PLUGINS_BAD_PKGDIR)/$(GST1_PLUGINS_BAD_VERSION)-mpd-extra/*.patch; do patch -p1 < "$$P" ; done; } endef GST1_PLUGINS_BAD_POST_PATCH_HOOKS += GST1_PLUGINS_BAD_APPLY_MPD_EXTRA_PATCHES_POST_HOOK endif +ifeq ($(BR2_PACKAGE_WPEWEBKIT_USE_GSTREAMER_WEBRTC),y) +define GST1_PLUGINS_BAD_APPLY_GSTWEBRTC_PATCHES_POST_HOOK + cd $(@D) && { for P in $(TOPDIR)/$(GST1_PLUGINS_BAD_PKGDIR)/$(GST1_PLUGINS_BAD_VERSION)-gstwebrtc/*.patch; do patch -p1 < "$$P" ; done; } +endef +GST1_PLUGINS_BAD_POST_PATCH_HOOKS += GST1_PLUGINS_BAD_APPLY_GSTWEBRTC_PATCHES_POST_HOOK +endif + # Use the following command to extract license info for plugins. # # find . 
-name 'plugin-*.xml' | xargs grep license diff --git a/package/gstreamer1/gst1-plugins-base/1.18.6-gstwebrtc/0001-Reapply-appsink-Reuse-sample-object-in-pull_sample-i.patch b/package/gstreamer1/gst1-plugins-base/1.18.6-gstwebrtc/0001-Reapply-appsink-Reuse-sample-object-in-pull_sample-i.patch new file mode 100644 index 000000000000..7f6ac613f72f --- /dev/null +++ b/package/gstreamer1/gst1-plugins-base/1.18.6-gstwebrtc/0001-Reapply-appsink-Reuse-sample-object-in-pull_sample-i.patch @@ -0,0 +1,170 @@ +From 5794fd4a1cf4cd2fbadcf969021cff4fb9bfe4f5 Mon Sep 17 00:00:00 2001 +From: Carlos Bentzen +Date: Thu, 19 Dec 2024 18:28:22 +0100 +Subject: [PATCH] Reapply "appsink: Reuse sample object in pull_sample if + possible" + +This reverts commit 2024e3401122eb9e4c3767aed2c2f64ef0f2ff53. +--- + gst-libs/gst/app/gstappsink.c | 29 +++++++++++++++++--- + tests/check/elements/appsink.c | 49 ++++++++++++++++++++++++++++++++++ + 2 files changed, 74 insertions(+), 4 deletions(-) + +diff --git a/gst-libs/gst/app/gstappsink.c b/gst-libs/gst/app/gstappsink.c +index abd56e802..cfcb1bf6d 100644 +--- a/gst-libs/gst/app/gstappsink.c ++++ b/gst-libs/gst/app/gstappsink.c +@@ -133,6 +133,8 @@ struct _GstAppSinkPrivate + gboolean buffer_lists_supported; + + Callbacks *callbacks; ++ ++ GstSample *sample; + }; + + GST_DEBUG_CATEGORY_STATIC (app_sink_debug); +@@ -484,6 +486,7 @@ gst_app_sink_init (GstAppSink * appsink) + g_mutex_init (&priv->mutex); + g_cond_init (&priv->cond); + priv->queue = gst_queue_array_new (16); ++ priv->sample = gst_sample_new (NULL, NULL, NULL, NULL); + + priv->emit_signals = DEFAULT_PROP_EMIT_SIGNALS; + priv->max_buffers = DEFAULT_PROP_MAX_BUFFERS; +@@ -516,6 +519,10 @@ gst_app_sink_dispose (GObject * obj) + gst_buffer_replace (&priv->preroll_buffer, NULL); + gst_caps_replace (&priv->preroll_caps, NULL); + gst_caps_replace (&priv->last_caps, NULL); ++ if (priv->sample) { ++ gst_sample_unref (priv->sample); ++ priv->sample = NULL; ++ } + g_mutex_unlock (&priv->mutex); + + g_clear_pointer (&callbacks, callbacks_unref); +@@ -668,6 +675,11 @@ gst_app_sink_start (GstBaseSink * psink) + priv->started = TRUE; + gst_segment_init (&priv->preroll_segment, GST_FORMAT_TIME); + gst_segment_init (&priv->last_segment, GST_FORMAT_TIME); ++ priv->sample = gst_sample_make_writable (priv->sample); ++ gst_sample_set_buffer (priv->sample, NULL); ++ gst_sample_set_buffer_list (priv->sample, NULL); ++ gst_sample_set_caps (priv->sample, NULL); ++ gst_sample_set_segment (priv->sample, NULL); + g_mutex_unlock (&priv->mutex); + + return TRUE; +@@ -865,10 +877,14 @@ dequeue_buffer (GstAppSink * appsink) + gst_event_parse_caps (event, &caps); + GST_DEBUG_OBJECT (appsink, "activating caps %" GST_PTR_FORMAT, caps); + gst_caps_replace (&priv->last_caps, caps); ++ priv->sample = gst_sample_make_writable (priv->sample); ++ gst_sample_set_caps (priv->sample, priv->last_caps); + break; + } + case GST_EVENT_SEGMENT: + gst_event_copy_segment (event, &priv->last_segment); ++ priv->sample = gst_sample_make_writable (priv->sample); ++ gst_sample_set_segment (priv->sample, &priv->last_segment); + GST_DEBUG_OBJECT (appsink, "activated segment %" GST_SEGMENT_FORMAT, + &priv->last_segment); + break; +@@ -902,6 +918,7 @@ restart: + if (G_UNLIKELY (!priv->last_caps && + gst_pad_has_current_caps (GST_BASE_SINK_PAD (psink)))) { + priv->last_caps = gst_pad_get_current_caps (GST_BASE_SINK_PAD (psink)); ++ gst_sample_set_caps (priv->sample, priv->last_caps); + GST_DEBUG_OBJECT (appsink, "activating pad caps %" GST_PTR_FORMAT, + priv->last_caps); 
+ } +@@ -1690,12 +1707,16 @@ gst_app_sink_try_pull_sample (GstAppSink * appsink, GstClockTime timeout) + obj = dequeue_buffer (appsink); + if (GST_IS_BUFFER (obj)) { + GST_DEBUG_OBJECT (appsink, "we have a buffer %p", obj); +- sample = gst_sample_new (GST_BUFFER_CAST (obj), priv->last_caps, +- &priv->last_segment, NULL); ++ priv->sample = gst_sample_make_writable (priv->sample); ++ gst_sample_set_buffer_list (priv->sample, NULL); ++ gst_sample_set_buffer (priv->sample, GST_BUFFER_CAST (obj)); ++ sample = gst_sample_ref (priv->sample); + } else { + GST_DEBUG_OBJECT (appsink, "we have a list %p", obj); +- sample = gst_sample_new (NULL, priv->last_caps, &priv->last_segment, NULL); +- gst_sample_set_buffer_list (sample, GST_BUFFER_LIST_CAST (obj)); ++ priv->sample = gst_sample_make_writable (priv->sample); ++ gst_sample_set_buffer (priv->sample, NULL); ++ gst_sample_set_buffer_list (priv->sample, GST_BUFFER_LIST_CAST (obj)); ++ sample = gst_sample_ref (priv->sample); + } + gst_mini_object_unref (obj); + +diff --git a/tests/check/elements/appsink.c b/tests/check/elements/appsink.c +index 03adf063a..9acbdcb9b 100644 +--- a/tests/check/elements/appsink.c ++++ b/tests/check/elements/appsink.c +@@ -666,6 +666,54 @@ GST_START_TEST (test_query_drain) + + GST_END_TEST; + ++GST_START_TEST (test_pull_sample_refcounts) ++{ ++ GstElement *sink; ++ GstBuffer *buffer; ++ GstSample *s1, *s2, *s3; ++ ++ sink = setup_appsink (); ++ ++ ASSERT_SET_STATE (sink, GST_STATE_PLAYING, GST_STATE_CHANGE_ASYNC); ++ ++ buffer = gst_buffer_new_and_alloc (4); ++ fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK); ++ ++ s1 = gst_app_sink_pull_sample (GST_APP_SINK (sink)); ++ fail_unless (s1 != NULL); ++ fail_unless (gst_buffer_get_size (gst_sample_get_buffer (s1)) == 4); ++ gst_sample_unref (s1); ++ ++ buffer = gst_buffer_new_and_alloc (6); ++ fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK); ++ s2 = gst_app_sink_pull_sample (GST_APP_SINK (sink)); ++ fail_unless (s2 != NULL); ++ fail_unless (gst_buffer_get_size (gst_sample_get_buffer (s2)) == 6); ++ ++ /* We unreffed s1, appsink should thus reuse the same sample, ++ * avoiding an extra allocation */ ++ fail_unless (s1 == s2); ++ ++ buffer = gst_buffer_new_and_alloc (8); ++ fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK); ++ s3 = gst_app_sink_pull_sample (GST_APP_SINK (sink)); ++ fail_unless (s3 != NULL); ++ fail_unless (gst_buffer_get_size (gst_sample_get_buffer (s2)) == 6); ++ fail_unless (gst_buffer_get_size (gst_sample_get_buffer (s3)) == 8); ++ ++ ++ /* We didn't unref s2, appsink should thus have created a new sample */ ++ fail_unless (s2 != s3); ++ ++ gst_sample_unref (s2); ++ gst_sample_unref (s3); ++ ++ ASSERT_SET_STATE (sink, GST_STATE_NULL, GST_STATE_CHANGE_SUCCESS); ++ cleanup_appsink (sink); ++} ++ ++GST_END_TEST; ++ + static Suite * + appsink_suite (void) + { +@@ -686,6 +734,7 @@ appsink_suite (void) + tcase_add_test (tc_chain, test_query_drain); + tcase_add_test (tc_chain, test_pull_preroll); + tcase_add_test (tc_chain, test_do_not_care_preroll); ++ tcase_add_test (tc_chain, test_pull_sample_refcounts); + + return s; + } +-- +2.47.1 + diff --git a/package/gstreamer1/gst1-plugins-base/1.18.6-gstwebrtc/0002-Backports-from-GStreamer-1.22.patch b/package/gstreamer1/gst1-plugins-base/1.18.6-gstwebrtc/0002-Backports-from-GStreamer-1.22.patch new file mode 100644 index 000000000000..4c8cd7704121 --- /dev/null +++ b/package/gstreamer1/gst1-plugins-base/1.18.6-gstwebrtc/0002-Backports-from-GStreamer-1.22.patch @@ -0,0 +1,14696 @@ +From 
985a1aa557272dc0c0b66c39285ce5d0feb9dad5 Mon Sep 17 00:00:00 2001 +From: Philippe Normand +Date: Thu, 19 Jan 2023 14:51:08 +0000 +Subject: [PATCH] Backports from GStreamer 1.22 + +--- + gst-libs/gst/app/gstappsink.c | 322 +++++- + gst-libs/gst/app/gstappsink.h | 28 +- + gst-libs/gst/app/gstappsrc.c | 844 ++++++++++++++-- + gst-libs/gst/app/gstappsrc.h | 41 + + gst-libs/gst/audio/audio-buffer.c | 4 +- + gst-libs/gst/audio/audio-channel-mixer.c | 9 +- + gst-libs/gst/audio/audio-converter.c | 17 +- + gst-libs/gst/audio/audio-converter.h | 11 + + gst-libs/gst/audio/audio-format.c | 22 +- + gst-libs/gst/audio/audio-format.h | 19 +- + gst-libs/gst/audio/audio-info.c | 26 +- + gst-libs/gst/audio/audio-info.h | 5 +- + gst-libs/gst/audio/audio-quantize.c | 33 +- + gst-libs/gst/audio/audio-resampler.c | 5 +- + gst-libs/gst/audio/audio.c | 41 +- + gst-libs/gst/audio/gstaudiobasesink.c | 6 +- + gst-libs/gst/audio/gstaudiobasesink.h | 2 +- + gst-libs/gst/audio/gstaudiobasesrc.c | 17 +- + gst-libs/gst/audio/gstaudiocdsrc.c | 4 +- + gst-libs/gst/audio/gstaudiodecoder.c | 110 +- + gst-libs/gst/audio/gstaudiodecoder.h | 7 +- + gst-libs/gst/audio/gstaudioencoder.c | 154 ++- + gst-libs/gst/audio/gstaudiofilter.c | 5 + + gst-libs/gst/audio/gstaudiometa.c | 128 +++ + gst-libs/gst/audio/gstaudiometa.h | 49 + + gst-libs/gst/audio/gstaudioringbuffer.c | 65 +- + gst-libs/gst/audio/gstaudiosink.c | 5 +- + gst-libs/gst/audio/gstaudiosink.h | 7 +- + gst-libs/gst/audio/gstaudiosrc.c | 2 +- + gst-libs/gst/audio/gstaudiosrc.h | 8 +- + gst-libs/gst/audio/gstaudiostreamalign.c | 17 +- + gst-libs/gst/audio/gstaudiostreamalign.h | 10 +- + gst-libs/gst/audio/gstaudioutilsprivate.c | 10 +- + gst-libs/gst/meson.build | 2 +- + gst-libs/gst/pbutils/codec-utils.c | 739 +++++++++++++- + gst-libs/gst/pbutils/codec-utils.h | 14 + + gst-libs/gst/pbutils/descriptions.c | 109 +- + gst-libs/gst/pbutils/descriptions.h | 48 + + gst-libs/gst/pbutils/pbutils-private.h | 4 + + gst-libs/gst/pbutils/pbutils.c | 22 +- + gst-libs/gst/rtp/gstrtcpbuffer.c | 53 +- + gst-libs/gst/rtp/gstrtcpbuffer.h | 80 +- + gst-libs/gst/rtp/gstrtpbasedepayload.c | 632 +++++++++++- + gst-libs/gst/rtp/gstrtpbasepayload.c | 667 ++++++++++-- + gst-libs/gst/rtp/gstrtpbasepayload.h | 4 + + gst-libs/gst/rtp/gstrtpbuffer.c | 90 +- + gst-libs/gst/rtp/gstrtpbuffer.h | 3 + + gst-libs/gst/rtp/gstrtphdrext.c | 779 ++++++++++++++ + gst-libs/gst/rtp/gstrtphdrext.h | 238 +++++ + gst-libs/gst/rtp/gstrtpmeta.c | 10 +- + gst-libs/gst/rtp/gstrtppayloads.c | 4 +- + gst-libs/gst/sdp/gstmikey.c | 52 +- + gst-libs/gst/sdp/gstsdpmessage.c | 378 ++++++- + gst-libs/gst/sdp/meson.build | 14 +- + gst-libs/gst/video/convertframe.c | 129 ++- + gst-libs/gst/video/gstvideocodecalphameta.c | 155 +++ + gst-libs/gst/video/gstvideocodecalphameta.h | 88 ++ + gst-libs/gst/video/gstvideodecoder.c | 1012 ++++++++++++++++--- + gst-libs/gst/video/gstvideodecoder.h | 79 +- + gst-libs/gst/video/gstvideoencoder.c | 152 +-- + gst-libs/gst/video/gstvideometa.c | 36 +- + gst-libs/gst/video/gstvideopool.c | 2 +- + gst-libs/gst/video/gstvideopool.h | 2 +- + gst-libs/gst/video/gstvideosink.c | 149 ++- + gst-libs/gst/video/gstvideosink.h | 22 +- + gst-libs/gst/video/gstvideotimecode.c | 118 ++- + gst-libs/gst/video/gstvideoutils.c | 4 + + gst-libs/gst/video/gstvideoutils.h | 45 +- + gst-libs/gst/video/meson.build | 18 +- + gst-libs/gst/video/navigation.c | 586 ++++++++++- + gst-libs/gst/video/navigation.h | 276 ++++- + gst-libs/gst/video/video-chroma.c | 122 ++- + gst-libs/gst/video/video-chroma.h | 10 +- + 
gst-libs/gst/video/video-color.c | 132 ++- + gst-libs/gst/video/video-color.h | 18 +- + gst-libs/gst/video/video-hdr.c | 41 +- + gst-libs/gst/video/video-hdr.h | 4 + + gst-libs/gst/video/video-sei.c | 236 +++++ + gst-libs/gst/video/video-sei.h | 110 ++ + gst-libs/gst/video/video.c | 87 ++ + gst-libs/gst/video/video.h | 26 +- + 81 files changed, 8686 insertions(+), 948 deletions(-) + create mode 100644 gst-libs/gst/video/gstvideocodecalphameta.c + create mode 100644 gst-libs/gst/video/gstvideocodecalphameta.h + create mode 100644 gst-libs/gst/video/video-sei.c + create mode 100644 gst-libs/gst/video/video-sei.h + +diff --git a/gst-libs/gst/app/gstappsink.c b/gst-libs/gst/app/gstappsink.c +index cfcb1bf6d..6777ff230 100644 +--- a/gst-libs/gst/app/gstappsink.c ++++ b/gst-libs/gst/app/gstappsink.c +@@ -113,6 +113,7 @@ struct _GstAppSinkPrivate + GstCaps *caps; + gboolean emit_signals; + guint num_buffers; ++ guint num_events; + guint max_buffers; + gboolean drop; + gboolean wait_on_eos; +@@ -146,12 +147,14 @@ enum + SIGNAL_EOS, + SIGNAL_NEW_PREROLL, + SIGNAL_NEW_SAMPLE, ++ SIGNAL_NEW_SERIALIZED_EVENT, + + /* actions */ + SIGNAL_PULL_PREROLL, + SIGNAL_PULL_SAMPLE, + SIGNAL_TRY_PULL_PREROLL, + SIGNAL_TRY_PULL_SAMPLE, ++ SIGNAL_TRY_PULL_OBJECT, + + LAST_SIGNAL + }; +@@ -332,6 +335,34 @@ gst_app_sink_class_init (GstAppSinkClass * klass) + G_STRUCT_OFFSET (GstAppSinkClass, new_sample), + NULL, NULL, NULL, GST_TYPE_FLOW_RETURN, 0, G_TYPE_NONE); + ++ /** ++ * GstAppSink::new-serialized-event: ++ * @appsink: the appsink element that emitted the signal ++ * ++ * Signal that a new downstream serialized event is available. ++ * ++ * This signal is emitted from the streaming thread and only when the ++ * "emit-signals" property is %TRUE. ++ * ++ * The new event can be retrieved with the "try-pull-object" action ++ * signal or gst_app_sink_pull_object() either from this signal callback ++ * or from any other thread. ++ * ++ * EOS will not be notified using this signal, use #GstAppSink::eos instead. ++ * EOS cannot be pulled either, use gst_app_sink_is_eos() to check for it. ++ * ++ * Note that this signal is only emitted when the "emit-signals" property is ++ * set to %TRUE, which it is not by default for performance reasons. ++ * ++ * The callback should return %TRUE if the event has been handled, which will ++ * skip basesink handling of the event, %FALSE otherwise. ++ * ++ * Since: 1.20 ++ */ ++ gst_app_sink_signals[SIGNAL_NEW_SERIALIZED_EVENT] = ++ g_signal_new ("new-serialized-event", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_BOOLEAN, 0, G_TYPE_NONE); ++ + /** + * GstAppSink::pull-preroll: + * @appsink: the appsink element to emit this signal on +@@ -355,7 +386,7 @@ gst_app_sink_class_init (GstAppSinkClass * klass) + * This function blocks until a preroll sample or EOS is received or the appsink + * element is set to the READY/NULL state. + * +- * Returns: a #GstSample or NULL when the appsink is stopped or EOS. ++ * Returns: (nullable): a #GstSample or NULL when the appsink is stopped or EOS. + */ + gst_app_sink_signals[SIGNAL_PULL_PREROLL] = + g_signal_new ("pull-preroll", G_TYPE_FROM_CLASS (klass), +@@ -380,12 +411,13 @@ gst_app_sink_class_init (GstAppSinkClass * klass) + * If an EOS event was received before any buffers, this function returns + * %NULL. Use gst_app_sink_is_eos () to check for the EOS condition. + * +- * Returns: a #GstSample or NULL when the appsink is stopped or EOS. 
++ * Returns: (nullable): a #GstSample or NULL when the appsink is stopped or EOS. + */ + gst_app_sink_signals[SIGNAL_PULL_SAMPLE] = + g_signal_new ("pull-sample", G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstAppSinkClass, + pull_sample), NULL, NULL, NULL, GST_TYPE_SAMPLE, 0, G_TYPE_NONE); ++ + /** + * GstAppSink::try-pull-preroll: + * @appsink: the appsink element to emit this signal on +@@ -411,7 +443,7 @@ gst_app_sink_class_init (GstAppSinkClass * klass) + * This function blocks until a preroll sample or EOS is received, the appsink + * element is set to the READY/NULL state, or the timeout expires. + * +- * Returns: a #GstSample or NULL when the appsink is stopped or EOS or the timeout expires. ++ * Returns: (nullable): a #GstSample or NULL when the appsink is stopped or EOS or the timeout expires. + * + * Since: 1.10 + */ +@@ -441,7 +473,7 @@ gst_app_sink_class_init (GstAppSinkClass * klass) + * this function returns %NULL. Use gst_app_sink_is_eos () to check + * for the EOS condition. + * +- * Returns: a #GstSample or NULL when the appsink is stopped or EOS or the timeout expires. ++ * Returns: (nullable): a #GstSample or NULL when the appsink is stopped or EOS or the timeout expires. + * + * Since: 1.10 + */ +@@ -451,6 +483,44 @@ gst_app_sink_class_init (GstAppSinkClass * klass) + G_STRUCT_OFFSET (GstAppSinkClass, try_pull_sample), NULL, NULL, NULL, + GST_TYPE_SAMPLE, 1, GST_TYPE_CLOCK_TIME); + ++ /** ++ * GstAppSink::try-pull-object: ++ * @appsink: the appsink element to emit this signal on ++ * @timeout: the maximum amount of time to wait for a sample ++ * ++ * This function blocks until a sample or an event becomes available or the appsink ++ * element is set to the READY/NULL state or the timeout expires. ++ * ++ * This function will only return samples when the appsink is in the PLAYING ++ * state. All rendered samples and events will be put in a queue so that the application ++ * can pull them at its own rate. ++ * Events can be pulled when the appsink is in the READY, PAUSED or PLAYING state. ++ * ++ * Note that when the application does not pull samples fast enough, the ++ * queued samples could consume a lot of memory, especially when dealing with ++ * raw video frames. It's possible to control the behaviour of the queue with ++ * the "drop" and "max-buffers" properties. ++ * ++ * This function will only pull serialized events, excluding ++ * the EOS event for which this functions returns ++ * %NULL. Use gst_app_sink_is_eos() to check for the EOS condition. ++ * ++ * This signal is a variant of #GstAppSink::try-pull-sample: that can be used ++ * to handle incoming events as well as samples. ++ * ++ * Note that future releases may extend this API to return other object types ++ * so make sure that your code is checking for the actual type it is handling. ++ * ++ * Returns: (nullable) (transfer full): a #GstSample or a #GstEvent or NULL when the appsink is stopped or EOS or the timeout expires. 
++ * ++ * Since: 1.20 ++ */ ++ gst_app_sink_signals[SIGNAL_TRY_PULL_OBJECT] = ++ g_signal_new ("try-pull-object", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, ++ G_STRUCT_OFFSET (GstAppSinkClass, try_pull_object), NULL, NULL, NULL, ++ GST_TYPE_MINI_OBJECT, 1, GST_TYPE_CLOCK_TIME); ++ + gst_element_class_set_static_metadata (element_class, "AppSink", + "Generic/Sink", "Allow the application to get access to raw buffer", + "David Schleef , Wim Taymans "); +@@ -474,6 +544,7 @@ gst_app_sink_class_init (GstAppSinkClass * klass) + klass->pull_sample = gst_app_sink_pull_sample; + klass->try_pull_preroll = gst_app_sink_try_pull_preroll; + klass->try_pull_sample = gst_app_sink_try_pull_sample; ++ klass->try_pull_object = gst_app_sink_try_pull_object; + } + + static void +@@ -659,6 +730,8 @@ gst_app_sink_flush_unlocked (GstAppSink * appsink) + while ((obj = gst_queue_array_pop_head (priv->queue))) + gst_mini_object_unref (obj); + priv->num_buffers = 0; ++ priv->num_events = 0; ++ gst_caps_replace (&priv->last_caps, NULL); + g_cond_signal (&priv->cond); + } + +@@ -702,6 +775,11 @@ gst_app_sink_stop (GstBaseSink * psink) + gst_caps_replace (&priv->last_caps, NULL); + gst_segment_init (&priv->preroll_segment, GST_FORMAT_UNDEFINED); + gst_segment_init (&priv->last_segment, GST_FORMAT_UNDEFINED); ++ priv->sample = gst_sample_make_writable (priv->sample); ++ gst_sample_set_buffer (priv->sample, NULL); ++ gst_sample_set_buffer_list (priv->sample, NULL); ++ gst_sample_set_caps (priv->sample, NULL); ++ gst_sample_set_segment (priv->sample, NULL); + g_mutex_unlock (&priv->mutex); + + return TRUE; +@@ -716,6 +794,7 @@ gst_app_sink_setcaps (GstBaseSink * sink, GstCaps * caps) + g_mutex_lock (&priv->mutex); + GST_DEBUG_OBJECT (appsink, "receiving CAPS"); + gst_queue_array_push_tail (priv->queue, gst_event_new_caps (caps)); ++ priv->num_events++; + if (!priv->preroll_buffer) + gst_caps_replace (&priv->preroll_caps, caps); + g_mutex_unlock (&priv->mutex); +@@ -729,11 +808,12 @@ gst_app_sink_event (GstBaseSink * sink, GstEvent * event) + GstAppSink *appsink = GST_APP_SINK_CAST (sink); + GstAppSinkPrivate *priv = appsink->priv; + ++ GST_DEBUG_OBJECT (appsink, "%" GST_PTR_FORMAT, event); ++ + switch (event->type) { + case GST_EVENT_SEGMENT: + g_mutex_lock (&priv->mutex); + GST_DEBUG_OBJECT (appsink, "receiving SEGMENT"); +- gst_queue_array_push_tail (priv->queue, gst_event_ref (event)); + if (!priv->preroll_buffer) + gst_event_copy_segment (event, &priv->preroll_segment); + g_mutex_unlock (&priv->mutex); +@@ -805,6 +885,40 @@ gst_app_sink_event (GstBaseSink * sink, GstEvent * event) + default: + break; + } ++ ++ if (GST_EVENT_TYPE (event) != GST_EVENT_EOS ++ && GST_EVENT_IS_SERIALIZED (event)) { ++ gboolean emit; ++ Callbacks *callbacks = NULL; ++ gboolean ret; ++ ++ g_mutex_lock (&priv->mutex); ++ ++ emit = priv->emit_signals; ++ if (priv->callbacks) ++ callbacks = callbacks_ref (priv->callbacks); ++ ++ gst_queue_array_push_tail (priv->queue, gst_event_ref (event)); ++ priv->num_events++; ++ ++ g_mutex_unlock (&priv->mutex); ++ ++ if (callbacks && callbacks->callbacks.new_event) { ++ ret = callbacks->callbacks.new_event (appsink, callbacks->user_data); ++ } else { ++ ret = FALSE; ++ if (emit) ++ g_signal_emit (appsink, ++ gst_app_sink_signals[SIGNAL_NEW_SERIALIZED_EVENT], 0, &ret); ++ } ++ g_clear_pointer (&callbacks, callbacks_unref); ++ ++ if (ret) { ++ gst_event_unref (event); ++ return TRUE; ++ } ++ } ++ + return GST_BASE_SINK_CLASS (parent_class)->event (sink, event); + } + +@@ -854,45 
+968,61 @@ flushing: + } + + static GstMiniObject * +-dequeue_buffer (GstAppSink * appsink) ++dequeue_object (GstAppSink * appsink) + { + GstAppSinkPrivate *priv = appsink->priv; + GstMiniObject *obj; + ++ obj = gst_queue_array_pop_head (priv->queue); ++ ++ if (GST_IS_BUFFER (obj) || GST_IS_BUFFER_LIST (obj)) { ++ GST_DEBUG_OBJECT (appsink, "dequeued buffer/list %p", obj); ++ priv->num_buffers--; ++ } else if (GST_IS_EVENT (obj)) { ++ GstEvent *event = GST_EVENT_CAST (obj); ++ ++ priv->num_events--; ++ ++ switch (GST_EVENT_TYPE (obj)) { ++ case GST_EVENT_CAPS: ++ { ++ GstCaps *caps; ++ ++ gst_event_parse_caps (event, &caps); ++ GST_DEBUG_OBJECT (appsink, "activating caps %" GST_PTR_FORMAT, caps); ++ gst_caps_replace (&priv->last_caps, caps); ++ priv->sample = gst_sample_make_writable (priv->sample); ++ gst_sample_set_caps (priv->sample, priv->last_caps); ++ break; ++ } ++ case GST_EVENT_SEGMENT: ++ gst_event_copy_segment (event, &priv->last_segment); ++ priv->sample = gst_sample_make_writable (priv->sample); ++ gst_sample_set_segment (priv->sample, &priv->last_segment); ++ GST_DEBUG_OBJECT (appsink, "activated segment %" GST_SEGMENT_FORMAT, ++ &priv->last_segment); ++ break; ++ default: ++ break; ++ } ++ } ++ ++ return obj; ++} ++ ++static GstMiniObject * ++dequeue_buffer (GstAppSink * appsink) ++{ ++ GstMiniObject *obj; ++ + do { +- obj = gst_queue_array_pop_head (priv->queue); ++ obj = dequeue_object (appsink); + + if (GST_IS_BUFFER (obj) || GST_IS_BUFFER_LIST (obj)) { +- GST_DEBUG_OBJECT (appsink, "dequeued buffer/list %p", obj); +- priv->num_buffers--; + break; +- } else if (GST_IS_EVENT (obj)) { +- GstEvent *event = GST_EVENT_CAST (obj); +- +- switch (GST_EVENT_TYPE (obj)) { +- case GST_EVENT_CAPS: +- { +- GstCaps *caps; +- +- gst_event_parse_caps (event, &caps); +- GST_DEBUG_OBJECT (appsink, "activating caps %" GST_PTR_FORMAT, caps); +- gst_caps_replace (&priv->last_caps, caps); +- priv->sample = gst_sample_make_writable (priv->sample); +- gst_sample_set_caps (priv->sample, priv->last_caps); +- break; +- } +- case GST_EVENT_SEGMENT: +- gst_event_copy_segment (event, &priv->last_segment); +- priv->sample = gst_sample_make_writable (priv->sample); +- gst_sample_set_segment (priv->sample, &priv->last_segment); +- GST_DEBUG_OBJECT (appsink, "activated segment %" GST_SEGMENT_FORMAT, +- &priv->last_segment); +- break; +- default: +- break; +- } +- gst_mini_object_unref (obj); + } ++ ++ gst_mini_object_unref (obj); + } while (TRUE); + + return obj; +@@ -918,6 +1048,7 @@ restart: + if (G_UNLIKELY (!priv->last_caps && + gst_pad_has_current_caps (GST_BASE_SINK_PAD (psink)))) { + priv->last_caps = gst_pad_get_current_caps (GST_BASE_SINK_PAD (psink)); ++ priv->sample = gst_sample_make_writable (priv->sample); + gst_sample_set_caps (priv->sample, priv->last_caps); + GST_DEBUG_OBJECT (appsink, "activating pad caps %" GST_PTR_FORMAT, + priv->last_caps); +@@ -1147,7 +1278,7 @@ gst_app_sink_set_caps (GstAppSink * appsink, const GstCaps * caps) + * + * Get the configured caps on @appsink. + * +- * Returns: the #GstCaps accepted by the sink. gst_caps_unref() after usage. ++ * Returns: (nullable) (transfer full): the #GstCaps accepted by the sink. gst_caps_unref() after usage. + */ + GstCaps * + gst_app_sink_get_caps (GstAppSink * appsink) +@@ -1499,7 +1630,7 @@ gst_app_sink_get_wait_on_eos (GstAppSink * appsink) + * This function blocks until a preroll sample or EOS is received or the appsink + * element is set to the READY/NULL state. 
+ * +- * Returns: (transfer full): a #GstSample or NULL when the appsink is stopped or EOS. ++ * Returns: (transfer full) (nullable): a #GstSample or NULL when the appsink is stopped or EOS. + * Call gst_sample_unref() after usage. + */ + GstSample * +@@ -1524,7 +1655,7 @@ gst_app_sink_pull_preroll (GstAppSink * appsink) + * If an EOS event was received before any buffers, this function returns + * %NULL. Use gst_app_sink_is_eos () to check for the EOS condition. + * +- * Returns: (transfer full): a #GstSample or NULL when the appsink is stopped or EOS. ++ * Returns: (transfer full) (nullable): a #GstSample or NULL when the appsink is stopped or EOS. + * Call gst_sample_unref() after usage. + */ + GstSample * +@@ -1533,6 +1664,41 @@ gst_app_sink_pull_sample (GstAppSink * appsink) + return gst_app_sink_try_pull_sample (appsink, GST_CLOCK_TIME_NONE); + } + ++/** ++ * gst_app_sink_pull_object: (skip) ++ * @appsink: a #GstAppSink ++ * ++ * This function blocks until a sample or an event becomes available or the appsink ++ * element is set to the READY/NULL state. ++ * ++ * This function will only return samples when the appsink is in the PLAYING ++ * state. All rendered buffers and events will be put in a queue so that the application ++ * can pull them at its own rate. Note that when the application does not ++ * pull samples fast enough, the queued buffers could consume a lot of memory, ++ * especially when dealing with raw video frames. ++ * Events can be pulled when the appsink is in the READY, PAUSED or PLAYING state. ++ * ++ * This function will only pull serialized events, excluding ++ * the EOS event for which this functions returns ++ * %NULL. Use gst_app_sink_is_eos() to check for the EOS condition. ++ * ++ * This method is a variant of gst_app_sink_pull_sample() that can be used ++ * to handle incoming events events as well as samples. ++ * ++ * Note that future releases may extend this API to return other object types ++ * so make sure that your code is checking for the actual type it is handling. ++ * ++ * Returns: (transfer full) (nullable): a #GstSample, or a #GstEvent or NULL when the appsink is stopped or EOS. ++ * Call gst_mini_object_unref() after usage. ++ * ++ * Since: 1.20 ++ */ ++GstMiniObject * ++gst_app_sink_pull_object (GstAppSink * appsink) ++{ ++ return gst_app_sink_try_pull_object (appsink, GST_CLOCK_TIME_NONE); ++} ++ + /** + * gst_app_sink_try_pull_preroll: + * @appsink: a #GstAppSink +@@ -1558,7 +1724,7 @@ gst_app_sink_pull_sample (GstAppSink * appsink) + * This function blocks until a preroll sample or EOS is received, the appsink + * element is set to the READY/NULL state, or the timeout expires. + * +- * Returns: (transfer full): a #GstSample or NULL when the appsink is stopped or EOS or the timeout expires. ++ * Returns: (transfer full) (nullable): a #GstSample or NULL when the appsink is stopped or EOS or the timeout expires. + * Call gst_sample_unref() after usage. + * + * Since: 1.10 +@@ -1654,17 +1820,64 @@ not_started: + * this function returns %NULL. Use gst_app_sink_is_eos () to check for the EOS + * condition. + * +- * Returns: (transfer full): a #GstSample or NULL when the appsink is stopped or EOS or the timeout expires. +- * Call gst_sample_unref() after usage. ++ * Returns: (transfer full) (nullable): a #GstSample or NULL when the appsink is stopped or EOS or the timeout expires. ++ * Call gst_sample_unref() after usage. 
+ * + * Since: 1.10 + */ + GstSample * + gst_app_sink_try_pull_sample (GstAppSink * appsink, GstClockTime timeout) ++{ ++ while (TRUE) { ++ GstMiniObject *obj; ++ ++ obj = gst_app_sink_try_pull_object (appsink, timeout); ++ ++ if (!obj) { ++ return NULL; ++ } else if (GST_IS_SAMPLE (obj)) { ++ return GST_SAMPLE_CAST (obj); ++ } else { ++ gst_mini_object_unref (obj); ++ } ++ } ++} ++ ++/** ++ * gst_app_sink_try_pull_object: (skip) ++ * @appsink: a #GstAppSink ++ * @timeout: the maximum amount of time to wait for a sample ++ * ++ * This function blocks until a sample or an event or EOS becomes available or the appsink ++ * element is set to the READY/NULL state or the timeout expires. ++ * ++ * This function will only return samples when the appsink is in the PLAYING ++ * state. All rendered buffers and events will be put in a queue so that the application ++ * can pull them at its own rate. Note that when the application does not ++ * pull samples fast enough, the queued buffers could consume a lot of memory, ++ * especially when dealing with raw video frames. ++ * Events can be pulled when the appsink is in the READY, PAUSED or PLAYING state. ++ * ++ * This function will only pull serialized events, excluding ++ * the EOS event for which this functions returns ++ * %NULL. Use gst_app_sink_is_eos() to check for the EOS condition. ++ * ++ * This method is a variant of gst_app_sink_try_pull_sample() that can be used ++ * to handle incoming events events as well as samples. ++ * ++ * Note that future releases may extend this API to return other object types ++ * so make sure that your code is checking for the actual type it is handling. ++ * ++ * Returns: (transfer full) (nullable): a #GstSample, or #GstEvent or NULL when the appsink is stopped or EOS or the timeout expires. ++ * Call gst_mini_object_unref() after usage. 
++ * ++ * Since: 1.20 ++ */ ++GstMiniObject * ++gst_app_sink_try_pull_object (GstAppSink * appsink, GstClockTime timeout) + { + GstAppSinkPrivate *priv; +- GstSample *sample = NULL; +- GstMiniObject *obj; ++ GstMiniObject *obj = NULL, *ret; + gboolean timeout_valid; + gint64 end_time; + +@@ -1682,18 +1895,18 @@ gst_app_sink_try_pull_sample (GstAppSink * appsink, GstClockTime timeout) + gst_buffer_replace (&priv->preroll_buffer, NULL); + + while (TRUE) { +- GST_DEBUG_OBJECT (appsink, "trying to grab a buffer"); ++ GST_DEBUG_OBJECT (appsink, "trying to grab an object"); + if (!priv->started) + goto not_started; + +- if (priv->num_buffers > 0) ++ if (priv->num_buffers > 0 || priv->num_events > 0) + break; + + if (priv->is_eos) + goto eos; + + /* nothing to return, wait */ +- GST_DEBUG_OBJECT (appsink, "waiting for a buffer"); ++ GST_DEBUG_OBJECT (appsink, "waiting for an object"); + priv->wait_status |= APP_WAITING; + if (timeout_valid) { + if (!g_cond_wait_until (&priv->cond, &priv->mutex, end_time)) +@@ -1704,28 +1917,33 @@ gst_app_sink_try_pull_sample (GstAppSink * appsink, GstClockTime timeout) + priv->wait_status &= ~APP_WAITING; + } + +- obj = dequeue_buffer (appsink); ++ obj = dequeue_object (appsink); ++ ++ /* convert buffer and buffer list to sample */ + if (GST_IS_BUFFER (obj)) { + GST_DEBUG_OBJECT (appsink, "we have a buffer %p", obj); + priv->sample = gst_sample_make_writable (priv->sample); + gst_sample_set_buffer_list (priv->sample, NULL); + gst_sample_set_buffer (priv->sample, GST_BUFFER_CAST (obj)); +- sample = gst_sample_ref (priv->sample); +- } else { ++ ret = GST_MINI_OBJECT_CAST (gst_sample_ref (priv->sample)); ++ gst_mini_object_unref (obj); ++ } else if (GST_IS_BUFFER_LIST (obj)) { + GST_DEBUG_OBJECT (appsink, "we have a list %p", obj); + priv->sample = gst_sample_make_writable (priv->sample); + gst_sample_set_buffer (priv->sample, NULL); + gst_sample_set_buffer_list (priv->sample, GST_BUFFER_LIST_CAST (obj)); +- sample = gst_sample_ref (priv->sample); ++ ret = GST_MINI_OBJECT_CAST (gst_sample_ref (priv->sample)); ++ gst_mini_object_unref (obj); ++ } else { ++ ret = obj; + } +- gst_mini_object_unref (obj); + + if ((priv->wait_status & STREAM_WAITING)) + g_cond_signal (&priv->cond); + + g_mutex_unlock (&priv->mutex); + +- return sample; ++ return ret; + + /* special conditions */ + expired: +diff --git a/gst-libs/gst/app/gstappsink.h b/gst-libs/gst/app/gstappsink.h +index 036b86e50..90e678f53 100644 +--- a/gst-libs/gst/app/gstappsink.h ++++ b/gst-libs/gst/app/gstappsink.h +@@ -59,6 +59,14 @@ typedef struct _GstAppSinkPrivate GstAppSinkPrivate; + * The new sample can be retrieved with + * gst_app_sink_pull_sample() either from this callback + * or from any other thread. ++ * @new_event: Called when a new event is available. ++ * This callback is called from the streaming thread. ++ * The new event can be retrieved with ++ * gst_app_sink_pull_event() either from this callback ++ * or from any other thread. ++ * The callback should return %TRUE if the event has been handled, ++ * %FALSE otherwise. ++ * Since: 1.20 + * + * A set of callbacks that can be installed on the appsink with + * gst_app_sink_set_callbacks(). 
+@@ -67,9 +75,10 @@ typedef struct { + void (*eos) (GstAppSink *appsink, gpointer user_data); + GstFlowReturn (*new_preroll) (GstAppSink *appsink, gpointer user_data); + GstFlowReturn (*new_sample) (GstAppSink *appsink, gpointer user_data); ++ gboolean (*new_event) (GstAppSink *appsink, gpointer user_data); + + /*< private >*/ +- gpointer _gst_reserved[GST_PADDING]; ++ gpointer _gst_reserved[GST_PADDING - 1]; + } GstAppSinkCallbacks; + + struct _GstAppSink +@@ -91,15 +100,24 @@ struct _GstAppSinkClass + void (*eos) (GstAppSink *appsink); + GstFlowReturn (*new_preroll) (GstAppSink *appsink); + GstFlowReturn (*new_sample) (GstAppSink *appsink); ++ /* new_event is missing as we ran out padding */ + + /* actions */ + GstSample * (*pull_preroll) (GstAppSink *appsink); + GstSample * (*pull_sample) (GstAppSink *appsink); + GstSample * (*try_pull_preroll) (GstAppSink *appsink, GstClockTime timeout); + GstSample * (*try_pull_sample) (GstAppSink *appsink, GstClockTime timeout); ++ /** ++ * GstAppSinkClass::try_pull_object: ++ * ++ * See #GstAppSink::try-pull-object: signal. ++ * ++ * Since: 1.20 ++ */ ++ GstMiniObject * (*try_pull_object) (GstAppSink *appsink, GstClockTime timeout); + + /*< private >*/ +- gpointer _gst_reserved[GST_PADDING - 2]; ++ gpointer _gst_reserved[GST_PADDING - 3]; + }; + + GST_APP_API +@@ -150,12 +168,18 @@ GstSample * gst_app_sink_pull_preroll (GstAppSink *appsink); + GST_APP_API + GstSample * gst_app_sink_pull_sample (GstAppSink *appsink); + ++GST_APP_API ++GstMiniObject * gst_app_sink_pull_object (GstAppSink *appsink); ++ + GST_APP_API + GstSample * gst_app_sink_try_pull_preroll (GstAppSink *appsink, GstClockTime timeout); + + GST_APP_API + GstSample * gst_app_sink_try_pull_sample (GstAppSink *appsink, GstClockTime timeout); + ++GST_APP_API ++GstMiniObject * gst_app_sink_try_pull_object (GstAppSink *appsink, GstClockTime timeout); ++ + GST_APP_API + void gst_app_sink_set_callbacks (GstAppSink * appsink, + GstAppSinkCallbacks *callbacks, +diff --git a/gst-libs/gst/app/gstappsrc.c b/gst-libs/gst/app/gstappsrc.c +index c2267a045..8577f61a6 100644 +--- a/gst-libs/gst/app/gstappsrc.c ++++ b/gst-libs/gst/app/gstappsrc.c +@@ -46,11 +46,12 @@ + * streaming thread. It is important to note that data transport will not happen + * from the thread that performed the push-buffer call. + * +- * The "max-bytes" property controls how much data can be queued in appsrc +- * before appsrc considers the queue full. A filled internal queue will always +- * signal the "enough-data" signal, which signals the application that it should +- * stop pushing data into appsrc. The "block" property will cause appsrc to +- * block the push-buffer method until free data becomes available again. ++ * The "max-bytes", "max-buffers" and "max-time" properties control how much ++ * data can be queued in appsrc before appsrc considers the queue full. A ++ * filled internal queue will always signal the "enough-data" signal, which ++ * signals the application that it should stop pushing data into appsrc. The ++ * "block" property will cause appsrc to block the push-buffer method until ++ * free data becomes available again. 
+ * + * When the internal queue is running out of data, the "need-data" signal is + * emitted, which signals the application that it should start pushing more data +@@ -146,14 +147,25 @@ struct _GstAppSrcPrivate + + GstCaps *last_caps; + GstCaps *current_caps; ++ /* last segment received on the input */ + GstSegment last_segment; ++ /* currently configured segment for the output */ + GstSegment current_segment; ++ /* queue up a segment event based on last_segment before ++ * the next buffer of buffer list */ + gboolean pending_custom_segment; + ++ /* the next buffer that will be queued needs a discont flag ++ * because the previous one was dropped - GST_APP_LEAKY_TYPE_UPSTREAM */ ++ gboolean need_discont_upstream; ++ /* the next buffer that will be dequeued needs a discont flag ++ * because the previous one was dropped - GST_APP_LEAKY_TYPE_DOWNSTREAM */ ++ gboolean need_discont_downstream; ++ + gint64 size; + GstClockTime duration; + GstAppStreamType stream_type; +- guint64 max_bytes; ++ guint64 max_bytes, max_buffers, max_time; + GstFormat format; + gboolean block; + gchar *uri; +@@ -161,16 +173,25 @@ struct _GstAppSrcPrivate + gboolean flushing; + gboolean started; + gboolean is_eos; +- guint64 queued_bytes; ++ guint64 queued_bytes, queued_buffers; ++ /* Used to calculate the current time level */ ++ GstClockTime last_in_running_time, last_out_running_time; ++ /* Updated based on the above whenever they change */ ++ GstClockTime queued_time; + guint64 offset; + GstAppStreamType current_type; + + guint64 min_latency; + guint64 max_latency; ++ /* Tracks whether the latency message was posted at least once */ ++ gboolean posted_latency_msg; ++ + gboolean emit_signals; + guint min_percent; + gboolean handle_segment_change; + ++ GstAppLeakyType leaky_type; ++ + Callbacks *callbacks; + }; + +@@ -196,6 +217,8 @@ enum + #define DEFAULT_PROP_SIZE -1 + #define DEFAULT_PROP_STREAM_TYPE GST_APP_STREAM_TYPE_STREAM + #define DEFAULT_PROP_MAX_BYTES 200000 ++#define DEFAULT_PROP_MAX_BUFFERS 0 ++#define DEFAULT_PROP_MAX_TIME (0 * GST_SECOND) + #define DEFAULT_PROP_FORMAT GST_FORMAT_BYTES + #define DEFAULT_PROP_BLOCK FALSE + #define DEFAULT_PROP_IS_LIVE FALSE +@@ -204,8 +227,11 @@ enum + #define DEFAULT_PROP_EMIT_SIGNALS TRUE + #define DEFAULT_PROP_MIN_PERCENT 0 + #define DEFAULT_PROP_CURRENT_LEVEL_BYTES 0 ++#define DEFAULT_PROP_CURRENT_LEVEL_BUFFERS 0 ++#define DEFAULT_PROP_CURRENT_LEVEL_TIME 0 + #define DEFAULT_PROP_DURATION GST_CLOCK_TIME_NONE + #define DEFAULT_PROP_HANDLE_SEGMENT_CHANGE FALSE ++#define DEFAULT_PROP_LEAKY_TYPE GST_APP_LEAKY_TYPE_NONE + + enum + { +@@ -214,6 +240,8 @@ enum + PROP_SIZE, + PROP_STREAM_TYPE, + PROP_MAX_BYTES, ++ PROP_MAX_BUFFERS, ++ PROP_MAX_TIME, + PROP_FORMAT, + PROP_BLOCK, + PROP_IS_LIVE, +@@ -222,8 +250,11 @@ enum + PROP_EMIT_SIGNALS, + PROP_MIN_PERCENT, + PROP_CURRENT_LEVEL_BYTES, ++ PROP_CURRENT_LEVEL_BUFFERS, ++ PROP_CURRENT_LEVEL_TIME, + PROP_DURATION, + PROP_HANDLE_SEGMENT_CHANGE, ++ PROP_LEAKY_TYPE, + PROP_LAST + }; + +@@ -347,6 +378,37 @@ gst_app_src_class_init (GstAppSrcClass * klass) + "The maximum number of bytes to queue internally (0 = unlimited)", + 0, G_MAXUINT64, DEFAULT_PROP_MAX_BYTES, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstAppSrc:max-buffers: ++ * ++ * The maximum amount of buffers that can be queued internally. ++ * After the maximum amount of buffers are queued, appsrc will emit the ++ * "enough-data" signal. 
++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_MAX_BUFFERS, ++ g_param_spec_uint64 ("max-buffers", "Max buffers", ++ "The maximum number of buffers to queue internally (0 = unlimited)", ++ 0, G_MAXUINT64, DEFAULT_PROP_MAX_BUFFERS, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstAppSrc:max-time: ++ * ++ * The maximum amount of time that can be queued internally. ++ * After the maximum amount of time are queued, appsrc will emit the ++ * "enough-data" signal. ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_MAX_TIME, ++ g_param_spec_uint64 ("max-time", "Max time", ++ "The maximum amount of time to queue internally (0 = unlimited)", ++ 0, G_MAXUINT64, DEFAULT_PROP_MAX_TIME, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ + /** + * GstAppSrc:block: + * +@@ -430,6 +492,32 @@ gst_app_src_class_init (GstAppSrcClass * klass) + 0, G_MAXUINT64, DEFAULT_PROP_CURRENT_LEVEL_BYTES, + G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); + ++ /** ++ * GstAppSrc:current-level-buffers: ++ * ++ * The number of currently queued buffers inside appsrc. ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_CURRENT_LEVEL_BUFFERS, ++ g_param_spec_uint64 ("current-level-buffers", "Current Level Buffers", ++ "The number of currently queued buffers", ++ 0, G_MAXUINT64, DEFAULT_PROP_CURRENT_LEVEL_BUFFERS, ++ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstAppSrc:current-level-time: ++ * ++ * The amount of currently queued time inside appsrc. ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_CURRENT_LEVEL_TIME, ++ g_param_spec_uint64 ("current-level-time", "Current Level Time", ++ "The amount of currently queued time", ++ 0, G_MAXUINT64, DEFAULT_PROP_CURRENT_LEVEL_TIME, ++ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ + /** + * GstAppSrc:duration: + * +@@ -467,6 +555,24 @@ gst_app_src_class_init (GstAppSrcClass * klass) + G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY | + G_PARAM_STATIC_STRINGS)); + ++ /** ++ * GstAppSrc:leaky-type: ++ * ++ * When set to any other value than GST_APP_LEAKY_TYPE_NONE then the appsrc ++ * will drop any buffers that are pushed into it once its internal queue is ++ * full. The selected type defines whether to drop the oldest or new ++ * buffers. ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_LEAKY_TYPE, ++ g_param_spec_enum ("leaky-type", "Leaky Type", ++ "Whether to drop buffers once the internal queue is full", ++ GST_TYPE_APP_LEAKY_TYPE, ++ DEFAULT_PROP_LEAKY_TYPE, ++ G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY | ++ G_PARAM_STATIC_STRINGS)); ++ + /** + * GstAppSrc::need-data: + * @appsrc: the appsrc element that emitted the signal +@@ -518,11 +624,14 @@ gst_app_src_class_init (GstAppSrcClass * klass) + /** + * GstAppSrc::push-buffer: + * @appsrc: the appsrc +- * @buffer: a buffer to push ++ * @buffer: (transfer none): a buffer to push + * + * Adds a buffer to the queue of buffers that the appsrc element will +- * push to its source pad. This function does not take ownership of the +- * buffer so the buffer needs to be unreffed after calling this function. ++ * push to its source pad. ++ * ++ * This function does not take ownership of the buffer, but it takes a ++ * reference so the buffer can be unreffed at any time after calling this ++ * function. + * + * When the block property is TRUE, this function can block until free space + * becomes available in the queue. 
+@@ -536,12 +645,14 @@ gst_app_src_class_init (GstAppSrcClass * klass) + /** + * GstAppSrc::push-buffer-list: + * @appsrc: the appsrc +- * @buffer_list: a buffer list to push ++ * @buffer_list: (transfer none): a buffer list to push + * + * Adds a buffer list to the queue of buffers and buffer lists that the +- * appsrc element will push to its source pad. This function does not take +- * ownership of the buffer list so the buffer list needs to be unreffed +- * after calling this function. ++ * appsrc element will push to its source pad. ++ * ++ * This function does not take ownership of the buffer list, but it takes a ++ * reference so the buffer list can be unreffed at any time after calling ++ * this function. + * + * When the block property is TRUE, this function can block until free space + * becomes available in the queue. +@@ -557,7 +668,7 @@ gst_app_src_class_init (GstAppSrcClass * klass) + /** + * GstAppSrc::push-sample: + * @appsrc: the appsrc +- * @sample: a sample from which extract buffer to push ++ * @sample: (transfer none): a sample from which extract buffer to push + * + * Extract a buffer from the provided sample and adds the extracted buffer + * to the queue of buffers that the appsrc element will +@@ -565,8 +676,10 @@ gst_app_src_class_init (GstAppSrcClass * klass) + * in the sample and reset the caps if they change. + * Only the caps and the buffer of the provided sample are used and not + * for example the segment in the sample. +- * This function does not take ownership of the +- * sample so the sample needs to be unreffed after calling this function. ++ * ++ * This function does not take ownership of the sample, but it takes a ++ * reference so the sample can be unreffed at any time after calling this ++ * function. + * + * When the block property is TRUE, this function can block until free space + * becomes available in the queue. 
+@@ -636,6 +749,8 @@ gst_app_src_init (GstAppSrc * appsrc) + priv->duration = DEFAULT_PROP_DURATION; + priv->stream_type = DEFAULT_PROP_STREAM_TYPE; + priv->max_bytes = DEFAULT_PROP_MAX_BYTES; ++ priv->max_buffers = DEFAULT_PROP_MAX_BUFFERS; ++ priv->max_time = DEFAULT_PROP_MAX_TIME; + priv->format = DEFAULT_PROP_FORMAT; + priv->block = DEFAULT_PROP_BLOCK; + priv->min_latency = DEFAULT_PROP_MIN_LATENCY; +@@ -643,6 +758,7 @@ gst_app_src_init (GstAppSrc * appsrc) + priv->emit_signals = DEFAULT_PROP_EMIT_SIGNALS; + priv->min_percent = DEFAULT_PROP_MIN_PERCENT; + priv->handle_segment_change = DEFAULT_PROP_HANDLE_SEGMENT_CHANGE; ++ priv->leaky_type = DEFAULT_PROP_LEAKY_TYPE; + + gst_base_src_set_live (GST_BASE_SRC (appsrc), DEFAULT_PROP_IS_LIVE); + } +@@ -670,6 +786,12 @@ gst_app_src_flush_queued (GstAppSrc * src, gboolean retain_last_caps) + } + + priv->queued_bytes = 0; ++ priv->queued_buffers = 0; ++ priv->queued_time = 0; ++ priv->last_in_running_time = GST_CLOCK_TIME_NONE; ++ priv->last_out_running_time = GST_CLOCK_TIME_NONE; ++ priv->need_discont_upstream = FALSE; ++ priv->need_discont_downstream = FALSE; + } + + static void +@@ -762,6 +884,12 @@ gst_app_src_set_property (GObject * object, guint prop_id, + case PROP_MAX_BYTES: + gst_app_src_set_max_bytes (appsrc, g_value_get_uint64 (value)); + break; ++ case PROP_MAX_BUFFERS: ++ gst_app_src_set_max_buffers (appsrc, g_value_get_uint64 (value)); ++ break; ++ case PROP_MAX_TIME: ++ gst_app_src_set_max_time (appsrc, g_value_get_uint64 (value)); ++ break; + case PROP_FORMAT: + priv->format = g_value_get_enum (value); + break; +@@ -792,6 +920,9 @@ gst_app_src_set_property (GObject * object, guint prop_id, + case PROP_HANDLE_SEGMENT_CHANGE: + priv->handle_segment_change = g_value_get_boolean (value); + break; ++ case PROP_LEAKY_TYPE: ++ priv->leaky_type = g_value_get_enum (value); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -818,6 +949,12 @@ gst_app_src_get_property (GObject * object, guint prop_id, GValue * value, + case PROP_MAX_BYTES: + g_value_set_uint64 (value, gst_app_src_get_max_bytes (appsrc)); + break; ++ case PROP_MAX_BUFFERS: ++ g_value_set_uint64 (value, gst_app_src_get_max_buffers (appsrc)); ++ break; ++ case PROP_MAX_TIME: ++ g_value_set_uint64 (value, gst_app_src_get_max_time (appsrc)); ++ break; + case PROP_FORMAT: + g_value_set_enum (value, priv->format); + break; +@@ -852,12 +989,22 @@ gst_app_src_get_property (GObject * object, guint prop_id, GValue * value, + case PROP_CURRENT_LEVEL_BYTES: + g_value_set_uint64 (value, gst_app_src_get_current_level_bytes (appsrc)); + break; ++ case PROP_CURRENT_LEVEL_BUFFERS: ++ g_value_set_uint64 (value, ++ gst_app_src_get_current_level_buffers (appsrc)); ++ break; ++ case PROP_CURRENT_LEVEL_TIME: ++ g_value_set_uint64 (value, gst_app_src_get_current_level_time (appsrc)); ++ break; + case PROP_DURATION: + g_value_set_uint64 (value, gst_app_src_get_duration (appsrc)); + break; + case PROP_HANDLE_SEGMENT_CHANGE: + g_value_set_boolean (value, priv->handle_segment_change); + break; ++ case PROP_LEAKY_TYPE: ++ g_value_set_enum (value, priv->leaky_type); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -877,6 +1024,17 @@ gst_app_src_send_event (GstElement * element, GstEvent * event) + g_mutex_unlock (&priv->mutex); + break; + default: ++ if (GST_EVENT_IS_SERIALIZED (event)) { ++ GST_DEBUG_OBJECT (appsrc, "queue event: %" GST_PTR_FORMAT, event); ++ g_mutex_lock (&priv->mutex); ++ 
gst_queue_array_push_tail (priv->queue, event); ++ ++ if ((priv->wait_status & STREAM_WAITING)) ++ g_cond_broadcast (&priv->cond); ++ ++ g_mutex_unlock (&priv->mutex); ++ return TRUE; ++ } + break; + } + +@@ -948,6 +1106,7 @@ gst_app_src_stop (GstBaseSrc * bsrc) + priv->is_eos = FALSE; + priv->flushing = TRUE; + priv->started = FALSE; ++ priv->posted_latency_msg = FALSE; + gst_app_src_flush_queued (appsrc, TRUE); + g_cond_broadcast (&priv->cond); + g_mutex_unlock (&priv->mutex); +@@ -1170,7 +1329,7 @@ gst_app_src_do_negotiate (GstBaseSrc * basesrc) + { + GstAppSrc *appsrc = GST_APP_SRC_CAST (basesrc); + GstAppSrcPrivate *priv = appsrc->priv; +- gboolean result; ++ gboolean result = TRUE; + GstCaps *caps; + + GST_OBJECT_LOCK (basesrc); +@@ -1183,8 +1342,6 @@ gst_app_src_do_negotiate (GstBaseSrc * basesrc) + if (caps) { + result = gst_base_src_set_caps (basesrc, caps); + gst_caps_unref (caps); +- } else { +- result = GST_BASE_SRC_CLASS (parent_class)->negotiate (basesrc); + } + g_mutex_lock (&priv->mutex); + +@@ -1204,6 +1361,204 @@ gst_app_src_negotiate (GstBaseSrc * basesrc) + return result; + } + ++/* Update the currently queued bytes/buffers/time information for the item ++ * that was just removed from the queue. ++ * ++ * If update_offset is set, additionally the offset of the source will be ++ * moved forward accordingly as if that many bytes were output. ++ */ ++static void ++gst_app_src_update_queued_pop (GstAppSrc * appsrc, GstMiniObject * item, ++ gboolean update_offset) ++{ ++ GstAppSrcPrivate *priv = appsrc->priv; ++ guint buf_size = 0; ++ guint n_buffers = 0; ++ GstClockTime end_buffer_ts = GST_CLOCK_TIME_NONE; ++ ++ if (GST_IS_BUFFER (item)) { ++ GstBuffer *buf = GST_BUFFER_CAST (item); ++ buf_size = gst_buffer_get_size (buf); ++ n_buffers = 1; ++ ++ end_buffer_ts = GST_BUFFER_DTS_OR_PTS (buf); ++ if (end_buffer_ts != GST_CLOCK_TIME_NONE ++ && GST_BUFFER_DURATION_IS_VALID (buf)) ++ end_buffer_ts += GST_BUFFER_DURATION (buf); ++ ++ GST_LOG_OBJECT (appsrc, "have buffer %p of size %u", buf, buf_size); ++ } else if (GST_IS_BUFFER_LIST (item)) { ++ GstBufferList *buffer_list = GST_BUFFER_LIST_CAST (item); ++ guint i; ++ ++ n_buffers = gst_buffer_list_length (buffer_list); ++ ++ for (i = 0; i < n_buffers; i++) { ++ GstBuffer *tmp = gst_buffer_list_get (buffer_list, i); ++ GstClockTime ts = GST_BUFFER_DTS_OR_PTS (tmp); ++ ++ buf_size += gst_buffer_get_size (tmp); ++ /* Update to the last buffer's timestamp that is known */ ++ if (ts != GST_CLOCK_TIME_NONE) { ++ end_buffer_ts = ts; ++ if (GST_BUFFER_DURATION_IS_VALID (tmp)) ++ end_buffer_ts += GST_BUFFER_DURATION (tmp); ++ } ++ } ++ } ++ ++ priv->queued_bytes -= buf_size; ++ priv->queued_buffers -= n_buffers; ++ ++ /* Update time level if working on a TIME segment */ ++ if ((priv->current_segment.format == GST_FORMAT_TIME ++ || (priv->current_segment.format == GST_FORMAT_UNDEFINED ++ && priv->last_segment.format == GST_FORMAT_TIME)) ++ && end_buffer_ts != GST_CLOCK_TIME_NONE) { ++ const GstSegment *segment = ++ priv->current_segment.format == ++ GST_FORMAT_TIME ? 
&priv->current_segment : &priv->last_segment; ++ ++ /* Clip to the current segment boundaries */ ++ if (segment->stop != -1 && end_buffer_ts > segment->stop) ++ end_buffer_ts = segment->stop; ++ else if (segment->start > end_buffer_ts) ++ end_buffer_ts = segment->start; ++ ++ priv->last_out_running_time = ++ gst_segment_to_running_time (segment, GST_FORMAT_TIME, end_buffer_ts); ++ ++ GST_TRACE_OBJECT (appsrc, ++ "Last in running time %" GST_TIME_FORMAT ", last out running time %" ++ GST_TIME_FORMAT, GST_TIME_ARGS (priv->last_in_running_time), ++ GST_TIME_ARGS (priv->last_out_running_time)); ++ ++ /* If timestamps on both sides are known, calculate the current ++ * fill level in time and consider the queue empty if the output ++ * running time is lower than the input one (i.e. some kind of reset ++ * has happened). ++ */ ++ if (priv->last_out_running_time != GST_CLOCK_TIME_NONE ++ && priv->last_in_running_time != GST_CLOCK_TIME_NONE) { ++ if (priv->last_out_running_time > priv->last_in_running_time) { ++ priv->queued_time = 0; ++ } else { ++ priv->queued_time = ++ priv->last_in_running_time - priv->last_out_running_time; ++ } ++ } ++ } ++ ++ GST_DEBUG_OBJECT (appsrc, ++ "Currently queued: %" G_GUINT64_FORMAT " bytes, %" G_GUINT64_FORMAT ++ " buffers, %" GST_TIME_FORMAT, priv->queued_bytes, ++ priv->queued_buffers, GST_TIME_ARGS (priv->queued_time)); ++ ++ /* only update the offset when in random_access mode and when requested by ++ * the caller, i.e. not when just dropping the item */ ++ if (update_offset && priv->stream_type == GST_APP_STREAM_TYPE_RANDOM_ACCESS) ++ priv->offset += buf_size; ++} ++ ++/* Update the currently queued bytes/buffers/time information for the item ++ * that was just added to the queue. ++ */ ++static void ++gst_app_src_update_queued_push (GstAppSrc * appsrc, GstMiniObject * item) ++{ ++ GstAppSrcPrivate *priv = appsrc->priv; ++ GstClockTime start_buffer_ts = GST_CLOCK_TIME_NONE; ++ GstClockTime end_buffer_ts = GST_CLOCK_TIME_NONE; ++ guint buf_size = 0; ++ guint n_buffers = 0; ++ ++ if (GST_IS_BUFFER (item)) { ++ GstBuffer *buf = GST_BUFFER_CAST (item); ++ ++ buf_size = gst_buffer_get_size (buf); ++ n_buffers = 1; ++ ++ start_buffer_ts = end_buffer_ts = GST_BUFFER_DTS_OR_PTS (buf); ++ if (end_buffer_ts != GST_CLOCK_TIME_NONE ++ && GST_BUFFER_DURATION_IS_VALID (buf)) ++ end_buffer_ts += GST_BUFFER_DURATION (buf); ++ } else if (GST_IS_BUFFER_LIST (item)) { ++ GstBufferList *buffer_list = GST_BUFFER_LIST_CAST (item); ++ guint i; ++ ++ n_buffers = gst_buffer_list_length (buffer_list); ++ ++ for (i = 0; i < n_buffers; i++) { ++ GstBuffer *tmp = gst_buffer_list_get (buffer_list, i); ++ GstClockTime ts = GST_BUFFER_DTS_OR_PTS (tmp); ++ ++ buf_size += gst_buffer_get_size (tmp); ++ ++ if (ts != GST_CLOCK_TIME_NONE) { ++ if (start_buffer_ts == GST_CLOCK_TIME_NONE) ++ start_buffer_ts = ts; ++ end_buffer_ts = ts; ++ if (GST_BUFFER_DURATION_IS_VALID (tmp)) ++ end_buffer_ts += GST_BUFFER_DURATION (tmp); ++ } ++ } ++ } ++ ++ priv->queued_bytes += buf_size; ++ priv->queued_buffers += n_buffers; ++ ++ /* Update time level if working on a TIME segment */ ++ if (priv->last_segment.format == GST_FORMAT_TIME ++ && end_buffer_ts != GST_CLOCK_TIME_NONE) { ++ /* Clip to the last segment boundaries */ ++ if (priv->last_segment.stop != -1 ++ && end_buffer_ts > priv->last_segment.stop) ++ end_buffer_ts = priv->last_segment.stop; ++ else if (priv->last_segment.start > end_buffer_ts) ++ end_buffer_ts = priv->last_segment.start; ++ ++ priv->last_in_running_time = ++ gst_segment_to_running_time 
(&priv->last_segment, GST_FORMAT_TIME, ++ end_buffer_ts); ++ ++ /* If this is the only buffer then we can directly update the queued time ++ * here. This is especially useful if this was the first buffer because ++ * otherwise we would have to wait until it is actually unqueued to know ++ * the queued duration */ ++ if (priv->queued_buffers == 1) { ++ if (priv->last_segment.stop != -1 ++ && start_buffer_ts > priv->last_segment.stop) ++ start_buffer_ts = priv->last_segment.stop; ++ else if (priv->last_segment.start > start_buffer_ts) ++ start_buffer_ts = priv->last_segment.start; ++ ++ priv->last_out_running_time = ++ gst_segment_to_running_time (&priv->last_segment, GST_FORMAT_TIME, ++ start_buffer_ts); ++ } ++ ++ GST_TRACE_OBJECT (appsrc, ++ "Last in running time %" GST_TIME_FORMAT ", last out running time %" ++ GST_TIME_FORMAT, GST_TIME_ARGS (priv->last_in_running_time), ++ GST_TIME_ARGS (priv->last_out_running_time)); ++ ++ if (priv->last_out_running_time != GST_CLOCK_TIME_NONE ++ && priv->last_in_running_time != GST_CLOCK_TIME_NONE) { ++ if (priv->last_out_running_time > priv->last_in_running_time) { ++ priv->queued_time = 0; ++ } else { ++ priv->queued_time = ++ priv->last_in_running_time - priv->last_out_running_time; ++ } ++ } ++ } ++ ++ GST_DEBUG_OBJECT (appsrc, ++ "Currently queued: %" G_GUINT64_FORMAT " bytes, %" G_GUINT64_FORMAT ++ " buffers, %" GST_TIME_FORMAT, priv->queued_bytes, priv->queued_buffers, ++ GST_TIME_ARGS (priv->queued_time)); ++} ++ + static GstFlowReturn + gst_app_src_create (GstBaseSrc * bsrc, guint64 offset, guint size, + GstBuffer ** buf) +@@ -1261,9 +1616,13 @@ gst_app_src_create (GstBaseSrc * bsrc, guint64 offset, guint size, + } + + while (TRUE) { ++ /* Our lock may have been release to push events or caps, check out ++ * state in case we are now flushing. 
*/ ++ if (G_UNLIKELY (priv->flushing)) ++ goto flushing; ++ + /* return data as long as we have some */ + if (!gst_queue_array_is_empty (priv->queue)) { +- guint buf_size; + GstMiniObject *obj = gst_queue_array_pop_head (priv->queue); + + if (GST_IS_CAPS (obj)) { +@@ -1284,74 +1643,116 @@ gst_app_src_create (GstBaseSrc * bsrc, guint64 offset, guint size, + if (caps_changed) + gst_app_src_do_negotiate (bsrc); + +- /* Lock has released so now may need +- *- flushing +- *- new caps change +- *- check queue has data */ +- if (G_UNLIKELY (priv->flushing)) +- goto flushing; +- + /* Continue checks caps and queue */ + continue; + } + + if (GST_IS_BUFFER (obj)) { +- *buf = GST_BUFFER (obj); +- buf_size = gst_buffer_get_size (*buf); +- GST_LOG_OBJECT (appsrc, "have buffer %p of size %u", *buf, buf_size); ++ GstBuffer *buffer = GST_BUFFER (obj); ++ ++ /* Mark the buffer as DISCONT if we previously dropped a buffer ++ * instead of outputting it */ ++ if (priv->need_discont_downstream) { ++ buffer = gst_buffer_make_writable (buffer); ++ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); ++ priv->need_discont_downstream = FALSE; ++ } ++ ++ *buf = buffer; + } else if (GST_IS_BUFFER_LIST (obj)) { + GstBufferList *buffer_list; + + buffer_list = GST_BUFFER_LIST (obj); + +- buf_size = gst_buffer_list_calculate_size (buffer_list); ++ /* Mark the first buffer of the buffer list as DISCONT if we ++ * previously dropped a buffer instead of outputting it */ ++ if (priv->need_discont_downstream) { ++ GstBuffer *buffer; + +- GST_LOG_OBJECT (appsrc, "have buffer list %p of size %u, %u buffers", +- buffer_list, buf_size, gst_buffer_list_length (buffer_list)); ++ buffer_list = gst_buffer_list_make_writable (buffer_list); ++ buffer = gst_buffer_list_get_writable (buffer_list, 0); ++ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); ++ priv->need_discont_downstream = FALSE; ++ } + + gst_base_src_submit_buffer_list (bsrc, buffer_list); + *buf = NULL; + } else if (GST_IS_EVENT (obj)) { + GstEvent *event = GST_EVENT (obj); +- const GstSegment *segment = NULL; +- +- gst_event_parse_segment (event, &segment); +- g_assert (segment != NULL); +- +- if (!gst_segment_is_equal (&priv->current_segment, segment)) { +- GST_DEBUG_OBJECT (appsrc, +- "Update new segment %" GST_PTR_FORMAT, event); +- if (!gst_base_src_new_segment (bsrc, segment)) { +- GST_ERROR_OBJECT (appsrc, +- "Couldn't set new segment %" GST_PTR_FORMAT, event); +- gst_event_unref (event); +- goto invalid_segment; ++ ++ GST_DEBUG_OBJECT (appsrc, "pop event %" GST_PTR_FORMAT, event); ++ ++ if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) { ++ const GstSegment *segment = NULL; ++ ++ gst_event_parse_segment (event, &segment); ++ g_assert (segment != NULL); ++ ++ if (!gst_segment_is_equal (&priv->current_segment, segment)) { ++ GST_DEBUG_OBJECT (appsrc, ++ "Update new segment %" GST_PTR_FORMAT, event); ++ if (!gst_base_src_new_segment (bsrc, segment)) { ++ GST_ERROR_OBJECT (appsrc, ++ "Couldn't set new segment %" GST_PTR_FORMAT, event); ++ gst_event_unref (event); ++ goto invalid_segment; ++ } ++ gst_segment_copy_into (segment, &priv->current_segment); + } +- gst_segment_copy_into (segment, &priv->current_segment); +- } + +- gst_event_unref (event); ++ gst_event_unref (event); ++ } else { ++ GstEvent *seg_event; ++ GstSegment last_segment = priv->last_segment; ++ ++ /* event is serialized with the buffers flow */ ++ ++ /* We are about to push an event, release out lock */ ++ g_mutex_unlock (&priv->mutex); ++ ++ seg_event = ++ gst_pad_get_sticky_event 
(GST_BASE_SRC_PAD (appsrc), ++ GST_EVENT_SEGMENT, 0); ++ if (!seg_event) { ++ seg_event = gst_event_new_segment (&last_segment); ++ ++ GST_DEBUG_OBJECT (appsrc, ++ "received serialized event before first buffer, push default segment %" ++ GST_PTR_FORMAT, seg_event); ++ ++ gst_pad_push_event (GST_BASE_SRC_PAD (appsrc), seg_event); ++ } else { ++ gst_event_unref (seg_event); ++ } ++ ++ gst_pad_push_event (GST_BASE_SRC_PAD (appsrc), event); ++ ++ g_mutex_lock (&priv->mutex); ++ } + continue; + } else { + g_assert_not_reached (); + } + +- priv->queued_bytes -= buf_size; +- +- /* only update the offset when in random_access mode */ +- if (priv->stream_type == GST_APP_STREAM_TYPE_RANDOM_ACCESS) +- priv->offset += buf_size; ++ gst_app_src_update_queued_pop (appsrc, obj, TRUE); + + /* signal that we removed an item */ + if ((priv->wait_status & APP_WAITING)) + g_cond_broadcast (&priv->cond); + + /* see if we go lower than the min-percent */ +- if (priv->min_percent && priv->max_bytes) { +- if (priv->queued_bytes * 100 / priv->max_bytes <= priv->min_percent) ++ if (priv->min_percent) { ++ if ((priv->max_bytes ++ && priv->queued_bytes * 100 / priv->max_bytes <= ++ priv->min_percent) || (priv->max_buffers ++ && priv->queued_buffers * 100 / priv->max_buffers <= ++ priv->min_percent) || (priv->max_time ++ && priv->queued_time * 100 / priv->max_time <= ++ priv->min_percent)) { + /* ignore flushing state, we got a buffer and we will return it now. + * Errors will be handled in the next round */ + gst_app_src_emit_need_data (appsrc, size); ++ } + } + ret = GST_FLOW_OK; + break; +@@ -1473,7 +1874,7 @@ gst_app_src_set_caps (GstAppSrc * appsrc, const GstCaps * caps) + * + * Get the configured caps on @appsrc. + * +- * Returns: the #GstCaps produced by the source. gst_caps_unref() after usage. ++ * Returns: (nullable) (transfer full): the #GstCaps produced by the source. gst_caps_unref() after usage. + */ + GstCaps * + gst_app_src_get_caps (GstAppSrc * appsrc) +@@ -1717,7 +2118,7 @@ gst_app_src_get_max_bytes (GstAppSrc * appsrc) + guint64 + gst_app_src_get_current_level_bytes (GstAppSrc * appsrc) + { +- gint64 queued; ++ guint64 queued; + GstAppSrcPrivate *priv; + + g_return_val_if_fail (GST_IS_APP_SRC (appsrc), -1); +@@ -1733,6 +2134,183 @@ gst_app_src_get_current_level_bytes (GstAppSrc * appsrc) + return queued; + } + ++/** ++ * gst_app_src_set_max_buffers: ++ * @appsrc: a #GstAppSrc ++ * @max: the maximum number of buffers to queue ++ * ++ * Set the maximum amount of buffers that can be queued in @appsrc. ++ * After the maximum amount of buffers are queued, @appsrc will emit the ++ * "enough-data" signal. ++ * ++ * Since: 1.20 ++ */ ++void ++gst_app_src_set_max_buffers (GstAppSrc * appsrc, guint64 max) ++{ ++ GstAppSrcPrivate *priv; ++ ++ g_return_if_fail (GST_IS_APP_SRC (appsrc)); ++ ++ priv = appsrc->priv; ++ ++ g_mutex_lock (&priv->mutex); ++ if (max != priv->max_buffers) { ++ GST_DEBUG_OBJECT (appsrc, "setting max-buffers to %" G_GUINT64_FORMAT, max); ++ priv->max_buffers = max; ++ /* signal the change */ ++ g_cond_broadcast (&priv->cond); ++ } ++ g_mutex_unlock (&priv->mutex); ++} ++ ++/** ++ * gst_app_src_get_max_buffers: ++ * @appsrc: a #GstAppSrc ++ * ++ * Get the maximum amount of buffers that can be queued in @appsrc. ++ * ++ * Returns: The maximum amount of buffers that can be queued. 
++ * ++ * Since: 1.20 ++ */ ++guint64 ++gst_app_src_get_max_buffers (GstAppSrc * appsrc) ++{ ++ guint64 result; ++ GstAppSrcPrivate *priv; ++ ++ g_return_val_if_fail (GST_IS_APP_SRC (appsrc), 0); ++ ++ priv = appsrc->priv; ++ ++ g_mutex_lock (&priv->mutex); ++ result = priv->max_buffers; ++ GST_DEBUG_OBJECT (appsrc, "getting max-buffers of %" G_GUINT64_FORMAT, ++ result); ++ g_mutex_unlock (&priv->mutex); ++ ++ return result; ++} ++ ++/** ++ * gst_app_src_get_current_level_buffers: ++ * @appsrc: a #GstAppSrc ++ * ++ * Get the number of currently queued buffers inside @appsrc. ++ * ++ * Returns: The number of currently queued buffers. ++ * ++ * Since: 1.20 ++ */ ++guint64 ++gst_app_src_get_current_level_buffers (GstAppSrc * appsrc) ++{ ++ guint64 queued; ++ GstAppSrcPrivate *priv; ++ ++ g_return_val_if_fail (GST_IS_APP_SRC (appsrc), -1); ++ ++ priv = appsrc->priv; ++ ++ GST_OBJECT_LOCK (appsrc); ++ queued = priv->queued_buffers; ++ GST_DEBUG_OBJECT (appsrc, "current level buffers is %" G_GUINT64_FORMAT, ++ queued); ++ GST_OBJECT_UNLOCK (appsrc); ++ ++ return queued; ++} ++ ++/** ++ * gst_app_src_set_max_time: ++ * @appsrc: a #GstAppSrc ++ * @max: the maximum amonut of time to queue ++ * ++ * Set the maximum amount of time that can be queued in @appsrc. ++ * After the maximum amount of time are queued, @appsrc will emit the ++ * "enough-data" signal. ++ * ++ * Since: 1.20 ++ */ ++void ++gst_app_src_set_max_time (GstAppSrc * appsrc, GstClockTime max) ++{ ++ GstAppSrcPrivate *priv; ++ ++ g_return_if_fail (GST_IS_APP_SRC (appsrc)); ++ ++ priv = appsrc->priv; ++ ++ g_mutex_lock (&priv->mutex); ++ if (max != priv->max_time) { ++ GST_DEBUG_OBJECT (appsrc, "setting max-time to %" GST_TIME_FORMAT, ++ GST_TIME_ARGS (max)); ++ priv->max_time = max; ++ /* signal the change */ ++ g_cond_broadcast (&priv->cond); ++ } ++ g_mutex_unlock (&priv->mutex); ++} ++ ++/** ++ * gst_app_src_get_max_time: ++ * @appsrc: a #GstAppSrc ++ * ++ * Get the maximum amount of time that can be queued in @appsrc. ++ * ++ * Returns: The maximum amount of time that can be queued. ++ * ++ * Since: 1.20 ++ */ ++GstClockTime ++gst_app_src_get_max_time (GstAppSrc * appsrc) ++{ ++ GstClockTime result; ++ GstAppSrcPrivate *priv; ++ ++ g_return_val_if_fail (GST_IS_APP_SRC (appsrc), 0); ++ ++ priv = appsrc->priv; ++ ++ g_mutex_lock (&priv->mutex); ++ result = priv->max_time; ++ GST_DEBUG_OBJECT (appsrc, "getting max-time of %" GST_TIME_FORMAT, ++ GST_TIME_ARGS (result)); ++ g_mutex_unlock (&priv->mutex); ++ ++ return result; ++} ++ ++/** ++ * gst_app_src_get_current_level_time: ++ * @appsrc: a #GstAppSrc ++ * ++ * Get the amount of currently queued time inside @appsrc. ++ * ++ * Returns: The amount of currently queued time. 
++ * ++ * Since: 1.20 ++ */ ++GstClockTime ++gst_app_src_get_current_level_time (GstAppSrc * appsrc) ++{ ++ gint64 queued; ++ GstAppSrcPrivate *priv; ++ ++ g_return_val_if_fail (GST_IS_APP_SRC (appsrc), GST_CLOCK_TIME_NONE); ++ ++ priv = appsrc->priv; ++ ++ GST_OBJECT_LOCK (appsrc); ++ queued = priv->queued_time; ++ GST_DEBUG_OBJECT (appsrc, "current level time is %" GST_TIME_FORMAT, ++ GST_TIME_ARGS (queued)); ++ GST_OBJECT_UNLOCK (appsrc); ++ ++ return queued; ++} ++ + static void + gst_app_src_set_latencies (GstAppSrc * appsrc, gboolean do_min, guint64 min, + gboolean do_max, guint64 max) +@@ -1749,6 +2327,10 @@ gst_app_src_set_latencies (GstAppSrc * appsrc, gboolean do_min, guint64 min, + priv->max_latency = max; + changed = TRUE; + } ++ if (!priv->posted_latency_msg) { ++ priv->posted_latency_msg = TRUE; ++ changed = TRUE; ++ } + g_mutex_unlock (&priv->mutex); + + if (changed) { +@@ -1758,6 +2340,45 @@ gst_app_src_set_latencies (GstAppSrc * appsrc, gboolean do_min, guint64 min, + } + } + ++/** ++ * gst_app_src_set_leaky_type: ++ * @appsrc: a #GstAppSrc ++ * @leaky: the #GstAppLeakyType ++ * ++ * When set to any other value than GST_APP_LEAKY_TYPE_NONE then the appsrc ++ * will drop any buffers that are pushed into it once its internal queue is ++ * full. The selected type defines whether to drop the oldest or new ++ * buffers. ++ * ++ * Since: 1.20 ++ */ ++void ++gst_app_src_set_leaky_type (GstAppSrc * appsrc, GstAppLeakyType leaky) ++{ ++ g_return_if_fail (GST_IS_APP_SRC (appsrc)); ++ ++ appsrc->priv->leaky_type = leaky; ++} ++ ++/** ++ * gst_app_src_get_leaky_type: ++ * @appsrc: a #GstAppSrc ++ * ++ * Returns the currently set #GstAppLeakyType. See gst_app_src_set_leaky_type() ++ * for more details. ++ * ++ * Returns: The currently set #GstAppLeakyType. 
++ * ++ * Since: 1.20 ++ */ ++GstAppLeakyType ++gst_app_src_get_leaky_type (GstAppSrc * appsrc) ++{ ++ g_return_val_if_fail (GST_IS_APP_SRC (appsrc), GST_APP_LEAKY_TYPE_NONE); ++ ++ return appsrc->priv->leaky_type; ++} ++ + /** + * gst_app_src_set_latency: + * @appsrc: a #GstAppSrc +@@ -1925,10 +2546,17 @@ gst_app_src_push_internal (GstAppSrc * appsrc, GstBuffer * buffer, + if (priv->is_eos) + goto eos; + +- if (priv->max_bytes && priv->queued_bytes >= priv->max_bytes) { ++ if ((priv->max_bytes && priv->queued_bytes >= priv->max_bytes) || ++ (priv->max_buffers && priv->queued_buffers >= priv->max_buffers) || ++ (priv->max_time && priv->queued_time >= priv->max_time)) { + GST_DEBUG_OBJECT (appsrc, +- "queue filled (%" G_GUINT64_FORMAT " >= %" G_GUINT64_FORMAT ")", +- priv->queued_bytes, priv->max_bytes); ++ "queue filled (queued %" G_GUINT64_FORMAT " bytes, max %" ++ G_GUINT64_FORMAT " bytes, " "queued %" G_GUINT64_FORMAT ++ " buffers, max %" G_GUINT64_FORMAT " buffers, " "queued %" ++ GST_TIME_FORMAT " time, max %" GST_TIME_FORMAT " time)", ++ priv->queued_bytes, priv->max_bytes, priv->queued_buffers, ++ priv->max_buffers, GST_TIME_ARGS (priv->queued_time), ++ GST_TIME_ARGS (priv->max_time)); + + if (first) { + Callbacks *callbacks = NULL; +@@ -1949,6 +2577,46 @@ gst_app_src_push_internal (GstAppSrc * appsrc, GstBuffer * buffer, + g_clear_pointer (&callbacks, callbacks_unref); + + g_mutex_lock (&priv->mutex); ++ } ++ ++ if (priv->leaky_type == GST_APP_LEAKY_TYPE_UPSTREAM) { ++ priv->need_discont_upstream = TRUE; ++ goto dropped; ++ } else if (priv->leaky_type == GST_APP_LEAKY_TYPE_DOWNSTREAM) { ++ guint i, length = gst_queue_array_get_length (priv->queue); ++ GstMiniObject *item = NULL; ++ ++ /* Find the oldest buffer or buffer list and drop it, then update the ++ * limits. Dropping one is sufficient to go below the limits again. ++ */ ++ for (i = 0; i < length; i++) { ++ item = gst_queue_array_peek_nth (priv->queue, i); ++ if (GST_IS_BUFFER (item) || GST_IS_BUFFER_LIST (item)) { ++ gst_queue_array_drop_element (priv->queue, i); ++ break; ++ } ++ /* To not accidentally have an event after the loop */ ++ item = NULL; ++ } ++ ++ if (!item) { ++ GST_FIXME_OBJECT (appsrc, ++ "No buffer or buffer list queued but queue is full"); ++ /* This shouldn't really happen but in this case we can't really do ++ * anything apart from accepting the buffer / bufferlist */ ++ break; ++ } ++ ++ GST_WARNING_OBJECT (appsrc, "Dropping old item %" GST_PTR_FORMAT, item); ++ ++ gst_app_src_update_queued_pop (appsrc, item, FALSE); ++ gst_mini_object_unref (item); ++ ++ priv->need_discont_downstream = TRUE; ++ continue; ++ } ++ ++ if (first) { + /* continue to check for flushing/eos after releasing the lock */ + first = FALSE; + continue; +@@ -1966,8 +2634,9 @@ gst_app_src_push_internal (GstAppSrc * appsrc, GstBuffer * buffer, + * stops pushing buffers. 
*/ + break; + } +- } else ++ } else { + break; ++ } + } + + if (priv->pending_custom_segment) { +@@ -1979,19 +2648,48 @@ gst_app_src_push_internal (GstAppSrc * appsrc, GstBuffer * buffer, + } + + if (buflist != NULL) { ++ /* Mark the first buffer of the buffer list as DISCONT if we previously ++ * dropped a buffer instead of queueing it */ ++ if (priv->need_discont_upstream) { ++ if (!steal_ref) { ++ buflist = gst_buffer_list_copy (buflist); ++ steal_ref = TRUE; ++ } else { ++ buflist = gst_buffer_list_make_writable (buflist); ++ } ++ buffer = gst_buffer_list_get_writable (buflist, 0); ++ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); ++ priv->need_discont_upstream = FALSE; ++ } ++ + GST_DEBUG_OBJECT (appsrc, "queueing buffer list %p", buflist); ++ + if (!steal_ref) + gst_buffer_list_ref (buflist); + gst_queue_array_push_tail (priv->queue, buflist); +- priv->queued_bytes += gst_buffer_list_calculate_size (buflist); + } else { ++ /* Mark the buffer as DISCONT if we previously dropped a buffer instead of ++ * queueing it */ ++ if (priv->need_discont_upstream) { ++ if (!steal_ref) { ++ buffer = gst_buffer_copy (buffer); ++ steal_ref = TRUE; ++ } else { ++ buffer = gst_buffer_make_writable (buffer); ++ } ++ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); ++ priv->need_discont_upstream = FALSE; ++ } ++ + GST_DEBUG_OBJECT (appsrc, "queueing buffer %p", buffer); + if (!steal_ref) + gst_buffer_ref (buffer); + gst_queue_array_push_tail (priv->queue, buffer); +- priv->queued_bytes += gst_buffer_get_size (buffer); + } + ++ gst_app_src_update_queued_push (appsrc, ++ buflist ? GST_MINI_OBJECT_CAST (buflist) : GST_MINI_OBJECT_CAST (buffer)); ++ + if ((priv->wait_status & STREAM_WAITING)) + g_cond_broadcast (&priv->cond); + +@@ -2024,6 +2722,18 @@ eos: + g_mutex_unlock (&priv->mutex); + return GST_FLOW_EOS; + } ++dropped: ++ { ++ GST_DEBUG_OBJECT (appsrc, "dropped new buffer %p, we are full", buffer); ++ if (steal_ref) { ++ if (buflist) ++ gst_buffer_list_unref (buflist); ++ else ++ gst_buffer_unref (buffer); ++ } ++ g_mutex_unlock (&priv->mutex); ++ return GST_FLOW_OK; ++ } + } + + static GstFlowReturn +@@ -2064,6 +2774,10 @@ gst_app_src_push_sample_internal (GstAppSrc * appsrc, GstSample * sample) + GST_LOG_OBJECT (appsrc, "segment wasn't changed"); + g_mutex_unlock (&priv->mutex); + goto handle_buffer; ++ } else { ++ GST_LOG_OBJECT (appsrc, ++ "segment changed %" GST_SEGMENT_FORMAT " -> %" GST_SEGMENT_FORMAT, ++ &priv->last_segment, segment); + } + + /* will be pushed to queue with next buffer/buffer-list */ +diff --git a/gst-libs/gst/app/gstappsrc.h b/gst-libs/gst/app/gstappsrc.h +index 82f8c2a4e..16180fd16 100644 +--- a/gst-libs/gst/app/gstappsrc.h ++++ b/gst-libs/gst/app/gstappsrc.h +@@ -88,6 +88,23 @@ typedef enum + GST_APP_STREAM_TYPE_RANDOM_ACCESS + } GstAppStreamType; + ++/** ++ * GstAppLeakyType: ++ * @GST_APP_LEAKY_TYPE_NONE: Not Leaky ++ * @GST_APP_LEAKY_TYPE_UPSTREAM: Leaky on upstream (new buffers) ++ * @GST_APP_LEAKY_TYPE_DOWNSTREAM: Leaky on downstream (old buffers) ++ * ++ * Buffer dropping scheme to avoid the element's internal queue to block when ++ * full. 
++ * ++ * Since: 1.20 ++ */ ++typedef enum { ++ GST_APP_LEAKY_TYPE_NONE, ++ GST_APP_LEAKY_TYPE_UPSTREAM, ++ GST_APP_LEAKY_TYPE_DOWNSTREAM ++} GstAppLeakyType; ++ + struct _GstAppSrc + { + GstBaseSrc basesrc; +@@ -154,6 +171,30 @@ guint64 gst_app_src_get_max_bytes (GstAppSrc *appsrc); + GST_APP_API + guint64 gst_app_src_get_current_level_bytes (GstAppSrc *appsrc); + ++GST_APP_API ++void gst_app_src_set_max_buffers (GstAppSrc *appsrc, guint64 max); ++ ++GST_APP_API ++guint64 gst_app_src_get_max_buffers (GstAppSrc *appsrc); ++ ++GST_APP_API ++guint64 gst_app_src_get_current_level_buffers (GstAppSrc *appsrc); ++ ++GST_APP_API ++void gst_app_src_set_max_time (GstAppSrc *appsrc, GstClockTime max); ++ ++GST_APP_API ++GstClockTime gst_app_src_get_max_time (GstAppSrc *appsrc); ++ ++GST_APP_API ++GstClockTime gst_app_src_get_current_level_time (GstAppSrc *appsrc); ++ ++GST_APP_API ++void gst_app_src_set_leaky_type (GstAppSrc *appsrc, GstAppLeakyType leaky); ++ ++GST_APP_API ++GstAppLeakyType gst_app_src_get_leaky_type (GstAppSrc *appsrc); ++ + GST_APP_API + void gst_app_src_set_latency (GstAppSrc *appsrc, guint64 min, guint64 max); + +diff --git a/gst-libs/gst/audio/audio-buffer.c b/gst-libs/gst/audio/audio-buffer.c +index 5fdec3c35..7d139f0b2 100644 +--- a/gst-libs/gst/audio/audio-buffer.c ++++ b/gst-libs/gst/audio/audio-buffer.c +@@ -23,7 +23,7 @@ + #endif + + #include "audio-buffer.h" +-#include ++ + + static void + gst_audio_buffer_unmap_internal (GstAudioBuffer * buffer, guint n_unmap) +@@ -55,7 +55,7 @@ gst_audio_buffer_unmap (GstAudioBuffer * buffer) + + /** + * gst_audio_buffer_map: +- * @buffer: pointer to a #GstAudioBuffer ++ * @buffer: (out caller-allocates): pointer to a #GstAudioBuffer + * @info: the audio properties of the buffer + * @gstbuffer: (transfer none): the #GstBuffer to be mapped + * @flags: the access mode for the memory +diff --git a/gst-libs/gst/audio/audio-channel-mixer.c b/gst-libs/gst/audio/audio-channel-mixer.c +index 49d248c3d..bfc3aa24f 100644 +--- a/gst-libs/gst/audio/audio-channel-mixer.c ++++ b/gst-libs/gst/audio/audio-channel-mixer.c +@@ -817,8 +817,7 @@ DEFINE_FLOAT_MIX_FUNC (double, planar, planar); + * + * Create a new channel mixer object for the given parameters. + * +- * Returns: a new #GstAudioChannelMixer object, or %NULL if @format isn't supported, +- * @matrix is invalid, or @matrix is %NULL and @in_channels != @out_channels. ++ * Returns: a new #GstAudioChannelMixer object. + * Free with gst_audio_channel_mixer_free() after usage. + * + * Since: 1.14 +@@ -834,8 +833,6 @@ gst_audio_channel_mixer_new_with_matrix (GstAudioChannelMixerFlags flags, + || format == GST_AUDIO_FORMAT_S32 + || format == GST_AUDIO_FORMAT_F32 + || format == GST_AUDIO_FORMAT_F64, NULL); +- g_return_val_if_fail (in_channels > 0 && in_channels < 64, NULL); +- g_return_val_if_fail (out_channels > 0 && out_channels < 64, NULL); + + mix = g_slice_new0 (GstAudioChannelMixer); + mix->in_channels = in_channels; +@@ -980,7 +977,7 @@ gst_audio_channel_mixer_new_with_matrix (GstAudioChannelMixerFlags flags, + * + * Create a new channel mixer object for the given parameters. + * +- * Returns: a new #GstAudioChannelMixer object, or %NULL if @format isn't supported. ++ * Returns: a new #GstAudioChannelMixer object. + * Free with gst_audio_channel_mixer_free() after usage. 
+ */ + GstAudioChannelMixer * +@@ -996,8 +993,6 @@ gst_audio_channel_mixer_new (GstAudioChannelMixerFlags flags, + || format == GST_AUDIO_FORMAT_S32 + || format == GST_AUDIO_FORMAT_F32 + || format == GST_AUDIO_FORMAT_F64, NULL); +- g_return_val_if_fail (in_channels > 0 && in_channels < 64, NULL); +- g_return_val_if_fail (out_channels > 0 && out_channels < 64, NULL); + + matrix = + gst_audio_channel_mixer_setup_matrix (flags, in_channels, in_position, +diff --git a/gst-libs/gst/audio/audio-converter.c b/gst-libs/gst/audio/audio-converter.c +index 06a2841dd..f1116d391 100644 +--- a/gst-libs/gst/audio/audio-converter.c ++++ b/gst-libs/gst/audio/audio-converter.c +@@ -263,7 +263,6 @@ audio_chain_get_samples (AudioChain * chain, gsize * avail) + return res; + } + +-/* + static guint + get_opt_uint (GstAudioConverter * convert, const gchar * opt, guint def) + { +@@ -272,7 +271,6 @@ get_opt_uint (GstAudioConverter * convert, const gchar * opt, guint def) + res = def; + return res; + } +-*/ + + static gint + get_opt_enum (GstAudioConverter * convert, const gchar * opt, GType type, +@@ -292,6 +290,7 @@ get_opt_value (GstAudioConverter * convert, const gchar * opt) + + #define DEFAULT_OPT_RESAMPLER_METHOD GST_AUDIO_RESAMPLER_METHOD_BLACKMAN_NUTTALL + #define DEFAULT_OPT_DITHER_METHOD GST_AUDIO_DITHER_NONE ++#define DEFAULT_OPT_DITHER_THRESHOLD 20 + #define DEFAULT_OPT_NOISE_SHAPING_METHOD GST_AUDIO_NOISE_SHAPING_NONE + #define DEFAULT_OPT_QUANTIZATION 1 + +@@ -301,6 +300,8 @@ get_opt_value (GstAudioConverter * convert, const gchar * opt) + #define GET_OPT_DITHER_METHOD(c) get_opt_enum(c, \ + GST_AUDIO_CONVERTER_OPT_DITHER_METHOD, GST_TYPE_AUDIO_DITHER_METHOD, \ + DEFAULT_OPT_DITHER_METHOD) ++#define GET_OPT_DITHER_THRESHOLD(c) get_opt_uint(c, \ ++ GST_AUDIO_CONVERTER_OPT_DITHER_THRESHOLD, DEFAULT_OPT_DITHER_THRESHOLD) + #define GET_OPT_NOISE_SHAPING_METHOD(c) get_opt_enum(c, \ + GST_AUDIO_CONVERTER_OPT_NOISE_SHAPING_METHOD, GST_TYPE_AUDIO_NOISE_SHAPING_METHOD, \ + DEFAULT_OPT_NOISE_SHAPING_METHOD) +@@ -478,7 +479,7 @@ do_unpack (AudioChain * chain, gpointer user_data) + } + } else { + for (i = 0; i < chain->blocks; i++) { +- gst_audio_format_fill_silence (chain->finfo, tmp[i], ++ gst_audio_format_info_fill_silence (chain->finfo, tmp[i], + num_samples * chain->inc); + } + } +@@ -951,9 +952,11 @@ chain_quantize (GstAudioConverter * convert, AudioChain * prev) + gint in_depth, out_depth; + gboolean in_int, out_int; + GstAudioDitherMethod dither; ++ guint dither_threshold; + GstAudioNoiseShapingMethod ns; + + dither = GET_OPT_DITHER_METHOD (convert); ++ dither_threshold = GET_OPT_DITHER_THRESHOLD (convert); + ns = GET_OPT_NOISE_SHAPING_METHOD (convert); + + cur_finfo = gst_audio_format_get_info (convert->current_format); +@@ -969,7 +972,7 @@ chain_quantize (GstAudioConverter * convert, AudioChain * prev) + * as DA converters only can do a SNR up to 20 bits in reality. + * Also don't dither or apply noise shaping if target depth is larger than + * source depth. 
*/ +- if (out_depth > 20 || (in_int && out_depth >= in_depth)) { ++ if (out_depth > dither_threshold || (in_int && out_depth >= in_depth)) { + dither = GST_AUDIO_DITHER_NONE; + ns = GST_AUDIO_NOISE_SHAPING_NONE; + GST_INFO ("using no dither and noise shaping"); +@@ -1103,7 +1106,7 @@ converter_passthrough (GstAudioConverter * convert, + } + } else { + for (i = 0; i < chain->blocks; i++) +- gst_audio_format_fill_silence (convert->in.finfo, out[i], samples); ++ gst_audio_format_info_fill_silence (convert->in.finfo, out[i], samples); + } + return TRUE; + } +@@ -1249,7 +1252,7 @@ converter_endian (GstAudioConverter * convert, + convert->swap_endian (out[i], in[i], samples); + } else { + for (i = 0; i < chain->blocks; i++) +- gst_audio_format_fill_silence (convert->in.finfo, out[i], samples); ++ gst_audio_format_info_fill_silence (convert->in.finfo, out[i], samples); + } + return TRUE; + } +@@ -1317,7 +1320,7 @@ converter_resample (GstAudioConverter * convert, + * @config contains extra configuration options, see `GST_AUDIO_CONVERTER_OPT_*` + * parameters for details about the options and values. + * +- * Returns: a #GstAudioConverter or %NULL if conversion is not possible. ++ * Returns: (nullable): a #GstAudioConverter or %NULL if conversion is not possible. + */ + GstAudioConverter * + gst_audio_converter_new (GstAudioConverterFlags flags, GstAudioInfo * in_info, +diff --git a/gst-libs/gst/audio/audio-converter.h b/gst-libs/gst/audio/audio-converter.h +index 083bda4fa..ef5ac9a00 100644 +--- a/gst-libs/gst/audio/audio-converter.h ++++ b/gst-libs/gst/audio/audio-converter.h +@@ -106,6 +106,17 @@ typedef struct _GstAudioConverter GstAudioConverter; + */ + #define GST_AUDIO_CONVERTER_OPT_MIX_MATRIX "GstAudioConverter.mix-matrix" + ++/** ++ * GST_AUDIO_CONVERTER_OPT_DITHER_THRESHOLD: ++ * ++ * Threshold for the output bit depth at/below which to apply dithering. ++ * ++ * Default is 20 bit. ++ * ++ * Since: 1.22 ++ */ ++#define GST_AUDIO_CONVERTER_OPT_DITHER_THRESHOLD "GstAudioConverter.dither-threshold" ++ + /** + * GstAudioConverterFlags: + * @GST_AUDIO_CONVERTER_FLAG_NONE: no flag +diff --git a/gst-libs/gst/audio/audio-format.c b/gst-libs/gst/audio/audio-format.c +index b79a49c73..b1b8d60a1 100644 +--- a/gst-libs/gst/audio/audio-format.c ++++ b/gst-libs/gst/audio/audio-format.c +@@ -459,14 +459,34 @@ gst_audio_format_get_info (GstAudioFormat format) + * @length: the length to fill + * + * Fill @length bytes in @dest with silence samples for @info. ++ * ++ * Deprecated: 1.20: Use gst_audio_format_info_fill_silence() instead. + */ + void + gst_audio_format_fill_silence (const GstAudioFormatInfo * info, + gpointer dest, gsize length) ++{ ++ gst_audio_format_info_fill_silence (info, dest, length); ++} ++ ++/** ++ * gst_audio_format_info_fill_silence: ++ * @info: a #GstAudioFormatInfo ++ * @dest: (array length=length) (element-type guint8): a destination ++ * to fill ++ * @length: the length to fill ++ * ++ * Fill @length bytes in @dest with silence samples for @info. 
++ * ++ * Since: 1.20 ++ */ ++void ++gst_audio_format_info_fill_silence (const GstAudioFormatInfo * info, ++ gpointer dest, gsize length) + { + guint8 *dptr = dest; + +- g_return_if_fail (info != NULL); ++ g_return_if_fail (GST_AUDIO_FORMAT_INFO_IS_VALID_RAW (info)); + g_return_if_fail (dest != NULL); + + if (info->flags & GST_AUDIO_FORMAT_FLAG_FLOAT || +diff --git a/gst-libs/gst/audio/audio-format.h b/gst-libs/gst/audio/audio-format.h +index aaab644e1..c7aeef8de 100644 +--- a/gst-libs/gst/audio/audio-format.h ++++ b/gst-libs/gst/audio/audio-format.h +@@ -256,6 +256,18 @@ struct _GstAudioFormatInfo { + GST_AUDIO_API + GType gst_audio_format_info_get_type (void); + ++/** ++ * GST_AUDIO_FORMAT_INFO_IS_VALID_RAW: ++ * ++ * Tests that the given #GstAudioFormatInfo represents a valid un-encoded ++ * format. ++ * ++ * Since: 1.22 ++ */ ++#define GST_AUDIO_FORMAT_INFO_IS_VALID_RAW(info) \ ++ (info != NULL && (info)->format > GST_AUDIO_FORMAT_ENCODED && \ ++ (info)->width > 0 && (info)->depth > 0) ++ + #define GST_AUDIO_FORMAT_INFO_FORMAT(info) ((info)->format) + #define GST_AUDIO_FORMAT_INFO_NAME(info) ((info)->name) + #define GST_AUDIO_FORMAT_INFO_FLAGS(info) ((info)->flags) +@@ -286,8 +298,11 @@ const GstAudioFormatInfo * + gst_audio_format_get_info (GstAudioFormat format) G_GNUC_CONST; + + GST_AUDIO_API +-void gst_audio_format_fill_silence (const GstAudioFormatInfo *info, +- gpointer dest, gsize length); ++void gst_audio_format_info_fill_silence (const GstAudioFormatInfo *info, ++ gpointer dest, gsize length); ++GST_AUDIO_API G_DEPRECATED_FOR(gst_audio_format_info_fill_silence) ++void gst_audio_format_fill_silence (const GstAudioFormatInfo *info, ++ gpointer dest, gsize length); + + /** + * GST_AUDIO_RATE_RANGE: +diff --git a/gst-libs/gst/audio/audio-info.c b/gst-libs/gst/audio/audio-info.c +index b18c2d932..a37e3cf02 100644 +--- a/gst-libs/gst/audio/audio-info.c ++++ b/gst-libs/gst/audio/audio-info.c +@@ -101,7 +101,7 @@ gst_audio_info_new (void) + + /** + * gst_audio_info_init: +- * @info: a #GstAudioInfo ++ * @info: (out caller-allocates): a #GstAudioInfo + * + * Initialize @info with default values. + */ +@@ -181,7 +181,7 @@ gst_audio_info_set_format (GstAudioInfo * info, GstAudioFormat format, + + /** + * gst_audio_info_from_caps: +- * @info: a #GstAudioInfo ++ * @info: (out caller-allocates): a #GstAudioInfo + * @caps: a #GstCaps + * + * Parse @caps and update @info. +@@ -320,6 +320,28 @@ invalid_channel_mask: + } + } + ++/** ++ * gst_audio_info_new_from_caps: ++ * @caps: a #GstCaps ++ * ++ * Parse @caps to generate a #GstAudioInfo. 
++ * ++ * Returns: (nullable): A #GstAudioInfo, or %NULL if @caps couldn't be parsed ++ * Since: 1.20 ++ */ ++GstAudioInfo * ++gst_audio_info_new_from_caps (const GstCaps * caps) ++{ ++ GstAudioInfo *ret = gst_audio_info_new (); ++ ++ if (gst_audio_info_from_caps (ret, caps)) { ++ return ret; ++ } else { ++ gst_audio_info_free (ret); ++ return NULL; ++ } ++} ++ + /** + * gst_audio_info_to_caps: + * @info: a #GstAudioInfo +diff --git a/gst-libs/gst/audio/audio-info.h b/gst-libs/gst/audio/audio-info.h +index cc9e97222..51264b930 100644 +--- a/gst-libs/gst/audio/audio-info.h ++++ b/gst-libs/gst/audio/audio-info.h +@@ -93,7 +93,7 @@ GType gst_audio_info_get_type (void); + #define GST_AUDIO_INFO_IS_BIG_ENDIAN(i) (GST_AUDIO_FORMAT_INFO_IS_BIG_ENDIAN((i)->finfo)) + + #define GST_AUDIO_INFO_FLAGS(info) ((info)->flags) +-#define GST_AUDIO_INFO_IS_UNPOSITIONED(info) ((info)->flags & GST_AUDIO_FLAG_UNPOSITIONED) ++#define GST_AUDIO_INFO_IS_UNPOSITIONED(info) (((info)->flags & GST_AUDIO_FLAG_UNPOSITIONED) != 0) + #define GST_AUDIO_INFO_LAYOUT(info) ((info)->layout) + + #define GST_AUDIO_INFO_RATE(info) ((info)->rate) +@@ -104,6 +104,9 @@ GType gst_audio_info_get_type (void); + GST_AUDIO_API + GstAudioInfo * gst_audio_info_new (void); + ++GST_AUDIO_API ++GstAudioInfo * gst_audio_info_new_from_caps (const GstCaps * caps); ++ + GST_AUDIO_API + void gst_audio_info_init (GstAudioInfo *info); + +diff --git a/gst-libs/gst/audio/audio-quantize.c b/gst-libs/gst/audio/audio-quantize.c +index 8c627173b..e4c33ba6a 100644 +--- a/gst-libs/gst/audio/audio-quantize.c ++++ b/gst-libs/gst/audio/audio-quantize.c +@@ -54,6 +54,7 @@ struct _GstAudioQuantize + + /* last random number generated per channel for hifreq TPDF dither */ + gpointer last_random; ++ guint32 random_state; + /* contains the past quantization errors, error[channels][count] */ + guint error_size; + gpointer error_buf; +@@ -92,27 +93,28 @@ gst_audio_quantize_quantize_int_none_none (GstAudioQuantize * quant, + samples * quant->stride); + } + +-/* This is the base function, implementing a linear congruential generator +- * and returning a pseudo random number between 0 and 2^32 - 1. 
+- */ ++/* 32 bit xorshift PRNG, see https://en.wikipedia.org/wiki/Xorshift */ + static inline guint32 +-gst_fast_random_uint32 (void) ++gst_fast_random_uint32 (guint32 * state) + { +- static guint32 state = 0xdeadbeef; +- return (state = state * 1103515245 + 12345); ++ guint64 x = *state; ++ x ^= x << 13; ++ x ^= x >> 17; ++ x ^= x << 5; ++ return (*state = x); + } + + static inline gint32 +-gst_fast_random_int32 (void) ++gst_fast_random_int32 (guint32 * state) + { +- return (gint32) gst_fast_random_uint32 (); ++ return (gint32) gst_fast_random_uint32 (state); + } + + /* Assuming dither == 2^n, + * returns one of 2^(n+1) possible random values: + * -dither <= retval < dither */ +-#define RANDOM_INT_DITHER(dither) \ +- (- dither + (gst_fast_random_int32 () & ((dither << 1) - 1))) ++#define RANDOM_INT_DITHER(state, dither) \ ++ (- dither + (gst_fast_random_int32 (state) & ((dither << 1) - 1))) + + static void + setup_dither_buf (GstAudioQuantize * quant, gint samples) +@@ -144,13 +146,15 @@ setup_dither_buf (GstAudioQuantize * quant, gint samples) + case GST_AUDIO_DITHER_RPDF: + dither = 1 << (shift); + for (i = 0; i < len; i++) +- d[i] = bias + RANDOM_INT_DITHER (dither); ++ d[i] = bias + RANDOM_INT_DITHER (&quant->random_state, dither); + break; + + case GST_AUDIO_DITHER_TPDF: + dither = 1 << (shift - 1); + for (i = 0; i < len; i++) +- d[i] = bias + RANDOM_INT_DITHER (dither) + RANDOM_INT_DITHER (dither); ++ d[i] = ++ bias + RANDOM_INT_DITHER (&quant->random_state, ++ dither) + RANDOM_INT_DITHER (&quant->random_state, dither); + break; + + case GST_AUDIO_DITHER_TPDF_HF: +@@ -159,7 +163,7 @@ setup_dither_buf (GstAudioQuantize * quant, gint samples) + + dither = 1 << (shift - 1); + for (i = 0; i < len; i++) { +- tmp = RANDOM_INT_DITHER (dither); ++ tmp = RANDOM_INT_DITHER (&quant->random_state, dither); + d[i] = bias + tmp - last_random[i % stride]; + last_random[i % stride] = tmp; + } +@@ -369,6 +373,9 @@ gst_audio_quantize_setup_noise_shaping (GstAudioQuantize * quant) + static void + gst_audio_quantize_setup_dither (GstAudioQuantize * quant) + { ++ /* Some non-zero number */ ++ quant->random_state = 0xc2d6038f; ++ + switch (quant->dither) { + case GST_AUDIO_DITHER_TPDF_HF: + quant->last_random = g_new0 (gint32, quant->stride); +diff --git a/gst-libs/gst/audio/audio-resampler.c b/gst-libs/gst/audio/audio-resampler.c +index c67f86052..c676f7e28 100644 +--- a/gst-libs/gst/audio/audio-resampler.c ++++ b/gst-libs/gst/audio/audio-resampler.c +@@ -257,7 +257,7 @@ convert_taps_##type##_c (gdouble *tmp_taps, gpointer taps, \ + for (j = 0; j < n_taps; j++) \ + t[j] = floor (offset + tmp_taps[j] * multiplier / weight); \ + if (!exact) \ +- GST_WARNING ("can't find exact taps"); \ ++ GST_DEBUG ("can't find exact taps"); \ + } + + #define MAKE_CONVERT_TAPS_FLOAT_FUNC(type) \ +@@ -1342,8 +1342,7 @@ gst_audio_resampler_options_set_quality (GstAudioResamplerMethod method, + * + * Make a new resampler. + * +- * Returns: (skip) (transfer full): The new #GstAudioResampler, or +- * %NULL on failure. ++ * Returns: (skip) (transfer full): The new #GstAudioResampler. + */ + GstAudioResampler * + gst_audio_resampler_new (GstAudioResamplerMethod method, +diff --git a/gst-libs/gst/audio/audio.c b/gst-libs/gst/audio/audio.c +index 440a4e87d..1180326c9 100644 +--- a/gst-libs/gst/audio/audio.c ++++ b/gst-libs/gst/audio/audio.c +@@ -69,7 +69,7 @@ ensure_debug_category (void) + * After calling this function the caller does not own a reference to + * @buffer anymore. 
+ * +- * Returns: (transfer full): %NULL if the buffer is completely outside the configured segment, ++ * Returns: (transfer full) (nullable): %NULL if the buffer is completely outside the configured segment, + * otherwise the clipped buffer is returned. + * + * If the buffer has no timestamp, it is assumed to be inside the segment and +@@ -91,7 +91,7 @@ gst_audio_buffer_clip (GstBuffer * buffer, const GstSegment * segment, + segment->format == GST_FORMAT_DEFAULT, buffer); + g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL); + +- if (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) ++ if (!GST_BUFFER_PTS_IS_VALID (buffer)) + /* No timestamp - assume the buffer is completely in the segment */ + return buffer; + +@@ -109,7 +109,7 @@ gst_audio_buffer_clip (GstBuffer * buffer, const GstSegment * segment, + if (!size) + return buffer; + +- timestamp = GST_BUFFER_TIMESTAMP (buffer); ++ timestamp = GST_BUFFER_PTS (buffer); + GST_DEBUG ("timestamp %" GST_TIME_FORMAT, GST_TIME_ARGS (timestamp)); + if (GST_BUFFER_DURATION_IS_VALID (buffer)) { + duration = GST_BUFFER_DURATION (buffer); +@@ -214,9 +214,9 @@ gst_audio_buffer_clip (GstBuffer * buffer, const GstSegment * segment, + if (trim == 0 && size == osize) { + ret = buffer; + +- if (GST_BUFFER_TIMESTAMP (ret) != timestamp) { ++ if (GST_BUFFER_PTS (ret) != timestamp) { + ret = gst_buffer_make_writable (ret); +- GST_BUFFER_TIMESTAMP (ret) = timestamp; ++ GST_BUFFER_PTS (ret) = timestamp; + } + if (GST_BUFFER_DURATION (ret) != duration) { + ret = gst_buffer_make_writable (ret); +@@ -229,7 +229,7 @@ gst_audio_buffer_clip (GstBuffer * buffer, const GstSegment * segment, + + GST_DEBUG ("timestamp %" GST_TIME_FORMAT, GST_TIME_ARGS (timestamp)); + if (ret) { +- GST_BUFFER_TIMESTAMP (ret) = timestamp; ++ GST_BUFFER_PTS (ret) = timestamp; + + if (change_duration) + GST_BUFFER_DURATION (ret) = duration; +@@ -258,11 +258,16 @@ gst_audio_buffer_clip (GstBuffer * buffer, const GstSegment * segment, + * the necessary amount of samples from the end and @trim number of samples + * from the beginning. + * ++ * This function does not know the audio rate, therefore the caller is ++ * responsible for re-setting the correct timestamp and duration to the ++ * buffer. However, timestamp will be preserved if trim == 0, and duration ++ * will also be preserved if there is no trimming to be done. Offset and ++ * offset end will be preserved / updated. ++ * + * After calling this function the caller does not own a reference to + * @buffer anymore. + * +- * Returns: (transfer full): the truncated buffer or %NULL if the arguments +- * were invalid ++ * Returns: (transfer full): the truncated buffer + * + * Since: 1.16 + */ +@@ -274,11 +279,14 @@ gst_audio_buffer_truncate (GstBuffer * buffer, gint bpf, gsize trim, + GstBuffer *ret = NULL; + gsize orig_samples; + gint i; ++ GstClockTime orig_ts, orig_offset; + + g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL); + + meta = gst_buffer_get_audio_meta (buffer); + orig_samples = meta ? 
meta->samples : gst_buffer_get_size (buffer) / bpf; ++ orig_ts = GST_BUFFER_PTS (buffer); ++ orig_offset = GST_BUFFER_OFFSET (buffer); + + g_return_val_if_fail (trim < orig_samples, NULL); + g_return_val_if_fail (samples == -1 || trim + samples <= orig_samples, NULL); +@@ -312,5 +320,22 @@ gst_audio_buffer_truncate (GstBuffer * buffer, gint bpf, gsize trim, + } + } + ++ GST_BUFFER_DTS (ret) = GST_CLOCK_TIME_NONE; ++ if (GST_CLOCK_TIME_IS_VALID (orig_ts) && trim == 0) { ++ GST_BUFFER_PTS (ret) = orig_ts; ++ } else { ++ GST_BUFFER_PTS (ret) = GST_CLOCK_TIME_NONE; ++ } ++ /* If duration was the same, it would have meant there's no trimming to be ++ * done, so we have an early return further up */ ++ GST_BUFFER_DURATION (ret) = GST_CLOCK_TIME_NONE; ++ if (orig_offset != GST_BUFFER_OFFSET_NONE) { ++ GST_BUFFER_OFFSET (ret) = orig_offset + trim; ++ GST_BUFFER_OFFSET_END (ret) = GST_BUFFER_OFFSET (ret) + samples; ++ } else { ++ GST_BUFFER_OFFSET (ret) = GST_BUFFER_OFFSET_NONE; ++ GST_BUFFER_OFFSET_END (ret) = GST_BUFFER_OFFSET_NONE; ++ } ++ + return ret; + } +diff --git a/gst-libs/gst/audio/gstaudiobasesink.c b/gst-libs/gst/audio/gstaudiobasesink.c +index e40989ab0..891941d12 100644 +--- a/gst-libs/gst/audio/gstaudiobasesink.c ++++ b/gst-libs/gst/audio/gstaudiobasesink.c +@@ -685,6 +685,7 @@ gst_audio_base_sink_set_alignment_threshold (GstAudioBaseSink * sink, + GstClockTime alignment_threshold) + { + g_return_if_fail (GST_IS_AUDIO_BASE_SINK (sink)); ++ g_return_if_fail (GST_CLOCK_TIME_IS_VALID (alignment_threshold)); + + GST_OBJECT_LOCK (sink); + sink->priv->alignment_threshold = alignment_threshold; +@@ -725,6 +726,7 @@ gst_audio_base_sink_set_discont_wait (GstAudioBaseSink * sink, + GstClockTime discont_wait) + { + g_return_if_fail (GST_IS_AUDIO_BASE_SINK (sink)); ++ g_return_if_fail (GST_CLOCK_TIME_IS_VALID (discont_wait)); + + GST_OBJECT_LOCK (sink); + sink->priv->discont_wait = discont_wait; +@@ -1864,7 +1866,7 @@ gst_audio_base_sink_render (GstBaseSink * bsink, GstBuffer * buf) + + samples = size / bpf; + +- time = GST_BUFFER_TIMESTAMP (buf); ++ time = GST_BUFFER_PTS (buf); + + /* Last ditch attempt to ensure that we only play silence if + * we are in trickmode no-audio mode (or if a buffer is marked as a GAP) +@@ -2254,7 +2256,7 @@ sync_latency_failed: + * call the ::create_ringbuffer vmethod and will set @sink as the parent of + * the returned buffer (see gst_object_set_parent()). + * +- * Returns: (transfer none): The new ringbuffer of @sink. ++ * Returns: (transfer none) (nullable): The new ringbuffer of @sink. 
+ */ + GstAudioRingBuffer * + gst_audio_base_sink_create_ringbuffer (GstAudioBaseSink * sink) +diff --git a/gst-libs/gst/audio/gstaudiobasesink.h b/gst-libs/gst/audio/gstaudiobasesink.h +index 0c009fd4a..883ed6ece 100644 +--- a/gst-libs/gst/audio/gstaudiobasesink.h ++++ b/gst-libs/gst/audio/gstaudiobasesink.h +@@ -110,7 +110,7 @@ typedef struct _GstAudioBaseSinkPrivate GstAudioBaseSinkPrivate; + * @GST_AUDIO_BASE_SINK_DISCONT_REASON_FLUSH: Samples have been flushed + * @GST_AUDIO_BASE_SINK_DISCONT_REASON_SYNC_LATENCY: Sink was synchronized to the estimated latency (occurs during initialization) + * @GST_AUDIO_BASE_SINK_DISCONT_REASON_ALIGNMENT: Aligning buffers failed because the timestamps are too discontinuous +- * @GST_AUDIO_BASE_SINK_DISCONT_REASON_DEVICE_FAILURE: Audio output device experienced and recovered from an error but introduced latency in the process (see also @gst_audio_base_sink_report_device_failure()) ++ * @GST_AUDIO_BASE_SINK_DISCONT_REASON_DEVICE_FAILURE: Audio output device experienced and recovered from an error but introduced latency in the process (see also gst_audio_base_sink_report_device_failure()) + * + * Different possible reasons for discontinuities. This enum is useful for the custom + * slave method. +diff --git a/gst-libs/gst/audio/gstaudiobasesrc.c b/gst-libs/gst/audio/gstaudiobasesrc.c +index 3480ee7b8..104570dec 100644 +--- a/gst-libs/gst/audio/gstaudiobasesrc.c ++++ b/gst-libs/gst/audio/gstaudiobasesrc.c +@@ -40,11 +40,15 @@ + #include + #include "gstaudiobasesrc.h" + +-#include "gst/gst-i18n-plugin.h" ++#include + + GST_DEBUG_CATEGORY_STATIC (gst_audio_base_src_debug); + #define GST_CAT_DEFAULT gst_audio_base_src_debug + ++/* This function is public in >= 1.23, but internal in 1.22 */ ++G_GNUC_INTERNAL ++ void __gst_audio_ring_buffer_set_errored (GstAudioRingBuffer * buf); ++ + struct _GstAudioBaseSrcPrivate + { + /* the clock slaving algorithm in use */ +@@ -518,7 +522,8 @@ gst_audio_base_src_setcaps (GstBaseSrc * bsrc, GstCaps * caps) + + spec = &src->ringbuffer->spec; + +- if (G_UNLIKELY (spec->caps && gst_caps_is_equal (spec->caps, caps))) { ++ if (G_UNLIKELY (gst_audio_ring_buffer_is_acquired (src->ringbuffer) ++ && gst_caps_is_equal (spec->caps, caps))) { + GST_DEBUG_OBJECT (src, + "Ringbuffer caps haven't changed, skipping reconfiguration"); + return TRUE; +@@ -1031,7 +1036,7 @@ gst_audio_base_src_create (GstBaseSrc * bsrc, guint64 offset, guint length, + no_sync: + GST_OBJECT_UNLOCK (src); + +- GST_BUFFER_TIMESTAMP (buf) = timestamp; ++ GST_BUFFER_PTS (buf) = timestamp; + GST_BUFFER_DURATION (buf) = duration; + GST_BUFFER_OFFSET (buf) = sample; + GST_BUFFER_OFFSET_END (buf) = sample + samples; +@@ -1039,7 +1044,7 @@ no_sync: + *outbuf = buf; + + GST_LOG_OBJECT (src, "Pushed buffer timestamp %" GST_TIME_FORMAT, +- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf))); ++ GST_TIME_ARGS (GST_BUFFER_PTS (buf))); + + return GST_FLOW_OK; + +@@ -1085,7 +1090,7 @@ got_error: + * the ::create_ringbuffer vmethod and will set @src as the parent of the + * returned buffer (see gst_object_set_parent()). + * +- * Returns: (transfer none): The new ringbuffer of @src. ++ * Returns: (transfer none) (nullable): The new ringbuffer of @src. 
+ */ + GstAudioRingBuffer * + gst_audio_base_src_create_ringbuffer (GstAudioBaseSrc * src) +@@ -1228,7 +1233,7 @@ gst_audio_base_src_post_message (GstElement * element, GstMessage * message) + * flow error message */ + ret = GST_ELEMENT_CLASS (parent_class)->post_message (element, message); + +- g_atomic_int_set (&ringbuffer->state, GST_AUDIO_RING_BUFFER_STATE_ERROR); ++ __gst_audio_ring_buffer_set_errored (ringbuffer); + GST_AUDIO_RING_BUFFER_SIGNAL (ringbuffer); + gst_object_unref (ringbuffer); + } else { +diff --git a/gst-libs/gst/audio/gstaudiocdsrc.c b/gst-libs/gst/audio/gstaudiocdsrc.c +index 81421d327..437bebb5d 100644 +--- a/gst-libs/gst/audio/gstaudiocdsrc.c ++++ b/gst-libs/gst/audio/gstaudiocdsrc.c +@@ -96,7 +96,7 @@ + #include + #include + #include "gstaudiocdsrc.h" +-#include "gst/gst-i18n-plugin.h" ++#include + + GST_DEBUG_CATEGORY_STATIC (gst_audio_cd_src_debug); + #define GST_CAT_DEFAULT gst_audio_cd_src_debug +@@ -1764,7 +1764,7 @@ gst_audio_cd_src_create (GstPushSrc * pushsrc, GstBuffer ** buffer) + GST_SECOND, 44100); + } + +- GST_BUFFER_TIMESTAMP (buf) = position; ++ GST_BUFFER_PTS (buf) = position; + GST_BUFFER_DURATION (buf) = duration; + + GST_LOG_OBJECT (src, "pushing sector %d with timestamp %" GST_TIME_FORMAT, +diff --git a/gst-libs/gst/audio/gstaudiodecoder.c b/gst-libs/gst/audio/gstaudiodecoder.c +index 47e9e1f53..9396a9ff4 100644 +--- a/gst-libs/gst/audio/gstaudiodecoder.c ++++ b/gst-libs/gst/audio/gstaudiodecoder.c +@@ -180,6 +180,8 @@ typedef struct _GstAudioDecoderContext + /* MT-protected (with LOCK) */ + GstClockTime min_latency; + GstClockTime max_latency; ++ /* Tracks whether the latency message was posted at least once */ ++ gboolean posted_latency_msg; + + GstAllocator *allocator; + GstAllocationParams params; +@@ -555,6 +557,7 @@ gst_audio_decoder_reset (GstAudioDecoder * dec, gboolean full) + memset (&dec->priv->ctx, 0, sizeof (dec->priv->ctx)); + + gst_audio_info_init (&dec->priv->ctx.info); ++ dec->priv->ctx.posted_latency_msg = FALSE; + GST_OBJECT_UNLOCK (dec); + dec->priv->ctx.had_output_data = FALSE; + dec->priv->ctx.had_input_data = FALSE; +@@ -978,12 +981,12 @@ gst_audio_decoder_push_forward (GstAudioDecoder * dec, GstBuffer * buf) + } + + ctx->had_output_data = TRUE; +- ts = GST_BUFFER_TIMESTAMP (buf); ++ ts = GST_BUFFER_PTS (buf); + + GST_LOG_OBJECT (dec, + "clipping buffer of size %" G_GSIZE_FORMAT " with ts %" GST_TIME_FORMAT + ", duration %" GST_TIME_FORMAT, gst_buffer_get_size (buf), +- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), ++ GST_TIME_ARGS (GST_BUFFER_PTS (buf)), + GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); + + /* clip buffer */ +@@ -1012,11 +1015,11 @@ gst_audio_decoder_push_forward (GstAudioDecoder * dec, GstBuffer * buf) + } + + /* track where we are */ +- if (G_LIKELY (GST_BUFFER_TIMESTAMP_IS_VALID (buf))) { ++ if (G_LIKELY (GST_BUFFER_PTS_IS_VALID (buf))) { + /* duration should always be valid for raw audio */ + g_assert (GST_BUFFER_DURATION_IS_VALID (buf)); + dec->output_segment.position = +- GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf); ++ GST_BUFFER_PTS (buf) + GST_BUFFER_DURATION (buf); + } + + if (klass->pre_push) { +@@ -1034,7 +1037,7 @@ gst_audio_decoder_push_forward (GstAudioDecoder * dec, GstBuffer * buf) + GST_LOG_OBJECT (dec, + "pushing buffer of size %" G_GSIZE_FORMAT " with ts %" GST_TIME_FORMAT + ", duration %" GST_TIME_FORMAT, gst_buffer_get_size (buf), +- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), ++ GST_TIME_ARGS (GST_BUFFER_PTS (buf)), + GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); + + ret = 
gst_pad_push (dec->srcpad, buf); +@@ -1061,7 +1064,7 @@ gst_audio_decoder_output (GstAudioDecoder * dec, GstBuffer * buf) + GST_LOG_OBJECT (dec, + "output buffer of size %" G_GSIZE_FORMAT " with ts %" GST_TIME_FORMAT + ", duration %" GST_TIME_FORMAT, gst_buffer_get_size (buf), +- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), ++ GST_TIME_ARGS (GST_BUFFER_PTS (buf)), + GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); + } + +@@ -1079,9 +1082,9 @@ again: + /* forcibly send current */ + assemble = TRUE; + GST_LOG_OBJECT (dec, "forcing fragment flush"); +- } else if (av && (!GST_BUFFER_TIMESTAMP_IS_VALID (buf) || ++ } else if (av && (!GST_BUFFER_PTS_IS_VALID (buf) || + !GST_CLOCK_TIME_IS_VALID (priv->out_ts) || +- ((diff = GST_CLOCK_DIFF (GST_BUFFER_TIMESTAMP (buf), ++ ((diff = GST_CLOCK_DIFF (GST_BUFFER_PTS (buf), + priv->out_ts + priv->out_dur)) > tol) || diff < -tol)) { + assemble = TRUE; + GST_LOG_OBJECT (dec, "buffer %d ms apart from current fragment", +@@ -1090,7 +1093,7 @@ again: + /* add or start collecting */ + if (!av) { + GST_LOG_OBJECT (dec, "starting new fragment"); +- priv->out_ts = GST_BUFFER_TIMESTAMP (buf); ++ priv->out_ts = GST_BUFFER_PTS (buf); + } else { + GST_LOG_OBJECT (dec, "adding to fragment"); + } +@@ -1105,7 +1108,7 @@ again: + GST_LOG_OBJECT (dec, "assembling fragment"); + inbuf = buf; + buf = gst_adapter_take_buffer (priv->adapter_out, av); +- GST_BUFFER_TIMESTAMP (buf) = priv->out_ts; ++ GST_BUFFER_PTS (buf) = priv->out_ts; + GST_BUFFER_DURATION (buf) = priv->out_dur; + priv->out_ts = GST_CLOCK_TIME_NONE; + priv->out_dur = 0; +@@ -1245,7 +1248,8 @@ foreach_metadata (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data) + const GstMetaInfo *info = (*meta)->info; + gboolean do_copy = FALSE; + +- if (gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory)) { ++ if (gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory) ++ || gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory_reference)) { + /* never call the transform_meta with memory specific metadata */ + GST_DEBUG_OBJECT (decoder, "not copying memory specific metadata %s", + g_type_name (info->api)); +@@ -1271,7 +1275,7 @@ foreach_metadata (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data) + /** + * gst_audio_decoder_finish_subframe: + * @dec: a #GstAudioDecoder +- * @buf: (transfer full) (allow-none): decoded data ++ * @buf: (transfer full) (nullable): decoded data + * + * Collects decoded data and pushes it downstream. This function may be called + * multiple times for a given input frame. +@@ -1305,7 +1309,7 @@ gst_audio_decoder_finish_subframe (GstAudioDecoder * dec, GstBuffer * buf) + /** + * gst_audio_decoder_finish_frame: + * @dec: a #GstAudioDecoder +- * @buf: (transfer full) (allow-none): decoded data ++ * @buf: (transfer full) (nullable): decoded data + * @frames: number of decoded frames represented by decoded data + * + * Collects decoded data and pushes it downstream. 
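
[Editor's illustration, not part of the patch: the finish_subframe()/finish_frame() annotations touched above concern how a GstAudioDecoder subclass hands decoded data back to the base class. A minimal sketch of a handle_frame() implementation; the my_codec_* names are placeholders, not real API:]

#include <gst/audio/gstaudiodecoder.h>

static GstBuffer *my_codec_decode (GstBuffer * inbuf);   /* hypothetical decoder */

static GstFlowReturn
my_dec_handle_frame (GstAudioDecoder * dec, GstBuffer * inbuf)
{
  GstBuffer *outbuf;

  if (inbuf == NULL)            /* draining: nothing buffered in this sketch */
    return GST_FLOW_OK;

  outbuf = my_codec_decode (inbuf);     /* placeholder decode step */
  if (outbuf == NULL)
    return GST_FLOW_ERROR;

  /* Ownership of @outbuf passes to the base class, which timestamps it
   * against the queued input frame and pushes it downstream. */
  return gst_audio_decoder_finish_frame (dec, outbuf, 1);
}

[End of editor's illustration; the patch continues below.]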
+@@ -1420,7 +1424,7 @@ gst_audio_decoder_finish_frame_or_subframe (GstAudioDecoder * dec, + } + + if (G_LIKELY (priv->frames.length)) +- ts = GST_BUFFER_TIMESTAMP (priv->frames.head->data); ++ ts = GST_BUFFER_PTS (priv->frames.head->data); + else + ts = GST_CLOCK_TIME_NONE; + +@@ -1499,14 +1503,14 @@ gst_audio_decoder_finish_frame_or_subframe (GstAudioDecoder * dec, + + buf = gst_buffer_make_writable (buf); + if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (priv->base_ts))) { +- GST_BUFFER_TIMESTAMP (buf) = ++ GST_BUFFER_PTS (buf) = + priv->base_ts + + GST_FRAMES_TO_CLOCK_TIME (priv->samples, ctx->info.rate); + GST_BUFFER_DURATION (buf) = priv->base_ts + + GST_FRAMES_TO_CLOCK_TIME (priv->samples + samples, ctx->info.rate) - +- GST_BUFFER_TIMESTAMP (buf); ++ GST_BUFFER_PTS (buf); + } else { +- GST_BUFFER_TIMESTAMP (buf) = GST_CLOCK_TIME_NONE; ++ GST_BUFFER_PTS (buf) = GST_CLOCK_TIME_NONE; + GST_BUFFER_DURATION (buf) = + GST_FRAMES_TO_CLOCK_TIME (samples, ctx->info.rate); + } +@@ -1624,7 +1628,7 @@ gst_audio_decoder_handle_frame (GstAudioDecoder * dec, + /* keep around for admin */ + GST_LOG_OBJECT (dec, + "tracking frame size %" G_GSIZE_FORMAT ", ts %" GST_TIME_FORMAT, size, +- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer))); ++ GST_TIME_ARGS (GST_BUFFER_PTS (buffer))); + g_queue_push_tail (&dec->priv->frames, buffer); + dec->priv->ctx.delay = dec->priv->frames.length; + GST_OBJECT_LOCK (dec); +@@ -1718,7 +1722,7 @@ gst_audio_decoder_push_buffers (GstAudioDecoder * dec, gboolean force) + } + buffer = gst_adapter_take_buffer (priv->adapter, len); + buffer = gst_buffer_make_writable (buffer); +- GST_BUFFER_TIMESTAMP (buffer) = ts; ++ GST_BUFFER_PTS (buffer) = ts; + flush += len; + priv->force = FALSE; + } else { +@@ -1952,7 +1956,7 @@ gst_audio_decoder_flush_decode (GstAudioDecoder * dec) + GstBuffer *buf = GST_BUFFER_CAST (walk->data); + + GST_DEBUG_OBJECT (dec, "decoding buffer %p, ts %" GST_TIME_FORMAT, +- buf, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf))); ++ buf, GST_TIME_ARGS (GST_BUFFER_PTS (buf))); + + next = g_list_next (walk); + /* decode buffer, resulting data prepended to output queue */ +@@ -1993,13 +1997,13 @@ gst_audio_decoder_flush_decode (GstAudioDecoder * dec) + timestamp = 0; + } + +- if (!GST_BUFFER_TIMESTAMP_IS_VALID (buf)) { ++ if (!GST_BUFFER_PTS_IS_VALID (buf)) { + GST_LOG_OBJECT (dec, "applying reverse interpolated ts %" + GST_TIME_FORMAT, GST_TIME_ARGS (timestamp)); +- GST_BUFFER_TIMESTAMP (buf) = timestamp; ++ GST_BUFFER_PTS (buf) = timestamp; + } else { + /* track otherwise */ +- timestamp = GST_BUFFER_TIMESTAMP (buf); ++ timestamp = GST_BUFFER_PTS (buf); + GST_LOG_OBJECT (dec, "tracking ts %" GST_TIME_FORMAT, + GST_TIME_ARGS (timestamp)); + } +@@ -2007,7 +2011,7 @@ gst_audio_decoder_flush_decode (GstAudioDecoder * dec) + if (G_LIKELY (res == GST_FLOW_OK)) { + GST_DEBUG_OBJECT (dec, "pushing buffer %p of size %" G_GSIZE_FORMAT ", " + "time %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT, buf, +- gst_buffer_get_size (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), ++ gst_buffer_get_size (buf), GST_TIME_ARGS (GST_BUFFER_PTS (buf)), + GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); + /* should be already, but let's be sure */ + buf = gst_buffer_make_writable (buf); +@@ -2050,7 +2054,7 @@ gst_audio_decoder_chain_reverse (GstAudioDecoder * dec, GstBuffer * buf) + if (G_LIKELY (buf)) { + GST_DEBUG_OBJECT (dec, "gathering buffer %p of size %" G_GSIZE_FORMAT ", " + "time %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT, buf, +- gst_buffer_get_size (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP 
(buf)), ++ gst_buffer_get_size (buf), GST_TIME_ARGS (GST_BUFFER_PTS (buf)), + GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); + + /* add buffer to gather queue */ +@@ -2071,7 +2075,7 @@ gst_audio_decoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + GST_LOG_OBJECT (dec, + "received buffer of size %" G_GSIZE_FORMAT " with ts %" GST_TIME_FORMAT + ", duration %" GST_TIME_FORMAT, gst_buffer_get_size (buffer), +- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)), ++ GST_TIME_ARGS (GST_BUFFER_PTS (buffer)), + GST_TIME_ARGS (GST_BUFFER_DURATION (buffer))); + + GST_AUDIO_DECODER_STREAM_LOCK (dec); +@@ -2096,8 +2100,7 @@ gst_audio_decoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + /* buffer may claim DISCONT loudly, if it can't tell us where we are now, + * we'll stick to where we were ... + * Particularly useful/needed for upstream BYTE based */ +- if (dec->input_segment.rate > 0.0 +- && !GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) { ++ if (dec->input_segment.rate > 0.0 && !GST_BUFFER_PTS_IS_VALID (buffer)) { + GST_DEBUG_OBJECT (dec, "... but restoring previous ts tracking"); + dec->priv->base_ts = ts; + dec->priv->samples = samples; +@@ -2293,7 +2296,7 @@ gst_audio_decoder_handle_gap (GstAudioDecoder * dec, GstEvent * event) + + /* hand subclass empty frame with duration that needs covering */ + buf = gst_buffer_new (); +- GST_BUFFER_TIMESTAMP (buf) = timestamp; ++ GST_BUFFER_PTS (buf) = timestamp; + GST_BUFFER_DURATION (buf) = duration; + /* best effort, not much error handling */ + gst_audio_decoder_handle_frame (dec, klass, buf); +@@ -2762,8 +2765,8 @@ gst_audio_decoder_propose_allocation_default (GstAudioDecoder * dec, + /** + * gst_audio_decoder_proxy_getcaps: + * @decoder: a #GstAudioDecoder +- * @caps: (allow-none): initial caps +- * @filter: (allow-none): filter caps ++ * @caps: (nullable): initial caps ++ * @filter: (nullable): filter caps + * + * Returns caps that express @caps (or sink template caps if @caps == NULL) + * restricted to rate/channels/... combinations supported by downstream +@@ -3271,7 +3274,7 @@ _gst_audio_decoder_error (GstAudioDecoder * dec, gint weight, + * gst_audio_decoder_get_audio_info: + * @dec: a #GstAudioDecoder + * +- * Returns: a #GstAudioInfo describing the input audio format ++ * Returns: (transfer none): a #GstAudioInfo describing the input audio format + */ + GstAudioInfo * + gst_audio_decoder_get_audio_info (GstAudioDecoder * dec) +@@ -3391,31 +3394,50 @@ gst_audio_decoder_get_max_errors (GstAudioDecoder * dec) + * @min: minimum latency + * @max: maximum latency + * +- * Sets decoder latency. ++ * Sets decoder latency. If the provided values changed from ++ * previously provided ones, this will also post a LATENCY message on the bus ++ * so the pipeline can reconfigure its global latency. 
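
[Editor's illustration, not part of the patch: with the change above, gst_audio_decoder_set_latency() only posts a LATENCY message when the values actually change, so a subclass can call it on every reconfiguration without flooding the bus. A sketch of a typical call site; the 20 ms figure is an arbitrary example value:]

#include <gst/audio/gstaudiodecoder.h>

static gboolean
my_dec_set_format (GstAudioDecoder * dec, GstCaps * caps)
{
  /* Example figure only; a real decoder derives this from @caps. */
  GstClockTime frame_latency = 20 * GST_MSECOND;

  /* min must be a valid GstClockTime and max >= min; repeating the same
   * values here no longer re-posts GST_MESSAGE_LATENCY. */
  gst_audio_decoder_set_latency (dec, frame_latency, frame_latency);

  return TRUE;
}

[End of editor's illustration; the patch continues below.]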
+ */ + void + gst_audio_decoder_set_latency (GstAudioDecoder * dec, + GstClockTime min, GstClockTime max) + { ++ gboolean post_message = FALSE; ++ + g_return_if_fail (GST_IS_AUDIO_DECODER (dec)); + g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min)); + g_return_if_fail (min <= max); + ++ GST_DEBUG_OBJECT (dec, ++ "min_latency:%" GST_TIME_FORMAT " max_latency:%" GST_TIME_FORMAT, ++ GST_TIME_ARGS (min), GST_TIME_ARGS (max)); ++ + GST_OBJECT_LOCK (dec); +- dec->priv->ctx.min_latency = min; +- dec->priv->ctx.max_latency = max; ++ if (dec->priv->ctx.min_latency != min) { ++ dec->priv->ctx.min_latency = min; ++ post_message = TRUE; ++ } ++ if (dec->priv->ctx.max_latency != max) { ++ dec->priv->ctx.max_latency = max; ++ post_message = TRUE; ++ } ++ if (!dec->priv->ctx.posted_latency_msg) { ++ dec->priv->ctx.posted_latency_msg = TRUE; ++ post_message = TRUE; ++ } + GST_OBJECT_UNLOCK (dec); + + /* post latency message on the bus */ +- gst_element_post_message (GST_ELEMENT (dec), +- gst_message_new_latency (GST_OBJECT (dec))); ++ if (post_message) ++ gst_element_post_message (GST_ELEMENT (dec), ++ gst_message_new_latency (GST_OBJECT (dec))); + } + + /** + * gst_audio_decoder_get_latency: + * @dec: a #GstAudioDecoder +- * @min: (out) (allow-none): a pointer to storage to hold minimum latency +- * @max: (out) (allow-none): a pointer to storage to hold maximum latency ++ * @min: (out) (optional): a pointer to storage to hold minimum latency ++ * @max: (out) (optional): a pointer to storage to hold maximum latency + * + * Sets the variables pointed to by @min and @max to the currently configured + * latency. +@@ -3457,7 +3479,7 @@ gst_audio_decoder_get_parse_state (GstAudioDecoder * dec, + /** + * gst_audio_decoder_set_allocation_caps: + * @dec: a #GstAudioDecoder +- * @allocation_caps: (allow-none): a #GstCaps or %NULL ++ * @allocation_caps: (nullable): a #GstCaps or %NULL + * + * Sets a caps in allocation query which are different from the set + * pad's caps. 
Use this function before calling +@@ -3534,6 +3556,7 @@ void + gst_audio_decoder_set_min_latency (GstAudioDecoder * dec, GstClockTime num) + { + g_return_if_fail (GST_IS_AUDIO_DECODER (dec)); ++ g_return_if_fail (GST_CLOCK_TIME_IS_VALID (num)); + + GST_OBJECT_LOCK (dec); + dec->priv->latency = num; +@@ -3577,6 +3600,7 @@ void + gst_audio_decoder_set_tolerance (GstAudioDecoder * dec, GstClockTime tolerance) + { + g_return_if_fail (GST_IS_AUDIO_DECODER (dec)); ++ g_return_if_fail (GST_CLOCK_TIME_IS_VALID (tolerance)); + + GST_OBJECT_LOCK (dec); + dec->priv->tolerance = tolerance; +@@ -3704,7 +3728,7 @@ gst_audio_decoder_get_needs_format (GstAudioDecoder * dec) + /** + * gst_audio_decoder_merge_tags: + * @dec: a #GstAudioDecoder +- * @tags: (allow-none): a #GstTagList to merge, or NULL ++ * @tags: (nullable): a #GstTagList to merge, or NULL + * @mode: the #GstTagMergeMode to use, usually #GST_TAG_MERGE_REPLACE + * + * Sets the audio decoder tags and how they should be merged with any +@@ -3794,9 +3818,9 @@ fallback: + /** + * gst_audio_decoder_get_allocator: + * @dec: a #GstAudioDecoder +- * @allocator: (out) (allow-none) (transfer full): the #GstAllocator ++ * @allocator: (out) (optional) (nullable) (transfer full): the #GstAllocator + * used +- * @params: (out) (allow-none) (transfer full): the ++ * @params: (out) (optional) (transfer full): the + * #GstAllocationParams of @allocator + * + * Lets #GstAudioDecoder sub-classes to know the memory @allocator +diff --git a/gst-libs/gst/audio/gstaudiodecoder.h b/gst-libs/gst/audio/gstaudiodecoder.h +index b2f36648a..03fb67539 100644 +--- a/gst-libs/gst/audio/gstaudiodecoder.h ++++ b/gst-libs/gst/audio/gstaudiodecoder.h +@@ -147,7 +147,7 @@ G_STMT_START { \ + * + * Default maximum number of errors tolerated before signaling error. + */ +-#define GST_AUDIO_DECODER_MAX_ERRORS 10 ++#define GST_AUDIO_DECODER_MAX_ERRORS -1 + + /** + * GstAudioDecoder: +@@ -272,6 +272,11 @@ struct _GstAudioDecoderClass + gboolean (*set_format) (GstAudioDecoder *dec, + GstCaps *caps); + ++ /** ++ * GstAudioDecoderClass::parse: ++ * @offset: (out): ++ * @length: (out): ++ */ + GstFlowReturn (*parse) (GstAudioDecoder *dec, + GstAdapter *adapter, + gint *offset, gint *length); +diff --git a/gst-libs/gst/audio/gstaudioencoder.c b/gst-libs/gst/audio/gstaudioencoder.c +index 5b9f7410c..526599365 100644 +--- a/gst-libs/gst/audio/gstaudioencoder.c ++++ b/gst-libs/gst/audio/gstaudioencoder.c +@@ -171,6 +171,10 @@ typedef struct _GstAudioEncoderContext + /* MT-protected (with LOCK) */ + GstClockTime min_latency; + GstClockTime max_latency; ++ /* Tracks whether the latency message was posted at least once */ ++ gboolean posted_latency_msg; ++ ++ gboolean negotiated; + + GList *headers; + gboolean new_headers; +@@ -239,6 +243,11 @@ struct _GstAudioEncoderPrivate + + /* pending serialized sink events, will be sent from finish_frame() */ + GList *pending_events; ++ ++ /* these are initial events or events that came in while there was nothing ++ * in the adapter. these events shall be sent after negotiation but before ++ * we push the following buffer. 
*/ ++ GList *early_pending_events; + }; + + +@@ -487,6 +496,7 @@ gst_audio_encoder_reset (GstAudioEncoder * enc, gboolean full) + + memset (&enc->priv->ctx, 0, sizeof (enc->priv->ctx)); + gst_audio_info_init (&enc->priv->ctx.info); ++ enc->priv->ctx.posted_latency_msg = FALSE; + GST_OBJECT_UNLOCK (enc); + + if (enc->priv->upstream_tags) { +@@ -499,8 +509,11 @@ gst_audio_encoder_reset (GstAudioEncoder * enc, gboolean full) + enc->priv->tags_merge_mode = GST_TAG_MERGE_APPEND; + enc->priv->tags_changed = FALSE; + +- g_list_foreach (enc->priv->pending_events, (GFunc) gst_event_unref, NULL); +- g_list_free (enc->priv->pending_events); ++ g_list_free_full (enc->priv->early_pending_events, ++ (GDestroyNotify) gst_event_unref); ++ enc->priv->early_pending_events = NULL; ++ g_list_free_full (enc->priv->pending_events, ++ (GDestroyNotify) gst_event_unref); + enc->priv->pending_events = NULL; + } + +@@ -597,11 +610,31 @@ gst_audio_encoder_push_event (GstAudioEncoder * enc, GstEvent * event) + return gst_pad_push_event (enc->srcpad, event); + } + ++static inline void ++gst_audio_encoder_push_early_pending_events (GstAudioEncoder * enc) ++{ ++ GstAudioEncoderPrivate *priv = enc->priv; ++ ++ if (priv->early_pending_events) { ++ GList *pending_events, *l; ++ ++ pending_events = priv->early_pending_events; ++ priv->early_pending_events = NULL; ++ ++ GST_DEBUG_OBJECT (enc, "Pushing early pending events"); ++ for (l = pending_events; l; l = l->next) ++ gst_audio_encoder_push_event (enc, l->data); ++ g_list_free (pending_events); ++ } ++} ++ + static inline void + gst_audio_encoder_push_pending_events (GstAudioEncoder * enc) + { + GstAudioEncoderPrivate *priv = enc->priv; + ++ gst_audio_encoder_push_early_pending_events (enc); ++ + if (priv->pending_events) { + GList *pending_events, *l; + +@@ -706,7 +739,8 @@ foreach_metadata (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data) + const GstMetaInfo *info = (*meta)->info; + gboolean do_copy = FALSE; + +- if (gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory)) { ++ if (gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory) ++ || gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory_reference)) { + /* never call the transform_meta with memory specific metadata */ + GST_DEBUG_OBJECT (encoder, "not copying memory specific metadata %s", + g_type_name (info->api)); +@@ -732,7 +766,7 @@ foreach_metadata (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data) + /** + * gst_audio_encoder_finish_frame: + * @enc: a #GstAudioEncoder +- * @buffer: (transfer full) (allow-none): encoded data ++ * @buffer: (transfer full) (nullable): encoded data + * @samples: number of samples (per channel) represented by encoded data + * + * Collects encoded data and pushes encoded data downstream. +@@ -795,9 +829,9 @@ gst_audio_encoder_finish_frame (GstAudioEncoder * enc, GstBuffer * buf, + if (G_LIKELY (buf)) + priv->got_data = TRUE; + +- gst_audio_encoder_push_pending_events (enc); ++ gst_audio_encoder_push_early_pending_events (enc); + +- /* send after pending events, which likely includes segment event */ ++ /* send after early pending events, which likely includes segment event */ + gst_audio_encoder_check_and_push_pending_tags (enc); + + /* remove corresponding samples from input */ +@@ -937,16 +971,16 @@ gst_audio_encoder_finish_frame (GstAudioEncoder * enc, GstBuffer * buf, + /* FIXME ? lookahead could lead to weird ts and duration ? 
+ * (particularly if not in perfect mode) */ + /* mind sample rounding and produce perfect output */ +- GST_BUFFER_TIMESTAMP (buf) = priv->base_ts + ++ GST_BUFFER_PTS (buf) = priv->base_ts + + gst_util_uint64_scale (priv->samples - ctx->lookahead, GST_SECOND, + ctx->info.rate); +- GST_BUFFER_DTS (buf) = GST_BUFFER_TIMESTAMP (buf); ++ GST_BUFFER_DTS (buf) = GST_BUFFER_PTS (buf); + GST_DEBUG_OBJECT (enc, "out samples %d", samples); + if (G_LIKELY (samples > 0)) { + priv->samples += samples; + GST_BUFFER_DURATION (buf) = priv->base_ts + + gst_util_uint64_scale (priv->samples - ctx->lookahead, GST_SECOND, +- ctx->info.rate) - GST_BUFFER_TIMESTAMP (buf); ++ ctx->info.rate) - GST_BUFFER_PTS (buf); + priv->last_duration = GST_BUFFER_DURATION (buf); + } else { + /* duration forecast in case of handling remainder; +@@ -1008,11 +1042,15 @@ gst_audio_encoder_finish_frame (GstAudioEncoder * enc, GstBuffer * buf, + GST_LOG_OBJECT (enc, + "pushing buffer of size %" G_GSIZE_FORMAT " with ts %" GST_TIME_FORMAT + ", duration %" GST_TIME_FORMAT, size, +- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), ++ GST_TIME_ARGS (GST_BUFFER_PTS (buf)), + GST_TIME_ARGS (GST_BUFFER_DURATION (buf))); + + ret = gst_pad_push (enc->srcpad, buf); + GST_LOG_OBJECT (enc, "buffer pushed: %s", gst_flow_get_name (ret)); ++ ++ /* Now push the events that followed after the buffer got into the ++ * adapter. */ ++ gst_audio_encoder_push_pending_events (enc); + } else { + /* merely advance samples, most work for that already done above */ + priv->samples += samples; +@@ -1112,12 +1150,9 @@ gst_audio_encoder_push_buffers (GstAudioEncoder * enc, gboolean force) + + priv->got_data = FALSE; + if (G_LIKELY (need)) { +- const guint8 *data; +- +- data = gst_adapter_map (priv->adapter, priv->offset + need); +- buf = +- gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, +- (gpointer) data, priv->offset + need, priv->offset, need, NULL, NULL); ++ buf = gst_adapter_get_buffer (priv->adapter, priv->offset + need); ++ buf = gst_buffer_make_writable (buf); ++ gst_buffer_resize (buf, priv->offset, -1); + } else if (!priv->drainable) { + GST_DEBUG_OBJECT (enc, "non-drainable and no more data"); + goto finish; +@@ -1144,7 +1179,6 @@ gst_audio_encoder_push_buffers (GstAudioEncoder * enc, gboolean force) + + if (G_LIKELY (buf)) { + gst_buffer_unref (buf); +- gst_adapter_unmap (priv->adapter); + } + + finish: +@@ -1236,7 +1270,7 @@ gst_audio_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + GST_LOG_OBJECT (enc, + "received buffer of size %" G_GSIZE_FORMAT " with ts %" GST_TIME_FORMAT + ", duration %" GST_TIME_FORMAT, size, +- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)), ++ GST_TIME_ARGS (GST_BUFFER_PTS (buffer)), + GST_TIME_ARGS (GST_BUFFER_DURATION (buffer))); + + /* input should be whole number of sample frames */ +@@ -1282,11 +1316,11 @@ gst_audio_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + GST_LOG_OBJECT (enc, + "buffer after segment clipping has size %" G_GSIZE_FORMAT " with ts %" + GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT, size, +- GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)), ++ GST_TIME_ARGS (GST_BUFFER_PTS (buffer)), + GST_TIME_ARGS (GST_BUFFER_DURATION (buffer))); + + if (!GST_CLOCK_TIME_IS_VALID (priv->base_ts)) { +- priv->base_ts = GST_BUFFER_TIMESTAMP (buffer); ++ priv->base_ts = GST_BUFFER_PTS (buffer); + GST_DEBUG_OBJECT (enc, "new base ts %" GST_TIME_FORMAT, + GST_TIME_ARGS (priv->base_ts)); + gst_audio_encoder_set_base_gp (enc); +@@ -1298,7 +1332,7 @@ gst_audio_encoder_chain 
(GstPad * pad, GstObject * parent, GstBuffer * buffer) + GstClockTimeDiff diff = 0; + GstClockTime next_ts = 0; + +- if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer) && ++ if (GST_BUFFER_PTS_IS_VALID (buffer) && + GST_CLOCK_TIME_IS_VALID (priv->base_ts)) { + guint64 samples; + +@@ -1310,7 +1344,7 @@ gst_audio_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + " samples past base_ts %" GST_TIME_FORMAT + ", expected ts %" GST_TIME_FORMAT, samples, + GST_TIME_ARGS (priv->base_ts), GST_TIME_ARGS (next_ts)); +- diff = GST_CLOCK_DIFF (next_ts, GST_BUFFER_TIMESTAMP (buffer)); ++ diff = GST_CLOCK_DIFF (next_ts, GST_BUFFER_PTS (buffer)); + GST_LOG_OBJECT (enc, "ts diff %d ms", (gint) (diff / GST_MSECOND)); + /* if within tolerance, + * discard buffer ts and carry on producing perfect stream, +@@ -1339,7 +1373,7 @@ gst_audio_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + buffer = gst_buffer_make_writable (buffer); + gst_buffer_resize (buffer, diff_bytes, size - diff_bytes); + +- GST_BUFFER_TIMESTAMP (buffer) += diff; ++ GST_BUFFER_PTS (buffer) += diff; + /* care even less about duration after this */ + } else { + /* drain stuff prior to resync */ +@@ -1352,13 +1386,13 @@ gst_audio_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + gst_util_uint64_scale (gst_adapter_available (priv->adapter), + GST_SECOND, ctx->info.rate * ctx->info.bpf); + +- if (G_UNLIKELY (shift > GST_BUFFER_TIMESTAMP (buffer))) { ++ if (G_UNLIKELY (shift > GST_BUFFER_PTS (buffer))) { + /* ERROR */ + goto wrong_time; + } + /* arrange for newly added samples to come out with the ts + * of the incoming buffer that adds these */ +- priv->base_ts = GST_BUFFER_TIMESTAMP (buffer) - shift; ++ priv->base_ts = GST_BUFFER_PTS (buffer) - shift; + priv->samples = 0; + gst_audio_encoder_set_base_gp (enc); + priv->discont |= discont; +@@ -1497,8 +1531,8 @@ refuse_caps: + /** + * gst_audio_encoder_proxy_getcaps: + * @enc: a #GstAudioEncoder +- * @caps: (allow-none): initial caps +- * @filter: (allow-none): filter caps ++ * @caps: (nullable): initial caps ++ * @filter: (nullable): filter caps + * + * Returns caps that express @caps (or sink template caps if @caps == NULL) + * restricted to channel/rate combinations supported by downstream elements +@@ -1578,8 +1612,8 @@ gst_audio_encoder_sink_event_default (GstAudioEncoder * enc, GstEvent * event) + /* and follow along with segment */ + enc->input_segment = seg; + +- enc->priv->pending_events = +- g_list_append (enc->priv->pending_events, event); ++ enc->priv->early_pending_events = ++ g_list_append (enc->priv->early_pending_events, event); + GST_AUDIO_ENCODER_STREAM_UNLOCK (enc); + + res = TRUE; +@@ -1696,8 +1730,13 @@ gst_audio_encoder_sink_event_default (GstAudioEncoder * enc, GstEvent * event) + gst_pad_event_default (enc->sinkpad, GST_OBJECT_CAST (enc), event); + } else { + GST_AUDIO_ENCODER_STREAM_LOCK (enc); +- enc->priv->pending_events = +- g_list_append (enc->priv->pending_events, event); ++ if (gst_adapter_available (enc->priv->adapter) == 0) { ++ enc->priv->early_pending_events = ++ g_list_append (enc->priv->early_pending_events, event); ++ } else { ++ enc->priv->pending_events = ++ g_list_append (enc->priv->pending_events, event); ++ } + GST_AUDIO_ENCODER_STREAM_UNLOCK (enc); + res = TRUE; + } +@@ -2150,7 +2189,7 @@ gst_audio_encoder_sink_activate_mode (GstPad * pad, GstObject * parent, + * gst_audio_encoder_get_audio_info: + * @enc: a #GstAudioEncoder + * +- * Returns: a #GstAudioInfo describing the input audio format ++ * 
Returns: (transfer none): a #GstAudioInfo describing the input audio format + */ + GstAudioInfo * + gst_audio_encoder_get_audio_info (GstAudioEncoder * enc) +@@ -2307,34 +2346,50 @@ gst_audio_encoder_get_lookahead (GstAudioEncoder * enc) + * @min: minimum latency + * @max: maximum latency + * +- * Sets encoder latency. ++ * Sets encoder latency. If the provided values changed from ++ * previously provided ones, this will also post a LATENCY message on the bus ++ * so the pipeline can reconfigure its global latency. + */ + void + gst_audio_encoder_set_latency (GstAudioEncoder * enc, + GstClockTime min, GstClockTime max) + { ++ gboolean post_message = FALSE; ++ + g_return_if_fail (GST_IS_AUDIO_ENCODER (enc)); + g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min)); + g_return_if_fail (min <= max); + ++ GST_DEBUG_OBJECT (enc, ++ "min_latency:%" GST_TIME_FORMAT " max_latency:%" GST_TIME_FORMAT, ++ GST_TIME_ARGS (min), GST_TIME_ARGS (max)); ++ + GST_OBJECT_LOCK (enc); +- enc->priv->ctx.min_latency = min; +- enc->priv->ctx.max_latency = max; ++ if (enc->priv->ctx.min_latency != min) { ++ enc->priv->ctx.min_latency = min; ++ post_message = TRUE; ++ } ++ if (enc->priv->ctx.max_latency != max) { ++ enc->priv->ctx.max_latency = max; ++ post_message = TRUE; ++ } ++ if (!enc->priv->ctx.posted_latency_msg) { ++ enc->priv->ctx.posted_latency_msg = TRUE; ++ post_message = TRUE; ++ } + GST_OBJECT_UNLOCK (enc); + +- GST_LOG_OBJECT (enc, "set to %" GST_TIME_FORMAT "-%" GST_TIME_FORMAT, +- GST_TIME_ARGS (min), GST_TIME_ARGS (max)); +- + /* post latency message on the bus */ +- gst_element_post_message (GST_ELEMENT (enc), +- gst_message_new_latency (GST_OBJECT (enc))); ++ if (post_message) ++ gst_element_post_message (GST_ELEMENT (enc), ++ gst_message_new_latency (GST_OBJECT (enc))); + } + + /** + * gst_audio_encoder_get_latency: + * @enc: a #GstAudioEncoder +- * @min: (out) (allow-none): a pointer to storage to hold minimum latency +- * @max: (out) (allow-none): a pointer to storage to hold maximum latency ++ * @min: (out) (optional): a pointer to storage to hold minimum latency ++ * @max: (out) (optional): a pointer to storage to hold maximum latency + * + * Sets the variables pointed to by @min and @max to the currently configured + * latency. +@@ -2377,7 +2432,7 @@ gst_audio_encoder_set_headers (GstAudioEncoder * enc, GList * headers) + /** + * gst_audio_encoder_set_allocation_caps: + * @enc: a #GstAudioEncoder +- * @allocation_caps: (allow-none): a #GstCaps or %NULL ++ * @allocation_caps: (nullable): a #GstCaps or %NULL + * + * Sets a caps in allocation query which are different from the set + * pad's caps. 
Use this function before calling +@@ -2544,6 +2599,7 @@ void + gst_audio_encoder_set_tolerance (GstAudioEncoder * enc, GstClockTime tolerance) + { + g_return_if_fail (GST_IS_AUDIO_ENCODER (enc)); ++ g_return_if_fail (GST_CLOCK_TIME_IS_VALID (tolerance)); + + GST_OBJECT_LOCK (enc); + enc->priv->tolerance = tolerance; +@@ -2671,7 +2727,7 @@ gst_audio_encoder_get_drainable (GstAudioEncoder * enc) + /** + * gst_audio_encoder_merge_tags: + * @enc: a #GstAudioEncoder +- * @tags: (allow-none): a #GstTagList to merge, or NULL to unset ++ * @tags: (nullable): a #GstTagList to merge, or NULL to unset + * previously-set tags + * @mode: the #GstTagMergeMode to use, usually #GST_TAG_MERGE_REPLACE + * +@@ -2731,10 +2787,10 @@ gst_audio_encoder_negotiate_default (GstAudioEncoder * enc) + + GST_DEBUG_OBJECT (enc, "Setting srcpad caps %" GST_PTR_FORMAT, caps); + +- if (enc->priv->pending_events) { ++ if (enc->priv->early_pending_events) { + GList **pending_events, *l; + +- pending_events = &enc->priv->pending_events; ++ pending_events = &enc->priv->early_pending_events; + + GST_DEBUG_OBJECT (enc, "Pushing pending events"); + for (l = *pending_events; l;) { +@@ -2813,6 +2869,8 @@ gst_audio_encoder_negotiate_unlocked (GstAudioEncoder * enc) + if (G_LIKELY (klass->negotiate)) + ret = klass->negotiate (enc); + ++ enc->priv->ctx.negotiated = TRUE; ++ + return ret; + } + +@@ -2948,9 +3006,9 @@ fallback: + /** + * gst_audio_encoder_get_allocator: + * @enc: a #GstAudioEncoder +- * @allocator: (out) (allow-none) (transfer full): the #GstAllocator ++ * @allocator: (out) (optional) (nullable) (transfer full): the #GstAllocator + * used +- * @params: (out) (allow-none) (transfer full): the ++ * @params: (out) (optional) (transfer full): the + * #GstAllocationParams of @allocator + * + * Lets #GstAudioEncoder sub-classes to know the memory @allocator +diff --git a/gst-libs/gst/audio/gstaudiofilter.c b/gst-libs/gst/audio/gstaudiofilter.c +index e6ad740bf..003983340 100644 +--- a/gst-libs/gst/audio/gstaudiofilter.c ++++ b/gst-libs/gst/audio/gstaudiofilter.c +@@ -180,6 +180,11 @@ gst_audio_filter_submit_input_buffer (GstBaseTransform * btrans, + GstAudioFilter *filter = GST_AUDIO_FILTER (btrans); + + if (btrans->segment.format == GST_FORMAT_TIME) { ++ if (!GST_AUDIO_INFO_IS_VALID (&filter->info)) { ++ GST_WARNING_OBJECT (filter, "Got buffer, but not negotiated yet!"); ++ return GST_FLOW_NOT_NEGOTIATED; ++ } ++ + input = + gst_audio_buffer_clip (input, &btrans->segment, filter->info.rate, + filter->info.bpf); +diff --git a/gst-libs/gst/audio/gstaudiometa.c b/gst-libs/gst/audio/gstaudiometa.c +index e010ca337..5605dc7b4 100644 +--- a/gst-libs/gst/audio/gstaudiometa.c ++++ b/gst-libs/gst/audio/gstaudiometa.c +@@ -492,3 +492,131 @@ gst_audio_meta_get_info (void) + } + return audio_meta_info; + } ++ ++/** ++ * gst_audio_level_meta_api_get_type: ++ * ++ * Return the #GType associated with #GstAudioLevelMeta. 
++ * ++ * Returns: a #GType ++ * ++ * Since: 1.20 ++ */ ++GType ++gst_audio_level_meta_api_get_type (void) ++{ ++ static GType type = 0; ++ static const gchar *tags[] = { NULL }; ++ ++ if (g_once_init_enter (&type)) { ++ GType _type = gst_meta_api_type_register ("GstAudioLevelMetaAPI", tags); ++ g_once_init_leave (&type, _type); ++ } ++ return type; ++} ++ ++static gboolean ++gst_audio_level_meta_init (GstMeta * meta, gpointer params, GstBuffer * buffer) ++{ ++ GstAudioLevelMeta *dmeta = (GstAudioLevelMeta *) meta; ++ ++ dmeta->level = 127; ++ dmeta->voice_activity = FALSE; ++ ++ return TRUE; ++} ++ ++static gboolean ++gst_audio_level_meta_transform (GstBuffer * dst, GstMeta * meta, ++ GstBuffer * src, GQuark type, gpointer data) ++{ ++ if (GST_META_TRANSFORM_IS_COPY (type)) { ++ GstAudioLevelMeta *smeta = (GstAudioLevelMeta *) meta; ++ GstAudioLevelMeta *dmeta; ++ ++ dmeta = gst_buffer_add_audio_level_meta (dst, smeta->level, ++ smeta->voice_activity); ++ if (dmeta == NULL) ++ return FALSE; ++ } else { ++ /* return FALSE, if transform type is not supported */ ++ return FALSE; ++ } ++ ++ return TRUE; ++} ++ ++/** ++ * gst_audio_level_meta_get_info: ++ * ++ * Return the #GstMetaInfo associated with #GstAudioLevelMeta. ++ * ++ * Returns: (transfer none): a #GstMetaInfo ++ * ++ * Since: 1.20 ++ */ ++const GstMetaInfo * ++gst_audio_level_meta_get_info (void) ++{ ++ static const GstMetaInfo *audio_level_meta_info = NULL; ++ ++ if (g_once_init_enter (&audio_level_meta_info)) { ++ const GstMetaInfo *meta = gst_meta_register (GST_AUDIO_LEVEL_META_API_TYPE, ++ "GstAudioLevelMeta", ++ sizeof (GstAudioLevelMeta), ++ gst_audio_level_meta_init, ++ (GstMetaFreeFunction) NULL, ++ gst_audio_level_meta_transform); ++ g_once_init_leave (&audio_level_meta_info, meta); ++ } ++ return audio_level_meta_info; ++} ++ ++/** ++ * gst_buffer_add_audio_level_meta: ++ * @buffer: a #GstBuffer ++ * @level: the -dBov from 0-127 (127 is silence). ++ * @voice_activity: whether the buffer contains voice activity. ++ * ++ * Attaches audio level information to @buffer. (RFC 6464) ++ * ++ * Returns: (transfer none) (nullable): the #GstAudioLevelMeta on @buffer. ++ * ++ * Since: 1.20 ++ */ ++GstAudioLevelMeta * ++gst_buffer_add_audio_level_meta (GstBuffer * buffer, guint8 level, ++ gboolean voice_activity) ++{ ++ GstAudioLevelMeta *meta; ++ ++ g_return_val_if_fail (buffer != NULL, NULL); ++ ++ meta = (GstAudioLevelMeta *) gst_buffer_add_meta (buffer, ++ GST_AUDIO_LEVEL_META_INFO, NULL); ++ if (!meta) ++ return NULL; ++ ++ meta->level = level; ++ meta->voice_activity = voice_activity; ++ ++ return meta; ++} ++ ++/** ++ * gst_buffer_get_audio_level_meta: ++ * @buffer: a #GstBuffer ++ * ++ * Find the #GstAudioLevelMeta on @buffer. ++ * ++ * Returns: (transfer none) (nullable): the #GstAudioLevelMeta or %NULL when ++ * there is no such metadata on @buffer. ++ * ++ * Since: 1.20 ++ */ ++GstAudioLevelMeta * ++gst_buffer_get_audio_level_meta (GstBuffer * buffer) ++{ ++ return (GstAudioLevelMeta *) gst_buffer_get_meta (buffer, ++ gst_audio_level_meta_api_get_type ()); ++} +diff --git a/gst-libs/gst/audio/gstaudiometa.h b/gst-libs/gst/audio/gstaudiometa.h +index 0b1b48bf0..94954d194 100644 +--- a/gst-libs/gst/audio/gstaudiometa.h ++++ b/gst-libs/gst/audio/gstaudiometa.h +@@ -198,6 +198,55 @@ GstAudioMeta * gst_buffer_add_audio_meta (GstBuffer *buffer, + const GstAudioInfo *info, + gsize samples, gsize offsets[]); + ++/** ++ * GST_AUDIO_LEVEL_META_API_TYPE: ++ * ++ * The #GType associated with #GstAudioLevelMeta. 
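
[Editor's illustration, not part of the patch: the GstAudioLevelMeta API backported above carries an RFC 6464 audio level (0-127 in -dBov, 127 being silence) plus a voice-activity flag. A minimal sketch of attaching and reading it, using an arbitrary example level of -30 dBov on a writable buffer:]

#include <gst/audio/gstaudiometa.h>

static void
tag_and_read_level (GstBuffer * buf)
{
  GstAudioLevelMeta *meta;

  /* @buf must be writable to attach metadata. */
  gst_buffer_add_audio_level_meta (buf, 30, TRUE);

  meta = gst_buffer_get_audio_level_meta (buf);
  if (meta != NULL)
    g_print ("level: -%u dBov, voice activity: %d\n",
        (guint) meta->level, meta->voice_activity);
}

[End of editor's illustration; the patch continues below.]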
++ * ++ * Since: 1.20 ++ */ ++#define GST_AUDIO_LEVEL_META_API_TYPE (gst_audio_level_meta_api_get_type()) ++/** ++ * GST_AUDIO_LEVEL_META_INFO: ++ * ++ * The #GstMetaInfo associated with #GstAudioLevelMeta. ++ * ++ * Since: 1.20 ++ */ ++#define GST_AUDIO_LEVEL_META_INFO (gst_audio_level_meta_get_info()) ++typedef struct _GstAudioLevelMeta GstAudioLevelMeta; ++ ++/** ++ * GstAudioLevelMeta: ++ * @meta: parent #GstMeta ++ * @level: the -dBov from 0-127 (127 is silence). ++ * @voice_activity: whether the buffer contains voice activity ++ * ++ * Meta containing Audio Level Indication: https://tools.ietf.org/html/rfc6464 ++ * ++ * Since: 1.20 ++ */ ++struct _GstAudioLevelMeta ++{ ++ GstMeta meta; ++ ++ guint8 level; ++ gboolean voice_activity; ++}; ++ ++GST_AUDIO_API ++GType gst_audio_level_meta_api_get_type (void); ++ ++GST_AUDIO_API ++const GstMetaInfo * gst_audio_level_meta_get_info (void); ++ ++GST_AUDIO_API ++GstAudioLevelMeta * gst_buffer_add_audio_level_meta (GstBuffer * buffer, ++ guint8 level, ++ gboolean voice_activity); ++GST_AUDIO_API ++GstAudioLevelMeta * gst_buffer_get_audio_level_meta (GstBuffer * buffer); ++ + G_END_DECLS + + #endif /* __GST_AUDIO_META_H__ */ +diff --git a/gst-libs/gst/audio/gstaudioringbuffer.c b/gst-libs/gst/audio/gstaudioringbuffer.c +index 16d773943..dc38ee6d1 100644 +--- a/gst-libs/gst/audio/gstaudioringbuffer.c ++++ b/gst-libs/gst/audio/gstaudioringbuffer.c +@@ -81,7 +81,7 @@ gst_audio_ring_buffer_init (GstAudioRingBuffer * ringbuffer) + { + ringbuffer->open = FALSE; + ringbuffer->acquired = FALSE; +- ringbuffer->state = GST_AUDIO_RING_BUFFER_STATE_STOPPED; ++ g_atomic_int_set (&ringbuffer->state, GST_AUDIO_RING_BUFFER_STATE_STOPPED); + g_cond_init (&ringbuffer->cond); + ringbuffer->waiting = 0; + ringbuffer->empty_seg = NULL; +@@ -656,7 +656,7 @@ gst_audio_ring_buffer_acquire (GstAudioRingBuffer * buf, + buf->empty_seg = g_malloc (segsize); + + if (buf->spec.type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_RAW) { +- gst_audio_format_fill_silence (buf->spec.info.finfo, buf->empty_seg, ++ gst_audio_format_info_fill_silence (buf->spec.info.finfo, buf->empty_seg, + segsize); + } else { + /* FIXME, non-raw formats get 0 as the empty sample */ +@@ -1005,7 +1005,7 @@ gst_audio_ring_buffer_start (GstAudioRingBuffer * buf) + } + + if (G_UNLIKELY (!res)) { +- buf->state = GST_AUDIO_RING_BUFFER_STATE_PAUSED; ++ g_atomic_int_set (&buf->state, GST_AUDIO_RING_BUFFER_STATE_PAUSED); + GST_DEBUG_OBJECT (buf, "failed to start"); + } else { + GST_DEBUG_OBJECT (buf, "started"); +@@ -1036,6 +1036,40 @@ may_not_start: + } + } + ++G_GNUC_INTERNAL ++ void __gst_audio_ring_buffer_set_errored (GstAudioRingBuffer * buf); ++ ++/* __gst_audio_ring_buffer_set_errored: ++ * @buf: the #GstAudioRingBuffer that has encountered an error ++ * ++ * Mark the ringbuffer as errored after it has started. ++ * ++ * MT safe. ++ ++ * Since: 1.24 (internal in 1.22) ++ */ ++void ++__gst_audio_ring_buffer_set_errored (GstAudioRingBuffer * buf) ++{ ++ gboolean res; ++ ++ /* If started set to errored */ ++ res = g_atomic_int_compare_and_exchange (&buf->state, ++ GST_AUDIO_RING_BUFFER_STATE_STARTED, GST_AUDIO_RING_BUFFER_STATE_ERROR); ++ if (!res) { ++ GST_DEBUG_OBJECT (buf, "ringbuffer was not started, checking paused"); ++ res = g_atomic_int_compare_and_exchange (&buf->state, ++ GST_AUDIO_RING_BUFFER_STATE_PAUSED, GST_AUDIO_RING_BUFFER_STATE_ERROR); ++ } ++ if (res) { ++ GST_DEBUG_OBJECT (buf, "ringbuffer is errored"); ++ } else { ++ GST_DEBUG_OBJECT (buf, ++ "Could not mark ringbuffer as errored. 
It must have been stopped or already errored (was state %d)", ++ g_atomic_int_get (&buf->state)); ++ } ++} ++ + static gboolean + gst_audio_ring_buffer_pause_unlocked (GstAudioRingBuffer * buf) + { +@@ -1060,7 +1094,8 @@ gst_audio_ring_buffer_pause_unlocked (GstAudioRingBuffer * buf) + res = rclass->pause (buf); + + if (G_UNLIKELY (!res)) { +- buf->state = GST_AUDIO_RING_BUFFER_STATE_STARTED; ++ /* Restore started state */ ++ g_atomic_int_set (&buf->state, GST_AUDIO_RING_BUFFER_STATE_STARTED); + GST_DEBUG_OBJECT (buf, "failed to pause"); + } else { + GST_DEBUG_OBJECT (buf, "paused"); +@@ -1071,7 +1106,7 @@ gst_audio_ring_buffer_pause_unlocked (GstAudioRingBuffer * buf) + not_started: + { + /* was not started */ +- GST_DEBUG_OBJECT (buf, "was not started"); ++ GST_DEBUG_OBJECT (buf, "was not started (state %d)", buf->state); + return TRUE; + } + } +@@ -1153,9 +1188,16 @@ gst_audio_ring_buffer_stop (GstAudioRingBuffer * buf) + GST_AUDIO_RING_BUFFER_STATE_PAUSED, + GST_AUDIO_RING_BUFFER_STATE_STOPPED); + if (!res) { +- /* was not paused either, must have been stopped then */ ++ GST_DEBUG_OBJECT (buf, "was not paused, try errored"); ++ res = g_atomic_int_compare_and_exchange (&buf->state, ++ GST_AUDIO_RING_BUFFER_STATE_ERROR, ++ GST_AUDIO_RING_BUFFER_STATE_STOPPED); ++ } ++ if (!res) { ++ /* was not paused or stopped either, must have been stopped then */ + res = TRUE; +- GST_DEBUG_OBJECT (buf, "was not paused, must have been stopped"); ++ GST_DEBUG_OBJECT (buf, ++ "was not paused or errored, must have been stopped"); + goto done; + } + } +@@ -1169,7 +1211,7 @@ gst_audio_ring_buffer_stop (GstAudioRingBuffer * buf) + res = rclass->stop (buf); + + if (G_UNLIKELY (!res)) { +- buf->state = GST_AUDIO_RING_BUFFER_STATE_STARTED; ++ g_atomic_int_set (&buf->state, GST_AUDIO_RING_BUFFER_STATE_STARTED); + GST_DEBUG_OBJECT (buf, "failed to stop"); + } else { + GST_DEBUG_OBJECT (buf, "stopped"); +@@ -1675,7 +1717,7 @@ not_started: + /** + * gst_audio_ring_buffer_commit: + * @buf: the #GstAudioRingBuffer to commit +- * @sample: the sample position of the data ++ * @sample: (inout): the sample position of the data + * @data: (array length=in_samples): the data to commit + * @in_samples: the number of samples in the data to commit + * @out_samples: the number of samples to write to the ringbuffer +@@ -2044,6 +2086,11 @@ gst_audio_ring_buffer_set_channel_positions (GstAudioRingBuffer * buf, + if (memcmp (position, to, channels * sizeof (to[0])) == 0) + return; + ++ if (channels == 1) { ++ GST_LOG_OBJECT (buf, "single channel, no need to reorder"); ++ return; ++ } ++ + if (position_less_channels (position, channels)) { + GST_LOG_OBJECT (buf, "position-less channels, no need to reorder"); + return; +diff --git a/gst-libs/gst/audio/gstaudiosink.c b/gst-libs/gst/audio/gstaudiosink.c +index 5103f2d00..d7b26f1d9 100644 +--- a/gst-libs/gst/audio/gstaudiosink.c ++++ b/gst-libs/gst/audio/gstaudiosink.c +@@ -254,6 +254,9 @@ audioringbuffer_thread_func (GstAudioRingBuffer * buf) + GST_DEBUG_FUNCPTR_NAME (writefunc), + (errno > 1 ? 
g_strerror (errno) : "unknown"), left, written); + break; ++ } else if (written == 0 && G_UNLIKELY (g_atomic_int_get (&buf->state) != ++ GST_AUDIO_RING_BUFFER_STATE_STARTED)) { ++ break; + } + left -= written; + readptr += written; +@@ -414,7 +417,7 @@ gst_audio_sink_ring_buffer_acquire (GstAudioRingBuffer * buf, + buf->memory = g_malloc (buf->size); + + if (buf->spec.type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_RAW) { +- gst_audio_format_fill_silence (buf->spec.info.finfo, buf->memory, ++ gst_audio_format_info_fill_silence (buf->spec.info.finfo, buf->memory, + buf->size); + } else { + /* FIXME, non-raw formats get 0 as the empty sample */ +diff --git a/gst-libs/gst/audio/gstaudiosink.h b/gst-libs/gst/audio/gstaudiosink.h +index 06f65ffa3..0fce413b3 100644 +--- a/gst-libs/gst/audio/gstaudiosink.h ++++ b/gst-libs/gst/audio/gstaudiosink.h +@@ -98,7 +98,12 @@ struct _GstAudioSinkClass { + gboolean (*unprepare) (GstAudioSink *sink); + /* close the device */ + gboolean (*close) (GstAudioSink *sink); +- /* write samples to the device */ ++ /** ++ * GstAudioSinkClass::write: ++ * @data: (type guint8) (array length=length): the sample data ++ * ++ * Write samples to the device. ++ */ + gint (*write) (GstAudioSink *sink, gpointer data, guint length); + /* get number of frames queued in the device */ + guint (*delay) (GstAudioSink *sink); +diff --git a/gst-libs/gst/audio/gstaudiosrc.c b/gst-libs/gst/audio/gstaudiosrc.c +index fccdeac29..a676155cd 100644 +--- a/gst-libs/gst/audio/gstaudiosrc.c ++++ b/gst-libs/gst/audio/gstaudiosrc.c +@@ -384,7 +384,7 @@ gst_audio_src_ring_buffer_acquire (GstAudioRingBuffer * buf, + buf->size = spec->segtotal * spec->segsize; + buf->memory = g_malloc (buf->size); + if (buf->spec.type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_RAW) { +- gst_audio_format_fill_silence (buf->spec.info.finfo, buf->memory, ++ gst_audio_format_info_fill_silence (buf->spec.info.finfo, buf->memory, + buf->size); + } else { + /* FIXME, non-raw formats get 0 as the empty sample */ +diff --git a/gst-libs/gst/audio/gstaudiosrc.h b/gst-libs/gst/audio/gstaudiosrc.h +index 93cb5d06e..41c7b3840 100644 +--- a/gst-libs/gst/audio/gstaudiosrc.h ++++ b/gst-libs/gst/audio/gstaudiosrc.h +@@ -84,7 +84,13 @@ struct _GstAudioSrcClass { + gboolean (*unprepare) (GstAudioSrc *src); + /* close the device */ + gboolean (*close) (GstAudioSrc *src); +- /* read samples from the device */ ++ /** ++ * GstAudioSrcClass::read: ++ * @data: (type guint8) (array length=length): the sample data ++ * @timestamp: (out): a #GstClockTime ++ * ++ * Read samples from the device. 
++ */ + guint (*read) (GstAudioSrc *src, gpointer data, guint length, + GstClockTime *timestamp); + /* get number of frames queued in the device */ +diff --git a/gst-libs/gst/audio/gstaudiostreamalign.c b/gst-libs/gst/audio/gstaudiostreamalign.c +index 3eaa74bc9..caabc842e 100644 +--- a/gst-libs/gst/audio/gstaudiostreamalign.c ++++ b/gst-libs/gst/audio/gstaudiostreamalign.c +@@ -86,6 +86,8 @@ gst_audio_stream_align_new (gint rate, GstClockTime alignment_threshold, + GstAudioStreamAlign *align; + + g_return_val_if_fail (rate != 0, NULL); ++ g_return_val_if_fail (GST_CLOCK_TIME_IS_VALID (alignment_threshold), NULL); ++ g_return_val_if_fail (GST_CLOCK_TIME_IS_VALID (discont_wait), NULL); + + align = g_new0 (GstAudioStreamAlign, 1); + align->rate = rate; +@@ -172,7 +174,7 @@ gst_audio_stream_align_set_rate (GstAudioStreamAlign * align, gint rate) + * Since: 1.14 + */ + gint +-gst_audio_stream_align_get_rate (GstAudioStreamAlign * align) ++gst_audio_stream_align_get_rate (const GstAudioStreamAlign * align) + { + g_return_val_if_fail (align != NULL, 0); + +@@ -193,6 +195,7 @@ gst_audio_stream_align_set_alignment_threshold (GstAudioStreamAlign * + align, GstClockTime alignment_threshold) + { + g_return_if_fail (align != NULL); ++ g_return_if_fail (GST_CLOCK_TIME_IS_VALID (alignment_threshold)); + + align->alignment_threshold = alignment_threshold; + } +@@ -208,7 +211,8 @@ gst_audio_stream_align_set_alignment_threshold (GstAudioStreamAlign * + * Since: 1.14 + */ + GstClockTime +-gst_audio_stream_align_get_alignment_threshold (GstAudioStreamAlign * align) ++gst_audio_stream_align_get_alignment_threshold (const GstAudioStreamAlign * ++ align) + { + g_return_val_if_fail (align != NULL, 0); + +@@ -229,6 +233,7 @@ gst_audio_stream_align_set_discont_wait (GstAudioStreamAlign * align, + GstClockTime discont_wait) + { + g_return_if_fail (align != NULL); ++ g_return_if_fail (GST_CLOCK_TIME_IS_VALID (discont_wait)); + + align->discont_wait = discont_wait; + } +@@ -244,7 +249,7 @@ gst_audio_stream_align_set_discont_wait (GstAudioStreamAlign * align, + * Since: 1.14 + */ + GstClockTime +-gst_audio_stream_align_get_discont_wait (GstAudioStreamAlign * align) ++gst_audio_stream_align_get_discont_wait (const GstAudioStreamAlign * align) + { + g_return_val_if_fail (align != NULL, 0); + +@@ -280,7 +285,8 @@ gst_audio_stream_align_mark_discont (GstAudioStreamAlign * align) + * Since: 1.14 + */ + GstClockTime +-gst_audio_stream_align_get_timestamp_at_discont (GstAudioStreamAlign * align) ++gst_audio_stream_align_get_timestamp_at_discont (const GstAudioStreamAlign * ++ align) + { + g_return_val_if_fail (align != NULL, GST_CLOCK_TIME_NONE); + +@@ -299,7 +305,8 @@ gst_audio_stream_align_get_timestamp_at_discont (GstAudioStreamAlign * align) + * Since: 1.14 + */ + guint64 +-gst_audio_stream_align_get_samples_since_discont (GstAudioStreamAlign * align) ++gst_audio_stream_align_get_samples_since_discont (const GstAudioStreamAlign * ++ align) + { + g_return_val_if_fail (align != NULL, 0); + +diff --git a/gst-libs/gst/audio/gstaudiostreamalign.h b/gst-libs/gst/audio/gstaudiostreamalign.h +index 80b83f927..080e9ea44 100644 +--- a/gst-libs/gst/audio/gstaudiostreamalign.h ++++ b/gst-libs/gst/audio/gstaudiostreamalign.h +@@ -54,29 +54,29 @@ GST_AUDIO_API + void gst_audio_stream_align_set_rate (GstAudioStreamAlign * align, + gint rate); + GST_AUDIO_API +-gint gst_audio_stream_align_get_rate (GstAudioStreamAlign * align); ++gint gst_audio_stream_align_get_rate (const GstAudioStreamAlign * align); + + GST_AUDIO_API + void 
gst_audio_stream_align_set_alignment_threshold (GstAudioStreamAlign * align, + GstClockTime alignment_threshold); + GST_AUDIO_API +-GstClockTime gst_audio_stream_align_get_alignment_threshold (GstAudioStreamAlign * align); ++GstClockTime gst_audio_stream_align_get_alignment_threshold (const GstAudioStreamAlign * align); + + GST_AUDIO_API + void gst_audio_stream_align_set_discont_wait (GstAudioStreamAlign * align, + GstClockTime discont_wait); + GST_AUDIO_API +-GstClockTime gst_audio_stream_align_get_discont_wait (GstAudioStreamAlign * align); ++GstClockTime gst_audio_stream_align_get_discont_wait (const GstAudioStreamAlign * align); + + + GST_AUDIO_API + void gst_audio_stream_align_mark_discont (GstAudioStreamAlign * align); + + GST_AUDIO_API +-GstClockTime gst_audio_stream_align_get_timestamp_at_discont (GstAudioStreamAlign * align); ++GstClockTime gst_audio_stream_align_get_timestamp_at_discont (const GstAudioStreamAlign * align); + + GST_AUDIO_API +-guint64 gst_audio_stream_align_get_samples_since_discont (GstAudioStreamAlign * align); ++guint64 gst_audio_stream_align_get_samples_since_discont (const GstAudioStreamAlign * align); + + GST_AUDIO_API + gboolean gst_audio_stream_align_process (GstAudioStreamAlign * align, +diff --git a/gst-libs/gst/audio/gstaudioutilsprivate.c b/gst-libs/gst/audio/gstaudioutilsprivate.c +index 1db4601f0..1f704efad 100644 +--- a/gst-libs/gst/audio/gstaudioutilsprivate.c ++++ b/gst-libs/gst/audio/gstaudioutilsprivate.c +@@ -51,7 +51,6 @@ __gst_audio_element_proxy_caps (GstElement * element, GstCaps * templ_caps, + const GstStructure *caps_s = gst_caps_get_structure (caps, j); + const GValue *val; + GstStructure *s; +- GstCaps *tmp = gst_caps_new_empty (); + + s = gst_structure_new_id_empty (q_name); + if ((val = gst_structure_get_value (caps_s, "rate"))) +@@ -61,9 +60,8 @@ __gst_audio_element_proxy_caps (GstElement * element, GstCaps * templ_caps, + if ((val = gst_structure_get_value (caps_s, "channels-mask"))) + gst_structure_set_value (s, "channels-mask", val); + +- gst_caps_append_structure_full (tmp, s, ++ result = gst_caps_merge_structure_full (result, s, + gst_caps_features_copy (features)); +- result = gst_caps_merge (result, tmp); + } + } + +@@ -128,13 +126,15 @@ __gst_audio_element_proxy_getcaps (GstElement * element, GstPad * sinkpad, + + filter_caps = __gst_audio_element_proxy_caps (element, templ_caps, allowed); + +- fcaps = gst_caps_intersect (filter_caps, templ_caps); ++ fcaps = gst_caps_intersect_full (filter_caps, templ_caps, ++ GST_CAPS_INTERSECT_FIRST); + gst_caps_unref (filter_caps); + gst_caps_unref (templ_caps); + + if (filter) { + GST_LOG_OBJECT (element, "intersecting with %" GST_PTR_FORMAT, filter); +- filter_caps = gst_caps_intersect (fcaps, filter); ++ filter_caps = gst_caps_intersect_full (fcaps, filter, ++ GST_CAPS_INTERSECT_FIRST); + gst_caps_unref (fcaps); + fcaps = filter_caps; + } +diff --git a/gst-libs/gst/meson.build b/gst-libs/gst/meson.build +index cd3b5b043..42c0a6fca 100644 +--- a/gst-libs/gst/meson.build ++++ b/gst-libs/gst/meson.build +@@ -3,9 +3,9 @@ subdir('fft') + subdir('video') + subdir('audio') + subdir('rtp') ++subdir('pbutils') + subdir('sdp') + subdir('rtsp') +-subdir('pbutils') + subdir('riff') + subdir('app') + subdir('allocators') +diff --git a/gst-libs/gst/pbutils/codec-utils.c b/gst-libs/gst/pbutils/codec-utils.c +index 3d6d30a1f..71d918034 100644 +--- a/gst-libs/gst/pbutils/codec-utils.c ++++ b/gst-libs/gst/pbutils/codec-utils.c +@@ -43,6 +43,27 @@ + + #include + ++#ifndef GST_DISABLE_GST_DEBUG ++#define 
GST_CAT_DEFAULT gst_pb_utils_codec_utils_ensure_debug_category() ++ ++static GstDebugCategory * ++gst_pb_utils_codec_utils_ensure_debug_category (void) ++{ ++ static gsize cat_gonce = 0; ++ ++ if (g_once_init_enter (&cat_gonce)) { ++ GstDebugCategory *cat = NULL; ++ ++ GST_DEBUG_CATEGORY_INIT (cat, "codec-utils", 0, ++ "GstPbUtils codec helper functions"); ++ ++ g_once_init_leave (&cat_gonce, (gsize) cat); ++ } ++ ++ return (GstDebugCategory *) cat_gonce; ++} ++#endif /* GST_DISABLE_GST_DEBUG */ ++ + #define GST_SIMPLE_CAPS_HAS_NAME(caps,name) \ + gst_structure_has_name(gst_caps_get_structure((caps),0),(name)) + +@@ -256,7 +277,7 @@ gst_codec_utils_aac_get_channels (const guint8 * audio_config, guint len) + * normally determined using the AudioObjectType field which is in the first + * 5 bits of @audio_config + * +- * Returns: The profile as a const string and %NULL if the profile could not be ++ * Returns: (nullable): The profile as a const string and %NULL if the profile could not be + * determined. + */ + const gchar * +@@ -319,7 +340,7 @@ gst_codec_utils_aac_get_profile (const guint8 * audio_config, guint len) + * fields are appropriately shifted). + * * Bit 9:12 contains the channel configuration + * +- * Returns: The level as a const string and %NULL if the level could not be ++ * Returns: (nullable): The level as a const string and %NULL if the level could not be + * determined. + */ + const gchar * +@@ -448,7 +469,7 @@ gst_codec_utils_aac_get_level (const guint8 * audio_config, guint len) + else + rcu += (rcu_ref + (rcu_ref - 1) * ((2 * num_cpe) - 1)); + +- num_channels = num_sce + (2 * num_cpe) + num_lfe; ++ num_channels = num_sce + (2 * num_cpe); + + if (audio_object_type == 2) { + /* AAC LC => return the level as per the 'AAC Profile' */ +@@ -564,7 +585,7 @@ gst_codec_utils_aac_caps_set_level_and_profile (GstCaps * caps, + * * Bit 13:15 - Reserved + * * Bit 16:24 - Level indication + * +- * Returns: The profile as a const string, or %NULL if there is an error. ++ * Returns: (nullable): The profile as a const string, or %NULL if there is an error. + */ + const gchar * + gst_codec_utils_h264_get_profile (const guint8 * sps, guint len) +@@ -665,7 +686,7 @@ gst_codec_utils_h264_get_profile (const guint8 * sps, guint len) + * sequence parameter set into a string. The SPS is expected to have the + * same format as for gst_codec_utils_h264_get_profile(). + * +- * Returns: The level as a const string, or %NULL if there is an error. ++ * Returns: (nullable): The level as a const string, or %NULL if there is an error. + */ + const gchar * + gst_codec_utils_h264_get_level (const guint8 * sps, guint len) +@@ -818,6 +839,59 @@ gst_codec_utils_h264_caps_set_level_and_profile (GstCaps * caps, + return (level != NULL && profile != NULL); + } + ++/** ++ * gst_codec_utils_h264_get_profile_flags_level: ++ * @codec_data: (array length=len): H264 AVCC extradata ++ * @len: length of @codec_data ++ * @profile: (optional) (out): return location for h264 profile_idc or %NULL ++ * @flags: (optional) (out): return location for h264 constraint set flags or %NULL ++ * @level: (optional) (out): return location h264 level_idc or %NULL ++ * ++ * Parses profile, flags, and level from a H264 AVCC extradata/sequence_header. ++ * These are most commonly retrieved from a video/x-h264 caps with a codec_data ++ * buffer. ++ * ++ * The format of H264 AVCC extradata/sequence_header is documented in the ++ * ITU-T H.264 specification section 7.3.2.1.1 as well as in ISO/IEC 14496-15 ++ * section 5.3.3.1.2. 
++ * ++ * Returns: %TRUE on success, %FALSE on failure ++ * ++ * Since: 1.20 ++ */ ++gboolean ++gst_codec_utils_h264_get_profile_flags_level (const guint8 * codec_data, ++ guint len, guint8 * profile, guint8 * flags, guint8 * level) ++{ ++ gboolean ret = FALSE; ++ ++ g_return_val_if_fail (codec_data != NULL, FALSE); ++ ++ if (len < 7) { ++ GST_WARNING ("avc codec data is too small"); ++ goto done; ++ } ++ if (codec_data[0] != 1) { ++ GST_WARNING ("failed to parse avc codec version, must be 1"); ++ goto done; ++ } ++ ++ if (profile) { ++ *profile = codec_data[1]; ++ } ++ if (flags) { ++ *flags = codec_data[2]; ++ } ++ if (level) { ++ *level = codec_data[3]; ++ } ++ ++ ret = TRUE; ++ ++done: ++ return ret; ++} ++ + /* forked from gsth265parse.c */ + typedef struct + { +@@ -1127,7 +1201,7 @@ utils_get_scalable_format_range_extensions_profile (GstH265ExtensionProfile * + * * Bit 44:87 - See below + * * Bit 88:95 - general_level_idc + * +- * Returns: The profile as a const string, or %NULL if there is an error. ++ * Returns: (nullable): The profile as a const string, or %NULL if there is an error. + * + * Since: 1.4 + */ +@@ -1293,7 +1367,7 @@ gst_codec_utils_h265_get_profile (const guint8 * profile_tier_level, guint len) + * profile_tier_level structure into a string. The profile_tier_level + * is expected to have the same format as for gst_codec_utils_h264_get_profile(). + * +- * Returns: The tier as a const string, or %NULL if there is an error. ++ * Returns: (nullable): The tier as a const string, or %NULL if there is an error. + * + * Since: 1.4 + */ +@@ -1330,7 +1404,7 @@ gst_codec_utils_h265_get_tier (const guint8 * profile_tier_level, guint len) + * profile_tier_level structure into a string. The profiel_tier_level is + * expected to have the same format as for gst_codec_utils_h264_get_profile(). + * +- * Returns: The level as a const string, or %NULL if there is an error. ++ * Returns: (nullable): The level as a const string, or %NULL if there is an error. + * + * Since: 1.4 + */ +@@ -1480,7 +1554,7 @@ gst_codec_utils_h265_caps_set_level_tier_and_profile (GstCaps * caps, + * object sequence start code. Only the first byte + * (profile_and_level_indication) is used. + * +- * Returns: The profile as a const string, or NULL if there is an error. ++ * Returns: (nullable): The profile as a const string, or NULL if there is an error. + */ + const gchar * + gst_codec_utils_mpeg4video_get_profile (const guint8 * vis_obj_seq, guint len) +@@ -1553,7 +1627,7 @@ gst_codec_utils_mpeg4video_get_profile (const guint8 * vis_obj_seq, guint len) + * object sequence start code. Only the first byte + * (profile_and_level_indication) is used. + * +- * Returns: The level as a const string, or NULL if there is an error. ++ * Returns: (nullable): The level as a const string, or NULL if there is an error. 
+ */ + const gchar * + gst_codec_utils_mpeg4video_get_level (const guint8 * vis_obj_seq, guint len) +@@ -1677,12 +1751,12 @@ gst_codec_utils_mpeg4video_caps_set_level_and_profile (GstCaps * caps, + /** + * gst_codec_utils_opus_parse_caps: + * @caps: the #GstCaps to parse the data from +- * @rate: (out): the sample rate +- * @channels: (out): the number of channels +- * @channel_mapping_family: (out): the channel mapping family +- * @stream_count: (out): the number of independent streams +- * @coupled_count: (out): the number of stereo streams +- * @channel_mapping: (out) (array fixed-size=256): the mapping between the streams ++ * @rate: (optional) (out): the sample rate ++ * @channels: (optional) (out): the number of channels ++ * @channel_mapping_family: (optional) (out): the channel mapping family ++ * @stream_count: (optional) (out): the number of independent streams ++ * @coupled_count: (optional) (out): the number of stereo streams ++ * @channel_mapping: (optional) (out) (array fixed-size=256): the mapping between the streams + * + * Parses Opus caps and fills the different fields with defaults if possible. + * +@@ -1798,11 +1872,11 @@ gst_codec_utils_opus_parse_caps (GstCaps * caps, + * @channel_mapping_family: the channel mapping family + * @stream_count: the number of independent streams + * @coupled_count: the number of stereo streams +- * @channel_mapping: (allow-none) (array): the mapping between the streams ++ * @channel_mapping: (nullable) (array): the mapping between the streams + * + * Creates Opus caps from the given parameters. + * +- * Returns: The #GstCaps, or %NULL if the parameters would lead to ++ * Returns: (transfer full) (nullable): The #GstCaps, or %NULL if the parameters would lead to + * invalid Opus caps. + * + * Since: 1.8 +@@ -1963,12 +2037,12 @@ _gst_caps_set_buffer_array (GstCaps * caps, const gchar * field, + /** + * gst_codec_utils_opus_create_caps_from_header: + * @header: OpusHead header +- * @comments: (allow-none): Comment header or NULL ++ * @comments: (nullable): Comment header or NULL + * + * Creates Opus caps from the given OpusHead @header and comment header + * @comments. + * +- * Returns: The #GstCaps. ++ * Returns: (transfer full) (nullable): The #GstCaps. + * + * Since: 1.8 + */ +@@ -2022,13 +2096,13 @@ gst_codec_utils_opus_create_caps_from_header (GstBuffer * header, + * @channel_mapping_family: the channel mapping family + * @stream_count: the number of independent streams + * @coupled_count: the number of stereo streams +- * @channel_mapping: (allow-none) (array): the mapping between the streams ++ * @channel_mapping: (nullable) (array): the mapping between the streams + * @pre_skip: Pre-skip in 48kHz samples or 0 + * @output_gain: Output gain or 0 + * + * Creates OpusHead header from the given parameters. + * +- * Returns: The #GstBuffer containing the OpusHead. ++ * Returns: (transfer full) (nullable): The #GstBuffer containing the OpusHead. 
+ * + * Since: 1.8 + */ +@@ -2099,14 +2173,14 @@ gst_codec_utils_opus_create_header (guint32 rate, + /** + * gst_codec_utils_opus_parse_header: + * @header: the OpusHead #GstBuffer +- * @rate: (out): the sample rate +- * @channels: (out): the number of channels +- * @channel_mapping_family: (out): the channel mapping family +- * @stream_count: (out): the number of independent streams +- * @coupled_count: (out): the number of stereo streams +- * @channel_mapping: (out) (array fixed-size=256): the mapping between the streams +- * @pre_skip: (out): Pre-skip in 48kHz samples or 0 +- * @output_gain: (out): Output gain or 0 ++ * @rate: (optional) (out): the sample rate ++ * @channels: (optional) (out): the number of channels ++ * @channel_mapping_family: (optional) (out): the channel mapping family ++ * @stream_count: (optional) (out): the number of independent streams ++ * @coupled_count: (optional) (out): the number of stereo streams ++ * @channel_mapping: (optional) (out) (array fixed-size=256): the mapping between the streams ++ * @pre_skip: (optional) (out): Pre-skip in 48kHz samples or 0 ++ * @output_gain: (optional) (out): Output gain or 0 + * + * Parses the OpusHead header. + * +@@ -2205,3 +2279,608 @@ done: + + return ret; + } ++ ++static gboolean ++h264_caps_structure_get_profile_flags_level (GstStructure * caps_st, ++ guint8 * profile, guint8 * flags, guint8 * level) ++{ ++ const GValue *codec_data_value = NULL; ++ GstBuffer *codec_data = NULL; ++ GstMapInfo map; ++ gboolean ret = FALSE; ++ guint8 *data = NULL; ++ gsize size; ++ ++ codec_data_value = gst_structure_get_value (caps_st, "codec_data"); ++ if (!codec_data_value) { ++ GST_DEBUG ++ ("video/x-h264 caps did not have codec_data set, cannot parse profile, flags and level"); ++ return FALSE; ++ } ++ ++ codec_data = gst_value_get_buffer (codec_data_value); ++ if (!gst_buffer_map (codec_data, &map, GST_MAP_READ)) { ++ return FALSE; ++ } ++ data = map.data; ++ size = map.size; ++ ++ if (!gst_codec_utils_h264_get_profile_flags_level (data, (guint) size, ++ profile, flags, level)) { ++ GST_WARNING ++ ("Failed to parse profile, flags and level from h264 codec data"); ++ goto done; ++ } ++ ++ ret = TRUE; ++ ++done: ++ gst_buffer_unmap (codec_data, &map); ++ ++ return ret; ++} ++ ++static gboolean ++aac_caps_structure_get_audio_object_type (GstStructure * caps_st, ++ guint8 * audio_object_type) ++{ ++ gboolean ret = FALSE; ++ const GValue *codec_data_value = NULL; ++ GstBuffer *codec_data = NULL; ++ GstMapInfo map; ++ guint8 *data = NULL; ++ gsize size; ++ GstBitReader br; ++ ++ codec_data_value = gst_structure_get_value (caps_st, "codec_data"); ++ if (!codec_data_value) { ++ GST_DEBUG ++ ("audio/mpeg pad did not have codec_data set, cannot parse audio object type"); ++ return FALSE; ++ } ++ ++ codec_data = gst_value_get_buffer (codec_data_value); ++ if (!gst_buffer_map (codec_data, &map, GST_MAP_READ)) { ++ return FALSE; ++ } ++ data = map.data; ++ size = map.size; ++ ++ if (size < 2) { ++ GST_WARNING ("aac codec data is too small"); ++ goto done; ++ } ++ ++ gst_bit_reader_init (&br, data, size); ++ ret = gst_codec_utils_aac_get_audio_object_type (&br, audio_object_type); ++ ++done: ++ gst_buffer_unmap (codec_data, &map); ++ ++ return ret; ++} ++ ++static gboolean ++hevc_caps_get_mime_codec (GstCaps * caps, gchar ** mime_codec) ++{ ++ GstStructure *caps_st = NULL; ++ const GValue *codec_data_value = NULL; ++ GstBuffer *codec_data = NULL; ++ GstMapInfo map; ++ gboolean ret = FALSE; ++ const gchar *stream_format; ++ guint8 *data = NULL; 
++ gsize size; ++ guint16 profile_space; ++ guint8 tier_flag; ++ guint16 profile_idc; ++ guint32 compat_flags; ++ guchar constraint_indicator_flags[6]; ++ guint8 level_idc; ++ guint32 compat_flag_parameter = 0; ++ GString *codec_string; ++ const guint8 *profile_tier_level; ++ gint last_flag_index; ++ ++ caps_st = gst_caps_get_structure (caps, 0); ++ codec_data_value = gst_structure_get_value (caps_st, "codec_data"); ++ stream_format = gst_structure_get_string (caps_st, "stream-format"); ++ if (!codec_data_value) { ++ GST_DEBUG ("video/x-h265 caps did not have codec_data set, cannot parse"); ++ return FALSE; ++ } else if (!stream_format) { ++ GST_DEBUG ++ ("video/x-h265 caps did not have stream-format set, cannot parse"); ++ return FALSE; ++ } ++ ++ codec_data = gst_value_get_buffer (codec_data_value); ++ if (!gst_buffer_map (codec_data, &map, GST_MAP_READ)) { ++ return FALSE; ++ } ++ data = map.data; ++ size = map.size; ++ ++ /* HEVCDecoderConfigurationRecord is at a minimum 23 bytes long */ ++ if (size < 23) { ++ GST_DEBUG ("Incomplete HEVCDecoderConfigurationRecord"); ++ goto done; ++ } ++ ++ if (!g_str_equal (stream_format, "hev1") ++ && !g_str_equal (stream_format, "hvc1")) { ++ GST_DEBUG ("Unknown stream-format %s", stream_format); ++ goto done; ++ } ++ ++ profile_tier_level = data + 1; ++ profile_space = (profile_tier_level[0] & 0x11) >> 6; ++ tier_flag = (profile_tier_level[0] & 0x001) >> 5; ++ profile_idc = (profile_tier_level[0] & 0x1f); ++ ++ compat_flags = GST_READ_UINT32_BE (data + 2); ++ for (unsigned i = 0; i < 6; ++i) ++ constraint_indicator_flags[i] = GST_READ_UINT8 (data + 6 + i); ++ ++ level_idc = data[12]; ++ ++ /* The 32 bits of the compat_flags, but in reverse bit order */ ++ compat_flags = ++ ((compat_flags & 0xaaaaaaaa) >> 1) | ((compat_flags & 0x55555555) << 1); ++ compat_flags = ++ ((compat_flags & 0xcccccccc) >> 2) | ((compat_flags & 0x33333333) << 2); ++ compat_flags = ++ ((compat_flags & 0xf0f0f0f0) >> 4) | ((compat_flags & 0x0f0f0f0f) << 4); ++ compat_flags = ++ ((compat_flags & 0xff00ff00) >> 8) | ((compat_flags & 0x00ff00ff) << 8); ++ compat_flag_parameter = (compat_flags >> 16) | (compat_flags << 16); ++ ++ codec_string = g_string_new (stream_format); ++ codec_string = g_string_append_c (codec_string, '.'); ++ if (profile_space) ++ codec_string = g_string_append_c (codec_string, 'A' + profile_space - 1); ++ g_string_append_printf (codec_string, "%" G_GUINT16_FORMAT ".%X.%c%d", ++ profile_idc, compat_flag_parameter, tier_flag ? 'H' : 'L', level_idc); ++ ++ /* Each of the 6 bytes of the constraint flags, starting from the byte containing the ++ * progressive_source_flag, each encoded as a hexadecimal number, and the encoding ++ * of each byte separated by a period; trailing bytes that are zero may be omitted. 
++ */ ++ last_flag_index = 5; ++ while (last_flag_index >= 0 ++ && (int) (constraint_indicator_flags[last_flag_index]) == 0) ++ --last_flag_index; ++ for (gint i = 0; i <= last_flag_index; ++i) { ++ g_string_append_printf (codec_string, ".%02X", ++ constraint_indicator_flags[i]); ++ } ++ ++ *mime_codec = g_string_free (codec_string, FALSE); ++ ++ ret = TRUE; ++ ++done: ++ gst_buffer_unmap (codec_data, &map); ++ return ret; ++} ++ ++/* https://www.webmproject.org/vp9/mp4/#codecs-parameter-string */ ++static char * ++vp9_caps_get_mime_codec (GstCaps * caps) ++{ ++ GstStructure *caps_st; ++ const char *profile_str, *chroma_format_str, *colorimetry_str; ++ guint bitdepth_luma, bitdepth_chroma; ++ guint8 profile = -1, chroma_format = -1, level = -1, color_primaries = ++ -1, color_transfer = -1, color_matrix = -1; ++ gboolean video_full_range; ++ GstVideoColorimetry cinfo = { 0, }; ++ GString *codec_string; ++ ++ caps_st = gst_caps_get_structure (caps, 0); ++ codec_string = g_string_new ("vp09"); ++ ++ profile_str = gst_structure_get_string (caps_st, "profile"); ++ if (g_strcmp0 (profile_str, "0") == 0) { ++ profile = 0; ++ } else if (g_strcmp0 (profile_str, "1") == 0) { ++ profile = 1; ++ } else if (g_strcmp0 (profile_str, "2") == 0) { ++ profile = 2; ++ } else if (g_strcmp0 (profile_str, "3") == 0) { ++ profile = 3; ++ } else { ++ goto done; ++ } ++ ++ /* XXX: hardcoded level */ ++ level = 10; ++ ++ gst_structure_get (caps_st, "bit-depth-luma", G_TYPE_UINT, ++ &bitdepth_luma, "bit-depth-chroma", G_TYPE_UINT, &bitdepth_chroma, NULL); ++ ++ if (bitdepth_luma == 0) ++ goto done; ++ if (bitdepth_luma != bitdepth_chroma) ++ goto done; ++ ++ /* mandatory elements */ ++ g_string_append_printf (codec_string, ".%02u.%02u.%02u", profile, level, ++ bitdepth_luma); ++ ++ colorimetry_str = gst_structure_get_string (caps_st, "colorimetry"); ++ if (!colorimetry_str) ++ goto done; ++ if (!gst_video_colorimetry_from_string (&cinfo, colorimetry_str)) ++ goto done; ++ video_full_range = cinfo.range == GST_VIDEO_COLOR_RANGE_0_255; ++ ++ chroma_format_str = gst_structure_get_string (caps_st, "chroma-format"); ++ if (g_strcmp0 (chroma_format_str, "4:2:0") == 0) { ++ const char *chroma_site_str; ++ GstVideoChromaSite chroma_site; ++ ++ chroma_site_str = gst_structure_get_string (caps_st, "chroma-site"); ++ if (chroma_site_str) ++ chroma_site = gst_video_chroma_site_from_string (chroma_site_str); ++ else ++ chroma_site = GST_VIDEO_CHROMA_SITE_UNKNOWN; ++ if (chroma_site == GST_VIDEO_CHROMA_SITE_V_COSITED) { ++ chroma_format = 0; ++ } else if (chroma_site == GST_VIDEO_CHROMA_SITE_COSITED) { ++ chroma_format = 1; ++ } else { ++ chroma_format = 1; ++ } ++ } else if (g_strcmp0 (chroma_format_str, "4:2:2") == 0) { ++ chroma_format = 2; ++ } else if (g_strcmp0 (chroma_format_str, "4:4:4") == 0) { ++ chroma_format = 3; ++ } else { ++ goto done; ++ } ++ ++ /* optional but all or nothing. 
Include them if any parameter differs from the default value */ ++ color_primaries = gst_video_color_primaries_to_iso (cinfo.primaries); ++ color_transfer = gst_video_transfer_function_to_iso (cinfo.transfer); ++ color_matrix = gst_video_color_matrix_to_iso (cinfo.matrix); ++ if (chroma_format != 1 || color_primaries != 1 || color_transfer != 1 ++ || color_matrix != 1 || video_full_range) { ++ g_string_append_printf (codec_string, ".%02u.%02u.%02u.%02u.%02u", ++ chroma_format, color_primaries, color_transfer, color_matrix, ++ video_full_range); ++ } ++ ++done: ++ return g_string_free (codec_string, FALSE); ++} ++ ++/** ++ * gst_codec_utils_caps_get_mime_codec: ++ * @caps: A #GstCaps to convert to mime codec ++ * ++ * Converts @caps to a RFC 6381 compatible codec string if possible. ++ * ++ * Useful for providing the 'codecs' field inside the 'Content-Type' HTTP ++ * header for containerized formats, such as mp4 or matroska. ++ * ++ * Registered codecs can be found at http://mp4ra.org/#/codecs ++ * ++ * Returns: (transfer full) (nullable): a RFC 6381 compatible codec string or %NULL ++ * ++ * Since: 1.20 ++ */ ++gchar * ++gst_codec_utils_caps_get_mime_codec (GstCaps * caps) ++{ ++ gchar *mime_codec = NULL; ++ GstStructure *caps_st = NULL; ++ const gchar *media_type = NULL; ++ ++ g_return_val_if_fail (caps != NULL, NULL); ++ g_return_val_if_fail (gst_caps_is_fixed (caps), NULL); ++ ++ caps_st = gst_caps_get_structure (caps, 0); ++ if (caps_st == NULL) { ++ GST_WARNING ("Failed to get structure from caps"); ++ goto done; ++ } ++ ++ media_type = gst_structure_get_name (caps_st); ++ ++ if (g_strcmp0 (media_type, "video/x-h264") == 0) { ++ /* avc1.AABBCC ++ * AA = profile ++ * BB = constraint set flags ++ * CC = level ++ */ ++ guint8 profile = 0; ++ guint8 flags = 0; ++ guint8 level = 0; ++ ++ if (!h264_caps_structure_get_profile_flags_level (caps_st, &profile, &flags, ++ &level)) { ++ GST_DEBUG ++ ("h264 caps did not contain 'codec_data', cannot determine detailed codecs info"); ++ mime_codec = g_strdup ("avc1"); ++ } else { ++ mime_codec = g_strdup_printf ("avc1.%02X%02X%02X", profile, flags, level); ++ } ++ } else if (g_strcmp0 (media_type, "video/x-h265") == 0) { ++ if (!hevc_caps_get_mime_codec (caps, &mime_codec)) { ++ GST_DEBUG ("h265 caps parsing failed"); ++ mime_codec = g_strdup ("hev1"); ++ } ++ } else if (g_strcmp0 (media_type, "video/x-av1") == 0) { ++ /* TODO: Some browsers won't play the video unless more codec information is ++ * available in the mime codec for av1. This is documented in ++ * https://aomediacodec.github.io/av1-isobmff/#codecsparam */ ++ mime_codec = g_strdup ("av01"); ++ } else if (g_strcmp0 (media_type, "video/x-vp8") == 0) { ++ /* TODO: most browsers won't play the video unless more codec information is ++ * available in the mime codec for vp8. 
*/ ++ mime_codec = g_strdup ("vp08"); ++ } else if (g_strcmp0 (media_type, "video/x-vp9") == 0) { ++ mime_codec = vp9_caps_get_mime_codec (caps); ++ } else if (g_strcmp0 (media_type, "image/jpeg") == 0) { ++ mime_codec = g_strdup ("mjpg"); ++ } else if (g_strcmp0 (media_type, "audio/mpeg") == 0) { ++ guint8 audio_object_type = 0; ++ if (aac_caps_structure_get_audio_object_type (caps_st, &audio_object_type)) { ++ mime_codec = g_strdup_printf ("mp4a.40.%u", audio_object_type); ++ } else { ++ mime_codec = g_strdup ("mp4a.40"); ++ } ++ } else if (g_strcmp0 (media_type, "audio/x-opus") == 0) { ++ mime_codec = g_strdup ("opus"); ++ } else if (g_strcmp0 (media_type, "audio/x-mulaw") == 0) { ++ mime_codec = g_strdup ("ulaw"); ++ } else if (g_strcmp0 (media_type, "audio/x-adpcm") == 0) { ++ if (g_strcmp0 (gst_structure_get_string (caps_st, "layout"), "g726") == 0) { ++ mime_codec = g_strdup ("g726"); ++ } ++ } ++ ++done: ++ return mime_codec; ++} ++ ++static GstCaps * ++gst_codec_utils_caps_from_mime_codec_single (const gchar * codec) ++{ ++ GstCaps *caps = NULL; ++ gchar **subcodec = NULL; ++ gchar *subcodec0; ++ guint32 codec_fourcc; ++ ++ GST_DEBUG ("Analyzing codec '%s'", codec); ++ ++ /* rfc 6381 3.3 ++ * ++ * For the ISO Base Media File Format, and the QuickTime movie file ++ * format, the first element of a 'codecs' parameter value is a sample ++ * description entry four-character code as registered by the MP4 ++ * Registration Authority [MP4RA]. ++ * ++ * See Also : http://mp4ra.org/#/codecs ++ */ ++ if (strlen (codec) < 4) { ++ GST_WARNING ("Invalid codec (smaller than 4 characters) : '%s'", codec); ++ goto beach; ++ } ++ ++ subcodec = g_strsplit (codec, ".", 0); ++ subcodec0 = subcodec[0]; ++ ++ if (subcodec0 == NULL) ++ goto beach; ++ ++ /* Skip any leading spaces */ ++ while (*subcodec0 == ' ') ++ subcodec0++; ++ ++ if (strlen (subcodec0) < 4) { ++ GST_WARNING ("Invalid codec (smaller than 4 characters) : '%s'", subcodec0); ++ goto beach; ++ } ++ ++ GST_LOG ("subcodec[0] '%s'", subcodec0); ++ ++ codec_fourcc = GST_READ_UINT32_LE (subcodec0); ++ switch (codec_fourcc) { ++ case GST_MAKE_FOURCC ('a', 'v', 'c', '1'): ++ case GST_MAKE_FOURCC ('a', 'v', 'c', '2'): ++ case GST_MAKE_FOURCC ('a', 'v', 'c', '3'): ++ case GST_MAKE_FOURCC ('a', 'v', 'c', '4'): ++ { ++ guint8 sps[3]; ++ guint64 spsint64; ++ ++ /* ISO 14496-15 Annex E : Sub-parameters for the MIME type “codecs” ++ * parameter */ ++ caps = gst_caps_new_empty_simple ("video/x-h264"); ++ ++ if (subcodec[1]) { ++ /* The second element is the hexadecimal representation of the following ++ * three bytes in the (subset) sequence parameter set Network ++ * Abstraction Layer (NAL) unit specified in [AVC]: ++ * * profile_idc ++ * * constraint_set flags ++ * * level_idc ++ * */ ++ spsint64 = g_ascii_strtoull (subcodec[1], NULL, 16); ++ sps[0] = spsint64 >> 16; ++ sps[1] = (spsint64 >> 8) & 0xff; ++ sps[2] = spsint64 & 0xff; ++ gst_codec_utils_h264_caps_set_level_and_profile (caps, ++ (const guint8 *) &sps, 3); ++ } ++ } ++ break; ++ case GST_MAKE_FOURCC ('m', 'p', '4', 'a'): ++ { ++ guint64 oti; ++ ++ if (!subcodec[1]) ++ break; ++ oti = g_ascii_strtoull (subcodec[1], NULL, 16); ++ /* For mp4a, mp4v and mp4s, the second element is the hexadecimal ++ * representation of the MP4 Registration Authority ++ * ObjectTypeIndication */ ++ switch (oti) { ++ case 0x40: ++ { ++ guint64 audio_oti; ++ const gchar *profile = NULL; ++ ++ /* MPEG-4 Audio (ISO/IEC 14496-3 */ ++ caps = ++ gst_caps_new_simple ("audio/mpeg", "mpegversion", G_TYPE_INT, 4, ++ NULL); 
++ ++ if (!subcodec[2]) ++ break; ++ /* If present, last element is the audio object type */ ++ audio_oti = g_ascii_strtoull (subcodec[2], NULL, 16); ++ ++ switch (audio_oti) { ++ case 1: ++ profile = "main"; ++ break; ++ case 2: ++ profile = "lc"; ++ break; ++ case 3: ++ profile = "ssr"; ++ break; ++ case 4: ++ profile = "ltp"; ++ break; ++ default: ++ GST_WARNING ("Unhandled MPEG-4 Audio Object Type: 0x%" ++ G_GUINT64_FORMAT "x", audio_oti); ++ break; ++ } ++ if (profile) ++ gst_caps_set_simple (caps, "profile", G_TYPE_STRING, profile, NULL); ++ break; ++ } ++ default: ++ GST_WARNING ("Unknown ObjectTypeIndication 0x%" G_GUINT64_FORMAT "x", ++ oti); ++ break; ++ } ++ } ++ break; ++ case GST_MAKE_FOURCC ('h', 'e', 'v', '1'): ++ case GST_MAKE_FOURCC ('h', 'v', 'c', '1'): ++ { ++ /* ISO 14496-15 Annex E : Sub-parameters for the MIME type “codecs” ++ * parameter */ ++ caps = gst_caps_new_empty_simple ("video/x-h265"); ++ ++ /* FIXME : Extract information from the following component */ ++ break; ++ } ++ /* Following are not defined in rfc 6831 but are registered MP4RA codecs */ ++ case GST_MAKE_FOURCC ('a', 'c', '-', '3'): ++ /* ETSI TS 102 366 v1.4.1 - Digital Audio Compression (AC-3, Enhanced AC-3) Standard, Annex F */ ++ caps = gst_caps_new_empty_simple ("audio/x-ac3"); ++ break; ++ case GST_MAKE_FOURCC ('e', 'c', '+', '3'): ++ GST_FIXME ++ ("Signalling of ATMOS ('ec+3') isn't defined yet. Falling back to EAC3 caps"); ++ /* withdrawn, unused, do not use (was enhanced AC-3 audio with JOC) */ ++ case GST_MAKE_FOURCC ('e', 'c', '-', '3'): ++ /* ETSI TS 102 366 v1.4.1 - Digital Audio Compression (AC-3, Enhanced AC-3) Standard, Annex F */ ++ caps = gst_caps_new_empty_simple ("audio/x-eac3"); ++ break; ++ case GST_MAKE_FOURCC ('s', 't', 'p', 'p'): ++ /* IMSC1-conformant TTM XML */ ++ caps = gst_caps_new_empty_simple ("application/ttml+xml"); ++ break; ++ case GST_MAKE_FOURCC ('w', 'v', 't', 't'): ++ /* WebVTT subtitles */ ++ caps = gst_caps_new_empty_simple ("application/x-subtitle-vtt"); ++ break; ++ case GST_MAKE_FOURCC ('v', 'p', '0', '8'): ++ /* VP8 */ ++ caps = gst_caps_new_empty_simple ("video/x-vp8"); ++ break; ++ case GST_MAKE_FOURCC ('v', 'p', '0', '9'): ++ /* VP9 */ ++ caps = gst_caps_new_empty_simple ("video/x-vp9"); ++ break; ++ case GST_MAKE_FOURCC ('a', 'v', '0', '1'): ++ /* AV1 */ ++ caps = gst_caps_new_empty_simple ("video/x-av1"); ++ break; ++ case GST_MAKE_FOURCC ('o', 'p', 'u', 's'): ++ /* Opus */ ++ caps = gst_caps_new_empty_simple ("audio/x-opus"); ++ break; ++ case GST_MAKE_FOURCC ('u', 'l', 'a', 'w'): ++ /* ulaw */ ++ caps = gst_caps_new_empty_simple ("audio/x-mulaw"); ++ break; ++ case GST_MAKE_FOURCC ('g', '7', '2', '6'): ++ /* ulaw */ ++ caps = ++ gst_caps_new_simple ("audio/x-adpcm", "layout", G_TYPE_STRING, "g726", ++ NULL); ++ break; ++ default: ++ GST_WARNING ("Unknown codec '%s' please file a bug", codec); ++ break; ++ } ++ ++beach: ++ if (subcodec != NULL) ++ g_strfreev (subcodec); ++ return caps; ++} ++ ++/** ++ * gst_codec_utils_caps_from_mime_codec: ++ * @codecs_field: A mime codec string field ++ * ++ * Converts a RFC 6381 compatible codec string to #GstCaps. More than one codec ++ * string can be present (separated by `,`). 
++ * ++ * Registered codecs can be found at http://mp4ra.org/#/codecs ++ * ++ * Returns: (transfer full) (nullable): The corresponding #GstCaps or %NULL ++ * ++ * Since: 1.22 ++ */ ++GstCaps * ++gst_codec_utils_caps_from_mime_codec (const gchar * codecs_field) ++{ ++ gchar **codecs = NULL; ++ GstCaps *caps = NULL; ++ guint i; ++ ++ g_return_val_if_fail (codecs_field != NULL, NULL); ++ ++ GST_LOG ("codecs_field '%s'", codecs_field); ++ ++ codecs = g_strsplit (codecs_field, ",", 0); ++ if (codecs == NULL) { ++ GST_WARNING ("Invalid 'codecs' field : '%s'", codecs_field); ++ goto beach; ++ } ++ ++ for (i = 0; codecs[i]; i++) { ++ const gchar *codec = codecs[i]; ++ if (caps == NULL) ++ caps = gst_codec_utils_caps_from_mime_codec_single (codec); ++ else ++ gst_caps_append (caps, ++ gst_codec_utils_caps_from_mime_codec_single (codec)); ++ } ++ ++beach: ++ g_strfreev (codecs); ++ GST_LOG ("caps %" GST_PTR_FORMAT, caps); ++ return caps; ++} +diff --git a/gst-libs/gst/pbutils/codec-utils.h b/gst-libs/gst/pbutils/codec-utils.h +index 2d3dc0c96..43398c4ea 100644 +--- a/gst-libs/gst/pbutils/codec-utils.h ++++ b/gst-libs/gst/pbutils/codec-utils.h +@@ -68,6 +68,13 @@ gboolean gst_codec_utils_h264_caps_set_level_and_profile (GstCaps * ca + const guint8 * sps, + guint len); + ++GST_PBUTILS_API ++gboolean gst_codec_utils_h264_get_profile_flags_level (const guint8 * codec_data, ++ guint len, ++ guint8 * profile, ++ guint8 * flags, ++ guint8 * level); ++ + /* H.265 */ + + GST_PBUTILS_API +@@ -145,6 +152,13 @@ gboolean gst_codec_utils_opus_parse_header (GstBuffer * header, + guint16 * pre_skip, + gint16 * output_gain); + ++/* General */ ++GST_PBUTILS_API ++gchar * gst_codec_utils_caps_get_mime_codec (GstCaps * caps); ++ ++GST_PBUTILS_API ++GstCaps * gst_codec_utils_caps_from_mime_codec (const gchar *codecs_field); ++ + G_END_DECLS + + #endif /* __GST_PB_UTILS_CODEC_UTILS_H__ */ +diff --git a/gst-libs/gst/pbutils/descriptions.c b/gst-libs/gst/pbutils/descriptions.c +index d79a6cc67..e8a8827e9 100644 +--- a/gst-libs/gst/pbutils/descriptions.c ++++ b/gst-libs/gst/pbutils/descriptions.c +@@ -37,7 +37,7 @@ + # include "config.h" + #endif + +-#include "gst/gst-i18n-plugin.h" ++#include + + #include + #include +@@ -56,7 +56,8 @@ typedef enum + FLAG_IMAGE = (1 << 4), /* format is an image format, or image container/tag */ + FLAG_SUB = (1 << 5), /* format is a subtitle format, or subtitle container */ + FLAG_TAG = (1 << 6), /* format is a tag/container */ +- FLAG_GENERIC = (1 << 7) /* format is a generic container (e.g. multipart) */ ++ FLAG_GENERIC = (1 << 7), /* format is a generic container (e.g. 
multipart) */ ++ FLAG_METADATA = (1 << 8), /* format is a metadata format, or metadata container/tag */ + } FormatFlags; + + typedef struct +@@ -151,6 +152,7 @@ static const FormatInfo formats[] = { + {"audio/x-mod", "Module Music Format (MOD)", FLAG_AUDIO, "mod"}, + {"audio/x-mulaw", "Mu-Law", FLAG_AUDIO, ""}, + {"audio/x-musepack", "Musepack (MPC)", FLAG_AUDIO, "mpc"}, ++ {"audio/x-ffmpeg-parsed-musepack", "Musepack (MPC)", FLAG_AUDIO, "mpc"}, + {"audio/x-nellymoser", "Nellymoser Asao", FLAG_AUDIO, ""}, + {"audio/x-nist", "Sphere NIST", FLAG_AUDIO, ""}, + {"audio/x-nsf", "Nintendo NSF", FLAG_AUDIO, ""}, +@@ -330,7 +332,10 @@ static const FormatInfo formats[] = { + {"video/x-svq", NULL, FLAG_VIDEO, ""}, + {"video/x-wmv", NULL, FLAG_VIDEO, ""}, + {"video/x-xan", NULL, FLAG_VIDEO, ""}, +- {"video/x-tscc", NULL, FLAG_VIDEO, ""} ++ {"video/x-tscc", NULL, FLAG_VIDEO, ""}, ++ /* metadata */ ++ {"application/x-onvif-metadata", "ONVIF Timed Metadata", FLAG_METADATA, ""}, ++ {"meta/x-klv", "KLV Metadata", FLAG_METADATA, ""}, + }; + + static const gchar * +@@ -466,8 +471,8 @@ format_info_get_desc (const FormatInfo * info, const GstCaps * caps) + const gchar *subs; + gint w_sub, h_sub, n_semi; + +- w_sub = GST_VIDEO_FORMAT_INFO_W_SUB (finfo, 1); +- h_sub = GST_VIDEO_FORMAT_INFO_H_SUB (finfo, 1); ++ w_sub = 1 << GST_VIDEO_FORMAT_INFO_W_SUB (finfo, 1); ++ h_sub = 1 << GST_VIDEO_FORMAT_INFO_H_SUB (finfo, 1); + + if (w_sub == 1 && h_sub == 1) { + subs = "4:4:4"; +@@ -649,7 +654,7 @@ format_info_get_desc (const FormatInfo * info, const GstCaps * caps) + case 1: + case 2: + case 3: +- if (str && strncmp (str, "MSS", 3)) { ++ if (str && !strncmp (str, "MSS", 3)) { + return g_strdup_printf ("Windows Media Video %d Screen", ver + 6); + } else { + return g_strdup_printf ("Windows Media Video %d", ver + 6); +@@ -921,13 +926,13 @@ caps_are_rtp_caps (const GstCaps * caps, const gchar * media, gchar ** format) + * + * Returns a localised string describing a source element handling the protocol + * specified in @protocol, for use in error dialogs or other messages to be +- * seen by the user. Should never return NULL unless @protocol is invalid. ++ * seen by the user. + * + * This function is mainly for internal use, applications would typically + * use gst_missing_plugin_message_get_description() to get a description of + * a missing feature from a missing-plugin message. + * +- * Returns: a newly-allocated description string, or NULL on error. Free ++ * Returns: a newly-allocated description string. Free + * string with g_free() when not needed any longer. + */ + gchar * +@@ -971,13 +976,13 @@ gst_pb_utils_get_source_description (const gchar * protocol) + * + * Returns a localised string describing a sink element handling the protocol + * specified in @protocol, for use in error dialogs or other messages to be +- * seen by the user. Should never return NULL unless @protocol is invalid. ++ * seen by the user. + * + * This function is mainly for internal use, applications would typically + * use gst_missing_plugin_message_get_description() to get a description of + * a missing feature from a missing-plugin message. + * +- * Returns: a newly-allocated description string, or NULL on error. Free ++ * Returns: a newly-allocated description string. Free + * string with g_free() when not needed any longer. 
+ */ + gchar * +@@ -1006,13 +1011,12 @@ gst_pb_utils_get_sink_description (const gchar * protocol) + * + * Returns a localised string describing an decoder for the format specified + * in @caps, for use in error dialogs or other messages to be seen by the user. +- * Should never return NULL unless @factory_name or @caps are invalid. + * + * This function is mainly for internal use, applications would typically + * use gst_missing_plugin_message_get_description() to get a description of + * a missing feature from a missing-plugin message. + * +- * Returns: a newly-allocated description string, or NULL on error. Free ++ * Returns: a newly-allocated description string. Free + * string with g_free() when not needed any longer. + */ + gchar * +@@ -1061,13 +1065,12 @@ gst_pb_utils_get_decoder_description (const GstCaps * caps) + * + * Returns a localised string describing an encoder for the format specified + * in @caps, for use in error dialogs or other messages to be seen by the user. +- * Should never return NULL unless @factory_name or @caps are invalid. + * + * This function is mainly for internal use, applications would typically + * use gst_missing_plugin_message_get_description() to get a description of + * a missing feature from a missing-plugin message. + * +- * Returns: a newly-allocated description string, or NULL on error. Free ++ * Returns: a newly-allocated description string. Free + * string with g_free() when not needed any longer. + */ + gchar * +@@ -1112,14 +1115,13 @@ gst_pb_utils_get_encoder_description (const GstCaps * caps) + * @factory_name: the name of the element, e.g. "giosrc" + * + * Returns a localised string describing the given element, for use in +- * error dialogs or other messages to be seen by the user. Should never +- * return NULL unless @factory_name is invalid. ++ * error dialogs or other messages to be seen by the user. + * + * This function is mainly for internal use, applications would typically + * use gst_missing_plugin_message_get_description() to get a description of + * a missing feature from a missing-plugin message. + * +- * Returns: a newly-allocated description string, or NULL on error. Free ++ * Returns: a newly-allocated description string. Free + * string with g_free() when not needed any longer. + */ + gchar * +@@ -1141,7 +1143,7 @@ gst_pb_utils_get_element_description (const gchar * factory_name) + /** + * gst_pb_utils_add_codec_description_to_tag_list: + * @taglist: a #GstTagList +- * @codec_tag: (allow-none): a GStreamer codec tag such as #GST_TAG_AUDIO_CODEC, ++ * @codec_tag: (nullable): a GStreamer codec tag such as #GST_TAG_AUDIO_CODEC, + * #GST_TAG_VIDEO_CODEC or #GST_TAG_CODEC. If none is specified, + * the function will attempt to detect the appropriate category. + * @caps: the (fixed) #GstCaps for which a codec tag should be added. +@@ -1200,7 +1202,7 @@ gst_pb_utils_add_codec_description_to_tag_list (GstTagList * taglist, + * Also see the convenience function + * gst_pb_utils_add_codec_description_to_tag_list(). + * +- * Returns: a newly-allocated description string, or NULL on error. Free ++ * Returns: (nullable): a newly-allocated description string, or NULL on error. Free + * string with g_free() when not needed any longer. 
+ */ + gchar * +@@ -1287,6 +1289,75 @@ pb_utils_get_file_extension_from_caps (const GstCaps * caps) + return ext; + } + ++/** ++ * gst_pb_utils_get_file_extension_from_caps: ++ * @caps: the (fixed) #GstCaps for which a file extension is needed ++ * ++ * Returns a possible file extension for the given caps, if known. ++ * ++ * Returns: (nullable): a newly-allocated file extension string, or NULL on error. Free ++ * string with g_free() when not needed any longer. ++ * ++ * Since: 1.20 ++ */ ++gchar * ++gst_pb_utils_get_file_extension_from_caps (const GstCaps * caps) ++{ ++ const gchar *extension = pb_utils_get_file_extension_from_caps (caps); ++ return extension ? g_strdup (extension) : NULL; ++} ++ ++/** ++ * gst_pb_utils_get_caps_description_flags: ++ * @caps: the (fixed) #GstCaps for which flags are requested ++ * ++ * Returns flags that describe the format of the caps if known. No flags are ++ * set for unknown caps. ++ * ++ * Returns: #GstPbUtilsCapsDescriptionFlags that describe @caps, or no flags ++ * if the caps are unknown. ++ * ++ * Since: 1.20 ++ */ ++GstPbUtilsCapsDescriptionFlags ++gst_pb_utils_get_caps_description_flags (const GstCaps * caps) ++{ ++ GstCaps *tmp; ++ const FormatInfo *info; ++ GstPbUtilsCapsDescriptionFlags flags = 0; ++ ++ g_return_val_if_fail (caps != NULL, 0); ++ g_return_val_if_fail (GST_IS_CAPS (caps), 0); ++ tmp = copy_and_clean_caps (caps); ++ g_return_val_if_fail (gst_caps_is_fixed (tmp), 0); ++ ++ info = find_format_info (tmp); ++ /* A separate flags type is used because internally more flags are needed ++ * for filtering purposes, e.g. the SYSTEMSTREAM flag */ ++ if (info) { ++ if ((info->flags & FLAG_CONTAINER)) ++ flags |= GST_PBUTILS_CAPS_DESCRIPTION_FLAG_CONTAINER; ++ if ((info->flags & FLAG_AUDIO)) ++ flags |= GST_PBUTILS_CAPS_DESCRIPTION_FLAG_AUDIO; ++ if ((info->flags & FLAG_VIDEO)) ++ flags |= GST_PBUTILS_CAPS_DESCRIPTION_FLAG_VIDEO; ++ if ((info->flags & FLAG_IMAGE)) ++ flags |= GST_PBUTILS_CAPS_DESCRIPTION_FLAG_IMAGE; ++ if ((info->flags & FLAG_SUB)) ++ flags |= GST_PBUTILS_CAPS_DESCRIPTION_FLAG_SUBTITLE; ++ if ((info->flags & FLAG_TAG)) ++ flags |= GST_PBUTILS_CAPS_DESCRIPTION_FLAG_TAG; ++ if ((info->flags & FLAG_GENERIC)) ++ flags |= GST_PBUTILS_CAPS_DESCRIPTION_FLAG_GENERIC; ++ if ((info->flags & FLAG_METADATA)) ++ flags |= GST_PBUTILS_CAPS_DESCRIPTION_FLAG_METADATA; ++ } ++ ++ gst_caps_unref (tmp); ++ ++ return flags; ++} ++ + gboolean + pb_utils_is_tag (const GstCaps * caps) + { +diff --git a/gst-libs/gst/pbutils/descriptions.h b/gst-libs/gst/pbutils/descriptions.h +index c03f46abb..c1b7845f2 100644 +--- a/gst-libs/gst/pbutils/descriptions.h ++++ b/gst-libs/gst/pbutils/descriptions.h +@@ -26,6 +26,49 @@ + + G_BEGIN_DECLS + ++/** ++ * GstPbUtilsCapsDescriptionFlags: ++ * @GST_PBUTILS_CAPS_DESCRIPTION_FLAG_CONTAINER: Caps describe a container format. ++ * @GST_PBUTILS_CAPS_DESCRIPTION_FLAG_AUDIO: Caps describe an audio format, or a ++ * container format that can store audio. ++ * @GST_PBUTILS_CAPS_DESCRIPTION_FLAG_VIDEO: Caps describe an video format, or a ++ * container format that can store video. ++ * @GST_PBUTILS_CAPS_DESCRIPTION_FLAG_IMAGE: Caps describe an image format, or a ++ * container format that can store image. ++ * @GST_PBUTILS_CAPS_DESCRIPTION_FLAG_SUBTITLE: Caps describe an subtitle format, or a ++ * container format that can store subtitles. ++ * @GST_PBUTILS_CAPS_DESCRIPTION_FLAG_TAG: Container format is a tags container. 
++ * @GST_PBUTILS_CAPS_DESCRIPTION_FLAG_GENERIC: Container format can store any kind of ++ * stream type. ++ * @GST_PBUTILS_CAPS_DESCRIPTION_FLAG_METADATA: Caps describe a metadata ++ * format, or a container format that can store metadata. ++ * ++ * Flags that are returned by gst_pb_utils_get_caps_description_flags() and ++ * describe the format of the caps. ++ * ++ * Since: 1.20 ++ */ ++typedef enum { ++ GST_PBUTILS_CAPS_DESCRIPTION_FLAG_CONTAINER = 1 << 0, ++ GST_PBUTILS_CAPS_DESCRIPTION_FLAG_AUDIO = 1 << 1, ++ GST_PBUTILS_CAPS_DESCRIPTION_FLAG_VIDEO = 1 << 2, ++ GST_PBUTILS_CAPS_DESCRIPTION_FLAG_IMAGE = 1 << 3, ++ GST_PBUTILS_CAPS_DESCRIPTION_FLAG_SUBTITLE = 1 << 4, ++ GST_PBUTILS_CAPS_DESCRIPTION_FLAG_TAG = 1 << 5, ++ GST_PBUTILS_CAPS_DESCRIPTION_FLAG_GENERIC = 1 << 6, ++ ++ /** ++ * GST_PBUTILS_CAPS_DESCRIPTION_FLAG_METADATA: ++ * ++ * Caps describe a metadata format, or a container format that can store ++ * metadata. ++ * ++ * Since: 1.22 ++ */ ++ ++ GST_PBUTILS_CAPS_DESCRIPTION_FLAG_METADATA = 1 << 7, ++} GstPbUtilsCapsDescriptionFlags; ++ + /* + * functions for use by demuxers or decoders to add CODEC tags to tag lists + * from caps +@@ -59,6 +102,11 @@ gchar * gst_pb_utils_get_encoder_description (const GstCaps * caps); + GST_PBUTILS_API + gchar * gst_pb_utils_get_element_description (const gchar * factory_name); + ++GST_PBUTILS_API ++GstPbUtilsCapsDescriptionFlags gst_pb_utils_get_caps_description_flags (const GstCaps * caps); ++ ++GST_PBUTILS_API ++gchar * gst_pb_utils_get_file_extension_from_caps (const GstCaps *caps); + + G_END_DECLS + +diff --git a/gst-libs/gst/pbutils/pbutils-private.h b/gst-libs/gst/pbutils/pbutils-private.h +index 856b63f61..4a10167b9 100644 +--- a/gst-libs/gst/pbutils/pbutils-private.h ++++ b/gst-libs/gst/pbutils/pbutils-private.h +@@ -31,12 +31,14 @@ struct _GstDiscovererStreamInfo { + GstToc *toc; + gchar *stream_id; + GstStructure *misc; ++ gint stream_number; + }; + + struct _GstDiscovererContainerInfo { + GstDiscovererStreamInfo parent; + + GList *streams; ++ GstTagList *tags; + }; + + struct _GstDiscovererAudioInfo { +@@ -96,6 +98,8 @@ struct _GstDiscovererInfo { + gboolean seekable; + GPtrArray *missing_elements_details; + ++ gint stream_count; ++ + gchar *cachefile; + gpointer from_cache; + }; +diff --git a/gst-libs/gst/pbutils/pbutils.c b/gst-libs/gst/pbutils/pbutils.c +index 8115de356..a6408d82f 100644 +--- a/gst-libs/gst/pbutils/pbutils.c ++++ b/gst-libs/gst/pbutils/pbutils.c +@@ -59,7 +59,27 @@ + #include "pbutils.h" + #include "pbutils-private.h" + +-#include "gst/gst-i18n-plugin.h" ++#include ++ ++#ifndef GST_DISABLE_GST_DEBUG ++#define GST_CAT_DEFAULT gst_pb_utils_ensure_debug_category() ++ ++static GstDebugCategory * ++gst_pb_utils_ensure_debug_category (void) ++{ ++ static gsize cat_gonce = 0; ++ ++ if (g_once_init_enter (&cat_gonce)) { ++ GstDebugCategory *cat = NULL; ++ ++ GST_DEBUG_CATEGORY_INIT (cat, "pbutils", 0, "GStreamer Plugins Base utils"); ++ ++ g_once_init_leave (&cat_gonce, (gsize) cat); ++ } ++ ++ return (GstDebugCategory *) cat_gonce; ++} ++#endif /* GST_DISABLE_GST_DEBUG */ + + static gpointer + _init_locale_text_domain (gpointer data) +diff --git a/gst-libs/gst/rtp/gstrtcpbuffer.c b/gst-libs/gst/rtp/gstrtcpbuffer.c +index 29dfd82a0..dea2a7933 100644 +--- a/gst-libs/gst/rtp/gstrtcpbuffer.c ++++ b/gst-libs/gst/rtp/gstrtcpbuffer.c +@@ -84,7 +84,7 @@ gst_rtcp_buffer_new_take_data (gpointer data, guint len) + GstBuffer * + gst_rtcp_buffer_new_copy_data (gconstpointer data, guint len) + { +- return 
gst_rtcp_buffer_new_take_data (g_memdup (data, len), len); ++ return gst_rtcp_buffer_new_take_data (g_memdup2 (data, len), len); + } + + static gboolean +@@ -109,8 +109,7 @@ gst_rtcp_buffer_validate_data_internal (guint8 * data, guint len, + if (G_UNLIKELY (header_mask != GST_RTCP_VALID_VALUE)) + goto wrong_mask; + +- /* no padding when mask succeeds */ +- padding = FALSE; ++ padding = data[0] & 0x20; + + /* store len */ + data_len = len; +@@ -129,7 +128,7 @@ gst_rtcp_buffer_validate_data_internal (guint8 * data, guint len, + if (data_len < 4) + break; + +- /* padding only allowed on last packet */ ++ /* Version already checked for first packet through mask */ + if (padding) + break; + +@@ -1213,7 +1212,7 @@ gst_rtcp_packet_copy_profile_specific_ext (GstRTCPPacket * packet, + if (data != NULL) { + guint8 *ptr = packet->rtcp->map.data + packet->offset; + ptr += ((packet->length + 1 - pse_len) * sizeof (guint32)); +- *data = g_memdup (ptr, pse_len * sizeof (guint32)); ++ *data = g_memdup2 (ptr, pse_len * sizeof (guint32)); + } + + return TRUE; +@@ -1864,7 +1863,7 @@ gst_rtcp_packet_bye_get_reason_len (GstRTCPPacket * packet) + * + * Get the reason in @packet. + * +- * Returns: The reason for the BYE @packet or NULL if the packet did not contain ++ * Returns: (nullable): The reason for the BYE @packet or NULL if the packet did not contain + * a reason string. The string must be freed with g_free() after usage. + */ + gchar * +@@ -2211,6 +2210,27 @@ gst_rtcp_sdes_type_to_name (GstRTCPSDESType type) + case GST_RTCP_SDES_PRIV: + result = "priv"; + break; ++ case GST_RTCP_SDES_H323_CADDR: ++ result = "h323-caddr"; ++ break; ++ case GST_RTCP_SDES_APSI: ++ result = "apsi"; ++ break; ++ case GST_RTCP_SDES_RGRP: ++ result = "rgrp"; ++ break; ++ case GST_RTCP_SDES_REPAIRED_RTP_STREAM_ID: ++ result = "repaired-rtp-stream-id"; ++ break; ++ case GST_RTCP_SDES_CCID: ++ result = "ccid"; ++ break; ++ case GST_RTCP_SDES_RTP_STREAM_ID: ++ result = "rtp-stream-id"; ++ break; ++ case GST_RTCP_SDES_MID: ++ result = "mid"; ++ break; + default: + result = NULL; + break; +@@ -2255,6 +2275,27 @@ gst_rtcp_sdes_name_to_type (const gchar * name) + if (strcmp ("note", name) == 0) + return GST_RTCP_SDES_NOTE; + ++ if (strcmp ("h323-caddr", name) == 0) ++ return GST_RTCP_SDES_H323_CADDR; ++ ++ if (strcmp ("apsi", name) == 0) ++ return GST_RTCP_SDES_APSI; ++ ++ if (strcmp ("rgrp", name) == 0) ++ return GST_RTCP_SDES_RGRP; ++ ++ if (strcmp ("rtp-stream-id", name) == 0) ++ return GST_RTCP_SDES_RTP_STREAM_ID; ++ ++ if (strcmp ("repaired-rtp-stream-id", name) == 0) ++ return GST_RTCP_SDES_REPAIRED_RTP_STREAM_ID; ++ ++ if (strcmp ("ccid", name) == 0) ++ return GST_RTCP_SDES_CCID; ++ ++ if (strcmp ("mid", name) == 0) ++ return GST_RTCP_SDES_MID; ++ + return GST_RTCP_SDES_PRIV; + } + +diff --git a/gst-libs/gst/rtp/gstrtcpbuffer.h b/gst-libs/gst/rtp/gstrtcpbuffer.h +index 32291fa9d..b6410a5a1 100644 +--- a/gst-libs/gst/rtp/gstrtcpbuffer.h ++++ b/gst-libs/gst/rtp/gstrtcpbuffer.h +@@ -126,18 +126,74 @@ typedef enum + * + * Different types of SDES content. + */ ++/** ++ * GST_RTCP_SDES_H323_CADDR: ++ * ++ * H.323 callable address ++ * ++ * Since: 1.20: ++ */ ++/** ++ * GST_RTCP_SDES_APSI: ++ * ++ * Application Specific Identifier (RFC6776) ++ * ++ * Since: 1.20: ++ */ ++/** ++ * GST_RTCP_SDES_RGRP: ++ * ++ * Reporting Group Identifier (RFC8861) ++ * ++ * Since: 1.20: ++ */ ++/** ++ * GST_RTCP_SDES_RTP_STREAM_ID: ++ * ++ * RtpStreamId SDES item (RFC8852). 
++ * ++ * Since: 1.20: ++ */ ++/** ++ * GST_RTCP_SDES_REPAIRED_RTP_STREAM_ID: ++ * ++ * RepairedRtpStreamId SDES item (RFC8852). ++ * ++ * Since: 1.20: ++ */ ++/** ++ * GST_RTCP_SDES_CCID: ++ * ++ * CLUE CaptId (RFC8849) ++ * ++ * Since: 1.20: ++ */ ++/** ++ * GST_RTCP_SDES_MID: ++ * ++ * MID SDES item (RFC8843). ++ * ++ * Since: 1.20: ++ */ + typedef enum + { +- GST_RTCP_SDES_INVALID = -1, +- GST_RTCP_SDES_END = 0, +- GST_RTCP_SDES_CNAME = 1, +- GST_RTCP_SDES_NAME = 2, +- GST_RTCP_SDES_EMAIL = 3, +- GST_RTCP_SDES_PHONE = 4, +- GST_RTCP_SDES_LOC = 5, +- GST_RTCP_SDES_TOOL = 6, +- GST_RTCP_SDES_NOTE = 7, +- GST_RTCP_SDES_PRIV = 8 ++ GST_RTCP_SDES_INVALID = -1, ++ GST_RTCP_SDES_END = 0, ++ GST_RTCP_SDES_CNAME = 1, ++ GST_RTCP_SDES_NAME = 2, ++ GST_RTCP_SDES_EMAIL = 3, ++ GST_RTCP_SDES_PHONE = 4, ++ GST_RTCP_SDES_LOC = 5, ++ GST_RTCP_SDES_TOOL = 6, ++ GST_RTCP_SDES_NOTE = 7, ++ GST_RTCP_SDES_PRIV = 8, ++ GST_RTCP_SDES_H323_CADDR = 9, ++ GST_RTCP_SDES_APSI = 10, ++ GST_RTCP_SDES_RGRP = 11, ++ GST_RTCP_SDES_RTP_STREAM_ID = 12, ++ GST_RTCP_SDES_REPAIRED_RTP_STREAM_ID = 13, ++ GST_RTCP_SDES_CCID = 14, ++ GST_RTCP_SDES_MID = 15, + } GstRTCPSDESType; + + /** +@@ -206,10 +262,10 @@ typedef enum + /** + * GST_RTCP_REDUCED_SIZE_VALID_MASK: + * +- * Mask for version, padding bit and packet type pair allowing reduced size ++ * Mask for version and packet type pair allowing reduced size + * packets, basically it accepts other types than RR and SR + */ +-#define GST_RTCP_REDUCED_SIZE_VALID_MASK (0xc000 | 0x2000 | 0xf8) ++#define GST_RTCP_REDUCED_SIZE_VALID_MASK (0xc000 | 0xf8) + + /** + * GST_RTCP_VALID_VALUE: +diff --git a/gst-libs/gst/rtp/gstrtpbasedepayload.c b/gst-libs/gst/rtp/gstrtpbasedepayload.c +index 46b75b920..0fd2d827a 100644 +--- a/gst-libs/gst/rtp/gstrtpbasedepayload.c ++++ b/gst-libs/gst/rtp/gstrtpbasedepayload.c +@@ -31,10 +31,14 @@ + + #include "gstrtpbasedepayload.h" + #include "gstrtpmeta.h" ++#include "gstrtphdrext.h" + + GST_DEBUG_CATEGORY_STATIC (rtpbasedepayload_debug); + #define GST_CAT_DEFAULT (rtpbasedepayload_debug) + ++static GstStaticCaps ntp_reference_timestamp_caps = ++GST_STATIC_CAPS ("timestamp/x-ntp"); ++ + struct _GstRTPBaseDepayloadPrivate + { + GstClockTime npt_start; +@@ -49,11 +53,14 @@ struct _GstRTPBaseDepayloadPrivate + GstClockTime dts; + GstClockTime duration; + ++ GstClockTime ref_ts; ++ + guint32 last_ssrc; + guint32 last_seqnum; + guint32 last_rtptime; + guint32 next_seqnum; + gint max_reorder; ++ gboolean auto_hdr_ext; + + gboolean negotiated; + +@@ -65,17 +72,26 @@ struct _GstRTPBaseDepayloadPrivate + GstBuffer *input_buffer; + + GstFlowReturn process_flow_ret; ++ ++ /* array of GstRTPHeaderExtension's * */ ++ GPtrArray *header_exts; + }; + + /* Filter signals and args */ + enum + { +- /* FILL ME */ ++ SIGNAL_0, ++ SIGNAL_REQUEST_EXTENSION, ++ SIGNAL_ADD_EXTENSION, ++ SIGNAL_CLEAR_EXTENSIONS, + LAST_SIGNAL + }; + ++static guint gst_rtp_base_depayload_signals[LAST_SIGNAL] = { 0 }; ++ + #define DEFAULT_SOURCE_INFO FALSE + #define DEFAULT_MAX_REORDER 100 ++#define DEFAULT_AUTO_HEADER_EXTENSION TRUE + + enum + { +@@ -83,6 +99,7 @@ enum + PROP_STATS, + PROP_SOURCE_INFO, + PROP_MAX_REORDER, ++ PROP_AUTO_HEADER_EXTENSION, + PROP_LAST + }; + +@@ -117,6 +134,11 @@ static void gst_rtp_base_depayload_init (GstRTPBaseDepayload * rtpbasepayload, + static GstEvent *create_segment_event (GstRTPBaseDepayload * filter, + guint rtptime, GstClockTime position); + ++static void gst_rtp_base_depayload_add_extension (GstRTPBaseDepayload * ++ rtpbasepayload, GstRTPHeaderExtension 
* ext); ++static void gst_rtp_base_depayload_clear_extensions (GstRTPBaseDepayload * ++ rtpbasepayload); ++ + GType + gst_rtp_base_depayload_get_type (void) + { +@@ -154,6 +176,45 @@ gst_rtp_base_depayload_get_instance_private (GstRTPBaseDepayload * self) + return (G_STRUCT_MEMBER_P (self, private_offset)); + } + ++static GstRTPHeaderExtension * ++gst_rtp_base_depayload_request_extension_default (GstRTPBaseDepayload * ++ depayload, guint ext_id, const gchar * uri) ++{ ++ GstRTPHeaderExtension *ext = NULL; ++ ++ if (!depayload->priv->auto_hdr_ext) ++ return NULL; ++ ++ ext = gst_rtp_header_extension_create_from_uri (uri); ++ if (ext) { ++ GST_DEBUG_OBJECT (depayload, ++ "Automatically enabled extension %s for uri \'%s\'", ++ GST_ELEMENT_NAME (ext), uri); ++ ++ gst_rtp_header_extension_set_id (ext, ext_id); ++ } else { ++ GST_DEBUG_OBJECT (depayload, ++ "Didn't find any extension implementing uri \'%s\'", uri); ++ } ++ ++ return ext; ++} ++ ++static gboolean ++extension_accumulator (GSignalInvocationHint * ihint, ++ GValue * return_accu, const GValue * handler_return, gpointer data) ++{ ++ gpointer ext; ++ ++ /* Call default handler if user callback didn't create the extension */ ++ ext = g_value_get_object (handler_return); ++ if (!ext) ++ return TRUE; ++ ++ g_value_set_object (return_accu, ext); ++ return FALSE; ++} ++ + static void + gst_rtp_base_depayload_class_init (GstRTPBaseDepayloadClass * klass) + { +@@ -223,6 +284,73 @@ gst_rtp_base_depayload_class_init (GstRTPBaseDepayloadClass * klass) + "Max seqnum reorder before assuming sender has restarted", + 0, G_MAXINT, DEFAULT_MAX_REORDER, G_PARAM_READWRITE)); + ++ /** ++ * GstRTPBaseDepayload:auto-header-extension: ++ * ++ * If enabled, the depayloader will automatically try to enable all the ++ * RTP header extensions provided in the sink caps, saving the application ++ * the need to handle these extensions manually using the ++ * GstRTPBaseDepayload::request-extension: signal. ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (G_OBJECT_CLASS (klass), ++ PROP_AUTO_HEADER_EXTENSION, g_param_spec_boolean ("auto-header-extension", ++ "Automatic RTP header extension", ++ "Whether RTP header extensions should be automatically enabled, if an implementation is available", ++ DEFAULT_AUTO_HEADER_EXTENSION, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstRTPBaseDepayload::request-extension: ++ * @object: the #GstRTPBaseDepayload ++ * @ext_id: the extension id being requested ++ * @ext_uri: (nullable): the extension URI being requested ++ * ++ * The returned @ext must be configured with the correct @ext_id and with the ++ * necessary attributes as required by the extension implementation. ++ * ++ * Returns: (transfer full) (nullable): the #GstRTPHeaderExtension for @ext_id, or %NULL ++ * ++ * Since: 1.20 ++ */ ++ gst_rtp_base_depayload_signals[SIGNAL_REQUEST_EXTENSION] = ++ g_signal_new_class_handler ("request-extension", ++ G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST, ++ G_CALLBACK (gst_rtp_base_depayload_request_extension_default), ++ extension_accumulator, NULL, NULL, ++ GST_TYPE_RTP_HEADER_EXTENSION, 2, G_TYPE_UINT, G_TYPE_STRING); ++ ++ /** ++ * GstRTPBaseDepayload::add-extension: ++ * @object: the #GstRTPBaseDepayload ++ * @ext: (transfer full): the #GstRTPHeaderExtension ++ * ++ * Add @ext as an extension for reading part of an RTP header extension from ++ * incoming RTP packets. 
++ * ++ * Since: 1.20 ++ */ ++ gst_rtp_base_depayload_signals[SIGNAL_ADD_EXTENSION] = ++ g_signal_new_class_handler ("add-extension", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, ++ G_CALLBACK (gst_rtp_base_depayload_add_extension), NULL, NULL, NULL, ++ G_TYPE_NONE, 1, GST_TYPE_RTP_HEADER_EXTENSION); ++ ++ /** ++ * GstRTPBaseDepayload::clear-extensions: ++ * @object: the #GstRTPBaseDepayload ++ * ++ * Clear all RTP header extensions used by this depayloader. ++ * ++ * Since: 1.20 ++ */ ++ gst_rtp_base_depayload_signals[SIGNAL_CLEAR_EXTENSIONS] = ++ g_signal_new_class_handler ("clear-extensions", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, ++ G_CALLBACK (gst_rtp_base_depayload_clear_extensions), NULL, NULL, NULL, ++ G_TYPE_NONE, 0); ++ + gstelement_class->change_state = gst_rtp_base_depayload_change_state; + + klass->packet_lost = gst_rtp_base_depayload_packet_lost; +@@ -272,24 +400,52 @@ gst_rtp_base_depayload_init (GstRTPBaseDepayload * filter, + priv->dts = -1; + priv->pts = -1; + priv->duration = -1; ++ priv->ref_ts = -1; + priv->source_info = DEFAULT_SOURCE_INFO; + priv->max_reorder = DEFAULT_MAX_REORDER; ++ priv->auto_hdr_ext = DEFAULT_AUTO_HEADER_EXTENSION; + + gst_segment_init (&filter->segment, GST_FORMAT_UNDEFINED); ++ ++ priv->header_exts = ++ g_ptr_array_new_with_free_func ((GDestroyNotify) gst_object_unref); + } + + static void + gst_rtp_base_depayload_finalize (GObject * object) + { ++ GstRTPBaseDepayload *rtpbasedepayload = GST_RTP_BASE_DEPAYLOAD (object); ++ ++ g_ptr_array_unref (rtpbasedepayload->priv->header_exts); ++ rtpbasedepayload->priv->header_exts = NULL; ++ + G_OBJECT_CLASS (parent_class)->finalize (object); + } + ++static void ++add_and_ref_item (GstRTPHeaderExtension * ext, GPtrArray * ret) ++{ ++ g_ptr_array_add (ret, gst_object_ref (ext)); ++} ++ ++static void ++remove_item_from (GstRTPHeaderExtension * ext, GPtrArray * ret) ++{ ++ g_ptr_array_remove_fast (ret, ext); ++} ++ ++static void ++add_item_to (GstRTPHeaderExtension * ext, GPtrArray * ret) ++{ ++ g_ptr_array_add (ret, ext); ++} ++ + static gboolean + gst_rtp_base_depayload_setcaps (GstRTPBaseDepayload * filter, GstCaps * caps) + { + GstRTPBaseDepayloadClass *bclass; + GstRTPBaseDepayloadPrivate *priv; +- gboolean res; ++ gboolean res = TRUE; + GstStructure *caps_struct; + const GValue *value; + +@@ -355,6 +511,140 @@ gst_rtp_base_depayload_setcaps (GstRTPBaseDepayload * filter, GstCaps * caps) + else + priv->clock_base = -1; + ++ { ++ /* ensure we have header extension implementations for the list in the ++ * caps */ ++ guint i, j, n_fields = gst_structure_n_fields (caps_struct); ++ GPtrArray *header_exts = g_ptr_array_new_with_free_func (gst_object_unref); ++ GPtrArray *to_add = g_ptr_array_new (); ++ GPtrArray *to_remove = g_ptr_array_new (); ++ ++ GST_OBJECT_LOCK (filter); ++ g_ptr_array_foreach (filter->priv->header_exts, ++ (GFunc) add_and_ref_item, header_exts); ++ GST_OBJECT_UNLOCK (filter); ++ ++ for (i = 0; i < n_fields; i++) { ++ const gchar *field_name = gst_structure_nth_field_name (caps_struct, i); ++ if (g_str_has_prefix (field_name, "extmap-")) { ++ const GValue *val; ++ const gchar *uri = NULL; ++ gchar *nptr; ++ guint ext_id; ++ GstRTPHeaderExtension *ext = NULL; ++ ++ errno = 0; ++ ext_id = g_ascii_strtoull (&field_name[strlen ("extmap-")], &nptr, 10); ++ if (errno != 0 || (ext_id == 0 && field_name == nptr)) { ++ GST_WARNING_OBJECT (filter, "could not parse id from %s", field_name); ++ res = FALSE; ++ goto ext_out; ++ } ++ ++ val = 
gst_structure_get_value (caps_struct, field_name); ++ if (G_VALUE_HOLDS_STRING (val)) { ++ uri = g_value_get_string (val); ++ } else if (GST_VALUE_HOLDS_ARRAY (val)) { ++ /* the uri is the second value in the array */ ++ const GValue *str = gst_value_array_get_value (val, 1); ++ if (G_VALUE_HOLDS_STRING (str)) { ++ uri = g_value_get_string (str); ++ } ++ } ++ ++ if (!uri) { ++ GST_WARNING_OBJECT (filter, "could not get extmap uri for " ++ "field %s", field_name); ++ res = FALSE; ++ goto ext_out; ++ } ++ ++ /* try to find if this extension mapping already exists */ ++ for (j = 0; j < header_exts->len; j++) { ++ ext = g_ptr_array_index (header_exts, j); ++ if (gst_rtp_header_extension_get_id (ext) == ext_id) { ++ if (g_strcmp0 (uri, gst_rtp_header_extension_get_uri (ext)) == 0) { ++ /* still matching, we're good, set attributes from caps in case ++ * the caps have changed */ ++ if (!gst_rtp_header_extension_set_attributes_from_caps (ext, ++ caps)) { ++ GST_WARNING_OBJECT (filter, ++ "Failed to configure rtp header " "extension %" ++ GST_PTR_FORMAT " attributes from caps %" GST_PTR_FORMAT, ++ ext, caps); ++ res = FALSE; ++ goto ext_out; ++ } ++ break; ++ } else { ++ GST_DEBUG_OBJECT (filter, "extension id %u" ++ "was replaced with a different extension uri " ++ "original:\'%s' vs \'%s\'", ext_id, ++ gst_rtp_header_extension_get_uri (ext), uri); ++ g_ptr_array_add (to_remove, ext); ++ ext = NULL; ++ break; ++ } ++ } else { ++ ext = NULL; ++ } ++ } ++ ++ /* if no extension, attempt to request one */ ++ if (!ext) { ++ GST_DEBUG_OBJECT (filter, "requesting extension for id %u" ++ " and uri %s", ext_id, uri); ++ g_signal_emit (filter, ++ gst_rtp_base_depayload_signals[SIGNAL_REQUEST_EXTENSION], 0, ++ ext_id, uri, &ext); ++ GST_DEBUG_OBJECT (filter, "request returned extension %p \'%s\' " ++ "for id %u and uri %s", ext, ++ ext ? 
GST_OBJECT_NAME (ext) : "", ext_id, uri); ++ ++ /* We require the caller to set the appropriate extension if it's required */ ++ if (ext && gst_rtp_header_extension_get_id (ext) != ext_id) { ++ g_warning ("\'request-extension\' signal provided an rtp header " ++ "extension for uri \'%s\' that does not match the requested " ++ "extension id %u", uri, ext_id); ++ gst_clear_object (&ext); ++ } ++ ++ if (ext && !gst_rtp_header_extension_set_attributes_from_caps (ext, ++ caps)) { ++ GST_WARNING_OBJECT (filter, ++ "Failed to configure rtp header " "extension %" ++ GST_PTR_FORMAT " attributes from caps %" GST_PTR_FORMAT, ++ ext, caps); ++ res = FALSE; ++ g_clear_object (&ext); ++ goto ext_out; ++ } ++ ++ if (ext) ++ g_ptr_array_add (to_add, ext); ++ } ++ } ++ } ++ ++ /* Note: we intentionally don't remove extensions that are not listed ++ * in caps */ ++ ++ GST_OBJECT_LOCK (filter); ++ g_ptr_array_foreach (to_remove, (GFunc) remove_item_from, ++ filter->priv->header_exts); ++ g_ptr_array_foreach (to_add, (GFunc) add_item_to, ++ filter->priv->header_exts); ++ GST_OBJECT_UNLOCK (filter); ++ ++ ext_out: ++ g_ptr_array_unref (to_add); ++ g_ptr_array_unref (to_remove); ++ g_ptr_array_unref (header_exts); ++ ++ if (!res) ++ return res; ++ } ++ + if (bclass->set_caps) { + res = bclass->set_caps (filter, caps); + if (!res) { +@@ -395,6 +685,8 @@ gst_rtp_base_depayload_handle_buffer (GstRTPBaseDepayload * filter, + gboolean discont, buf_discont; + gint gap; + GstRTPBuffer rtp = { NULL }; ++ GstReferenceTimestampMeta *meta; ++ GstCaps *ref_caps; + + priv = filter->priv; + priv->process_flow_ret = GST_FLOW_OK; +@@ -406,6 +698,21 @@ gst_rtp_base_depayload_handle_buffer (GstRTPBaseDepayload * filter, + if (G_UNLIKELY (!priv->negotiated)) + goto not_negotiated; + ++ /* Check for duplicate reference timestamp metadata */ ++ ref_caps = gst_static_caps_get (&ntp_reference_timestamp_caps); ++ meta = gst_buffer_get_reference_timestamp_meta (in, ref_caps); ++ gst_caps_unref (ref_caps); ++ if (meta) { ++ guint64 ref_ts = meta->timestamp; ++ if (ref_ts == priv->ref_ts) { ++ /* Drop the redundant/duplicate reference timestamp metadata */ ++ in = gst_buffer_make_writable (in); ++ gst_buffer_remove_meta (in, GST_META_CAST (meta)); ++ } else { ++ priv->ref_ts = ref_ts; ++ } ++ } ++ + if (G_UNLIKELY (!gst_rtp_buffer_map (in, GST_MAP_READ, &rtp))) + goto invalid_buffer; + +@@ -639,6 +946,7 @@ gst_rtp_base_depayload_handle_event (GstRTPBaseDepayload * filter, + + filter->need_newsegment = !filter->priv->onvif_mode; + filter->priv->next_seqnum = -1; ++ filter->priv->ref_ts = -1; + gst_event_replace (&filter->priv->segment_event, NULL); + break; + case GST_EVENT_CAPS: +@@ -834,30 +1142,170 @@ add_rtp_source_meta (GstBuffer * outbuf, GstBuffer * rtpbuf) + gst_rtp_buffer_unmap (&rtp); + } + ++static void ++gst_rtp_base_depayload_add_extension (GstRTPBaseDepayload * rtpbasepayload, ++ GstRTPHeaderExtension * ext) ++{ ++ g_return_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext)); ++ g_return_if_fail (gst_rtp_header_extension_get_id (ext) > 0); ++ ++ /* XXX: check for duplicate ids? 
*/ ++ GST_OBJECT_LOCK (rtpbasepayload); ++ g_ptr_array_add (rtpbasepayload->priv->header_exts, gst_object_ref (ext)); ++ GST_OBJECT_UNLOCK (rtpbasepayload); ++} ++ ++static void ++gst_rtp_base_depayload_clear_extensions (GstRTPBaseDepayload * rtpbasepayload) ++{ ++ GST_OBJECT_LOCK (rtpbasepayload); ++ g_ptr_array_set_size (rtpbasepayload->priv->header_exts, 0); ++ GST_OBJECT_UNLOCK (rtpbasepayload); ++} ++ ++static gboolean ++read_rtp_header_extensions (GstRTPBaseDepayload * depayload, ++ GstBuffer * input, GstBuffer * output) ++{ ++ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ guint16 bit_pattern; ++ guint8 *pdata; ++ guint wordlen; ++ gboolean needs_src_caps_update = FALSE; ++ ++ if (!input) { ++ GST_DEBUG_OBJECT (depayload, "no input buffer"); ++ return needs_src_caps_update; ++ } ++ ++ if (!gst_rtp_buffer_map (input, GST_MAP_READ, &rtp)) { ++ GST_WARNING_OBJECT (depayload, "Failed to map buffer"); ++ return needs_src_caps_update; ++ } ++ ++ if (gst_rtp_buffer_get_extension_data (&rtp, &bit_pattern, (gpointer) & pdata, ++ &wordlen)) { ++ GstRTPHeaderExtensionFlags ext_flags = 0; ++ gsize bytelen = wordlen * 4; ++ guint hdr_unit_bytes; ++ gsize offset = 0; ++ ++ if (bit_pattern == 0xBEDE) { ++ /* one byte extensions */ ++ hdr_unit_bytes = 1; ++ ext_flags |= GST_RTP_HEADER_EXTENSION_ONE_BYTE; ++ } else if (bit_pattern >> 4 == 0x100) { ++ /* two byte extensions */ ++ hdr_unit_bytes = 2; ++ ext_flags |= GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++ } else { ++ GST_DEBUG_OBJECT (depayload, "unknown extension bit pattern 0x%02x%02x", ++ bit_pattern >> 8, bit_pattern & 0xff); ++ goto out; ++ } ++ ++ while (TRUE) { ++ guint8 read_id, read_len; ++ GstRTPHeaderExtension *ext = NULL; ++ guint i; ++ ++ if (offset + hdr_unit_bytes >= bytelen) ++ /* not enough remaining data */ ++ break; ++ ++ if (ext_flags & GST_RTP_HEADER_EXTENSION_ONE_BYTE) { ++ read_id = GST_READ_UINT8 (pdata + offset) >> 4; ++ read_len = (GST_READ_UINT8 (pdata + offset) & 0x0F) + 1; ++ offset += 1; ++ ++ if (read_id == 0) ++ /* padding */ ++ continue; ++ ++ if (read_id == 15) ++ /* special id for possible future expansion */ ++ break; ++ } else { ++ read_id = GST_READ_UINT8 (pdata + offset); ++ offset += 1; ++ ++ if (read_id == 0) ++ /* padding */ ++ continue; ++ ++ read_len = GST_READ_UINT8 (pdata + offset); ++ offset += 1; ++ } ++ GST_TRACE_OBJECT (depayload, "found rtp header extension with id %u and " ++ "length %u", read_id, read_len); ++ ++ /* Ignore extension headers where the size does not fit */ ++ if (offset + read_len > bytelen) { ++ GST_WARNING_OBJECT (depayload, "Extension length extends past the " ++ "size of the extension data"); ++ break; ++ } ++ ++ GST_OBJECT_LOCK (depayload); ++ for (i = 0; i < depayload->priv->header_exts->len; i++) { ++ ext = g_ptr_array_index (depayload->priv->header_exts, i); ++ if (read_id == gst_rtp_header_extension_get_id (ext)) { ++ gst_object_ref (ext); ++ break; ++ } ++ ext = NULL; ++ } ++ ++ if (ext) { ++ if (!gst_rtp_header_extension_read (ext, ext_flags, &pdata[offset], ++ read_len, output)) { ++ GST_WARNING_OBJECT (depayload, "RTP header extension (%s) could " ++ "not read payloaded data", GST_OBJECT_NAME (ext)); ++ gst_object_unref (ext); ++ goto out; ++ } ++ ++ if (gst_rtp_header_extension_wants_update_non_rtp_src_caps (ext)) { ++ needs_src_caps_update = TRUE; ++ } ++ ++ gst_object_unref (ext); ++ } ++ GST_OBJECT_UNLOCK (depayload); ++ ++ offset += read_len; ++ } ++ } ++ ++out: ++ gst_rtp_buffer_unmap (&rtp); ++ ++ return needs_src_caps_update; ++} ++ + static gboolean +-set_headers 
(GstBuffer ** buffer, guint idx, GstRTPBaseDepayload * depayload) ++gst_rtp_base_depayload_set_headers (GstRTPBaseDepayload * depayload, ++ GstBuffer * buffer) + { + GstRTPBaseDepayloadPrivate *priv = depayload->priv; + GstClockTime pts, dts, duration; + +- *buffer = gst_buffer_make_writable (*buffer); +- +- pts = GST_BUFFER_PTS (*buffer); +- dts = GST_BUFFER_DTS (*buffer); +- duration = GST_BUFFER_DURATION (*buffer); ++ pts = GST_BUFFER_PTS (buffer); ++ dts = GST_BUFFER_DTS (buffer); ++ duration = GST_BUFFER_DURATION (buffer); + + /* apply last incoming timestamp and duration to outgoing buffer if + * not otherwise set. */ + if (!GST_CLOCK_TIME_IS_VALID (pts)) +- GST_BUFFER_PTS (*buffer) = priv->pts; ++ GST_BUFFER_PTS (buffer) = priv->pts; + if (!GST_CLOCK_TIME_IS_VALID (dts)) +- GST_BUFFER_DTS (*buffer) = priv->dts; ++ GST_BUFFER_DTS (buffer) = priv->dts; + if (!GST_CLOCK_TIME_IS_VALID (duration)) +- GST_BUFFER_DURATION (*buffer) = priv->duration; ++ GST_BUFFER_DURATION (buffer) = priv->duration; + + if (G_UNLIKELY (depayload->priv->discont)) { + GST_LOG_OBJECT (depayload, "Marking DISCONT on output buffer"); +- GST_BUFFER_FLAG_SET (*buffer, GST_BUFFER_FLAG_DISCONT); ++ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); + depayload->priv->discont = FALSE; + } + +@@ -866,24 +1314,20 @@ set_headers (GstBuffer ** buffer, guint idx, GstRTPBaseDepayload * depayload) + priv->dts = GST_CLOCK_TIME_NONE; + priv->duration = GST_CLOCK_TIME_NONE; + +- if (priv->source_info && priv->input_buffer) +- add_rtp_source_meta (*buffer, priv->input_buffer); ++ if (priv->input_buffer) { ++ if (priv->source_info) ++ add_rtp_source_meta (buffer, priv->input_buffer); + +- return TRUE; ++ return read_rtp_header_extensions (depayload, priv->input_buffer, buffer); ++ } ++ ++ return FALSE; + } + + static GstFlowReturn +-gst_rtp_base_depayload_prepare_push (GstRTPBaseDepayload * filter, ++gst_rtp_base_depayload_finish_push (GstRTPBaseDepayload * filter, + gboolean is_list, gpointer obj) + { +- if (is_list) { +- GstBufferList **blist = obj; +- gst_buffer_list_foreach (*blist, (GstBufferListFunc) set_headers, filter); +- } else { +- GstBuffer **buf = obj; +- set_headers (buf, 0, filter); +- } +- + /* if this is the first buffer send a NEWSEGMENT */ + if (G_UNLIKELY (filter->priv->segment_event)) { + gst_pad_push_event (filter->srcpad, filter->priv->segment_event); +@@ -891,13 +1335,125 @@ gst_rtp_base_depayload_prepare_push (GstRTPBaseDepayload * filter, + GST_DEBUG_OBJECT (filter, "Pushed newsegment event on this first buffer"); + } + +- return GST_FLOW_OK; ++ if (is_list) { ++ GstBufferList *blist = obj; ++ return gst_pad_push_list (filter->srcpad, blist); ++ } else { ++ GstBuffer *buf = obj; ++ return gst_pad_push (filter->srcpad, buf); ++ } ++} ++ ++static gboolean ++gst_rtp_base_depayload_set_src_caps_from_hdrext (GstRTPBaseDepayload * filter) ++{ ++ gboolean update_ok = TRUE; ++ GstCaps *src_caps = gst_pad_get_current_caps (filter->srcpad); ++ ++ if (src_caps) { ++ GstCaps *new_caps; ++ gint i; ++ ++ new_caps = gst_caps_copy (src_caps); ++ for (i = 0; i < filter->priv->header_exts->len; i++) { ++ GstRTPHeaderExtension *ext; ++ ++ ext = g_ptr_array_index (filter->priv->header_exts, i); ++ update_ok = ++ gst_rtp_header_extension_update_non_rtp_src_caps (ext, new_caps); ++ ++ if (!update_ok) { ++ GST_ELEMENT_ERROR (filter, STREAM, DECODE, ++ ("RTP header extension (%s) could not update src caps", ++ GST_OBJECT_NAME (ext)), (NULL)); ++ break; ++ } ++ } ++ ++ if (G_UNLIKELY (update_ok && !gst_caps_is_equal 
(src_caps, new_caps))) { ++ gst_pad_set_caps (filter->srcpad, new_caps); ++ } ++ ++ gst_caps_unref (src_caps); ++ gst_caps_unref (new_caps); ++ } ++ ++ return update_ok; ++} ++ ++static GstFlowReturn ++gst_rtp_base_depayload_do_push (GstRTPBaseDepayload * filter, gboolean is_list, ++ gpointer obj) ++{ ++ GstFlowReturn res; ++ ++ if (is_list) { ++ GstBufferList *blist = obj; ++ guint i; ++ guint first_not_pushed_idx = 0; ++ ++ for (i = 0; i < gst_buffer_list_length (blist); ++i) { ++ GstBuffer *buf = gst_buffer_list_get_writable (blist, i); ++ ++ if (G_UNLIKELY (gst_rtp_base_depayload_set_headers (filter, buf))) { ++ /* src caps have changed; push the buffers preceding the current one, ++ * then apply the new caps on the src pad */ ++ guint j; ++ ++ for (j = first_not_pushed_idx; j < i; ++j) { ++ res = gst_rtp_base_depayload_finish_push (filter, FALSE, ++ gst_buffer_ref (gst_buffer_list_get (blist, j))); ++ if (G_UNLIKELY (res != GST_FLOW_OK)) { ++ goto error_list; ++ } ++ } ++ first_not_pushed_idx = i; ++ ++ if (!gst_rtp_base_depayload_set_src_caps_from_hdrext (filter)) { ++ res = GST_FLOW_ERROR; ++ goto error_list; ++ } ++ } ++ } ++ ++ if (G_LIKELY (first_not_pushed_idx == 0)) { ++ res = gst_rtp_base_depayload_finish_push (filter, TRUE, blist); ++ blist = NULL; ++ } else { ++ for (i = first_not_pushed_idx; i < gst_buffer_list_length (blist); ++i) { ++ res = gst_rtp_base_depayload_finish_push (filter, FALSE, ++ gst_buffer_ref (gst_buffer_list_get (blist, i))); ++ if (G_UNLIKELY (res != GST_FLOW_OK)) { ++ break; ++ } ++ } ++ } ++ ++ error_list: ++ gst_clear_buffer_list (&blist); ++ } else { ++ GstBuffer *buf = obj; ++ if (G_UNLIKELY (gst_rtp_base_depayload_set_headers (filter, buf))) { ++ if (!gst_rtp_base_depayload_set_src_caps_from_hdrext (filter)) { ++ res = GST_FLOW_ERROR; ++ goto error_buffer; ++ } ++ } ++ ++ res = gst_rtp_base_depayload_finish_push (filter, FALSE, buf); ++ buf = NULL; ++ ++ error_buffer: ++ gst_clear_buffer (&buf); ++ } ++ ++ return res; + } + + /** + * gst_rtp_base_depayload_push: + * @filter: a #GstRTPBaseDepayload +- * @out_buf: a #GstBuffer ++ * @out_buf: (transfer full): a #GstBuffer + * + * Push @out_buf to the peer of @filter. This function takes ownership of + * @out_buf. +@@ -912,12 +1468,7 @@ gst_rtp_base_depayload_push (GstRTPBaseDepayload * filter, GstBuffer * out_buf) + { + GstFlowReturn res; + +- res = gst_rtp_base_depayload_prepare_push (filter, FALSE, &out_buf); +- +- if (G_LIKELY (res == GST_FLOW_OK)) +- res = gst_pad_push (filter->srcpad, out_buf); +- else +- gst_buffer_unref (out_buf); ++ res = gst_rtp_base_depayload_do_push (filter, FALSE, out_buf); + + if (res != GST_FLOW_OK) + filter->priv->process_flow_ret = res; +@@ -928,7 +1479,7 @@ gst_rtp_base_depayload_push (GstRTPBaseDepayload * filter, GstBuffer * out_buf) + /** + * gst_rtp_base_depayload_push_list: + * @filter: a #GstRTPBaseDepayload +- * @out_list: a #GstBufferList ++ * @out_list: (transfer full): a #GstBufferList + * + * Push @out_list to the peer of @filter. This function takes ownership of + * @out_list. 
+@@ -941,12 +1492,7 @@ gst_rtp_base_depayload_push_list (GstRTPBaseDepayload * filter, + { + GstFlowReturn res; + +- res = gst_rtp_base_depayload_prepare_push (filter, TRUE, &out_list); +- +- if (G_LIKELY (res == GST_FLOW_OK)) +- res = gst_pad_push_list (filter->srcpad, out_list); +- else +- gst_buffer_list_unref (out_list); ++ res = gst_rtp_base_depayload_do_push (filter, TRUE, out_list); + + if (res != GST_FLOW_OK) + filter->priv->process_flow_ret = res; +@@ -992,6 +1538,7 @@ gst_rtp_base_depayload_packet_lost (GstRTPBaseDepayload * filter, + &might_have_been_fec) || !might_have_been_fec) { + /* send GAP event */ + sevent = gst_event_new_gap (timestamp, duration); ++ gst_event_set_gap_flags (sevent, GST_GAP_FLAG_MISSING_DATA); + res = gst_pad_push_event (filter->srcpad, sevent); + } + +@@ -1019,6 +1566,7 @@ gst_rtp_base_depayload_change_state (GstElement * element, + priv->play_speed = 1.0; + priv->play_scale = 1.0; + priv->clock_base = -1; ++ priv->ref_ts = -1; + priv->onvif_mode = FALSE; + priv->next_seqnum = -1; + priv->negotiated = FALSE; +@@ -1099,6 +1647,9 @@ gst_rtp_base_depayload_set_property (GObject * object, guint prop_id, + case PROP_MAX_REORDER: + priv->max_reorder = g_value_get_int (value); + break; ++ case PROP_AUTO_HEADER_EXTENSION: ++ priv->auto_hdr_ext = g_value_get_boolean (value); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -1127,6 +1678,9 @@ gst_rtp_base_depayload_get_property (GObject * object, guint prop_id, + case PROP_MAX_REORDER: + g_value_set_int (value, priv->max_reorder); + break; ++ case PROP_AUTO_HEADER_EXTENSION: ++ g_value_set_boolean (value, priv->auto_hdr_ext); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +diff --git a/gst-libs/gst/rtp/gstrtpbasepayload.c b/gst-libs/gst/rtp/gstrtpbasepayload.c +index 0f5feb44e..02d0f51d2 100644 +--- a/gst-libs/gst/rtp/gstrtpbasepayload.c ++++ b/gst-libs/gst/rtp/gstrtpbasepayload.c +@@ -30,12 +30,11 @@ + + #include "gstrtpbasepayload.h" + #include "gstrtpmeta.h" ++#include "gstrtphdrext.h" + + GST_DEBUG_CATEGORY_STATIC (rtpbasepayload_debug); + #define GST_CAT_DEFAULT (rtpbasepayload_debug) + +-static gboolean enable_experimental_twcc = FALSE; +- + struct _GstRTPBasePayloadPrivate + { + gboolean ts_offset_random; +@@ -49,13 +48,13 @@ struct _GstRTPBasePayloadPrivate + + gboolean source_info; + GstBuffer *input_meta_buffer; +- guint8 twcc_ext_id; + + guint64 base_offset; + gint64 base_rtime; + guint64 base_rtime_hz; + guint64 running_time; + gboolean scale_rtptime; ++ gboolean auto_hdr_ext; + + gint64 prop_max_ptime; + gint64 caps_max_ptime; +@@ -64,20 +63,35 @@ struct _GstRTPBasePayloadPrivate + + gboolean negotiated; + ++ /* We need to know whether negotiate was called in order to decide ++ * whether we should store the input buffer as input meta in case ++ * negotiate() gets called from the subclass' handle_buffer() implementation, ++ * as negotiate() is where we instantiate header extensions. 
++ */ ++ gboolean negotiate_called; ++ + gboolean delay_segment; + GstEvent *pending_segment; + + GstCaps *subclass_srccaps; + GstCaps *sinkcaps; ++ ++ /* array of GstRTPHeaderExtension's * */ ++ GPtrArray *header_exts; + }; + + /* RTPBasePayload signals and args */ + enum + { +- /* FILL ME */ ++ SIGNAL_0, ++ SIGNAL_REQUEST_EXTENSION, ++ SIGNAL_ADD_EXTENSION, ++ SIGNAL_CLEAR_EXTENSIONS, + LAST_SIGNAL + }; + ++static guint gst_rtp_base_payload_signals[LAST_SIGNAL] = { 0 }; ++ + /* FIXME 0.11, a better default is the Ethernet MTU of + * 1500 - sizeof(headers) as pointed out by marcelm in IRC: + * So an Ethernet MTU of 1500, minus 60 for the max IP, minus 8 for UDP, gives +@@ -96,8 +110,13 @@ enum + #define DEFAULT_RUNNING_TIME GST_CLOCK_TIME_NONE + #define DEFAULT_SOURCE_INFO FALSE + #define DEFAULT_ONVIF_NO_RATE_CONTROL FALSE +-#define DEFAULT_TWCC_EXT_ID 0 + #define DEFAULT_SCALE_RTPTIME TRUE ++#define DEFAULT_AUTO_HEADER_EXTENSION TRUE ++ ++#define RTP_HEADER_EXT_ONE_BYTE_MAX_SIZE 16 ++#define RTP_HEADER_EXT_TWO_BYTE_MAX_SIZE 256 ++#define RTP_HEADER_EXT_ONE_BYTE_MAX_ID 14 ++#define RTP_HEADER_EXT_TWO_BYTE_MAX_ID 255 + + enum + { +@@ -116,8 +135,8 @@ enum + PROP_STATS, + PROP_SOURCE_INFO, + PROP_ONVIF_NO_RATE_CONTROL, +- PROP_TWCC_EXT_ID, + PROP_SCALE_RTPTIME, ++ PROP_AUTO_HEADER_EXTENSION, + PROP_LAST + }; + +@@ -154,6 +173,9 @@ static GstStateChangeReturn gst_rtp_base_payload_change_state (GstElement * + + static gboolean gst_rtp_base_payload_negotiate (GstRTPBasePayload * payload); + ++static void gst_rtp_base_payload_add_extension (GstRTPBasePayload * payload, ++ GstRTPHeaderExtension * ext); ++static void gst_rtp_base_payload_clear_extensions (GstRTPBasePayload * payload); + + static GstElementClass *parent_class = NULL; + static gint private_offset = 0; +@@ -194,6 +216,45 @@ gst_rtp_base_payload_get_instance_private (GstRTPBasePayload * self) + return (G_STRUCT_MEMBER_P (self, private_offset)); + } + ++static GstRTPHeaderExtension * ++gst_rtp_base_payload_request_extension_default (GstRTPBasePayload * payload, ++ guint ext_id, const gchar * uri) ++{ ++ GstRTPHeaderExtension *ext = NULL; ++ ++ if (!payload->priv->auto_hdr_ext) ++ return NULL; ++ ++ ext = gst_rtp_header_extension_create_from_uri (uri); ++ if (ext) { ++ GST_DEBUG_OBJECT (payload, ++ "Automatically enabled extension %s for uri \'%s\'", ++ GST_ELEMENT_NAME (ext), uri); ++ ++ gst_rtp_header_extension_set_id (ext, ext_id); ++ } else { ++ GST_DEBUG_OBJECT (payload, ++ "Didn't find any extension implementing uri \'%s\'", uri); ++ } ++ ++ return ext; ++} ++ ++static gboolean ++extension_accumulator (GSignalInvocationHint * ihint, ++ GValue * return_accu, const GValue * handler_return, gpointer data) ++{ ++ gpointer ext; ++ ++ /* Call default handler if user callback didn't create the extension */ ++ ext = g_value_get_object (handler_return); ++ if (!ext) ++ return TRUE; ++ ++ g_value_set_object (return_accu, ext); ++ return FALSE; ++} ++ + static void + gst_rtp_base_payload_class_init (GstRTPBasePayloadClass * klass) + { +@@ -203,9 +264,6 @@ gst_rtp_base_payload_class_init (GstRTPBasePayloadClass * klass) + gobject_class = (GObjectClass *) klass; + gstelement_class = (GstElementClass *) klass; + +- if (g_getenv ("GST_RTP_ENABLE_EXPERIMENTAL_TWCC_PROPERTY")) +- enable_experimental_twcc = TRUE; +- + if (private_offset != 0) + g_type_class_adjust_private_offset (klass, &private_offset); + +@@ -348,32 +406,6 @@ gst_rtp_base_payload_class_init (GstRTPBasePayloadClass * klass) + DEFAULT_ONVIF_NO_RATE_CONTROL, + G_PARAM_READWRITE | 
G_PARAM_STATIC_STRINGS)); + +- /** +- * GstRTPBasePayload:twcc-ext-id: +- * +- * The RTP header-extension ID used for tagging buffers with Transport-Wide +- * Congestion Control sequence-numbers. +- * +- * To use this across multiple bundled streams (transport wide), the +- * GstRTPFunnel can mux TWCC sequence-numbers together. +- * +- * This is experimental and requires setting the +- * 'GST_RTP_ENABLE_EXPERIMENTAL_TWCC_PROPERTY' environment variable as it is +- * still a draft and not yet a standard. This property may also be removed +- * in the future for 1.20. +- * +- * Since: 1.18 +- */ +- if (enable_experimental_twcc) { +- g_object_class_install_property (gobject_class, PROP_TWCC_EXT_ID, +- g_param_spec_uint ("twcc-ext-id", +- "Transport-wide Congestion Control Extension ID (experimental)", +- "The RTP header-extension ID to use for tagging buffers with " +- "Transport-wide Congestion Control sequencenumbers (0 = disable)", +- 0, 15, DEFAULT_TWCC_EXT_ID, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); +- } +- + /** + * GstRTPBasePayload:scale-rtptime: + * +@@ -393,6 +425,73 @@ gst_rtp_base_payload_class_init (GstRTPBasePayloadClass * klass) + "Whether the RTP timestamp should be scaled with the rate (speed)", + DEFAULT_SCALE_RTPTIME, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + ++ /** ++ * GstRTPBasePayload:auto-header-extension: ++ * ++ * If enabled, the payloader will automatically try to enable all the ++ * RTP header extensions provided in the src caps, saving the application ++ * the need to handle these extensions manually using the ++ * GstRTPBasePayload::request-extension: signal. ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (G_OBJECT_CLASS (klass), ++ PROP_AUTO_HEADER_EXTENSION, g_param_spec_boolean ("auto-header-extension", ++ "Automatic RTP header extension", ++ "Whether RTP header extensions should be automatically enabled, if an implementation is available", ++ DEFAULT_AUTO_HEADER_EXTENSION, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstRTPBasePayload::add-extension: ++ * @object: the #GstRTPBasePayload ++ * @ext: (transfer full): the #GstRTPHeaderExtension ++ * ++ * Add @ext as an extension for writing part of an RTP header extension onto ++ * outgoing RTP packets. ++ * ++ * Since: 1.20 ++ */ ++ gst_rtp_base_payload_signals[SIGNAL_ADD_EXTENSION] = ++ g_signal_new_class_handler ("add-extension", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, ++ G_CALLBACK (gst_rtp_base_payload_add_extension), NULL, NULL, NULL, ++ G_TYPE_NONE, 1, GST_TYPE_RTP_HEADER_EXTENSION); ++ ++ /** ++ * GstRTPBasePayload::request-extension: ++ * @object: the #GstRTPBasePayload ++ * @ext_id: the extension id being requested ++ * @ext_uri: the extension URI being requested ++ * ++ * The returned @ext must be configured with the correct @ext_id and with the ++ * necessary attributes as required by the extension implementation. ++ * ++ * Returns: (transfer full) (nullable): the #GstRTPHeaderExtension for @ext_id, or %NULL ++ * ++ * Since: 1.20 ++ */ ++ gst_rtp_base_payload_signals[SIGNAL_REQUEST_EXTENSION] = ++ g_signal_new_class_handler ("request-extension", ++ G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST, ++ G_CALLBACK (gst_rtp_base_payload_request_extension_default), ++ extension_accumulator, NULL, NULL, ++ GST_TYPE_RTP_HEADER_EXTENSION, 2, G_TYPE_UINT, G_TYPE_STRING); ++ ++ /** ++ * GstRTPBasePayload::clear-extensions: ++ * @object: the #GstRTPBasePayload ++ * ++ * Clear all RTP header extensions used by this payloader. 
++ * ++ * Since: 1.20 ++ */ ++ gst_rtp_base_payload_signals[SIGNAL_CLEAR_EXTENSIONS] = ++ g_signal_new_class_handler ("clear-extensions", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, ++ G_CALLBACK (gst_rtp_base_payload_clear_extensions), NULL, NULL, NULL, ++ G_TYPE_NONE, 0); ++ + gstelement_class->change_state = gst_rtp_base_payload_change_state; + + klass->get_caps = gst_rtp_base_payload_getcaps_default; +@@ -455,6 +554,7 @@ gst_rtp_base_payload_init (GstRTPBasePayload * rtpbasepayload, gpointer g_class) + rtpbasepayload->priv->base_rtime_hz = GST_BUFFER_OFFSET_NONE; + rtpbasepayload->priv->onvif_no_rate_control = DEFAULT_ONVIF_NO_RATE_CONTROL; + rtpbasepayload->priv->scale_rtptime = DEFAULT_SCALE_RTPTIME; ++ rtpbasepayload->priv->auto_hdr_ext = DEFAULT_AUTO_HEADER_EXTENSION; + + rtpbasepayload->media = NULL; + rtpbasepayload->encoding_name = NULL; +@@ -463,6 +563,8 @@ gst_rtp_base_payload_init (GstRTPBasePayload * rtpbasepayload, gpointer g_class) + + rtpbasepayload->priv->caps_max_ptime = DEFAULT_MAX_PTIME; + rtpbasepayload->priv->prop_max_ptime = DEFAULT_MAX_PTIME; ++ rtpbasepayload->priv->header_exts = ++ g_ptr_array_new_with_free_func ((GDestroyNotify) gst_object_unref); + } + + static void +@@ -480,6 +582,9 @@ gst_rtp_base_payload_finalize (GObject * object) + gst_caps_replace (&rtpbasepayload->priv->subclass_srccaps, NULL); + gst_caps_replace (&rtpbasepayload->priv->sinkcaps, NULL); + ++ g_ptr_array_unref (rtpbasepayload->priv->header_exts); ++ rtpbasepayload->priv->header_exts = NULL; ++ + G_OBJECT_CLASS (parent_class)->finalize (object); + } + +@@ -741,13 +846,15 @@ gst_rtp_base_payload_chain (GstPad * pad, GstObject * parent, + if (!rtpbasepayload->priv->negotiated) + goto not_negotiated; + +- if (rtpbasepayload->priv->source_info) { ++ if (rtpbasepayload->priv->source_info ++ || rtpbasepayload->priv->header_exts->len > 0 ++ || !rtpbasepayload->priv->negotiate_called) { + /* Save a copy of meta (instead of taking an extra reference before + * handle_buffer) to make the meta available when allocating a output + * buffer. */ + rtpbasepayload->priv->input_meta_buffer = gst_buffer_new (); + gst_buffer_copy_into (rtpbasepayload->priv->input_meta_buffer, buffer, +- GST_BUFFER_COPY_META, 0, -1); ++ GST_BUFFER_COPY_METADATA, 0, -1); + } + + if (gst_pad_check_reconfigure (GST_RTP_BASE_PAYLOAD_SRCPAD (rtpbasepayload))) { +@@ -849,22 +956,28 @@ update_max_ptime (GstRTPBasePayload * rtpbasepayload) + rtpbasepayload->max_ptime = DEFAULT_MAX_PTIME; + } + ++static gboolean ++_set_caps (GQuark field_id, const GValue * value, GstCaps * caps) ++{ ++ gst_caps_set_value (caps, g_quark_to_string (field_id), value); ++ ++ return TRUE; ++} ++ + /** +- * gst_rtp_base_payload_set_outcaps: ++ * gst_rtp_base_payload_set_outcaps_structure: + * @payload: a #GstRTPBasePayload +- * @fieldname: the first field name or %NULL +- * @...: field values +- * +- * Configure the output caps with the optional parameters. ++ * @s: (nullable): a #GstStructure with the caps fields + * +- * Variable arguments should be in the form field name, field type +- * (as a GType), value(s). The last variable argument should be NULL. ++ * Configure the output caps with the optional fields. + * + * Returns: %TRUE if the caps could be set. ++ * ++ * Since: 1.20 + */ + gboolean +-gst_rtp_base_payload_set_outcaps (GstRTPBasePayload * payload, +- const gchar * fieldname, ...) 
++gst_rtp_base_payload_set_outcaps_structure (GstRTPBasePayload * payload, ++ GstStructure * s) + { + GstCaps *srccaps; + +@@ -876,21 +989,80 @@ gst_rtp_base_payload_set_outcaps (GstRTPBasePayload * payload, + + GST_DEBUG_OBJECT (payload, "defaults: %" GST_PTR_FORMAT, srccaps); + ++ if (s && gst_structure_n_fields (s) > 0) { ++ gst_structure_foreach (s, (GstStructureForeachFunc) _set_caps, srccaps); ++ ++ GST_DEBUG_OBJECT (payload, "custom added: %" GST_PTR_FORMAT, srccaps); ++ } ++ ++ gst_caps_replace (&payload->priv->subclass_srccaps, srccaps); ++ gst_caps_unref (srccaps); ++ ++ return gst_rtp_base_payload_negotiate (payload); ++} ++ ++/** ++ * gst_rtp_base_payload_set_outcaps: ++ * @payload: a #GstRTPBasePayload ++ * @fieldname: the first field name or %NULL ++ * @...: field values ++ * ++ * Configure the output caps with the optional parameters. ++ * ++ * Variable arguments should be in the form field name, field type ++ * (as a GType), value(s). The last variable argument should be NULL. ++ * ++ * Returns: %TRUE if the caps could be set. ++ */ ++gboolean ++gst_rtp_base_payload_set_outcaps (GstRTPBasePayload * payload, ++ const gchar * fieldname, ...) ++{ ++ gboolean result; ++ GstStructure *s = NULL; ++ + if (fieldname) { + va_list varargs; + ++ s = gst_structure_new_empty ("unused"); ++ + /* override with custom properties */ + va_start (varargs, fieldname); +- gst_caps_set_simple_valist (srccaps, fieldname, varargs); ++ gst_structure_set_valist (s, fieldname, varargs); + va_end (varargs); +- +- GST_DEBUG_OBJECT (payload, "custom added: %" GST_PTR_FORMAT, srccaps); + } + +- gst_caps_replace (&payload->priv->subclass_srccaps, srccaps); +- gst_caps_unref (srccaps); ++ result = gst_rtp_base_payload_set_outcaps_structure (payload, s); + +- return gst_rtp_base_payload_negotiate (payload); ++ gst_clear_structure (&s); ++ ++ return result; ++} ++ ++static void ++add_and_ref_item (GstRTPHeaderExtension * ext, GPtrArray * ret) ++{ ++ g_ptr_array_add (ret, gst_object_ref (ext)); ++} ++ ++static void ++remove_item_from (GstRTPHeaderExtension * ext, GPtrArray * ret) ++{ ++ g_ptr_array_remove_fast (ret, ext); ++} ++ ++static void ++add_item_to (GstRTPHeaderExtension * ext, GPtrArray * ret) ++{ ++ g_ptr_array_add (ret, ext); ++} ++ ++static void ++add_header_ext_to_caps (GstRTPHeaderExtension * ext, GstCaps * caps) ++{ ++ if (!gst_rtp_header_extension_set_caps_from_attributes (ext, caps)) { ++ GST_WARNING ("Failed to set caps from rtp header extension"); ++ } + } + + static gboolean +@@ -898,7 +1070,7 @@ gst_rtp_base_payload_negotiate (GstRTPBasePayload * payload) + { + GstCaps *templ, *peercaps, *srccaps; + GstStructure *s, *d; +- gboolean res; ++ gboolean res = TRUE; + + payload->priv->caps_max_ptime = DEFAULT_MAX_PTIME; + payload->ptime = 0; +@@ -1183,21 +1355,168 @@ gst_rtp_base_payload_negotiate (GstRTPBasePayload * payload) + + update_max_ptime (payload); + ++ { ++ /* try to find header extension implementations for the list in the ++ * caps */ ++ GstStructure *s = gst_caps_get_structure (srccaps, 0); ++ guint i, j, n_fields = gst_structure_n_fields (s); ++ GPtrArray *header_exts = g_ptr_array_new_with_free_func (gst_object_unref); ++ GPtrArray *to_add = g_ptr_array_new (); ++ GPtrArray *to_remove = g_ptr_array_new (); ++ ++ GST_OBJECT_LOCK (payload); ++ g_ptr_array_foreach (payload->priv->header_exts, ++ (GFunc) add_and_ref_item, header_exts); ++ GST_OBJECT_UNLOCK (payload); ++ ++ for (i = 0; i < n_fields; i++) { ++ const gchar *field_name = gst_structure_nth_field_name (s, i); ++ if 
(g_str_has_prefix (field_name, "extmap-")) { ++ const GValue *val; ++ const gchar *uri = NULL; ++ gchar *nptr; ++ guint ext_id; ++ GstRTPHeaderExtension *ext = NULL; ++ ++ errno = 0; ++ ext_id = g_ascii_strtoull (&field_name[strlen ("extmap-")], &nptr, 10); ++ if (errno != 0 || (ext_id == 0 && field_name == nptr)) { ++ GST_WARNING_OBJECT (payload, "could not parse id from %s", ++ field_name); ++ res = FALSE; ++ goto ext_out; ++ } + +- if (enable_experimental_twcc && payload->priv->twcc_ext_id > 0) { +- /* TODO: put this as a separate utility-function for RTP extensions */ +- gchar *name = g_strdup_printf ("extmap-%u", payload->priv->twcc_ext_id); +- gst_caps_set_simple (srccaps, name, G_TYPE_STRING, +- "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01", +- NULL); +- g_free (name); ++ val = gst_structure_get_value (s, field_name); ++ if (G_VALUE_HOLDS_STRING (val)) { ++ uri = g_value_get_string (val); ++ } else if (GST_VALUE_HOLDS_ARRAY (val)) { ++ /* the uri is the second value in the array */ ++ const GValue *str = gst_value_array_get_value (val, 1); ++ if (G_VALUE_HOLDS_STRING (str)) { ++ uri = g_value_get_string (str); ++ } ++ } ++ ++ if (!uri) { ++ GST_WARNING_OBJECT (payload, "could not get extmap uri for " ++ "field %s", field_name); ++ res = FALSE; ++ goto ext_out; ++ } ++ ++ /* try to find if this extension mapping already exists */ ++ for (j = 0; j < header_exts->len; j++) { ++ ext = g_ptr_array_index (header_exts, j); ++ if (gst_rtp_header_extension_get_id (ext) == ext_id) { ++ if (g_strcmp0 (uri, gst_rtp_header_extension_get_uri (ext)) == 0) { ++ /* still matching, we're good, set attributes from caps in case ++ * the caps have been updated */ ++ if (!gst_rtp_header_extension_set_attributes_from_caps (ext, ++ srccaps)) { ++ GST_WARNING_OBJECT (payload, ++ "Failed to configure rtp header " "extension %" ++ GST_PTR_FORMAT " attributes from caps %" GST_PTR_FORMAT, ++ ext, srccaps); ++ res = FALSE; ++ goto ext_out; ++ } ++ break; ++ } else { ++ GST_DEBUG_OBJECT (payload, "extension id %u" ++ "was replaced with a different extension uri " ++ "original:\'%s' vs \'%s\'", ext_id, ++ gst_rtp_header_extension_get_uri (ext), uri); ++ g_ptr_array_add (to_remove, ext); ++ ext = NULL; ++ break; ++ } ++ } else { ++ ext = NULL; ++ } ++ } ++ ++ /* if no extension, attempt to request one */ ++ if (!ext) { ++ GST_DEBUG_OBJECT (payload, "requesting extension for id %u" ++ " and uri %s", ext_id, uri); ++ g_signal_emit (payload, ++ gst_rtp_base_payload_signals[SIGNAL_REQUEST_EXTENSION], 0, ++ ext_id, uri, &ext); ++ GST_DEBUG_OBJECT (payload, "request returned extension %p \'%s\' " ++ "for id %u and uri %s", ext, ++ ext ? 
GST_OBJECT_NAME (ext) : "", ext_id, uri); ++ ++ /* We require caller to set the appropriate extension if it's required */ ++ if (ext && gst_rtp_header_extension_get_id (ext) != ext_id) { ++ g_warning ("\'request-extension\' signal provided an rtp header " ++ "extension for uri \'%s\' that does not match the requested " ++ "extension id %u", uri, ext_id); ++ gst_clear_object (&ext); ++ } ++ ++ if (ext && !gst_rtp_header_extension_set_attributes_from_caps (ext, ++ srccaps)) { ++ GST_WARNING_OBJECT (payload, ++ "Failed to configure rtp header " "extension %" ++ GST_PTR_FORMAT " attributes from caps %" GST_PTR_FORMAT, ++ ext, srccaps); ++ res = FALSE; ++ g_clear_object (&ext); ++ goto ext_out; ++ } ++ ++ if (ext) { ++ g_ptr_array_add (to_add, ext); ++ } ++ } ++ } ++ } ++ ++ GST_OBJECT_LOCK (payload); ++ g_ptr_array_foreach (to_remove, (GFunc) remove_item_from, ++ payload->priv->header_exts); ++ g_ptr_array_foreach (to_add, (GFunc) add_item_to, ++ payload->priv->header_exts); ++ /* let extensions update their internal state from sinkcaps */ ++ if (payload->priv->sinkcaps) { ++ gint i; ++ ++ for (i = 0; i < payload->priv->header_exts->len; i++) { ++ GstRTPHeaderExtension *ext; ++ ++ ext = g_ptr_array_index (payload->priv->header_exts, i); ++ if (!gst_rtp_header_extension_set_non_rtp_sink_caps (ext, ++ payload->priv->sinkcaps)) { ++ GST_WARNING_OBJECT (payload, ++ "Failed to update rtp header extension (%s) from sink caps", ++ GST_OBJECT_NAME (ext)); ++ res = FALSE; ++ GST_OBJECT_UNLOCK (payload); ++ goto ext_out; ++ } ++ } ++ } ++ /* add extension information to srccaps */ ++ g_ptr_array_foreach (payload->priv->header_exts, ++ (GFunc) add_header_ext_to_caps, srccaps); ++ GST_OBJECT_UNLOCK (payload); ++ ++ ext_out: ++ g_ptr_array_unref (to_add); ++ g_ptr_array_unref (to_remove); ++ g_ptr_array_unref (header_exts); + } + +- res = gst_pad_set_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), srccaps); ++ GST_DEBUG_OBJECT (payload, "configuring caps %" GST_PTR_FORMAT, srccaps); ++ ++ if (res) ++ res = gst_pad_set_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), srccaps); + gst_caps_unref (srccaps); + gst_caps_unref (templ); + + out: ++ payload->priv->negotiate_called = TRUE; + + if (!res) + gst_pad_mark_reconfigure (GST_RTP_BASE_PAYLOAD_SRCPAD (payload)); +@@ -1240,7 +1559,6 @@ typedef struct + GstClockTime pts; + guint64 offset; + guint32 rtptime; +- guint8 twcc_ext_id; + } HeaderData; + + static gboolean +@@ -1260,30 +1578,213 @@ find_timestamp (GstBuffer ** buffer, guint idx, gpointer user_data) + } + + static void +-_set_twcc_seq (GstRTPBuffer * rtp, guint16 seq, guint8 ext_id) ++gst_rtp_base_payload_add_extension (GstRTPBasePayload * payload, ++ GstRTPHeaderExtension * ext) ++{ ++ g_return_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext)); ++ g_return_if_fail (gst_rtp_header_extension_get_id (ext) > 0); ++ ++ /* XXX: check for duplicate ids? 
*/ ++ GST_OBJECT_LOCK (payload); ++ g_ptr_array_add (payload->priv->header_exts, gst_object_ref (ext)); ++ gst_pad_mark_reconfigure (GST_RTP_BASE_PAYLOAD_SRCPAD (payload)); ++ GST_OBJECT_UNLOCK (payload); ++} ++ ++static void ++gst_rtp_base_payload_clear_extensions (GstRTPBasePayload * payload) + { +- guint16 data; +- if (ext_id == 0 || ext_id > 14) ++ GST_OBJECT_LOCK (payload); ++ g_ptr_array_set_size (payload->priv->header_exts, 0); ++ GST_OBJECT_UNLOCK (payload); ++} ++ ++typedef struct ++{ ++ GstRTPBasePayload *payload; ++ GstRTPHeaderExtensionFlags flags; ++ GstBuffer *output; ++ guint8 *data; ++ gsize allocated_size; ++ gsize written_size; ++ gsize hdr_unit_size; ++ gboolean abort; ++} HeaderExt; ++ ++static void ++determine_header_extension_flags_size (GstRTPHeaderExtension * ext, ++ gpointer user_data) ++{ ++ HeaderExt *hdr = user_data; ++ guint ext_id; ++ gsize max_size; ++ ++ hdr->flags &= gst_rtp_header_extension_get_supported_flags (ext); ++ max_size = ++ gst_rtp_header_extension_get_max_size (ext, ++ hdr->payload->priv->input_meta_buffer); ++ ++ if (max_size > RTP_HEADER_EXT_ONE_BYTE_MAX_SIZE) ++ hdr->flags &= ~GST_RTP_HEADER_EXTENSION_ONE_BYTE; ++ if (max_size > RTP_HEADER_EXT_TWO_BYTE_MAX_SIZE) ++ hdr->flags &= ~GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++ ++ ext_id = gst_rtp_header_extension_get_id (ext); ++ if (ext_id > RTP_HEADER_EXT_ONE_BYTE_MAX_ID) ++ hdr->flags &= ~GST_RTP_HEADER_EXTENSION_ONE_BYTE; ++ if (ext_id > RTP_HEADER_EXT_TWO_BYTE_MAX_ID) ++ hdr->flags &= ~GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++ ++ hdr->allocated_size += max_size; ++} ++ ++static void ++write_header_extension (GstRTPHeaderExtension * ext, gpointer user_data) ++{ ++ HeaderExt *hdr = user_data; ++ gsize remaining = ++ hdr->allocated_size - hdr->written_size - hdr->hdr_unit_size; ++ gsize offset = hdr->written_size + hdr->hdr_unit_size; ++ gssize written; ++ guint ext_id; ++ ++ if (hdr->abort) + return; +- GST_WRITE_UINT16_BE (&data, seq); +- gst_rtp_buffer_add_extension_onebyte_header (rtp, ext_id, &data, 2); ++ ++ written = gst_rtp_header_extension_write (ext, ++ hdr->payload->priv->input_meta_buffer, hdr->flags, hdr->output, ++ &hdr->data[offset], remaining); ++ ++ GST_TRACE_OBJECT (hdr->payload, "extension %" GST_PTR_FORMAT " wrote %" ++ G_GSIZE_FORMAT, ext, written); ++ ++ if (written == 0) { ++ /* extension wrote no data */ ++ return; ++ } else if (written < 0) { ++ GST_WARNING_OBJECT (hdr->payload, "%s failed to write extension data", ++ GST_OBJECT_NAME (ext)); ++ goto error; ++ } else if (written > remaining) { ++ /* wrote too much! */ ++ g_error ("Overflow detected writing rtp header extensions. One of the " ++ "instances likely did not report a large enough maximum size. " ++ "Memory corruption has occurred. 
Aborting"); ++ goto error; ++ } ++ ++ ext_id = gst_rtp_header_extension_get_id (ext); ++ ++ /* move to the beginning of the extension header */ ++ offset -= hdr->hdr_unit_size; ++ ++ /* write extension header */ ++ if (hdr->flags & GST_RTP_HEADER_EXTENSION_ONE_BYTE) { ++ if (written > RTP_HEADER_EXT_ONE_BYTE_MAX_SIZE) { ++ g_critical ("Amount of data written by %s is larger than allowed with " ++ "a one byte header.", GST_OBJECT_NAME (ext)); ++ goto error; ++ } ++ ++ hdr->data[offset] = ((ext_id & 0x0F) << 4) | ((written - 1) & 0x0F); ++ } else if (hdr->flags & GST_RTP_HEADER_EXTENSION_TWO_BYTE) { ++ if (written > RTP_HEADER_EXT_TWO_BYTE_MAX_SIZE) { ++ g_critical ("Amount of data written by %s is larger than allowed with " ++ "a two byte header.", GST_OBJECT_NAME (ext)); ++ goto error; ++ } ++ ++ hdr->data[offset] = ext_id & 0xFF; ++ hdr->data[offset + 1] = written & 0xFF; ++ } else { ++ g_critical ("Don't know how to write extension data with flags 0x%x!", ++ hdr->flags); ++ goto error; ++ } ++ ++ hdr->written_size += written + hdr->hdr_unit_size; ++ ++ return; ++ ++error: ++ hdr->abort = TRUE; ++ return; + } + + static gboolean + set_headers (GstBuffer ** buffer, guint idx, gpointer user_data) + { + HeaderData *data = user_data; ++ HeaderExt hdrext = { NULL, }; + GstRTPBuffer rtp = { NULL, }; + +- if (!gst_rtp_buffer_map (*buffer, GST_MAP_WRITE, &rtp)) ++ if (!gst_rtp_buffer_map (*buffer, GST_MAP_READWRITE, &rtp)) + goto map_failed; + + gst_rtp_buffer_set_ssrc (&rtp, data->ssrc); + gst_rtp_buffer_set_payload_type (&rtp, data->pt); + gst_rtp_buffer_set_seq (&rtp, data->seqnum); + gst_rtp_buffer_set_timestamp (&rtp, data->rtptime); +- if (enable_experimental_twcc) +- _set_twcc_seq (&rtp, data->seqnum, data->twcc_ext_id); ++ ++ GST_OBJECT_LOCK (data->payload); ++ if (data->payload->priv->header_exts->len > 0 ++ && data->payload->priv->input_meta_buffer) { ++ guint wordlen; ++ gsize extlen; ++ guint16 bit_pattern; ++ ++ /* write header extensions */ ++ hdrext.payload = data->payload; ++ hdrext.output = *buffer; ++ /* XXX: pre-calculate these flags and sizes? */ ++ hdrext.flags = ++ GST_RTP_HEADER_EXTENSION_ONE_BYTE | GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++ g_ptr_array_foreach (data->payload->priv->header_exts, ++ (GFunc) determine_header_extension_flags_size, &hdrext); ++ hdrext.hdr_unit_size = 0; ++ if (hdrext.flags & GST_RTP_HEADER_EXTENSION_ONE_BYTE) { ++ /* prefer the one byte header */ ++ hdrext.hdr_unit_size = 1; ++ /* TODO: support mixed size writing modes, i.e. RFC8285 */ ++ hdrext.flags &= ~GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++ bit_pattern = 0xBEDE; ++ } else if (hdrext.flags & GST_RTP_HEADER_EXTENSION_TWO_BYTE) { ++ hdrext.hdr_unit_size = 2; ++ bit_pattern = 0x1000; ++ } else { ++ goto unsupported_flags; ++ } ++ ++ extlen = ++ hdrext.hdr_unit_size * data->payload->priv->header_exts->len + ++ hdrext.allocated_size; ++ wordlen = extlen / 4 + ((extlen % 4) ? 1 : 0); ++ ++ /* XXX: do we need to add to any existing extension data instead of ++ * overwriting everything? */ ++ gst_rtp_buffer_set_extension_data (&rtp, bit_pattern, wordlen); ++ gst_rtp_buffer_get_extension_data (&rtp, NULL, (gpointer) & hdrext.data, ++ &wordlen); ++ ++ /* from 32-bit words to bytes */ ++ hdrext.allocated_size = wordlen * 4; ++ ++ g_ptr_array_foreach (data->payload->priv->header_exts, ++ (GFunc) write_header_extension, &hdrext); ++ ++ if (hdrext.written_size > 0) { ++ wordlen = hdrext.written_size / 4 + ((hdrext.written_size % 4) ? 
1 : 0); ++ ++ /* zero-fill the hdrext padding bytes */ ++ memset (&hdrext.data[hdrext.written_size], 0, ++ wordlen * 4 - hdrext.written_size); ++ ++ gst_rtp_buffer_set_extension_data (&rtp, bit_pattern, wordlen); ++ } else { ++ gst_rtp_buffer_remove_extension_data (&rtp); ++ } ++ } ++ GST_OBJECT_UNLOCK (data->payload); + gst_rtp_buffer_unmap (&rtp); + + /* increment the seqnum for each buffer */ +@@ -1296,6 +1797,14 @@ map_failed: + GST_ERROR ("failed to map buffer %p", *buffer); + return FALSE; + } ++ ++unsupported_flags: ++ { ++ GST_OBJECT_UNLOCK (data->payload); ++ gst_rtp_buffer_unmap (&rtp); ++ GST_ERROR ("Cannot add rtp header extensions with mixed header types"); ++ return FALSE; ++ } + } + + static gboolean +@@ -1340,7 +1849,6 @@ gst_rtp_base_payload_prepare_push (GstRTPBasePayload * payload, + data.seqnum = payload->seqnum; + data.ssrc = payload->current_ssrc; + data.pt = payload->pt; +- data.twcc_ext_id = priv->twcc_ext_id; + + /* find the first buffer with a timestamp */ + if (is_list) { +@@ -1461,7 +1969,7 @@ no_rate: + /** + * gst_rtp_base_payload_push_list: + * @payload: a #GstRTPBasePayload +- * @list: a #GstBufferList ++ * @list: (transfer full): a #GstBufferList + * + * Push @list to the peer element of the payloader. The SSRC, payload type, + * seqnum and timestamp of the RTP buffer will be updated first. +@@ -1495,7 +2003,7 @@ gst_rtp_base_payload_push_list (GstRTPBasePayload * payload, + /** + * gst_rtp_base_payload_push: + * @payload: a #GstRTPBasePayload +- * @buffer: a #GstBuffer ++ * @buffer: (transfer full): a #GstBuffer + * + * Push @buffer to the peer element of the payloader. The SSRC, payload type, + * seqnum and timestamp of the RTP buffer will be updated first. +@@ -1658,12 +2166,12 @@ gst_rtp_base_payload_set_property (GObject * object, guint prop_id, + case PROP_ONVIF_NO_RATE_CONTROL: + priv->onvif_no_rate_control = g_value_get_boolean (value); + break; +- case PROP_TWCC_EXT_ID: +- priv->twcc_ext_id = g_value_get_uint (value); +- break; + case PROP_SCALE_RTPTIME: + priv->scale_rtptime = g_value_get_boolean (value); + break; ++ case PROP_AUTO_HEADER_EXTENSION: ++ priv->auto_hdr_ext = g_value_get_boolean (value); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -1734,12 +2242,12 @@ gst_rtp_base_payload_get_property (GObject * object, guint prop_id, + case PROP_ONVIF_NO_RATE_CONTROL: + g_value_set_boolean (value, priv->onvif_no_rate_control); + break; +- case PROP_TWCC_EXT_ID: +- g_value_set_uint (value, priv->twcc_ext_id); +- break; + case PROP_SCALE_RTPTIME: + g_value_set_boolean (value, priv->scale_rtptime); + break; ++ case PROP_AUTO_HEADER_EXTENSION: ++ g_value_set_boolean (value, priv->auto_hdr_ext); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -1786,6 +2294,7 @@ gst_rtp_base_payload_change_state (GstElement * element, + g_atomic_int_set (&rtpbasepayload->priv->notified_first_timestamp, 1); + priv->base_offset = GST_BUFFER_OFFSET_NONE; + priv->negotiated = FALSE; ++ priv->negotiate_called = FALSE; + gst_caps_replace (&rtpbasepayload->priv->subclass_srccaps, NULL); + gst_caps_replace (&rtpbasepayload->priv->sinkcaps, NULL); + break; +diff --git a/gst-libs/gst/rtp/gstrtpbasepayload.h b/gst-libs/gst/rtp/gstrtpbasepayload.h +index 8cc78528d..00bf99e19 100644 +--- a/gst-libs/gst/rtp/gstrtpbasepayload.h ++++ b/gst-libs/gst/rtp/gstrtpbasepayload.h +@@ -160,6 +160,10 @@ GST_RTP_API + gboolean gst_rtp_base_payload_set_outcaps (GstRTPBasePayload *payload, + const 
gchar *fieldname, ...); + ++GST_RTP_API ++gboolean gst_rtp_base_payload_set_outcaps_structure (GstRTPBasePayload *payload, ++ GstStructure *s); ++ + GST_RTP_API + gboolean gst_rtp_base_payload_is_filled (GstRTPBasePayload *payload, + guint size, GstClockTime duration); +diff --git a/gst-libs/gst/rtp/gstrtpbuffer.c b/gst-libs/gst/rtp/gstrtpbuffer.c +index ae519f313..ec4b53bd7 100644 +--- a/gst-libs/gst/rtp/gstrtpbuffer.c ++++ b/gst-libs/gst/rtp/gstrtpbuffer.c +@@ -158,7 +158,7 @@ gst_rtp_buffer_allocate_data (GstBuffer * buffer, guint payload_len, + * respectively. @data will be freed when the buffer is unreffed, so this + * function transfers ownership of @data to the new buffer. + * +- * Returns: A newly allocated buffer with @data and of size @len. ++ * Returns: (transfer full): A newly allocated buffer with @data and of size @len. + */ + GstBuffer * + gst_rtp_buffer_new_take_data (gpointer data, gsize len) +@@ -179,12 +179,12 @@ gst_rtp_buffer_new_take_data (gpointer data, gsize len) + * bytes of @data and the size to @len. The data will be freed when the buffer + * is freed. + * +- * Returns: A newly allocated buffer with a copy of @data and of size @len. ++ * Returns: (transfer full): A newly allocated buffer with a copy of @data and of size @len. + */ + GstBuffer * + gst_rtp_buffer_new_copy_data (gconstpointer data, gsize len) + { +- return gst_rtp_buffer_new_take_data (g_memdup (data, len), len); ++ return gst_rtp_buffer_new_take_data (g_memdup2 (data, len), len); + } + + /** +@@ -197,7 +197,7 @@ gst_rtp_buffer_new_copy_data (gconstpointer data, gsize len) + * @csrc_count CSRCs, a payload length of @payload_len and padding of @pad_len. + * All other RTP header fields will be set to 0/FALSE. + * +- * Returns: A newly allocated buffer that can hold an RTP packet with given ++ * Returns: (transfer full): A newly allocated buffer that can hold an RTP packet with given + * parameters. + */ + GstBuffer * +@@ -225,7 +225,7 @@ gst_rtp_buffer_new_allocate (guint payload_len, guint8 pad_len, + * @csrc_count and can be calculated with gst_rtp_buffer_calc_payload_len(). + * All RTP header fields will be set to 0/FALSE. + * +- * Returns: A newly allocated buffer that can hold an RTP packet of @packet_len. ++ * Returns: (transfer full): A newly allocated buffer that can hold an RTP packet of @packet_len. + */ + GstBuffer * + gst_rtp_buffer_new_allocate_len (guint packet_len, guint8 pad_len, +@@ -684,9 +684,9 @@ gst_rtp_buffer_set_extension (GstRTPBuffer * rtp, gboolean extension) + /** + * gst_rtp_buffer_get_extension_data: (skip) + * @rtp: the RTP packet +- * @bits: (out): location for result bits +- * @data: (out) (array) (element-type guint8) (transfer none): location for data +- * @wordlen: (out): location for length of @data in 32 bits words ++ * @bits: (optional) (out): location for result bits ++ * @data: (optional) (out) (array) (element-type guint8) (transfer none): location for data ++ * @wordlen: (optional) (out): location for length of @data in 32 bits words + * + * Get the extension data. @bits will contain the extension 16 bits of custom + * data. @data will point to the data in the extension and @wordlen will contain +@@ -733,7 +733,7 @@ gst_rtp_buffer_get_extension_data (GstRTPBuffer * rtp, guint16 * bits, + * @bits unchanged. If there is an extension header but no extension data then + * an empty #GBytes will be returned. 
+ * +- * Returns: (transfer full): A new #GBytes if an extension header was present ++ * Returns: (transfer full) (nullable): A new #GBytes if an extension header was present + * and %NULL otherwise. + * + * Since: 1.2 +@@ -833,6 +833,8 @@ ensure_buffers (GstRTPBuffer * rtp) + * extension header. If the existing extension data is not large enough, it will + * be made larger. + * ++ * Will also shorten the extension data from 1.20. ++ * + * Returns: True if done. + */ + gboolean +@@ -856,15 +858,23 @@ gst_rtp_buffer_set_extension_data (GstRTPBuffer * rtp, guint16 bits, + mem = gst_allocator_alloc (NULL, min_size, NULL); + + if (rtp->data[1]) { +- /* copy old data */ ++ /* copy old data & initialize the remainder of the new buffer */ + gst_memory_map (mem, &map, GST_MAP_WRITE); + memcpy (map.data, rtp->data[1], rtp->size[1]); ++ if (min_size > rtp->size[1]) { ++ memset (map.data + rtp->size[1], 0, min_size - rtp->size[1]); ++ } + gst_memory_unmap (mem, &map); + + /* unmap old */ + gst_buffer_unmap (rtp->buffer, &rtp->map[1]); + gst_buffer_replace_memory (rtp->buffer, 1, mem); + } else { ++ /* don't leak data from uninitialized memory via the padding */ ++ gst_memory_map (mem, &map, GST_MAP_WRITE); ++ memset (map.data, 0, map.size); ++ gst_memory_unmap (mem, &map); ++ + /* we didn't have extension data, add */ + gst_buffer_insert_memory (rtp->buffer, 1, mem); + } +@@ -874,6 +884,15 @@ gst_rtp_buffer_set_extension_data (GstRTPBuffer * rtp, guint16 bits, + gst_memory_ref (mem); + rtp->data[1] = rtp->map[1].data; + rtp->size[1] = rtp->map[1].size; ++ } else if (min_size < rtp->size[1]) { ++ GstMemory *mem = rtp->map[1].memory; ++ ++ gst_memory_ref (mem); ++ gst_buffer_unmap (rtp->buffer, &rtp->map[1]); ++ gst_memory_resize (mem, 0, min_size); ++ gst_memory_map (mem, &rtp->map[1], GST_MAP_READWRITE); ++ rtp->data[1] = rtp->map[1].data; ++ rtp->size[1] = rtp->map[1].size; + } + + /* now we can set the extension bit */ +@@ -887,6 +906,37 @@ gst_rtp_buffer_set_extension_data (GstRTPBuffer * rtp, guint16 bits, + return TRUE; + } + ++/** ++ * gst_rtp_buffer_remove_extension_data: ++ * @rtp: the RTP packet ++ * ++ * Unsets the extension bit of the RTP buffer and removes the extension header ++ * and data. ++ * ++ * If the RTP buffer has no header extension data, the action has no effect. ++ * The RTP buffer must be mapped READWRITE only once and the underlying ++ * GstBuffer must be writable. ++ * ++ * Since: 1.20 ++ */ ++void ++gst_rtp_buffer_remove_extension_data (GstRTPBuffer * rtp) ++{ ++ g_return_if_fail (gst_buffer_is_writable (rtp->buffer)); ++ g_return_if_fail (rtp->map[0].flags & GST_MAP_WRITE); ++ ++ if (rtp->data[1] != NULL) { ++ GstBuffer *buf = rtp->buffer; ++ ++ ensure_buffers (rtp); ++ ++ GST_RTP_HEADER_EXTENSION (rtp->data[0]) = FALSE; ++ gst_rtp_buffer_unmap (rtp); ++ gst_buffer_remove_memory (buf, 1); ++ gst_rtp_buffer_map (buf, GST_MAP_READWRITE, rtp); ++ } ++} ++ + /** + * gst_rtp_buffer_get_ssrc: + * @rtp: the RTP packet +@@ -1090,7 +1140,7 @@ gst_rtp_buffer_set_timestamp (GstRTPBuffer * rtp, guint32 timestamp) + * are skipped in the payload and the subbuffer will be of size @len. + * If @len is -1 the total payload starting from @offset is subbuffered. + * +- * Returns: A new buffer with the specified data of the payload. ++ * Returns: (transfer full): A new buffer with the specified data of the payload. 
+ */ + GstBuffer * + gst_rtp_buffer_get_payload_subbuffer (GstRTPBuffer * rtp, guint offset, +@@ -1130,7 +1180,7 @@ wrong_offset: + * will internally create a subbuffer of @buffer so that a memcpy can be + * avoided. + * +- * Returns: A new buffer with the data of the payload. ++ * Returns: (transfer full): A new buffer with the data of the payload. + */ + GstBuffer * + gst_rtp_buffer_get_payload_buffer (GstRTPBuffer * rtp) +@@ -1160,7 +1210,7 @@ gst_rtp_buffer_get_payload_len (GstRTPBuffer * rtp) + * Get a pointer to the payload data in @buffer. This pointer is valid as long + * as a reference to @buffer is held. + * +- * Returns: (array) (element-type guint8) (transfer none): A pointer ++ * Returns: (array) (element-type guint8) (transfer none) (nullable): A pointer + * to the payload data in @buffer. + */ + gpointer +@@ -1183,7 +1233,7 @@ gst_rtp_buffer_get_payload (GstRTPBuffer * rtp) + * bindings usage. The return value is a pointer to a #GBytes structure + * containing the payload data in @rtp. + * +- * Returns: (transfer full): A new #GBytes containing the payload data in @rtp. ++ * Returns: (transfer full) (nullable): A new #GBytes containing the payload data in @rtp. + * + * Since: 1.2 + */ +@@ -1276,7 +1326,7 @@ gst_rtp_buffer_ext_timestamp (guint64 * exttimestamp, guint32 timestamp) + ext = *exttimestamp; + + if (ext == -1) { +- result = timestamp; ++ result = (G_GUINT64_CONSTANT (1) << 32) + timestamp; + } else { + /* pick wraparound counter from previous timestamp and add to new timestamp */ + result = timestamp + (ext & ~(G_GUINT64_CONSTANT (0xffffffff))); +@@ -1407,9 +1457,9 @@ gst_rtp_buffer_get_extension_onebyte_header_from_bytes (GBytes * bytes, + * @rtp: the RTP packet + * @id: The ID of the header extension to be read (between 1 and 14). + * @nth: Read the nth extension packet with the requested ID +- * @data: (out) (array length=size) (element-type guint8) (transfer none): ++ * @data: (optional) (out) (array length=size) (element-type guint8) (transfer none): + * location for data +- * @size: (out): the size of the data in bytes ++ * @size: (optional) (out): the size of the data in bytes + * + * Parses RFC 5285 style header extensions with a one byte header. It will + * return the nth extension with the requested id. +@@ -1437,12 +1487,12 @@ gst_rtp_buffer_get_extension_onebyte_header (GstRTPBuffer * rtp, guint8 id, + /** + * gst_rtp_buffer_get_extension_twobytes_header: + * @rtp: the RTP packet +- * @appbits: (out): Application specific bits ++ * @appbits: (optional) (out): Application specific bits + * @id: The ID of the header extension to be read (between 1 and 14). + * @nth: Read the nth extension packet with the requested ID +- * @data: (out) (array length=size) (element-type guint8) (transfer none): ++ * @data: (optional) (out) (array length=size) (element-type guint8) (transfer none): + * location for data +- * @size: (out): the size of the data in bytes ++ * @size: (optional) (out): the size of the data in bytes + * + * Parses RFC 5285 style header extensions with a two bytes header. It will + * return the nth extension with the requested id. 
+diff --git a/gst-libs/gst/rtp/gstrtpbuffer.h b/gst-libs/gst/rtp/gstrtpbuffer.h +index ae3af1438..cac8998c2 100644 +--- a/gst-libs/gst/rtp/gstrtpbuffer.h ++++ b/gst-libs/gst/rtp/gstrtpbuffer.h +@@ -135,6 +135,9 @@ GBytes* gst_rtp_buffer_get_extension_bytes (GstRTPBuffer *rtp, guint16 + GST_RTP_API + gboolean gst_rtp_buffer_set_extension_data (GstRTPBuffer *rtp, guint16 bits, guint16 length); + ++GST_RTP_API ++void gst_rtp_buffer_remove_extension_data (GstRTPBuffer *rtp); ++ + GST_RTP_API + guint32 gst_rtp_buffer_get_ssrc (GstRTPBuffer *rtp); + +diff --git a/gst-libs/gst/rtp/gstrtphdrext.c b/gst-libs/gst/rtp/gstrtphdrext.c +index f653f2f3d..fd683c5c6 100644 +--- a/gst-libs/gst/rtp/gstrtphdrext.c ++++ b/gst-libs/gst/rtp/gstrtphdrext.c +@@ -1,5 +1,6 @@ + /* GStreamer + * Copyright (C) <2012> Wim Taymans ++ * Copyright (C) <2020> Matthew Waters + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public +@@ -33,6 +34,26 @@ + #include + #include + ++static gboolean ++gst_rtp_header_extension_set_caps_from_attributes_default (GstRTPHeaderExtension ++ * ext, GstCaps * caps); ++ ++GST_DEBUG_CATEGORY_STATIC (rtphderext_debug); ++#define GST_CAT_DEFAULT (rtphderext_debug) ++ ++#define MAX_RTP_EXT_ID 256 ++ ++#define GST_RTP_HEADER_EXTENSION_DIRECTION_DEFAULT \ ++ (GST_RTP_HEADER_EXTENSION_DIRECTION_SENDRECV | \ ++ GST_RTP_HEADER_EXTENSION_DIRECTION_INHERITED) ++ ++typedef struct ++{ ++ guint ext_id; ++ gboolean wants_update_non_rtp_src_caps; ++ GstRTPHeaderExtensionDirection direction; ++} GstRTPHeaderExtensionPrivate; ++ + /** + * gst_rtp_hdrext_set_ntp_64: + * @data: the data to write to +@@ -135,3 +156,761 @@ gst_rtp_hdrext_get_ntp_56 (gpointer data, guint size, guint64 * ntptime) + } + return TRUE; + } ++ ++#define gst_rtp_header_extension_parent_class parent_class ++G_DEFINE_TYPE_EXTENDED (GstRTPHeaderExtension, gst_rtp_header_extension, ++ GST_TYPE_ELEMENT, G_TYPE_FLAG_ABSTRACT, ++ G_ADD_PRIVATE (GstRTPHeaderExtension) ++ GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "rtphdrext", 0, ++ "RTP Header Extensions") ++ ); ++ ++/** ++ * gst_rtp_header_extension_class_set_uri: ++ * @klass: the #GstRTPHeaderExtensionClass ++ * @uri: the RTP Header extension uri for @klass ++ * ++ * Set the URI for this RTP header extension implementation. 
++ * ++ * Since: 1.20 ++ */ ++void ++gst_rtp_header_extension_class_set_uri (GstRTPHeaderExtensionClass * klass, ++ const gchar * uri) ++{ ++ GstElementClass *element_class = GST_ELEMENT_CLASS (klass); ++ ++ gst_element_class_add_metadata (element_class, ++ GST_RTP_HEADER_EXTENSION_URI_METADATA_KEY, uri); ++} ++ ++static void ++gst_rtp_header_extension_class_init (GstRTPHeaderExtensionClass * klass) ++{ ++ klass->set_caps_from_attributes = ++ gst_rtp_header_extension_set_caps_from_attributes_default; ++} ++ ++static void ++gst_rtp_header_extension_init (GstRTPHeaderExtension * ext) ++{ ++ GstRTPHeaderExtensionPrivate *priv = ++ gst_rtp_header_extension_get_instance_private (ext); ++ ++ priv->ext_id = G_MAXUINT32; ++ priv->direction = GST_RTP_HEADER_EXTENSION_DIRECTION_DEFAULT; ++} ++ ++/** ++ * gst_rtp_header_extension_get_uri: ++ * @ext: a #GstRTPHeaderExtension ++ * ++ * Returns: (nullable): the RTP extension URI for this object ++ * ++ * Since: 1.20 ++ */ ++const gchar * ++gst_rtp_header_extension_get_uri (GstRTPHeaderExtension * ext) ++{ ++ GstRTPHeaderExtensionClass *klass; ++ GstElementClass *element_class; ++ ++ g_return_val_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext), NULL); ++ klass = GST_RTP_HEADER_EXTENSION_GET_CLASS (ext); ++ element_class = GST_ELEMENT_CLASS (klass); ++ ++ return gst_element_class_get_metadata (element_class, ++ GST_RTP_HEADER_EXTENSION_URI_METADATA_KEY); ++} ++ ++/** ++ * gst_rtp_header_extension_get_supported_flags: ++ * @ext: a #GstRTPHeaderExtension ++ * ++ * Returns: the flags supported by this instance of @ext ++ * ++ * Since: 1.20 ++ */ ++GstRTPHeaderExtensionFlags ++gst_rtp_header_extension_get_supported_flags (GstRTPHeaderExtension * ext) ++{ ++ GstRTPHeaderExtensionClass *klass; ++ ++ g_return_val_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext), 0); ++ klass = GST_RTP_HEADER_EXTENSION_GET_CLASS (ext); ++ g_return_val_if_fail (klass->get_supported_flags != NULL, 0); ++ ++ return klass->get_supported_flags (ext); ++} ++ ++/** ++ * gst_rtp_header_extension_get_max_size: ++ * @ext: a #GstRTPHeaderExtension ++ * @input_meta: a #GstBuffer ++ * ++ * This is used to know how much data a certain header extension will need for ++ * both allocating the resulting data, and deciding how much payload data can ++ * be generated. ++ * ++ * Implementations should return as accurate a value as is possible using the ++ * information given in the input @buffer. ++ * ++ * Returns: the maximum size of the data written by this extension ++ * ++ * Since: 1.20 ++ */ ++gsize ++gst_rtp_header_extension_get_max_size (GstRTPHeaderExtension * ext, ++ const GstBuffer * input_meta) ++{ ++ GstRTPHeaderExtensionClass *klass; ++ ++ g_return_val_if_fail (GST_IS_BUFFER (input_meta), 0); ++ g_return_val_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext), 0); ++ klass = GST_RTP_HEADER_EXTENSION_GET_CLASS (ext); ++ g_return_val_if_fail (klass->get_max_size != NULL, 0); ++ ++ return klass->get_max_size (ext, input_meta); ++} ++ ++/** ++ * gst_rtp_header_extension_write: ++ * @ext: a #GstRTPHeaderExtension ++ * @input_meta: the input #GstBuffer to read information from if necessary ++ * @write_flags: #GstRTPHeaderExtensionFlags for how the extension should ++ * be written ++ * @output: output RTP #GstBuffer ++ * @data: (array length=size): location to write the rtp header extension into ++ * @size: size of @data ++ * ++ * Writes the RTP header extension to @data using information available from ++ * the @input_meta. 
@data will be sized to be at least the value returned ++ * from gst_rtp_header_extension_get_max_size(). ++ * ++ * Returns: the size of the data written, < 0 on failure ++ * ++ * Since: 1.20 ++ */ ++gssize ++gst_rtp_header_extension_write (GstRTPHeaderExtension * ext, ++ const GstBuffer * input_meta, GstRTPHeaderExtensionFlags write_flags, ++ GstBuffer * output, guint8 * data, gsize size) ++{ ++ GstRTPHeaderExtensionPrivate *priv = ++ gst_rtp_header_extension_get_instance_private (ext); ++ GstRTPHeaderExtensionClass *klass; ++ ++ g_return_val_if_fail (GST_IS_BUFFER (input_meta), -1); ++ g_return_val_if_fail (GST_IS_BUFFER (output), -1); ++ g_return_val_if_fail (gst_buffer_is_writable (output), -1); ++ g_return_val_if_fail (data != NULL, -1); ++ g_return_val_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext), -1); ++ g_return_val_if_fail (priv->ext_id <= MAX_RTP_EXT_ID, -1); ++ klass = GST_RTP_HEADER_EXTENSION_GET_CLASS (ext); ++ g_return_val_if_fail (klass->write != NULL, -1); ++ ++ return klass->write (ext, input_meta, write_flags, output, data, size); ++} ++ ++/** ++ * gst_rtp_header_extension_read: ++ * @ext: a #GstRTPHeaderExtension ++ * @read_flags: #GstRTPHeaderExtensionFlags for how the extension should ++ * be written ++ * @data: (array length=size): location to read the rtp header extension from ++ * @size: size of @data ++ * @buffer: a #GstBuffer to modify if necessary ++ * ++ * Read the RTP header extension from @data. ++ * ++ * Returns: whether the extension could be read from @data ++ * ++ * Since: 1.20 ++ */ ++gboolean ++gst_rtp_header_extension_read (GstRTPHeaderExtension * ext, ++ GstRTPHeaderExtensionFlags read_flags, const guint8 * data, gsize size, ++ GstBuffer * buffer) ++{ ++ GstRTPHeaderExtensionPrivate *priv = ++ gst_rtp_header_extension_get_instance_private (ext); ++ GstRTPHeaderExtensionClass *klass; ++ ++ g_return_val_if_fail (GST_IS_BUFFER (buffer), FALSE); ++ g_return_val_if_fail (gst_buffer_is_writable (buffer), FALSE); ++ g_return_val_if_fail (data != NULL, FALSE); ++ g_return_val_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext), FALSE); ++ g_return_val_if_fail (priv->ext_id <= MAX_RTP_EXT_ID, FALSE); ++ klass = GST_RTP_HEADER_EXTENSION_GET_CLASS (ext); ++ g_return_val_if_fail (klass->read != NULL, FALSE); ++ ++ return klass->read (ext, read_flags, data, size, buffer); ++} ++ ++/** ++ * gst_rtp_header_extension_get_id: ++ * @ext: a #GstRTPHeaderExtension ++ * ++ * Returns: the RTP extension id configured on @ext ++ * ++ * Since: 1.20 ++ */ ++guint ++gst_rtp_header_extension_get_id (GstRTPHeaderExtension * ext) ++{ ++ GstRTPHeaderExtensionPrivate *priv = ++ gst_rtp_header_extension_get_instance_private (ext); ++ ++ g_return_val_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext), 0); ++ ++ return priv->ext_id; ++} ++ ++/** ++ * gst_rtp_header_extension_set_id: ++ * @ext: a #GstRTPHeaderExtension ++ * @ext_id: The id of this extension ++ * ++ * sets the RTP extension id on @ext ++ * ++ * Since: 1.20 ++ */ ++void ++gst_rtp_header_extension_set_id (GstRTPHeaderExtension * ext, guint ext_id) ++{ ++ GstRTPHeaderExtensionPrivate *priv = ++ gst_rtp_header_extension_get_instance_private (ext); ++ ++ g_return_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext)); ++ g_return_if_fail (ext_id < MAX_RTP_EXT_ID); ++ ++ priv->ext_id = ext_id; ++} ++ ++static int ++strcasecmp0 (const gchar * str1, const gchar * str2) ++{ ++ if (!str1) ++ return -(str1 != str2); ++ if (!str2) ++ return str1 != str2; ++ ++ return g_ascii_strcasecmp (str1, str2); ++} ++ ++/** ++ * 
gst_rtp_header_extension_set_attributes_from_caps: ++ * @ext: a #GstRTPHeaderExtension ++ * @caps: the #GstCaps to configure this extension with ++ * ++ * gst_rtp_header_extension_set_id() must have been called with a valid ++ * extension id that is contained in these caps. ++ * ++ * The only current known caps format is based on the SDP standard as produced ++ * by gst_sdp_media_attributes_to_caps(). ++ * ++ * Returns: whether the @caps could be successfully set on @ext. ++ * ++ * Since: 1.20 ++ */ ++gboolean ++gst_rtp_header_extension_set_attributes_from_caps (GstRTPHeaderExtension * ext, ++ const GstCaps * caps) ++{ ++ GstRTPHeaderExtensionPrivate *priv = ++ gst_rtp_header_extension_get_instance_private (ext); ++ GstRTPHeaderExtensionClass *klass; ++ GstStructure *structure; ++ gchar *field_name; ++ const GValue *val; ++ GstRTPHeaderExtensionDirection direction = ++ GST_RTP_HEADER_EXTENSION_DIRECTION_DEFAULT; ++ const gchar *attributes = ""; ++ gboolean ret = FALSE; ++ ++ g_return_val_if_fail (GST_IS_CAPS (caps), FALSE); ++ g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE); ++ g_return_val_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext), FALSE); ++ g_return_val_if_fail (priv->ext_id <= MAX_RTP_EXT_ID, FALSE); ++ klass = GST_RTP_HEADER_EXTENSION_GET_CLASS (ext); ++ ++ structure = gst_caps_get_structure (caps, 0); ++ g_return_val_if_fail (structure != NULL, FALSE); ++ field_name = g_strdup_printf ("extmap-%u", priv->ext_id); ++ g_return_val_if_fail (gst_structure_has_field (structure, field_name), FALSE); ++ ++ val = gst_structure_get_value (structure, field_name); ++ ++ if (G_VALUE_HOLDS_STRING (val)) { ++ const gchar *ext_uri = g_value_get_string (val); ++ ++ if (strcasecmp0 (ext_uri, gst_rtp_header_extension_get_uri (ext)) != 0) { ++ /* incompatible extension uri for this instance */ ++ GST_WARNING_OBJECT (ext, "Field %s, URI doesn't match RTP Header" ++ " extension: \"%s\", expected \"%s\"", field_name, ext_uri, ++ gst_rtp_header_extension_get_uri (ext)); ++ goto done; ++ } ++ } else if (GST_VALUE_HOLDS_ARRAY (val) ++ && gst_value_array_get_size (val) == 3) { ++ const GValue *inner_val; ++ ++ inner_val = gst_value_array_get_value (val, 0); ++ if (G_VALUE_HOLDS_STRING (inner_val)) { ++ const gchar *dir = g_value_get_string (inner_val); ++ ++ if (!strcasecmp0 (dir, "")) ++ direction = GST_RTP_HEADER_EXTENSION_DIRECTION_DEFAULT; ++ else if (!strcasecmp0 (dir, "sendrecv")) ++ direction = GST_RTP_HEADER_EXTENSION_DIRECTION_SENDRECV; ++ else if (!strcasecmp0 (dir, "sendonly")) ++ direction = GST_RTP_HEADER_EXTENSION_DIRECTION_SENDONLY; ++ else if (!strcasecmp0 (dir, "recvonly")) ++ direction = GST_RTP_HEADER_EXTENSION_DIRECTION_RECVONLY; ++ else if (!strcasecmp0 (dir, "inactive")) ++ direction = GST_RTP_HEADER_EXTENSION_DIRECTION_INACTIVE; ++ else { ++ GST_WARNING_OBJECT (ext, "Unexpected direction \"%s\", expected one" ++ " of: sendrecv, sendonly, recvonly or inactive", dir); ++ goto done; ++ } ++ } else { ++ GST_WARNING_OBJECT (ext, "Caps should hold an array of 3 strings, " ++ "but first member is %s instead", G_VALUE_TYPE_NAME (inner_val)); ++ goto done; ++ } ++ ++ inner_val = gst_value_array_get_value (val, 1); ++ if (!G_VALUE_HOLDS_STRING (inner_val)) { ++ GST_WARNING_OBJECT (ext, "Caps should hold an array of 3 strings, " ++ "but second member is %s instead", G_VALUE_TYPE_NAME (inner_val)); ++ ++ goto done; ++ } ++ if (strcasecmp0 (g_value_get_string (inner_val), ++ gst_rtp_header_extension_get_uri (ext)) != 0) { ++ GST_WARNING_OBJECT (ext, "URI doesn't match RTP Header extension:" 
++ " \"%s\", expected \"%s\"", g_value_get_string (inner_val), ++ gst_rtp_header_extension_get_uri (ext)); ++ goto done; ++ } ++ ++ inner_val = gst_value_array_get_value (val, 2); ++ if (!G_VALUE_HOLDS_STRING (inner_val)) { ++ GST_WARNING_OBJECT (ext, "Caps should hold an array of 3 strings, " ++ "but third member is %s instead", G_VALUE_TYPE_NAME (inner_val)); ++ goto done; ++ } ++ ++ attributes = g_value_get_string (inner_val); ++ } else { ++ GST_WARNING_OBJECT (ext, "Caps field %s should be either a string" ++ " containing the URI or an array of 3 strings containing the" ++ " direction, URI and attributes, but contains %s", field_name, ++ G_VALUE_TYPE_NAME (val)); ++ goto done; ++ } ++ ++ /* If the caps don't include directions, use the ones that were ++ * previously set by the application. ++ */ ++ if (direction == GST_RTP_HEADER_EXTENSION_DIRECTION_DEFAULT && ++ priv->direction & GST_RTP_HEADER_EXTENSION_DIRECTION_INHERITED) ++ direction = priv->direction; ++ ++ if (klass->set_attributes) ++ ret = klass->set_attributes (ext, direction, attributes); ++ else ++ ret = TRUE; ++ ++ if (ret) ++ priv->direction = direction; ++ ++done: ++ ++ g_free (field_name); ++ return ret; ++} ++ ++/** ++ * gst_rtp_header_extension_wants_update_non_rtp_src_caps: ++ * @ext: a #GstRTPHeaderExtension ++ * ++ * Call this function after gst_rtp_header_extension_read() to check if ++ * the depayloader's src caps need updating with data received in the last RTP ++ * packet. ++ * ++ * Returns: Whether @ext wants to update depayloader's src caps. ++ * ++ * Since: 1.20 ++ */ ++gboolean ++gst_rtp_header_extension_wants_update_non_rtp_src_caps (GstRTPHeaderExtension * ++ ext) ++{ ++ GstRTPHeaderExtensionPrivate *priv = ++ gst_rtp_header_extension_get_instance_private (ext); ++ ++ g_return_val_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext), FALSE); ++ ++ return priv->wants_update_non_rtp_src_caps; ++} ++ ++/** ++ * gst_rtp_header_extension_set_wants_update_non_rtp_src_caps: ++ * @ext: a #GstRTPHeaderExtension ++ * @state: TRUE if caps update is needed ++ * ++ * Call this function in a subclass from #GstRTPHeaderExtensionClass::read to ++ * tell the depayloader whether the data just parsed from RTP packet require ++ * updating its src (non-RTP) caps. If @state is TRUE, #GstRTPBaseDepayload will ++ * eventually invoke gst_rtp_header_extension_update_non_rtp_src_caps() to ++ * have the caps update applied. Applying the update also flips the internal ++ * "wants update" flag back to FALSE. ++ * ++ * Since: 1.20 ++ */ ++void gst_rtp_header_extension_set_wants_update_non_rtp_src_caps ++ (GstRTPHeaderExtension * ext, gboolean state) ++{ ++ GstRTPHeaderExtensionPrivate *priv = ++ gst_rtp_header_extension_get_instance_private (ext); ++ ++ g_return_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext)); ++ ++ priv->wants_update_non_rtp_src_caps = state; ++} ++ ++/** ++ * gst_rtp_header_extension_set_non_rtp_sink_caps: ++ * @ext: a #GstRTPHeaderExtension ++ * @caps: sink #GstCaps ++ * ++ * Passes RTP payloader's sink (i.e. not payloaded) @caps to the header ++ * extension. 
++ * ++ * Returns: Whether @caps could be read successfully ++ * ++ * Since: 1.20 ++ */ ++gboolean ++gst_rtp_header_extension_set_non_rtp_sink_caps (GstRTPHeaderExtension * ext, ++ const GstCaps * caps) ++{ ++ GstRTPHeaderExtensionPrivate *priv = ++ gst_rtp_header_extension_get_instance_private (ext); ++ GstRTPHeaderExtensionClass *klass; ++ ++ g_return_val_if_fail (GST_IS_CAPS (caps), FALSE); ++ g_return_val_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext), FALSE); ++ g_return_val_if_fail (priv->ext_id <= MAX_RTP_EXT_ID, FALSE); ++ klass = GST_RTP_HEADER_EXTENSION_GET_CLASS (ext); ++ ++ if (klass->set_non_rtp_sink_caps) { ++ return klass->set_non_rtp_sink_caps (ext, caps); ++ } ++ ++ return TRUE; ++} ++ ++/** ++ * gst_rtp_header_extension_update_non_rtp_src_caps: ++ * @ext: a #GstRTPHeaderExtension ++ * @caps: src #GstCaps to modify ++ * ++ * Updates depayloader src caps based on the information received in RTP header. ++ * @caps must be writable as this function may modify them. ++ * ++ * Returns: whether @caps were modified successfully ++ * ++ * Since: 1.20 ++ */ ++gboolean ++gst_rtp_header_extension_update_non_rtp_src_caps (GstRTPHeaderExtension * ext, ++ GstCaps * caps) ++{ ++ GstRTPHeaderExtensionPrivate *priv = ++ gst_rtp_header_extension_get_instance_private (ext); ++ GstRTPHeaderExtensionClass *klass; ++ ++ g_return_val_if_fail (GST_IS_CAPS (caps), FALSE); ++ g_return_val_if_fail (gst_caps_is_writable (caps), FALSE); ++ g_return_val_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext), FALSE); ++ g_return_val_if_fail (priv->ext_id <= MAX_RTP_EXT_ID, FALSE); ++ klass = GST_RTP_HEADER_EXTENSION_GET_CLASS (ext); ++ ++ priv->wants_update_non_rtp_src_caps = FALSE; ++ ++ if (klass->update_non_rtp_src_caps) { ++ return klass->update_non_rtp_src_caps (ext, caps); ++ } ++ ++ return TRUE; ++} ++ ++/** ++ * gst_rtp_header_extension_set_caps_from_attributes: ++ * @ext: a #GstRTPHeaderExtension ++ * @caps: writable #GstCaps to modify ++ * ++ * gst_rtp_header_extension_set_id() must have been called with a valid ++ * extension id that is contained in these caps. ++ * ++ * The only current known caps format is based on the SDP standard as produced ++ * by gst_sdp_media_attributes_to_caps(). 
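As a usage sketch for the attribute/caps plumbing documented here (the helper name and the
URI handling are illustrative; an element implementing the URI must be registered by some
plugin), an application could configure an extension from SDP-derived caps roughly like this:

    #include <gst/rtp/gstrtphdrext.h>

    /* Illustrative helper: instantiate and configure an extension for one
     * "extmap-<id>" entry found in caps produced from an SDP media. */
    static GstRTPHeaderExtension *
    ext_from_sdp_caps (const GstCaps * caps, guint ext_id, const gchar * uri)
    {
      GstRTPHeaderExtension *ext;

      ext = gst_rtp_header_extension_create_from_uri (uri);
      if (ext == NULL)
        return NULL;              /* no element registered for this URI */

      gst_rtp_header_extension_set_id (ext, ext_id);

      if (!gst_rtp_header_extension_set_attributes_from_caps (ext, caps)) {
        gst_object_unref (ext);
        return NULL;
      }

      return ext;
    }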
++ * ++ * Returns: whether the configured attributes on @ext can successfully be set on ++ * @caps ++ * ++ * Since: 1.20 ++ */ ++gboolean ++gst_rtp_header_extension_set_caps_from_attributes (GstRTPHeaderExtension * ext, ++ GstCaps * caps) ++{ ++ GstRTPHeaderExtensionPrivate *priv = ++ gst_rtp_header_extension_get_instance_private (ext); ++ GstRTPHeaderExtensionClass *klass; ++ ++ g_return_val_if_fail (GST_IS_CAPS (caps), FALSE); ++ g_return_val_if_fail (gst_caps_is_writable (caps), FALSE); ++ g_return_val_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext), FALSE); ++ g_return_val_if_fail (priv->ext_id <= MAX_RTP_EXT_ID, FALSE); ++ klass = GST_RTP_HEADER_EXTENSION_GET_CLASS (ext); ++ g_return_val_if_fail (klass->set_caps_from_attributes != NULL, FALSE); ++ ++ return klass->set_caps_from_attributes (ext, caps); ++} ++ ++/** ++ * gst_rtp_header_extension_get_sdp_caps_field_name: ++ * @ext: the #GstRTPHeaderExtension ++ * ++ * Returns: (transfer full): the #GstStructure field name used in SDP-like #GstCaps for this @ext configuration ++ * ++ * Since: 1.20 ++ */ ++gchar * ++gst_rtp_header_extension_get_sdp_caps_field_name (GstRTPHeaderExtension * ext) ++{ ++ GstRTPHeaderExtensionPrivate *priv = ++ gst_rtp_header_extension_get_instance_private (ext); ++ ++ g_return_val_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext), NULL); ++ g_return_val_if_fail (priv->ext_id <= MAX_RTP_EXT_ID, NULL); ++ ++ return g_strdup_printf ("extmap-%u", priv->ext_id); ++} ++ ++/** ++ * gst_rtp_header_extension_set_caps_from_attributes_helper: ++ * @ext: the #GstRTPHeaderExtension ++ * @caps: #GstCaps to write fields into ++ * ++ * Helper implementation for GstRTPExtensionClass::set_caps_from_attributes ++ * that sets the @ext uri on caps with the specified extension id as required ++ * for sdp #GstCaps. ++ * ++ * Requires that the extension does not have any attributes or direction ++ * advertised in @caps. ++ * ++ * Returns: whether the @ext attributes could be set on @caps. 
++ * ++ * Since: 1.20 ++ */ ++gboolean ++gst_rtp_header_extension_set_caps_from_attributes_helper (GstRTPHeaderExtension ++ * ext, GstCaps * caps, const gchar * attributes) ++{ ++ GstRTPHeaderExtensionPrivate *priv = ++ gst_rtp_header_extension_get_instance_private (ext); ++ gchar *field_name = gst_rtp_header_extension_get_sdp_caps_field_name (ext); ++ GstStructure *s = gst_caps_get_structure (caps, 0); ++ ++ if (priv->direction & GST_RTP_HEADER_EXTENSION_DIRECTION_INHERITED && ++ (attributes == NULL || attributes[0] == 0)) { ++ gst_structure_set (s, field_name, G_TYPE_STRING, ++ gst_rtp_header_extension_get_uri (ext), NULL); ++ } else { ++ GValue arr = G_VALUE_INIT; ++ GValue val = G_VALUE_INIT; ++ ++ g_value_init (&arr, GST_TYPE_ARRAY); ++ g_value_init (&val, G_TYPE_STRING); ++ ++ if (priv->direction & GST_RTP_HEADER_EXTENSION_DIRECTION_INHERITED) { ++ g_value_set_string (&val, ""); ++ } else { ++ if ((priv->direction & GST_RTP_HEADER_EXTENSION_DIRECTION_SENDRECV) == ++ GST_RTP_HEADER_EXTENSION_DIRECTION_SENDRECV) ++ g_value_set_string (&val, "sendrecv"); ++ else if (priv->direction & GST_RTP_HEADER_EXTENSION_DIRECTION_SENDONLY) ++ g_value_set_string (&val, "sendonly"); ++ else if (priv->direction & GST_RTP_HEADER_EXTENSION_DIRECTION_RECVONLY) ++ g_value_set_string (&val, "recvonly"); ++ else ++ g_value_set_string (&val, "inactive"); ++ } ++ gst_value_array_append_value (&arr, &val); ++ ++ /* uri */ ++ g_value_set_string (&val, gst_rtp_header_extension_get_uri (ext)); ++ gst_value_array_append_value (&arr, &val); ++ ++ /* attributes */ ++ g_value_set_string (&val, attributes); ++ gst_value_array_append_value (&arr, &val); ++ ++ gst_structure_set_value (s, field_name, &arr); ++ ++ GST_DEBUG_OBJECT (ext, "%" GST_PTR_FORMAT, caps); ++ ++ g_value_unset (&val); ++ g_value_unset (&arr); ++ } ++ ++ g_free (field_name); ++ return TRUE; ++} ++ ++static gboolean ++gst_rtp_header_extension_set_caps_from_attributes_default (GstRTPHeaderExtension ++ * ext, GstCaps * caps) ++{ ++ return gst_rtp_header_extension_set_caps_from_attributes_helper (ext, caps, ++ NULL); ++} ++ ++static gboolean ++gst_rtp_ext_list_filter (GstPluginFeature * feature, gpointer user_data) ++{ ++ GstElementFactory *factory; ++ gchar *uri = user_data; ++ const gchar *klass, *factory_uri; ++ guint rank; ++ ++ /* we only care about element factories */ ++ if (!GST_IS_ELEMENT_FACTORY (feature)) ++ return FALSE; ++ ++ factory = GST_ELEMENT_FACTORY (feature); ++ ++ /* only select elements with autoplugging rank */ ++ rank = gst_plugin_feature_get_rank (feature); ++ if (rank < GST_RANK_MARGINAL) ++ return FALSE; ++ ++ klass = ++ gst_element_factory_get_metadata (factory, GST_ELEMENT_METADATA_KLASS); ++ if (!strstr (klass, "Network") || !strstr (klass, "Extension") || ++ !strstr (klass, "RTPHeader")) ++ return FALSE; ++ ++ factory_uri = ++ gst_element_factory_get_metadata (factory, ++ GST_RTP_HEADER_EXTENSION_URI_METADATA_KEY); ++ if (!factory_uri) ++ return FALSE; ++ ++ if (uri && g_strcmp0 (uri, factory_uri) != 0) ++ return FALSE; ++ ++ return TRUE; ++} ++ ++/** ++ * gst_rtp_get_header_extension_list: ++ * ++ * Retrieve all the factories of the currently registered RTP header ++ * extensions. Call gst_element_factory_create() with each factory to create ++ * the associated #GstRTPHeaderExtension. ++ * ++ * Returns: (transfer full) (element-type GstElementFactory): a #GList of ++ * #GstElementFactory's. 
Use gst_plugin_feature_list_free() after use ++ * ++ * Since: 1.20 ++ */ ++GList * ++gst_rtp_get_header_extension_list (void) ++{ ++ return gst_registry_feature_filter (gst_registry_get (), ++ (GstPluginFeatureFilter) gst_rtp_ext_list_filter, FALSE, NULL); ++} ++ ++/** ++ * gst_rtp_header_extension_create_from_uri: ++ * @uri: the rtp header extension URI to search for ++ * ++ * Returns: (transfer full) (nullable): the #GstRTPHeaderExtension for @uri or %NULL ++ * ++ * Since: 1.20 ++ */ ++GstRTPHeaderExtension * ++gst_rtp_header_extension_create_from_uri (const gchar * uri) ++{ ++ GList *l; ++ ++ l = gst_registry_feature_filter (gst_registry_get (), ++ (GstPluginFeatureFilter) gst_rtp_ext_list_filter, TRUE, (gpointer) uri); ++ if (l) { ++ GstElementFactory *factory = GST_ELEMENT_FACTORY (l->data); ++ GstElement *element = gst_element_factory_create (factory, NULL); ++ ++ g_list_free_full (l, (GDestroyNotify) gst_object_unref); ++ ++ gst_object_ref_sink (element); ++ ++ return GST_RTP_HEADER_EXTENSION (element); ++ } ++ ++ return NULL; ++} ++ ++/** ++ * gst_rtp_header_extension_set_direction: ++ * @ext: the #GstRTPHeaderExtension ++ * @direction: The direction ++ * ++ * Set the direction that this header extension should be used in. ++ * If #GST_RTP_HEADER_EXTENSION_DIRECTION_INHERITED is included, the ++ * direction will not be included in the caps (as it shouldn't be in the ++ * extmap line in the SDP). ++ * ++ * Since: 1.20 ++ */ ++ ++void ++gst_rtp_header_extension_set_direction (GstRTPHeaderExtension * ext, ++ GstRTPHeaderExtensionDirection direction) ++{ ++ GstRTPHeaderExtensionPrivate *priv = ++ gst_rtp_header_extension_get_instance_private (ext); ++ ++ g_return_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext)); ++ g_return_if_fail (direction <= GST_RTP_HEADER_EXTENSION_DIRECTION_DEFAULT); ++ ++ priv->direction = direction; ++} ++ ++/** ++ * gst_rtp_header_extension_get_direction: ++ * @ext: the #GstRTPHeaderExtension ++ * ++ * Retrieve the direction ++ * ++ * Returns: The direction ++ * ++ * Since: 1.20 ++ */ ++ ++GstRTPHeaderExtensionDirection ++gst_rtp_header_extension_get_direction (GstRTPHeaderExtension * ext) ++{ ++ GstRTPHeaderExtensionPrivate *priv = ++ gst_rtp_header_extension_get_instance_private (ext); ++ ++ g_return_val_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext), ++ GST_RTP_HEADER_EXTENSION_DIRECTION_DEFAULT); ++ ++ return priv->direction; ++} +diff --git a/gst-libs/gst/rtp/gstrtphdrext.h b/gst-libs/gst/rtp/gstrtphdrext.h +index df36fe616..442c27af6 100644 +--- a/gst-libs/gst/rtp/gstrtphdrext.h ++++ b/gst-libs/gst/rtp/gstrtphdrext.h +@@ -1,5 +1,6 @@ + /* GStreamer + * Copyright (C) <2012> Wim Taymans ++ * Copyright (C) <2020> Matthew Waters + * + * gstrtphdrext.h: RTP header extensions + * +@@ -50,6 +51,243 @@ gboolean gst_rtp_hdrext_set_ntp_56 (gpointer data, guint size, guint64 nt + GST_RTP_API + gboolean gst_rtp_hdrext_get_ntp_56 (gpointer data, guint size, guint64 *ntptime); + ++/** ++ * GST_RTP_HDREXT_ELEMENT_CLASS: ++ * ++ * Constant string used in element classification to signal that this element ++ * is a RTP header extension. 
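For implementors, a sketch of how a subclass might advertise itself with this classification
string and its URI (the type name and URI are illustrative and not defined by this patch):

    /* Illustrative class_init for a hypothetical GstRTPHeaderExtension subclass. */
    static void
    my_hdrext_class_init (MyHdrExtClass * klass)
    {
      GstElementClass *element_class = GST_ELEMENT_CLASS (klass);

      gst_element_class_set_static_metadata (element_class,
          "Example RTP header extension", GST_RTP_HDREXT_ELEMENT_CLASS,
          "Writes/reads an example RTP header extension",
          "Nobody <nobody@example.com>");

      gst_rtp_header_extension_class_set_uri (GST_RTP_HEADER_EXTENSION_CLASS (klass),
          "urn:example:rtp-hdrext");
    }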
++ * ++ * Since: 1.20 ++ */ ++#define GST_RTP_HDREXT_ELEMENT_CLASS "Network/Extension/RTPHeader" ++ ++GST_RTP_API ++GType gst_rtp_header_extension_get_type (void); ++#define GST_TYPE_RTP_HEADER_EXTENSION (gst_rtp_header_extension_get_type()) ++#define GST_RTP_HEADER_EXTENSION(obj) \ ++ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_HEADER_EXTENSION,GstRTPHeaderExtension)) ++#define GST_RTP_HEADER_EXTENSION_CLASS(klass) \ ++ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_HEADER_EXTENSION,GstRTPHeaderExtensionClass)) ++#define GST_RTP_HEADER_EXTENSION_GET_CLASS(obj) \ ++ (G_TYPE_INSTANCE_GET_CLASS ((obj),GST_TYPE_RTP_HEADER_EXTENSION,GstRTPHeaderExtensionClass)) ++#define GST_IS_RTP_HEADER_EXTENSION(obj) \ ++ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_HEADER_EXTENSION)) ++#define GST_IS_RTP_HEADER_EXTENSION_CLASS(klass) \ ++ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_HEADER_EXTENSION)) ++/** ++ * GST_RTP_HEADER_EXTENSION_CAST: ++ * ++ * Since: 1.20 ++ */ ++#define GST_RTP_HEADER_EXTENSION_CAST(obj) ((GstRTPHeaderExtension *)(obj)) ++ ++typedef struct _GstRTPHeaderExtension GstRTPHeaderExtension; ++typedef struct _GstRTPHeaderExtensionClass GstRTPHeaderExtensionClass; ++ ++/** ++ * GstRTPHeaderExtensionFlags: ++ * @GST_RTP_HEADER_EXTENSION_ONE_BYTE: The one byte rtp extension header. ++ * 1-16 data bytes per extension with a maximum of ++ * 14 extension ids in total. ++ * @GST_RTP_HEADER_EXTENSION_TWO_BYTE: The two byte rtp extension header. ++ * 256 data bytes per extension with a maximum of 255 (or 256 ++ * including appbits) extensions in total. ++ * ++ * Flags that apply to a RTP Audio/Video header extension. ++ * ++ * Since: 1.20 ++ */ ++typedef enum /*< underscore_name=gst_rtp_header_extension_flags >*/ ++{ ++ GST_RTP_HEADER_EXTENSION_ONE_BYTE = (1 << 0), ++ GST_RTP_HEADER_EXTENSION_TWO_BYTE = (1 << 1), ++} GstRTPHeaderExtensionFlags; ++ ++/** ++ * GstRTPHeaderExtensionDirection: ++ * @GST_RTP_HEADER_EXTENSION_DIRECTION_INACTIVE: Neither send nor ++ * receive RTP Header Extensions ++ * @GST_RTP_HEADER_EXTENSION_DIRECTION_SENDONLY: Only send RTP Header ++ * Extensions @GST_RTP_HEADER_EXTENSION_DIRECTION_RECVONLY: Only ++ * receive RTP Header Extensions ++ * @GST_RTP_HEADER_EXTENSION_DIRECTION_SENDRECV: Send and receive RTP ++ * Header Extensions ext ++ * @GST_RTP_HEADER_EXTENSION_DIRECTION_INHERITED: RTP header extension ++ * direction is inherited from the stream ++ * ++ * Direction to which to apply the RTP Header Extension ++ * ++ * Since: 1.20 ++ */ ++typedef enum /*< underscore_name=gst_rtp_header_extension_direction >*/ ++{ ++ GST_RTP_HEADER_EXTENSION_DIRECTION_INACTIVE = 0, ++ GST_RTP_HEADER_EXTENSION_DIRECTION_SENDONLY = (1 << 0), ++ GST_RTP_HEADER_EXTENSION_DIRECTION_RECVONLY = (1 << 1), ++ GST_RTP_HEADER_EXTENSION_DIRECTION_SENDRECV = ( ++ GST_RTP_HEADER_EXTENSION_DIRECTION_SENDONLY | ++ GST_RTP_HEADER_EXTENSION_DIRECTION_RECVONLY), ++ GST_RTP_HEADER_EXTENSION_DIRECTION_INHERITED = (1 << 2) ++} GstRTPHeaderExtensionDirection; ++ ++/** ++ * GstRTPHeaderExtension: ++ * @parent: the parent #GObject ++ * @ext_id: the configured extension id ++ * ++ * Instance struct for a RTP Audio/Video header extension. 
++ * ++ * Since: 1.20 ++ */ ++struct _GstRTPHeaderExtension ++{ ++ GstElement parent; ++ ++ /*< private >*/ ++ gpointer _gst_reserved[GST_PADDING]; ++}; ++ ++/** ++ * GstRTPHeaderExtensionClass: ++ * @parent_class: the parent class ++ * @get_uri: retrieve the RTP extension uri ++ * @get_supported_flags: retrieve the supported flags ++ * @get_max_size: retrieve the maximum size for this extension based on the ++ * information available from input_meta. Implementations should attempt ++ * to provide as accurate information as possible as the returned value ++ * will be used to control the amount of possible data in the payload. ++ * Implementations must return the maximum as the allocated size for ++ * writing the extension will be at least the size of the returned value. ++ * Return the amount of data read or <0 on failure. ++ * @write: write into @data the information for this extension. Various ++ * information is provided to help writing extensions in particular cases. ++ * @read: read from a rtp payloaded buffer and extract the extension ++ * information, optionally adding some meta onto the output buffer. ++ * @set_non_rtp_sink_caps: read any information from sink caps that the header ++ * extension needs for its function. ++ * @update_non_rtp_src_caps: update depayloader non-RTP (depayloaded) caps with ++ * the information parsed from RTP header. ++ * @set_attributes: set the necessary attributes that may be signaled e.g. with ++ * an SDP. ++ * @set_caps_from_attributes: write the necessary caps field/s for the configured ++ * attributes e.g. as signalled with SDP. ++ * ++ * Base class for RTP Header extensions. ++ * ++ * Since: 1.20 ++ */ ++ ++struct _GstRTPHeaderExtensionClass ++{ ++ GstElementClass parent_class; ++ ++ /*< public >*/ ++ GstRTPHeaderExtensionFlags (*get_supported_flags) (GstRTPHeaderExtension * ext); ++ ++ gsize (*get_max_size) (GstRTPHeaderExtension * ext, ++ const GstBuffer * input_meta); ++ ++ gssize (*write) (GstRTPHeaderExtension * ext, ++ const GstBuffer * input_meta, ++ GstRTPHeaderExtensionFlags write_flags, ++ GstBuffer * output, ++ guint8 * data, ++ gsize size); ++ ++ gboolean (*read) (GstRTPHeaderExtension * ext, ++ GstRTPHeaderExtensionFlags read_flags, ++ const guint8 * data, ++ gsize size, ++ GstBuffer * buffer); ++ gboolean (*set_non_rtp_sink_caps) (GstRTPHeaderExtension * ext, ++ const GstCaps * caps); ++ gboolean (*update_non_rtp_src_caps) (GstRTPHeaderExtension * ext, ++ GstCaps * caps); ++ gboolean (*set_attributes) (GstRTPHeaderExtension * ext, ++ GstRTPHeaderExtensionDirection direction, ++ const gchar * attributes); ++ gboolean (*set_caps_from_attributes) (GstRTPHeaderExtension * ext, ++ GstCaps * caps); ++ ++ /*< private >*/ ++ gpointer _gst_reserved[GST_PADDING_LARGE]; ++}; ++ ++G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstRTPHeaderExtension, gst_object_unref) ++ ++/** ++ * GST_RTP_HEADER_EXTENSION_URI_METADATA_KEY: ++ * ++ * Since: 1.20 ++ */ ++#define GST_RTP_HEADER_EXTENSION_URI_METADATA_KEY "RTP-Header-Extension-URI" ++ ++GST_RTP_API ++void gst_rtp_header_extension_class_set_uri (GstRTPHeaderExtensionClass *klass, ++ const gchar * uri); ++ ++GST_RTP_API ++const gchar * gst_rtp_header_extension_get_uri (GstRTPHeaderExtension * ext); ++GST_RTP_API ++gsize gst_rtp_header_extension_get_max_size (GstRTPHeaderExtension * ext, ++ const GstBuffer * input_meta); ++GST_RTP_API ++GstRTPHeaderExtensionFlags gst_rtp_header_extension_get_supported_flags (GstRTPHeaderExtension * ext); ++GST_RTP_API ++guint gst_rtp_header_extension_get_id (GstRTPHeaderExtension * 
ext); ++GST_RTP_API ++void gst_rtp_header_extension_set_id (GstRTPHeaderExtension * ext, ++ guint ext_id); ++GST_RTP_API ++gssize gst_rtp_header_extension_write (GstRTPHeaderExtension * ext, ++ const GstBuffer * input_meta, ++ GstRTPHeaderExtensionFlags write_flags, ++ GstBuffer * output, ++ guint8 * data, ++ gsize size); ++GST_RTP_API ++gboolean gst_rtp_header_extension_read (GstRTPHeaderExtension * ext, ++ GstRTPHeaderExtensionFlags read_flags, ++ const guint8 * data, ++ gsize size, ++ GstBuffer * buffer); ++GST_RTP_API ++gboolean gst_rtp_header_extension_set_non_rtp_sink_caps (GstRTPHeaderExtension * ext, ++ const GstCaps * caps); ++GST_RTP_API ++gboolean gst_rtp_header_extension_wants_update_non_rtp_src_caps (GstRTPHeaderExtension * ext); ++GST_RTP_API ++void gst_rtp_header_extension_set_wants_update_non_rtp_src_caps (GstRTPHeaderExtension * ext, ++ gboolean state); ++GST_RTP_API ++gboolean gst_rtp_header_extension_update_non_rtp_src_caps (GstRTPHeaderExtension * ext, ++ GstCaps * caps); ++GST_RTP_API ++gboolean gst_rtp_header_extension_set_caps_from_attributes (GstRTPHeaderExtension * ext, ++ GstCaps * caps); ++GST_RTP_API ++gboolean gst_rtp_header_extension_set_attributes_from_caps (GstRTPHeaderExtension * ext, ++ const GstCaps * caps); ++ ++GST_RTP_API ++GList * gst_rtp_get_header_extension_list (void); ++GST_RTP_API ++GstRTPHeaderExtension * gst_rtp_header_extension_create_from_uri (const gchar * uri); ++ ++GST_RTP_API ++gchar * gst_rtp_header_extension_get_sdp_caps_field_name (GstRTPHeaderExtension * ext); ++ ++GST_RTP_API ++void gst_rtp_header_extension_set_direction (GstRTPHeaderExtension * ext, ++ GstRTPHeaderExtensionDirection direction); ++GST_RTP_API ++GstRTPHeaderExtensionDirection gst_rtp_header_extension_get_direction (GstRTPHeaderExtension * ext); ++ ++GST_RTP_API ++gboolean gst_rtp_header_extension_set_caps_from_attributes_helper (GstRTPHeaderExtension * ext, ++ GstCaps * caps, ++ const gchar * attributes); ++ + G_END_DECLS + + #endif /* __GST_RTPHDREXT_H__ */ +diff --git a/gst-libs/gst/rtp/gstrtpmeta.c b/gst-libs/gst/rtp/gstrtpmeta.c +index 1968f4964..76941efd0 100644 +--- a/gst-libs/gst/rtp/gstrtpmeta.c ++++ b/gst-libs/gst/rtp/gstrtpmeta.c +@@ -34,8 +34,8 @@ + /** + * gst_buffer_add_rtp_source_meta: + * @buffer: a #GstBuffer +- * @ssrc: (allow-none) (transfer none): pointer to the SSRC +- * @csrc: (allow-none) (transfer none): pointer to the CSRCs ++ * @ssrc: (nullable) (transfer none): pointer to the SSRC ++ * @csrc: (nullable) (transfer none) (array length=csrc_count): pointer to the CSRCs + * @csrc_count: number of elements in @csrc + * + * Attaches RTP source information to @buffer. +@@ -81,7 +81,7 @@ gst_buffer_add_rtp_source_meta (GstBuffer * buffer, const guint32 * ssrc, + * + * Find the #GstRTPSourceMeta on @buffer. + * +- * Returns: (transfer none): the #GstRTPSourceMeta or %NULL when there ++ * Returns: (transfer none) (nullable): the #GstRTPSourceMeta or %NULL when there + * is no such metadata on @buffer. + * + * Since: 1.16 +@@ -137,7 +137,7 @@ gst_rtp_source_meta_get_source_count (const GstRTPSourceMeta * meta) + /** + * gst_rtp_source_meta_set_ssrc: + * @meta: a #GstRTPSourceMeta +- * @ssrc: (allow-none) (transfer none): pointer to the SSRC ++ * @ssrc: (nullable) (transfer none): pointer to the SSRC + * + * Sets @ssrc in @meta. If @ssrc is %NULL the ssrc of @meta will be unset. 
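A short sketch of the annotated source-meta API in use (the SSRC and CSRC values are
illustrative):

    #include <gst/rtp/gstrtpmeta.h>

    /* Illustrative helper: record the sources that contributed to a buffer. */
    static void
    tag_buffer_sources (GstBuffer * buf)
    {
      const guint32 ssrc = 0x12345678;
      const guint32 csrc[] = { 0x11111111, 0x22222222 };

      gst_buffer_add_rtp_source_meta (buf, &ssrc, csrc, G_N_ELEMENTS (csrc));
    }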
+ * +@@ -161,7 +161,7 @@ gst_rtp_source_meta_set_ssrc (GstRTPSourceMeta * meta, guint32 * ssrc) + /** + * gst_rtp_source_meta_append_csrc: + * @meta: a #GstRTPSourceMeta +- * @csrc: the csrcs to append ++ * @csrc: (array length=csrc_count): the csrcs to append + * @csrc_count: number of elements in @csrc + * + * Appends @csrc to the list of contributing sources in @meta. +diff --git a/gst-libs/gst/rtp/gstrtppayloads.c b/gst-libs/gst/rtp/gstrtppayloads.c +index 76ac069c6..1ab579d4d 100644 +--- a/gst-libs/gst/rtp/gstrtppayloads.c ++++ b/gst-libs/gst/rtp/gstrtppayloads.c +@@ -185,7 +185,7 @@ static const GstRTPPayloadInfo info[] = { + * mostly used to get the default clock-rate and bandwidth for static payload + * types specified with @payload_type. + * +- * Returns: a #GstRTPPayloadInfo or NULL when no info could be found. ++ * Returns: (nullable): a #GstRTPPayloadInfo or NULL when no info could be found. + */ + const GstRTPPayloadInfo * + gst_rtp_payload_info_for_pt (guint8 payload_type) +@@ -213,7 +213,7 @@ gst_rtp_payload_info_for_pt (guint8 payload_type) + * + * The search for @encoding_name will be performed in a case insensitive way. + * +- * Returns: a #GstRTPPayloadInfo or NULL when no info could be found. ++ * Returns: (nullable): a #GstRTPPayloadInfo or NULL when no info could be found. + */ + const GstRTPPayloadInfo * + gst_rtp_payload_info_for_name (const gchar * media, const gchar * encoding_name) +diff --git a/gst-libs/gst/sdp/gstmikey.c b/gst-libs/gst/sdp/gstmikey.c +index 4f8a31389..d87547238 100644 +--- a/gst-libs/gst/sdp/gstmikey.c ++++ b/gst-libs/gst/sdp/gstmikey.c +@@ -66,7 +66,7 @@ G_STMT_START { \ + #define INIT_MEMDUP(field, data, len) \ + G_STMT_START { \ + g_free ((field)); \ +- (field) = g_memdup (data, len); \ ++ (field) = g_memdup2 (data, len); \ + } G_STMT_END + #define FREE_MEMDUP(field) \ + G_STMT_START { \ +@@ -177,7 +177,7 @@ gst_mikey_payload_kemac_get_n_sub (const GstMIKEYPayload * payload) + * Get the sub payload of @payload at @idx. @payload should be of type + * %GST_MIKEY_PT_KEMAC. + * +- * Returns: (transfer none): the #GstMIKEYPayload at @idx. ++ * Returns: (transfer none) (nullable): the #GstMIKEYPayload at @idx. + * + * Since: 1.4 + */ +@@ -458,7 +458,7 @@ gst_mikey_payload_sp_get_n_params (const GstMIKEYPayload * payload) + * Get the Security Policy parameter in a %GST_MIKEY_PT_SP @payload + * at @idx. + * +- * Returns: the #GstMIKEYPayloadSPParam at @idx in @payload ++ * Returns: (transfer none) (nullable): the #GstMIKEYPayloadSPParam at @idx in @payload + * + * Since: 1.4 + */ +@@ -683,7 +683,7 @@ gst_mikey_payload_key_data_set_spi (GstMIKEYPayload * payload, + * gst_mikey_payload_key_data_set_interval: + * @payload: a #GstMIKEYPayload + * @vf_len: the length of @vf_data +- * @vf_data: (array length=vf_data): the Valid From data ++ * @vf_data: (array length=vf_len): the Valid From data + * @vt_len: the length of @vt_data + * @vt_data: (array length=vt_len): the Valid To data + * +@@ -763,7 +763,7 @@ mikey_payload_free (GstMIKEYPayload * payload) + * + * Make a new #GstMIKEYPayload with @type. + * +- * Returns: (nullable): a new #GstMIKEYPayload or %NULL on failure. ++ * Returns: (transfer full) (nullable): a new #GstMIKEYPayload or %NULL on failure. + * + * Since: 1.4 + */ +@@ -870,7 +870,7 @@ mikey_message_free (GstMIKEYMessage * msg) + * + * Make a new MIKEY message. 
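A minimal sketch of building and serializing a MIKEY message with these APIs (the CSB id,
SSRC and policy values are illustrative):

    #include <gst/sdp/gstmikey.h>

    /* Illustrative helper: one SRTP crypto session, no key payloads added yet. */
    static GBytes *
    make_mikey_bytes (void)
    {
      GstMIKEYMessage *msg = gst_mikey_message_new ();
      GBytes *bytes;

      gst_mikey_message_set_info (msg, GST_MIKEY_VERSION, GST_MIKEY_TYPE_PSK_INIT,
          FALSE, GST_MIKEY_PRF_MIKEY_1, 0x0a0b0c0d /* CSB id */,
          GST_MIKEY_MAP_TYPE_SRTP);
      gst_mikey_message_add_cs_srtp (msg, 0 /* policy */, 0x12345678 /* ssrc */,
          0 /* roc */);

      bytes = gst_mikey_message_to_bytes (msg, NULL, NULL);
      gst_mikey_message_unref (msg);

      return bytes;
    }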
+ * +- * Returns: a new #GstMIKEYMessage on success ++ * Returns: (transfer full): a new #GstMIKEYMessage on success + * + * Since: 1.4 + */ +@@ -899,7 +899,7 @@ gst_mikey_message_new (void) + * + * Make a new #GstMIKEYMessage from @bytes. + * +- * Returns: a new #GstMIKEYMessage ++ * Returns: (transfer full): a new #GstMIKEYMessage + * + * Since: 1.4 + */ +@@ -974,7 +974,7 @@ gst_mikey_message_get_n_cs (const GstMIKEYMessage * msg) + * + * Get the policy information of @msg at @idx. + * +- * Returns: a #GstMIKEYMapSRTP ++ * Returns: (transfer none) (nullable): a #GstMIKEYMapSRTP + * + * Since: 1.4 + */ +@@ -1125,7 +1125,7 @@ gst_mikey_message_get_n_payloads (const GstMIKEYMessage * msg) + * + * Get the #GstMIKEYPayload at @idx in @msg + * +- * Returns: (transfer none): the #GstMIKEYPayload at @idx. The payload ++ * Returns: (transfer none) (nullable): the #GstMIKEYPayload at @idx. The payload + * remains valid for as long as it is part of @msg. + * + * Since: 1.4 +@@ -1149,7 +1149,7 @@ gst_mikey_message_get_payload (const GstMIKEYMessage * msg, guint idx) + * + * Find the @nth occurrence of the payload with @type in @msg. + * +- * Returns: the @nth #GstMIKEYPayload of @type. ++ * Returns: (transfer none) (nullable): the @nth #GstMIKEYPayload of @type. + * + * Since: 1.4 + */ +@@ -1667,7 +1667,7 @@ payloads_to_bytes (GArray * payloads, GByteArray * arr, guint8 ** ptr, + * + * Convert @msg to a #GBytes. + * +- * Returns: a new #GBytes for @msg. ++ * Returns: (transfer full): a new #GBytes for @msg. + * + * Since: 1.4 + */ +@@ -2053,7 +2053,7 @@ invalid_data: + * Parse @size bytes from @data into a #GstMIKEYMessage. @info contains the + * parameters to decrypt and verify the data. + * +- * Returns: a #GstMIKEYMessage on success or %NULL when parsing failed and ++ * Returns: (transfer full): a #GstMIKEYMessage on success or %NULL when parsing failed and + * @error will be set. + * + * Since: 1.4 +@@ -2238,7 +2238,7 @@ auth_key_length_from_auth_cipher_name (const gchar * auth, const gchar * cipher, + * - Key Data Transport Payload + * - Key Data Sub-Payload + * +- * Returns: (transfer full): a #GstMIKEYMessage, ++ * Returns: (transfer full) (nullable): a #GstMIKEYMessage, + * or %NULL if there is no srtp information in the caps. 
+ * + * Since: 1.8 +@@ -2377,6 +2377,7 @@ gboolean + gst_mikey_message_to_caps (const GstMIKEYMessage * msg, GstCaps * caps) + { + gboolean res = FALSE; ++ const GstMIKEYMapSRTP *srtp; + const GstMIKEYPayload *payload; + const gchar *srtp_cipher; + const gchar *srtp_auth; +@@ -2384,8 +2385,16 @@ gst_mikey_message_to_caps (const GstMIKEYMessage * msg, GstCaps * caps) + srtp_cipher = "aes-128-icm"; + srtp_auth = "hmac-sha1-80"; + +- /* check the Security policy if any */ +- if ((payload = gst_mikey_message_find_payload (msg, GST_MIKEY_PT_SP, 0))) { ++ /* Look for first crypto session */ ++ if (!(srtp = gst_mikey_message_get_cs_srtp (msg, 0))) { ++ GST_ERROR ("No crypto session found at index 0"); ++ goto done; ++ } ++ ++ /* Look for crypto policy corresponding to first crypto session */ ++ if ((payload = ++ gst_mikey_message_find_payload (msg, GST_MIKEY_PT_SP, ++ (unsigned int) srtp->policy))) { + GstMIKEYPayloadSP *p = (GstMIKEYPayloadSP *) payload; + guint len, i; + guint enc_alg = GST_MIKEY_ENC_NULL; +@@ -2478,7 +2487,7 @@ gst_mikey_message_to_caps (const GstMIKEYMessage * msg, GstCaps * caps) + GstMIKEYPayloadKEMAC *p = (GstMIKEYPayloadKEMAC *) payload; + const GstMIKEYPayload *sub; + GstMIKEYPayloadKeyData *pkd; +- GstBuffer *buf; ++ GstBuffer *buf, *saltbuf; + + if (p->enc_alg != GST_MIKEY_ENC_NULL || p->mac_alg != GST_MIKEY_MAC_NULL) + goto done; +@@ -2490,11 +2499,16 @@ gst_mikey_message_to_caps (const GstMIKEYMessage * msg, GstCaps * caps) + goto done; + + pkd = (GstMIKEYPayloadKeyData *) sub; +- buf = +- gst_buffer_new_wrapped (g_memdup (pkd->key_data, pkd->key_len), +- pkd->key_len); ++ buf = gst_buffer_new_memdup (pkd->key_data, pkd->key_len); ++ if (pkd->salt_len) { ++ saltbuf = gst_buffer_new_memdup (pkd->salt_data, pkd->salt_len); ++ gst_buffer_append (buf, saltbuf); ++ gst_buffer_unref (saltbuf); ++ } + gst_caps_set_simple (caps, "srtp-key", GST_TYPE_BUFFER, buf, NULL); + gst_buffer_unref (buf); ++ ++ gst_caps_set_simple (caps, "roc", G_TYPE_UINT, srtp->roc, NULL); + } + + gst_caps_set_simple (caps, +diff --git a/gst-libs/gst/sdp/gstsdpmessage.c b/gst-libs/gst/sdp/gstsdpmessage.c +index 80870f113..2b40bd911 100644 +--- a/gst-libs/gst/sdp/gstsdpmessage.c ++++ b/gst-libs/gst/sdp/gstsdpmessage.c +@@ -61,6 +61,7 @@ + #include + + #include ++#include + #include "gstsdpmessage.h" + + #define FREE_STRING(field) g_free (field); (field) = NULL +@@ -262,7 +263,7 @@ gst_sdp_message_new_from_text (const gchar * text, GstSDPMessage ** msg) + + /** + * gst_sdp_message_init: +- * @msg: a #GstSDPMessage ++ * @msg: (out caller-allocates): a #GstSDPMessage + * + * Initialize @msg so that its contents are as if it was freshly allocated + * with gst_sdp_message_new(). This function is mostly used to initialize a message +@@ -339,16 +340,12 @@ gst_sdp_message_uninit (GstSDPMessage * msg) + GstSDPResult + gst_sdp_message_copy (const GstSDPMessage * msg, GstSDPMessage ** copy) + { +- GstSDPResult ret; + GstSDPMessage *cp; + guint i, len; + +- if (msg == NULL) +- return GST_SDP_EINVAL; ++ g_return_val_if_fail (msg != NULL, GST_SDP_EINVAL); + +- ret = gst_sdp_message_new (copy); +- if (ret != GST_SDP_OK) +- return ret; ++ gst_sdp_message_new (copy); + + cp = *copy; + +@@ -488,7 +485,7 @@ gst_sdp_address_is_multicast (const gchar * nettype, const gchar * addrtype, + * + * Convert the contents of @msg to a text string. + * +- * Returns: A dynamically allocated string representing the SDP description. ++ * Returns: (transfer full): A dynamically allocated string representing the SDP description. 
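A sketch of the text round-trip these annotations describe (error handling trimmed; the
helper name is illustrative):

    #include <gst/sdp/gstsdpmessage.h>

    /* Illustrative helper: parse an SDP blob and serialize it again. */
    static gchar *
    reserialize_sdp (const gchar * sdp_text)
    {
      GstSDPMessage *msg = NULL;
      gchar *out;

      if (gst_sdp_message_new_from_text (sdp_text, &msg) != GST_SDP_OK)
        return NULL;

      out = gst_sdp_message_as_text (msg);   /* caller owns the returned string */
      gst_sdp_message_free (msg);

      return out;
    }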
+ */ + gchar * + gst_sdp_message_as_text (const GstSDPMessage * msg) +@@ -623,7 +620,7 @@ hex_to_int (gchar c) + /** + * gst_sdp_message_parse_uri: + * @uri: the start of the uri +- * @msg: the result #GstSDPMessage ++ * @msg: (transfer none): the result #GstSDPMessage + * + * Parse the null-terminated @uri and store the result in @msg. + * +@@ -735,7 +732,7 @@ static const gchar hex[16] = "0123456789ABCDEF"; + * + * Where each value is url encoded. + * +- * Returns: a uri for @msg. ++ * Returns: (transfer full): a uri for @msg. + */ + gchar * + gst_sdp_message_as_uri (const gchar * scheme, const GstSDPMessage * msg) +@@ -1630,7 +1627,7 @@ DEFINE_ARRAY_GETTER (attribute, attributes, GstSDPAttribute); + * + * Get the @nth attribute with key @key in @msg. + * +- * Returns: the attribute value of the @nth attribute with @key. ++ * Returns: (nullable): the attribute value of the @nth attribute with @key. + */ + const gchar * + gst_sdp_message_get_attribute_val_n (const GstSDPMessage * msg, +@@ -1662,7 +1659,7 @@ gst_sdp_message_get_attribute_val_n (const GstSDPMessage * msg, + * + * Get the first attribute with key @key in @msg. + * +- * Returns: the attribute value of the first attribute with @key. ++ * Returns: (nullable): the attribute value of the first attribute with @key. + */ + const gchar * + gst_sdp_message_get_attribute_val (const GstSDPMessage * msg, const gchar * key) +@@ -1819,7 +1816,7 @@ gst_sdp_media_new (GstSDPMedia ** media) + + /** + * gst_sdp_media_init: +- * @media: a #GstSDPMedia ++ * @media: (out caller-allocates): a #GstSDPMedia + * + * Initialize @media so that its contents are as if it was freshly allocated + * with gst_sdp_media_new(). This function is mostly used to initialize a media +@@ -1909,16 +1906,12 @@ gst_sdp_media_free (GstSDPMedia * media) + GstSDPResult + gst_sdp_media_copy (const GstSDPMedia * media, GstSDPMedia ** copy) + { +- GstSDPResult ret; + GstSDPMedia *cp; + guint i, len; + +- if (media == NULL) +- return GST_SDP_EINVAL; ++ g_return_val_if_fail (media != NULL, GST_SDP_EINVAL); + +- ret = gst_sdp_media_new (copy); +- if (ret != GST_SDP_OK) +- return ret; ++ gst_sdp_media_new (copy); + + cp = *copy; + +@@ -1965,7 +1958,7 @@ gst_sdp_media_copy (const GstSDPMedia * media, GstSDPMedia ** copy) + * + * Convert the contents of @media to a text string. + * +- * Returns: A dynamically allocated string representing the media. ++ * Returns: (transfer full): A dynamically allocated string representing the media. + */ + gchar * + gst_sdp_media_as_text (const GstSDPMedia * media) +@@ -2802,7 +2795,7 @@ gst_sdp_media_get_attribute (const GstSDPMedia * media, guint idx) + * + * Get the @nth attribute value for @key in @media. + * +- * Returns: the @nth attribute value. ++ * Returns: (nullable): the @nth attribute value. + */ + const gchar * + gst_sdp_media_get_attribute_val_n (const GstSDPMedia * media, const gchar * key, +@@ -2834,7 +2827,7 @@ gst_sdp_media_get_attribute_val_n (const GstSDPMedia * media, const gchar * key, + * + * Get the first attribute value for @key in @media. + * +- * Returns: the first attribute value for @key. ++ * Returns: (nullable): the first attribute value for @key. 
+ */ + const gchar * + gst_sdp_media_get_attribute_val (const GstSDPMedia * media, const gchar * key) +@@ -3128,7 +3121,7 @@ gst_sdp_parse_line (SDPContext * c, gchar type, gchar * buffer) + * gst_sdp_message_parse_buffer: + * @data: (array length=size): the start of the buffer + * @size: the size of the buffer +- * @msg: the result #GstSDPMessage ++ * @msg: (transfer none): the result #GstSDPMessage + * + * Parse the contents of @size bytes pointed to by @data and store the result in + * @msg. +@@ -3446,7 +3439,7 @@ gst_sdp_parse_rtpmap (const gchar * rtpmap, gint * payload, gchar ** name, + *params = NULL; + goto out; + } else { +- *name = strdup (*name); ++ *name = g_strdup (*name); + } + + t = p; +@@ -3540,6 +3533,35 @@ gst_sdp_media_add_rtcp_fb_attributes_from_media (const GstSDPMedia * media, + return GST_SDP_OK; + } + ++static void ++gst_sdp_media_caps_adjust_h264 (GstCaps * caps) ++{ ++ long int spsint; ++ guint8 sps[2]; ++ const gchar *profile_level_id; ++ GstStructure *s = gst_caps_get_structure (caps, 0); ++ ++ if (g_strcmp0 (gst_structure_get_string (s, "encoding-name"), "H264") || ++ g_strcmp0 (gst_structure_get_string (s, "level-asymmetry-allowed"), "1")) ++ return; ++ ++ profile_level_id = gst_structure_get_string (s, "profile-level-id"); ++ if (!profile_level_id) ++ return; ++ ++ spsint = strtol (profile_level_id, NULL, 16); ++ sps[0] = spsint >> 16; ++ sps[1] = spsint >> 8; ++ ++ GST_DEBUG ("'level-asymmetry-allowed' is set so we shouldn't care about " ++ "'profile-level-id' and only set a 'profile' instead"); ++ gst_structure_set (s, "profile", G_TYPE_STRING, ++ gst_codec_utils_h264_get_profile (sps, 2), NULL); ++ ++ gst_structure_remove_fields (s, "level-asymmetry-allowed", "profile-level-id", ++ NULL); ++} ++ + /** + * gst_sdp_media_get_caps_from_media: + * @media: a #GstSDPMedia +@@ -3553,9 +3575,9 @@ gst_sdp_media_add_rtcp_fb_attributes_from_media (const GstSDPMedia * media, + * + * a=fmtp:(payload) (param)[=(value)];... + * +- * Note that the extmap attribute is set only by gst_sdp_media_attributes_to_caps(). ++ * Note that the extmap, ssrc and rid attributes are set only by gst_sdp_media_attributes_to_caps(). + * +- * Returns: a #GstCaps, or %NULL if an error happened ++ * Returns: (transfer full) (nullable): a #GstCaps, or %NULL if an error happened + * + * Since: 1.8 + */ +@@ -3592,7 +3614,9 @@ gst_sdp_media_get_caps_from_media (const GstSDPMedia * media, gint pt) + + /* check if we have a rate, if not, we need to look up the rate from the + * default rates based on the payload types. 
*/ +- if (rate == -1) { ++ /* Some broken RTSP server puts a rate of 0, also use the default in that ++ * case */ ++ if (rate <= 0) { + const GstRTPPayloadInfo *info; + + if (GST_RTP_PAYLOAD_IS_DYNAMIC (pt)) { +@@ -3714,6 +3738,8 @@ gst_sdp_media_get_caps_from_media (const GstSDPMedia * media, gint pt) + } + } + ++ gst_sdp_media_caps_adjust_h264 (caps); ++ + /* parse rtcp-fb: field */ + gst_sdp_media_add_rtcp_fb_attributes_from_media (media, pt, caps); + +@@ -3742,7 +3768,7 @@ no_rate: + /** + * gst_sdp_media_set_media_from_caps: + * @caps: a #GstCaps +- * @media: a #GstSDPMedia ++ * @media: (out caller-allocates): a #GstSDPMedia + * + * Mapping of caps to SDP fields: + * +@@ -3767,7 +3793,7 @@ gst_sdp_media_set_media_from_caps (const GstCaps * caps, GstSDPMedia * media) + gchar *tmp; + gint caps_pt, caps_rate; + guint n_fields, j; +- gboolean first, nack, nack_pli, ccm_fir; ++ gboolean first, nack, nack_pli, ccm_fir, transport_cc; + GString *fmtp; + GstStructure *s; + +@@ -3782,15 +3808,25 @@ gst_sdp_media_set_media_from_caps (const GstCaps * caps, GstSDPMedia * media) + + /* get media type and payload for the m= line */ + caps_str = gst_structure_get_string (s, "media"); ++ if (!caps_str) { ++ GST_ERROR ("ignoring stream without media type"); ++ goto error; ++ } + gst_sdp_media_set_media (media, caps_str); + +- gst_structure_get_int (s, "payload", &caps_pt); ++ if (!gst_structure_get_int (s, "payload", &caps_pt)) { ++ GST_ERROR ("ignoring stream without payload type"); ++ goto error; ++ } + tmp = g_strdup_printf ("%d", caps_pt); + gst_sdp_media_add_format (media, tmp); + g_free (tmp); + + /* get clock-rate, media type and params for the rtpmap attribute */ +- gst_structure_get_int (s, "clock-rate", &caps_rate); ++ if (!gst_structure_get_int (s, "clock-rate", &caps_rate)) { ++ GST_ERROR ("ignoring stream without clock rate"); ++ goto error; ++ } + caps_enc = gst_structure_get_string (s, "encoding-name"); + caps_params = gst_structure_get_string (s, "encoding-params"); + +@@ -3833,6 +3869,15 @@ gst_sdp_media_set_media_from_caps (const GstCaps * caps, GstSDPMedia * media) + } + } + ++ if (gst_structure_get_boolean (s, "rtcp-fb-transport-cc", &transport_cc)) { ++ if (transport_cc) { ++ tmp = g_strdup_printf ("%d transport-cc", caps_pt); ++ gst_sdp_media_add_attribute (media, "rtcp-fb", tmp); ++ g_free (tmp); ++ GST_DEBUG ("adding rtcp-fb-transport-cc to pt=%d", caps_pt); ++ } ++ } ++ + /* collect all other properties and add them to fmtp, extmap or attributes */ + fmtp = g_string_new (""); + g_string_append_printf (fmtp, "%d ", caps_pt); +@@ -3869,6 +3914,8 @@ gst_sdp_media_set_media_from_caps (const GstCaps * caps, GstSDPMedia * media) + continue; + if (g_str_has_prefix (fname, "rtcp-fb-")) + continue; ++ if (g_str_has_prefix (fname, "ssrc-")) ++ continue; + + if (!strcmp (fname, "a-framesize")) { + /* a-framesize attribute */ +@@ -3950,7 +3997,63 @@ gst_sdp_media_set_media_from_caps (const GstCaps * caps, GstSDPMedia * media) + continue; + } + ++ /* rid values */ ++ if (g_str_has_prefix (fname, "rid-")) { ++ const char *rid_id = &fname[strlen ("rid-")]; ++ const GValue *arr; ++ ++ if (!rid_id || !*rid_id) ++ continue; ++ ++ if ((fval = gst_structure_get_string (s, fname))) { ++ char *rid_val = g_strdup_printf ("%s %s", rid_id, fval); ++ gst_sdp_media_add_attribute (media, "rid", rid_val); ++ g_free (rid_val); ++ } else if ((arr = gst_structure_get_value (s, fname)) ++ && GST_VALUE_HOLDS_ARRAY (arr) ++ && gst_value_array_get_size (arr) > 1) { ++ const gchar *direction, *param; ++ GString 
*str; ++ guint i, n; ++ ++ str = g_string_new (NULL); ++ ++ g_string_append_printf (str, "%s ", rid_id); ++ ++ n = gst_value_array_get_size (arr); ++ for (i = 0; i < n; i++) { ++ const GValue *val = gst_value_array_get_value (arr, i); ++ if (i == 0) { ++ direction = g_value_get_string (val); ++ g_string_append_printf (str, "%s", direction); ++ } else { ++ param = g_value_get_string (val); ++ if (i == 1) ++ g_string_append_c (str, ' '); ++ else ++ g_string_append_c (str, ';'); ++ g_string_append_printf (str, "%s", param); ++ } ++ } ++ gst_sdp_media_add_attribute (media, "rid", str->str); ++ g_string_free (str, TRUE); ++ } else { ++ GST_WARNING ("caps field %s is an unsupported format", fname); ++ } ++ continue; ++ } ++ + if ((fval = gst_structure_get_string (s, fname))) { ++ ++ /* "profile" is our internal representation of the notion of ++ * "level-asymmetry-allowed" with caps, convert it back to the SDP ++ * representation */ ++ if (!g_strcmp0 (gst_structure_get_string (s, "encoding-name"), "H264") ++ && !g_strcmp0 (fname, "profile")) { ++ fname = "level-asymmetry-allowed"; ++ fval = "1"; ++ } ++ + g_string_append_printf (fmtp, "%s%s=%s", first ? "" : ";", fname, fval); + first = FALSE; + } +@@ -4119,6 +4222,10 @@ sdp_add_attributes_to_caps (GArray * attributes, GstCaps * caps) + continue; + if (!strcmp (key, "extmap")) + continue; ++ if (!strcmp (key, "ssrc")) ++ continue; ++ if (!strcmp (key, "rid")) ++ continue; + + /* string must be valid UTF8 */ + if (!g_utf8_validate (attr->value, -1, NULL)) +@@ -4244,6 +4351,205 @@ gst_sdp_media_add_extmap_attributes (GArray * attributes, GstCaps * caps) + return GST_SDP_OK; + } + ++/* parses Source-specific media SDP attributes (RFC5576) into caps */ ++static GstSDPResult ++gst_sdp_media_add_ssrc_attributes (GArray * attributes, GstCaps * caps) ++{ ++ gchar *p, *tmp, *to_free; ++ guint i; ++ GstStructure *s; ++ ++ g_return_val_if_fail (attributes != NULL, GST_SDP_EINVAL); ++ g_return_val_if_fail (caps != NULL && GST_IS_CAPS (caps), GST_SDP_EINVAL); ++ ++ s = gst_caps_get_structure (caps, 0); ++ ++ for (i = 0; i < attributes->len; i++) { ++ const gchar *value; ++ GstSDPAttribute *attr; ++ guint32 ssrc; ++ gchar *ssrc_val, *ssrc_attr; ++ gchar *key; ++ ++ attr = &g_array_index (attributes, GstSDPAttribute, i); ++ if (strcmp (attr->key, "ssrc") != 0) ++ continue; ++ ++ value = attr->value; ++ ++ /* p is now of the format ssrc attribute[:value] */ ++ to_free = p = g_strdup (value); ++ ++ ssrc = strtoul (p, &tmp, 10); ++ if (*tmp != ' ') { ++ GST_ERROR ("Invalid ssrc attribute '%s'", to_free); ++ goto next; ++ } ++ ++ /* At the space */ ++ p = tmp; ++ ++ SKIP_SPACES (p); ++ ++ tmp = strstr (p, ":"); ++ if (tmp == NULL) { ++ ssrc_attr = tmp; ++ ssrc_val = (gchar *) ""; ++ } else { ++ ssrc_attr = p; ++ *tmp = '\0'; ++ p = tmp + 1; ++ ssrc_val = p; ++ } ++ ++ if (ssrc_attr == NULL || *ssrc_attr == '\0') { ++ GST_ERROR ("Invalid ssrc attribute '%s'", to_free); ++ goto next; ++ } ++ ++ key = g_strdup_printf ("ssrc-%u-%s", ssrc, ssrc_attr); ++ gst_structure_set (s, key, G_TYPE_STRING, ssrc_val, NULL); ++ GST_DEBUG ("adding caps: %s=%s", key, ssrc_val); ++ g_free (key); ++ ++ next: ++ g_free (to_free); ++ } ++ return GST_SDP_OK; ++} ++ ++/* parses RID SDP attributes (RFC8851) into caps */ ++static GstSDPResult ++gst_sdp_media_add_rid_attributes (GArray * attributes, GstCaps * caps) ++{ ++ const gchar *rid; ++ char *p, *to_free; ++ guint i; ++ GstStructure *s; ++ ++ g_return_val_if_fail (attributes != NULL, GST_SDP_EINVAL); ++ g_return_val_if_fail (caps != 
NULL && GST_IS_CAPS (caps), GST_SDP_EINVAL); ++ g_return_val_if_fail (gst_caps_is_writable (caps), GST_SDP_EINVAL); ++ ++ s = gst_caps_get_structure (caps, 0); ++ ++ for (i = 0; i < attributes->len; i++) { ++ GstSDPAttribute *attr; ++ const char *direction, *params, *id; ++ const char *tmp; ++ ++ attr = &g_array_index (attributes, GstSDPAttribute, i); ++ if (strcmp (attr->key, "rid") != 0) ++ continue; ++ ++ rid = attr->value; ++ ++ /* p is now of the format id dir ;-separated-params */ ++ to_free = p = g_strdup (rid); ++ ++ PARSE_STRING (p, " ", id); ++ if (id == NULL || *id == '\0') { ++ GST_ERROR ("Invalid rid \'%s\'", to_free); ++ goto next; ++ } ++ tmp = id; ++ while (*tmp && (*tmp == '-' || *tmp == '_' || g_ascii_isalnum (*tmp))) ++ tmp++; ++ if (*tmp != '\0') { ++ GST_ERROR ("Invalid rid-id \'%s\'", id); ++ goto next; ++ } ++ ++ SKIP_SPACES (p); ++ ++ PARSE_STRING (p, " ", direction); ++ if (direction == NULL || *direction == '\0') { ++ direction = p; ++ params = NULL; ++ } else { ++ SKIP_SPACES (p); ++ ++ params = p; ++ } ++ ++ if (direction == NULL || *direction == '\0' ++ || (g_strcmp0 (direction, "send") != 0 ++ && g_strcmp0 (direction, "recv") != 0)) { ++ GST_ERROR ("Invalid rid direction \'%s\'", p); ++ goto next; ++ } ++ ++ if (params && *params != '\0') { ++ GValue arr = G_VALUE_INIT; ++ GValue val = G_VALUE_INIT; ++ gchar *key; ++#if !defined(GST_DISABLE_DEBUG) ++ GString *debug_params = g_string_new (NULL); ++ int i = 0; ++#endif ++ ++ key = g_strdup_printf ("rid-%s", id); ++ ++ g_value_init (&arr, GST_TYPE_ARRAY); ++ g_value_init (&val, G_TYPE_STRING); ++ ++ g_value_set_string (&val, direction); ++ gst_value_array_append_and_take_value (&arr, &val); ++ val = (GValue) G_VALUE_INIT; ++ ++ while (*p) { ++ const char *param; ++ gboolean done = FALSE; ++ ++ PARSE_STRING (p, ";", param); ++ ++ if (param) { ++ } else if (*p) { ++ param = p; ++ done = TRUE; ++ } else { ++ break; ++ } ++ ++ g_value_init (&val, G_TYPE_STRING); ++ g_value_set_string (&val, param); ++ gst_value_array_append_and_take_value (&arr, &val); ++ val = (GValue) G_VALUE_INIT; ++#if !defined(GST_DISABLE_DEBUG) ++ if (i++ > 0) ++ g_string_append_c (debug_params, ','); ++ g_string_append (debug_params, param); ++#endif ++ ++ if (done) ++ break; ++ } ++ ++ gst_structure_take_value (s, key, &arr); ++ arr = (GValue) G_VALUE_INIT; ++#if !defined(GST_DISABLE_DEBUG) ++ { ++ char *debug_str = g_string_free (debug_params, FALSE); ++ GST_DEBUG ("adding caps: %s=<%s,%s>", key, direction, debug_str); ++ g_free (debug_str); ++ } ++#endif ++ g_free (key); ++ } else { ++ gchar *key; ++ ++ key = g_strdup_printf ("rid-%s", id); ++ gst_structure_set (s, key, G_TYPE_STRING, direction, NULL); ++ GST_DEBUG ("adding caps: %s=%s", key, direction); ++ g_free (key); ++ } ++ ++ next: ++ g_clear_pointer (&to_free, g_free); ++ } ++ return GST_SDP_OK; ++} ++ + /** + * gst_sdp_message_attributes_to_caps: + * @msg: a #GstSDPMessage +@@ -4320,6 +4626,16 @@ gst_sdp_media_attributes_to_caps (const GstSDPMedia * media, GstCaps * caps) + res = gst_sdp_media_add_extmap_attributes (media->attributes, caps); + } + ++ if (res == GST_SDP_OK) { ++ /* parse media ssrc field */ ++ res = gst_sdp_media_add_ssrc_attributes (media->attributes, caps); ++ } ++ ++ if (res == GST_SDP_OK) { ++ /* parse media rid fields */ ++ res = gst_sdp_media_add_rid_attributes (media->attributes, caps); ++ } ++ + done: + if (mikey) + gst_mikey_message_unref (mikey); +diff --git a/gst-libs/gst/sdp/meson.build b/gst-libs/gst/sdp/meson.build +index 24cdb5293..7efa721f2 100644 
+--- a/gst-libs/gst/sdp/meson.build ++++ b/gst-libs/gst/sdp/meson.build +@@ -1,23 +1,23 @@ +-gst_sdp_headers = [ ++gst_sdp_headers = files([ + 'sdp.h', + 'sdp-prelude.h', + 'gstsdp.h', + 'gstmikey.h', + 'gstsdpmessage.h', +-] ++]) + install_headers(gst_sdp_headers, subdir : 'gstreamer-1.0/gst/sdp/') + +-rtsp_deps = [rtp_dep, gst_dep, gio_dep] +-gst_sdp_sources = ['gstsdpmessage.c', 'gstmikey.c'] ++sdp_deps = [rtp_dep, gst_dep, gio_dep, pbutils_dep] ++gst_sdp_sources = files(['gstsdpmessage.c', 'gstmikey.c']) + gstsdp = library('gstsdp-@0@'.format(api_version), + gst_sdp_sources, +- c_args : gst_plugins_base_args + ['-DBUILDING_GST_SDP'], ++ c_args : gst_plugins_base_args + ['-DBUILDING_GST_SDP', '-DG_LOG_DOMAIN="GStreamer-SDP"'], + include_directories: [configinc, libsinc], + version : libversion, + soversion : soversion, + darwin_versions : osxversion, + install : true, +- dependencies : rtsp_deps, ++ dependencies : sdp_deps, + ) + + sdp_gen_sources = [] +@@ -41,5 +41,5 @@ endif + + sdp_dep = declare_dependency(link_with: gstsdp, + include_directories : [libsinc], +- dependencies : rtsp_deps, ++ dependencies : sdp_deps, + sources: sdp_gen_sources) +diff --git a/gst-libs/gst/video/convertframe.c b/gst-libs/gst/video/convertframe.c +index bc55c6caf..144e23be8 100644 +--- a/gst-libs/gst/video/convertframe.c ++++ b/gst-libs/gst/video/convertframe.c +@@ -24,6 +24,9 @@ + + #include + #include "video.h" ++#ifdef HAVE_GL ++#include ++#endif + + static gboolean + caps_are_raw (const GstCaps * caps) +@@ -110,15 +113,105 @@ fail: + return encoder; + } + ++static GstElement * ++build_convert_frame_pipeline_d3d11 (GstElement ** src_element, ++ GstElement ** sink_element, GstCaps * from_caps, GstCaps * to_caps, ++ GError ** err) ++{ ++ GstElement *pipeline = NULL; ++ GstElement *appsrc = NULL; ++ GstElement *d3d11_convert = NULL; ++ GstElement *d3d11_download = NULL; ++ GstElement *convert = NULL; ++ GstElement *enc = NULL; ++ GstElement *appsink = NULL; ++ GError *error = NULL; ++ ++ if (!create_element ("appsrc", &appsrc, &error) || ++ !create_element ("d3d11convert", &d3d11_convert, &error) || ++ !create_element ("d3d11download", &d3d11_download, &error) || ++ !create_element ("videoconvert", &convert, &error) || ++ !create_element ("appsink", &appsink, &error)) { ++ GST_ERROR ("Could not create element"); ++ goto failed; ++ } ++ ++ if (caps_are_raw (to_caps)) { ++ if (!create_element ("identity", &enc, &error)) { ++ GST_ERROR ("Could not create identity element"); ++ goto failed; ++ } ++ } else { ++ enc = get_encoder (to_caps, &error); ++ if (!enc) { ++ GST_ERROR ("Could not create encoder"); ++ goto failed; ++ } ++ } ++ ++ g_object_set (appsrc, "caps", from_caps, "emit-signals", TRUE, ++ "format", GST_FORMAT_TIME, NULL); ++ g_object_set (appsink, "caps", to_caps, "emit-signals", TRUE, NULL); ++ ++ pipeline = gst_pipeline_new ("d3d11-convert-frame-pipeline"); ++ gst_bin_add_many (GST_BIN (pipeline), appsrc, d3d11_convert, d3d11_download, ++ convert, enc, appsink, NULL); ++ ++ if (!gst_element_link_many (appsrc, ++ d3d11_convert, d3d11_download, convert, enc, appsink, NULL)) { ++ /* Now pipeline takes ownership of all elements, so only top-level ++ * pipeline should be cleared */ ++ appsrc = d3d11_convert = convert = enc = appsink = NULL; ++ ++ error = g_error_new (GST_CORE_ERROR, GST_CORE_ERROR_NEGOTIATION, ++ "Could not configure pipeline for conversion"); ++ } ++ ++ *src_element = appsrc; ++ *sink_element = appsink; ++ ++ return pipeline; ++ ++failed: ++ if (err) ++ *err = error; ++ else ++ 
g_clear_error (&error); ++ ++ gst_clear_object (&pipeline); ++ gst_clear_object (&appsrc); ++ gst_clear_object (&d3d11_convert); ++ gst_clear_object (&d3d11_download); ++ gst_clear_object (&convert); ++ gst_clear_object (&enc); ++ gst_clear_object (&appsink); ++ ++ return NULL; ++} ++ + static GstElement * + build_convert_frame_pipeline (GstElement ** src_element, +- GstElement ** sink_element, const GstCaps * from_caps, +- GstVideoCropMeta * cmeta, const GstCaps * to_caps, GError ** err) ++ GstElement ** sink_element, GstCaps * from_caps, ++ GstVideoCropMeta * cmeta, GstCaps * to_caps, GError ** err) + { + GstElement *vcrop = NULL, *csp = NULL, *csp2 = NULL, *vscale = NULL; + GstElement *src = NULL, *sink = NULL, *encoder = NULL, *pipeline; ++ GstElement *dl = NULL; + GstVideoInfo info; + GError *error = NULL; ++ GstCapsFeatures *features; ++ ++ features = gst_caps_get_features (from_caps, 0); ++ if (features && gst_caps_features_contains (features, "memory:D3D11Memory")) { ++ return build_convert_frame_pipeline_d3d11 (src_element, sink_element, ++ from_caps, to_caps, err); ++ } ++#ifdef HAVE_GL ++ if (features && ++ gst_caps_features_contains (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) ++ if (!create_element ("gldownload", &dl, &error)) ++ goto no_elements; ++#endif + + if (cmeta) { + if (!create_element ("videocrop", &vcrop, &error)) { +@@ -150,6 +243,8 @@ build_convert_frame_pipeline (GstElement ** src_element, + gst_bin_add_many (GST_BIN (pipeline), src, csp, vscale, sink, NULL); + if (vcrop) + gst_bin_add_many (GST_BIN (pipeline), vcrop, csp2, NULL); ++ if (dl) ++ gst_bin_add (GST_BIN (pipeline), dl); + + /* set caps */ + g_object_set (src, "caps", from_caps, NULL); +@@ -168,9 +263,19 @@ build_convert_frame_pipeline (GstElement ** src_element, + + /* FIXME: linking is still way too expensive, profile this properly */ + if (vcrop) { +- GST_DEBUG ("linking src->csp2"); +- if (!gst_element_link_pads (src, "src", csp2, "sink")) +- goto link_failed; ++ if (!dl) { ++ GST_DEBUG ("linking src->csp2"); ++ if (!gst_element_link_pads (src, "src", csp2, "sink")) ++ goto link_failed; ++ } else { ++ GST_DEBUG ("linking src->dl"); ++ if (!gst_element_link_pads (src, "src", dl, "sink")) ++ goto link_failed; ++ ++ GST_DEBUG ("linking dl->csp2"); ++ if (!gst_element_link_pads (dl, "src", csp2, "sink")) ++ goto link_failed; ++ } + + GST_DEBUG ("linking csp2->vcrop"); + if (!gst_element_link_pads (csp2, "src", vcrop, "sink")) +@@ -181,8 +286,18 @@ build_convert_frame_pipeline (GstElement ** src_element, + goto link_failed; + } else { + GST_DEBUG ("linking src->csp"); +- if (!gst_element_link_pads (src, "src", csp, "sink")) +- goto link_failed; ++ if (!dl) { ++ if (!gst_element_link_pads (src, "src", csp, "sink")) ++ goto link_failed; ++ } else { ++ GST_DEBUG ("linking src->dl"); ++ if (!gst_element_link_pads (src, "src", dl, "sink")) ++ goto link_failed; ++ ++ GST_DEBUG ("linking dl->csp"); ++ if (!gst_element_link_pads (dl, "src", csp, "sink")) ++ goto link_failed; ++ } + } + + GST_DEBUG ("linking csp->vscale"); +diff --git a/gst-libs/gst/video/gstvideocodecalphameta.c b/gst-libs/gst/video/gstvideocodecalphameta.c +new file mode 100644 +index 000000000..027f5435e +--- /dev/null ++++ b/gst-libs/gst/video/gstvideocodecalphameta.c +@@ -0,0 +1,155 @@ ++/* GStreamer ++ * Copyright (C) 2021 Collabora Ltd. 
++ * Author: Nicolas Dufresne ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include "gstvideocodecalphameta.h" ++ ++/** ++ * SECTION:gstvideocodecalphameta ++ * @title: GstVideoCodecAlphaMeta ++ * @short_description: GstMeta that can carry an extra buffer holding an ++ * encoded a frame whith luma that can be used as an alpha channel. ++ * ++ * This meta is primarily for internal use in GStreamer elements to support ++ * VP8/VP9 transparent video stored into WebM or Matroska containers, or ++ * transparent static AV1 images. Nothing prevents you from using this meta ++ * for custom purposes, but it generally can't be used to easily to add support ++ * for alpha channels to CODECs or formats that don't support that out of the ++ * box. ++ * ++ * Since: 1.20 ++ */ ++ ++/** ++ * gst_video_codec_alpha_meta_api_get_type: ++ * ++ * Returns: #GType for the #GstVideoCodecAlphaMeta structure. ++ * ++ * Since: 1.20 ++ */ ++GType ++gst_video_codec_alpha_meta_api_get_type (void) ++{ ++ static GType type = 0; ++ static const gchar *tags[] = { GST_META_TAG_VIDEO_STR, NULL }; ++ ++ if (g_once_init_enter (&type)) { ++ GType _type = ++ gst_meta_api_type_register ("GstVideoCodecAlphaMetaAPI", tags); ++ g_once_init_leave (&type, _type); ++ } ++ return type; ++} ++ ++static gboolean ++gst_video_codec_alpha_meta_transform (GstBuffer * dest, ++ GstMeta * meta, GstBuffer * buffer, GQuark type, gpointer data) ++{ ++ GstVideoCodecAlphaMeta *dmeta, *smeta; ++ ++ smeta = (GstVideoCodecAlphaMeta *) meta; ++ ++ if (GST_META_TRANSFORM_IS_COPY (type)) { ++ dmeta = ++ (GstVideoCodecAlphaMeta *) gst_buffer_add_meta (dest, ++ GST_VIDEO_CODEC_ALPHA_META_INFO, NULL); ++ ++ if (!dmeta) ++ return FALSE; ++ ++ dmeta->buffer = gst_buffer_ref (smeta->buffer); ++ } ++ return TRUE; ++} ++ ++static gboolean ++gst_video_codec_alpha_meta_init (GstMeta * meta, gpointer params, ++ GstBuffer * buffer) ++{ ++ GstVideoCodecAlphaMeta *ca_meta = (GstVideoCodecAlphaMeta *) meta; ++ ++ /* the buffer ownership is transfered to the Meta */ ++ ca_meta->buffer = (GstBuffer *) params; ++ ++ return TRUE; ++} ++ ++static void ++gst_video_codec_alpha_meta_free (GstMeta * meta, GstBuffer * buffer) ++{ ++ GstVideoCodecAlphaMeta *ca_meta = (GstVideoCodecAlphaMeta *) meta; ++ gst_clear_buffer (&ca_meta->buffer); ++} ++ ++/** ++ * gst_video_codec_alpha_meta_get_info: ++ * ++ * Returns: #GstMetaInfo pointer that describes #GstVideoCodecAlphaMeta. 
++ * ++ * Since: 1.20 ++ */ ++const GstMetaInfo * ++gst_video_codec_alpha_meta_get_info (void) ++{ ++ static const GstMetaInfo *info = NULL; ++ ++ if (g_once_init_enter ((GstMetaInfo **) & info)) { ++ const GstMetaInfo *meta = ++ gst_meta_register (GST_VIDEO_CODEC_ALPHA_META_API_TYPE, ++ "GstVideoCodecAlphaMeta", ++ sizeof (GstVideoCodecAlphaMeta), ++ gst_video_codec_alpha_meta_init, ++ gst_video_codec_alpha_meta_free, ++ gst_video_codec_alpha_meta_transform); ++ g_once_init_leave ((GstMetaInfo **) & info, (GstMetaInfo *) meta); ++ } ++ ++ return info; ++} ++ ++/** ++ * gst_buffer_add_video_codec_alpha_meta: ++ * @buffer: (transfer none): a #GstBuffer ++ * @alpha_buffer: (transfer full): a #GstBuffer ++ * ++ * Attaches a #GstVideoCodecAlphaMeta metadata to @buffer with ++ * the given alpha buffer. ++ * ++ * Returns: (transfer none): the #GstVideoCodecAlphaMeta on @buffer. ++ * ++ * Since: 1.20 ++ */ ++GstVideoCodecAlphaMeta * ++gst_buffer_add_video_codec_alpha_meta (GstBuffer * buffer, ++ GstBuffer * alpha_buffer) ++{ ++ GstVideoCodecAlphaMeta *meta; ++ ++ g_return_val_if_fail (buffer != NULL, NULL); ++ g_return_val_if_fail (alpha_buffer != NULL, NULL); ++ ++ meta = ++ (GstVideoCodecAlphaMeta *) gst_buffer_add_meta (buffer, ++ GST_VIDEO_CODEC_ALPHA_META_INFO, alpha_buffer); ++ ++ return meta; ++} +diff --git a/gst-libs/gst/video/gstvideocodecalphameta.h b/gst-libs/gst/video/gstvideocodecalphameta.h +new file mode 100644 +index 000000000..14e371c39 +--- /dev/null ++++ b/gst-libs/gst/video/gstvideocodecalphameta.h +@@ -0,0 +1,88 @@ ++/* GStreamer ++ * Copyright (C) 2021 Collabora Ltd. ++ * Author: Nicolas Dufresne ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifndef __GST_VIDEO_CODEC_ALPHA_META_H__ ++#define __GST_VIDEO_CODEC_ALPHA_META_H__ ++ ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++/** ++ * GST_VIDEO_CODEC_ALPHA_META_API_TYPE: ++ * ++ * Since: 1.20 ++ */ ++#define GST_VIDEO_CODEC_ALPHA_META_API_TYPE (gst_video_codec_alpha_meta_api_get_type()) ++ ++/** ++ * GST_VIDEO_CODEC_ALPHA_META_INFO: ++ * ++ * Since: 1.20 ++ */ ++#define GST_VIDEO_CODEC_ALPHA_META_INFO (gst_video_codec_alpha_meta_get_info()) ++ ++typedef struct _GstVideoCodecAlphaMeta GstVideoCodecAlphaMeta; ++ ++/** ++ * GstVideoCodecAlphaMeta: ++ * @meta: parent #GstMeta ++ * @buffer: the encoded alpha frame ++ * ++ * Encapsulate an extra frame containing the encoded alpha channel for the ++ * currently negotiated CODEC. The streams must be of the same dimention as ++ * the original one. 
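For context, a minimal sketch (not part of this patch) of how the meta documented above can be attached and read back; video_buf and alpha_buf are assumed to be existing GstBuffers holding the encoded frame and its encoded alpha plane:

    GstVideoCodecAlphaMeta *meta;

    /* ownership of alpha_buf is transferred to the meta, as documented above */
    gst_buffer_add_video_codec_alpha_meta (video_buf, alpha_buf);

    meta = gst_buffer_get_video_codec_alpha_meta (video_buf);
    if (meta != NULL)
      g_print ("encoded alpha frame: %" G_GSIZE_FORMAT " bytes\n",
          gst_buffer_get_size (meta->buffer));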
++ * ++ * Since: 1.20 ++ */ ++struct _GstVideoCodecAlphaMeta ++{ ++ GstMeta meta; ++ ++ GstBuffer *buffer; ++}; ++ ++GST_VIDEO_API ++GType gst_video_codec_alpha_meta_api_get_type (void); ++ ++GST_VIDEO_API ++const GstMetaInfo *gst_video_codec_alpha_meta_get_info (void); ++ ++/** ++ * gst_buffer_get_video_codec_alpha_meta: ++ * @b: A #GstBuffer pointer, must be writable. ++ * ++ * Helper macro to get #GstVideoCodecAlphaMeta from an existing #GstBuffer. ++ * ++ * Returns: (nullable): the #GstVideoCodecAlphaMeta pointer, or %NULL if none. ++ * ++ * Since: 1.20 ++ */ ++#define gst_buffer_get_video_codec_alpha_meta(b) \ ++ ((GstVideoCodecAlphaMeta *)gst_buffer_get_meta((b),GST_VIDEO_CODEC_ALPHA_META_API_TYPE)) ++ ++GST_VIDEO_API ++GstVideoCodecAlphaMeta *gst_buffer_add_video_codec_alpha_meta (GstBuffer * buffer, ++ GstBuffer * alpha_buffer); ++ ++G_END_DECLS ++ ++#endif /* __GST_VIDEO_CODEC_ALPHA_META_H__ */ +diff --git a/gst-libs/gst/video/gstvideodecoder.c b/gst-libs/gst/video/gstvideodecoder.c +index 36f19de6d..6a040863e 100644 +--- a/gst-libs/gst/video/gstvideodecoder.c ++++ b/gst-libs/gst/video/gstvideodecoder.c +@@ -57,14 +57,29 @@ + * + * * Each input frame is provided in turn to the subclass' @handle_frame + * callback. +- * The ownership of the frame is given to the @handle_frame callback. ++ * * When the subclass enables the subframe mode with `gst_video_decoder_set_subframe_mode`, ++ * the base class will provide to the subclass the same input frame with ++ * different input buffers to the subclass @handle_frame ++ * callback. During this call, the subclass needs to take ++ * ownership of the input_buffer as @GstVideoCodecFrame.input_buffer ++ * will have been changed before the next subframe buffer is received. ++ * The subclass will call `gst_video_decoder_have_last_subframe` ++ * when a new input frame can be created by the base class. ++ * Every subframe will share the same @GstVideoCodecFrame.output_buffer ++ * to write the decoding result. The subclass is responsible to protect ++ * its access. + * + * * If codec processing results in decoded data, the subclass should call +- * @gst_video_decoder_finish_frame to have decoded data pushed. +- * downstream. Otherwise, the subclass must call +- * @gst_video_decoder_drop_frame, to allow the base class to do timestamp +- * and offset tracking, and possibly to requeue the frame for a later +- * attempt in the case of reverse playback. ++ * @gst_video_decoder_finish_frame to have decoded data pushed ++ * downstream. In subframe mode ++ * the subclass should call @gst_video_decoder_finish_subframe until the ++ * last subframe where it should call @gst_video_decoder_finish_frame. ++ * The subclass can detect the last subframe using GST_VIDEO_BUFFER_FLAG_MARKER ++ * on buffers or using its own logic to collect the subframes. ++ * In case of decoding failure, the subclass must call ++ * @gst_video_decoder_drop_frame or @gst_video_decoder_drop_subframe, ++ * to allow the base class to do timestamp and offset tracking, and possibly ++ * to requeue the frame for a later attempt in the case of reverse playback. 
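As a rough illustration of the subframe contract described above (not part of this patch; the my_dec_* callback names are hypothetical and real subclasses may differ), a decoder subclass operating in subframe mode could be structured like this:

    static gboolean
    my_dec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
    {
      /* per the documentation above, subframe mode is enabled from set_format */
      gst_video_decoder_set_subframe_mode (decoder, TRUE);
      return TRUE;
    }

    static GstFlowReturn
    my_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
    {
      /* the same frame is passed repeatedly with a new input_buffer per subframe */
      gboolean last = GST_BUFFER_FLAG_IS_SET (frame->input_buffer,
          GST_VIDEO_BUFFER_FLAG_MARKER);

      /* ... decode frame->input_buffer, accumulating into frame->output_buffer ... */

      if (!last)
        return gst_video_decoder_finish_subframe (decoder, frame);

      /* last subframe: let the base class start a new input frame, then push */
      gst_video_decoder_have_last_subframe (decoder, frame);
      return gst_video_decoder_finish_frame (decoder, frame);
    }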
+ * + * ## Shutdown phase + * +@@ -290,12 +305,25 @@ GST_DEBUG_CATEGORY (videodecoder_debug); + /* properties */ + #define DEFAULT_QOS TRUE + #define DEFAULT_MAX_ERRORS GST_VIDEO_DECODER_MAX_ERRORS ++#define DEFAULT_MIN_FORCE_KEY_UNIT_INTERVAL 0 ++#define DEFAULT_DISCARD_CORRUPTED_FRAMES FALSE ++#define DEFAULT_AUTOMATIC_REQUEST_SYNC_POINTS FALSE ++#define DEFAULT_AUTOMATIC_REQUEST_SYNC_POINT_FLAGS (GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT | GST_VIDEO_DECODER_REQUEST_SYNC_POINT_CORRUPT_OUTPUT) ++ ++/* Used for request_sync_point_frame_number. These are out of range for the ++ * frame numbers and can be given special meaning */ ++#define REQUEST_SYNC_POINT_PENDING G_MAXUINT + 1 ++#define REQUEST_SYNC_POINT_UNSET G_MAXUINT64 + + enum + { + PROP_0, + PROP_QOS, + PROP_MAX_ERRORS, ++ PROP_MIN_FORCE_KEY_UNIT_INTERVAL, ++ PROP_DISCARD_CORRUPTED_FRAMES, ++ PROP_AUTOMATIC_REQUEST_SYNC_POINTS, ++ PROP_AUTOMATIC_REQUEST_SYNC_POINT_FLAGS, + }; + + struct _GstVideoDecoderPrivate +@@ -319,6 +347,9 @@ struct _GstVideoDecoderPrivate + /* Whether input is considered packetized or not */ + gboolean packetized; + ++ /* whether input is considered as subframes */ ++ gboolean subframe_mode; ++ + /* Error handling */ + gint max_errors; + gint error_count; +@@ -334,7 +365,7 @@ struct _GstVideoDecoderPrivate + GstSegmentFlags decode_flags; + + /* ... being tracked here; +- * only available during parsing */ ++ * only available during parsing or when doing subframe decoding */ + GstVideoCodecFrame *current_frame; + /* events that should apply to the current frame */ + /* FIXME 2.0: Use a GQueue or similar, see GstVideoCodecFrame::events */ +@@ -375,9 +406,20 @@ struct _GstVideoDecoderPrivate + /* combine with base_picture_number, framerate and calcs to yield (presentation) ts */ + GstClockTime base_timestamp; + +- /* FIXME : reorder_depth is never set */ +- int reorder_depth; +- int distance_from_sync; ++ /* Properties */ ++ GstClockTime min_force_key_unit_interval; ++ gboolean discard_corrupted_frames; ++ ++ /* Key unit related state */ ++ gboolean needs_sync_point; ++ GstVideoDecoderRequestSyncPointFlags request_sync_point_flags; ++ guint64 request_sync_point_frame_number; ++ GstClockTime last_force_key_unit_time; ++ /* -1 if we saw no sync point yet */ ++ guint64 distance_from_sync; ++ ++ gboolean automatic_request_sync_points; ++ GstVideoDecoderRequestSyncPointFlags automatic_request_sync_point_flags; + + guint32 system_frame_number; + guint32 decode_frame_number; +@@ -404,6 +446,9 @@ struct _GstVideoDecoderPrivate + gint64 min_latency; + gint64 max_latency; + ++ /* Tracks whether the latency message was posted at least once */ ++ gboolean posted_latency_msg; ++ + /* upstream stream tags (global tags are passed through as-is) */ + GstTagList *upstream_tags; + +@@ -494,6 +539,19 @@ static gboolean gst_video_decoder_src_query_default (GstVideoDecoder * decoder, + static gboolean gst_video_decoder_transform_meta_default (GstVideoDecoder * + decoder, GstVideoCodecFrame * frame, GstMeta * meta); + ++static gboolean gst_video_decoder_handle_missing_data_default (GstVideoDecoder * ++ decoder, GstClockTime timestamp, GstClockTime duration); ++ ++static void gst_video_decoder_replace_input_buffer (GstVideoDecoder * decoder, ++ GstVideoCodecFrame * frame, GstBuffer ** dest_buffer); ++ ++static void gst_video_decoder_copy_metas (GstVideoDecoder * decoder, ++ GstVideoCodecFrame * frame, GstBuffer * src_buffer, ++ GstBuffer * dest_buffer); ++ ++static void gst_video_decoder_request_sync_point_internal 
(GstVideoDecoder * ++ dec, GstClockTime deadline, GstVideoDecoderRequestSyncPointFlags flags); ++ + /* we can't use G_DEFINE_ABSTRACT_TYPE because we need the klass in the _init + * method to get to the padtemplates */ + GType +@@ -564,6 +622,7 @@ gst_video_decoder_class_init (GstVideoDecoderClass * klass) + klass->sink_query = gst_video_decoder_sink_query_default; + klass->src_query = gst_video_decoder_src_query_default; + klass->transform_meta = gst_video_decoder_transform_meta_default; ++ klass->handle_missing_data = gst_video_decoder_handle_missing_data_default; + + /** + * GstVideoDecoder:qos: +@@ -594,6 +653,75 @@ gst_video_decoder_class_init (GstVideoDecoderClass * klass) + -1, G_MAXINT, DEFAULT_MAX_ERRORS, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + ++ /** ++ * GstVideoDecoder:min-force-key-unit-interval: ++ * ++ * Minimum interval between force-key-unit events sent upstream by the ++ * decoder. Setting this to 0 will cause every event to be handled, setting ++ * this to %GST_CLOCK_TIME_NONE will cause every event to be ignored. ++ * ++ * See gst_video_event_new_upstream_force_key_unit() for more details about ++ * force-key-unit events. ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_MIN_FORCE_KEY_UNIT_INTERVAL, ++ g_param_spec_uint64 ("min-force-key-unit-interval", ++ "Minimum Force Keyunit Interval", ++ "Minimum interval between force-keyunit requests in nanoseconds", 0, ++ G_MAXUINT64, DEFAULT_MIN_FORCE_KEY_UNIT_INTERVAL, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstVideoDecoder:discard-corrupted-frames: ++ * ++ * If set to %TRUE the decoder will discard frames that are marked as ++ * corrupted instead of outputting them. ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_DISCARD_CORRUPTED_FRAMES, ++ g_param_spec_boolean ("discard-corrupted-frames", ++ "Discard Corrupted Frames", ++ "Discard frames marked as corrupted instead of outputting them", ++ DEFAULT_DISCARD_CORRUPTED_FRAMES, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstVideoDecoder:automatic-request-sync-points: ++ * ++ * If set to %TRUE the decoder will automatically request sync points when ++ * it seems like a good idea, e.g. if the first frames are not key frames or ++ * if packet loss was reported by upstream. ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_AUTOMATIC_REQUEST_SYNC_POINTS, ++ g_param_spec_boolean ("automatic-request-sync-points", ++ "Automatic Request Sync Points", ++ "Automatically request sync points when it would be useful", ++ DEFAULT_AUTOMATIC_REQUEST_SYNC_POINTS, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstVideoDecoder:automatic-request-sync-point-flags: ++ * ++ * GstVideoDecoderRequestSyncPointFlags to use for the automatically ++ * requested sync points if `automatic-request-sync-points` is enabled. 
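The four properties installed above are plain GObject properties; purely as an illustration (not part of this patch, with dec assumed to be some GstVideoDecoder-derived element instance), an application could configure them with:

    g_object_set (dec,
        "automatic-request-sync-points", TRUE,
        "discard-corrupted-frames", TRUE,
        "min-force-key-unit-interval", GST_SECOND, /* at most one forced keyframe per second */
        NULL);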
++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_AUTOMATIC_REQUEST_SYNC_POINT_FLAGS, ++ g_param_spec_flags ("automatic-request-sync-point-flags", ++ "Automatic Request Sync Point Flags", ++ "Flags to use when automatically requesting sync points", ++ GST_TYPE_VIDEO_DECODER_REQUEST_SYNC_POINT_FLAGS, ++ DEFAULT_AUTOMATIC_REQUEST_SYNC_POINT_FLAGS, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ + meta_tag_video_quark = g_quark_from_static_string (GST_META_TAG_VIDEO_STR); + } + +@@ -652,6 +780,11 @@ gst_video_decoder_init (GstVideoDecoder * decoder, GstVideoDecoderClass * klass) + decoder->priv->min_latency = 0; + decoder->priv->max_latency = 0; + ++ decoder->priv->automatic_request_sync_points = ++ DEFAULT_AUTOMATIC_REQUEST_SYNC_POINTS; ++ decoder->priv->automatic_request_sync_point_flags = ++ DEFAULT_AUTOMATIC_REQUEST_SYNC_POINT_FLAGS; ++ + gst_video_decoder_reset (decoder, TRUE, TRUE); + } + +@@ -710,18 +843,8 @@ _new_output_state (GstVideoFormat fmt, GstVideoInterlaceMode interlace_mode, + if (copy_interlace_mode) + tgt->interlace_mode = ref->interlace_mode; + tgt->flags = ref->flags; +- /* only copy values that are not unknown so that we don't override the +- * defaults. subclasses should really fill these in when they know. */ +- if (ref->chroma_site) +- tgt->chroma_site = ref->chroma_site; +- if (ref->colorimetry.range) +- tgt->colorimetry.range = ref->colorimetry.range; +- if (ref->colorimetry.matrix) +- tgt->colorimetry.matrix = ref->colorimetry.matrix; +- if (ref->colorimetry.transfer) +- tgt->colorimetry.transfer = ref->colorimetry.transfer; +- if (ref->colorimetry.primaries) +- tgt->colorimetry.primaries = ref->colorimetry.primaries; ++ tgt->chroma_site = ref->chroma_site; ++ tgt->colorimetry = ref->colorimetry; + GST_DEBUG ("reference par %d/%d fps %d/%d", + ref->par_n, ref->par_d, ref->fps_n, ref->fps_d); + tgt->par_n = ref->par_n; +@@ -866,6 +989,18 @@ gst_video_decoder_get_property (GObject * object, guint property_id, + case PROP_MAX_ERRORS: + g_value_set_int (value, gst_video_decoder_get_max_errors (dec)); + break; ++ case PROP_MIN_FORCE_KEY_UNIT_INTERVAL: ++ g_value_set_uint64 (value, priv->min_force_key_unit_interval); ++ break; ++ case PROP_DISCARD_CORRUPTED_FRAMES: ++ g_value_set_boolean (value, priv->discard_corrupted_frames); ++ break; ++ case PROP_AUTOMATIC_REQUEST_SYNC_POINTS: ++ g_value_set_boolean (value, priv->automatic_request_sync_points); ++ break; ++ case PROP_AUTOMATIC_REQUEST_SYNC_POINT_FLAGS: ++ g_value_set_flags (value, priv->automatic_request_sync_point_flags); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; +@@ -886,6 +1021,18 @@ gst_video_decoder_set_property (GObject * object, guint property_id, + case PROP_MAX_ERRORS: + gst_video_decoder_set_max_errors (dec, g_value_get_int (value)); + break; ++ case PROP_MIN_FORCE_KEY_UNIT_INTERVAL: ++ priv->min_force_key_unit_interval = g_value_get_uint64 (value); ++ break; ++ case PROP_DISCARD_CORRUPTED_FRAMES: ++ priv->discard_corrupted_frames = g_value_get_boolean (value); ++ break; ++ case PROP_AUTOMATIC_REQUEST_SYNC_POINTS: ++ priv->automatic_request_sync_points = g_value_get_boolean (value); ++ break; ++ case PROP_AUTOMATIC_REQUEST_SYNC_POINT_FLAGS: ++ priv->automatic_request_sync_point_flags = g_value_get_flags (value); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; +@@ -1166,14 +1313,43 @@ caps_error: + } + } + ++static gboolean 
++gst_video_decoder_handle_missing_data_default (GstVideoDecoder * ++ decoder, GstClockTime timestamp, GstClockTime duration) ++{ ++ GstVideoDecoderPrivate *priv; ++ ++ priv = decoder->priv; ++ ++ if (priv->automatic_request_sync_points) { ++ GstClockTime deadline = ++ gst_segment_to_running_time (&decoder->input_segment, GST_FORMAT_TIME, ++ timestamp); ++ ++ GST_DEBUG_OBJECT (decoder, ++ "Requesting sync point for missing data at running time %" ++ GST_TIME_FORMAT " timestamp %" GST_TIME_FORMAT " with duration %" ++ GST_TIME_FORMAT, GST_TIME_ARGS (deadline), GST_TIME_ARGS (timestamp), ++ GST_TIME_ARGS (duration)); ++ ++ gst_video_decoder_request_sync_point_internal (decoder, deadline, ++ priv->automatic_request_sync_point_flags); ++ } ++ ++ return TRUE; ++} ++ + static gboolean + gst_video_decoder_sink_event_default (GstVideoDecoder * decoder, + GstEvent * event) + { ++ GstVideoDecoderClass *decoder_class; + GstVideoDecoderPrivate *priv; + gboolean ret = FALSE; + gboolean forward_immediate = FALSE; + ++ decoder_class = GST_VIDEO_DECODER_GET_CLASS (decoder); ++ + priv = decoder->priv; + + switch (GST_EVENT_TYPE (event)) { +@@ -1216,7 +1392,7 @@ gst_video_decoder_sink_event_default (GstVideoDecoder * decoder, + GstFlowReturn flow_ret = GST_FLOW_OK; + + GST_VIDEO_DECODER_STREAM_LOCK (decoder); +- flow_ret = gst_video_decoder_drain_out (decoder, TRUE); ++ flow_ret = gst_video_decoder_drain_out (decoder, FALSE); + GST_VIDEO_DECODER_STREAM_UNLOCK (decoder); + ret = (flow_ret == GST_FLOW_OK); + +@@ -1261,54 +1437,70 @@ gst_video_decoder_sink_event_default (GstVideoDecoder * decoder, + } + case GST_EVENT_GAP: + { ++ GstClockTime timestamp, duration; ++ GstGapFlags gap_flags = 0; + GstFlowReturn flow_ret = GST_FLOW_OK; + gboolean needs_reconfigure = FALSE; + GList *events; + GList *frame_events; + +- GST_VIDEO_DECODER_STREAM_LOCK (decoder); +- if (decoder->input_segment.flags & GST_SEEK_FLAG_TRICKMODE_KEY_UNITS) +- flow_ret = gst_video_decoder_drain_out (decoder, FALSE); +- ret = (flow_ret == GST_FLOW_OK); ++ gst_event_parse_gap (event, ×tamp, &duration); ++ gst_event_parse_gap_flags (event, &gap_flags); + +- /* Ensure we have caps before forwarding the event */ +- if (!decoder->priv->output_state) { +- if (!gst_video_decoder_negotiate_default_caps (decoder)) { +- GST_VIDEO_DECODER_STREAM_UNLOCK (decoder); +- GST_ELEMENT_ERROR (decoder, STREAM, FORMAT, (NULL), +- ("Decoder output not negotiated before GAP event.")); +- forward_immediate = TRUE; +- break; ++ GST_VIDEO_DECODER_STREAM_LOCK (decoder); ++ /* If this is not missing data, or the subclass does not handle it ++ * specifically, then drain out the decoder and forward the event ++ * directly. 
*/ ++ if ((gap_flags & GST_GAP_FLAG_MISSING_DATA) == 0 ++ || !decoder_class->handle_missing_data ++ || decoder_class->handle_missing_data (decoder, timestamp, ++ duration)) { ++ if (decoder->input_segment.flags & GST_SEEK_FLAG_TRICKMODE_KEY_UNITS) ++ flow_ret = gst_video_decoder_drain_out (decoder, FALSE); ++ ret = (flow_ret == GST_FLOW_OK); ++ ++ /* Ensure we have caps before forwarding the event */ ++ if (!decoder->priv->output_state) { ++ if (!gst_video_decoder_negotiate_default_caps (decoder)) { ++ GST_VIDEO_DECODER_STREAM_UNLOCK (decoder); ++ GST_ELEMENT_ERROR (decoder, STREAM, FORMAT, (NULL), ++ ("Decoder output not negotiated before GAP event.")); ++ forward_immediate = TRUE; ++ break; ++ } ++ needs_reconfigure = TRUE; + } +- needs_reconfigure = TRUE; +- } + +- needs_reconfigure = gst_pad_check_reconfigure (decoder->srcpad) +- || needs_reconfigure; +- if (decoder->priv->output_state_changed || needs_reconfigure) { +- if (!gst_video_decoder_negotiate_unlocked (decoder)) { +- GST_WARNING_OBJECT (decoder, "Failed to negotiate with downstream"); +- gst_pad_mark_reconfigure (decoder->srcpad); ++ needs_reconfigure = gst_pad_check_reconfigure (decoder->srcpad) ++ || needs_reconfigure; ++ if (decoder->priv->output_state_changed || needs_reconfigure) { ++ if (!gst_video_decoder_negotiate_unlocked (decoder)) { ++ GST_WARNING_OBJECT (decoder, "Failed to negotiate with downstream"); ++ gst_pad_mark_reconfigure (decoder->srcpad); ++ } + } +- } + +- GST_DEBUG_OBJECT (decoder, "Pushing all pending serialized events" +- " before the gap"); +- events = decoder->priv->pending_events; +- frame_events = decoder->priv->current_frame_events; +- decoder->priv->pending_events = NULL; +- decoder->priv->current_frame_events = NULL; ++ GST_DEBUG_OBJECT (decoder, "Pushing all pending serialized events" ++ " before the gap"); ++ events = decoder->priv->pending_events; ++ frame_events = decoder->priv->current_frame_events; ++ decoder->priv->pending_events = NULL; ++ decoder->priv->current_frame_events = NULL; + +- GST_VIDEO_DECODER_STREAM_UNLOCK (decoder); ++ GST_VIDEO_DECODER_STREAM_UNLOCK (decoder); + +- gst_video_decoder_push_event_list (decoder, events); +- gst_video_decoder_push_event_list (decoder, frame_events); ++ gst_video_decoder_push_event_list (decoder, events); ++ gst_video_decoder_push_event_list (decoder, frame_events); + +- /* Forward GAP immediately. Everything is drained after +- * the GAP event and we can forward this event immediately +- * now without having buffers out of order. +- */ +- forward_immediate = TRUE; ++ /* Forward GAP immediately. Everything is drained after ++ * the GAP event and we can forward this event immediately ++ * now without having buffers out of order. ++ */ ++ forward_immediate = TRUE; ++ } else { ++ GST_VIDEO_DECODER_STREAM_UNLOCK (decoder); ++ gst_clear_event (&event); ++ } + break; + } + case GST_EVENT_CUSTOM_DOWNSTREAM: +@@ -1850,8 +2042,8 @@ gst_video_decoder_src_query (GstPad * pad, GstObject * parent, GstQuery * query) + /** + * gst_video_decoder_proxy_getcaps: + * @decoder: a #GstVideoDecoder +- * @caps: (allow-none): initial caps +- * @filter: (allow-none): filter caps ++ * @caps: (nullable): initial caps ++ * @filter: (nullable): filter caps + * + * Returns caps that express @caps (or sink template caps if @caps == NULL) + * restricted to resolution/format/... 
combinations supported by downstream +@@ -2045,6 +2237,12 @@ gst_video_decoder_add_buffer_info (GstVideoDecoder * decoder, + ts->flags = GST_BUFFER_FLAGS (buffer); + + g_queue_push_tail (&priv->timestamps, ts); ++ ++ if (g_queue_get_length (&priv->timestamps) > 40) { ++ GST_WARNING_OBJECT (decoder, ++ "decoder timestamp list getting long: %d timestamps," ++ "possible internal leaking?", g_queue_get_length (&priv->timestamps)); ++ } + } + + static void +@@ -2089,18 +2287,6 @@ gst_video_decoder_get_buffer_info_at_offset (GstVideoDecoder * + GST_TIME_ARGS (*pts), GST_TIME_ARGS (*dts), *flags, got_offset, offset); + } + +-#if !GLIB_CHECK_VERSION(2, 60, 0) +-#define g_queue_clear_full queue_clear_full +-static void +-queue_clear_full (GQueue * queue, GDestroyNotify free_func) +-{ +- gpointer data; +- +- while ((data = g_queue_pop_head (queue)) != NULL) +- free_func (data); +-} +-#endif +- + static void + gst_video_decoder_clear_queues (GstVideoDecoder * dec) + { +@@ -2158,7 +2344,12 @@ gst_video_decoder_reset (GstVideoDecoder * decoder, gboolean full, + priv->earliest_time = GST_CLOCK_TIME_NONE; + priv->proportion = 0.5; + priv->decode_flags_override = FALSE; ++ ++ priv->request_sync_point_flags = 0; ++ priv->request_sync_point_frame_number = REQUEST_SYNC_POINT_UNSET; ++ priv->last_force_key_unit_time = GST_CLOCK_TIME_NONE; + GST_OBJECT_UNLOCK (decoder); ++ priv->distance_from_sync = -1; + } + + if (full) { +@@ -2187,6 +2378,8 @@ gst_video_decoder_reset (GstVideoDecoder * decoder, gboolean full, + priv->dropped = 0; + priv->processed = 0; + ++ priv->posted_latency_msg = FALSE; ++ + priv->decode_frame_number = 0; + priv->base_picture_number = 0; + +@@ -2257,22 +2450,39 @@ gst_video_decoder_chain_forward (GstVideoDecoder * decoder, + priv->input_offset += gst_buffer_get_size (buf); + + if (priv->packetized) { ++ GstVideoCodecFrame *frame; + gboolean was_keyframe = FALSE; ++ ++ frame = priv->current_frame; ++ ++ frame->abidata.ABI.num_subframes++; ++ if (gst_video_decoder_get_subframe_mode (decoder)) { ++ /* End the frame if the marker flag is set */ ++ if (!GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_MARKER) ++ && (decoder->input_segment.rate > 0.0)) ++ priv->current_frame = gst_video_codec_frame_ref (frame); ++ else ++ priv->current_frame = NULL; ++ } else { ++ priv->current_frame = frame; ++ } ++ + if (!GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT)) { + was_keyframe = TRUE; +- GST_LOG_OBJECT (decoder, "Marking current_frame as sync point"); +- GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (priv->current_frame); ++ GST_DEBUG_OBJECT (decoder, "Marking current_frame as sync point"); ++ GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame); + } + +- priv->current_frame->input_buffer = buf; ++ gst_video_decoder_replace_input_buffer (decoder, frame, &buf); + + if (decoder->input_segment.rate < 0.0) { +- priv->parse_gather = +- g_list_prepend (priv->parse_gather, priv->current_frame); ++ priv->parse_gather = g_list_prepend (priv->parse_gather, frame); ++ priv->current_frame = NULL; + } else { +- ret = gst_video_decoder_decode_frame (decoder, priv->current_frame); ++ ret = gst_video_decoder_decode_frame (decoder, frame); ++ if (!gst_video_decoder_get_subframe_mode (decoder)) ++ priv->current_frame = NULL; + } +- priv->current_frame = NULL; + /* If in trick mode and it was a keyframe, drain decoder to avoid extra + * latency. 
Only do this for forwards playback as reverse playback handles + * draining on keyframes in flush_parse(), and would otherwise call back +@@ -2301,13 +2511,37 @@ gst_video_decoder_flush_decode (GstVideoDecoder * dec) + GstVideoDecoderPrivate *priv = dec->priv; + GstFlowReturn res = GST_FLOW_OK; + GList *walk; +- ++ GstVideoCodecFrame *current_frame = NULL; ++ gboolean last_subframe; + GST_DEBUG_OBJECT (dec, "flushing buffers to decode"); + + walk = priv->decode; + while (walk) { + GList *next; + GstVideoCodecFrame *frame = (GstVideoCodecFrame *) (walk->data); ++ last_subframe = TRUE; ++ /* In subframe mode, we need to get rid of intermediary frames ++ * created during the buffer gather stage. That's why that we keep a current ++ * frame as the main frame and drop all the frame afterwhile until the end ++ * of the subframes batch. ++ * */ ++ if (gst_video_decoder_get_subframe_mode (dec)) { ++ if (current_frame == NULL) { ++ current_frame = gst_video_codec_frame_ref (frame); ++ } else { ++ if (current_frame->input_buffer) { ++ gst_video_decoder_copy_metas (dec, current_frame, ++ current_frame->input_buffer, current_frame->output_buffer); ++ gst_buffer_unref (current_frame->input_buffer); ++ } ++ current_frame->input_buffer = gst_buffer_ref (frame->input_buffer); ++ gst_video_codec_frame_unref (frame); ++ } ++ last_subframe = GST_BUFFER_FLAG_IS_SET (current_frame->input_buffer, ++ GST_VIDEO_BUFFER_FLAG_MARKER); ++ } else { ++ current_frame = frame; ++ } + + GST_DEBUG_OBJECT (dec, "decoding frame %p buffer %p, PTS %" GST_TIME_FORMAT + ", DTS %" GST_TIME_FORMAT, frame, frame->input_buffer, +@@ -2319,10 +2553,12 @@ gst_video_decoder_flush_decode (GstVideoDecoder * dec) + priv->decode = g_list_delete_link (priv->decode, walk); + + /* decode buffer, resulting data prepended to queue */ +- res = gst_video_decoder_decode_frame (dec, frame); ++ res = gst_video_decoder_decode_frame (dec, current_frame); + if (res != GST_FLOW_OK) + break; +- ++ if (!gst_video_decoder_get_subframe_mode (dec) ++ || last_subframe) ++ current_frame = NULL; + walk = next; + } + +@@ -2751,9 +2987,9 @@ gst_video_decoder_prepare_finish_frame (GstVideoDecoder * + sync = GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame); + + GST_LOG_OBJECT (decoder, +- "finish frame %p (#%d) sync:%d PTS:%" GST_TIME_FORMAT " DTS:%" ++ "finish frame %p (#%d)(sub=#%d) sync:%d PTS:%" GST_TIME_FORMAT " DTS:%" + GST_TIME_FORMAT, +- frame, frame->system_frame_number, ++ frame, frame->system_frame_number, frame->abidata.ABI.num_subframes, + sync, GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (frame->dts)); + + /* Push all pending events that arrived before this frame */ +@@ -2891,11 +3127,17 @@ gst_video_decoder_prepare_finish_frame (GstVideoDecoder * + if (frame->duration != GST_CLOCK_TIME_NONE) { + if (GST_CLOCK_TIME_IS_VALID (priv->last_timestamp_out)) + frame->pts = priv->last_timestamp_out + frame->duration; +- else if (decoder->output_segment.rate > 0.0) ++ else if (frame->dts != GST_CLOCK_TIME_NONE) { ++ frame->pts = frame->dts; ++ GST_LOG_OBJECT (decoder, ++ "Setting DTS as PTS %" GST_TIME_FORMAT " for frame...", ++ GST_TIME_ARGS (frame->pts)); ++ } else if (decoder->output_segment.rate > 0.0) + frame->pts = decoder->output_segment.start; +- GST_LOG_OBJECT (decoder, +- "Guessing timestamp %" GST_TIME_FORMAT " for frame...", +- GST_TIME_ARGS (frame->pts)); ++ GST_INFO_OBJECT (decoder, ++ "Guessing PTS=%" GST_TIME_FORMAT " for frame... 
DTS=%" ++ GST_TIME_FORMAT, GST_TIME_ARGS (frame->pts), ++ GST_TIME_ARGS (frame->dts)); + } else if (sync && frame->dts != GST_CLOCK_TIME_NONE) { + frame->pts = frame->dts; + GST_LOG_OBJECT (decoder, +@@ -3011,6 +3253,10 @@ gst_video_decoder_drop_frame (GstVideoDecoder * dec, GstVideoCodecFrame * frame) + { + GST_LOG_OBJECT (dec, "drop frame %p", frame); + ++ if (gst_video_decoder_get_subframe_mode (dec)) ++ GST_DEBUG_OBJECT (dec, "Drop subframe %d. Must be the last one.", ++ frame->abidata.ABI.num_subframes); ++ + GST_VIDEO_DECODER_STREAM_LOCK (dec); + + gst_video_decoder_prepare_finish_frame (dec, frame, TRUE); +@@ -3023,6 +3269,41 @@ gst_video_decoder_drop_frame (GstVideoDecoder * dec, GstVideoCodecFrame * frame) + /* now free the frame */ + gst_video_decoder_release_frame (dec, frame); + ++ /* store that we have valid decoded data */ ++ dec->priv->had_output_data = TRUE; ++ ++ GST_VIDEO_DECODER_STREAM_UNLOCK (dec); ++ ++ return GST_FLOW_OK; ++} ++ ++/** ++ * gst_video_decoder_drop_subframe: ++ * @dec: a #GstVideoDecoder ++ * @frame: (transfer full): the #GstVideoCodecFrame ++ * ++ * Drops input data. ++ * The frame is not considered finished until the whole frame ++ * is finished or dropped by the subclass. ++ * ++ * Returns: a #GstFlowReturn, usually GST_FLOW_OK. ++ * ++ * Since: 1.20 ++ */ ++GstFlowReturn ++gst_video_decoder_drop_subframe (GstVideoDecoder * dec, ++ GstVideoCodecFrame * frame) ++{ ++ g_return_val_if_fail (gst_video_decoder_get_subframe_mode (dec), ++ GST_FLOW_NOT_SUPPORTED); ++ ++ GST_LOG_OBJECT (dec, "drop subframe %p num=%d", frame->input_buffer, ++ gst_video_decoder_get_input_subframe_index (dec, frame)); ++ ++ GST_VIDEO_DECODER_STREAM_LOCK (dec); ++ ++ gst_video_codec_frame_unref (frame); ++ + GST_VIDEO_DECODER_STREAM_UNLOCK (dec); + + return GST_FLOW_OK; +@@ -3059,6 +3340,7 @@ typedef struct + { + GstVideoDecoder *decoder; + GstVideoCodecFrame *frame; ++ GstBuffer *buffer; + } CopyMetaData; + + static gboolean +@@ -3068,10 +3350,12 @@ foreach_metadata (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data) + GstVideoDecoder *decoder = data->decoder; + GstVideoDecoderClass *klass = GST_VIDEO_DECODER_GET_CLASS (decoder); + GstVideoCodecFrame *frame = data->frame; ++ GstBuffer *buffer = data->buffer; + const GstMetaInfo *info = (*meta)->info; + gboolean do_copy = FALSE; + +- if (gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory)) { ++ if (gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory) ++ || gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory_reference)) { + /* never call the transform_meta with memory specific metadata */ + GST_DEBUG_OBJECT (decoder, "not copying memory specific metadata %s", + g_type_name (info->api)); +@@ -3088,12 +3372,48 @@ foreach_metadata (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data) + GstMetaTransformCopy copy_data = { FALSE, 0, -1 }; + GST_DEBUG_OBJECT (decoder, "copy metadata %s", g_type_name (info->api)); + /* simply copy then */ +- info->transform_func (frame->output_buffer, *meta, inbuf, +- _gst_meta_transform_copy, ©_data); ++ ++ info->transform_func (buffer, *meta, inbuf, _gst_meta_transform_copy, ++ ©_data); + } + return TRUE; + } + ++static void ++gst_video_decoder_copy_metas (GstVideoDecoder * decoder, ++ GstVideoCodecFrame * frame, GstBuffer * src_buffer, GstBuffer * dest_buffer) ++{ ++ GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_GET_CLASS (decoder); ++ ++ if (decoder_class->transform_meta) { ++ if (G_LIKELY (frame)) { ++ CopyMetaData data; ++ ++ data.decoder = decoder; ++ 
data.frame = frame; ++ data.buffer = dest_buffer; ++ gst_buffer_foreach_meta (src_buffer, foreach_metadata, &data); ++ } else { ++ GST_WARNING_OBJECT (decoder, ++ "Can't copy metadata because input frame disappeared"); ++ } ++ } ++} ++ ++static void ++gst_video_decoder_replace_input_buffer (GstVideoDecoder * decoder, ++ GstVideoCodecFrame * frame, GstBuffer ** dest_buffer) ++{ ++ if (frame->input_buffer) { ++ *dest_buffer = gst_buffer_make_writable (*dest_buffer); ++ gst_video_decoder_copy_metas (decoder, frame, frame->input_buffer, ++ *dest_buffer); ++ gst_buffer_unref (frame->input_buffer); ++ } ++ ++ frame->input_buffer = *dest_buffer; ++} ++ + /** + * gst_video_decoder_finish_frame: + * @decoder: a #GstVideoDecoder +@@ -3115,7 +3435,6 @@ gst_video_decoder_finish_frame (GstVideoDecoder * decoder, + GstVideoCodecFrame * frame) + { + GstFlowReturn ret = GST_FLOW_OK; +- GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_GET_CLASS (decoder); + GstVideoDecoderPrivate *priv = decoder->priv; + GstBuffer *output_buffer; + gboolean needs_reconfigure = FALSE; +@@ -3153,7 +3472,40 @@ gst_video_decoder_finish_frame (GstVideoDecoder * decoder, + + /* no buffer data means this frame is skipped */ + if (!frame->output_buffer || GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (frame)) { +- GST_DEBUG_OBJECT (decoder, "skipping frame %" GST_TIME_FORMAT, ++ GST_DEBUG_OBJECT (decoder, ++ "skipping frame %" GST_TIME_FORMAT " because not output was produced", ++ GST_TIME_ARGS (frame->pts)); ++ goto done; ++ } ++ ++ /* Mark output as corrupted if the subclass requested so and we're either ++ * still before the sync point after the request, or we don't even know the ++ * frame number of the sync point yet (it is 0) */ ++ GST_OBJECT_LOCK (decoder); ++ if (frame->system_frame_number <= priv->request_sync_point_frame_number ++ && priv->request_sync_point_frame_number != REQUEST_SYNC_POINT_UNSET) { ++ if (priv->request_sync_point_flags & ++ GST_VIDEO_DECODER_REQUEST_SYNC_POINT_CORRUPT_OUTPUT) { ++ GST_DEBUG_OBJECT (decoder, ++ "marking frame %" GST_TIME_FORMAT ++ " as corrupted because it is still before the sync point", ++ GST_TIME_ARGS (frame->pts)); ++ GST_VIDEO_CODEC_FRAME_FLAG_SET (frame, ++ GST_VIDEO_CODEC_FRAME_FLAG_CORRUPTED); ++ } ++ } else { ++ /* Reset to -1 to mark it as unset now that we've reached the frame */ ++ priv->request_sync_point_frame_number = REQUEST_SYNC_POINT_UNSET; ++ } ++ GST_OBJECT_UNLOCK (decoder); ++ ++ if (priv->discard_corrupted_frames ++ && (GST_VIDEO_CODEC_FRAME_FLAG_IS_SET (frame, ++ GST_VIDEO_CODEC_FRAME_FLAG_CORRUPTED) ++ || GST_BUFFER_FLAG_IS_SET (frame->output_buffer, ++ GST_BUFFER_FLAG_CORRUPTED))) { ++ GST_DEBUG_OBJECT (decoder, ++ "skipping frame %" GST_TIME_FORMAT " because it is corrupted", + GST_TIME_ARGS (frame->pts)); + goto done; + } +@@ -3175,19 +3527,17 @@ gst_video_decoder_finish_frame (GstVideoDecoder * decoder, + GST_BUFFER_FLAG_SET (output_buffer, GST_BUFFER_FLAG_DISCONT); + } + +- if (decoder_class->transform_meta) { +- if (G_LIKELY (frame->input_buffer)) { +- CopyMetaData data; +- +- data.decoder = decoder; +- data.frame = frame; +- gst_buffer_foreach_meta (frame->input_buffer, foreach_metadata, &data); +- } else { +- GST_WARNING_OBJECT (decoder, +- "Can't copy metadata because input frame disappeared"); +- } ++ if (GST_VIDEO_CODEC_FRAME_FLAG_IS_SET (frame, ++ GST_VIDEO_CODEC_FRAME_FLAG_CORRUPTED)) { ++ GST_DEBUG_OBJECT (decoder, ++ "marking frame %" GST_TIME_FORMAT " as corrupted", ++ GST_TIME_ARGS (frame->pts)); ++ GST_BUFFER_FLAG_SET (output_buffer, 
GST_BUFFER_FLAG_CORRUPTED); + } + ++ gst_video_decoder_copy_metas (decoder, frame, frame->input_buffer, ++ frame->output_buffer); ++ + /* Get an additional ref to the buffer, which is going to be pushed + * downstream, the original ref is owned by the frame + */ +@@ -3215,6 +3565,39 @@ done: + return ret; + } + ++/** ++ * gst_video_decoder_finish_subframe: ++ * @decoder: a #GstVideoDecoder ++ * @frame: (transfer full): the #GstVideoCodecFrame ++ * ++ * Indicate that a subframe has been finished to be decoded ++ * by the subclass. This method should be called for all subframes ++ * except the last subframe where @gst_video_decoder_finish_frame ++ * should be called instead. ++ * ++ * Returns: a #GstFlowReturn, usually GST_FLOW_OK. ++ * ++ * Since: 1.20 ++ */ ++GstFlowReturn ++gst_video_decoder_finish_subframe (GstVideoDecoder * decoder, ++ GstVideoCodecFrame * frame) ++{ ++ g_return_val_if_fail (gst_video_decoder_get_subframe_mode (decoder), ++ GST_FLOW_NOT_SUPPORTED); ++ ++ GST_LOG_OBJECT (decoder, "finish subframe %p num=%d", frame->input_buffer, ++ gst_video_decoder_get_input_subframe_index (decoder, frame)); ++ ++ GST_VIDEO_DECODER_STREAM_LOCK (decoder); ++ frame->abidata.ABI.subframes_processed++; ++ gst_video_codec_frame_unref (frame); ++ ++ GST_VIDEO_DECODER_STREAM_UNLOCK (decoder); ++ ++ return GST_FLOW_OK; ++} ++ + /* With stream lock, takes the frame reference */ + static GstFlowReturn + gst_video_decoder_clip_and_push_buf (GstVideoDecoder * decoder, GstBuffer * buf) +@@ -3351,7 +3734,7 @@ gst_video_decoder_clip_and_push_buf (GstVideoDecoder * decoder, GstBuffer * buf) + } + #endif + +- /* release STREAM_LOCK not to block upstream ++ /* release STREAM_LOCK not to block upstream + * while pushing buffer downstream */ + GST_VIDEO_DECODER_STREAM_UNLOCK (decoder); + ret = gst_pad_push (decoder->srcpad, buf); +@@ -3471,7 +3854,8 @@ gst_video_decoder_have_frame (GstVideoDecoder * decoder) + buffer = gst_buffer_new_and_alloc (0); + } + +- priv->current_frame->input_buffer = buffer; ++ gst_video_decoder_replace_input_buffer (decoder, priv->current_frame, ++ &buffer); + + gst_video_decoder_get_buffer_info_at_offset (decoder, + priv->frame_offset, &pts, &dts, &duration, &flags); +@@ -3487,20 +3871,36 @@ gst_video_decoder_have_frame (GstVideoDecoder * decoder) + GST_TIME_ARGS (duration)); + + if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) { +- GST_LOG_OBJECT (decoder, "Marking as sync point"); ++ GST_DEBUG_OBJECT (decoder, "Marking as sync point"); + GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (priv->current_frame); + } + ++ if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_CORRUPTED)) { ++ GST_DEBUG_OBJECT (decoder, "Marking as corrupted"); ++ GST_VIDEO_CODEC_FRAME_FLAG_SET (priv->current_frame, ++ GST_VIDEO_CODEC_FRAME_FLAG_CORRUPTED); ++ } ++ + /* In reverse playback, just capture and queue frames for later processing */ + if (decoder->input_segment.rate < 0.0) { + priv->parse_gather = + g_list_prepend (priv->parse_gather, priv->current_frame); ++ priv->current_frame = NULL; + } else { +- /* Otherwise, decode the frame, which gives away our ref */ +- ret = gst_video_decoder_decode_frame (decoder, priv->current_frame); ++ GstVideoCodecFrame *frame = priv->current_frame; ++ frame->abidata.ABI.num_subframes++; ++ /* In subframe mode, we keep a ref for ourselves ++ * as this frame will be kept during the data collection ++ * in parsed mode. 
The frame reference will be released by ++ * finish_(sub)frame or drop_(sub)frame.*/ ++ if (gst_video_decoder_get_subframe_mode (decoder)) ++ gst_video_codec_frame_ref (priv->current_frame); ++ else ++ priv->current_frame = NULL; ++ ++ /* Decode the frame, which gives away our ref */ ++ ret = gst_video_decoder_decode_frame (decoder, frame); + } +- /* Current frame is gone now, either way */ +- priv->current_frame = NULL; + + GST_VIDEO_DECODER_STREAM_UNLOCK (decoder); + +@@ -3523,46 +3923,89 @@ gst_video_decoder_decode_frame (GstVideoDecoder * decoder, + /* FIXME : This should only have to be checked once (either the subclass has an + * implementation, or it doesn't) */ + g_return_val_if_fail (decoder_class->handle_frame != NULL, GST_FLOW_ERROR); ++ g_return_val_if_fail (frame != NULL, GST_FLOW_ERROR); + +- frame->distance_from_sync = priv->distance_from_sync; +- priv->distance_from_sync++; + frame->pts = GST_BUFFER_PTS (frame->input_buffer); + frame->dts = GST_BUFFER_DTS (frame->input_buffer); + frame->duration = GST_BUFFER_DURATION (frame->input_buffer); ++ frame->deadline = ++ gst_segment_to_running_time (&decoder->input_segment, GST_FORMAT_TIME, ++ frame->pts); + + /* For keyframes, PTS = DTS + constant_offset, usually 0 to 3 frame + * durations. */ + /* FIXME upstream can be quite wrong about the keyframe aspect, + * so we could be going off here as well, + * maybe let subclass decide if it really is/was a keyframe */ +- if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && +- GST_CLOCK_TIME_IS_VALID (frame->pts) +- && GST_CLOCK_TIME_IS_VALID (frame->dts)) { +- /* just in case they are not equal as might ideally be, +- * e.g. quicktime has a (positive) delta approach */ +- priv->pts_delta = frame->pts - frame->dts; +- GST_DEBUG_OBJECT (decoder, "PTS delta %d ms", +- (gint) (priv->pts_delta / GST_MSECOND)); ++ if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) { ++ priv->distance_from_sync = 0; ++ ++ GST_OBJECT_LOCK (decoder); ++ priv->request_sync_point_flags &= ++ ~GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT; ++ if (priv->request_sync_point_frame_number == REQUEST_SYNC_POINT_PENDING) ++ priv->request_sync_point_frame_number = frame->system_frame_number; ++ GST_OBJECT_UNLOCK (decoder); ++ ++ if (GST_CLOCK_TIME_IS_VALID (frame->pts) ++ && GST_CLOCK_TIME_IS_VALID (frame->dts)) { ++ /* just in case they are not equal as might ideally be, ++ * e.g. 
quicktime has a (positive) delta approach */ ++ priv->pts_delta = frame->pts - frame->dts; ++ GST_DEBUG_OBJECT (decoder, "PTS delta %d ms", ++ (gint) (priv->pts_delta / GST_MSECOND)); ++ } ++ } else { ++ if (priv->distance_from_sync == -1 && priv->automatic_request_sync_points) { ++ GST_DEBUG_OBJECT (decoder, ++ "Didn't receive a keyframe yet, requesting sync point"); ++ gst_video_decoder_request_sync_point (decoder, frame, ++ priv->automatic_request_sync_point_flags); ++ } ++ ++ GST_OBJECT_LOCK (decoder); ++ if ((priv->needs_sync_point && priv->distance_from_sync == -1) ++ || (priv->request_sync_point_flags & ++ GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT)) { ++ GST_WARNING_OBJECT (decoder, ++ "Subclass requires a sync point but we didn't receive one yet, discarding input"); ++ GST_OBJECT_UNLOCK (decoder); ++ if (priv->automatic_request_sync_points) { ++ gst_video_decoder_request_sync_point (decoder, frame, ++ priv->automatic_request_sync_point_flags); ++ } ++ gst_video_decoder_release_frame (decoder, frame); ++ return GST_FLOW_OK; ++ } ++ GST_OBJECT_UNLOCK (decoder); ++ ++ priv->distance_from_sync++; + } + +- frame->abidata.ABI.ts = frame->dts; +- frame->abidata.ABI.ts2 = frame->pts; ++ frame->distance_from_sync = priv->distance_from_sync; ++ ++ if (frame->abidata.ABI.num_subframes == 1) { ++ frame->abidata.ABI.ts = frame->dts; ++ frame->abidata.ABI.ts2 = frame->pts; ++ } + +- GST_LOG_OBJECT (decoder, "PTS %" GST_TIME_FORMAT ", DTS %" GST_TIME_FORMAT +- ", dist %d", GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (frame->dts), ++ GST_LOG_OBJECT (decoder, ++ "frame %p PTS %" GST_TIME_FORMAT ", DTS %" GST_TIME_FORMAT ", dist %d", ++ frame, GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (frame->dts), + frame->distance_from_sync); +- +- g_queue_push_tail (&priv->frames, gst_video_codec_frame_ref (frame)); ++ /* FIXME: suboptimal way to add a unique frame to the list, in case of subframe mode. */ ++ if (!g_queue_find (&priv->frames, frame)) { ++ g_queue_push_tail (&priv->frames, gst_video_codec_frame_ref (frame)); ++ } else { ++ GST_LOG_OBJECT (decoder, ++ "Do not add an existing frame used to decode subframes"); ++ } + + if (priv->frames.length > 10) { + GST_DEBUG_OBJECT (decoder, "decoder frame list getting long: %d frames," + "possible internal leaking?", priv->frames.length); + } + +- frame->deadline = +- gst_segment_to_running_time (&decoder->input_segment, GST_FORMAT_TIME, +- frame->pts); +- + /* do something with frame */ + ret = decoder_class->handle_frame (decoder, frame); + if (ret != GST_FLOW_OK) +@@ -3580,7 +4023,7 @@ gst_video_decoder_decode_frame (GstVideoDecoder * decoder, + * + * Get the #GstVideoCodecState currently describing the output stream. + * +- * Returns: (transfer full): #GstVideoCodecState describing format of video data. ++ * Returns: (transfer full) (nullable): #GstVideoCodecState describing format of video data. + */ + GstVideoCodecState * + gst_video_decoder_get_output_state (GstVideoDecoder * decoder) +@@ -3648,7 +4091,7 @@ _set_interlaced_output_state (GstVideoDecoder * decoder, + * @fmt: a #GstVideoFormat + * @width: The width in pixels + * @height: The height in pixels +- * @reference: (allow-none) (transfer none): An optional reference #GstVideoCodecState ++ * @reference: (nullable) (transfer none): An optional reference #GstVideoCodecState + * + * Creates a new #GstVideoCodecState with the specified @fmt, @width and @height + * as the output state for the decoder. 
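Since the annotation changes above only clarify nullability, a short usage reminder may help (illustrative only, not part of this patch; input_state is assumed to be the #GstVideoCodecState received in the subclass set_format):

    /* typically called from a subclass set_format() once the output geometry
     * is known; input_state is passed as the optional reference state */
    GstVideoCodecState *output_state =
        gst_video_decoder_set_output_state (decoder,
            GST_VIDEO_FORMAT_I420, 1280, 720, input_state);
    /* takes effect on the next gst_video_decoder_finish_frame() */
    gst_video_codec_state_unref (output_state);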
+@@ -3665,7 +4108,7 @@ _set_interlaced_output_state (GstVideoDecoder * decoder, + * The new output state will only take effect (set on pads and buffers) starting + * from the next call to #gst_video_decoder_finish_frame(). + * +- * Returns: (transfer full): the newly configured output state. ++ * Returns: (transfer full) (nullable): the newly configured output state. + */ + GstVideoCodecState * + gst_video_decoder_set_output_state (GstVideoDecoder * decoder, +@@ -3683,12 +4126,12 @@ gst_video_decoder_set_output_state (GstVideoDecoder * decoder, + * @width: The width in pixels + * @height: The height in pixels + * @interlace_mode: A #GstVideoInterlaceMode +- * @reference: (allow-none) (transfer none): An optional reference #GstVideoCodecState ++ * @reference: (nullable) (transfer none): An optional reference #GstVideoCodecState + * + * Same as #gst_video_decoder_set_output_state() but also allows you to also set + * the interlacing mode. + * +- * Returns: (transfer full): the newly configured output state. ++ * Returns: (transfer full) (nullable): the newly configured output state. + * + * Since: 1.16. + */ +@@ -3708,7 +4151,7 @@ gst_video_decoder_set_interlaced_output_state (GstVideoDecoder * decoder, + * + * Get the oldest pending unfinished #GstVideoCodecFrame + * +- * Returns: (transfer full): oldest pending unfinished #GstVideoCodecFrame. ++ * Returns: (transfer full) (nullable): oldest pending unfinished #GstVideoCodecFrame. + */ + GstVideoCodecFrame * + gst_video_decoder_get_oldest_frame (GstVideoDecoder * decoder) +@@ -3730,7 +4173,7 @@ gst_video_decoder_get_oldest_frame (GstVideoDecoder * decoder) + * + * Get a pending unfinished #GstVideoCodecFrame + * +- * Returns: (transfer full): pending unfinished #GstVideoCodecFrame identified by @frame_number. ++ * Returns: (transfer full) (nullable): pending unfinished #GstVideoCodecFrame identified by @frame_number. + */ + GstVideoCodecFrame * + gst_video_decoder_get_frame (GstVideoDecoder * decoder, int frame_number) +@@ -4139,7 +4582,7 @@ gst_video_decoder_negotiate (GstVideoDecoder * decoder) + * You should use gst_video_decoder_allocate_output_frame() instead of this + * function, if possible at all. + * +- * Returns: (transfer full): allocated buffer, or NULL if no buffer could be ++ * Returns: (transfer full) (nullable): allocated buffer, or NULL if no buffer could be + * allocated (e.g. when downstream is flushing or shutting down) + */ + GstBuffer * +@@ -4483,6 +4926,123 @@ gst_video_decoder_get_packetized (GstVideoDecoder * decoder) + return decoder->priv->packetized; + } + ++/** ++ * gst_video_decoder_have_last_subframe: ++ * @decoder: a #GstVideoDecoder ++ * @frame: (transfer none): the #GstVideoCodecFrame to update ++ * ++ * Indicates that the last subframe has been processed by the decoder ++ * in @frame. This will release the current frame in video decoder ++ * allowing to receive new frames from upstream elements. This method ++ * must be called in the subclass @handle_frame callback. ++ * ++ * Returns: a #GstFlowReturn, usually GST_FLOW_OK. 
++ * ++ * Since: 1.20 ++ */ ++GstFlowReturn ++gst_video_decoder_have_last_subframe (GstVideoDecoder * decoder, ++ GstVideoCodecFrame * frame) ++{ ++ g_return_val_if_fail (gst_video_decoder_get_subframe_mode (decoder), ++ GST_FLOW_OK); ++ /* unref once from the list */ ++ GST_VIDEO_DECODER_STREAM_LOCK (decoder); ++ if (decoder->priv->current_frame == frame) { ++ gst_video_codec_frame_unref (decoder->priv->current_frame); ++ decoder->priv->current_frame = NULL; ++ } ++ GST_VIDEO_DECODER_STREAM_UNLOCK (decoder); ++ ++ return GST_FLOW_OK; ++} ++ ++/** ++ * gst_video_decoder_set_subframe_mode: ++ * @decoder: a #GstVideoDecoder ++ * @subframe_mode: whether the input data should be considered as subframes. ++ * ++ * If this is set to TRUE, it informs the base class that the subclass ++ * can receive the data at a granularity lower than one frame. ++ * ++ * Note that in this mode, the subclass has two options. It can either ++ * require the presence of a GST_VIDEO_BUFFER_FLAG_MARKER to mark the ++ * end of a frame. Or it can operate in such a way that it will decode ++ * a single frame at a time. In this second case, every buffer that ++ * arrives to the element is considered part of the same frame until ++ * gst_video_decoder_finish_frame() is called. ++ * ++ * In either case, the same #GstVideoCodecFrame will be passed to the ++ * GstVideoDecoderClass:handle_frame vmethod repeatedly with a ++ * different GstVideoCodecFrame:input_buffer every time until the end of the ++ * frame has been signaled using either method. ++ * This method must be called during the decoder subclass @set_format call. ++ * ++ * Since: 1.20 ++ */ ++void ++gst_video_decoder_set_subframe_mode (GstVideoDecoder * decoder, ++ gboolean subframe_mode) ++{ ++ decoder->priv->subframe_mode = subframe_mode; ++} ++ ++/** ++ * gst_video_decoder_get_subframe_mode: ++ * @decoder: a #GstVideoDecoder ++ * ++ * Queries whether input data is considered as subframes or not by the ++ * base class. If FALSE, each input buffer will be considered as a full ++ * frame. ++ * ++ * Returns: TRUE if input data is considered as sub frames. ++ * ++ * Since: 1.20 ++ */ ++gboolean ++gst_video_decoder_get_subframe_mode (GstVideoDecoder * decoder) ++{ ++ return decoder->priv->subframe_mode; ++} ++ ++/** ++ * gst_video_decoder_get_input_subframe_index: ++ * @decoder: a #GstVideoDecoder ++ * @frame: (transfer none): the #GstVideoCodecFrame to update ++ * ++ * Queries the number of the last subframe received by ++ * the decoder baseclass in the @frame. ++ * ++ * Returns: the current subframe index received in subframe mode, 1 otherwise. ++ * ++ * Since: 1.20 ++ */ ++guint ++gst_video_decoder_get_input_subframe_index (GstVideoDecoder * decoder, ++ GstVideoCodecFrame * frame) ++{ ++ return frame->abidata.ABI.num_subframes; ++} ++ ++/** ++ * gst_video_decoder_get_processed_subframe_index: ++ * @decoder: a #GstVideoDecoder ++ * @frame: (transfer none): the #GstVideoCodecFrame to update ++ * ++ * Queries the number of subframes in the frame processed by ++ * the decoder baseclass. ++ * ++ * Returns: the current subframe processed received in subframe mode. 
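/* Illustrative sketch only -- not taken from the patch above. It shows how a
 * hypothetical subclass ("my_slice_dec") could combine the subframe API this
 * backport adds: enable the mode from set_format(), then consume one slice
 * per handle_frame() call and close the frame on the MARKER buffer. */
#include <gst/video/gstvideodecoder.h>

static gboolean
my_slice_dec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  /* Per the documentation above, this must be called from set_format(). */
  gst_video_decoder_set_subframe_mode (decoder, TRUE);
  return TRUE;
}

static GstFlowReturn
my_slice_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
{
  /* The same GstVideoCodecFrame is passed on every call; only input_buffer
   * changes from one subframe to the next. */
  gboolean is_last = GST_BUFFER_FLAG_IS_SET (frame->input_buffer,
      GST_VIDEO_BUFFER_FLAG_MARKER);

  /* ...decode the slice carried by frame->input_buffer here... */

  if (!is_last)
    return gst_video_decoder_finish_subframe (decoder, frame);

  /* Last subframe: release the pending input frame and push the picture. */
  gst_video_decoder_have_last_subframe (decoder, frame);
  return gst_video_decoder_finish_frame (decoder, frame);
}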
++ * ++ * Since: 1.20 ++ */ ++guint ++gst_video_decoder_get_processed_subframe_index (GstVideoDecoder * decoder, ++ GstVideoCodecFrame * frame) ++{ ++ return frame->abidata.ABI.subframes_processed; ++} ++ + /** + * gst_video_decoder_set_estimate_rate: + * @dec: a #GstVideoDecoder +@@ -4518,32 +5078,49 @@ gst_video_decoder_get_estimate_rate (GstVideoDecoder * dec) + * @min_latency: minimum latency + * @max_latency: maximum latency + * +- * Lets #GstVideoDecoder sub-classes tell the baseclass what the decoder +- * latency is. Will also post a LATENCY message on the bus so the pipeline +- * can reconfigure its global latency. ++ * Lets #GstVideoDecoder sub-classes tell the baseclass what the decoder latency ++ * is. If the provided values changed from previously provided ones, this will ++ * also post a LATENCY message on the bus so the pipeline can reconfigure its ++ * global latency. + */ + void + gst_video_decoder_set_latency (GstVideoDecoder * decoder, + GstClockTime min_latency, GstClockTime max_latency) + { ++ gboolean post_message = FALSE; + g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min_latency)); + g_return_if_fail (max_latency >= min_latency); + ++ GST_DEBUG_OBJECT (decoder, ++ "min_latency:%" GST_TIME_FORMAT " max_latency:%" GST_TIME_FORMAT, ++ GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency)); ++ + GST_OBJECT_LOCK (decoder); +- decoder->priv->min_latency = min_latency; +- decoder->priv->max_latency = max_latency; ++ if (decoder->priv->min_latency != min_latency) { ++ decoder->priv->min_latency = min_latency; ++ post_message = TRUE; ++ } ++ if (decoder->priv->max_latency != max_latency) { ++ decoder->priv->max_latency = max_latency; ++ post_message = TRUE; ++ } ++ if (!decoder->priv->posted_latency_msg) { ++ decoder->priv->posted_latency_msg = TRUE; ++ post_message = TRUE; ++ } + GST_OBJECT_UNLOCK (decoder); + +- gst_element_post_message (GST_ELEMENT_CAST (decoder), +- gst_message_new_latency (GST_OBJECT_CAST (decoder))); ++ if (post_message) ++ gst_element_post_message (GST_ELEMENT_CAST (decoder), ++ gst_message_new_latency (GST_OBJECT_CAST (decoder))); + } + + /** + * gst_video_decoder_get_latency: + * @decoder: a #GstVideoDecoder +- * @min_latency: (out) (allow-none): address of variable in which to store the ++ * @min_latency: (out) (optional): address of variable in which to store the + * configured minimum latency, or %NULL +- * @max_latency: (out) (allow-none): address of variable in which to store the ++ * @max_latency: (out) (optional): address of variable in which to store the + * configured mximum latency, or %NULL + * + * Query the configured decoder latency. 
Results will be returned via +@@ -4564,7 +5141,7 @@ gst_video_decoder_get_latency (GstVideoDecoder * decoder, + /** + * gst_video_decoder_merge_tags: + * @decoder: a #GstVideoDecoder +- * @tags: (allow-none): a #GstTagList to merge, or NULL to unset ++ * @tags: (nullable): a #GstTagList to merge, or NULL to unset + * previously-set tags + * @mode: the #GstTagMergeMode to use, usually #GST_TAG_MERGE_REPLACE + * +@@ -4607,7 +5184,7 @@ gst_video_decoder_merge_tags (GstVideoDecoder * decoder, + * gst_video_decoder_get_buffer_pool: + * @decoder: a #GstVideoDecoder + * +- * Returns: (transfer full): the instance of the #GstBufferPool used ++ * Returns: (transfer full) (nullable): the instance of the #GstBufferPool used + * by the decoder; free it after use it + */ + GstBufferPool * +@@ -4624,9 +5201,9 @@ gst_video_decoder_get_buffer_pool (GstVideoDecoder * decoder) + /** + * gst_video_decoder_get_allocator: + * @decoder: a #GstVideoDecoder +- * @allocator: (out) (allow-none) (transfer full): the #GstAllocator ++ * @allocator: (out) (optional) (nullable) (transfer full): the #GstAllocator + * used +- * @params: (out) (allow-none) (transfer full): the ++ * @params: (out) (optional) (transfer full): the + * #GstAllocationParams of @allocator + * + * Lets #GstVideoDecoder sub-classes to know the memory @allocator +@@ -4668,3 +5245,134 @@ gst_video_decoder_set_use_default_pad_acceptcaps (GstVideoDecoder * decoder, + { + decoder->priv->use_default_pad_acceptcaps = use; + } ++ ++static void ++gst_video_decoder_request_sync_point_internal (GstVideoDecoder * dec, ++ GstClockTime deadline, GstVideoDecoderRequestSyncPointFlags flags) ++{ ++ GstEvent *fku = NULL; ++ GstVideoDecoderPrivate *priv; ++ ++ g_return_if_fail (GST_IS_VIDEO_DECODER (dec)); ++ ++ priv = dec->priv; ++ ++ GST_OBJECT_LOCK (dec); ++ ++ /* Check if we're allowed to send a new force-keyunit event. ++ * frame->deadline is set to the running time of the PTS. */ ++ if (priv->min_force_key_unit_interval == 0 || ++ deadline == GST_CLOCK_TIME_NONE || ++ (priv->min_force_key_unit_interval != GST_CLOCK_TIME_NONE && ++ (priv->last_force_key_unit_time == GST_CLOCK_TIME_NONE ++ || (priv->last_force_key_unit_time + ++ priv->min_force_key_unit_interval <= deadline)))) { ++ GST_DEBUG_OBJECT (dec, ++ "Requesting a new key-unit for frame with deadline %" GST_TIME_FORMAT, ++ GST_TIME_ARGS (deadline)); ++ fku = ++ gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE, FALSE, ++ 0); ++ priv->last_force_key_unit_time = deadline; ++ } else { ++ GST_DEBUG_OBJECT (dec, ++ "Can't request a new key-unit for frame with deadline %" ++ GST_TIME_FORMAT, GST_TIME_ARGS (deadline)); ++ } ++ priv->request_sync_point_flags |= flags; ++ /* We don't know yet the frame number of the sync point so set it to a ++ * frame number higher than any allowed frame number */ ++ priv->request_sync_point_frame_number = REQUEST_SYNC_POINT_PENDING; ++ GST_OBJECT_UNLOCK (dec); ++ ++ if (fku) ++ gst_pad_push_event (dec->sinkpad, fku); ++} ++ ++/** ++ * gst_video_decoder_request_sync_point: ++ * @dec: a #GstVideoDecoder ++ * @frame: a #GstVideoCodecFrame ++ * @flags: #GstVideoDecoderRequestSyncPointFlags ++ * ++ * Allows the #GstVideoDecoder subclass to request from the base class that ++ * a new sync should be requested from upstream, and that @frame was the frame ++ * when the subclass noticed that a new sync point is required. A reason for ++ * the subclass to do this could be missing reference frames, for example. 
++ * ++ * The base class will then request a new sync point from upstream as long as ++ * the time that passed since the last one is exceeding ++ * #GstVideoDecoder:min-force-key-unit-interval. ++ * ++ * The subclass can signal via @flags how the frames until the next sync point ++ * should be handled: ++ * ++ * * If %GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT is selected then ++ * all following input frames until the next sync point are discarded. ++ * This can be useful if the lack of a sync point will prevent all further ++ * decoding and the decoder implementation is not very robust in handling ++ * missing references frames. ++ * * If %GST_VIDEO_DECODER_REQUEST_SYNC_POINT_CORRUPT_OUTPUT is selected ++ * then all output frames following @frame are marked as corrupted via ++ * %GST_BUFFER_FLAG_CORRUPTED. Corrupted frames can be automatically ++ * dropped by the base class, see #GstVideoDecoder:discard-corrupted-frames. ++ * Subclasses can manually mark frames as corrupted via %GST_VIDEO_CODEC_FRAME_FLAG_CORRUPTED ++ * before calling gst_video_decoder_finish_frame(). ++ * ++ * Since: 1.20 ++ */ ++void ++gst_video_decoder_request_sync_point (GstVideoDecoder * dec, ++ GstVideoCodecFrame * frame, GstVideoDecoderRequestSyncPointFlags flags) ++{ ++ g_return_if_fail (GST_IS_VIDEO_DECODER (dec)); ++ g_return_if_fail (frame != NULL); ++ ++ gst_video_decoder_request_sync_point_internal (dec, frame->deadline, flags); ++} ++ ++/** ++ * gst_video_decoder_set_needs_sync_point: ++ * @dec: a #GstVideoDecoder ++ * @enabled: new state ++ * ++ * Configures whether the decoder requires a sync point before it starts ++ * outputting data in the beginning. If enabled, the base class will discard ++ * all non-sync point frames in the beginning and after a flush and does not ++ * pass it to the subclass. ++ * ++ * If the first frame is not a sync point, the base class will request a sync ++ * point via the force-key-unit event. ++ * ++ * Since: 1.20 ++ */ ++void ++gst_video_decoder_set_needs_sync_point (GstVideoDecoder * dec, gboolean enabled) ++{ ++ g_return_if_fail (GST_IS_VIDEO_DECODER (dec)); ++ ++ dec->priv->needs_sync_point = enabled; ++} ++ ++/** ++ * gst_video_decoder_get_needs_sync_point: ++ * @dec: a #GstVideoDecoder ++ * ++ * Queries if the decoder requires a sync point before it starts outputting ++ * data in the beginning. ++ * ++ * Returns: %TRUE if a sync point is required in the beginning. ++ * ++ * Since: 1.20 ++ */ ++gboolean ++gst_video_decoder_get_needs_sync_point (GstVideoDecoder * dec) ++{ ++ gboolean result; ++ ++ g_return_val_if_fail (GST_IS_VIDEO_DECODER (dec), FALSE); ++ ++ result = dec->priv->needs_sync_point; ++ ++ return result; ++} +diff --git a/gst-libs/gst/video/gstvideodecoder.h b/gst-libs/gst/video/gstvideodecoder.h +index 0233cfa08..3556fd3c8 100644 +--- a/gst-libs/gst/video/gstvideodecoder.h ++++ b/gst-libs/gst/video/gstvideodecoder.h +@@ -161,7 +161,7 @@ G_STMT_START { \ + * + * Default maximum number of errors tolerated before signaling error. + */ +-#define GST_VIDEO_DECODER_MAX_ERRORS 10 ++#define GST_VIDEO_DECODER_MAX_ERRORS -1 + + + /** +@@ -214,7 +214,9 @@ struct _GstVideoDecoder + * @reset: Optional. + * Allows subclass (decoder) to perform post-seek semantics reset. + * Deprecated. +- * @handle_frame: Provides input data frame to subclass. ++ * @handle_frame: Provides input data frame to subclass. 
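/* Minimal sketch, not part of the patch: one way a subclass could use the
 * sync-point machinery documented above when it notices a missing reference
 * picture. my_dec_has_references() is a hypothetical helper. */
static GstFlowReturn
my_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
{
  if (!my_dec_has_references (decoder, frame)) {
    /* Ask upstream for a fresh keyframe and have the base class discard
     * all input until it arrives. */
    gst_video_decoder_request_sync_point (decoder, frame,
        GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT);
    gst_video_decoder_release_frame (decoder, frame);
    return GST_FLOW_OK;
  }

  /* ...normal decoding path... */
  return gst_video_decoder_finish_frame (decoder, frame);
}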
In subframe mode, the subclass needs ++ * to take ownership of @GstVideoCodecFrame.input_buffer as it will be modified ++ * by the base class on the next subframe buffer receiving. + * @finish: Optional. + * Called to request subclass to dispatch any pending remaining + * data at EOS. Sub-classes can refuse to decode new data after. +@@ -305,6 +307,11 @@ struct _GstVideoDecoderClass + + GstFlowReturn (*finish) (GstVideoDecoder *decoder); + ++ /** ++ * GstVideoDecoderClass::handle_frame: ++ * @decoder: The #GstVideoDecoder ++ * @frame: (transfer full): The frame to handle ++ */ + GstFlowReturn (*handle_frame) (GstVideoDecoder *decoder, + GstVideoCodecFrame *frame); + +@@ -337,10 +344,41 @@ struct _GstVideoDecoderClass + GstVideoCodecFrame *frame, + GstMeta * meta); + ++ /** ++ * GstVideoDecoderClass::handle_missing_data: ++ * @decoder: The #GstVideoDecoder ++ * @timestamp: Timestamp of the missing data ++ * @duration: Duration of the missing data ++ * ++ * Returns: %TRUE if the decoder should be drained afterwards. ++ * ++ * Since: 1.20 ++ */ ++ gboolean (*handle_missing_data) (GstVideoDecoder *decoder, ++ GstClockTime timestamp, ++ GstClockTime duration); ++ + /*< private >*/ +- gpointer padding[GST_PADDING_LARGE-6]; ++ gpointer padding[GST_PADDING_LARGE-7]; + }; + ++/** ++ * GstVideoDecoderRequestSyncPointFlags: ++ * @GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT: discard all following ++ * input until the next sync point. ++ * @GST_VIDEO_DECODER_REQUEST_SYNC_POINT_CORRUPT_OUTPUT: discard all following ++ * output until the next sync point. ++ * ++ * Flags to be used in combination with gst_video_decoder_request_sync_point(). ++ * See the function documentation for more details. ++ * ++ * Since: 1.20 ++ */ ++typedef enum { ++ GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT = (1<<0), ++ GST_VIDEO_DECODER_REQUEST_SYNC_POINT_CORRUPT_OUTPUT = (1<<1), ++} GstVideoDecoderRequestSyncPointFlags; ++ + GST_VIDEO_API + GType gst_video_decoder_get_type (void); + +@@ -353,6 +391,19 @@ void gst_video_decoder_set_packetized (GstVideoDecoder * decoder, + GST_VIDEO_API + gboolean gst_video_decoder_get_packetized (GstVideoDecoder * decoder); + ++GST_VIDEO_API ++void gst_video_decoder_set_subframe_mode (GstVideoDecoder * decoder, ++ gboolean subframe_mode); ++ ++GST_VIDEO_API ++gboolean gst_video_decoder_get_subframe_mode (GstVideoDecoder * decoder); ++ ++GST_VIDEO_API ++guint gst_video_decoder_get_input_subframe_index (GstVideoDecoder * decoder, GstVideoCodecFrame * frame); ++ ++GST_VIDEO_API ++guint gst_video_decoder_get_processed_subframe_index (GstVideoDecoder * decoder, GstVideoCodecFrame * frame); ++ + GST_VIDEO_API + void gst_video_decoder_set_estimate_rate (GstVideoDecoder * dec, + gboolean enabled); +@@ -374,6 +425,13 @@ void gst_video_decoder_set_needs_format (GstVideoDecoder * dec, + GST_VIDEO_API + gboolean gst_video_decoder_get_needs_format (GstVideoDecoder * dec); + ++GST_VIDEO_API ++void gst_video_decoder_set_needs_sync_point (GstVideoDecoder * dec, ++ gboolean enabled); ++ ++GST_VIDEO_API ++gboolean gst_video_decoder_get_needs_sync_point (GstVideoDecoder * dec); ++ + GST_VIDEO_API + void gst_video_decoder_set_latency (GstVideoDecoder *decoder, + GstClockTime min_latency, +@@ -413,6 +471,10 @@ void gst_video_decoder_add_to_frame (GstVideoDecoder *decoder, + GST_VIDEO_API + GstFlowReturn gst_video_decoder_have_frame (GstVideoDecoder *decoder); + ++GST_VIDEO_API ++GstFlowReturn gst_video_decoder_have_last_subframe (GstVideoDecoder *decoder, ++ GstVideoCodecFrame * frame); ++ + 
GST_VIDEO_API + gsize gst_video_decoder_get_pending_frame_size (GstVideoDecoder *decoder); + +@@ -454,10 +516,21 @@ gdouble gst_video_decoder_get_qos_proportion (GstVideoDecoder * decoder + GST_VIDEO_API + GstFlowReturn gst_video_decoder_finish_frame (GstVideoDecoder *decoder, + GstVideoCodecFrame *frame); ++GST_VIDEO_API ++GstFlowReturn gst_video_decoder_finish_subframe (GstVideoDecoder *decoder, ++ GstVideoCodecFrame *frame); + + GST_VIDEO_API + GstFlowReturn gst_video_decoder_drop_frame (GstVideoDecoder *dec, + GstVideoCodecFrame *frame); ++GST_VIDEO_API ++GstFlowReturn gst_video_decoder_drop_subframe (GstVideoDecoder *dec, ++ GstVideoCodecFrame *frame); ++ ++GST_VIDEO_API ++void gst_video_decoder_request_sync_point (GstVideoDecoder *dec, ++ GstVideoCodecFrame *frame, ++ GstVideoDecoderRequestSyncPointFlags flags); + + GST_VIDEO_API + void gst_video_decoder_release_frame (GstVideoDecoder * dec, +diff --git a/gst-libs/gst/video/gstvideoencoder.c b/gst-libs/gst/video/gstvideoencoder.c +index ff3031503..fcfb15bbd 100644 +--- a/gst-libs/gst/video/gstvideoencoder.c ++++ b/gst-libs/gst/video/gstvideoencoder.c +@@ -134,6 +134,9 @@ struct _GstVideoEncoderPrivate + gint64 min_latency; + gint64 max_latency; + ++ /* Tracks whether the latency message was posted at least once */ ++ gboolean posted_latency_msg; ++ + /* FIXME 2.0: Use a GQueue or similar, see GstVideoCodecFrame::events */ + GList *current_frame_events; + +@@ -444,18 +447,6 @@ _flush_events (GstPad * pad, GList * events) + return NULL; + } + +-#if !GLIB_CHECK_VERSION(2, 60, 0) +-#define g_queue_clear_full queue_clear_full +-static void +-queue_clear_full (GQueue * queue, GDestroyNotify free_func) +-{ +- gpointer data; +- +- while ((data = g_queue_pop_head (queue)) != NULL) +- free_func (data); +-} +-#endif +- + static gboolean + gst_video_encoder_reset (GstVideoEncoder * encoder, gboolean hard) + { +@@ -524,6 +515,8 @@ gst_video_encoder_reset (GstVideoEncoder * encoder, gboolean hard) + + priv->dropped = 0; + priv->processed = 0; ++ ++ priv->posted_latency_msg = FALSE; + } else { + GList *l; + +@@ -674,6 +667,15 @@ _new_output_state (GstCaps * caps, GstVideoCodecState * reference) + + GST_VIDEO_INFO_MULTIVIEW_MODE (tgt) = GST_VIDEO_INFO_MULTIVIEW_MODE (ref); + GST_VIDEO_INFO_MULTIVIEW_FLAGS (tgt) = GST_VIDEO_INFO_MULTIVIEW_FLAGS (ref); ++ ++ if (reference->mastering_display_info) { ++ state->mastering_display_info = g_slice_dup (GstVideoMasteringDisplayInfo, ++ reference->mastering_display_info); ++ } ++ if (reference->content_light_level) { ++ state->content_light_level = g_slice_dup (GstVideoContentLightLevel, ++ reference->content_light_level); ++ } + } + + return state; +@@ -683,6 +685,8 @@ static GstVideoCodecState * + _new_input_state (GstCaps * caps) + { + GstVideoCodecState *state; ++ GstStructure *c_struct; ++ const gchar *s; + + state = g_slice_new0 (GstVideoCodecState); + state->ref_count = 1; +@@ -691,6 +695,18 @@ _new_input_state (GstCaps * caps) + goto parse_fail; + state->caps = gst_caps_ref (caps); + ++ c_struct = gst_caps_get_structure (caps, 0); ++ ++ if ((s = gst_structure_get_string (c_struct, "mastering-display-info"))) { ++ state->mastering_display_info = g_slice_new (GstVideoMasteringDisplayInfo); ++ gst_video_mastering_display_info_from_string (state->mastering_display_info, ++ s); ++ } ++ if ((s = gst_structure_get_string (c_struct, "content-light-level"))) { ++ state->content_light_level = g_slice_new (GstVideoContentLightLevel); ++ gst_video_content_light_level_from_string (state->content_light_level, s); ++ 
} ++ + return state; + + parse_fail: +@@ -775,8 +791,8 @@ parse_fail: + /** + * gst_video_encoder_proxy_getcaps: + * @enc: a #GstVideoEncoder +- * @caps: (allow-none): initial caps +- * @filter: (allow-none): filter caps ++ * @caps: (nullable): initial caps ++ * @filter: (nullable): filter caps + * + * Returns caps that express @caps (or sink template caps if @caps == NULL) + * restricted to resolution/format/... combinations supported by downstream +@@ -1815,7 +1831,7 @@ gst_video_encoder_negotiate_default (GstVideoEncoder * encoder) + g_return_val_if_fail (state->caps != NULL, FALSE); + + if (encoder->priv->output_state_changed) { +- GstCaps *incaps; ++ GstStructure *out_struct; + + state->caps = gst_caps_make_writable (state->caps); + +@@ -1851,9 +1867,18 @@ gst_video_encoder_negotiate_default (GstVideoEncoder * encoder) + colorimetry, NULL); + g_free (colorimetry); + +- if (info->chroma_site != GST_VIDEO_CHROMA_SITE_UNKNOWN) +- gst_caps_set_simple (state->caps, "chroma-site", G_TYPE_STRING, +- gst_video_chroma_to_string (info->chroma_site), NULL); ++ if (info->chroma_site != GST_VIDEO_CHROMA_SITE_UNKNOWN) { ++ gchar *chroma_site = gst_video_chroma_site_to_string (info->chroma_site); ++ ++ if (!chroma_site) { ++ GST_WARNING ("Couldn't convert chroma-site 0x%x to string", ++ info->chroma_site); ++ } else { ++ gst_caps_set_simple (state->caps, ++ "chroma-site", G_TYPE_STRING, chroma_site, NULL); ++ g_free (chroma_site); ++ } ++ } + + if (GST_VIDEO_INFO_MULTIVIEW_MODE (info) != GST_VIDEO_MULTIVIEW_MODE_NONE) { + const gchar *caps_mview_mode = +@@ -1865,30 +1890,20 @@ gst_video_encoder_negotiate_default (GstVideoEncoder * encoder) + GST_VIDEO_INFO_MULTIVIEW_FLAGS (info), GST_FLAG_SET_MASK_EXACT, NULL); + } + +- incaps = gst_pad_get_current_caps (GST_VIDEO_ENCODER_SINK_PAD (encoder)); +- if (incaps) { +- GstStructure *in_struct; +- GstStructure *out_struct; +- const gchar *s; +- +- in_struct = gst_caps_get_structure (incaps, 0); +- out_struct = gst_caps_get_structure (state->caps, 0); +- +- /* forward upstream mastering display info and content light level +- * if subclass didn't set */ +- if ((s = gst_structure_get_string (in_struct, "mastering-display-info")) +- && !gst_structure_has_field (out_struct, "mastering-display-info")) { +- gst_caps_set_simple (state->caps, "mastering-display-info", +- G_TYPE_STRING, s, NULL); +- } ++ out_struct = gst_caps_get_structure (state->caps, 0); + +- if ((s = gst_structure_get_string (in_struct, "content-light-level")) && +- !gst_structure_has_field (out_struct, "content-light-level")) { +- gst_caps_set_simple (state->caps, +- "content-light-level", G_TYPE_STRING, s, NULL); +- } ++ /* forward upstream mastering display info and content light level ++ * if subclass didn't set */ ++ if (state->mastering_display_info && ++ !gst_structure_has_field (out_struct, "mastering-display-info")) { ++ gst_video_mastering_display_info_add_to_caps ++ (state->mastering_display_info, state->caps); ++ } + +- gst_caps_unref (incaps); ++ if (state->content_light_level && ++ !gst_structure_has_field (out_struct, "content-light-level")) { ++ gst_video_content_light_level_add_to_caps (state->content_light_level, ++ state->caps); + } + + encoder->priv->output_state_changed = FALSE; +@@ -2176,7 +2191,8 @@ foreach_metadata (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data) + const GstMetaInfo *info = (*meta)->info; + gboolean do_copy = FALSE; + +- if (gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory)) { ++ if (gst_meta_api_type_has_tag (info->api, 
_gst_meta_tag_memory) ++ || gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory_reference)) { + /* never call the transform_meta with memory specific metadata */ + GST_DEBUG_OBJECT (encoder, "not copying memory specific metadata %s", + g_type_name (info->api)); +@@ -2762,15 +2778,16 @@ done: + * + * Get the current #GstVideoCodecState + * +- * Returns: (transfer full): #GstVideoCodecState describing format of video data. ++ * Returns: (transfer full) (nullable): #GstVideoCodecState describing format of video data. + */ + GstVideoCodecState * + gst_video_encoder_get_output_state (GstVideoEncoder * encoder) + { +- GstVideoCodecState *state; ++ GstVideoCodecState *state = NULL; + + GST_VIDEO_ENCODER_STREAM_LOCK (encoder); +- state = gst_video_codec_state_ref (encoder->priv->output_state); ++ if (encoder->priv->output_state) ++ state = gst_video_codec_state_ref (encoder->priv->output_state); + GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder); + + return state; +@@ -2780,7 +2797,7 @@ gst_video_encoder_get_output_state (GstVideoEncoder * encoder) + * gst_video_encoder_set_output_state: + * @encoder: a #GstVideoEncoder + * @caps: (transfer full): the #GstCaps to use for the output +- * @reference: (allow-none) (transfer none): An optional reference @GstVideoCodecState ++ * @reference: (nullable) (transfer none): An optional reference @GstVideoCodecState + * + * Creates a new #GstVideoCodecState with the specified caps as the output state + * for the encoder. +@@ -2801,7 +2818,7 @@ gst_video_encoder_get_output_state (GstVideoEncoder * encoder) + * The new output state will only take effect (set on pads and buffers) starting + * from the next call to #gst_video_encoder_finish_frame(). + * +- * Returns: (transfer full): the newly configured output state. ++ * Returns: (transfer full) (nullable): the newly configured output state. + */ + GstVideoCodecState * + gst_video_encoder_set_output_state (GstVideoEncoder * encoder, GstCaps * caps, +@@ -2841,30 +2858,49 @@ gst_video_encoder_set_output_state (GstVideoEncoder * encoder, GstCaps * caps, + * @min_latency: minimum latency + * @max_latency: maximum latency + * +- * Informs baseclass of encoding latency. ++ * Informs baseclass of encoding latency. If the provided values changed from ++ * previously provided ones, this will also post a LATENCY message on the bus ++ * so the pipeline can reconfigure its global latency. 
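/* Sketch only (not from the patch): because unchanged values no longer post a
 * LATENCY message, a subclass can now report latency unconditionally, e.g.
 * whenever it (re)learns the framerate. The two-frame delay is a made-up
 * figure for illustration. */
static void
my_enc_report_latency (GstVideoEncoder * encoder, const GstVideoInfo * info)
{
  if (info->fps_n > 0) {
    GstClockTime frame_duration = gst_util_uint64_scale_int (GST_SECOND,
        info->fps_d, info->fps_n);

    gst_video_encoder_set_latency (encoder, 2 * frame_duration,
        2 * frame_duration);
  }
}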
+ */ + void + gst_video_encoder_set_latency (GstVideoEncoder * encoder, + GstClockTime min_latency, GstClockTime max_latency) + { ++ gboolean post_message = FALSE; ++ + g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min_latency)); + g_return_if_fail (max_latency >= min_latency); + ++ GST_DEBUG_OBJECT (encoder, ++ "min_latency:%" GST_TIME_FORMAT " max_latency:%" GST_TIME_FORMAT, ++ GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency)); ++ + GST_OBJECT_LOCK (encoder); +- encoder->priv->min_latency = min_latency; +- encoder->priv->max_latency = max_latency; ++ if (encoder->priv->min_latency != min_latency) { ++ encoder->priv->min_latency = min_latency; ++ post_message = TRUE; ++ } ++ if (encoder->priv->max_latency != max_latency) { ++ encoder->priv->max_latency = max_latency; ++ post_message = TRUE; ++ } ++ if (!encoder->priv->posted_latency_msg) { ++ encoder->priv->posted_latency_msg = TRUE; ++ post_message = TRUE; ++ } + GST_OBJECT_UNLOCK (encoder); + +- gst_element_post_message (GST_ELEMENT_CAST (encoder), +- gst_message_new_latency (GST_OBJECT_CAST (encoder))); ++ if (post_message) ++ gst_element_post_message (GST_ELEMENT_CAST (encoder), ++ gst_message_new_latency (GST_OBJECT_CAST (encoder))); + } + + /** + * gst_video_encoder_get_latency: + * @encoder: a #GstVideoEncoder +- * @min_latency: (out) (allow-none): address of variable in which to store the ++ * @min_latency: (out) (optional): address of variable in which to store the + * configured minimum latency, or %NULL +- * @max_latency: (out) (allow-none): address of variable in which to store the ++ * @max_latency: (out) (optional): address of variable in which to store the + * configured maximum latency, or %NULL + * + * Query the configured encoding latency. Results will be returned via +@@ -2888,7 +2924,7 @@ gst_video_encoder_get_latency (GstVideoEncoder * encoder, + * + * Get the oldest unfinished pending #GstVideoCodecFrame + * +- * Returns: (transfer full): oldest unfinished pending #GstVideoCodecFrame ++ * Returns: (transfer full) (nullable): oldest unfinished pending #GstVideoCodecFrame + */ + GstVideoCodecFrame * + gst_video_encoder_get_oldest_frame (GstVideoEncoder * encoder) +@@ -2910,7 +2946,7 @@ gst_video_encoder_get_oldest_frame (GstVideoEncoder * encoder) + * + * Get a pending unfinished #GstVideoCodecFrame + * +- * Returns: (transfer full): pending unfinished #GstVideoCodecFrame identified by @frame_number. ++ * Returns: (transfer full) (nullable): pending unfinished #GstVideoCodecFrame identified by @frame_number. 
+ */ + GstVideoCodecFrame * + gst_video_encoder_get_frame (GstVideoEncoder * encoder, int frame_number) +@@ -2959,7 +2995,7 @@ gst_video_encoder_get_frames (GstVideoEncoder * encoder) + /** + * gst_video_encoder_merge_tags: + * @encoder: a #GstVideoEncoder +- * @tags: (allow-none): a #GstTagList to merge, or NULL to unset ++ * @tags: (nullable): a #GstTagList to merge, or NULL to unset + * previously-set tags + * @mode: the #GstTagMergeMode to use, usually #GST_TAG_MERGE_REPLACE + * +@@ -3002,9 +3038,9 @@ gst_video_encoder_merge_tags (GstVideoEncoder * encoder, + /** + * gst_video_encoder_get_allocator: + * @encoder: a #GstVideoEncoder +- * @allocator: (out) (allow-none) (transfer full): the #GstAllocator ++ * @allocator: (out) (optional) (nullable) (transfer full): the #GstAllocator + * used +- * @params: (out) (allow-none) (transfer full): the ++ * @params: (out) (optional) (transfer full): the + * #GstAllocationParams of @allocator + * + * Lets #GstVideoEncoder sub-classes to know the memory @allocator +diff --git a/gst-libs/gst/video/gstvideometa.c b/gst-libs/gst/video/gstvideometa.c +index bfd91465c..f9f1a156a 100644 +--- a/gst-libs/gst/video/gstvideometa.c ++++ b/gst-libs/gst/video/gstvideometa.c +@@ -160,7 +160,7 @@ gst_video_meta_get_info (void) + * Buffers can contain multiple #GstVideoMeta metadata items when dealing with + * multiview buffers. + * +- * Returns: (transfer none): the #GstVideoMeta with lowest id (usually 0) or %NULL when there ++ * Returns: (transfer none) (nullable): the #GstVideoMeta with lowest id (usually 0) or %NULL when there + * is no such metadata on @buffer. + */ + GstVideoMeta * +@@ -193,7 +193,7 @@ gst_buffer_get_video_meta (GstBuffer * buffer) + * Buffers can contain multiple #GstVideoMeta metadata items when dealing with + * multiview buffers. + * +- * Returns: (transfer none): the #GstVideoMeta with @id or %NULL when there is no such metadata ++ * Returns: (transfer none) (nullable): the #GstVideoMeta with @id or %NULL when there is no such metadata + * on @buffer. + */ + GstVideoMeta * +@@ -394,6 +394,25 @@ gst_video_meta_unmap (GstVideoMeta * meta, guint plane, GstMapInfo * info) + return meta->unmap (meta, plane, info); + } + ++static gboolean ++gst_video_meta_is_alignment_valid (GstVideoAlignment * align) ++{ ++ gint i; ++ ++ g_return_val_if_fail (align != NULL, FALSE); ++ ++ if (align->padding_top != 0 || align->padding_bottom != 0 || ++ align->padding_left != 0 || align->padding_right != 0) ++ return TRUE; ++ ++ for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) { ++ if (align->stride_align[i] != 0) ++ return TRUE; ++ } ++ ++ return FALSE; ++} ++ + static gboolean + gst_video_meta_validate_alignment (GstVideoMeta * meta, + gsize plane_size[GST_VIDEO_MAX_PLANES]) +@@ -401,6 +420,15 @@ gst_video_meta_validate_alignment (GstVideoMeta * meta, + GstVideoInfo info; + guint i; + ++ if (!gst_video_meta_is_alignment_valid (&meta->alignment)) { ++ GST_LOG ("Set alignment on meta to all zero"); ++ ++ /* When alignment is invalid, no further check is needed, ++ unless user wants to calculate the pitch for each plane. */ ++ if (!plane_size) ++ return TRUE; ++ } ++ + gst_video_info_init (&info); + gst_video_info_set_format (&info, meta->format, meta->width, meta->height); + +@@ -924,7 +952,7 @@ gst_video_region_of_interest_meta_get_info (void) + * Buffers can contain multiple #GstVideoRegionOfInterestMeta metadata items if + * multiple regions of interests are marked on a frame. 
+ * +- * Returns: (transfer none): the #GstVideoRegionOfInterestMeta with @id or %NULL when there is ++ * Returns: (transfer none) (nullable): the #GstVideoRegionOfInterestMeta with @id or %NULL when there is + * no such metadata on @buffer. + */ + GstVideoRegionOfInterestMeta * +@@ -1178,7 +1206,7 @@ gst_buffer_add_video_time_code_meta (GstBuffer * buffer, + * Attaches #GstVideoTimeCodeMeta metadata to @buffer with the given + * parameters. + * +- * Returns: (transfer none): the #GstVideoTimeCodeMeta on @buffer, or ++ * Returns: (transfer none) (nullable): the #GstVideoTimeCodeMeta on @buffer, or + * (since 1.16) %NULL if the timecode was invalid. + * + * Since: 1.10 +diff --git a/gst-libs/gst/video/gstvideopool.c b/gst-libs/gst/video/gstvideopool.c +index 7cfd367e3..fd4198d79 100644 +--- a/gst-libs/gst/video/gstvideopool.c ++++ b/gst-libs/gst/video/gstvideopool.c +@@ -50,7 +50,7 @@ GST_DEBUG_CATEGORY_STATIC (gst_video_pool_debug); + */ + void + gst_buffer_pool_config_set_video_alignment (GstStructure * config, +- GstVideoAlignment * align) ++ const GstVideoAlignment * align) + { + g_return_if_fail (config != NULL); + g_return_if_fail (align != NULL); +diff --git a/gst-libs/gst/video/gstvideopool.h b/gst-libs/gst/video/gstvideopool.h +index 01e9abd0f..6fd0db1bc 100644 +--- a/gst-libs/gst/video/gstvideopool.h ++++ b/gst-libs/gst/video/gstvideopool.h +@@ -48,7 +48,7 @@ G_BEGIN_DECLS + /* setting a bufferpool config */ + + GST_VIDEO_API +-void gst_buffer_pool_config_set_video_alignment (GstStructure *config, GstVideoAlignment *align); ++void gst_buffer_pool_config_set_video_alignment (GstStructure *config, const GstVideoAlignment *align); + + GST_VIDEO_API + gboolean gst_buffer_pool_config_get_video_alignment (GstStructure *config, GstVideoAlignment *align); +diff --git a/gst-libs/gst/video/gstvideosink.c b/gst-libs/gst/video/gstvideosink.c +index 32f5ee713..8450a429c 100644 +--- a/gst-libs/gst/video/gstvideosink.c ++++ b/gst-libs/gst/video/gstvideosink.c +@@ -46,6 +46,7 @@ enum + + struct _GstVideoSinkPrivate + { ++ GstVideoInfo info; + gboolean show_preroll_frame; /* ATOMIC */ + }; + +@@ -78,59 +79,88 @@ static void gst_video_sink_set_property (GObject * object, guint prop_id, + static void gst_video_sink_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); + ++static GstStateChangeReturn gst_video_sink_change_state (GstElement * element, ++ GstStateChange transition); + static GstFlowReturn gst_video_sink_show_preroll_frame (GstBaseSink * bsink, + GstBuffer * buf); + static GstFlowReturn gst_video_sink_show_frame (GstBaseSink * bsink, + GstBuffer * buf); ++static gboolean gst_video_sink_set_caps (GstBaseSink * bsink, GstCaps * caps); ++static void gst_video_sink_get_times (GstBaseSink * bsink, GstBuffer * buffer, ++ GstClockTime * start, GstClockTime * end); + + /** + * gst_video_sink_center_rect: + * @src: the #GstVideoRectangle describing the source area + * @dst: the #GstVideoRectangle describing the destination area +- * @result: a pointer to a #GstVideoRectangle which will receive the result area ++ * @result: (out caller-allocates): a pointer to a #GstVideoRectangle which will receive the result area ++ * @scaling: a #gboolean indicating if scaling should be applied or not ++ * ++ * Deprecated: 1.20: Use gst_video_center_rect() instead. 
++ */ ++void ++gst_video_sink_center_rect (GstVideoRectangle src, GstVideoRectangle dst, ++ GstVideoRectangle * result, gboolean scaling) ++{ ++ gst_video_center_rect (&src, &dst, result, scaling); ++} ++ ++/** ++ * gst_video_center_rect: ++ * @src: a pointer to #GstVideoRectangle describing the source area ++ * @dst: a pointer to #GstVideoRectangle describing the destination area ++ * @result: (out caller-allocates): a pointer to a #GstVideoRectangle which will receive the result area + * @scaling: a #gboolean indicating if scaling should be applied or not + * + * Takes @src rectangle and position it at the center of @dst rectangle with or + * without @scaling. It handles clipping if the @src rectangle is bigger than + * the @dst one and @scaling is set to FALSE. ++ * ++ * Since: 1.20 + */ + void +-gst_video_sink_center_rect (GstVideoRectangle src, GstVideoRectangle dst, +- GstVideoRectangle * result, gboolean scaling) ++gst_video_center_rect (const GstVideoRectangle * src, ++ const GstVideoRectangle * dst, GstVideoRectangle * result, gboolean scaling) + { ++ g_return_if_fail (src != NULL); ++ g_return_if_fail (dst != NULL); + g_return_if_fail (result != NULL); + + if (!scaling) { +- result->w = MIN (src.w, dst.w); +- result->h = MIN (src.h, dst.h); +- result->x = dst.x + (dst.w - result->w) / 2; +- result->y = dst.y + (dst.h - result->h) / 2; ++ result->w = MIN (src->w, dst->w); ++ result->h = MIN (src->h, dst->h); ++ result->x = dst->x + (dst->w - result->w) / 2; ++ result->y = dst->y + (dst->h - result->h) / 2; + } else { + gdouble src_ratio, dst_ratio; + +- src_ratio = (gdouble) src.w / src.h; +- dst_ratio = (gdouble) dst.w / dst.h; ++ g_return_if_fail (src->h != 0); ++ g_return_if_fail (dst->h != 0); ++ ++ src_ratio = (gdouble) src->w / src->h; ++ dst_ratio = (gdouble) dst->w / dst->h; + + if (src_ratio > dst_ratio) { +- result->w = dst.w; +- result->h = dst.w / src_ratio; +- result->x = dst.x; +- result->y = dst.y + (dst.h - result->h) / 2; ++ result->w = dst->w; ++ result->h = dst->w / src_ratio; ++ result->x = dst->x; ++ result->y = dst->y + (dst->h - result->h) / 2; + } else if (src_ratio < dst_ratio) { +- result->w = dst.h * src_ratio; +- result->h = dst.h; +- result->x = dst.x + (dst.w - result->w) / 2; +- result->y = dst.y; ++ result->w = dst->h * src_ratio; ++ result->h = dst->h; ++ result->x = dst->x + (dst->w - result->w) / 2; ++ result->y = dst->y; + } else { +- result->x = dst.x; +- result->y = dst.y; +- result->w = dst.w; +- result->h = dst.h; ++ result->x = dst->x; ++ result->y = dst->y; ++ result->w = dst->w; ++ result->h = dst->h; + } + } + + GST_DEBUG ("source is %dx%d dest is %dx%d, result is %dx%d with x,y %dx%d", +- src.w, src.h, dst.w, dst.h, result->w, result->h, result->x, result->y); ++ src->w, src->h, dst->w, dst->h, ++ result->w, result->h, result->x, result->y); + } + + /* Initing stuff */ +@@ -153,6 +183,7 @@ gst_video_sink_init (GstVideoSink * videosink) + static void + gst_video_sink_class_init (GstVideoSinkClass * klass) + { ++ GstElementClass *element_class = (GstElementClass *) klass; + GstBaseSinkClass *basesink_class = (GstBaseSinkClass *) klass; + GObjectClass *gobject_class = (GObjectClass *) klass; + +@@ -173,9 +204,83 @@ gst_video_sink_class_init (GstVideoSinkClass * klass) + DEFAULT_SHOW_PREROLL_FRAME, + G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS)); + ++ element_class->change_state = GST_DEBUG_FUNCPTR (gst_video_sink_change_state); ++ + basesink_class->render = GST_DEBUG_FUNCPTR (gst_video_sink_show_frame); + 
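/* Worked example, not part of the patch: centring a 1280x720 source inside a
 * 1920x1200 window with the new pointer-based gst_video_center_rect() shown
 * above. The sizes are made up for illustration. */
static void
example_center_rect (void)
{
  GstVideoRectangle src = { 0, 0, 1280, 720 };
  GstVideoRectangle dst = { 0, 0, 1920, 1200 };
  GstVideoRectangle result;

  gst_video_center_rect (&src, &dst, &result, TRUE);
  /* result is now { x = 0, y = 60, w = 1920, h = 1080 }: scaled to the
   * destination width and centred vertically. */
}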
basesink_class->preroll = + GST_DEBUG_FUNCPTR (gst_video_sink_show_preroll_frame); ++ basesink_class->set_caps = GST_DEBUG_FUNCPTR (gst_video_sink_set_caps); ++ basesink_class->get_times = GST_DEBUG_FUNCPTR (gst_video_sink_get_times); ++} ++ ++static GstStateChangeReturn ++gst_video_sink_change_state (GstElement * element, GstStateChange transition) ++{ ++ GstVideoSink *vsink; ++ ++ vsink = GST_VIDEO_SINK_CAST (element); ++ ++ switch (transition) { ++ case GST_STATE_CHANGE_READY_TO_PAUSED: ++ gst_video_info_init (&vsink->priv->info); ++ break; ++ default: ++ break; ++ } ++ ++ return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); ++} ++ ++static gboolean ++gst_video_sink_set_caps (GstBaseSink * bsink, GstCaps * caps) ++{ ++ GstVideoSink *vsink; ++ GstVideoSinkClass *klass; ++ GstVideoInfo info; ++ ++ vsink = GST_VIDEO_SINK_CAST (bsink); ++ klass = GST_VIDEO_SINK_GET_CLASS (vsink); ++ ++ if (!gst_video_info_from_caps (&info, caps)) { ++ GST_ERROR_OBJECT (bsink, "Failed to parse caps %" GST_PTR_FORMAT, caps); ++ return FALSE; ++ } ++ ++ GST_DEBUG_OBJECT (bsink, "Setting caps %" GST_PTR_FORMAT, caps); ++ vsink->priv->info = info; ++ ++ if (klass->set_info) ++ return klass->set_info (vsink, caps, &vsink->priv->info); ++ ++ return TRUE; ++} ++ ++static void ++gst_video_sink_get_times (GstBaseSink * bsink, GstBuffer * buffer, ++ GstClockTime * start, GstClockTime * end) ++{ ++ GstVideoSink *vsink; ++ GstClockTime timestamp; ++ ++ vsink = GST_VIDEO_SINK_CAST (bsink); ++ ++ timestamp = GST_BUFFER_DTS_OR_PTS (buffer); ++ if (GST_CLOCK_TIME_IS_VALID (timestamp)) { ++ *start = timestamp; ++ if (GST_BUFFER_DURATION_IS_VALID (buffer)) { ++ *end = timestamp + GST_BUFFER_DURATION (buffer); ++ } else if (vsink->priv->info.fps_n > 0) { ++ *end = timestamp + ++ gst_util_uint64_scale_int (GST_SECOND, vsink->priv->info.fps_d, ++ vsink->priv->info.fps_n); ++ } else if (bsink->segment.rate < 0) { ++ /* The end time will be used for clock waiting time position ++ * in case of revese playback, and unknown end time will result in ++ * never waiting for clock (as if sync=false). ++ * Returning timestamp here would be the best effort we can do */ ++ *end = timestamp; ++ } ++ } + } + + static GstFlowReturn +diff --git a/gst-libs/gst/video/gstvideosink.h b/gst-libs/gst/video/gstvideosink.h +index d28ef042a..a7a226dde 100644 +--- a/gst-libs/gst/video/gstvideosink.h ++++ b/gst-libs/gst/video/gstvideosink.h +@@ -26,6 +26,7 @@ + #include + #include + #include ++#include + + G_BEGIN_DECLS + +@@ -117,17 +118,34 @@ struct _GstVideoSinkClass { + + GstFlowReturn (*show_frame) (GstVideoSink *video_sink, GstBuffer *buf); + ++ /** ++ * GstVideoSinkClass::set_info: ++ * @caps: A #GstCaps. ++ * @info: A #GstVideoInfo corresponding to @caps. ++ * ++ * Notifies the subclass of changed #GstVideoInfo. 
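/* Sketch, assuming a hypothetical "my_sink" subclass: with the set_info
 * vmethod described above, a sink no longer needs its own set_caps override
 * just to learn the geometry, since the base class already parsed the caps
 * into a GstVideoInfo. */
static gboolean
my_sink_set_info (GstVideoSink * sink, GstCaps * caps, const GstVideoInfo * info)
{
  GST_INFO_OBJECT (sink, "negotiated %dx%d at %d/%d fps",
      info->width, info->height, info->fps_n, info->fps_d);
  return TRUE;
}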
++ * ++ * Since: 1.20 ++ */ ++ gboolean (*set_info) (GstVideoSink *video_sink, GstCaps *caps, const GstVideoInfo *info); ++ + /*< private >*/ +- gpointer _gst_reserved[GST_PADDING]; ++ gpointer _gst_reserved[GST_PADDING-1]; + }; + + GST_VIDEO_API + GType gst_video_sink_get_type (void); + +-GST_VIDEO_API ++GST_VIDEO_DEPRECATED_FOR(gst_video_center_rect) + void gst_video_sink_center_rect (GstVideoRectangle src, GstVideoRectangle dst, + GstVideoRectangle *result, gboolean scaling); + ++GST_VIDEO_API ++void gst_video_center_rect (const GstVideoRectangle * src, ++ const GstVideoRectangle * dst, ++ GstVideoRectangle * result, ++ gboolean scaling); ++ + G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoSink, gst_object_unref) + + G_END_DECLS +diff --git a/gst-libs/gst/video/gstvideotimecode.c b/gst-libs/gst/video/gstvideotimecode.c +index f3ab4d82b..5713032a4 100644 +--- a/gst-libs/gst/video/gstvideotimecode.c ++++ b/gst-libs/gst/video/gstvideotimecode.c +@@ -81,30 +81,42 @@ gst_video_time_code_is_valid (const GstVideoTimeCode * tc) + + /* We can't have more frames than rounded up frames per second */ + fr = (tc->config.fps_n + (tc->config.fps_d >> 1)) / tc->config.fps_d; +- if (tc->frames >= fr && (tc->config.fps_n != 0 || tc->config.fps_d != 1)) +- return FALSE; ++ if (tc->config.fps_d > tc->config.fps_n) { ++ guint64 s; ++ ++ if (tc->frames > 0) ++ return FALSE; ++ /* For less than 1 fps only certain second values are allowed */ ++ s = tc->seconds + (60 * (tc->minutes + (60 * tc->hours))); ++ if ((s * tc->config.fps_n) % tc->config.fps_d != 0) ++ return FALSE; ++ } else { ++ if (tc->frames >= fr && (tc->config.fps_n != 0 || tc->config.fps_d != 1)) ++ return FALSE; ++ } + +- /* We either need a specific X/1001 framerate or otherwise an integer +- * framerate */ ++ /* We need either a specific X/1001 framerate, or less than 1 FPS, ++ * otherwise an integer framerate. */ + if (tc->config.fps_d == 1001) { + if (tc->config.fps_n != 30000 && tc->config.fps_n != 60000 && +- tc->config.fps_n != 24000) ++ tc->config.fps_n != 24000 && tc->config.fps_n != 120000) + return FALSE; +- } else if (tc->config.fps_n % tc->config.fps_d != 0) { ++ } else if (tc->config.fps_n >= tc->config.fps_d ++ && tc->config.fps_n % tc->config.fps_d != 0) { + return FALSE; + } + +- /* We only support 30000/1001 and 60000/1001 as drop-frame framerates. +- * 24000/1001 is *not* a drop-frame framerate! */ ++ /* We support only 30000/1001, 60000/1001, and 120000/1001 (see above) as ++ * drop-frame framerates. 24000/1001 is *not* a drop-frame framerate! */ + if (tc->config.flags & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME) { +- if (tc->config.fps_d != 1001 || (tc->config.fps_n != 30000 +- && tc->config.fps_n != 60000)) ++ if (tc->config.fps_d != 1001 || tc->config.fps_n == 24000) + return FALSE; + } + + /* Drop-frame framerates require skipping over the first two +- * timecodes every minutes except for every tenth minute in case +- * of 30000/1001 and the first four timecodes for 60000/1001 */ ++ * timecodes every minute except for every tenth minute in case ++ * of 30000/1001, the first four timecodes for 60000/1001, ++ * and the first eight timecodes for 120000/1001. 
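/* Worked example, not from the patch: with the relaxed check above, a 1/5 fps
 * timecode (one frame every five seconds) is valid only when the frames field
 * is 0 and the absolute second count lands on a frame boundary. */
static void
example_low_fps_timecode (void)
{
  GstVideoTimeCode *good = gst_video_time_code_new (1, 5, NULL,
      GST_VIDEO_TIME_CODE_FLAGS_NONE, 0, 0, 10, 0, 0);
  GstVideoTimeCode *bad = gst_video_time_code_new (1, 5, NULL,
      GST_VIDEO_TIME_CODE_FLAGS_NONE, 0, 0, 12, 0, 0);

  /* 00:00:10 -> (10 * 1) % 5 == 0, on a frame boundary */
  g_assert (gst_video_time_code_is_valid (good));
  /* 00:00:12 -> (12 * 1) % 5 != 0, between frames, rejected */
  g_assert (!gst_video_time_code_is_valid (bad));

  gst_video_time_code_free (good);
  gst_video_time_code_free (bad);
}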
*/ + if ((tc->config.flags & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME) && + tc->minutes % 10 && tc->seconds == 0 && tc->frames < fr / 15) { + return FALSE; +@@ -256,8 +268,6 @@ gst_video_time_code_init_from_date_time_full (GstVideoTimeCode * tc, + GDateTime * dt, GstVideoTimeCodeFlags flags, guint field_count) + { + GDateTime *jam; +- guint64 frames; +- gboolean add_a_frame = FALSE; + + g_return_val_if_fail (tc != NULL, FALSE); + g_return_val_if_fail (dt != NULL, FALSE); +@@ -268,31 +278,51 @@ gst_video_time_code_init_from_date_time_full (GstVideoTimeCode * tc, + jam = g_date_time_new_local (g_date_time_get_year (dt), + g_date_time_get_month (dt), g_date_time_get_day_of_month (dt), 0, 0, 0.0); + +- /* Note: This might be inaccurate for 1 frame +- * in case we have a drop frame timecode */ +- frames = +- gst_util_uint64_scale_round (g_date_time_get_microsecond (dt) * +- G_GINT64_CONSTANT (1000), fps_n, fps_d * GST_SECOND); +- if (G_UNLIKELY (((frames == fps_n) && (fps_d == 1)) || +- ((frames == fps_n / 1000) && (fps_d == 1001)))) { +- /* Avoid invalid timecodes */ +- frames--; +- add_a_frame = TRUE; +- } ++ if (fps_d > fps_n) { ++ guint64 hour, min, sec; + +- gst_video_time_code_init (tc, fps_n, fps_d, jam, flags, +- g_date_time_get_hour (dt), g_date_time_get_minute (dt), +- g_date_time_get_second (dt), frames, field_count); ++ sec = ++ g_date_time_get_second (dt) + (60 * (g_date_time_get_minute (dt) + ++ (60 * g_date_time_get_hour (dt)))); ++ sec -= (sec * fps_n) % fps_d; + +- if (tc->config.flags & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME) { +- guint df = (tc->config.fps_n + (tc->config.fps_d >> 1)) / +- (15 * tc->config.fps_d); +- if (tc->minutes % 10 && tc->seconds == 0 && tc->frames < df) { +- tc->frames = df; ++ min = sec / 60; ++ sec = sec % 60; ++ hour = min / 60; ++ min = min % 60; ++ ++ gst_video_time_code_init (tc, fps_n, fps_d, jam, flags, ++ hour, min, sec, 0, field_count); ++ } else { ++ guint64 frames; ++ gboolean add_a_frame = FALSE; ++ ++ /* Note: This might be inaccurate for 1 frame ++ * in case we have a drop frame timecode */ ++ frames = ++ gst_util_uint64_scale_round (g_date_time_get_microsecond (dt) * ++ G_GINT64_CONSTANT (1000), fps_n, fps_d * GST_SECOND); ++ if (G_UNLIKELY (((frames == fps_n) && (fps_d == 1)) || ++ ((frames == fps_n / 1000) && (fps_d == 1001)))) { ++ /* Avoid invalid timecodes */ ++ frames--; ++ add_a_frame = TRUE; ++ } ++ ++ gst_video_time_code_init (tc, fps_n, fps_d, jam, flags, ++ g_date_time_get_hour (dt), g_date_time_get_minute (dt), ++ g_date_time_get_second (dt), frames, field_count); ++ ++ if (tc->config.flags & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME) { ++ guint df = (tc->config.fps_n + (tc->config.fps_d >> 1)) / ++ (15 * tc->config.fps_d); ++ if (tc->minutes % 10 && tc->seconds == 0 && tc->frames < df) { ++ tc->frames = df; ++ } + } ++ if (add_a_frame) ++ gst_video_time_code_increment_frame (tc); + } +- if (add_a_frame) +- gst_video_time_code_increment_frame (tc); + + g_date_time_unref (jam); + +@@ -366,6 +396,9 @@ gst_video_time_code_frames_since_daily_jam (const GstVideoTimeCode * tc) + (ff_minutes * tc->minutes) + + dropframe_multiplier * ((gint) (tc->minutes / 10)) + + (ff_hours * tc->hours); ++ } else if (tc->config.fps_d > tc->config.fps_n) { ++ return gst_util_uint64_scale (tc->seconds + (60 * (tc->minutes + ++ (60 * tc->hours))), tc->config.fps_n, tc->config.fps_d); + } else { + return tc->frames + (ff_nom * (tc->seconds + (60 * (tc->minutes + + (60 * tc->hours))))); +@@ -468,6 +501,17 @@ gst_video_time_code_add_frames (GstVideoTimeCode * tc, 
gint64 frames) + framecount - (ff_nom * sec_new) - (ff_minutes * min_new) - + (dropframe_multiplier * ((gint) (min_new / 10))) - + (ff_hours * h_notmod24); ++ } else if (tc->config.fps_d > tc->config.fps_n) { ++ frames_new = ++ frames + gst_util_uint64_scale (tc->seconds + (60 * (tc->minutes + ++ (60 * tc->hours))), tc->config.fps_n, tc->config.fps_d); ++ sec_new = ++ gst_util_uint64_scale (frames_new, tc->config.fps_d, tc->config.fps_n); ++ frames_new = 0; ++ min_new = sec_new / 60; ++ sec_new = sec_new % 60; ++ h_notmod24 = min_new / 60; ++ min_new = min_new % 60; + } else { + framecount = + frames + tc->frames + (ff_nom * (tc->seconds + (sixty * (tc->minutes + +@@ -492,7 +536,7 @@ gst_video_time_code_add_frames (GstVideoTimeCode * tc, gint64 frames) + /* The calculations above should always give correct results */ + g_assert (min_new < 60); + g_assert (sec_new < 60); +- g_assert (frames_new < ff_nom); ++ g_assert (frames_new < ff_nom || (ff_nom == 0 && frames_new == 0)); + + tc->hours = h_new; + tc->minutes = min_new; +@@ -759,7 +803,7 @@ gst_video_time_code_new_from_date_time (guint fps_n, guint fps_d, + * The resulting config->latest_daily_jam is set to + * midnight, and timecode is set to the given time. + * +- * Returns: the #GstVideoTimeCode representation of @dt, or %NULL if ++ * Returns: (nullable): the #GstVideoTimeCode representation of @dt, or %NULL if + * no valid timecode could be created. + * + * Since: 1.16 +diff --git a/gst-libs/gst/video/gstvideoutils.c b/gst-libs/gst/video/gstvideoutils.c +index 7e543db35..03dffcd8f 100644 +--- a/gst-libs/gst/video/gstvideoutils.c ++++ b/gst-libs/gst/video/gstvideoutils.c +@@ -171,6 +171,10 @@ _gst_video_codec_state_free (GstVideoCodecState * state) + gst_caps_unref (state->allocation_caps); + if (state->codec_data) + gst_buffer_unref (state->codec_data); ++ if (state->mastering_display_info) ++ g_slice_free (GstVideoMasteringDisplayInfo, state->mastering_display_info); ++ if (state->content_light_level) ++ g_slice_free (GstVideoContentLightLevel, state->content_light_level); + g_slice_free (GstVideoCodecState, state); + } + +diff --git a/gst-libs/gst/video/gstvideoutils.h b/gst-libs/gst/video/gstvideoutils.h +index f49a08974..cbe19f0e6 100644 +--- a/gst-libs/gst/video/gstvideoutils.h ++++ b/gst-libs/gst/video/gstvideoutils.h +@@ -28,6 +28,7 @@ + + #include + #include ++#include + + G_BEGIN_DECLS + #define GST_TYPE_VIDEO_CODEC_STATE \ +@@ -47,6 +48,10 @@ typedef struct _GstVideoCodecFrame GstVideoCodecFrame; + * 'codec_data' field of a stream, or NULL. + * @allocation_caps: The #GstCaps for allocation query and pool + * negotiation. Since: 1.10 ++ * @mastering_display_info: Mastering display color volume information ++ * (HDR metadata) for the stream. Since: 1.20 ++ * @content_light_level: Content light level information for the stream. ++ * Since: 1.20 + * + * Structure representing the state of an incoming or outgoing video + * stream for encoders and decoders. +@@ -55,8 +60,22 @@ typedef struct _GstVideoCodecFrame GstVideoCodecFrame; + * respective @set_format vmethods. + * + * Decoders and encoders can set the downstream state, by using the +- * @gst_video_decoder_set_output_state() or +- * @gst_video_encoder_set_output_state() methods. ++ * gst_video_decoder_set_output_state() or ++ * gst_video_encoder_set_output_state() methods. ++ */ ++/** ++ * GstVideoCodecState.mastering_display_info: ++ * ++ * Mastering display color volume information (HDR metadata) for the stream. 
++ * ++ * Since: 1.20 ++ */ ++/** ++ * GstVideoCodecState.content_light_level: ++ * ++ * Content light level information for the stream. ++ * ++ * Since: 1.20 + */ + struct _GstVideoCodecState + { +@@ -72,8 +91,11 @@ struct _GstVideoCodecState + + GstCaps *allocation_caps; + ++ GstVideoMasteringDisplayInfo *mastering_display_info; ++ GstVideoContentLightLevel *content_light_level; ++ + /*< private >*/ +- gpointer padding[GST_PADDING_LARGE - 1]; ++ gpointer padding[GST_PADDING_LARGE - 3]; + }; + + /** +@@ -82,6 +104,7 @@ struct _GstVideoCodecState + * @GST_VIDEO_CODEC_FRAME_FLAG_SYNC_POINT: is the frame a synchronization point (keyframe) + * @GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME: should the output frame be made a keyframe + * @GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME_HEADERS: should the encoder output stream headers ++ * @GST_VIDEO_CODEC_FRAME_FLAG_CORRUPTED: the buffer data is corrupted (Since: 1.20) + * + * Flags for #GstVideoCodecFrame + */ +@@ -90,7 +113,15 @@ typedef enum + GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY = (1<<0), + GST_VIDEO_CODEC_FRAME_FLAG_SYNC_POINT = (1<<1), + GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME = (1<<2), +- GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME_HEADERS = (1<<3) ++ GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME_HEADERS = (1<<3), ++ /** ++ * GST_VIDEO_CODEC_FRAME_FLAG_CORRUPTED: ++ * ++ * The buffer data is corrupted. ++ * ++ * Since: 1.20 ++ */ ++ GST_VIDEO_CODEC_FRAME_FLAG_CORRUPTED = (1<<4), + } GstVideoCodecFrameFlags; + + /** +@@ -214,8 +245,8 @@ typedef enum + * be kept. + * @output_buffer: the output #GstBuffer. Implementations should set this either + * directly, or by using the +- * @gst_video_decoder_allocate_output_frame() or +- * @gst_video_decoder_allocate_output_buffer() methods. The buffer is ++ * gst_video_decoder_allocate_output_frame() or ++ * gst_video_decoder_allocate_output_buffer() methods. The buffer is + * owned by the frame and references to the frame instead of the + * buffer should be kept. + * @deadline: Running time when the frame will be used. 
+@@ -260,9 +291,11 @@ struct _GstVideoCodecFrame + + union { + struct { ++ /*< private >*/ + GstClockTime ts; + GstClockTime ts2; + guint num_subframes; ++ guint subframes_processed; + } ABI; + gpointer padding[GST_PADDING_LARGE]; + } abidata; +diff --git a/gst-libs/gst/video/meson.build b/gst-libs/gst/video/meson.build +index d1d049209..57688df2d 100644 +--- a/gst-libs/gst/video/meson.build ++++ b/gst-libs/gst/video/meson.build +@@ -1,8 +1,9 @@ +-video_sources = [ ++video_sources = files([ + 'colorbalance.c', + 'colorbalancechannel.c', + 'convertframe.c', + 'gstvideoaffinetransformationmeta.c', ++ 'gstvideocodecalphameta.c', + 'gstvideoaggregator.c', + 'gstvideodecoder.c', + 'gstvideoencoder.c', +@@ -29,17 +30,19 @@ video_sources = [ + 'video-multiview.c', + 'video-resampler.c', + 'video-scaler.c', ++ 'video-sei.c', + 'video-tile.c', + 'video-overlay-composition.c', + 'videodirection.c', + 'videoorientation.c', + 'videooverlay.c', +-] ++]) + +-video_headers = [ ++video_headers = files([ + 'colorbalance.h', + 'colorbalancechannel.h', + 'gstvideoaffinetransformationmeta.h', ++ 'gstvideocodecalphameta.h', + 'gstvideoaggregator.h', + 'gstvideodecoder.h', + 'gstvideoencoder.h', +@@ -71,7 +74,8 @@ video_headers = [ + 'video-blend.h', + 'video-overlay-composition.h', + 'video-multiview.h', +-] ++ 'video-sei.h', ++]) + install_headers(video_headers, subdir : 'gstreamer-1.0/gst/video/') + + video_mkenum_headers = [ +@@ -88,7 +92,11 @@ video_mkenum_headers = [ + 'video-resampler.h', + 'video-scaler.h', + 'video-tile.h', ++ 'gstvideometa.h', + 'gstvideotimecode.h', ++ 'gstvideoutils.h', ++ 'gstvideoencoder.h', ++ 'gstvideodecoder.h', + 'colorbalance.h', + 'navigation.h', + ] +@@ -128,7 +136,7 @@ endif + + gstvideo = library('gstvideo-@0@'.format(api_version), + video_sources, gstvideo_h, gstvideo_c, orc_c, orc_h, +- c_args : gst_plugins_base_args + ['-DBUILDING_GST_VIDEO'], ++ c_args : gst_plugins_base_args + ['-DBUILDING_GST_VIDEO', '-DG_LOG_DOMAIN="GStreamer-Video"'], + include_directories: [configinc, libsinc], + version : libversion, + soversion : soversion, +diff --git a/gst-libs/gst/video/navigation.c b/gst-libs/gst/video/navigation.c +index 95481686c..09411ce0d 100644 +--- a/gst-libs/gst/video/navigation.c ++++ b/gst-libs/gst/video/navigation.c +@@ -26,27 +26,29 @@ + * @short_description: Interface for creating, sending and parsing navigation + * events. + * +- * The Navigation interface is used for creating and injecting navigation related +- * events such as mouse button presses, cursor motion and key presses. The associated +- * library also provides methods for parsing received events, and for sending and +- * receiving navigation related bus events. One main usecase is DVD menu navigation. ++ * The Navigation interface is used for creating and injecting navigation ++ * related events such as mouse button presses, cursor motion and key presses. ++ * The associated library also provides methods for parsing received events, and ++ * for sending and receiving navigation related bus events. One main usecase is ++ * DVD menu navigation. + * + * The main parts of the API are: + * +- * * The GstNavigation interface, implemented by elements which provide an application +- * with the ability to create and inject navigation events into the pipeline. +- * * GstNavigation event handling API. GstNavigation events are created in response to +- * calls on a GstNavigation interface implementation, and sent in the pipeline. 
Upstream +- * elements can use the navigation event API functions to parse the contents of received +- * messages. ++ * * The GstNavigation interface, implemented by elements which provide an ++ * application with the ability to create and inject navigation events into ++ * the pipeline. ++ * * GstNavigation event handling API. GstNavigation events are created in ++ * response to calls on a GstNavigation interface implementation, and sent in ++ * the pipeline. Upstream elements can use the navigation event API functions ++ * to parse the contents of received messages. + * +- * * GstNavigation message handling API. GstNavigation messages may be sent on the message +- * bus to inform applications of navigation related changes in the pipeline, such as the +- * mouse moving over a clickable region, or the set of available angles changing. +- * +- * The GstNavigation message functions provide functions for creating and parsing +- * custom bus messages for signaling GstNavigation changes. ++ * * GstNavigation message handling API. GstNavigation messages may be sent on ++ * the message bus to inform applications of navigation related changes in the ++ * pipeline, such as the mouse moving over a clickable region, or the set of ++ * available angles changing. + * ++ * The GstNavigation message functions provide functions for creating and ++ * parsing custom bus messages for signaling GstNavigation changes. + */ + + #ifdef HAVE_CONFIG_H +@@ -64,11 +66,26 @@ + + G_DEFINE_INTERFACE (GstNavigation, gst_navigation, 0); + ++static void ++gst_navigation_default_send_event_simple (GstNavigation * navigation, ++ GstEvent * event) ++{ ++ GstNavigationInterface *iface = GST_NAVIGATION_GET_INTERFACE (navigation); ++ ++ if (iface->send_event) { ++ iface->send_event (navigation, ++ gst_structure_copy (gst_event_get_structure (event))); ++ } else { ++ gst_event_unref (event); ++ } ++} ++ + static void + gst_navigation_default_init (GstNavigationInterface * iface) + { + /* default virtual functions */ + iface->send_event = NULL; ++ iface->send_event_simple = gst_navigation_default_send_event_simple; + } + + /* The interface implementer should make sure that the object can handle +@@ -80,6 +97,8 @@ gst_navigation_send_event (GstNavigation * navigation, GstStructure * structure) + + if (iface->send_event) { + iface->send_event (navigation, structure); ++ } else if (iface->send_event_simple) { ++ iface->send_event_simple (navigation, gst_event_new_navigation (structure)); + } else { + gst_structure_free (structure); + } +@@ -179,6 +198,32 @@ gst_navigation_send_command (GstNavigation * navigation, + "command", "command-code", G_TYPE_UINT, (guint) command, NULL)); + } + ++/** ++ * gst_navigation_send_event_simple: ++ * @navigation: The navigation interface instance ++ * @event: (transfer full): The event to send ++ * ++ * Sends an event to the navigation interface. 
++ * Since: 1.22 ++ */ ++void ++gst_navigation_send_event_simple (GstNavigation * navigation, GstEvent * event) ++{ ++ GstNavigationInterface *iface = GST_NAVIGATION_GET_INTERFACE (navigation); ++ ++ g_return_if_fail (GST_EVENT_TYPE (event) == GST_EVENT_NAVIGATION); ++ ++ if (iface->send_event_simple) { ++ iface->send_event_simple (navigation, event); ++ } else if (iface->send_event) { ++ iface->send_event (navigation, ++ gst_structure_copy (gst_event_get_structure (event))); ++ gst_event_unref (event); ++ } else { ++ gst_event_unref (event); ++ } ++} ++ + /* Navigation Queries */ + + #define GST_NAVIGATION_QUERY_HAS_TYPE(query,query_type) \ +@@ -741,16 +786,324 @@ gst_navigation_event_get_type (GstEvent * event) + return GST_NAVIGATION_EVENT_KEY_RELEASE; + else if (g_str_equal (e_type, "command")) + return GST_NAVIGATION_EVENT_COMMAND; ++ else if (g_str_equal (e_type, "touch-down")) ++ return GST_NAVIGATION_EVENT_TOUCH_DOWN; ++ else if (g_str_equal (e_type, "touch-up")) ++ return GST_NAVIGATION_EVENT_TOUCH_UP; ++ else if (g_str_equal (e_type, "touch-cancel")) ++ return GST_NAVIGATION_EVENT_TOUCH_CANCEL; ++ else if (g_str_equal (e_type, "touch-motion")) ++ return GST_NAVIGATION_EVENT_TOUCH_MOTION; ++ else if (g_str_equal (e_type, "touch-frame")) ++ return GST_NAVIGATION_EVENT_TOUCH_FRAME; + + return GST_NAVIGATION_EVENT_INVALID; + } + ++/** ++ * gst_navigation_event_new_key_press: ++ * @key: A string identifying the key press. ++ * @state: a bit-mask representing the state of the modifier keys (e.g. Control, ++ * Shift and Alt). ++ * ++ * Create a new navigation event for the given key press. ++ * ++ * Returns: (transfer full): a new #GstEvent ++ * ++ * Since: 1.22 ++ */ ++GstEvent * ++gst_navigation_event_new_key_press (const gchar * key, ++ GstNavigationModifierType state) ++{ ++ return gst_event_new_navigation (gst_structure_new (GST_NAVIGATION_EVENT_NAME, ++ "event", G_TYPE_STRING, "key-press", "key", G_TYPE_STRING, key, ++ "state", GST_TYPE_NAVIGATION_MODIFIER_TYPE, state, NULL)); ++} ++ ++/** ++ * gst_navigation_event_new_key_release: ++ * @key: A string identifying the released key. ++ * @state: a bit-mask representing the state of the modifier keys (e.g. Control, ++ * Shift and Alt). ++ * ++ * Create a new navigation event for the given key release. ++ * ++ * Returns: (transfer full): a new #GstEvent ++ * ++ * Since: 1.22 ++ */ ++GstEvent * ++gst_navigation_event_new_key_release (const gchar * key, ++ GstNavigationModifierType state) ++{ ++ return gst_event_new_navigation (gst_structure_new (GST_NAVIGATION_EVENT_NAME, ++ "event", G_TYPE_STRING, "key-release", "key", G_TYPE_STRING, key, ++ "state", GST_TYPE_NAVIGATION_MODIFIER_TYPE, state, NULL)); ++} ++ ++/** ++ * gst_navigation_event_new_mouse_button_press: ++ * @button: The number of the pressed mouse button. ++ * @x: The x coordinate of the mouse cursor. ++ * @y: The y coordinate of the mouse cursor. ++ * @state: a bit-mask representing the state of the modifier keys (e.g. Control, ++ * Shift and Alt). ++ * ++ * Create a new navigation event for the given key mouse button press. 
++ * ++ * Returns: (transfer full): a new #GstEvent ++ * ++ * Since: 1.22 ++ */ ++GstEvent * ++gst_navigation_event_new_mouse_button_press (gint button, gdouble x, gdouble y, ++ GstNavigationModifierType state) ++{ ++ return gst_event_new_navigation (gst_structure_new (GST_NAVIGATION_EVENT_NAME, ++ "event", G_TYPE_STRING, "mouse-button-press", ++ "button", G_TYPE_INT, button, "pointer_x", G_TYPE_DOUBLE, x, ++ "pointer_y", G_TYPE_DOUBLE, y, ++ "state", GST_TYPE_NAVIGATION_MODIFIER_TYPE, state, NULL)); ++} ++ ++/** ++ * gst_navigation_event_new_mouse_button_release: ++ * @button: The number of the released mouse button. ++ * @x: The x coordinate of the mouse cursor. ++ * @y: The y coordinate of the mouse cursor. ++ * @state: a bit-mask representing the state of the modifier keys (e.g. Control, ++ * Shift and Alt). ++ * ++ * Create a new navigation event for the given key mouse button release. ++ * ++ * Returns: (transfer full): a new #GstEvent ++ * ++ * Since: 1.22 ++ */ ++GstEvent * ++gst_navigation_event_new_mouse_button_release (gint button, gdouble x, ++ gdouble y, GstNavigationModifierType state) ++{ ++ return gst_event_new_navigation (gst_structure_new (GST_NAVIGATION_EVENT_NAME, ++ "event", G_TYPE_STRING, "mouse-button-release", ++ "button", G_TYPE_INT, button, "pointer_x", G_TYPE_DOUBLE, x, ++ "pointer_y", G_TYPE_DOUBLE, y, ++ "state", GST_TYPE_NAVIGATION_MODIFIER_TYPE, state, NULL)); ++} ++ ++/** ++ * gst_navigation_event_new_mouse_move: ++ * @x: The x coordinate of the mouse cursor. ++ * @y: The y coordinate of the mouse cursor. ++ * @state: a bit-mask representing the state of the modifier keys (e.g. Control, ++ * Shift and Alt). ++ * ++ * Create a new navigation event for the new mouse location. ++ * ++ * Returns: (transfer full): a new #GstEvent ++ * ++ * Since: 1.22 ++ */ ++GstEvent * ++gst_navigation_event_new_mouse_move (gdouble x, gdouble y, ++ GstNavigationModifierType state) ++{ ++ return gst_event_new_navigation (gst_structure_new (GST_NAVIGATION_EVENT_NAME, ++ "event", G_TYPE_STRING, "mouse-move", ++ "pointer_x", G_TYPE_DOUBLE, x, ++ "pointer_y", G_TYPE_DOUBLE, y, ++ "state", GST_TYPE_NAVIGATION_MODIFIER_TYPE, state, NULL)); ++} ++ ++/** ++ * gst_navigation_event_new_mouse_scroll: ++ * @x: The x coordinate of the mouse cursor. ++ * @y: The y coordinate of the mouse cursor. ++ * @delta_x: The x component of the scroll movement. ++ * @delta_y: The y component of the scroll movement. ++ * @state: a bit-mask representing the state of the modifier keys (e.g. Control, ++ * Shift and Alt). ++ * ++ * Create a new navigation event for the mouse scroll. ++ * ++ * Returns: (transfer full): a new #GstEvent ++ * ++ * Since: 1.22 ++ */ ++GstEvent * ++gst_navigation_event_new_mouse_scroll (gdouble x, gdouble y, gdouble delta_x, ++ gdouble delta_y, GstNavigationModifierType state) ++{ ++ return gst_event_new_navigation (gst_structure_new (GST_NAVIGATION_EVENT_NAME, ++ "event", G_TYPE_STRING, "mouse-scroll", ++ "pointer_x", G_TYPE_DOUBLE, x, "pointer_y", G_TYPE_DOUBLE, y, ++ "delta_pointer_x", G_TYPE_DOUBLE, delta_x, ++ "delta_pointer_y", G_TYPE_DOUBLE, delta_y, ++ "state", GST_TYPE_NAVIGATION_MODIFIER_TYPE, state, NULL)); ++} ++ ++/** ++ * gst_navigation_event_new_command: ++ * @command: The navigation command to use. ++ * ++ * Create a new navigation event given navigation command.. 
++ * ++ * Returns: (transfer full): a new #GstEvent ++ * ++ * Since: 1.22 ++ */ ++GstEvent * ++gst_navigation_event_new_command (GstNavigationCommand command) ++{ ++ return gst_event_new_navigation (gst_structure_new (GST_NAVIGATION_EVENT_NAME, ++ "event", G_TYPE_STRING, "command", ++ "command-code", G_TYPE_UINT, (guint) command, NULL)); ++} ++ ++/** ++ * gst_navigation_event_new_touch_down: ++ * @identifier: A number uniquely identifying this touch point. It must stay ++ * unique to this touch point at least until an up event is sent for ++ * the same identifier, or all touch points are cancelled. ++ * @x: The x coordinate of the new touch point. ++ * @y: The y coordinate of the new touch point. ++ * @pressure: Pressure data of the touch point, from 0.0 to 1.0, or NaN if no ++ * data is available. ++ * @state: a bit-mask representing the state of the modifier keys (e.g. Control, ++ * Shift and Alt). ++ * ++ * Create a new navigation event for an added touch point. ++ * ++ * Returns: (transfer full): a new #GstEvent ++ * ++ * Since: 1.22 ++ */ ++GstEvent * ++gst_navigation_event_new_touch_down (guint identifier, gdouble x, gdouble y, ++ gdouble pressure, GstNavigationModifierType state) ++{ ++ return gst_event_new_navigation (gst_structure_new (GST_NAVIGATION_EVENT_NAME, ++ "event", G_TYPE_STRING, "touch-down", ++ "identifier", G_TYPE_UINT, identifier, ++ "pointer_x", G_TYPE_DOUBLE, x, ++ "pointer_y", G_TYPE_DOUBLE, y, ++ "pressure", G_TYPE_DOUBLE, pressure, ++ "state", GST_TYPE_NAVIGATION_MODIFIER_TYPE, state, NULL)); ++} ++ ++/** ++ * gst_navigation_event_new_touch_motion: ++ * @identifier: A number uniquely identifying this touch point. It must ++ * correlate to exactly one previous touch_start event. ++ * @x: The x coordinate of the touch point. ++ * @y: The y coordinate of the touch point. ++ * @pressure: Pressure data of the touch point, from 0.0 to 1.0, or NaN if no ++ * data is available. ++ * @state: a bit-mask representing the state of the modifier keys (e.g. Control, ++ * Shift and Alt). ++ * ++ * Create a new navigation event for a moved touch point. ++ * ++ * Returns: (transfer full): a new #GstEvent ++ * ++ * Since: 1.22 ++ */ ++GstEvent * ++gst_navigation_event_new_touch_motion (guint identifier, gdouble x, gdouble y, ++ gdouble pressure, GstNavigationModifierType state) ++{ ++ return gst_event_new_navigation (gst_structure_new (GST_NAVIGATION_EVENT_NAME, ++ "event", G_TYPE_STRING, "touch-motion", ++ "identifier", G_TYPE_UINT, identifier, ++ "pointer_x", G_TYPE_DOUBLE, x, ++ "pointer_y", G_TYPE_DOUBLE, y, ++ "pressure", G_TYPE_DOUBLE, pressure, ++ "state", GST_TYPE_NAVIGATION_MODIFIER_TYPE, state, NULL)); ++} ++ ++/** ++ * gst_navigation_event_new_touch_up: ++ * @identifier: A number uniquely identifying this touch point. It must ++ * correlate to exactly one previous down event, but can be reused ++ * after sending this event. ++ * @x: The x coordinate of the touch point. ++ * @y: The y coordinate of the touch point. ++ * @state: a bit-mask representing the state of the modifier keys (e.g. Control, ++ * Shift and Alt). ++ * ++ * Create a new navigation event for a removed touch point. 
++ * ++ * Returns: (transfer full): a new #GstEvent ++ * ++ * Since: 1.22 ++ */ ++GstEvent * ++gst_navigation_event_new_touch_up (guint identifier, gdouble x, gdouble y, ++ GstNavigationModifierType state) ++{ ++ return gst_event_new_navigation (gst_structure_new (GST_NAVIGATION_EVENT_NAME, ++ "event", G_TYPE_STRING, "touch-up", ++ "identifier", G_TYPE_UINT, identifier, ++ "pointer_x", G_TYPE_DOUBLE, x, "pointer_y", G_TYPE_DOUBLE, y, ++ "state", GST_TYPE_NAVIGATION_MODIFIER_TYPE, state, NULL)); ++} ++ ++/** ++ * gst_navigation_event_new_touch_frame: ++ * @state: a bit-mask representing the state of the modifier keys (e.g. Control, ++ * Shift and Alt). ++ * ++ * Create a new navigation event signalling the end of a touch frame. Touch ++ * frames signal that all previous down, motion and up events not followed by ++ * another touch frame event already should be considered simultaneous. ++ * ++ * Returns: (transfer full): a new #GstEvent ++ * ++ * Since: 1.22 ++ */ ++GstEvent * ++gst_navigation_event_new_touch_frame (GstNavigationModifierType state) ++{ ++ return gst_event_new_navigation (gst_structure_new (GST_NAVIGATION_EVENT_NAME, ++ "event", G_TYPE_STRING, "touch-frame", ++ "state", GST_TYPE_NAVIGATION_MODIFIER_TYPE, state, NULL)); ++} ++ ++ ++/** ++ * gst_navigation_event_new_touch_cancel: ++ * @state: a bit-mask representing the state of the modifier keys (e.g. Control, ++ * Shift and Alt). ++ * ++ * Create a new navigation event signalling that all currently active touch ++ * points are cancelled and should be discarded. For example, under Wayland ++ * this event might be sent when a swipe passes the threshold to be recognized ++ * as a gesture by the compositor. ++ * ++ * Returns: (transfer full): a new #GstEvent ++ * ++ * Since: 1.22 ++ */ ++GstEvent * ++gst_navigation_event_new_touch_cancel (GstNavigationModifierType state) ++{ ++ return gst_event_new_navigation (gst_structure_new (GST_NAVIGATION_EVENT_NAME, ++ "event", G_TYPE_STRING, "touch-cancel", ++ "state", GST_TYPE_NAVIGATION_MODIFIER_TYPE, state, NULL)); ++} ++ + /** + * gst_navigation_event_parse_key_event: + * @event: A #GstEvent to inspect. + * @key: (out) (optional) (transfer none): A pointer to a location to receive + * the string identifying the key press. The returned string is owned by the + * event, and valid only until the event is unreffed. ++ * ++ * Note: Modifier keys (as defined in #GstNavigationModifierType) ++ * [press](GST_NAVIGATION_EVENT_KEY_PRESS) and ++ * [release](GST_NAVIGATION_KEY_PRESS) events are generated even if those states are ++ * present on all other related events + */ + gboolean + gst_navigation_event_parse_key_event (GstEvent * event, const gchar ** key) +@@ -920,3 +1273,204 @@ gst_navigation_event_parse_command (GstEvent * event, + + return ret; + } ++ ++/** ++ * gst_navigation_event_parse_touch_event: ++ * @event: A #GstEvent to inspect. ++ * @identifier: (out) (optional): Pointer to a guint that will receive the ++ * identifier unique to this touch point. ++ * @x: (out) (optional): Pointer to a gdouble that will receive the x ++ * coordinate of the touch event. ++ * @y: (out) (optional): Pointer to a gdouble that will receive the y ++ * coordinate of the touch event. ++ * @pressure: (out) (optional): Pointer to a gdouble that will receive the ++ * force of the touch event, in the range from 0.0 to 1.0. If pressure ++ * data is not available, NaN will be set instead. ++ * ++ * Retrieve the details of a #GstNavigation touch-down or touch-motion event. 
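The touch constructors above pair with the new gst_navigation_send_event_simple(). A minimal C sketch (illustrative only, not part of the patch) of how an application could inject a single-finger tap into a sink implementing GstNavigation; the video_sink variable and a GStreamer >= 1.22 build are assumed:

    #include <gst/gst.h>
    #include <gst/video/navigation.h>

    /* Send touch-down, touch-up and a closing touch-frame for one tap.
     * gst_navigation_send_event_simple() consumes each event (transfer full). */
    static void
    send_tap (GstElement * video_sink, gdouble x, gdouble y)
    {
      GstNavigation *nav = GST_NAVIGATION (video_sink);

      gst_navigation_send_event_simple (nav,
          gst_navigation_event_new_touch_down (0, x, y, 1.0,
              GST_NAVIGATION_MODIFIER_NONE));
      gst_navigation_send_event_simple (nav,
          gst_navigation_event_new_touch_up (0, x, y,
              GST_NAVIGATION_MODIFIER_NONE));
      gst_navigation_send_event_simple (nav,
          gst_navigation_event_new_touch_frame (GST_NAVIGATION_MODIFIER_NONE));
    }

The trailing touch-frame event groups the down/up pair as simultaneous, matching the libinput-style semantics documented for the touch event types.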
++ * Determine which type the event is using gst_navigation_event_get_type() ++ * to retrieve the #GstNavigationEventType. ++ * ++ * Returns: TRUE if all details could be extracted, otherwise FALSE. ++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_navigation_event_parse_touch_event (GstEvent * event, guint * identifier, ++ gdouble * x, gdouble * y, gdouble * pressure) ++{ ++ GstNavigationEventType e_type; ++ const GstStructure *s; ++ gboolean ret = TRUE; ++ ++ e_type = gst_navigation_event_get_type (event); ++ g_return_val_if_fail (e_type == GST_NAVIGATION_EVENT_TOUCH_DOWN || ++ e_type == GST_NAVIGATION_EVENT_TOUCH_MOTION, FALSE); ++ ++ s = gst_event_get_structure (event); ++ if (identifier) ++ ret &= gst_structure_get_uint (s, "identifier", identifier); ++ if (x) ++ ret &= gst_structure_get_double (s, "pointer_x", x); ++ if (y) ++ ret &= gst_structure_get_double (s, "pointer_y", y); ++ if (pressure) ++ ret &= gst_structure_get_double (s, "pressure", pressure); ++ ++ WARN_IF_FAIL (ret, "Couldn't extract details from touch event"); ++ ++ return ret; ++} ++ ++/** ++ * gst_navigation_event_parse_touch_up_event: ++ * @event: A #GstEvent to inspect. ++ * @identifier: (out) (optional): Pointer to a guint that will receive the ++ * identifier unique to this touch point. ++ * @x: (out) (optional): Pointer to a gdouble that will receive the x ++ * coordinate of the touch event. ++ * @y: (out) (optional): Pointer to a gdouble that will receive the y ++ * coordinate of the touch event. ++ * ++ * Retrieve the details of a #GstNavigation touch-up event. ++ * ++ * Returns: TRUE if all details could be extracted, otherwise FALSE. ++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_navigation_event_parse_touch_up_event (GstEvent * event, ++ guint * identifier, gdouble * x, gdouble * y) ++{ ++ const GstStructure *s; ++ gboolean ret = TRUE; ++ ++ g_return_val_if_fail (GST_NAVIGATION_EVENT_HAS_TYPE (event, TOUCH_UP), FALSE); ++ ++ s = gst_event_get_structure (event); ++ if (identifier) ++ ret &= gst_structure_get_uint (s, "identifier", identifier); ++ if (x) ++ ret &= gst_structure_get_double (s, "pointer_x", x); ++ if (y) ++ ret &= gst_structure_get_double (s, "pointer_y", y); ++ ++ WARN_IF_FAIL (ret, "Couldn't extract details from touch-up event"); ++ ++ return ret; ++} ++ ++/** ++ * gst_navigation_event_get_coordinates: ++ * @event: The #GstEvent to inspect. ++ * @x: (out) (optional): Pointer to a gdouble to receive the x coordinate of the ++ * navigation event. ++ * @y: (out) (optional): Pointer to a gdouble to receive the y coordinate of the ++ * navigation event. ++ * ++ * Try to retrieve x and y coordinates of a #GstNavigation event. ++ * ++ * Returns: A boolean indicating success. 
++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_navigation_event_get_coordinates (GstEvent * event, ++ gdouble * x, gdouble * y) ++{ ++ GstNavigationEventType e_type; ++ const GstStructure *s; ++ gboolean ret = TRUE; ++ ++ e_type = gst_navigation_event_get_type (event); ++ if (e_type != GST_NAVIGATION_EVENT_MOUSE_MOVE ++ && e_type != GST_NAVIGATION_EVENT_MOUSE_BUTTON_PRESS ++ && e_type != GST_NAVIGATION_EVENT_MOUSE_BUTTON_RELEASE ++ && e_type != GST_NAVIGATION_EVENT_TOUCH_DOWN ++ && e_type != GST_NAVIGATION_EVENT_TOUCH_MOTION ++ && e_type != GST_NAVIGATION_EVENT_TOUCH_UP) { ++ return FALSE; ++ } ++ ++ s = gst_event_get_structure (event); ++ if (x) ++ ret &= gst_structure_get_double (s, "pointer_x", x); ++ if (y) ++ ret &= gst_structure_get_double (s, "pointer_y", y); ++ ++ WARN_IF_FAIL (ret, "Couldn't extract coordinates from the event"); ++ ++ return ret; ++} ++ ++/** ++ * gst_navigation_event_set_coordinates: ++ * @event: The #GstEvent to modify. ++ * @x: The x coordinate to set. ++ * @y: The y coordinate to set. ++ * ++ * Try to set x and y coordinates on a #GstNavigation event. The event must ++ * be writable. ++ * ++ * Returns: A boolean indicating success. ++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_navigation_event_set_coordinates (GstEvent * event, gdouble x, gdouble y) ++{ ++ GstNavigationEventType e_type; ++ GstStructure *s; ++ ++ g_return_val_if_fail (gst_event_is_writable (event), FALSE); ++ ++ e_type = gst_navigation_event_get_type (event); ++ if (e_type != GST_NAVIGATION_EVENT_MOUSE_MOVE ++ && e_type != GST_NAVIGATION_EVENT_MOUSE_BUTTON_PRESS ++ && e_type != GST_NAVIGATION_EVENT_MOUSE_BUTTON_RELEASE ++ && e_type != GST_NAVIGATION_EVENT_TOUCH_DOWN ++ && e_type != GST_NAVIGATION_EVENT_TOUCH_MOTION ++ && e_type != GST_NAVIGATION_EVENT_TOUCH_UP) { ++ return FALSE; ++ } ++ ++ s = gst_event_writable_structure (event); ++ gst_structure_set (s, "pointer_x", G_TYPE_DOUBLE, x, ++ "pointer_y", G_TYPE_DOUBLE, y, NULL); ++ ++ return TRUE; ++} ++ ++ ++/** ++ * gst_navigation_event_parse_modifier_state: ++ * @event: The #GstEvent to modify. ++ * @state: a bit-mask representing the state of the modifier keys (e.g. Control, ++ * Shift and Alt). ++ * ++ * Returns: TRUE if the event is a #GstNavigation event with associated ++ * modifiers state, otherwise FALSE. ++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_navigation_event_parse_modifier_state (GstEvent * event, ++ GstNavigationModifierType * state) ++{ ++ GstNavigationEventType e_type; ++ const GstStructure *s; ++ ++ g_return_val_if_fail (GST_IS_EVENT (event), FALSE); ++ ++ e_type = gst_navigation_event_get_type (event); ++ if (e_type == GST_NAVIGATION_EVENT_COMMAND) { ++ return FALSE; ++ } ++ ++ s = gst_event_get_structure (event); ++ if (!gst_structure_get (s, "state", GST_TYPE_NAVIGATION_MODIFIER_TYPE, state, ++ NULL)) ++ *state = GST_NAVIGATION_MODIFIER_NONE; ++ ++ return TRUE; ++} +diff --git a/gst-libs/gst/video/navigation.h b/gst-libs/gst/video/navigation.h +index ced2892f1..650f58241 100644 +--- a/gst-libs/gst/video/navigation.h ++++ b/gst-libs/gst/video/navigation.h +@@ -40,10 +40,83 @@ G_BEGIN_DECLS + typedef struct _GstNavigation GstNavigation; + typedef struct _GstNavigationInterface GstNavigationInterface; + ++/** ++ * GstNavigationModifierType: ++ * @GST_NAVIGATION_MODIFIER_SHIFT_MASK: the Shift key. ++ * @GST_NAVIGATION_MODIFIER_CONTROL_MASK: the Control key. 
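On the receiving side, the new coordinate and modifier-state parsers can be used from a pad probe. A sketch under assumptions (the probe wiring on an upstream-event probe is not part of the patch):

    #include <gst/gst.h>
    #include <gst/video/navigation.h>

    /* Log pointer position and modifier state for navigation events passing
     * through "pad" (probe installed elsewhere with
     * GST_PAD_PROBE_TYPE_EVENT_UPSTREAM). */
    static GstPadProbeReturn
    nav_probe (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
    {
      GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);

      if (GST_EVENT_TYPE (event) == GST_EVENT_NAVIGATION) {
        gdouble x, y;
        GstNavigationModifierType state;

        if (gst_navigation_event_get_coordinates (event, &x, &y) &&
            gst_navigation_event_parse_modifier_state (event, &state))
          g_print ("pointer at %.0f,%.0f (modifiers 0x%x)\n", x, y, (guint) state);
      }
      return GST_PAD_PROBE_OK;
    }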
++ * @GST_NAVIGATION_MODIFIER_MOD1_MASK: the third modifier key ++ * @GST_NAVIGATION_MODIFIER_MOD2_MASK: the fourth modifier key ++ * @GST_NAVIGATION_MODIFIER_MOD3_MASK: the fifth modifier key ++ * @GST_NAVIGATION_MODIFIER_MOD4_MASK: the sixth modifier key ++ * @GST_NAVIGATION_MODIFIER_MOD5_MASK: the seventh modifier key ++ * @GST_NAVIGATION_MODIFIER_BUTTON1_MASK: the first mouse button (usually the left button). ++ * @GST_NAVIGATION_MODIFIER_BUTTON2_MASK: the second mouse button (usually the right button). ++ * @GST_NAVIGATION_MODIFIER_BUTTON3_MASK: the third mouse button (usually the mouse wheel button or middle button). ++ * @GST_NAVIGATION_MODIFIER_BUTTON4_MASK: the fourth mouse button (typically the "Back" button). ++ * @GST_NAVIGATION_MODIFIER_BUTTON5_MASK: the fifth mouse button (typically the "forward" button). ++ * @GST_NAVIGATION_MODIFIER_SUPER_MASK: the Super modifier ++ * @GST_NAVIGATION_MODIFIER_HYPER_MASK: the Hyper modifier ++ * @GST_NAVIGATION_MODIFIER_META_MASK: the Meta modifier ++ * @GST_NAVIGATION_MODIFIER_MASK: A mask covering all entries in #GdkModifierType. ++ * ++ * Flags to indicate the state of modifier keys and mouse buttons ++ * in events. ++ * ++ * Typical modifier keys are Shift, Control, Meta, Super, Hyper, Alt, Compose, ++ * Apple, CapsLock or ShiftLock. ++ * ++ * Since: 1.22 ++ */ ++typedef enum ++{ ++ GST_NAVIGATION_MODIFIER_NONE = 0, ++ GST_NAVIGATION_MODIFIER_SHIFT_MASK = 1 << 0, ++ GST_NAVIGATION_MODIFIER_LOCK_MASK = 1 << 1, ++ GST_NAVIGATION_MODIFIER_CONTROL_MASK = 1 << 2, ++ ++ GST_NAVIGATION_MODIFIER_MOD1_MASK = 1 << 3, ++ GST_NAVIGATION_MODIFIER_MOD2_MASK = 1 << 4, ++ GST_NAVIGATION_MODIFIER_MOD3_MASK = 1 << 5, ++ GST_NAVIGATION_MODIFIER_MOD4_MASK = 1 << 6, ++ GST_NAVIGATION_MODIFIER_MOD5_MASK = 1 << 7, ++ ++ GST_NAVIGATION_MODIFIER_BUTTON1_MASK = 1 << 8, ++ GST_NAVIGATION_MODIFIER_BUTTON2_MASK = 1 << 9, ++ GST_NAVIGATION_MODIFIER_BUTTON3_MASK = 1 << 10, ++ GST_NAVIGATION_MODIFIER_BUTTON4_MASK = 1 << 11, ++ GST_NAVIGATION_MODIFIER_BUTTON5_MASK = 1 << 12, ++ ++ GST_NAVIGATION_MODIFIER_SUPER_MASK = 1 << 26, ++ GST_NAVIGATION_MODIFIER_HYPER_MASK = 1 << 27, ++ GST_NAVIGATION_MODIFIER_META_MASK = 1 << 28, ++ ++ GST_NAVIGATION_MODIFIER_MASK = ( ++ GST_NAVIGATION_MODIFIER_NONE | ++ GST_NAVIGATION_MODIFIER_SHIFT_MASK | ++ GST_NAVIGATION_MODIFIER_LOCK_MASK | ++ GST_NAVIGATION_MODIFIER_CONTROL_MASK | ++ GST_NAVIGATION_MODIFIER_MOD1_MASK | ++ GST_NAVIGATION_MODIFIER_MOD2_MASK | ++ GST_NAVIGATION_MODIFIER_MOD3_MASK | ++ GST_NAVIGATION_MODIFIER_MOD4_MASK | ++ GST_NAVIGATION_MODIFIER_MOD5_MASK | ++ GST_NAVIGATION_MODIFIER_BUTTON1_MASK | ++ GST_NAVIGATION_MODIFIER_BUTTON2_MASK | ++ GST_NAVIGATION_MODIFIER_BUTTON3_MASK | ++ GST_NAVIGATION_MODIFIER_BUTTON4_MASK | ++ GST_NAVIGATION_MODIFIER_BUTTON5_MASK | ++ GST_NAVIGATION_MODIFIER_SUPER_MASK | ++ GST_NAVIGATION_MODIFIER_HYPER_MASK | ++ GST_NAVIGATION_MODIFIER_META_MASK ++ ) ++ ++} GstNavigationModifierType; ++ + /** + * GstNavigationInterface: + * @iface: the parent interface + * @send_event: sending a navigation event ++ * @send_event_simple: sending a navigation event (Since: 1.22) + * + * Navigation interface. + */ +@@ -51,7 +124,26 @@ struct _GstNavigationInterface { + GTypeInterface iface; + + /* virtual functions */ ++ ++ /** ++ * GstNavigationInterface::send_event: ++ * ++ * sending a navigation event. ++ * ++ * Deprecated: 1.22: Use #GstNavigationInterface.send_event_simple() instead. 
++ */ + void (*send_event) (GstNavigation *navigation, GstStructure *structure); ++ ++ /** ++ * GstNavigationInterface::send_event_simple: ++ * @navigation: The navigation interface instance ++ * @event: (transfer full): The event to send ++ * ++ * sending a navigation event. ++ * ++ * Since: 1.22 ++ */ ++ void (*send_event_simple) (GstNavigation *navigation, GstEvent *event); + }; + + GST_VIDEO_API +@@ -263,9 +355,26 @@ gboolean gst_navigation_message_parse_event (GstMessage *message + * @GST_NAVIGATION_EVENT_MOUSE_SCROLL: A mouse scroll event. Use + * gst_navigation_event_parse_mouse_scroll_event() to extract the details from + * the event. (Since: 1.18) ++ * @GST_NAVIGATION_EVENT_TOUCH_DOWN: An event describing a new touch point, ++ * which will be assigned an identifier that is unique to it for the duration ++ * of its movement on the screen. Use gst_navigation_event_parse_touch_event() ++ * to extract the details from the event. (Since: 1.22) ++ * @GST_NAVIGATION_EVENT_TOUCH_MOTION: An event describing the movement of an ++ * active touch point across the screen. Use ++ * gst_navigation_event_parse_touch_event() to extract the details from the ++ * event. (Since: 1.22) ++ * @GST_NAVIGATION_EVENT_TOUCH_UP: An event describing a removed touch point. ++ * After this event, its identifier may be reused for any new touch points. Use ++ * gst_navigation_event_parse_touch_up_event() to extract the details from the ++ * event. (Since: 1.22) ++ * @GST_NAVIGATION_EVENT_TOUCH_FRAME: An event signaling the end of a sequence ++ * of simultaneous touch events. (Since: 1.22) ++ * @GST_NAVIGATION_EVENT_TOUCH_CANCEL: An event cancelling all currently active ++ * touch points. (Since: 1.22) + * + * Enum values for the various events that an element implementing the +- * GstNavigation interface might send up the pipeline. ++ * GstNavigation interface might send up the pipeline. Touch events have been ++ * inspired by the libinput API, and have the same meaning here. + */ + typedef enum { + GST_NAVIGATION_EVENT_INVALID = 0, +@@ -275,12 +384,128 @@ typedef enum { + GST_NAVIGATION_EVENT_MOUSE_BUTTON_RELEASE = 4, + GST_NAVIGATION_EVENT_MOUSE_MOVE = 5, + GST_NAVIGATION_EVENT_COMMAND = 6, +- GST_NAVIGATION_EVENT_MOUSE_SCROLL = 7 ++ ++ /** ++ * GST_NAVIGATION_EVENT_MOUSE_SCROLL: ++ * ++ * A mouse scroll event. Use gst_navigation_event_parse_mouse_scroll_event() ++ * to extract the details from the event. ++ * ++ * Since: 1.18 ++ */ ++ GST_NAVIGATION_EVENT_MOUSE_SCROLL = 7, ++ ++ /** ++ * GST_NAVIGATION_EVENT_TOUCH_DOWN: ++ * ++ * An event describing a new touch point, which will be assigned an identifier ++ * that is unique to it for the duration of its movement on the screen. ++ * Use gst_navigation_event_parse_touch_event() to extract the details ++ * from the event. ++ * ++ * Since: 1.22 ++ */ ++ GST_NAVIGATION_EVENT_TOUCH_DOWN = 8, ++ ++ /** ++ * GST_NAVIGATION_EVENT_TOUCH_MOTION: ++ * ++ * An event describing the movement of an active touch point across ++ * the screen. Use gst_navigation_event_parse_touch_event() to extract ++ * the details from the event. ++ * ++ * Since: 1.22 ++ */ ++ GST_NAVIGATION_EVENT_TOUCH_MOTION = 9, ++ ++ /** ++ * GST_NAVIGATION_EVENT_TOUCH_UP: ++ * ++ * An event describing a removed touch point. After this event, ++ * its identifier may be reused for any new touch points. ++ * Use gst_navigation_event_parse_touch_up_event() to extract the details ++ * from the event. 
++ * ++ * Since: 1.22 ++ */ ++ GST_NAVIGATION_EVENT_TOUCH_UP = 10, ++ ++ /** ++ * GST_NAVIGATION_EVENT_TOUCH_FRAME: ++ * ++ * An event signaling the end of a sequence of simultaneous touch events. ++ * ++ * Since: 1.22 ++ */ ++ GST_NAVIGATION_EVENT_TOUCH_FRAME = 11, ++ ++ /** ++ * GST_NAVIGATION_EVENT_TOUCH_CANCEL: ++ * ++ * An event cancelling all currently active touch points. ++ * ++ * Since: 1.22 ++ */ ++ GST_NAVIGATION_EVENT_TOUCH_CANCEL = 12, + } GstNavigationEventType; + + GST_VIDEO_API + GstNavigationEventType gst_navigation_event_get_type (GstEvent *event); + ++GST_VIDEO_API ++GstEvent* gst_navigation_event_new_key_press (const gchar * key, ++ GstNavigationModifierType state) G_GNUC_MALLOC; ++ ++GST_VIDEO_API ++GstEvent* gst_navigation_event_new_key_release (const gchar * key, ++ GstNavigationModifierType state) G_GNUC_MALLOC; ++ ++GST_VIDEO_API ++GstEvent* gst_navigation_event_new_mouse_button_press (gint button, gdouble x, ++ gdouble y, ++ GstNavigationModifierType state) G_GNUC_MALLOC; ++ ++GST_VIDEO_API ++GstEvent* gst_navigation_event_new_mouse_button_release (gint button, gdouble x, ++ gdouble y, ++ GstNavigationModifierType state) G_GNUC_MALLOC; ++ ++GST_VIDEO_API ++GstEvent* gst_navigation_event_new_mouse_move (gdouble x, ++ gdouble y, ++ GstNavigationModifierType state) G_GNUC_MALLOC; ++ ++GST_VIDEO_API ++GstEvent* gst_navigation_event_new_mouse_scroll (gdouble x, gdouble y, ++ gdouble delta_x, gdouble delta_y, ++ GstNavigationModifierType state) G_GNUC_MALLOC; ++ ++GST_VIDEO_API ++GstEvent* gst_navigation_event_new_command (GstNavigationCommand command) G_GNUC_MALLOC; ++ ++GST_VIDEO_API ++GstEvent* gst_navigation_event_new_touch_down (guint identifier, ++ gdouble x, gdouble y, ++ gdouble pressure, ++ GstNavigationModifierType state) G_GNUC_MALLOC; ++ ++GST_VIDEO_API ++GstEvent* gst_navigation_event_new_touch_motion (guint identifier, ++ gdouble x, gdouble y, ++ gdouble pressure, ++ GstNavigationModifierType state) G_GNUC_MALLOC; ++ ++GST_VIDEO_API ++GstEvent* gst_navigation_event_new_touch_up (guint identifier, ++ gdouble x, gdouble y, ++ GstNavigationModifierType state) G_GNUC_MALLOC; ++ ++GST_VIDEO_API ++GstEvent* gst_navigation_event_new_touch_frame (GstNavigationModifierType state) G_GNUC_MALLOC; ++ ++GST_VIDEO_API ++GstEvent* gst_navigation_event_new_touch_cancel (GstNavigationModifierType state) G_GNUC_MALLOC; ++ + GST_VIDEO_API + gboolean gst_navigation_event_parse_key_event (GstEvent *event, + const gchar **key); +@@ -302,27 +527,54 @@ GST_VIDEO_API + gboolean gst_navigation_event_parse_command (GstEvent *event, + GstNavigationCommand *command); + +-/* interface virtual function wrappers */ +- + GST_VIDEO_API +-void gst_navigation_send_event (GstNavigation *navigation, +- GstStructure *structure); ++gboolean gst_navigation_event_parse_touch_event (GstEvent * event, ++ guint * identifier, ++ gdouble * x, gdouble * y, ++ gdouble * pressure); + + GST_VIDEO_API +-void gst_navigation_send_key_event (GstNavigation *navigation, +- const char *event, const char *key); ++gboolean gst_navigation_event_parse_touch_up_event (GstEvent * event, ++ guint * identifier, ++ gdouble * x, gdouble * y); + + GST_VIDEO_API +-void gst_navigation_send_mouse_event (GstNavigation *navigation, +- const char *event, int button, double x, double y); ++gboolean gst_navigation_event_get_coordinates (GstEvent * event, ++ gdouble * x, gdouble * y); + + GST_VIDEO_API ++gboolean gst_navigation_event_set_coordinates (GstEvent * event, ++ gdouble x, gdouble y); ++ ++/* interface virtual function 
wrappers */ ++ ++GST_VIDEO_DEPRECATED_FOR(gst_navigation_send_event_simple) ++void gst_navigation_send_event (GstNavigation *navigation, ++ GstStructure *structure); ++ ++GST_VIDEO_DEPRECATED_FOR(gst_navigation_send_event_simple) ++void gst_navigation_send_key_event (GstNavigation *navigation, ++ const char *event, const char *key); ++ ++GST_VIDEO_DEPRECATED_FOR(gst_navigation_send_event_simple) ++void gst_navigation_send_mouse_event (GstNavigation *navigation, ++ const char *event, int button, double x, double y); ++ ++GST_VIDEO_DEPRECATED_FOR(gst_navigation_send_event_simple) + void gst_navigation_send_mouse_scroll_event (GstNavigation *navigation, + double x, double y, double delta_x, double delta_y); + ++GST_VIDEO_DEPRECATED_FOR(gst_navigation_send_event_simple) ++void gst_navigation_send_command (GstNavigation *navigation, ++ GstNavigationCommand command); ++ ++GST_VIDEO_API ++void gst_navigation_send_event_simple (GstNavigation *navigation, ++ GstEvent *event); ++ + GST_VIDEO_API +-void gst_navigation_send_command (GstNavigation *navigation, +- GstNavigationCommand command); ++gboolean gst_navigation_event_parse_modifier_state (GstEvent *event, ++ GstNavigationModifierType *state); + + G_END_DECLS + +diff --git a/gst-libs/gst/video/video-chroma.c b/gst-libs/gst/video/video-chroma.c +index f761b4435..2a634073d 100644 +--- a/gst-libs/gst/video/video-chroma.c ++++ b/gst-libs/gst/video/video-chroma.c +@@ -26,7 +26,7 @@ + + #include "video-orc.h" + #include "video-format.h" +- ++#include + + /** + * SECTION:gstvideochroma +@@ -72,7 +72,9 @@ typedef struct + static const ChromaSiteInfo chromasite[] = { + {"jpeg", GST_VIDEO_CHROMA_SITE_JPEG}, + {"mpeg2", GST_VIDEO_CHROMA_SITE_MPEG2}, +- {"dv", GST_VIDEO_CHROMA_SITE_DV} ++ {"dv", GST_VIDEO_CHROMA_SITE_DV}, ++ {"alt-line", GST_VIDEO_CHROMA_SITE_ALT_LINE}, ++ {"cosited", GST_VIDEO_CHROMA_SITE_COSITED}, + }; + + /** +@@ -81,18 +83,66 @@ static const ChromaSiteInfo chromasite[] = { + * + * Convert @s to a #GstVideoChromaSite + * ++ * Deprecated: 1.20: Use gst_video_chroma_site_from_string() instead. ++ * + * Returns: a #GstVideoChromaSite or %GST_VIDEO_CHROMA_SITE_UNKNOWN when @s does + * not contain a valid chroma description. + */ + GstVideoChromaSite + gst_video_chroma_from_string (const gchar * s) ++{ ++ return gst_video_chroma_site_from_string (s); ++} ++ ++/** ++ * gst_video_chroma_site_from_string: ++ * @s: a chromasite string ++ * ++ * Convert @s to a #GstVideoChromaSite ++ * ++ * Returns: a #GstVideoChromaSite or %GST_VIDEO_CHROMA_SITE_UNKNOWN when @s does ++ * not contain a valid chroma-site description. 
++ * ++ * Since: 1.20 ++ */ ++GstVideoChromaSite ++gst_video_chroma_site_from_string (const gchar * s) + { + gint i; ++ gchar **split; ++ gchar **iter; ++ GstVideoChromaSite ret = GST_VIDEO_CHROMA_SITE_UNKNOWN; ++ GFlagsClass *klass; ++ + for (i = 0; i < G_N_ELEMENTS (chromasite); i++) { + if (g_str_equal (chromasite[i].name, s)) + return chromasite[i].site; + } +- return GST_VIDEO_CHROMA_SITE_UNKNOWN; ++ ++ klass = (GFlagsClass *) g_type_class_ref (GST_TYPE_VIDEO_CHROMA_SITE); ++ split = g_strsplit (s, "+", 0); ++ for (iter = split; *iter; iter++) { ++ GFlagsValue *value; ++ ++ value = g_flags_get_value_by_nick (klass, *iter); ++ if (!value) { ++ ret = GST_VIDEO_CHROMA_SITE_UNKNOWN; ++ goto out; ++ } ++ ++ ret |= value->value; ++ } ++ ++out: ++ g_type_class_unref (klass); ++ g_strfreev (split); ++ ++ /* Doesn't make sense */ ++ if ((ret & GST_VIDEO_CHROMA_SITE_NONE) != 0 && ++ ret != GST_VIDEO_CHROMA_SITE_NONE) ++ return GST_VIDEO_CHROMA_SITE_UNKNOWN; ++ ++ return ret; + } + + /** +@@ -101,6 +151,8 @@ gst_video_chroma_from_string (const gchar * s) + * + * Converts @site to its string representation. + * ++ * Deprecated: 1.20: Use gst_video_chroma_site_to_string() instead. ++ * + * Returns: a string describing @site. + */ + const gchar * +@@ -114,6 +166,60 @@ gst_video_chroma_to_string (GstVideoChromaSite site) + return NULL; + } + ++/** ++ * gst_video_chroma_site_to_string: ++ * @site: a #GstVideoChromaSite ++ * ++ * Converts @site to its string representation. ++ * ++ * Returns: (transfer full) (nullable): a string representation of @site ++ * or %NULL if @site contains undefined value or ++ * is equal to %GST_VIDEO_CHROMA_SITE_UNKNOWN ++ * ++ * Since: 1.20 ++ */ ++gchar * ++gst_video_chroma_site_to_string (GstVideoChromaSite site) ++{ ++ gint i; ++ GString *str; ++ GFlagsValue *value; ++ GFlagsClass *klass; ++ ++ /* return null string for GST_VIDEO_CHROMA_SITE_UNKNOWN */ ++ if (site == 0) ++ return NULL; ++ ++ for (i = 0; i < G_N_ELEMENTS (chromasite); i++) { ++ if (chromasite[i].site == site) ++ return g_strdup (chromasite[i].name); ++ } ++ ++ /* Doesn't make sense */ ++ if ((site & GST_VIDEO_CHROMA_SITE_NONE) != 0 && ++ site != GST_VIDEO_CHROMA_SITE_NONE) ++ return NULL; ++ ++ /* Construct new string */ ++ klass = (GFlagsClass *) g_type_class_ref (GST_TYPE_VIDEO_CHROMA_SITE); ++ str = g_string_new (NULL); ++ while (site != GST_VIDEO_CHROMA_SITE_UNKNOWN && ++ (value = g_flags_get_first_value (klass, site))) { ++ if (str->len > 0) ++ g_string_append (str, "+"); ++ ++ g_string_append (str, value->value_nick); ++ site &= ~value->value; ++ } ++ g_type_class_unref (klass); ++ ++ /* This means given chroma-site has unknown value */ ++ if (site != 0) ++ return g_string_free (str, TRUE); ++ ++ return g_string_free (str, FALSE); ++} ++ + struct _GstVideoChromaResample + { + GstVideoChromaMethod method; +@@ -154,9 +260,9 @@ struct _GstVideoChromaResample + #define FILT_5_3(a,b) (5*(a) + 3*(b) + 4) >> 3 + #define FILT_3_5(a,b) (3*(a) + 5*(b) + 4) >> 3 + +-#define FILT_10_3_2_1(a,b,c,d) (10*(a) + 3*(b) + 2*(c) + (d) + 8) >> 16 +-#define FILT_1_2_3_10(a,b,c,d) ((a) + 2*(b) + 3*(c) + 10*(d) + 8) >> 16 +-#define FILT_1_2_3_4_3_2_1(a,b,c,d,e,f,g) ((a) + 2*((b)+(f)) + 3*((c)+(e)) + 4*(d) + (g) + 8) >> 16 ++#define FILT_10_3_2_1(a,b,c,d) (10*(a) + 3*(b) + 2*(c) + (d) + 8) >> 4 ++#define FILT_1_2_3_10(a,b,c,d) ((a) + 2*(b) + 3*(c) + 10*(d) + 8) >> 4 ++#define FILT_1_2_3_4_3_2_1(a,b,c,d,e,f,g) ((a) + 2*((b)+(f)) + 3*((c)+(e)) + 4*(d) + (g) + 8) >> 4 + + /* 2x horizontal upsampling without cositing + * +@@ 
-1005,8 +1111,8 @@ gst_video_chroma_resample_new (GstVideoChromaMethod method, + /** + * gst_video_chroma_resample_get_info: + * @resample: a #GstVideoChromaResample +- * @n_lines: the number of input lines +- * @offset: the first line ++ * @n_lines: (out) (optional): the number of input lines ++ * @offset: (out) (optional): the first line + * + * The resampler must be fed @n_lines at a time. The first line should be + * at @offset. +diff --git a/gst-libs/gst/video/video-chroma.h b/gst-libs/gst/video/video-chroma.h +index 5240078c0..16720b4b5 100644 +--- a/gst-libs/gst/video/video-chroma.h ++++ b/gst-libs/gst/video/video-chroma.h +@@ -52,12 +52,18 @@ typedef enum { + GST_VIDEO_CHROMA_SITE_DV = (GST_VIDEO_CHROMA_SITE_COSITED | GST_VIDEO_CHROMA_SITE_ALT_LINE), + } GstVideoChromaSite; + +-GST_VIDEO_API ++GST_VIDEO_DEPRECATED_FOR(gst_video_chroma_site_from_string) + GstVideoChromaSite gst_video_chroma_from_string (const gchar * s); + +-GST_VIDEO_API ++GST_VIDEO_DEPRECATED_FOR(gst_video_chroma_site_to_string) + const gchar * gst_video_chroma_to_string (GstVideoChromaSite site); + ++GST_VIDEO_API ++GstVideoChromaSite gst_video_chroma_site_from_string (const gchar * s); ++ ++GST_VIDEO_API ++gchar * gst_video_chroma_site_to_string (GstVideoChromaSite site); ++ + /** + * GstVideoChromaMethod: + * @GST_VIDEO_CHROMA_METHOD_NEAREST: Duplicates the chroma samples when +diff --git a/gst-libs/gst/video/video-color.c b/gst-libs/gst/video/video-color.c +index 01f8d1773..90fa380b1 100644 +--- a/gst-libs/gst/video/video-color.c ++++ b/gst-libs/gst/video/video-color.c +@@ -269,6 +269,38 @@ gst_video_colorimetry_is_equal (const GstVideoColorimetry * cinfo, + return CI_IS_EQUAL (cinfo, other); + } + ++/** ++ * gst_video_colorimetry_is_equivalent: ++ * @cinfo: a #GstVideoColorimetry ++ * @bitdepth: bitdepth of a format associated with @cinfo ++ * @other: another #GstVideoColorimetry ++ * @other_bitdepth: bitdepth of a format associated with @other ++ * ++ * Compare the 2 colorimetry sets for functionally equality ++ * ++ * Returns: %TRUE if @cinfo and @other are equivalent. ++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_video_colorimetry_is_equivalent (const GstVideoColorimetry * cinfo, ++ guint bitdepth, const GstVideoColorimetry * other, guint other_bitdepth) ++{ ++ g_return_val_if_fail (cinfo != NULL, FALSE); ++ g_return_val_if_fail (other != NULL, FALSE); ++ ++ if (cinfo->range != other->range || cinfo->matrix != other->matrix) ++ return FALSE; ++ ++ if (!gst_video_color_primaries_is_equivalent (cinfo->primaries, ++ other->primaries)) { ++ return FALSE; ++ } ++ ++ return gst_video_transfer_function_is_equivalent (cinfo->transfer, bitdepth, ++ other->transfer, other_bitdepth); ++} ++ + #define WP_C 0.31006, 0.31616 + #define WP_D65 0.31271, 0.32902 + #define WP_CENTRE (1/3), (1/3) +@@ -319,6 +351,35 @@ gst_video_color_primaries_get_info (GstVideoColorPrimaries primaries) + return &color_primaries[primaries]; + } + ++/** ++ * gst_video_color_primaries_is_equivalent: ++ * @primaries: a #GstVideoColorPrimaries ++ * @other: another #GstVideoColorPrimaries ++ * ++ * Checks whether @primaries and @other are functionally equivalent ++ * ++ * Returns: TRUE if @primaries and @other can be considered equivalent. 
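For the chroma-site helpers introduced above, a short usage sketch (illustrative, not part of the patch) of the 1.20 replacements for the deprecated gst_video_chroma_from_string()/gst_video_chroma_to_string() pair; combined flags are joined with "+" in the string form:

    #include <gst/gst.h>
    #include <gst/video/video-chroma.h>

    /* Round-trip a chroma-site description through the new helpers. */
    static void
    chroma_site_roundtrip (void)
    {
      GstVideoChromaSite site = gst_video_chroma_site_from_string ("jpeg");
      gchar *str = gst_video_chroma_site_to_string (site);   /* -> "jpeg" */

      g_assert (site == GST_VIDEO_CHROMA_SITE_JPEG);
      g_free (str);   /* unlike the deprecated variant, the string is caller-owned */
    }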
++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_video_color_primaries_is_equivalent (GstVideoColorPrimaries primaries, ++ GstVideoColorPrimaries other) ++{ ++ if (primaries == other) ++ return TRUE; ++ ++ /* smpte-170m and 240m use the same reference RGB primaries and white point */ ++ if ((primaries == GST_VIDEO_COLOR_PRIMARIES_SMPTE170M || ++ primaries == GST_VIDEO_COLOR_PRIMARIES_SMPTE240M) && ++ (other == GST_VIDEO_COLOR_PRIMARIES_SMPTE170M || ++ other == GST_VIDEO_COLOR_PRIMARIES_SMPTE240M)) { ++ return TRUE; ++ } ++ ++ return FALSE; ++} ++ + /** + * gst_video_color_matrix_get_Kr_Kb: + * @matrix: a #GstVideoColorMatrix +@@ -399,6 +460,21 @@ gst_video_color_matrix_get_Kr_Kb (GstVideoColorMatrix matrix, gdouble * Kr, + * @func: a #GstVideoTransferFunction + * @val: a value + * ++ * Deprecated: 1.20: Use gst_video_transfer_function_encode() instead. ++ * ++ * Since: 1.6 ++ */ ++gdouble ++gst_video_color_transfer_encode (GstVideoTransferFunction func, gdouble val) ++{ ++ return gst_video_transfer_function_encode (func, val); ++} ++ ++/** ++ * gst_video_transfer_function_encode: ++ * @func: a #GstVideoTransferFunction ++ * @val: a value ++ * + * Convert @val to its gamma encoded value. + * + * For a linear value L in the range [0..1], conversion to the non-linear +@@ -413,10 +489,10 @@ gst_video_color_matrix_get_Kr_Kb (GstVideoColorMatrix matrix, gdouble * Kr, + * + * Returns: the gamma encoded value of @val + * +- * Since: 1.6 ++ * Since: 1.20 + */ + gdouble +-gst_video_color_transfer_encode (GstVideoTransferFunction func, gdouble val) ++gst_video_transfer_function_encode (GstVideoTransferFunction func, gdouble val) + { + gdouble res; + +@@ -481,16 +557,16 @@ gst_video_color_transfer_encode (GstVideoTransferFunction func, gdouble val) + break; + case GST_VIDEO_TRANSFER_SMPTE2084: + { +- gdouble c1 = 3424.0 / 4096.0; /* c3 - c2 + 1 */ +- gdouble c2 = 32 * 2413 / 4096.0; +- gdouble c3 = 32 * 2392 / 4096.0; +- gdouble m = 128 * 2523 / 4096.0; +- gdouble n = 0.25 * 2610 / 4096.0; +- gdouble Ln = pow (val, n); ++ gdouble c1 = 0.8359375; ++ gdouble c2 = 18.8515625; ++ gdouble c3 = 18.6875; ++ gdouble m1 = 0.1593017578125; ++ gdouble m2 = 78.84375; ++ gdouble Ln = pow (val, m1); + + /* val equal to 1 for peak white is ordinarily intended to + * correspond to a reference output luminance level of 10000 cd/m^2 */ +- res = pow ((c1 + c2 * Ln) / (1.0 + c3 * Ln), m); ++ res = pow ((c1 + c2 * Ln) / (1.0 + c3 * Ln), m2); + break; + } + case GST_VIDEO_TRANSFER_ARIB_STD_B67: +@@ -516,8 +592,23 @@ gst_video_color_transfer_encode (GstVideoTransferFunction func, gdouble val) + * @func: a #GstVideoTransferFunction + * @val: a value + * ++ * Deprecated: 1.20: Use gst_video_transfer_function_decode() instead. ++ * ++ * Since: 1.6 ++ */ ++gdouble ++gst_video_color_transfer_decode (GstVideoTransferFunction func, gdouble val) ++{ ++ return gst_video_transfer_function_decode (func, val); ++} ++ ++/** ++ * gst_video_transfer_function_decode: ++ * @func: a #GstVideoTransferFunction ++ * @val: a value ++ * + * Convert @val to its gamma decoded value. This is the inverse operation of +- * @gst_video_color_transfer_encode(). ++ * gst_video_color_transfer_encode(). 
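A small sketch (illustrative only) of the renamed transfer-function helpers, which the deprecated gst_video_color_transfer_*() wrappers now forward to:

    #include <gst/video/video-color.h>

    /* Round-trip a linear sample value through the SMPTE ST 2084 (PQ) curve
     * using the 1.20 function names. */
    static void
    pq_roundtrip (void)
    {
      gdouble linear = 0.5;
      gdouble pq = gst_video_transfer_function_encode (GST_VIDEO_TRANSFER_SMPTE2084,
          linear);
      gdouble back = gst_video_transfer_function_decode (GST_VIDEO_TRANSFER_SMPTE2084,
          pq);

      /* back equals linear again, up to rounding */
      (void) back;
    }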
+ * + * For a non-linear value L' in the range [0..1], conversion to the linear + * L is in general performed with a power function like: +@@ -531,10 +622,10 @@ gst_video_color_transfer_encode (GstVideoTransferFunction func, gdouble val) + * + * Returns: the gamma decoded value of @val + * +- * Since: 1.6 ++ * Since: 1.20 + */ + gdouble +-gst_video_color_transfer_decode (GstVideoTransferFunction func, gdouble val) ++gst_video_transfer_function_decode (GstVideoTransferFunction func, gdouble val) + { + gdouble res; + +@@ -599,14 +690,15 @@ gst_video_color_transfer_decode (GstVideoTransferFunction func, gdouble val) + break; + case GST_VIDEO_TRANSFER_SMPTE2084: + { +- gdouble c1 = 3424.0 / 4096.0; /* c3 - c2 + 1 */ +- gdouble c2 = 32 * 2413 / 4096.0; +- gdouble c3 = 32 * 2392 / 4096.0; +- gdouble mi = 1 / (128 * 2523 / 4096.0); +- gdouble ni = 1 / (0.25 * 2610 / 4096.0); +- gdouble nm = pow (val, mi); +- +- res = pow ((nm - c1) / (c2 - c3 * nm), ni); ++ gdouble c1 = 0.8359375; ++ gdouble c2 = 18.8515625; ++ gdouble c3 = 18.6875; ++ gdouble m1 = 0.1593017578125; ++ gdouble m2 = 78.84375; ++ gdouble tmp = pow (val, 1 / m2); ++ gdouble tmp2 = MAX (tmp - c1, 0.0f); ++ ++ res = pow (tmp2 / (c2 - c3 * tmp), 1 / m1); + break; + } + case GST_VIDEO_TRANSFER_ARIB_STD_B67: +diff --git a/gst-libs/gst/video/video-color.h b/gst-libs/gst/video/video-color.h +index ba99e079a..ccdf64cf9 100644 +--- a/gst-libs/gst/video/video-color.h ++++ b/gst-libs/gst/video/video-color.h +@@ -138,11 +138,15 @@ typedef enum { + GST_VIDEO_TRANSFER_BT601 + } GstVideoTransferFunction; + ++GST_VIDEO_DEPRECATED_FOR(gst_video_transfer_function_encode) ++gdouble gst_video_color_transfer_encode (GstVideoTransferFunction func, gdouble val); + GST_VIDEO_API +-gdouble gst_video_color_transfer_encode (GstVideoTransferFunction func, gdouble val); ++gdouble gst_video_transfer_function_encode (GstVideoTransferFunction func, gdouble val); + ++GST_VIDEO_DEPRECATED_FOR(gst_video_transfer_function_decode) ++gdouble gst_video_color_transfer_decode (GstVideoTransferFunction func, gdouble val); + GST_VIDEO_API +-gdouble gst_video_color_transfer_decode (GstVideoTransferFunction func, gdouble val); ++gdouble gst_video_transfer_function_decode (GstVideoTransferFunction func, gdouble val); + + /** + * GstVideoColorPrimaries: +@@ -218,6 +222,10 @@ GST_VIDEO_API + const GstVideoColorPrimariesInfo * + gst_video_color_primaries_get_info (GstVideoColorPrimaries primaries); + ++GST_VIDEO_API ++gboolean gst_video_color_primaries_is_equivalent (GstVideoColorPrimaries primaries, ++ GstVideoColorPrimaries other); ++ + /** + * GstVideoColorimetry: + * @range: the color range. This is the valid range for the samples. +@@ -258,6 +266,12 @@ gchar * gst_video_colorimetry_to_string (const GstVideoColorimetry *cinfo + GST_VIDEO_API + gboolean gst_video_colorimetry_is_equal (const GstVideoColorimetry *cinfo, const GstVideoColorimetry *other); + ++GST_VIDEO_API ++gboolean gst_video_colorimetry_is_equivalent (const GstVideoColorimetry *cinfo, ++ guint bitdepth, ++ const GstVideoColorimetry *other, ++ guint other_bitdepth); ++ + /* compute offset and scale */ + + GST_VIDEO_API +diff --git a/gst-libs/gst/video/video-hdr.c b/gst-libs/gst/video/video-hdr.c +index 41c3131d1..280d8e83b 100644 +--- a/gst-libs/gst/video/video-hdr.c ++++ b/gst-libs/gst/video/video-hdr.c +@@ -41,24 +41,6 @@ + (m)->max_display_mastering_luminance, \ + (m)->min_display_mastering_luminance + +-/* g_ascii_string_to_unsigned is available since 2.54. 
Get rid of this wrapper +- * when we bump the version in 1.18 */ +-#if !GLIB_CHECK_VERSION(2,54,0) +-#define g_ascii_string_to_unsigned video_hdr_ascii_string_to_unsigned +-static gboolean +-video_hdr_ascii_string_to_unsigned (const gchar * str, guint base, guint64 min, +- guint64 max, guint64 * out_num, GError ** error) +-{ +- gchar *endptr = NULL; +- *out_num = g_ascii_strtoull (str, &endptr, base); +- if (errno) +- return FALSE; +- if (endptr == str) +- return FALSE; +- return TRUE; +-} +-#endif +- + /** + * gst_video_mastering_display_info_init: + * @minfo: a #GstVideoMasteringDisplayInfo +@@ -352,6 +334,29 @@ gst_video_content_light_level_to_string (const GstVideoContentLightLevel * + linfo->max_content_light_level, linfo->max_frame_average_light_level); + } + ++/** ++ * gst_video_content_light_level_is_equal: ++ * @linfo: a #GstVideoContentLightLevel ++ * @other: a #GstVideoContentLightLevel ++ * ++ * Checks equality between @linfo and @other. ++ * ++ * Returns: %TRUE if @linfo and @other are equal. ++ * ++ * Since: 1.20 ++ */ ++gboolean ++gst_video_content_light_level_is_equal (const GstVideoContentLightLevel * linfo, ++ const GstVideoContentLightLevel * other) ++{ ++ g_return_val_if_fail (linfo != NULL, FALSE); ++ g_return_val_if_fail (other != NULL, FALSE); ++ ++ return (linfo->max_content_light_level == other->max_content_light_level && ++ linfo->max_frame_average_light_level == ++ other->max_frame_average_light_level); ++} ++ + /** + * gst_video_content_light_level_from_caps: + * @linfo: a #GstVideoContentLightLevel +diff --git a/gst-libs/gst/video/video-hdr.h b/gst-libs/gst/video/video-hdr.h +index 35765cb80..b6ce36023 100644 +--- a/gst-libs/gst/video/video-hdr.h ++++ b/gst-libs/gst/video/video-hdr.h +@@ -125,6 +125,10 @@ gboolean gst_video_content_light_level_from_string (GstVideoContentLightLevel + GST_VIDEO_API + gchar * gst_video_content_light_level_to_string (const GstVideoContentLightLevel * linfo); + ++GST_VIDEO_API ++gboolean gst_video_content_light_level_is_equal (const GstVideoContentLightLevel * linfo, ++ const GstVideoContentLightLevel * other); ++ + GST_VIDEO_API + gboolean gst_video_content_light_level_from_caps (GstVideoContentLightLevel * linfo, + const GstCaps * caps); +diff --git a/gst-libs/gst/video/video-sei.c b/gst-libs/gst/video/video-sei.c +new file mode 100644 +index 000000000..0b981c407 +--- /dev/null ++++ b/gst-libs/gst/video/video-sei.c +@@ -0,0 +1,236 @@ ++/* GStreamer ++ * Copyright (C) 2021 Fluendo S.A. ++ * Authors: Andoni Morales Alastruey ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++#ifdef HAVE_CONFIG_H ++# include "config.h" ++#endif ++ ++#include ++#include ++#include "video-sei.h" ++ ++/** ++ * SECTION:gstvideosei ++ * @title: GstVideo SEI Unregistered User Data ++ * @short_description: Utilities for SEI User Data Unregistered ++ * ++ * A collection of objects and methods to assist with SEI User Data Unregistered ++ * metadata in H.264 and H.265 streams. ++ * ++ * Since: 1.22 ++ */ ++ ++#ifndef GST_DISABLE_GST_DEBUG ++#define GST_CAT_DEFAULT ensure_debug_category() ++static GstDebugCategory * ++ensure_debug_category (void) ++{ ++ static gsize cat_gonce = 0; ++ ++ if (g_once_init_enter (&cat_gonce)) { ++ gsize cat_done; ++ ++ cat_done = (gsize) _gst_debug_category_new ("video-sei", 0, ++ "H.264 / H.265 SEI messages utilities"); ++ ++ g_once_init_leave (&cat_gonce, cat_done); ++ } ++ ++ return (GstDebugCategory *) cat_gonce; ++} ++#else ++#define ensure_debug_category() /* NOOP */ ++#endif /* GST_DISABLE_GST_DEBUG */ ++ ++/* SEI User Data Unregistered implementation */ ++ ++/** ++ * gst_video_sei_user_data_unregistered_meta_api_get_type: ++ * ++ * Returns: #GType for the #GstVideoSEIUserDataUnregisteredMeta structure. ++ * ++ * Since: 1.22 ++ */ ++GType ++gst_video_sei_user_data_unregistered_meta_api_get_type (void) ++{ ++ static GType type = 0; ++ ++ if (g_once_init_enter (&type)) { ++ static const gchar *tags[] = { ++ GST_META_TAG_VIDEO_STR, ++ NULL ++ }; ++ GType _type = ++ gst_meta_api_type_register ("GstVideoSEIUserDataUnregisteredMetaAPI", ++ tags); ++ g_once_init_leave (&type, _type); ++ } ++ return type; ++} ++ ++static gboolean ++gst_video_sei_user_data_unregistered_meta_init (GstMeta * meta, gpointer params, ++ GstBuffer * buffer) ++{ ++ GstVideoSEIUserDataUnregisteredMeta *emeta = ++ (GstVideoSEIUserDataUnregisteredMeta *) meta; ++ ++ emeta->data = NULL; ++ emeta->size = 0; ++ ++ return TRUE; ++} ++ ++static gboolean ++gst_video_sei_user_data_unregistered_meta_transform (GstBuffer * dest, ++ GstMeta * meta, GstBuffer * buffer, GQuark type, gpointer data) ++{ ++ GstVideoSEIUserDataUnregisteredMeta *smeta = ++ (GstVideoSEIUserDataUnregisteredMeta *) meta; ++ ++ if (GST_META_TRANSFORM_IS_COPY (type)) { ++ GST_DEBUG ("copy SEI User Data Unregistered metadata"); ++ gst_buffer_add_video_sei_user_data_unregistered_meta (dest, ++ smeta->uuid, smeta->data, smeta->size); ++ return TRUE; ++ } else { ++ /* return FALSE, if transform type is not supported */ ++ return FALSE; ++ } ++} ++ ++static void ++gst_video_sei_user_data_unregistered_meta_free (GstMeta * meta, GstBuffer * buf) ++{ ++ GstVideoSEIUserDataUnregisteredMeta *smeta = ++ (GstVideoSEIUserDataUnregisteredMeta *) meta; ++ ++ g_free (smeta->data); ++ smeta->data = NULL; ++} ++ ++/** ++ * gst_video_sei_user_data_unregistered_meta_get_info: ++ * ++ * Returns: #GstMetaInfo pointer that describes #GstVideoSEIUserDataUnregisteredMeta. 
++ * ++ * Since: 1.22 ++ */ ++const GstMetaInfo * ++gst_video_sei_user_data_unregistered_meta_get_info (void) ++{ ++ static const GstMetaInfo *meta_info = NULL; ++ ++ if (g_once_init_enter ((GstMetaInfo **) & meta_info)) { ++ const GstMetaInfo *mi = ++ gst_meta_register (GST_VIDEO_SEI_USER_DATA_UNREGISTERED_META_API_TYPE, ++ "GstVideoSEIUserDataUnregisteredMeta", ++ sizeof (GstVideoSEIUserDataUnregisteredMeta), ++ gst_video_sei_user_data_unregistered_meta_init, ++ gst_video_sei_user_data_unregistered_meta_free, ++ gst_video_sei_user_data_unregistered_meta_transform); ++ g_once_init_leave ((GstMetaInfo **) & meta_info, (GstMetaInfo *) mi); ++ } ++ return meta_info; ++} ++ ++/** ++ * gst_buffer_add_video_sei_user_data_unregistered_meta: ++ * @buffer: a #GstBuffer ++ * @uuid: User Data Unregistered UUID ++ * @data: (transfer none): SEI User Data Unregistered buffer ++ * @size: size of the data buffer ++ * ++ * Attaches #GstVideoSEIUserDataUnregisteredMeta metadata to @buffer with the given ++ * parameters. ++ * ++ * Returns: (transfer none): the #GstVideoSEIUserDataUnregisteredMeta on @buffer. ++ * ++ * Since: 1.22 ++ */ ++GstVideoSEIUserDataUnregisteredMeta * ++gst_buffer_add_video_sei_user_data_unregistered_meta (GstBuffer * buffer, ++ guint8 uuid[16], guint8 * data, gsize size) ++{ ++ GstVideoSEIUserDataUnregisteredMeta *meta; ++ g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL); ++ g_return_val_if_fail (data != NULL, NULL); ++ ++ meta = (GstVideoSEIUserDataUnregisteredMeta *) gst_buffer_add_meta (buffer, ++ GST_VIDEO_SEI_USER_DATA_UNREGISTERED_META_INFO, NULL); ++ g_assert (meta != NULL); ++ memcpy (meta->uuid, uuid, 16); ++ meta->data = g_malloc (size); ++ memcpy (meta->data, data, size); ++ meta->size = size; ++ ++ return meta; ++} ++ ++/** ++ * gst_video_sei_user_data_unregistered_parse_precision_time_stamp: ++ * @user_data: (transfer none): a #GstVideoSEIUserDataUnregisteredMeta ++ * @status: (out): User Data Unregistered UUID ++ * @precision_time_stamp: (out): The parsed Precision Time Stamp SEI ++ * ++ * Parses and returns the Precision Time Stamp (ST 0604) from the SEI User Data Unregistered buffer ++ * ++ * Returns: True if data is a Precision Time Stamp and it was parsed correctly ++ * ++ * Since: 1.22 ++ */ ++gboolean ++ gst_video_sei_user_data_unregistered_parse_precision_time_stamp ++ (GstVideoSEIUserDataUnregisteredMeta * user_data, guint8 * status, ++ guint64 * precision_time_stamp) { ++ guint8 *data = user_data->data; ++ ++ if (memcmp (user_data->uuid, &H264_MISP_MICROSECTIME, 16) != 0 && ++ memcmp (user_data->uuid, &H265_MISP_MICROSECONDS, 16) != 0 && ++ memcmp (user_data->uuid, &H265_MISP_NANOSECONDS, 16) != 0) { ++ GST_WARNING ++ ("User Data Unregistered UUID is not a known MISP Timestamp UUID"); ++ return FALSE; ++ } ++ ++ if (user_data->size < 12) { ++ GST_WARNING ("MISP Precision Time Stamp data size is too short, ignoring"); ++ return FALSE; ++ } ++ ++ /* Status */ ++ *status = data[0]; ++ ++ *precision_time_stamp = ++ /* Two MS bytes of Time Stamp (microseconds) */ ++ _GST_GET (data, 1, 64, 56) | _GST_GET (data, 2, 64, 48) | ++ /* Start Code Emulation Prevention Byte (0xFF) */ ++ /* Two next MS bytes of Time Stamp (microseconds) */ ++ _GST_GET (data, 4, 64, 40) | _GST_GET (data, 5, 64, 32) | ++ /* Start Code Emulation Prevention Byte (0xFF) */ ++ /* Two LS bytes of Time Stamp (microseconds) */ ++ _GST_GET (data, 7, 64, 24) | _GST_GET (data, 8, 64, 16) | ++ /* Start Code Emulation Prevention Byte (0xFF) */ ++ /* Two next LS bytes of Time Stamp (microseconds) */ ++ 
_GST_GET (data, 10, 64, 8) | _GST_GET (data, 11, 64, 0); ++ ++ return TRUE; ++} +diff --git a/gst-libs/gst/video/video-sei.h b/gst-libs/gst/video/video-sei.h +new file mode 100644 +index 000000000..cb5d7bea4 +--- /dev/null ++++ b/gst-libs/gst/video/video-sei.h +@@ -0,0 +1,110 @@ ++/* GStreamer ++ * Copyright (C) <2021> Fluendo S.A. ++ * Authors: Andoni Morales Alastruey ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifndef __GST_VIDEO_SEI_USER_DATA_UNREGISTERED_H__ ++#define __GST_VIDEO_SEI_USER_DATA_UNREGISTERED_H__ ++ ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++static const guint8 H264_MISP_MICROSECTIME[] = { ++ 0x4D, 0x49, 0x53, 0x50, 0x6D, 0x69, 0x63, 0x72, ++ 0x6F, 0x73, 0x65, 0x63, 0x74, 0x69, 0x6D, 0x65 ++}; ++ ++static const guint8 H265_MISP_MICROSECONDS[] = { ++ 0xA8, 0x68, 0x7D, 0xD4, 0xD7, 0x59, 0x37, 0x58, ++ 0xA5, 0xCE, 0xF0, 0x33, 0x8B, 0x65, 0x45, 0xF1 ++}; ++ ++static const guint8 H265_MISP_NANOSECONDS[] = { ++ 0xCF, 0x84, 0x82, 0x78, 0xEE, 0x23, 0x30, 0x6C, ++ 0x92, 0x65, 0xE8, 0xFE, 0xF2, 0x2F, 0xB8, 0xB8 ++}; ++ ++/** ++ * GstVideoSEIUserDataUnregisteredMeta: ++ * @meta: parent #GstMeta ++ * @uuid: User Data Unregistered UUID ++ * @data: Unparsed data buffer ++ * @size: Size of the data buffer ++ * ++ * H.264 H.265 metadata from SEI User Data Unregistered messages ++ * ++ * Since: 1.22 ++ */ ++typedef struct { ++ GstMeta meta; ++ ++ guint8 uuid[16]; ++ guint8 *data; ++ gsize size; ++} GstVideoSEIUserDataUnregisteredMeta; ++ ++GST_VIDEO_API ++GType gst_video_sei_user_data_unregistered_meta_api_get_type (void); ++/** ++ * GST_VIDEO_SEI_USER_DATA_UNREGISTERED_META_API_TYPE: ++ * ++ * Since: 1.22 ++ */ ++#define GST_VIDEO_SEI_USER_DATA_UNREGISTERED_META_API_TYPE (\ ++ gst_video_sei_user_data_unregistered_meta_api_get_type()) ++ ++GST_VIDEO_API ++const GstMetaInfo *gst_video_sei_user_data_unregistered_meta_get_info (void); ++/** ++ * GST_VIDEO_SEI_USER_DATA_UNREGISTERED_META_INFO: ++ * ++ * Since: 1.22 ++ */ ++#define GST_VIDEO_SEI_USER_DATA_UNREGISTERED_META_INFO (\ ++ gst_video_sei_user_data_unregistered_meta_get_info()) ++ ++/** ++ * gst_buffer_get_video_sei_user_data_unregistered_meta: ++ * @b: A #GstBuffer ++ * ++ * Gets the GstVideoSEIUserDataUnregisteredMeta that might be present on @b. 
++ * ++ * Returns: (nullable): The first #GstVideoSEIUserDataUnregisteredMeta present on @b, or %NULL if ++ * no #GstVideoSEIUserDataUnregisteredMeta are present ++ * ++ * Since: 1.22 ++ */ ++#define gst_buffer_get_video_sei_user_data_unregistered_meta(b) \ ++ ((GstVideoSEIUserDataUnregisteredMeta*)gst_buffer_get_meta((b),GST_VIDEO_SEI_USER_DATA_UNREGISTERED_META_API_TYPE)) ++ ++GST_VIDEO_API ++GstVideoSEIUserDataUnregisteredMeta *gst_buffer_add_video_sei_user_data_unregistered_meta (GstBuffer * buffer, ++ guint8 uuid[16], ++ guint8 * data, ++ gsize size); ++ ++GST_VIDEO_API ++gboolean gst_video_sei_user_data_unregistered_parse_precision_time_stamp (GstVideoSEIUserDataUnregisteredMeta * user_data, ++ guint8 * status, ++ guint64 * precision_time_stamp); ++ ++G_END_DECLS ++ ++#endif /* __GST_VIDEO_SEI_USER_DATA_UNREGISTERED_H__ */ +diff --git a/gst-libs/gst/video/video.c b/gst-libs/gst/video/video.c +index 074304457..6c3e913bc 100644 +--- a/gst-libs/gst/video/video.c ++++ b/gst-libs/gst/video/video.c +@@ -183,6 +183,44 @@ gst_video_guess_framerate (GstClockTime duration, gint * dest_n, gint * dest_d) + return (best_error != G_MAXUINT64); + } + ++/** ++ * gst_video_is_common_aspect_ratio: ++ * @width: Width of the video frame ++ * @height: Height of the video frame ++ * @par_n: Pixel aspect ratio numerator ++ * @par_d: Pixel aspect ratio denominator ++ * ++ * Given a frame's dimensions and pixel aspect ratio, this function will ++ * calculate the frame's aspect ratio and compare it against a set of ++ * common well-known "standard" aspect ratios. ++ * ++ * Returns: %TRUE if a known "standard" aspect ratio was ++ * recognised, and %FALSE otherwise. ++ * ++ * Since: 1.22 ++ */ ++gboolean ++gst_video_is_common_aspect_ratio (gint width, gint height, gint par_n, ++ gint par_d) ++{ ++ gint dar_n, dar_d; ++ ++ gst_util_fraction_multiply (width, height, par_n, par_d, &dar_n, &dar_d); ++ ++ if (dar_n == 16 && dar_d == 9) ++ return TRUE; ++ if (dar_n == 4 && dar_d == 3) ++ return TRUE; ++ if (dar_n == 14 && dar_d == 9) ++ return TRUE; ++ if (dar_n == 8 && dar_d == 5) ++ return TRUE; ++ if (dar_n == 21 && dar_d == 11) ++ return TRUE; ++ ++ return FALSE; ++} ++ + + /** + * gst_video_alignment_reset: +@@ -204,3 +242,52 @@ gst_video_alignment_reset (GstVideoAlignment * align) + for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) + align->stride_align[i] = 0; + } ++ ++/** ++ * gst_video_orientation_from_tag: ++ * @taglist: A #GstTagList ++ * @method: (out): The location where to return the orientation. ++ * ++ * Parses the "image-orientation" tag and transforms it into the ++ * #GstVideoOrientationMethod enum. ++ * ++ * Returns: TRUE if there was a valid "image-orientation" tag in the taglist. 
++ * ++ * Since: 1.20 ++ */ ++gboolean ++gst_video_orientation_from_tag (GstTagList * taglist, ++ GstVideoOrientationMethod * method) ++{ ++ gchar *orientation; ++ gboolean ret = TRUE; ++ ++ g_return_val_if_fail (GST_IS_TAG_LIST (taglist), FALSE); ++ g_return_val_if_fail (method != NULL, FALSE); ++ ++ if (!gst_tag_list_get_string (taglist, "image-orientation", &orientation)) ++ return FALSE; ++ ++ if (!g_strcmp0 ("rotate-0", orientation)) ++ *method = GST_VIDEO_ORIENTATION_IDENTITY; ++ else if (!g_strcmp0 ("rotate-90", orientation)) ++ *method = GST_VIDEO_ORIENTATION_90R; ++ else if (!g_strcmp0 ("rotate-180", orientation)) ++ *method = GST_VIDEO_ORIENTATION_180; ++ else if (!g_strcmp0 ("rotate-270", orientation)) ++ *method = GST_VIDEO_ORIENTATION_90L; ++ else if (!g_strcmp0 ("flip-rotate-0", orientation)) ++ *method = GST_VIDEO_ORIENTATION_HORIZ; ++ else if (!g_strcmp0 ("flip-rotate-90", orientation)) ++ *method = GST_VIDEO_ORIENTATION_UR_LL; ++ else if (!g_strcmp0 ("flip-rotate-180", orientation)) ++ *method = GST_VIDEO_ORIENTATION_VERT; ++ else if (!g_strcmp0 ("flip-rotate-270", orientation)) ++ *method = GST_VIDEO_ORIENTATION_UL_LR; ++ else ++ ret = FALSE; ++ ++ g_free (orientation); ++ ++ return ret; ++} +diff --git a/gst-libs/gst/video/video.h b/gst-libs/gst/video/video.h +index c6ca091e9..a43e45a4a 100644 +--- a/gst-libs/gst/video/video.h ++++ b/gst-libs/gst/video/video.h +@@ -89,6 +89,12 @@ typedef enum { + GST_VIDEO_ORIENTATION_CUSTOM, + } GstVideoOrientationMethod; + ++/** ++ * GST_TYPE_VIDEO_ORIENTATION_METHOD: ++ * ++ * Since: 1.20 ++ */ ++ + /* metadata macros */ + /** + * GST_META_TAG_VIDEO_STR: +@@ -143,6 +149,9 @@ GST_VIDEO_API + gboolean gst_video_guess_framerate (GstClockTime duration, + gint * dest_n, gint * dest_d); + ++GST_VIDEO_API ++gboolean gst_video_is_common_aspect_ratio (gint width, gint height, gint par_n, gint par_d); ++ + /* convert/encode video sample from one format to another */ + + typedef void (*GstVideoConvertSampleCallback) (GstSample * sample, GError *error, gpointer user_data); +@@ -161,28 +170,35 @@ GstSample * gst_video_convert_sample (GstSample * sample, + GstClockTime timeout, + GError ** error); + ++ ++GST_VIDEO_API ++gboolean gst_video_orientation_from_tag (GstTagList * taglist, ++ GstVideoOrientationMethod * method); ++ + G_END_DECLS + + #include + #include ++#include + #include ++#include + #include + #include + #include + #include + #include + #include ++#include + #include + #include ++#include + #include +-#include + #include ++#include ++#include + #include + #include + #include +-#include +-#include +-#include +-#include ++#include + + #endif /* __GST_VIDEO_H__ */ +-- +2.47.1 + diff --git a/package/gstreamer1/gst1-plugins-base/gst1-plugins-base.mk b/package/gstreamer1/gst1-plugins-base/gst1-plugins-base.mk index dd90301be5f7..feb760d9032f 100644 --- a/package/gstreamer1/gst1-plugins-base/gst1-plugins-base.mk +++ b/package/gstreamer1/gst1-plugins-base/gst1-plugins-base.mk @@ -313,10 +313,17 @@ endif ifeq ($(BR2_PACKAGE_WPEWEBKIT2_38),y) define GST1_PLUGINS_BASE_APPLY_WPEWEBKIT_EXTRA_PATCHES_POST_HOOK - cd $(@D) && { for P in ../../../package/gstreamer1/gst1-plugins-base/$(GST1_PLUGINS_BASE_VERSION)-wpe-2.38/*.patch; do patch -p1 < "$$P" ; done; } + cd $(@D) && { for P in $(TOPDIR)/$(GST1_PLUGINS_BASE_PKGDIR)/$(GST1_PLUGINS_BASE_VERSION)-wpe-2.38/*.patch; do patch -p1 < "$$P" ; done; } endef endif GST1_PLUGINS_BASE_POST_PATCH_HOOKS += GST1_PLUGINS_BASE_APPLY_WPEWEBKIT_EXTRA_PATCHES_POST_HOOK +ifeq 
($(BR2_PACKAGE_WPEWEBKIT_USE_GSTREAMER_WEBRTC),y) +define GST1_PLUGINS_BASE_APPLY_GSTWEBRTC_PATCHES_POST_HOOK + cd $(@D) && { for P in $(TOPDIR)/$(GST1_PLUGINS_BASE_PKGDIR)/$(GST1_PLUGINS_BASE_VERSION)-gstwebrtc/*.patch; do patch -p1 < "$$P" ; done; } +endef +GST1_PLUGINS_BASE_POST_PATCH_HOOKS += GST1_PLUGINS_BASE_APPLY_GSTWEBRTC_PATCHES_POST_HOOK +endif + $(eval $(meson-package)) diff --git a/package/gstreamer1/gst1-plugins-good/1.18.6-gstwebrtc/0001-Backports-from-GStreamer-1.22.patch b/package/gstreamer1/gst1-plugins-good/1.18.6-gstwebrtc/0001-Backports-from-GStreamer-1.22.patch new file mode 100644 index 000000000000..f5644a6c7f6b --- /dev/null +++ b/package/gstreamer1/gst1-plugins-good/1.18.6-gstwebrtc/0001-Backports-from-GStreamer-1.22.patch @@ -0,0 +1,25445 @@ +From 6639757bc43a702b544feb7297f2beeb2f61653d Mon Sep 17 00:00:00 2001 +From: Philippe Normand +Date: Thu, 19 Jan 2023 14:52:07 +0000 +Subject: [PATCH 1/2] Backports from GStreamer 1.22 + +--- + ext/vpx/gstvp8dec.c | 12 + + ext/vpx/gstvp8enc.c | 81 ++ + ext/vpx/gstvp9dec.c | 117 +- + ext/vpx/gstvp9enc.c | 432 ++++++- + ext/vpx/gstvp9enc.h | 10 +- + ext/vpx/gstvpxcompat.h | 47 + + ext/vpx/gstvpxdec.c | 43 +- + ext/vpx/gstvpxdec.h | 7 + + ext/vpx/gstvpxelement.c | 39 + + ext/vpx/gstvpxelements.h | 36 + + ext/vpx/gstvpxenc.c | 384 +++++- + ext/vpx/gstvpxenc.h | 32 +- + ext/vpx/gstvpxenums.h | 49 + + ext/vpx/meson.build | 16 +- + ext/vpx/plugin.c | 21 +- + gst/rtp/gstasteriskh263.c | 10 +- + gst/rtp/gstasteriskh263.h | 2 - + gst/rtp/gstrtp.c | 488 ++------ + gst/rtp/gstrtpL16depay.c | 10 +- + gst/rtp/gstrtpL16depay.h | 2 - + gst/rtp/gstrtpL16pay.c | 10 +- + gst/rtp/gstrtpL16pay.h | 2 - + gst/rtp/gstrtpL24depay.c | 10 +- + gst/rtp/gstrtpL24depay.h | 2 - + gst/rtp/gstrtpL24pay.c | 10 +- + gst/rtp/gstrtpL24pay.h | 2 - + gst/rtp/gstrtpL8depay.c | 12 +- + gst/rtp/gstrtpL8depay.h | 2 - + gst/rtp/gstrtpL8pay.c | 12 +- + gst/rtp/gstrtpL8pay.h | 2 - + gst/rtp/gstrtpac3depay.c | 14 +- + gst/rtp/gstrtpac3depay.h | 2 - + gst/rtp/gstrtpac3pay.c | 14 +- + gst/rtp/gstrtpac3pay.h | 2 - + gst/rtp/gstrtpamrdepay.c | 10 +- + gst/rtp/gstrtpamrdepay.h | 2 - + gst/rtp/gstrtpamrpay.c | 11 +- + gst/rtp/gstrtpamrpay.h | 2 - + gst/rtp/gstrtpbvdepay.c | 10 +- + gst/rtp/gstrtpbvdepay.h | 2 - + gst/rtp/gstrtpbvpay.c | 10 +- + gst/rtp/gstrtpbvpay.h | 2 - + gst/rtp/gstrtpceltdepay.c | 11 +- + gst/rtp/gstrtpceltdepay.h | 2 - + gst/rtp/gstrtpceltpay.c | 10 +- + gst/rtp/gstrtpceltpay.h | 2 - + gst/rtp/gstrtpdvdepay.c | 12 +- + gst/rtp/gstrtpdvdepay.h | 2 - + gst/rtp/gstrtpdvpay.c | 15 +- + gst/rtp/gstrtpdvpay.h | 2 - + gst/rtp/gstrtpelement.c | 46 + + gst/rtp/gstrtpelements.h | 134 ++ + gst/rtp/gstrtpg722depay.c | 10 +- + gst/rtp/gstrtpg722depay.h | 2 - + gst/rtp/gstrtpg722pay.c | 10 +- + gst/rtp/gstrtpg722pay.h | 2 - + gst/rtp/gstrtpg723depay.c | 10 +- + gst/rtp/gstrtpg723depay.h | 2 - + gst/rtp/gstrtpg723pay.c | 12 +- + gst/rtp/gstrtpg723pay.h | 2 - + gst/rtp/gstrtpg726depay.c | 12 +- + gst/rtp/gstrtpg726depay.h | 2 - + gst/rtp/gstrtpg726pay.c | 10 +- + gst/rtp/gstrtpg726pay.h | 2 - + gst/rtp/gstrtpg729depay.c | 10 +- + gst/rtp/gstrtpg729depay.h | 2 - + gst/rtp/gstrtpg729pay.c | 11 +- + gst/rtp/gstrtpg729pay.h | 2 - + gst/rtp/gstrtpgsmdepay.c | 10 +- + gst/rtp/gstrtpgsmdepay.h | 2 - + gst/rtp/gstrtpgsmpay.c | 10 +- + gst/rtp/gstrtpgsmpay.h | 1 - + gst/rtp/gstrtpgstdepay.c | 11 +- + gst/rtp/gstrtpgstdepay.h | 2 - + gst/rtp/gstrtpgstpay.c | 67 +- + gst/rtp/gstrtpgstpay.h | 4 +- + gst/rtp/gstrtph261depay.c | 13 +- + gst/rtp/gstrtph261depay.h | 2 - + 
gst/rtp/gstrtph261pay.c | 19 +- + gst/rtp/gstrtph261pay.h | 2 - + gst/rtp/gstrtph263depay.c | 10 +- + gst/rtp/gstrtph263depay.h | 2 - + gst/rtp/gstrtph263pay.c | 25 +- + gst/rtp/gstrtph263pay.h | 2 - + gst/rtp/gstrtph263pdepay.c | 104 +- + gst/rtp/gstrtph263pdepay.h | 2 - + gst/rtp/gstrtph263ppay.c | 16 +- + gst/rtp/gstrtph263ppay.h | 2 - + gst/rtp/gstrtph264depay.c | 218 +++- + gst/rtp/gstrtph264depay.h | 6 +- + gst/rtp/gstrtph264pay.c | 73 +- + gst/rtp/gstrtph264pay.h | 4 +- + gst/rtp/gstrtph265depay.c | 19 +- + gst/rtp/gstrtph265depay.h | 2 - + gst/rtp/gstrtph265pay.c | 108 +- + gst/rtp/gstrtph265pay.h | 5 +- + gst/rtp/gstrtphdrext-colorspace.c | 483 ++++++++ + gst/rtp/gstrtphdrext-colorspace.h | 41 + + gst/rtp/gstrtpilbcdepay.c | 10 +- + gst/rtp/gstrtpilbcdepay.h | 2 - + gst/rtp/gstrtpilbcpay.c | 10 +- + gst/rtp/gstrtpilbcpay.h | 2 - + gst/rtp/gstrtpisacdepay.c | 147 +++ + gst/rtp/gstrtpisacdepay.h | 31 + + gst/rtp/gstrtpisacpay.c | 183 +++ + gst/rtp/gstrtpisacpay.h | 31 + + gst/rtp/gstrtpj2kdepay.c | 10 +- + gst/rtp/gstrtpj2kdepay.h | 1 - + gst/rtp/gstrtpj2kpay.c | 15 +- + gst/rtp/gstrtpj2kpay.h | 2 - + gst/rtp/gstrtpjpegdepay.c | 13 +- + gst/rtp/gstrtpjpegdepay.h | 1 - + gst/rtp/gstrtpjpegpay.c | 15 +- + gst/rtp/gstrtpjpegpay.h | 2 - + gst/rtp/gstrtpklvdepay.c | 14 +- + gst/rtp/gstrtpklvdepay.h | 2 - + gst/rtp/gstrtpklvpay.c | 13 +- + gst/rtp/gstrtpklvpay.h | 2 - + gst/rtp/gstrtpldacpay.c | 228 ++++ + gst/rtp/gstrtpldacpay.h | 56 + + gst/rtp/gstrtpmp1sdepay.c | 10 +- + gst/rtp/gstrtpmp1sdepay.h | 2 - + gst/rtp/gstrtpmp2tdepay.c | 10 +- + gst/rtp/gstrtpmp2tdepay.h | 2 - + gst/rtp/gstrtpmp2tpay.c | 10 +- + gst/rtp/gstrtpmp2tpay.h | 2 - + gst/rtp/gstrtpmp4adepay.c | 10 +- + gst/rtp/gstrtpmp4adepay.h | 2 - + gst/rtp/gstrtpmp4apay.c | 12 +- + gst/rtp/gstrtpmp4apay.h | 2 - + gst/rtp/gstrtpmp4gdepay.c | 10 +- + gst/rtp/gstrtpmp4gdepay.h | 2 - + gst/rtp/gstrtpmp4gpay.c | 12 +- + gst/rtp/gstrtpmp4gpay.h | 2 - + gst/rtp/gstrtpmp4vdepay.c | 10 +- + gst/rtp/gstrtpmp4vdepay.h | 2 - + gst/rtp/gstrtpmp4vpay.c | 24 +- + gst/rtp/gstrtpmp4vpay.h | 2 - + gst/rtp/gstrtpmpadepay.c | 10 +- + gst/rtp/gstrtpmpadepay.h | 2 - + gst/rtp/gstrtpmpapay.c | 14 +- + gst/rtp/gstrtpmpapay.h | 2 - + gst/rtp/gstrtpmparobustdepay.c | 11 +- + gst/rtp/gstrtpmparobustdepay.h | 2 - + gst/rtp/gstrtpmpvdepay.c | 10 +- + gst/rtp/gstrtpmpvdepay.h | 2 - + gst/rtp/gstrtpmpvpay.c | 17 +- + gst/rtp/gstrtpmpvpay.h | 2 - + gst/rtp/gstrtpopusdepay.c | 151 ++- + gst/rtp/gstrtpopusdepay.h | 2 - + gst/rtp/gstrtpopuspay.c | 316 ++++- + gst/rtp/gstrtpopuspay.h | 7 +- + gst/rtp/gstrtppcmadepay.c | 10 +- + gst/rtp/gstrtppcmadepay.h | 2 - + gst/rtp/gstrtppcmapay.c | 10 +- + gst/rtp/gstrtppcmapay.h | 2 - + gst/rtp/gstrtppcmudepay.c | 10 +- + gst/rtp/gstrtppcmudepay.h | 2 - + gst/rtp/gstrtppcmupay.c | 10 +- + gst/rtp/gstrtppcmupay.h | 2 - + gst/rtp/gstrtpqcelpdepay.c | 10 +- + gst/rtp/gstrtpqcelpdepay.h | 2 - + gst/rtp/gstrtpqdmdepay.c | 17 +- + gst/rtp/gstrtpqdmdepay.h | 2 - + gst/rtp/gstrtpreddec.c | 193 ++- + gst/rtp/gstrtpreddec.h | 9 +- + gst/rtp/gstrtpredenc.c | 68 +- + gst/rtp/gstrtpredenc.h | 3 + + gst/rtp/gstrtpsbcdepay.c | 10 +- + gst/rtp/gstrtpsbcdepay.h | 2 - + gst/rtp/gstrtpsbcpay.c | 89 +- + gst/rtp/gstrtpsbcpay.h | 2 - + gst/rtp/gstrtpsirendepay.c | 10 +- + gst/rtp/gstrtpsirendepay.h | 2 - + gst/rtp/gstrtpsirenpay.c | 10 +- + gst/rtp/gstrtpsirenpay.h | 2 - + gst/rtp/gstrtpspeexdepay.c | 10 +- + gst/rtp/gstrtpspeexdepay.h | 2 - + gst/rtp/gstrtpspeexpay.c | 10 +- + gst/rtp/gstrtpspeexpay.h | 2 - + gst/rtp/gstrtpstorage.c | 3 + + 
gst/rtp/gstrtpstreamdepay.c | 10 +- + gst/rtp/gstrtpstreamdepay.h | 1 - + gst/rtp/gstrtpstreampay.c | 10 +- + gst/rtp/gstrtpstreampay.h | 2 - + gst/rtp/gstrtpsv3vdepay.c | 18 +- + gst/rtp/gstrtpsv3vdepay.h | 2 - + gst/rtp/gstrtptheoradepay.c | 10 +- + gst/rtp/gstrtptheoradepay.h | 2 - + gst/rtp/gstrtptheorapay.c | 14 +- + gst/rtp/gstrtptheorapay.h | 2 - + gst/rtp/gstrtpulpfecdec.c | 71 +- + gst/rtp/gstrtpulpfecdec.h | 2 + + gst/rtp/gstrtpulpfecenc.c | 133 +- + gst/rtp/gstrtpulpfecenc.h | 1 + + gst/rtp/gstrtpvorbisdepay.c | 10 +- + gst/rtp/gstrtpvorbisdepay.h | 2 - + gst/rtp/gstrtpvorbispay.c | 10 +- + gst/rtp/gstrtpvorbispay.h | 2 - + gst/rtp/gstrtpvp8depay.c | 277 ++++- + gst/rtp/gstrtpvp8depay.h | 14 +- + gst/rtp/gstrtpvp8pay.c | 263 +++- + gst/rtp/gstrtpvp8pay.h | 5 +- + gst/rtp/gstrtpvp9depay.c | 298 ++++- + gst/rtp/gstrtpvp9depay.h | 19 +- + gst/rtp/gstrtpvp9pay.c | 16 +- + gst/rtp/gstrtpvp9pay.h | 2 - + gst/rtp/gstrtpvrawdepay.c | 10 +- + gst/rtp/gstrtpvrawdepay.h | 2 - + gst/rtp/gstrtpvrawpay.c | 13 +- + gst/rtp/gstrtpvrawpay.h | 2 - + gst/rtp/meson.build | 6 +- + gst/rtp/rtpulpfeccommon.c | 10 +- + gst/rtpmanager/gstrtpbin.c | 1090 +++++++++++++++-- + gst/rtpmanager/gstrtpbin.h | 15 + + gst/rtpmanager/gstrtpdtmfmux.c | 16 +- + gst/rtpmanager/gstrtpdtmfmux.h | 3 +- + gst/rtpmanager/gstrtpfunnel.c | 129 +- + gst/rtpmanager/gstrtpfunnel.h | 2 + + .../gstrtphdrext-clientaudiolevel.c | 268 ++++ + .../gstrtphdrext-clientaudiolevel.h | 32 + + gst/rtpmanager/gstrtphdrext-mid.c | 318 +++++ + gst/rtpmanager/gstrtphdrext-mid.h | 36 + + gst/rtpmanager/gstrtphdrext-ntp.c | 269 ++++ + gst/rtpmanager/gstrtphdrext-ntp.h | 36 + + .../gstrtphdrext-repairedstreamid.c | 307 +++++ + .../gstrtphdrext-repairedstreamid.h | 36 + + gst/rtpmanager/gstrtphdrext-streamid.c | 297 +++++ + gst/rtpmanager/gstrtphdrext-streamid.h | 36 + + gst/rtpmanager/gstrtphdrext-twcc.c | 230 ++++ + gst/rtpmanager/gstrtphdrext-twcc.h | 83 ++ + gst/rtpmanager/gstrtpjitterbuffer.c | 894 +++++++++++--- + gst/rtpmanager/gstrtpjitterbuffer.h | 2 + + gst/rtpmanager/gstrtpmanager.c | 74 +- + gst/rtpmanager/gstrtpmux.c | 14 +- + gst/rtpmanager/gstrtpmux.h | 4 +- + gst/rtpmanager/gstrtpptdemux.c | 37 +- + gst/rtpmanager/gstrtpptdemux.h | 2 + + gst/rtpmanager/gstrtprtxqueue.c | 16 +- + gst/rtpmanager/gstrtprtxqueue.h | 4 +- + gst/rtpmanager/gstrtprtxreceive.c | 454 +++++-- + gst/rtpmanager/gstrtprtxreceive.h | 18 +- + gst/rtpmanager/gstrtprtxsend.c | 409 ++++++- + gst/rtpmanager/gstrtprtxsend.h | 19 +- + gst/rtpmanager/gstrtpsession.c | 189 ++- + gst/rtpmanager/gstrtpsession.h | 2 + + gst/rtpmanager/gstrtpssrcdemux.c | 165 ++- + gst/rtpmanager/gstrtpssrcdemux.h | 3 +- + gst/rtpmanager/gstrtpst2022-1-fecdec.c | 1013 +++++++++++++++ + gst/rtpmanager/gstrtpst2022-1-fecdec.h | 39 + + gst/rtpmanager/gstrtpst2022-1-fecenc.c | 803 ++++++++++++ + gst/rtpmanager/gstrtpst2022-1-fecenc.h | 39 + + gst/rtpmanager/gstrtputils.c | 44 + + gst/rtpmanager/gstrtputils.h | 34 + + gst/rtpmanager/meson.build | 12 +- + gst/rtpmanager/rtpjitterbuffer.c | 134 +- + gst/rtpmanager/rtpjitterbuffer.h | 2 +- + gst/rtpmanager/rtpsession.c | 656 ++++++---- + gst/rtpmanager/rtpsession.h | 27 +- + gst/rtpmanager/rtpsource.c | 112 +- + gst/rtpmanager/rtpsource.h | 21 +- + gst/rtpmanager/rtpstats.c | 1 + + gst/rtpmanager/rtpstats.h | 16 +- + gst/rtpmanager/rtptimerqueue.c | 10 +- + gst/rtpmanager/rtptimerqueue.h | 2 - + gst/rtpmanager/rtptwcc.c | 367 ++++-- + gst/rtpmanager/rtptwcc.h | 16 +- + 267 files changed, 12899 insertions(+), 2752 deletions(-) + create mode 100644 
ext/vpx/gstvpxcompat.h + create mode 100644 ext/vpx/gstvpxelement.c + create mode 100644 ext/vpx/gstvpxelements.h + create mode 100644 ext/vpx/gstvpxenums.h + create mode 100644 gst/rtp/gstrtpelement.c + create mode 100644 gst/rtp/gstrtpelements.h + create mode 100644 gst/rtp/gstrtphdrext-colorspace.c + create mode 100644 gst/rtp/gstrtphdrext-colorspace.h + create mode 100644 gst/rtp/gstrtpisacdepay.c + create mode 100644 gst/rtp/gstrtpisacdepay.h + create mode 100644 gst/rtp/gstrtpisacpay.c + create mode 100644 gst/rtp/gstrtpisacpay.h + create mode 100644 gst/rtp/gstrtpldacpay.c + create mode 100644 gst/rtp/gstrtpldacpay.h + create mode 100644 gst/rtpmanager/gstrtphdrext-clientaudiolevel.c + create mode 100644 gst/rtpmanager/gstrtphdrext-clientaudiolevel.h + create mode 100644 gst/rtpmanager/gstrtphdrext-mid.c + create mode 100644 gst/rtpmanager/gstrtphdrext-mid.h + create mode 100644 gst/rtpmanager/gstrtphdrext-ntp.c + create mode 100644 gst/rtpmanager/gstrtphdrext-ntp.h + create mode 100644 gst/rtpmanager/gstrtphdrext-repairedstreamid.c + create mode 100644 gst/rtpmanager/gstrtphdrext-repairedstreamid.h + create mode 100644 gst/rtpmanager/gstrtphdrext-streamid.c + create mode 100644 gst/rtpmanager/gstrtphdrext-streamid.h + create mode 100644 gst/rtpmanager/gstrtphdrext-twcc.c + create mode 100644 gst/rtpmanager/gstrtphdrext-twcc.h + create mode 100644 gst/rtpmanager/gstrtpst2022-1-fecdec.c + create mode 100644 gst/rtpmanager/gstrtpst2022-1-fecdec.h + create mode 100644 gst/rtpmanager/gstrtpst2022-1-fecenc.c + create mode 100644 gst/rtpmanager/gstrtpst2022-1-fecenc.h + create mode 100644 gst/rtpmanager/gstrtputils.c + create mode 100644 gst/rtpmanager/gstrtputils.h + +diff --git a/ext/vpx/gstvp8dec.c b/ext/vpx/gstvp8dec.c +index 8aea6aced..16a1b5ae2 100644 +--- a/ext/vpx/gstvp8dec.c ++++ b/ext/vpx/gstvp8dec.c +@@ -44,6 +44,7 @@ + + #include + ++#include "gstvpxelements.h" + #include "gstvp8dec.h" + #include "gstvp8utils.h" + +@@ -59,6 +60,7 @@ static void gst_vp8_dec_set_default_format (GstVPXDec * dec, GstVideoFormat fmt, + int width, int height); + static void gst_vp8_dec_handle_resolution_change (GstVPXDec * dec, + vpx_image_t * img, GstVideoFormat fmt); ++static gboolean gst_vp8_dec_get_needs_sync_point (GstVPXDec * dec); + + static GstStaticPadTemplate gst_vp8_dec_sink_template = + GST_STATIC_PAD_TEMPLATE ("sink", +@@ -76,6 +78,8 @@ GST_STATIC_PAD_TEMPLATE ("src", + + #define parent_class gst_vp8_dec_parent_class + G_DEFINE_TYPE (GstVP8Dec, gst_vp8_dec, GST_TYPE_VPX_DEC); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (vp8dec, "vp8dec", GST_RANK_PRIMARY, ++ gst_vp8_dec_get_type (), vpx_element_init (plugin)); + + static void + gst_vp8_dec_class_init (GstVP8DecClass * klass) +@@ -103,6 +107,8 @@ gst_vp8_dec_class_init (GstVP8DecClass * klass) + GST_DEBUG_FUNCPTR (gst_vp8_dec_set_default_format); + vpx_class->handle_resolution_change = + GST_DEBUG_FUNCPTR (gst_vp8_dec_handle_resolution_change); ++ vpx_class->get_needs_sync_point = ++ GST_DEBUG_FUNCPTR (gst_vp8_dec_get_needs_sync_point); + + GST_DEBUG_CATEGORY_INIT (gst_vp8dec_debug, "vp8dec", 0, "VP8 Decoder"); + } +@@ -153,4 +159,10 @@ gst_vp8_dec_handle_resolution_change (GstVPXDec * dec, vpx_image_t * img, + } + } + ++static gboolean ++gst_vp8_dec_get_needs_sync_point (GstVPXDec * dec) ++{ ++ return FALSE; ++} ++ + #endif /* HAVE_VP8_DECODER */ +diff --git a/ext/vpx/gstvp8enc.c b/ext/vpx/gstvp8enc.c +index 8bc56f980..93ef43e83 100644 +--- a/ext/vpx/gstvp8enc.c ++++ b/ext/vpx/gstvp8enc.c +@@ -62,6 +62,7 @@ + #include + #include + 
++#include "gstvpxelements.h" + #include "gstvp8utils.h" + #include "gstvp8enc.h" + +@@ -72,6 +73,9 @@ typedef struct + { + vpx_image_t *image; + GList *invisible; ++ guint layer_id; ++ guint8 tl0picidx; ++ gboolean layer_sync; + } GstVP8EncUserData; + + static void +@@ -104,6 +108,15 @@ static GstFlowReturn gst_vp8_enc_handle_invisible_frame_buffer (GstVPXEnc * enc, + void *user_data, GstBuffer * buffer); + static void gst_vp8_enc_set_frame_user_data (GstVPXEnc * enc, + GstVideoCodecFrame * frame, vpx_image_t * image); ++static void gst_vp8_enc_apply_frame_temporal_settings (GstVPXEnc * enc, ++ GstVideoCodecFrame * frame, guint layer_id, guint8 tl0picidx, ++ gboolean layer_sync); ++static void gst_vp8_enc_get_frame_temporal_settings (GstVPXEnc * enc, ++ GstVideoCodecFrame * frame, guint * layer_id, guint8 * tl0picidx, ++ gboolean * layer_sync); ++static void gst_vp8_enc_preflight_buffer (GstVPXEnc * enc, ++ GstVideoCodecFrame * frame, GstBuffer * buffer, ++ gboolean layer_sync, guint layer_id, guint8 tl0picidx); + + static GstFlowReturn gst_vp8_enc_pre_push (GstVideoEncoder * encoder, + GstVideoCodecFrame * frame); +@@ -127,6 +140,8 @@ GST_STATIC_PAD_TEMPLATE ("src", + + #define parent_class gst_vp8_enc_parent_class + G_DEFINE_TYPE (GstVP8Enc, gst_vp8_enc, GST_TYPE_VPX_ENC); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (vp8enc, "vp8enc", GST_RANK_PRIMARY, ++ gst_vp8_enc_get_type (), vpx_element_init (plugin)); + + static void + gst_vp8_enc_class_init (GstVP8EncClass * klass) +@@ -163,6 +178,11 @@ gst_vp8_enc_class_init (GstVP8EncClass * klass) + vpx_encoder_class->handle_invisible_frame_buffer = + gst_vp8_enc_handle_invisible_frame_buffer; + vpx_encoder_class->set_frame_user_data = gst_vp8_enc_set_frame_user_data; ++ vpx_encoder_class->apply_frame_temporal_settings = ++ gst_vp8_enc_apply_frame_temporal_settings; ++ vpx_encoder_class->get_frame_temporal_settings = ++ gst_vp8_enc_get_frame_temporal_settings; ++ vpx_encoder_class->preflight_buffer = gst_vp8_enc_preflight_buffer; + + GST_DEBUG_CATEGORY_INIT (gst_vp8enc_debug, "vp8enc", 0, "VP8 Encoder"); + } +@@ -330,6 +350,67 @@ gst_vp8_enc_set_frame_user_data (GstVPXEnc * enc, GstVideoCodecFrame * frame, + return; + } + ++static void ++gst_vp8_enc_apply_frame_temporal_settings (GstVPXEnc * enc, ++ GstVideoCodecFrame * frame, guint layer_id, guint8 tl0picidx, ++ gboolean layer_sync) ++{ ++ GstVP8EncUserData *user_data; ++ ++ user_data = gst_video_codec_frame_get_user_data (frame); ++ ++ if (!user_data) { ++ GST_ERROR_OBJECT (enc, "Have no frame user data"); ++ return; ++ } ++ ++ vpx_codec_control (&enc->encoder, VP8E_SET_TEMPORAL_LAYER_ID, layer_id); ++ user_data->layer_id = layer_id; ++ user_data->tl0picidx = tl0picidx; ++ user_data->layer_sync = layer_sync; ++ ++ return; ++} ++ ++static void ++gst_vp8_enc_get_frame_temporal_settings (GstVPXEnc * enc, ++ GstVideoCodecFrame * frame, guint * layer_id, guint8 * tl0picidx, ++ gboolean * layer_sync) ++{ ++ GstVP8EncUserData *user_data; ++ ++ user_data = gst_video_codec_frame_get_user_data (frame); ++ ++ if (!user_data) { ++ GST_ERROR_OBJECT (enc, "Have no frame user data"); ++ *layer_id = 0; ++ *tl0picidx = 0; ++ *layer_sync = FALSE; ++ return; ++ } ++ ++ *layer_id = user_data->layer_id; ++ *tl0picidx = user_data->tl0picidx; ++ *layer_sync = user_data->layer_sync; ++ ++ return; ++} ++ ++static void ++gst_vp8_enc_preflight_buffer (GstVPXEnc * enc, ++ GstVideoCodecFrame * frame, GstBuffer * buffer, ++ gboolean layer_sync, guint layer_id, guint8 tl0picidx) ++{ ++ GstCustomMeta *meta = 
gst_buffer_add_custom_meta (buffer, "GstVP8Meta"); ++ GstStructure *s = gst_custom_meta_get_structure (meta); ++ ++ gst_structure_set (s, ++ "use-temporal-scaling", G_TYPE_BOOLEAN, (enc->cfg.ts_periodicity != 0), ++ "layer-sync", G_TYPE_BOOLEAN, layer_sync, ++ "layer-id", G_TYPE_UINT, layer_id, ++ "tl0picidx", G_TYPE_UINT, tl0picidx, NULL); ++} ++ + static guint64 + _to_granulepos (guint64 frame_end_number, guint inv_count, guint keyframe_dist) + { +diff --git a/ext/vpx/gstvp9dec.c b/ext/vpx/gstvp9dec.c +index 43a0dda47..3819e602a 100644 +--- a/ext/vpx/gstvp9dec.c ++++ b/ext/vpx/gstvp9dec.c +@@ -44,6 +44,7 @@ + + #include + ++#include "gstvpxelements.h" + #include "gstvp8utils.h" + #include "gstvp9dec.h" + +@@ -61,6 +62,7 @@ static gboolean gst_vp9_dec_get_valid_format (GstVPXDec * dec, + vpx_image_t * img, GstVideoFormat * fmt); + static void gst_vp9_dec_handle_resolution_change (GstVPXDec * dec, + vpx_image_t * img, GstVideoFormat fmt); ++static gboolean gst_vp9_dec_get_needs_sync_point (GstVPXDec * dec); + + static GstStaticPadTemplate gst_vp9_dec_sink_template = + GST_STATIC_PAD_TEMPLATE ("sink", +@@ -69,27 +71,41 @@ GST_STATIC_PAD_TEMPLATE ("sink", + GST_STATIC_CAPS ("video/x-vp9") + ); + +-static GstStaticPadTemplate gst_vp9_dec_src_template = +-GST_STATIC_PAD_TEMPLATE ("src", +- GST_PAD_SRC, +- GST_PAD_ALWAYS, +- GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ I420, YV12, Y42B, Y444, GBR }")) +- ); ++#define GST_VP9_DEC_VIDEO_FORMATS_8BIT "I420, YV12, Y42B, Y444, GBR" ++#define GST_VP9_DEC_VIDEO_FORMATS_HIGHBIT \ ++ "I420_10LE, I420_12LE, I422_10LE, I422_12LE, Y444_10LE, Y444_12LE, GBR_10LE, GBR_12LE" + + #define parent_class gst_vp9_dec_parent_class + G_DEFINE_TYPE (GstVP9Dec, gst_vp9_dec, GST_TYPE_VPX_DEC); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (vp9dec, "vp9dec", GST_RANK_PRIMARY, ++ gst_vp9_dec_get_type (), vpx_element_init (plugin)); ++ ++static GstCaps * ++gst_vp9_dec_get_src_caps (void) ++{ ++#define CAPS_8BIT GST_VIDEO_CAPS_MAKE ("{ " GST_VP9_DEC_VIDEO_FORMATS_8BIT " }") ++#define CAPS_HIGHBIT GST_VIDEO_CAPS_MAKE ( "{ " GST_VP9_DEC_VIDEO_FORMATS_8BIT ", " \ ++ GST_VP9_DEC_VIDEO_FORMATS_HIGHBIT "}") ++ ++ return gst_caps_from_string ((vpx_codec_get_caps (&vpx_codec_vp9_dx_algo) ++ & VPX_CODEC_CAP_HIGHBITDEPTH) ? 
CAPS_HIGHBIT : CAPS_8BIT); ++} + + static void + gst_vp9_dec_class_init (GstVP9DecClass * klass) + { + GstElementClass *element_class; + GstVPXDecClass *vpx_class; ++ GstCaps *caps; + + element_class = GST_ELEMENT_CLASS (klass); + vpx_class = GST_VPX_DEC_CLASS (klass); + +- gst_element_class_add_static_pad_template (element_class, +- &gst_vp9_dec_src_template); ++ caps = gst_vp9_dec_get_src_caps (); ++ gst_element_class_add_pad_template (element_class, ++ gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps)); ++ gst_clear_caps (&caps); ++ + gst_element_class_add_static_pad_template (element_class, + &gst_vp9_dec_sink_template); + +@@ -106,6 +122,8 @@ gst_vp9_dec_class_init (GstVP9DecClass * klass) + GST_DEBUG_FUNCPTR (gst_vp9_dec_get_valid_format); + vpx_class->handle_resolution_change = + GST_DEBUG_FUNCPTR (gst_vp9_dec_handle_resolution_change); ++ vpx_class->get_needs_sync_point = ++ GST_DEBUG_FUNCPTR (gst_vp9_dec_get_needs_sync_point); + + GST_DEBUG_CATEGORY_INIT (gst_vp9dec_debug, "vp9dec", 0, "VP9 Decoder"); + } +@@ -130,65 +148,82 @@ static gboolean + gst_vp9_dec_get_valid_format (GstVPXDec * dec, vpx_image_t * img, + GstVideoFormat * fmt) + { +- switch (img->fmt) { +- case VPX_IMG_FMT_I420: ++ switch ((gst_vpx_img_fmt_t) img->fmt) { ++ case GST_VPX_IMG_FMT_I420: + *fmt = GST_VIDEO_FORMAT_I420; + return TRUE; + +- case VPX_IMG_FMT_YV12: ++ case GST_VPX_IMG_FMT_YV12: + *fmt = GST_VIDEO_FORMAT_YV12; + return TRUE; + +- case VPX_IMG_FMT_I422: ++ case GST_VPX_IMG_FMT_I422: + *fmt = GST_VIDEO_FORMAT_Y42B; + return TRUE; + +- case VPX_IMG_FMT_I444: ++ case GST_VPX_IMG_FMT_I444: + if (img->cs == VPX_CS_SRGB) + *fmt = GST_VIDEO_FORMAT_GBR; + else + *fmt = GST_VIDEO_FORMAT_Y444; + return TRUE; +-#ifdef VPX_IMG_FMT_I440 +- case VPX_IMG_FMT_I440: ++ case GST_VPX_IMG_FMT_I440: + /* Planar, half height, full width U/V */ + GST_FIXME_OBJECT (dec, "Please add a 4:4:0 planar frame format"); + GST_ELEMENT_WARNING (dec, STREAM, NOT_IMPLEMENTED, + (NULL), ("Unsupported frame format - 4:4:0 planar")); + return FALSE; +-#endif +-#ifdef VPX_IMG_FMT_I42016 +- case VPX_IMG_FMT_I42016: +- /* VPX_IMG_FMT_I420 | VPX_IMG_FMT_HIGHBITDEPTH */ +- GST_FIXME_OBJECT (dec, "Please add 16-bit I420 format"); ++ case GST_VPX_IMG_FMT_I42016: ++ if (img->bit_depth == 10) { ++ *fmt = GST_VIDEO_FORMAT_I420_10LE; ++ return TRUE; ++ } else if (img->bit_depth == 12) { ++ *fmt = GST_VIDEO_FORMAT_I420_12LE; ++ return TRUE; ++ } + GST_ELEMENT_WARNING (dec, STREAM, NOT_IMPLEMENTED, +- (NULL), ("Unsupported frame format - 16-bit 4:2:0 planar")); ++ (NULL), ("Unsupported frame format - %d-bit 4:2:0 planar", ++ img->bit_depth)); + return FALSE; +-#endif +-#ifdef VPX_IMG_FMT_I42216 +- case VPX_IMG_FMT_I42216: +- /* VPX_IMG_FMT_I422 | VPX_IMG_FMT_HIGHBITDEPTH */ +- GST_FIXME_OBJECT (dec, "Please add 16-bit Y42B format"); ++ case GST_VPX_IMG_FMT_I42216: ++ if (img->bit_depth == 10) { ++ *fmt = GST_VIDEO_FORMAT_I422_10LE; ++ return TRUE; ++ } else if (img->bit_depth == 12) { ++ *fmt = GST_VIDEO_FORMAT_I422_12LE; ++ return TRUE; ++ } + GST_ELEMENT_WARNING (dec, STREAM, NOT_IMPLEMENTED, +- (NULL), ("Unsupported frame format - 16-bit 4:2:2 planar")); ++ (NULL), ("Unsupported frame format - %d-bit 4:2:2 planar", ++ img->bit_depth)); + return FALSE; +-#endif +-#ifdef VPX_IMG_FMT_I44416 +- case VPX_IMG_FMT_I44416: +- /* VPX_IMG_FMT_I444 | VPX_IMG_FMT_HIGHBITDEPTH */ +- GST_FIXME_OBJECT (dec, "Please add 16-bit Y444 format"); ++ case GST_VPX_IMG_FMT_I44416: ++ if (img->cs == VPX_CS_SRGB) { ++ if (img->bit_depth == 10) { ++ *fmt = 
GST_VIDEO_FORMAT_GBR_10LE; ++ return TRUE; ++ } else if (img->bit_depth == 12) { ++ *fmt = GST_VIDEO_FORMAT_GBR_12LE; ++ return TRUE; ++ } ++ } else { ++ if (img->bit_depth == 10) { ++ *fmt = GST_VIDEO_FORMAT_Y444_10LE; ++ return TRUE; ++ } else if (img->bit_depth == 12) { ++ *fmt = GST_VIDEO_FORMAT_Y444_12LE; ++ return TRUE; ++ } ++ } + GST_ELEMENT_WARNING (dec, STREAM, NOT_IMPLEMENTED, +- (NULL), ("Unsupported frame format - 16-bit 4:4:4 planar")); ++ (NULL), ("Unsupported frame format - %d-bit 4:4:4 planar", ++ img->bit_depth)); + return FALSE; +-#endif +-#ifdef VPX_IMG_FMT_I44016 +- case VPX_IMG_FMT_I44016: +- /* VPX_IMG_FMT_I440 | VPX_IMG_FMT_HIGHBITDEPTH */ ++ case GST_VPX_IMG_FMT_I44016: + GST_FIXME_OBJECT (dec, "Please add 16-bit 4:4:0 planar frame format"); + GST_ELEMENT_WARNING (dec, STREAM, NOT_IMPLEMENTED, + (NULL), ("Unsupported frame format - 16-bit 4:4:0 planar")); + return FALSE; +-#endif + default: + return FALSE; + } +@@ -218,4 +253,10 @@ gst_vp9_dec_handle_resolution_change (GstVPXDec * dec, vpx_image_t * img, + } + } + ++static gboolean ++gst_vp9_dec_get_needs_sync_point (GstVPXDec * dec) ++{ ++ return TRUE; ++} ++ + #endif /* HAVE_VP9_DECODER */ +diff --git a/ext/vpx/gstvp9enc.c b/ext/vpx/gstvp9enc.c +index 1705b0602..eb75fa1d0 100644 +--- a/ext/vpx/gstvp9enc.c ++++ b/ext/vpx/gstvp9enc.c +@@ -62,21 +62,34 @@ + #include + #include + ++#include "gstvpxelements.h" ++#include "gstvpxenums.h" ++#include "gstvpx-enumtypes.h" + #include "gstvp8utils.h" + #include "gstvp9enc.h" + + GST_DEBUG_CATEGORY_STATIC (gst_vp9enc_debug); + #define GST_CAT_DEFAULT gst_vp9enc_debug + ++#define DEFAULT_TILE_COLUMNS 6 ++#define DEFAULT_TILE_ROWS 0 ++#define DEFAULT_ROW_MT 0 ++#define DEFAULT_AQ_MODE GST_VPX_AQ_OFF ++#define DEFAULT_FRAME_PARALLEL_DECODING TRUE + +-/* FIXME: Y42B and Y444 do not work yet it seems */ +-static GstStaticPadTemplate gst_vp9_enc_sink_template = +-GST_STATIC_PAD_TEMPLATE ("sink", +- GST_PAD_SINK, +- GST_PAD_ALWAYS, +- /*GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ I420, YV12, Y42B, Y444 }")) */ +- GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ I420, YV12 }")) +- ); ++enum ++{ ++ PROP_0, ++ PROP_TILE_COLUMNS, ++ PROP_TILE_ROWS, ++ PROP_ROW_MT, ++ PROP_AQ_MODE, ++ PROP_FRAME_PARALLEL_DECODING, ++}; ++ ++#define GST_VP9_ENC_VIDEO_FORMATS_8BIT "I420, YV12, Y444" ++#define GST_VP9_ENC_VIDEO_FORMATS_HIGHBIT \ ++ "I420_10LE, I420_12LE, I422_10LE, I422_12LE, Y444_10LE, Y444_12LE" + + static GstStaticPadTemplate gst_vp9_enc_src_template = + GST_STATIC_PAD_TEMPLATE ("src", +@@ -87,6 +100,8 @@ GST_STATIC_PAD_TEMPLATE ("src", + + #define parent_class gst_vp9_enc_parent_class + G_DEFINE_TYPE (GstVP9Enc, gst_vp9_enc, GST_TYPE_VPX_ENC); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (vp9enc, "vp9enc", GST_RANK_PRIMARY, ++ gst_vp9_enc_get_type (), vpx_element_init (plugin)); + + static vpx_codec_iface_t *gst_vp9_enc_get_algo (GstVPXEnc * enc); + static gboolean gst_vp9_enc_enable_scaling (GstVPXEnc * enc); +@@ -100,22 +115,115 @@ static GstFlowReturn gst_vp9_enc_handle_invisible_frame_buffer (GstVPXEnc * enc, + void *user_data, GstBuffer * buffer); + static void gst_vp9_enc_set_frame_user_data (GstVPXEnc * enc, + GstVideoCodecFrame * frame, vpx_image_t * image); ++static void gst_vp9_enc_set_property (GObject * object, guint prop_id, ++ const GValue * value, GParamSpec * pspec); ++static void gst_vp9_enc_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec); ++static gboolean gst_vp9_enc_configure_encoder (GstVPXEnc * encoder, ++ GstVideoCodecState * state); + + 
#define DEFAULT_BITS_PER_PIXEL 0.0289 + ++static GstCaps * ++gst_vp9_enc_get_sink_caps (void) ++{ ++#define CAPS_8BIT GST_VIDEO_CAPS_MAKE ("{ " GST_VP9_ENC_VIDEO_FORMATS_8BIT " }") ++#define CAPS_HIGHBIT GST_VIDEO_CAPS_MAKE ( "{ " GST_VP9_ENC_VIDEO_FORMATS_8BIT ", " \ ++ GST_VP9_ENC_VIDEO_FORMATS_HIGHBIT "}") ++ ++ return gst_caps_from_string ((vpx_codec_get_caps (gst_vp9_enc_get_algo (NULL)) ++ & VPX_CODEC_CAP_HIGHBITDEPTH) ? CAPS_HIGHBIT : CAPS_8BIT); ++} ++ + static void + gst_vp9_enc_class_init (GstVP9EncClass * klass) + { ++ GObjectClass *gobject_class; + GstElementClass *element_class; + GstVPXEncClass *vpx_encoder_class; ++ GstCaps *caps; + ++ gobject_class = G_OBJECT_CLASS (klass); + element_class = GST_ELEMENT_CLASS (klass); + vpx_encoder_class = GST_VPX_ENC_CLASS (klass); + ++ gobject_class->set_property = gst_vp9_enc_set_property; ++ gobject_class->get_property = gst_vp9_enc_get_property; ++ ++ /** ++ * GstVP9Enc:tile-columns: ++ * ++ * Number of tile columns, log2 ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_TILE_COLUMNS, ++ g_param_spec_int ("tile-columns", "Tile Columns", ++ "Number of tile columns, log2", ++ 0, 6, DEFAULT_TILE_COLUMNS, ++ (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); ++ ++ /** ++ * GstVP9Enc:tile-rows: ++ * ++ * Number of tile rows, log2 ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_TILE_ROWS, ++ g_param_spec_int ("tile-rows", "Tile Rows", ++ "Number of tile rows, log2", ++ 0, 2, DEFAULT_TILE_ROWS, ++ (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); ++ ++ /** ++ * GstVP9Enc:row-mt: ++ * ++ * Whether each row should be encoded using multiple threads ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_ROW_MT, ++ g_param_spec_boolean ("row-mt", "Row Multithreading", ++ "Whether each row should be encoded using multiple threads", ++ DEFAULT_ROW_MT, ++ (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); ++ ++ /** ++ * GstVP9Enc:aq-mode: ++ * ++ * Adaptive Quantization Mode ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_AQ_MODE, ++ g_param_spec_enum ("aq-mode", "Adaptive Quantization Mode", ++ "Which adaptive quantization mode should be used", ++ GST_TYPE_VPXAQ, DEFAULT_AQ_MODE, ++ (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); ++ gst_type_mark_as_plugin_api (GST_TYPE_VPXAQ, 0); ++ ++ /** ++ * GstVP9Enc:frame-parallel-decoding: ++ * ++ * Whether encoded bitstream should allow parallel processing of video frames in the decoder ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_FRAME_PARALLEL_DECODING, ++ g_param_spec_boolean ("frame-parallel-decoding", ++ "Frame Parallel Decoding", ++ "Whether encoded bitstream should allow parallel processing of video frames in the decoder " ++ "(default is on)", DEFAULT_FRAME_PARALLEL_DECODING, ++ (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); ++ + gst_element_class_add_static_pad_template (element_class, + &gst_vp9_enc_src_template); +- gst_element_class_add_static_pad_template (element_class, +- &gst_vp9_enc_sink_template); ++ ++ caps = gst_vp9_enc_get_sink_caps (); ++ gst_element_class_add_pad_template (element_class, ++ gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, caps)); ++ gst_clear_caps (&caps); + + gst_element_class_set_static_metadata (element_class, + "On2 VP9 Encoder", +@@ -133,6 +241,7 @@ gst_vp9_enc_class_init (GstVP9EncClass * klass) + 
vpx_encoder_class->handle_invisible_frame_buffer = + gst_vp9_enc_handle_invisible_frame_buffer; + vpx_encoder_class->set_frame_user_data = gst_vp9_enc_set_frame_user_data; ++ vpx_encoder_class->configure_encoder = gst_vp9_enc_configure_encoder; + + GST_DEBUG_CATEGORY_INIT (gst_vp9enc_debug, "vp9enc", 0, "VP9 Encoder"); + } +@@ -155,6 +264,259 @@ gst_vp9_enc_init (GstVP9Enc * gst_vp9_enc) + gst_vpx_enc->have_default_config = TRUE; + } + gst_vpx_enc->bits_per_pixel = DEFAULT_BITS_PER_PIXEL; ++ ++ gst_vp9_enc->tile_columns = DEFAULT_TILE_COLUMNS; ++ gst_vp9_enc->tile_rows = DEFAULT_TILE_ROWS; ++ gst_vp9_enc->row_mt = DEFAULT_ROW_MT; ++ gst_vp9_enc->aq_mode = DEFAULT_AQ_MODE; ++ gst_vp9_enc->frame_parallel_decoding = DEFAULT_FRAME_PARALLEL_DECODING; ++} ++ ++static void ++gst_vp9_enc_set_property (GObject * object, guint prop_id, ++ const GValue * value, GParamSpec * pspec) ++{ ++ GstVPXEnc *gst_vpx_enc = GST_VPX_ENC (object); ++ GstVP9Enc *gst_vp9_enc = GST_VP9_ENC (object); ++ vpx_codec_err_t status; ++ ++ g_mutex_lock (&gst_vpx_enc->encoder_lock); ++ ++ switch (prop_id) { ++ case PROP_TILE_COLUMNS: ++ gst_vp9_enc->tile_columns = g_value_get_int (value); ++ if (gst_vpx_enc->inited) { ++ status = ++ vpx_codec_control (&gst_vpx_enc->encoder, VP9E_SET_TILE_COLUMNS, ++ gst_vp9_enc->tile_columns); ++ if (status != VPX_CODEC_OK) { ++ GST_WARNING_OBJECT (gst_vpx_enc, ++ "Failed to set VP9E_SET_TILE_COLUMNS: %s", ++ gst_vpx_error_name (status)); ++ } ++ } ++ break; ++ case PROP_TILE_ROWS: ++ gst_vp9_enc->tile_rows = g_value_get_int (value); ++ if (gst_vpx_enc->inited) { ++ status = ++ vpx_codec_control (&gst_vpx_enc->encoder, VP9E_SET_TILE_ROWS, ++ gst_vp9_enc->tile_rows); ++ if (status != VPX_CODEC_OK) { ++ GST_WARNING_OBJECT (gst_vpx_enc, ++ "Failed to set VP9E_SET_TILE_ROWS: %s", ++ gst_vpx_error_name (status)); ++ } ++ } ++ break; ++ case PROP_ROW_MT: ++ gst_vp9_enc->row_mt = g_value_get_boolean (value); ++ if (gst_vpx_enc->inited) { ++ status = ++ vpx_codec_control (&gst_vpx_enc->encoder, VP9E_SET_ROW_MT, ++ gst_vp9_enc->row_mt ? 1 : 0); ++ if (status != VPX_CODEC_OK) { ++ GST_WARNING_OBJECT (gst_vpx_enc, ++ "Failed to set VP9E_SET_ROW_MT: %s", gst_vpx_error_name (status)); ++ } ++ } ++ break; ++ case PROP_AQ_MODE: ++ gst_vp9_enc->aq_mode = g_value_get_enum (value); ++ if (gst_vpx_enc->inited) { ++ status = vpx_codec_control (&gst_vpx_enc->encoder, VP9E_SET_AQ_MODE, ++ gst_vp9_enc->aq_mode); ++ if (status != VPX_CODEC_OK) { ++ GST_WARNING_OBJECT (gst_vpx_enc, ++ "Failed to set VP9E_SET_AQ_MODE: %s", ++ gst_vpx_error_name (status)); ++ } ++ } ++ break; ++ case PROP_FRAME_PARALLEL_DECODING: ++ gst_vp9_enc->frame_parallel_decoding = g_value_get_boolean (value); ++ if (gst_vpx_enc->inited) { ++ status = vpx_codec_control (&gst_vpx_enc->encoder, ++ VP9E_SET_FRAME_PARALLEL_DECODING, ++ gst_vp9_enc->frame_parallel_decoding ? 
1 : 0); ++ if (status != VPX_CODEC_OK) { ++ GST_WARNING_OBJECT (gst_vpx_enc, ++ "Failed to set VP9E_SET_FRAME_PARALLEL_DECODING: %s", ++ gst_vpx_error_name (status)); ++ } ++ } ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++ ++ g_mutex_unlock (&gst_vpx_enc->encoder_lock); ++} ++ ++static void ++gst_vp9_enc_get_property (GObject * object, guint prop_id, GValue * value, ++ GParamSpec * pspec) ++{ ++ GstVPXEnc *gst_vpx_enc = GST_VPX_ENC (object); ++ GstVP9Enc *gst_vp9_enc = GST_VP9_ENC (object); ++ ++ g_mutex_lock (&gst_vpx_enc->encoder_lock); ++ ++ switch (prop_id) { ++ case PROP_TILE_COLUMNS: ++ g_value_set_int (value, gst_vp9_enc->tile_columns); ++ break; ++ case PROP_TILE_ROWS: ++ g_value_set_int (value, gst_vp9_enc->tile_rows); ++ break; ++ case PROP_ROW_MT: ++ g_value_set_boolean (value, gst_vp9_enc->row_mt); ++ break; ++ case PROP_AQ_MODE: ++ g_value_set_enum (value, gst_vp9_enc->aq_mode); ++ break; ++ case PROP_FRAME_PARALLEL_DECODING: ++ g_value_set_boolean (value, gst_vp9_enc->frame_parallel_decoding); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++ ++ g_mutex_unlock (&gst_vpx_enc->encoder_lock); ++} ++ ++static vpx_color_space_t ++gst_vp9_get_vpx_colorspace (GstVPXEnc * encoder, GstVideoColorimetry * in_cinfo, ++ GstVideoFormat format) ++{ ++ vpx_color_space_t colorspace = VPX_CS_UNKNOWN; ++ GstVideoColorimetry cinfo = *in_cinfo; ++ gchar *colorimetry_str; ++ guint i; ++ ++ /* *INDENT-OFF* */ ++ static const struct ++ { ++ const gchar *str; ++ vpx_color_space_t vpx_color_space; ++ } colorimetry_map[] = { ++ { ++ GST_VIDEO_COLORIMETRY_BT601, VPX_CS_BT_601}, { ++ GST_VIDEO_COLORIMETRY_BT709, VPX_CS_BT_709}, { ++ GST_VIDEO_COLORIMETRY_SMPTE240M, VPX_CS_SMPTE_240}, { ++ GST_VIDEO_COLORIMETRY_BT2020, VPX_CS_BT_2020} ++ }; ++ /* *INDENT-ON* */ ++ ++ /* We support any range, all mapped CSC are by default reduced range. */ ++ cinfo.range = GST_VIDEO_COLOR_RANGE_16_235; ++ colorimetry_str = gst_video_colorimetry_to_string (&cinfo); ++ ++ if (colorimetry_str != NULL) { ++ for (i = 0; i < G_N_ELEMENTS (colorimetry_map); ++i) { ++ if (g_strcmp0 (colorimetry_map[i].str, colorimetry_str) == 0) { ++ colorspace = colorimetry_map[i].vpx_color_space; ++ break; ++ } ++ } ++ } ++ ++ if (colorspace == VPX_CS_UNKNOWN) { ++ if (format == GST_VIDEO_FORMAT_GBR ++ || format == GST_VIDEO_FORMAT_GBR_10BE ++ || format == GST_VIDEO_FORMAT_GBR_10LE ++ || format == GST_VIDEO_FORMAT_GBR_12BE ++ || format == GST_VIDEO_FORMAT_GBR_12LE) { ++ /* Currently has no effect because vp*enc elements only accept YUV video ++ * formats. ++ * ++ * FIXME: Support encoding GST_VIDEO_FORMAT_GBR and its high bits variants. 
++ */ ++ colorspace = VPX_CS_SRGB; ++ } else { ++ GST_WARNING_OBJECT (encoder, "Unsupported colorspace \"%s\"", ++ GST_STR_NULL (colorimetry_str)); ++ } ++ } ++ ++ g_free (colorimetry_str); ++ ++ return colorspace; ++} ++ ++static gint ++gst_vp9_get_vpx_color_range (GstVideoColorimetry * colorimetry) ++{ ++ if (colorimetry->range == GST_VIDEO_COLOR_RANGE_0_255) ++ /* Full range (0..255 or HBD equivalent) */ ++ return 1; ++ ++ /* Limited range (16..235 or HBD equivalent) */ ++ return 0; ++} ++ ++static gboolean ++gst_vp9_enc_configure_encoder (GstVPXEnc * encoder, GstVideoCodecState * state) ++{ ++ GstVP9Enc *vp9enc = GST_VP9_ENC (encoder); ++ GstVideoInfo *info = &state->info; ++ vpx_codec_err_t status; ++ ++ status = vpx_codec_control (&encoder->encoder, VP9E_SET_COLOR_SPACE, ++ gst_vp9_get_vpx_colorspace (encoder, &GST_VIDEO_INFO_COLORIMETRY (info), ++ GST_VIDEO_INFO_FORMAT (info))); ++ if (status != VPX_CODEC_OK) { ++ GST_WARNING_OBJECT (encoder, ++ "Failed to set VP9E_SET_COLOR_SPACE: %s", gst_vpx_error_name (status)); ++ } ++ ++ status = vpx_codec_control (&encoder->encoder, VP9E_SET_COLOR_RANGE, ++ gst_vp9_get_vpx_color_range (&GST_VIDEO_INFO_COLORIMETRY (info))); ++ if (status != VPX_CODEC_OK) { ++ GST_WARNING_OBJECT (encoder, ++ "Failed to set VP9E_SET_COLOR_RANGE: %s", gst_vpx_error_name (status)); ++ } ++ ++ status = ++ vpx_codec_control (&encoder->encoder, VP9E_SET_TILE_COLUMNS, ++ vp9enc->tile_columns); ++ if (status != VPX_CODEC_OK) { ++ GST_DEBUG_OBJECT (encoder, "Failed to set VP9E_SET_TILE_COLUMNS: %s", ++ gst_vpx_error_name (status)); ++ } ++ ++ status = ++ vpx_codec_control (&encoder->encoder, VP9E_SET_TILE_ROWS, ++ vp9enc->tile_rows); ++ if (status != VPX_CODEC_OK) { ++ GST_DEBUG_OBJECT (encoder, "Failed to set VP9E_SET_TILE_ROWS: %s", ++ gst_vpx_error_name (status)); ++ } ++ status = ++ vpx_codec_control (&encoder->encoder, VP9E_SET_ROW_MT, ++ vp9enc->row_mt ? 1 : 0); ++ if (status != VPX_CODEC_OK) { ++ GST_DEBUG_OBJECT (encoder, ++ "Failed to set VP9E_SET_ROW_MT: %s", gst_vpx_error_name (status)); ++ } ++ status = ++ vpx_codec_control (&encoder->encoder, VP9E_SET_AQ_MODE, vp9enc->aq_mode); ++ if (status != VPX_CODEC_OK) { ++ GST_WARNING_OBJECT (encoder, ++ "Failed to set VP9E_SET_AQ_MODE: %s", gst_vpx_error_name (status)); ++ } ++ status = ++ vpx_codec_control (&encoder->encoder, VP9E_SET_FRAME_PARALLEL_DECODING, ++ vp9enc->frame_parallel_decoding ? 
1 : 0); ++ if (status != VPX_CODEC_OK) { ++ GST_WARNING_OBJECT (encoder, ++ "Failed to set VP9E_SET_FRAME_PARALLEL_DECODING: %s", ++ gst_vpx_error_name (status)); ++ } ++ ++ return TRUE; + } + + static vpx_codec_iface_t * +@@ -174,24 +536,66 @@ gst_vp9_enc_set_image_format (GstVPXEnc * enc, vpx_image_t * image) + { + switch (enc->input_state->info.finfo->format) { + case GST_VIDEO_FORMAT_I420: +- image->fmt = VPX_IMG_FMT_I420; ++ image->fmt = (vpx_img_fmt_t) GST_VPX_IMG_FMT_I420; + image->bps = 12; ++ image->bit_depth = 8; + image->x_chroma_shift = image->y_chroma_shift = 1; + break; + case GST_VIDEO_FORMAT_YV12: +- image->fmt = VPX_IMG_FMT_YV12; ++ image->fmt = (vpx_img_fmt_t) GST_VPX_IMG_FMT_YV12; + image->bps = 12; ++ image->bit_depth = 8; + image->x_chroma_shift = image->y_chroma_shift = 1; + break; + case GST_VIDEO_FORMAT_Y42B: +- image->fmt = VPX_IMG_FMT_I422; ++ image->fmt = (vpx_img_fmt_t) GST_VPX_IMG_FMT_I422; + image->bps = 16; ++ image->bit_depth = 8; + image->x_chroma_shift = 1; + image->y_chroma_shift = 0; + break; + case GST_VIDEO_FORMAT_Y444: +- image->fmt = VPX_IMG_FMT_I444; ++ image->fmt = (vpx_img_fmt_t) GST_VPX_IMG_FMT_I444; ++ image->bps = 24; ++ image->bit_depth = 8; ++ image->x_chroma_shift = image->y_chroma_shift = 0; ++ break; ++ case GST_VIDEO_FORMAT_I420_10LE: ++ image->fmt = (vpx_img_fmt_t) GST_VPX_IMG_FMT_I42016; ++ image->bps = 15; ++ image->bit_depth = 10; ++ image->x_chroma_shift = image->y_chroma_shift = 1; ++ break; ++ case GST_VIDEO_FORMAT_I420_12LE: ++ image->fmt = (vpx_img_fmt_t) GST_VPX_IMG_FMT_I42016; ++ image->bps = 18; ++ image->bit_depth = 12; ++ image->x_chroma_shift = image->y_chroma_shift = 1; ++ break; ++ case GST_VIDEO_FORMAT_I422_10LE: ++ image->fmt = (vpx_img_fmt_t) GST_VPX_IMG_FMT_I42216; ++ image->bps = 20; ++ image->bit_depth = 10; ++ image->x_chroma_shift = 1; ++ image->y_chroma_shift = 0; ++ break; ++ case GST_VIDEO_FORMAT_I422_12LE: ++ image->fmt = (vpx_img_fmt_t) GST_VPX_IMG_FMT_I42216; + image->bps = 24; ++ image->bit_depth = 12; ++ image->x_chroma_shift = 1; ++ image->y_chroma_shift = 0; ++ break; ++ case GST_VIDEO_FORMAT_Y444_10LE: ++ image->fmt = (vpx_img_fmt_t) GST_VPX_IMG_FMT_I44416; ++ image->bps = 30; ++ image->bit_depth = 10; ++ image->x_chroma_shift = image->y_chroma_shift = 0; ++ break; ++ case GST_VIDEO_FORMAT_Y444_12LE: ++ image->fmt = (vpx_img_fmt_t) GST_VPX_IMG_FMT_I44416; ++ image->bps = 36; ++ image->bit_depth = 12; + image->x_chroma_shift = image->y_chroma_shift = 0; + break; + default: +diff --git a/ext/vpx/gstvp9enc.h b/ext/vpx/gstvp9enc.h +index a61a2f91a..03d31d1bd 100644 +--- a/ext/vpx/gstvp9enc.h ++++ b/ext/vpx/gstvp9enc.h +@@ -43,7 +43,15 @@ G_DECLARE_FINAL_TYPE (GstVP9Enc, gst_vp9_enc, GST, VP9_ENC, GstVPXEnc) + + struct _GstVP9Enc + { +- GstVPXEnc base_vpx_encoder; ++ GstVPXEnc base_vpx_encoder; ++ ++ guint tile_columns; ++ guint tile_rows; ++#ifdef VPX_CTRL_VP9E_SET_ROW_MT ++ gboolean row_mt; ++#endif ++ GstVPXAQ aq_mode; ++ gboolean frame_parallel_decoding; + }; + + G_END_DECLS +diff --git a/ext/vpx/gstvpxcompat.h b/ext/vpx/gstvpxcompat.h +new file mode 100644 +index 000000000..1919791a1 +--- /dev/null ++++ b/ext/vpx/gstvpxcompat.h +@@ -0,0 +1,47 @@ ++/* ++ * GStreamer ++ * Copyright (C) 2022 Seungha Yang ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. 
++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#pragma once ++ ++#include ++ ++G_BEGIN_DECLS ++ ++#define GST_VPX_IMG_FMT_PLANAR 0x100 ++#define GST_VPX_IMG_FMT_UV_FLIP 0x200 ++#define GST_VPX_IMG_FMT_HIGHBITDEPTH 0x800 ++ ++/* vpx_img_fmt with GST_ prefix */ ++typedef enum gst_vpx_img_fmt ++{ ++ GST_VPX_IMG_FMT_NONE, ++ GST_VPX_IMG_FMT_YV12 = GST_VPX_IMG_FMT_PLANAR | GST_VPX_IMG_FMT_UV_FLIP | 1, ++ GST_VPX_IMG_FMT_I420 = GST_VPX_IMG_FMT_PLANAR | 2, ++ GST_VPX_IMG_FMT_I422 = GST_VPX_IMG_FMT_PLANAR | 5, ++ GST_VPX_IMG_FMT_I444 = GST_VPX_IMG_FMT_PLANAR | 6, ++ GST_VPX_IMG_FMT_I440 = GST_VPX_IMG_FMT_PLANAR | 7, ++ GST_VPX_IMG_FMT_NV12 = GST_VPX_IMG_FMT_PLANAR | 9, ++ GST_VPX_IMG_FMT_I42016 = GST_VPX_IMG_FMT_I420 | GST_VPX_IMG_FMT_HIGHBITDEPTH, ++ GST_VPX_IMG_FMT_I42216 = GST_VPX_IMG_FMT_I422 | GST_VPX_IMG_FMT_HIGHBITDEPTH, ++ GST_VPX_IMG_FMT_I44416 = GST_VPX_IMG_FMT_I444 | GST_VPX_IMG_FMT_HIGHBITDEPTH, ++ GST_VPX_IMG_FMT_I44016 = GST_VPX_IMG_FMT_I440 | GST_VPX_IMG_FMT_HIGHBITDEPTH ++} gst_vpx_img_fmt_t; ++ ++G_END_DECLS +diff --git a/ext/vpx/gstvpxdec.c b/ext/vpx/gstvpxdec.c +index 74cc3c970..091207d95 100644 +--- a/ext/vpx/gstvpxdec.c ++++ b/ext/vpx/gstvpxdec.c +@@ -197,6 +197,7 @@ static void + gst_vpx_dec_init (GstVPXDec * gst_vpx_dec) + { + GstVideoDecoder *decoder = (GstVideoDecoder *) gst_vpx_dec; ++ GstVPXDecClass *vpxclass = GST_VPX_DEC_GET_CLASS (gst_vpx_dec); + + GST_DEBUG_OBJECT (gst_vpx_dec, "gst_vpx_dec_init"); + gst_video_decoder_set_packetized (decoder, TRUE); +@@ -205,6 +206,11 @@ gst_vpx_dec_init (GstVPXDec * gst_vpx_dec) + gst_vpx_dec->deblocking_level = DEFAULT_DEBLOCKING_LEVEL; + gst_vpx_dec->noise_level = DEFAULT_NOISE_LEVEL; + ++ if (vpxclass->get_needs_sync_point) { ++ gst_video_decoder_set_needs_sync_point (GST_VIDEO_DECODER (gst_vpx_dec), ++ vpxclass->get_needs_sync_point (gst_vpx_dec)); ++ } ++ + gst_video_decoder_set_needs_format (decoder, TRUE); + gst_video_decoder_set_use_default_pad_acceptcaps (decoder, TRUE); + GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (decoder)); +@@ -280,6 +286,7 @@ gst_vpx_dec_start (GstVideoDecoder * decoder) + + GST_DEBUG_OBJECT (gst_vpx_dec, "start"); + gst_vpx_dec->decoder_inited = FALSE; ++ gst_vpx_dec->safe_remap = FALSE; + + return TRUE; + } +@@ -395,6 +402,15 @@ gst_vpx_dec_prepare_image (GstVPXDec * dec, const vpx_image_t * img) + + buffer = gst_buffer_ref (frame->buffer); + ++ /* FIXME: an atomic remap would be preferable, for now we simply ++ * remap the buffer from RW to RO when using a sysmem allocator, ++ * in order to avoid a useless memcpy in GstVideoDecoder. 
++ */ ++ if (dec->safe_remap) { ++ gst_buffer_unmap (buffer, &frame->info); ++ gst_buffer_map (buffer, &frame->info, GST_MAP_READ); ++ } ++ + vmeta = gst_buffer_get_video_meta (buffer); + vmeta->format = GST_VIDEO_INFO_FORMAT (info); + vmeta->width = GST_VIDEO_INFO_WIDTH (info); +@@ -444,6 +460,9 @@ gst_vpx_dec_get_buffer_cb (gpointer priv, gsize min_size, + allocator = NULL; + } + ++ dec->safe_remap = (allocator == NULL ++ || !g_strcmp0 (allocator->mem_type, GST_ALLOCATOR_SYSMEM)); ++ + pool = gst_buffer_pool_new (); + config = gst_buffer_pool_get_config (pool); + gst_buffer_pool_config_set_allocator (config, allocator, ¶ms); +@@ -581,14 +600,11 @@ gst_vpx_dec_open_codec (GstVPXDec * dec, GstVideoCodecFrame * frame) + gst_buffer_unmap (frame->input_buffer, &minfo); + + if (status != VPX_CODEC_OK) { +- GST_WARNING_OBJECT (dec, "VPX preprocessing error: %s", ++ GST_INFO_OBJECT (dec, "VPX preprocessing error: %s", + gst_vpx_error_name (status)); + return GST_FLOW_CUSTOM_SUCCESS_1; + } +- if (!stream_info.is_kf) { +- GST_WARNING_OBJECT (dec, "No keyframe, skipping"); +- return GST_FLOW_CUSTOM_SUCCESS_1; +- } ++ + if (stream_info.w == 0 || stream_info.h == 0) { + /* For VP8 it's possible to signal width or height to be 0, but it does + * not make sense to do so. For VP9 it's impossible. Hence, we most likely +@@ -672,6 +688,12 @@ gst_vpx_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame) + if (!dec->decoder_inited) { + ret = vpxclass->open_codec (dec, frame); + if (ret == GST_FLOW_CUSTOM_SUCCESS_1) { ++ GstVideoDecoderRequestSyncPointFlags flags = 0; ++ ++ if (gst_video_decoder_get_needs_sync_point (decoder)) ++ flags |= GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT; ++ ++ gst_video_decoder_request_sync_point (decoder, frame, flags); + gst_video_decoder_drop_frame (decoder, frame); + return GST_FLOW_OK; + } else if (ret != GST_FLOW_OK) { +@@ -701,8 +723,15 @@ gst_vpx_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame) + gst_buffer_unmap (frame->input_buffer, &minfo); + + if (status) { +- GST_VIDEO_DECODER_ERROR (decoder, 1, LIBRARY, ENCODE, ++ GstVideoDecoderRequestSyncPointFlags flags = 0; ++ ++ GST_VIDEO_DECODER_ERROR (decoder, 1, STREAM, DECODE, + ("Failed to decode frame"), ("%s", gst_vpx_error_name (status)), ret); ++ ++ if (gst_video_decoder_get_needs_sync_point (decoder)) ++ flags |= GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT; ++ ++ gst_video_decoder_request_sync_point (decoder, frame, flags); + gst_video_codec_frame_unref (frame); + return ret; + } +@@ -711,7 +740,7 @@ gst_vpx_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame) + if (img) { + if (vpxclass->get_frame_format (dec, img, &fmt) == FALSE) { + vpx_img_free (img); +- GST_ELEMENT_ERROR (decoder, LIBRARY, ENCODE, ++ GST_ELEMENT_ERROR (decoder, STREAM, DECODE, + ("Failed to decode frame"), ("Unsupported color format %d", + img->fmt)); + gst_video_codec_frame_unref (frame); +diff --git a/ext/vpx/gstvpxdec.h b/ext/vpx/gstvpxdec.h +index 36b3c272e..e48f4e714 100644 +--- a/ext/vpx/gstvpxdec.h ++++ b/ext/vpx/gstvpxdec.h +@@ -31,6 +31,7 @@ + + #include + #include ++#include "gstvpxcompat.h" + + /* FIXME: Undef HAVE_CONFIG_H because vpx_codec.h uses it, + * which causes compilation failures */ +@@ -82,6 +83,7 @@ struct _GstVPXDec + gboolean have_video_meta; + GstBufferPool *pool; + gsize buf_size; ++ gboolean safe_remap; + }; + + struct _GstVPXDecClass +@@ -102,6 +104,11 @@ struct _GstVPXDecClass + void (*handle_resolution_change) (GstVPXDec *dec, vpx_image_t 
*img, GstVideoFormat fmt); + /*virtual function to check valid format*/ + gboolean (*get_frame_format)(GstVPXDec *dec, vpx_image_t *img, GstVideoFormat* fmt); ++ /* virtual function to check whether the decoder can handle data ++ * before receiving a sync_point, either at the start of after a ++ * decoding error ++ */ ++ gboolean (*get_needs_sync_point)(GstVPXDec *dec); + }; + + GType gst_vpx_dec_get_type (void); +diff --git a/ext/vpx/gstvpxelement.c b/ext/vpx/gstvpxelement.c +new file mode 100644 +index 000000000..2550283bf +--- /dev/null ++++ b/ext/vpx/gstvpxelement.c +@@ -0,0 +1,39 @@ ++/* VPX ++ * Copyright (C) 2006 David Schleef ++ * Copyright (C) 2010 Entropy Wave Inc ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ * ++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include ++#endif ++ ++#include "gstvpxelements.h" ++ ++#include ++ ++void ++vpx_element_init (GstPlugin * plugin) ++{ ++ static gsize res = FALSE; ++ static const gchar *tags[] = { NULL }; ++ if (g_once_init_enter (&res)) { ++ gst_meta_register_custom ("GstVP8Meta", tags, NULL, NULL, NULL); ++ g_once_init_leave (&res, TRUE); ++ } ++} +diff --git a/ext/vpx/gstvpxelements.h b/ext/vpx/gstvpxelements.h +new file mode 100644 +index 000000000..b2c1f88f3 +--- /dev/null ++++ b/ext/vpx/gstvpxelements.h +@@ -0,0 +1,36 @@ ++/* ++ * Copyright (C) 2020 Huawei Technologies Co., Ltd. ++ * @Author: Julian Bouzas ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the Free ++ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA. 
++ */ ++ ++#ifndef __GST_VPX_ELEMENTS_H__ ++#define __GST_VPX_ELEMENTS_H__ ++ ++#include ++ ++G_BEGIN_DECLS ++ ++void vpx_element_init (GstPlugin * plugin); ++ ++GST_ELEMENT_REGISTER_DECLARE (vp8dec); ++GST_ELEMENT_REGISTER_DECLARE (vp8enc); ++GST_ELEMENT_REGISTER_DECLARE (vp9dec); ++GST_ELEMENT_REGISTER_DECLARE (vp9enc); ++ ++G_END_DECLS ++ ++#endif /* __GST_VPX_ELEMENTS_H__ */ +diff --git a/ext/vpx/gstvpxenc.c b/ext/vpx/gstvpxenc.c +index 4b3b4f770..612a7a59f 100644 +--- a/ext/vpx/gstvpxenc.c ++++ b/ext/vpx/gstvpxenc.c +@@ -75,6 +75,8 @@ GST_DEBUG_CATEGORY_STATIC (gst_vpxenc_debug); + #define DEFAULT_TS_RATE_DECIMATOR NULL + #define DEFAULT_TS_PERIODICITY 0 + #define DEFAULT_TS_LAYER_ID NULL ++#define DEFAULT_TS_LAYER_FLAGS NULL ++#define DEFAULT_TS_LAYER_SYNC_FLAGS NULL + + #define DEFAULT_ERROR_RESILIENT 0 + #define DEFAULT_LAG_IN_FRAMES 0 +@@ -85,7 +87,7 @@ GST_DEBUG_CATEGORY_STATIC (gst_vpxenc_debug); + #define DEFAULT_V_SCALING_MODE VP8E_NORMAL + #define DEFAULT_CPU_USED 0 + #define DEFAULT_ENABLE_AUTO_ALT_REF FALSE +-#define DEFAULT_DEADLINE VPX_DL_BEST_QUALITY ++#define DEFAULT_DEADLINE VPX_DL_GOOD_QUALITY + #define DEFAULT_NOISE_SENSITIVITY 0 + #define DEFAULT_SHARPNESS 0 + +@@ -130,6 +132,8 @@ enum + PROP_TS_RATE_DECIMATOR, + PROP_TS_PERIODICITY, + PROP_TS_LAYER_ID, ++ PROP_TS_LAYER_FLAGS, ++ PROP_TS_LAYER_SYNC_FLAGS, + PROP_MULTIPASS_MODE, + PROP_MULTIPASS_CACHE_FILE, + PROP_ERROR_RESILIENT, +@@ -316,6 +320,35 @@ gst_vpx_enc_er_flags_get_type (void) + return id; + } + ++#define GST_VPX_ENC_TS_LAYER_FLAGS_TYPE (gst_vpx_enc_ts_layer_flags_get_type()) ++static GType ++gst_vpx_enc_ts_layer_flags_get_type (void) ++{ ++ static const GFlagsValue values[] = { ++ {VP8_EFLAG_NO_REF_LAST, "Don't reference the last frame", "no-ref-last"}, ++ {VP8_EFLAG_NO_REF_GF, "Don't reference the golden frame", "no-ref-golden"}, ++ {VP8_EFLAG_NO_REF_ARF, "Don't reference the alternate reference frame", ++ "no-ref-alt"}, ++ {VP8_EFLAG_NO_UPD_LAST, "Don't update the last frame", "no-upd-last"}, ++ {VP8_EFLAG_NO_UPD_GF, "Don't update the golden frame", "no-upd-golden"}, ++ {VP8_EFLAG_NO_UPD_ARF, "Don't update the alternate reference frame", ++ "no-upd-alt"}, ++ {VP8_EFLAG_NO_UPD_ENTROPY, "Disable entropy update", "no-upd-entropy"}, ++ {0, NULL, NULL} ++ }; ++ static GType id = 0; ++ ++ if (g_once_init_enter ((gsize *) & id)) { ++ GType _id; ++ ++ _id = g_flags_register_static ("GstVPXEncTsLayerFlags", values); ++ ++ g_once_init_leave ((gsize *) & id, _id); ++ } ++ ++ return id; ++} ++ + static void gst_vpx_enc_finalize (GObject * object); + static void gst_vpx_enc_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); +@@ -335,12 +368,13 @@ static gboolean gst_vpx_enc_sink_event (GstVideoEncoder * + video_encoder, GstEvent * event); + static gboolean gst_vpx_enc_propose_allocation (GstVideoEncoder * encoder, + GstQuery * query); ++static gboolean gst_vpx_enc_transform_meta (GstVideoEncoder * encoder, ++ GstVideoCodecFrame * frame, GstMeta * meta); + + #define parent_class gst_vpx_enc_parent_class + G_DEFINE_TYPE_WITH_CODE (GstVPXEnc, gst_vpx_enc, GST_TYPE_VIDEO_ENCODER, + G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL); +- G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL); +- ); ++ G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL);); + + static void + gst_vpx_enc_class_init (GstVPXEncClass * klass) +@@ -363,6 +397,7 @@ gst_vpx_enc_class_init (GstVPXEncClass * klass) + video_encoder_class->finish = gst_vpx_enc_finish; + video_encoder_class->sink_event = 
gst_vpx_enc_sink_event; + video_encoder_class->propose_allocation = gst_vpx_enc_propose_allocation; ++ video_encoder_class->transform_meta = gst_vpx_enc_transform_meta; + + g_object_class_install_property (gobject_class, PROP_RC_END_USAGE, + g_param_spec_enum ("end-usage", "Rate control mode", +@@ -518,7 +553,7 @@ gst_vpx_enc_class_init (GstVPXEncClass * klass) + g_object_class_install_property (gobject_class, PROP_TS_TARGET_BITRATE, + g_param_spec_value_array ("temporal-scalability-target-bitrate", + "Coding layer target bitrates", +- "Target bitrates for coding layers (one per layer, decreasing)", ++ "Target bitrates (bits/sec) for coding layers (one per layer)", + g_param_spec_int ("target-bitrate", "Target bitrate", + "Target bitrate", 0, G_MAXINT, DEFAULT_RC_TARGET_BITRATE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | +@@ -555,6 +590,36 @@ gst_vpx_enc_class_init (GstVPXEncClass * klass) + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_DOC_SHOW_DEFAULT)); + ++ /** ++ * GstVPXEnc:temporal-scalability-layer-flags: ++ * ++ * Sequence defining coding layer flags ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_TS_LAYER_FLAGS, ++ gst_param_spec_array ("temporal-scalability-layer-flags", ++ "Coding layer flags", "Sequence defining coding layer flags", ++ g_param_spec_flags ("flags", "Flags", "Flags", ++ GST_VPX_ENC_TS_LAYER_FLAGS_TYPE, 0, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS), ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstVPXEnc:temporal-scalability-layer-sync-flags: ++ * ++ * Sequence defining coding layer sync flags ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_TS_LAYER_SYNC_FLAGS, ++ gst_param_spec_array ("temporal-scalability-layer-sync-flags", ++ "Coding layer sync flags", ++ "Sequence defining coding layer sync flags", ++ g_param_spec_boolean ("flags", "Flags", "Flags", FALSE, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS), ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ + g_object_class_install_property (gobject_class, PROP_LAG_IN_FRAMES, + g_param_spec_int ("lag-in-frames", "Lag in frames", + "Maximum number of frames to lag", +@@ -578,7 +643,7 @@ gst_vpx_enc_class_init (GstVPXEncClass * klass) + + g_object_class_install_property (gobject_class, PROP_DEADLINE, + g_param_spec_int64 ("deadline", "Deadline", +- "Deadline per frame (usec, 0=disabled)", ++ "Deadline per frame (usec, 0=best, 1=realtime)", + 0, G_MAXINT64, DEFAULT_DEADLINE, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_DOC_SHOW_DEFAULT))); +@@ -744,6 +809,10 @@ gst_vpx_enc_init (GstVPXEnc * gst_vpx_enc) + gst_vpx_enc->n_ts_rate_decimator = 0; + gst_vpx_enc->cfg.ts_periodicity = DEFAULT_TS_PERIODICITY; + gst_vpx_enc->n_ts_layer_id = 0; ++ gst_vpx_enc->n_ts_layer_flags = 0; ++ gst_vpx_enc->ts_layer_flags = NULL; ++ gst_vpx_enc->n_ts_layer_sync_flags = 0; ++ gst_vpx_enc->ts_layer_sync_flags = NULL; + gst_vpx_enc->cfg.g_error_resilient = DEFAULT_ERROR_RESILIENT; + gst_vpx_enc->cfg.g_lag_in_frames = DEFAULT_LAG_IN_FRAMES; + gst_vpx_enc->cfg.g_threads = DEFAULT_THREADS; +@@ -765,6 +834,8 @@ gst_vpx_enc_init (GstVPXEnc * gst_vpx_enc) + gst_vpx_enc->timebase_n = DEFAULT_TIMEBASE_N; + gst_vpx_enc->timebase_d = DEFAULT_TIMEBASE_D; + gst_vpx_enc->bits_per_pixel = DEFAULT_BITS_PER_PIXEL; ++ gst_vpx_enc->tl0picidx = 0; ++ gst_vpx_enc->prev_was_keyframe = FALSE; + + gst_vpx_enc->cfg.g_profile = DEFAULT_PROFILE; + +@@ -781,6 +852,9 @@ gst_vpx_enc_finalize (GObject * object) + g_return_if_fail 
(GST_IS_VPX_ENC (object)); + gst_vpx_enc = GST_VPX_ENC (object); + ++ g_free (gst_vpx_enc->ts_layer_flags); ++ g_free (gst_vpx_enc->ts_layer_sync_flags); ++ + g_free (gst_vpx_enc->multipass_cache_prefix); + g_free (gst_vpx_enc->multipass_cache_file); + gst_vpx_enc->multipass_cache_idx = 0; +@@ -950,7 +1024,7 @@ gst_vpx_enc_set_property (GObject * object, guint prop_id, + + for (i = 0; i < va->n_values; i++) + gst_vpx_enc->cfg.ts_target_bitrate[i] = +- g_value_get_int (g_value_array_get_nth (va, i)); ++ g_value_get_int (g_value_array_get_nth (va, i)) / 1000; + gst_vpx_enc->n_ts_target_bitrate = va->n_values; + } + global = TRUE; +@@ -1002,6 +1076,45 @@ gst_vpx_enc_set_property (GObject * object, guint prop_id, + global = TRUE; + break; + } ++ case PROP_TS_LAYER_FLAGS:{ ++ gint l = gst_value_array_get_size (value); ++ ++ g_free (gst_vpx_enc->ts_layer_flags); ++ gst_vpx_enc->n_ts_layer_flags = 0; ++ ++ if (l > 0) { ++ gint i; ++ ++ gst_vpx_enc->ts_layer_flags = g_new (gint, l); ++ ++ for (i = 0; i < l; i++) ++ gst_vpx_enc->ts_layer_flags[i] = ++ g_value_get_flags (gst_value_array_get_value (value, i)); ++ gst_vpx_enc->n_ts_layer_flags = l; ++ } else { ++ gst_vpx_enc->ts_layer_flags = NULL; ++ } ++ break; ++ } ++ case PROP_TS_LAYER_SYNC_FLAGS:{ ++ gint l = gst_value_array_get_size (value); ++ ++ g_free (gst_vpx_enc->ts_layer_sync_flags); ++ gst_vpx_enc->n_ts_layer_sync_flags = 0; ++ ++ if (l > 0) { ++ gint i; ++ ++ gst_vpx_enc->ts_layer_sync_flags = g_new (gboolean, l); ++ for (i = 0; i < l; i++) ++ gst_vpx_enc->ts_layer_sync_flags[i] = ++ g_value_get_boolean (gst_value_array_get_value (value, i)); ++ gst_vpx_enc->n_ts_layer_sync_flags = l; ++ } else { ++ gst_vpx_enc->ts_layer_sync_flags = NULL; ++ } ++ break; ++ } + case PROP_ERROR_RESILIENT: + gst_vpx_enc->cfg.g_error_resilient = g_value_get_flags (value); + global = TRUE; +@@ -1312,7 +1425,7 @@ gst_vpx_enc_get_property (GObject * object, guint prop_id, GValue * value, + GValue v = { 0, }; + + g_value_init (&v, G_TYPE_INT); +- g_value_set_int (&v, gst_vpx_enc->cfg.ts_target_bitrate[i]); ++ g_value_set_int (&v, gst_vpx_enc->cfg.ts_target_bitrate[i] * 1000); + g_value_array_append (va, &v); + g_value_unset (&v); + } +@@ -1368,6 +1481,32 @@ gst_vpx_enc_get_property (GObject * object, guint prop_id, GValue * value, + } + break; + } ++ case PROP_TS_LAYER_FLAGS:{ ++ gint i; ++ ++ for (i = 0; i < gst_vpx_enc->n_ts_layer_flags; i++) { ++ GValue v = { 0, }; ++ ++ g_value_init (&v, GST_VPX_ENC_TS_LAYER_FLAGS_TYPE); ++ g_value_set_flags (&v, gst_vpx_enc->ts_layer_flags[i]); ++ gst_value_array_append_value (value, &v); ++ g_value_unset (&v); ++ } ++ break; ++ } ++ case PROP_TS_LAYER_SYNC_FLAGS:{ ++ gint i; ++ ++ for (i = 0; i < gst_vpx_enc->n_ts_layer_sync_flags; i++) { ++ GValue v = { 0, }; ++ ++ g_value_init (&v, G_TYPE_BOOLEAN); ++ g_value_set_boolean (&v, gst_vpx_enc->ts_layer_sync_flags[i]); ++ gst_value_array_append_value (value, &v); ++ g_value_unset (&v); ++ } ++ break; ++ } + case PROP_ERROR_RESILIENT: + g_value_set_flags (value, gst_vpx_enc->cfg.g_error_resilient); + break; +@@ -1472,6 +1611,10 @@ gst_vpx_enc_destroy_encoder (GstVPXEnc * encoder) + encoder->cfg.rc_twopass_stats_in.buf = NULL; + encoder->cfg.rc_twopass_stats_in.sz = 0; + } ++ ++ encoder->last_pts = GST_CLOCK_TIME_NONE; ++ encoder->last_input_duration = GST_CLOCK_TIME_NONE; ++ + g_mutex_unlock (&encoder->encoder_lock); + } + +@@ -1495,12 +1638,46 @@ gst_vpx_enc_stop (GstVideoEncoder * video_encoder) + return TRUE; + } + ++#define INVALID_PROFILE -1 ++ + static gint 
+-gst_vpx_enc_get_downstream_profile (GstVPXEnc * encoder) ++gst_vpx_gvalue_to_profile (const GValue * v) ++{ ++ gchar *endptr = NULL; ++ gint profile = g_ascii_strtoull (g_value_get_string (v), &endptr, 10); ++ ++ if (*endptr != '\0') { ++ profile = INVALID_PROFILE; ++ } ++ ++ return profile; ++} ++ ++static gint ++gst_vpx_enc_get_downstream_profile (GstVPXEnc * encoder, GstVideoInfo * info) + { + GstCaps *allowed; + GstStructure *s; +- gint profile = DEFAULT_PROFILE; ++ gint min_profile; ++ gint profile = INVALID_PROFILE; ++ ++ switch (GST_VIDEO_INFO_FORMAT (info)) { ++ case GST_VIDEO_FORMAT_Y444: ++ min_profile = 1; ++ break; ++ case GST_VIDEO_FORMAT_I420_10LE: ++ case GST_VIDEO_FORMAT_I420_12LE: ++ min_profile = 2; ++ break; ++ case GST_VIDEO_FORMAT_I422_10LE: ++ case GST_VIDEO_FORMAT_I422_12LE: ++ case GST_VIDEO_FORMAT_Y444_10LE: ++ case GST_VIDEO_FORMAT_Y444_12LE: ++ min_profile = 3; ++ break; ++ default: ++ min_profile = 0; ++ } + + allowed = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder)); + if (allowed) { +@@ -1508,22 +1685,29 @@ gst_vpx_enc_get_downstream_profile (GstVPXEnc * encoder) + s = gst_caps_get_structure (allowed, 0); + if (gst_structure_has_field (s, "profile")) { + const GValue *v = gst_structure_get_value (s, "profile"); +- const gchar *profile_str = NULL; + +- if (GST_VALUE_HOLDS_LIST (v) && gst_value_list_get_size (v) > 0) { +- profile_str = g_value_get_string (gst_value_list_get_value (v, 0)); ++ if (GST_VALUE_HOLDS_LIST (v)) { ++ gint i; ++ ++ for (i = 0; i != gst_value_list_get_size (v); ++i) { ++ gint p = gst_vpx_gvalue_to_profile (gst_value_list_get_value (v, i)); ++ if (p >= min_profile) { ++ profile = p; ++ break; ++ } ++ } + } else if (G_VALUE_HOLDS_STRING (v)) { +- profile_str = g_value_get_string (v); ++ profile = gst_vpx_gvalue_to_profile (v); + } + +- if (profile_str) { +- gchar *endptr = NULL; ++ if (profile < min_profile || profile > 3) { ++ profile = INVALID_PROFILE; ++ } + +- profile = g_ascii_strtoull (profile_str, &endptr, 10); +- if (*endptr != '\0' || profile < 0 || profile > 3) { +- GST_ERROR_OBJECT (encoder, "Invalid profile '%s'", profile_str); +- profile = DEFAULT_PROFILE; +- } ++ if (profile > 1 && info->finfo->bits == 8) { ++ GST_DEBUG_OBJECT (encoder, ++ "Codec bit-depth 8 not supported in profile > 1"); ++ profile = INVALID_PROFILE; + } + } + gst_caps_unref (allowed); +@@ -1541,6 +1725,7 @@ gst_vpx_enc_set_format (GstVideoEncoder * video_encoder, + GstVPXEnc *encoder; + vpx_codec_err_t status; + vpx_image_t *image; ++ vpx_codec_flags_t flags = 0; + GstCaps *caps; + gboolean ret = TRUE; + GstVideoInfo *info = &state->info; +@@ -1558,11 +1743,25 @@ gst_vpx_enc_set_format (GstVideoEncoder * video_encoder, + vpx_codec_destroy (&encoder->encoder); + encoder->inited = FALSE; + encoder->multipass_cache_idx++; ++ encoder->last_pts = GST_CLOCK_TIME_NONE; ++ encoder->last_input_duration = GST_CLOCK_TIME_NONE; + } else { + g_mutex_lock (&encoder->encoder_lock); + } + +- encoder->cfg.g_profile = gst_vpx_enc_get_downstream_profile (encoder); ++ encoder->cfg.g_bit_depth = encoder->cfg.g_input_bit_depth = info->finfo->bits; ++ if (encoder->cfg.g_bit_depth > 8) { ++ flags |= VPX_CODEC_USE_HIGHBITDEPTH; ++ } ++ ++ encoder->cfg.g_profile = gst_vpx_enc_get_downstream_profile (encoder, info); ++ if (encoder->cfg.g_profile == INVALID_PROFILE) { ++ GST_ELEMENT_ERROR (encoder, RESOURCE, OPEN_READ, ++ ("Invalid vpx profile"), (NULL)); ++ g_mutex_unlock (&encoder->encoder_lock); ++ return FALSE; ++ } ++ + encoder->cfg.g_w = GST_VIDEO_INFO_WIDTH 
(info); + encoder->cfg.g_h = GST_VIDEO_INFO_HEIGHT (info); + +@@ -1628,7 +1827,7 @@ gst_vpx_enc_set_format (GstVideoEncoder * video_encoder, + + status = + vpx_codec_enc_init (&encoder->encoder, vpx_enc_class->get_algo (encoder), +- &encoder->cfg, 0); ++ &encoder->cfg, flags); + if (status != VPX_CODEC_OK) { + GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, + ("Failed to initialize encoder"), ("%s", gst_vpx_error_name (status))); +@@ -1726,6 +1925,13 @@ gst_vpx_enc_set_format (GstVideoEncoder * video_encoder, + gst_vpx_error_name (status)); + } + ++ if (vpx_enc_class->configure_encoder ++ && !vpx_enc_class->configure_encoder (encoder, state)) { ++ ret = FALSE; ++ g_mutex_unlock (&encoder->encoder_lock); ++ goto done; ++ } ++ + if (GST_VIDEO_INFO_FPS_D (info) == 0 || GST_VIDEO_INFO_FPS_N (info) == 0) { + /* FIXME: Assume 25fps for unknown framerates. Better than reporting + * that we introduce no latency while we actually do +@@ -1773,6 +1979,7 @@ gst_vpx_enc_set_format (GstVideoEncoder * video_encoder, + + gst_video_encoder_negotiate (GST_VIDEO_ENCODER (encoder)); + ++done: + return ret; + } + +@@ -1787,6 +1994,9 @@ gst_vpx_enc_process (GstVPXEnc * encoder) + GstFlowReturn ret = GST_FLOW_OK; + GstVPXEncClass *vpx_enc_class; + vpx_codec_pts_t pts; ++ guint layer_id = 0; ++ guint8 tl0picidx = 0; ++ gboolean layer_sync = FALSE; + + video_encoder = GST_VIDEO_ENCODER (encoder); + vpx_enc_class = GST_VPX_ENC_GET_CLASS (encoder); +@@ -1830,11 +2040,24 @@ gst_vpx_enc_process (GstVPXEnc * encoder) + /* discard older frames that were dropped by libvpx */ + frame = NULL; + do { ++ GstClockTime pts_rt; ++ + if (frame) + gst_video_encoder_finish_frame (video_encoder, frame); + frame = gst_video_encoder_get_oldest_frame (video_encoder); ++ if (!frame) { ++ GST_WARNING_OBJECT (encoder, ++ "vpx pts %" G_GINT64_FORMAT ++ " does not match input frames, discarding", pkt->data.frame.pts); ++ goto out; ++ } ++ ++ pts_rt = ++ gst_segment_to_running_time (&video_encoder->input_segment, ++ GST_FORMAT_TIME, frame->pts); ++ + pts = +- gst_util_uint64_scale (frame->pts, ++ gst_util_uint64_scale (pts_rt, + encoder->cfg.g_timebase.den, + encoder->cfg.g_timebase.num * (GstClockTime) GST_SECOND); + GST_TRACE_OBJECT (encoder, "vpx pts: %" G_GINT64_FORMAT +@@ -1843,17 +2066,47 @@ gst_vpx_enc_process (GstVPXEnc * encoder) + } while (pkt->data.frame.pts > pts); + + g_assert (frame != NULL); +- if ((pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0) +- GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame); +- else +- GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame); + + /* FIXME : It would be nice to avoid the memory copy ... 
*/ +- buffer = +- gst_buffer_new_wrapped (g_memdup (pkt->data.frame.buf, +- pkt->data.frame.sz), pkt->data.frame.sz); ++ buffer = gst_buffer_new_memdup (pkt->data.frame.buf, pkt->data.frame.sz); + + user_data = vpx_enc_class->process_frame_user_data (encoder, frame); ++ if (vpx_enc_class->get_frame_temporal_settings && ++ encoder->cfg.ts_periodicity != 0) { ++ vpx_enc_class->get_frame_temporal_settings (encoder, frame, ++ &layer_id, &tl0picidx, &layer_sync); ++ } ++ ++ if (layer_id != 0 && encoder->prev_was_keyframe) { ++ /* Non-base layer frame immediately after a keyframe is a layer sync */ ++ layer_sync = TRUE; ++ } ++ ++ if ((pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0) { ++ GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame); ++ /* Key frames always live on layer 0 */ ++ layer_id = 0; ++ layer_sync = TRUE; ++ encoder->prev_was_keyframe = TRUE; ++ } else { ++ GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame); ++ encoder->prev_was_keyframe = FALSE; ++ } ++ ++ if ((pkt->data.frame.flags & VPX_FRAME_IS_DROPPABLE) != 0) ++ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DROPPABLE); ++ else ++ GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DROPPABLE); ++ ++ if (layer_id == 0) { ++ /* Allocate a new tl0picidx if this is layer 0 */ ++ tl0picidx = ++encoder->tl0picidx; ++ } ++ ++ if (vpx_enc_class->preflight_buffer) { ++ vpx_enc_class->preflight_buffer (encoder, frame, buffer, ++ layer_sync, layer_id, tl0picidx); ++ } + + if (invisible) { + ret = +@@ -1869,6 +2122,8 @@ gst_vpx_enc_process (GstVPXEnc * encoder) + + pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter); + } ++ ++out: + g_mutex_unlock (&encoder->encoder_lock); + + return ret; +@@ -1883,14 +2138,20 @@ gst_vpx_enc_drain (GstVideoEncoder * video_encoder) + vpx_codec_err_t status; + gint64 deadline; + vpx_codec_pts_t pts; ++ GstClockTime gst_pts = 0; + + encoder = GST_VPX_ENC (video_encoder); + + g_mutex_lock (&encoder->encoder_lock); + deadline = encoder->deadline; + ++ if (GST_CLOCK_TIME_IS_VALID (encoder->last_pts)) ++ gst_pts = encoder->last_pts; ++ if (GST_CLOCK_TIME_IS_VALID (encoder->last_input_duration)) ++ gst_pts += encoder->last_input_duration; ++ + pts = +- gst_util_uint64_scale (encoder->last_pts, ++ gst_util_uint64_scale (gst_pts, + encoder->cfg.g_timebase.den, + encoder->cfg.g_timebase.num * (GstClockTime) GST_SECOND); + +@@ -1988,6 +2249,7 @@ gst_vpx_enc_handle_frame (GstVideoEncoder * video_encoder, + int flags = 0; + vpx_image_t *image; + GstVideoFrame vframe; ++ GstClockTime pts_rt; + vpx_codec_pts_t pts; + unsigned long duration; + GstVPXEncClass *vpx_enc_class; +@@ -2012,11 +2274,28 @@ gst_vpx_enc_handle_frame (GstVideoEncoder * video_encoder, + } + + g_mutex_lock (&encoder->encoder_lock); ++ ++ /* the input pts needs to be strictly increasing, see vpx_codec_encode() doc, so convert it to ++ * running time as we don't want to reset the encoder for each segment. 
*/ ++ pts_rt = ++ gst_segment_to_running_time (&video_encoder->input_segment, ++ GST_FORMAT_TIME, frame->pts); ++ ++ /* vpx_codec_encode() enforces us to pass strictly increasing pts */ ++ if (GST_CLOCK_TIME_IS_VALID (encoder->last_pts) ++ && pts_rt <= encoder->last_pts) { ++ GST_WARNING_OBJECT (encoder, ++ "decreasing pts %" GST_TIME_FORMAT " previous buffer was %" ++ GST_TIME_FORMAT " enforce increasing pts", GST_TIME_ARGS (pts_rt), ++ GST_TIME_ARGS (encoder->last_pts)); ++ pts_rt = encoder->last_pts + 1; ++ } ++ + pts = +- gst_util_uint64_scale (frame->pts, ++ gst_util_uint64_scale (pts_rt, + encoder->cfg.g_timebase.den, + encoder->cfg.g_timebase.num * (GstClockTime) GST_SECOND); +- encoder->last_pts = frame->pts; ++ encoder->last_pts = pts_rt; + + if (frame->duration != GST_CLOCK_TIME_NONE) { + duration = +@@ -2024,7 +2303,7 @@ gst_vpx_enc_handle_frame (GstVideoEncoder * video_encoder, + encoder->cfg.g_timebase.num * (GstClockTime) GST_SECOND); + + if (duration > 0) { +- encoder->last_pts += frame->duration; ++ encoder->last_input_duration = frame->duration; + } else { + /* We force the path ignoring the duration if we end up with a zero + * value for duration after scaling (e.g. duration value too small) */ +@@ -2037,6 +2316,25 @@ gst_vpx_enc_handle_frame (GstVideoEncoder * video_encoder, + duration = 1; + } + ++ if (encoder->n_ts_layer_flags != 0) { ++ /* If we need a keyframe, then the pattern is irrelevant */ ++ if ((flags & VPX_EFLAG_FORCE_KF) == 0) { ++ flags |= ++ encoder->ts_layer_flags[frame->system_frame_number % ++ encoder->n_ts_layer_flags]; ++ } ++ } ++ ++ if (vpx_enc_class->apply_frame_temporal_settings && ++ encoder->cfg.ts_periodicity != 0 && ++ encoder->n_ts_layer_id >= encoder->cfg.ts_periodicity) { ++ vpx_enc_class->apply_frame_temporal_settings (encoder, frame, ++ encoder->cfg.ts_layer_id[frame->system_frame_number % ++ encoder->cfg.ts_periodicity], encoder->tl0picidx, ++ encoder->ts_layer_sync_flags[frame->system_frame_number % ++ encoder->n_ts_layer_sync_flags]); ++ } ++ + status = vpx_codec_encode (&encoder->encoder, image, + pts, duration, flags, encoder->deadline); + +@@ -2084,4 +2382,22 @@ gst_vpx_enc_propose_allocation (GstVideoEncoder * encoder, GstQuery * query) + query); + } + ++static gboolean ++gst_vpx_enc_transform_meta (GstVideoEncoder * encoder, ++ GstVideoCodecFrame * frame, GstMeta * meta) ++{ ++ const GstMetaInfo *info = meta->info; ++ gboolean ret = FALSE; ++ ++ /* Do not copy GstVP8Meta from input to output buffer */ ++ if (gst_meta_info_is_custom (info) ++ && gst_custom_meta_has_name ((GstCustomMeta *) meta, "GstVP8Meta")) ++ goto done; ++ ++ ret = TRUE; ++ ++done: ++ return ret; ++} ++ + #endif /* HAVE_VP8_ENCODER || HAVE_VP9_ENCODER */ +diff --git a/ext/vpx/gstvpxenc.h b/ext/vpx/gstvpxenc.h +index fbf5476ba..d9c40e544 100644 +--- a/ext/vpx/gstvpxenc.h ++++ b/ext/vpx/gstvpxenc.h +@@ -30,6 +30,7 @@ + + #include + #include ++#include "gstvpxcompat.h" + + /* FIXME: Undef HAVE_CONFIG_H because vpx_codec.h uses it, + * which causes compilation failures */ +@@ -61,7 +62,7 @@ struct _GstVPXEnc + { + GstVideoEncoder base_video_encoder; + +- /* < private > */ ++ /* < protected > */ + vpx_codec_ctx_t encoder; + GMutex encoder_lock; + +@@ -72,6 +73,10 @@ struct _GstVPXEnc + gint n_ts_target_bitrate; + gint n_ts_rate_decimator; + gint n_ts_layer_id; ++ gint n_ts_layer_flags; ++ gint *ts_layer_flags; ++ gint n_ts_layer_sync_flags; ++ gboolean *ts_layer_sync_flags; + /* Global two-pass options */ + gchar *multipass_cache_file; + gchar 
*multipass_cache_prefix; +@@ -105,10 +110,15 @@ struct _GstVPXEnc + + /* state */ + gboolean inited; ++ guint8 tl0picidx; ++ gboolean prev_was_keyframe; + + vpx_image_t image; + ++ /* last input pts, in running time */ + GstClockTime last_pts; ++ /* duration of the last input buffer */ ++ GstClockTime last_input_duration; + + GstVideoCodecState *input_state; + }; +@@ -120,6 +130,8 @@ struct _GstVPXEncClass + vpx_codec_iface_t* (*get_algo) (GstVPXEnc *enc); + /*enabled scaling*/ + gboolean (*enable_scaling) (GstVPXEnc *enc); ++ /*called from set_format with lock taken*/ ++ gboolean (*configure_encoder) (GstVPXEnc *enc, GstVideoCodecState *state); + /*set image format info*/ + void (*set_image_format) (GstVPXEnc *enc, vpx_image_t *image); + /*get new simple caps*/ +@@ -129,9 +141,23 @@ struct _GstVPXEncClass + /*process user data*/ + void* (*process_frame_user_data) (GstVPXEnc *enc, GstVideoCodecFrame* frame); + /*set frame user data*/ +- void (*set_frame_user_data) (GstVPXEnc *enc, GstVideoCodecFrame* frame, vpx_image_t *image); ++ void (*set_frame_user_data) (GstVPXEnc *enc, GstVideoCodecFrame* frame, ++ vpx_image_t *image); + /*Handle invisible frame*/ +- GstFlowReturn (*handle_invisible_frame_buffer) (GstVPXEnc *enc, void* user_data, GstBuffer* buffer); ++ GstFlowReturn (*handle_invisible_frame_buffer) (GstVPXEnc *enc, ++ void* user_data, GstBuffer* buffer); ++ /*apply temporal settings -- called with encoder lock*/ ++ void (*apply_frame_temporal_settings) (GstVPXEnc *enc, ++ GstVideoCodecFrame* frame, guint layer_id, guint8 tl0picidx, ++ gboolean layer_sync); ++ /*get temporal settings*/ ++ void (*get_frame_temporal_settings) (GstVPXEnc *enc, ++ GstVideoCodecFrame *frame, guint * layer_id, guint8 *tl0picidx, ++ gboolean *layer_sync); ++ /* preflight buffer */ ++ void (*preflight_buffer) (GstVPXEnc *enc, ++ GstVideoCodecFrame *frame, GstBuffer *buffer, ++ gboolean layer_sync, guint layer_id, guint8 tl0picidx); + }; + + GType gst_vpx_enc_get_type (void); +diff --git a/ext/vpx/gstvpxenums.h b/ext/vpx/gstvpxenums.h +new file mode 100644 +index 000000000..a3bf1c640 +--- /dev/null ++++ b/ext/vpx/gstvpxenums.h +@@ -0,0 +1,49 @@ ++/* GStreamer ++ * Copyright (C) 2021, Collabora Ltd. ++ * @author: Jakub Adam ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifndef __GST_VPX_ENUM_H__ ++#define __GST_VPX_ENUM_H__ ++ ++#include ++ ++G_BEGIN_DECLS ++ ++/** ++ * GstVPXAQ: ++ * ++ * VPX Adaptive Quantization modes. 
++ * ++ * Since: 1.20 ++ */ ++typedef enum ++{ ++ GST_VPX_AQ_OFF = 0, ++ GST_VPX_AQ_VARIANCE = 1, ++ GST_VPX_AQ_COMPLEXITY = 2, ++ GST_VPX_AQ_CYCLIC_REFRESH = 3, ++ GST_VPX_AQ_EQUATOR360 = 4, ++ GST_VPX_AQ_PERCEPTUAL = 5, ++ GST_VPX_AQ_PSNR = 6, ++ GST_VPX_AQ_LOOKAHEAD = 7, ++} GstVPXAQ; ++ ++G_END_DECLS ++ ++#endif // __GST_VPX_ENUM_H__ +diff --git a/ext/vpx/meson.build b/ext/vpx/meson.build +index 349915cdc..e007cafc0 100644 +--- a/ext/vpx/meson.build ++++ b/ext/vpx/meson.build +@@ -6,6 +6,7 @@ vpx_sources = [ + 'gstvp9enc.c', + 'gstvpxdec.c', + 'gstvpxenc.c', ++ 'gstvpxelement.c', + 'plugin.c', + ] + +@@ -17,7 +18,7 @@ vpx_features = [ + ] + + vpx_option = get_option('vpx') +-vpx_dep = dependency('vpx', version : '>=1.5.0', required : vpx_option) ++vpx_dep = dependency('vpx', version : '>=1.7.0', required : vpx_option) + + if vpx_dep.found() + vpx_args = [] +@@ -56,16 +57,25 @@ if vpx_dep.found() + vpx_args += '-DHAVE_VPX_1_8' + endif + ++ gnome = import('gnome') ++ ++ gstvpx_enums = gnome.mkenums_simple('gstvpx-enumtypes', ++ sources : ['gstvpxenums.h'], ++ decorator : 'G_GNUC_INTERNAL', ++ install_header: false) ++ + gstvpx = library('gstvpx', +- vpx_sources, ++ vpx_sources, gstvpx_enums, + c_args : gst_plugins_good_args + vpx_args, + include_directories : [configinc], + dependencies : [gstbase_dep, gsttag_dep, gstvideo_dep, vpx_dep], + install : true, + install_dir : plugins_install_dir, + ) +- pkgconfig.generate(gstvpx, install_dir : plugins_pkgconfig_install_dir) + plugins += [gstvpx] + + install_data(sources: ['GstVP8Enc.prs'], install_dir: presetdir) ++ env = environment() ++ env.prepend('GST_PRESET_PATH', meson.current_source_dir()) ++ meson.add_devenv(env) + endif +diff --git a/ext/vpx/plugin.c b/ext/vpx/plugin.c +index 4f04efaff..1887aee70 100644 +--- a/ext/vpx/plugin.c ++++ b/ext/vpx/plugin.c +@@ -25,35 +25,30 @@ + + #include + +-#include "gstvp8dec.h" +-#include "gstvp8enc.h" +-#include "gstvp9dec.h" +-#include "gstvp9enc.h" ++#include "gstvpxelements.h" + + static gboolean + plugin_init (GstPlugin * plugin) + { ++ gboolean ret = FALSE; ++ + #ifdef HAVE_VP8_DECODER +- gst_element_register (plugin, "vp8dec", GST_RANK_PRIMARY, +- gst_vp8_dec_get_type ()); ++ ret |= GST_ELEMENT_REGISTER (vp8dec, plugin); + #endif + + #ifdef HAVE_VP8_ENCODER +- gst_element_register (plugin, "vp8enc", GST_RANK_PRIMARY, +- gst_vp8_enc_get_type ()); ++ ret |= GST_ELEMENT_REGISTER (vp8enc, plugin); + #endif + + #ifdef HAVE_VP9_DECODER +- gst_element_register (plugin, "vp9dec", GST_RANK_PRIMARY, +- gst_vp9_dec_get_type ()); ++ ret |= GST_ELEMENT_REGISTER (vp9dec, plugin); + #endif + + #ifdef HAVE_VP9_ENCODER +- gst_element_register (plugin, "vp9enc", GST_RANK_PRIMARY, +- gst_vp9_enc_get_type ()); ++ ret |= GST_ELEMENT_REGISTER (vp9enc, plugin); + #endif + +- return TRUE; ++ return ret; + } + + GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, +diff --git a/gst/rtp/gstasteriskh263.c b/gst/rtp/gstasteriskh263.c +index 8aa7fd451..f22c33f31 100644 +--- a/gst/rtp/gstasteriskh263.c ++++ b/gst/rtp/gstasteriskh263.c +@@ -24,6 +24,7 @@ + #include + + #include ++#include "gstrtpelements.h" + #include "gstasteriskh263.h" + + #define GST_ASTERISKH263_HEADER_LEN 6 +@@ -64,6 +65,8 @@ static GstStateChangeReturn gst_asteriskh263_change_state (GstElement * + + #define gst_asteriskh263_parent_class parent_class + G_DEFINE_TYPE (GstAsteriskh263, gst_asteriskh263, GST_TYPE_ELEMENT); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (asteriskh263, "asteriskh263", ++ GST_RANK_NONE, GST_TYPE_ASTERISK_H263, rtp_element_init (plugin)); + + 
static void + gst_asteriskh263_class_init (GstAsteriskh263Class * klass) +@@ -221,10 +224,3 @@ gst_asteriskh263_change_state (GstElement * element, GstStateChange transition) + */ + return ret; + } +- +-gboolean +-gst_asteriskh263_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "asteriskh263", +- GST_RANK_NONE, GST_TYPE_ASTERISK_H263); +-} +diff --git a/gst/rtp/gstasteriskh263.h b/gst/rtp/gstasteriskh263.h +index 1c9523d4c..f0416b561 100644 +--- a/gst/rtp/gstasteriskh263.h ++++ b/gst/rtp/gstasteriskh263.h +@@ -58,8 +58,6 @@ struct _GstAsteriskh263Class + + GType gst_asteriskh263_get_type (void); + +-gboolean gst_asteriskh263_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_ASTERISK_H263_H__ */ +diff --git a/gst/rtp/gstrtp.c b/gst/rtp/gstrtp.c +index b2a6706e1..9528ffb10 100644 +--- a/gst/rtp/gstrtp.c ++++ b/gst/rtp/gstrtp.c +@@ -23,397 +23,113 @@ + + #include + +-#include "gstrtputils.h" ++#include "gstrtpelements.h" + +-#include "gstrtpac3depay.h" +-#include "gstrtpac3pay.h" +-#include "gstrtpbvdepay.h" +-#include "gstrtpbvpay.h" +-#include "gstrtpceltdepay.h" +-#include "gstrtpceltpay.h" +-#include "gstrtpdvdepay.h" +-#include "gstrtpdvpay.h" +-#include "gstrtpgstdepay.h" +-#include "gstrtpgstpay.h" +-#include "gstrtpilbcdepay.h" +-#include "gstrtpilbcpay.h" +-#include "gstrtppcmupay.h" +-#include "gstrtppcmapay.h" +-#include "gstrtppcmadepay.h" +-#include "gstrtppcmudepay.h" +-#include "gstrtpg722depay.h" +-#include "gstrtpg722pay.h" +-#include "gstrtpg723depay.h" +-#include "gstrtpg723pay.h" +-#include "gstrtpg726depay.h" +-#include "gstrtpg726pay.h" +-#include "gstrtpg729depay.h" +-#include "gstrtpg729pay.h" +-#include "gstrtpgsmpay.h" +-#include "gstrtpgsmdepay.h" +-#include "gstrtpamrpay.h" +-#include "gstrtpamrdepay.h" +-#include "gstrtpmpapay.h" +-#include "gstrtpmpadepay.h" +-#include "gstrtpmparobustdepay.h" +-#include "gstrtpmpvdepay.h" +-#include "gstrtpmpvpay.h" +-#include "gstrtpopusdepay.h" +-#include "gstrtpopuspay.h" +-#include "gstrtph261pay.h" +-#include "gstrtph261depay.h" +-#include "gstrtph263pdepay.h" +-#include "gstrtph263ppay.h" +-#include "gstrtph263depay.h" +-#include "gstrtph263pay.h" +-#include "gstrtph264depay.h" +-#include "gstrtph264pay.h" +-#include "gstrtph265depay.h" +-#include "gstrtph265pay.h" +-#include "gstrtpj2kdepay.h" +-#include "gstrtpj2kpay.h" +-#include "gstrtpjpegdepay.h" +-#include "gstrtpjpegpay.h" +-#include "gstrtpklvdepay.h" +-#include "gstrtpklvpay.h" +-#include "gstrtpL8depay.h" +-#include "gstrtpL8pay.h" +-#include "gstrtpL16depay.h" +-#include "gstrtpL16pay.h" +-#include "gstrtpL24depay.h" +-#include "gstrtpL24pay.h" +-#include "gstasteriskh263.h" +-#include "gstrtpmp1sdepay.h" +-#include "gstrtpmp2tdepay.h" +-#include "gstrtpmp2tpay.h" +-#include "gstrtpmp4vdepay.h" +-#include "gstrtpmp4vpay.h" +-#include "gstrtpmp4adepay.h" +-#include "gstrtpmp4apay.h" +-#include "gstrtpmp4gdepay.h" +-#include "gstrtpmp4gpay.h" +-#include "gstrtpqcelpdepay.h" +-#include "gstrtpqdmdepay.h" +-#include "gstrtpsbcdepay.h" +-#include "gstrtpsbcpay.h" +-#include "gstrtpsirenpay.h" +-#include "gstrtpsirendepay.h" +-#include "gstrtpspeexpay.h" +-#include "gstrtpspeexdepay.h" +-#include "gstrtpsv3vdepay.h" +-#include "gstrtptheoradepay.h" +-#include "gstrtptheorapay.h" +-#include "gstrtpvorbisdepay.h" +-#include "gstrtpvorbispay.h" +-#include "gstrtpvp8depay.h" +-#include "gstrtpvp8pay.h" +-#include "gstrtpvp9depay.h" +-#include "gstrtpvp9pay.h" +-#include "gstrtpvrawdepay.h" +-#include "gstrtpvrawpay.h" 
+-#include "gstrtpstreampay.h" +-#include "gstrtpstreamdepay.h" +-#include "gstrtpredenc.h" +-#include "gstrtpreddec.h" +-#include "gstrtpulpfecdec.h" +-#include "gstrtpulpfecenc.h" +-#include "gstrtpstorage.h" + + static gboolean + plugin_init (GstPlugin * plugin) + { +- gst_tag_image_type_get_type (); +- +- rtp_quark_meta_tag_video = +- g_quark_from_static_string (GST_META_TAG_VIDEO_STR); +- rtp_quark_meta_tag_audio = +- g_quark_from_static_string (GST_META_TAG_AUDIO_STR); +- +- if (!gst_rtp_ac3_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_ac3_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_bv_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_bv_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_celt_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_celt_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_dv_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_dv_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_gst_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_gst_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_ilbc_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_ilbc_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_g722_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_g722_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_g723_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_g723_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_g726_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_g726_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_g729_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_g729_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_gsm_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_gsm_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_amr_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_amr_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_pcma_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_pcmu_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_pcmu_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_pcma_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_mpa_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_mpa_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_mpa_robust_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_mpv_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_mpv_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_opus_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_opus_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_h261_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_h261_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_h263p_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_h263p_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_h263_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_h263_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_h264_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_h264_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_h265_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_h265_pay_plugin_init (plugin)) +- return FALSE; +- +- if 
(!gst_rtp_j2k_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_j2k_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_jpeg_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_jpeg_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_klv_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_klv_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_L8_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_L8_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_L16_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_L16_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_L24_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_L24_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_asteriskh263_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_mp1s_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_mp2t_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_mp2t_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_mp4v_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_mp4v_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_mp4a_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_mp4a_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_mp4g_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_mp4g_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_qcelp_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_qdm2_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_sbc_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_sbc_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_siren_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_siren_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_speex_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_speex_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_sv3v_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_theora_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_theora_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_vorbis_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_vorbis_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_vp8_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_vp8_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_vp9_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_vp9_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_vraw_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_vraw_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_stream_pay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_stream_depay_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_element_register (plugin, "rtpredenc", GST_RANK_NONE, +- GST_TYPE_RTP_RED_ENC)) +- return FALSE; +- +- if (!gst_element_register (plugin, "rtpreddec", GST_RANK_NONE, +- GST_TYPE_RTP_RED_DEC)) +- return FALSE; +- +- if (!gst_element_register (plugin, "rtpulpfecdec", GST_RANK_NONE, +- GST_TYPE_RTP_ULPFEC_DEC)) +- return FALSE; +- +- if (!gst_element_register (plugin, "rtpulpfecenc", GST_RANK_NONE, +- GST_TYPE_RTP_ULPFEC_ENC)) +- return FALSE; +- +- if (!gst_element_register (plugin, "rtpstorage", GST_RANK_NONE, +- GST_TYPE_RTP_STORAGE)) +- return FALSE; +- +- return TRUE; ++ gboolean ret = FALSE; ++ ++ ret |= 
GST_ELEMENT_REGISTER (rtpac3depay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpac3pay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpbvdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpbvpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpceltdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpceltpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpdvdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpdvpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpgstdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpgstpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpilbcpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpilbcdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpg722depay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpg722pay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpg723depay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpg723pay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpg726depay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpg726pay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpg729depay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpg729pay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpgsmdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpgsmpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpamrdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpamrpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtppcmadepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtppcmudepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtppcmupay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtppcmapay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpmpadepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpmpapay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpmparobustdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpmpvdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpmpvpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpopusdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpopuspay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtph261pay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtph261depay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtph263ppay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtph263pdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtph263depay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtph263pay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtph264depay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtph264pay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtph265depay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtph265pay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpj2kdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpj2kpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpjpegdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpjpegpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpklvdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpklvpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpL8pay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpL8depay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpL16pay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpL16depay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpL24pay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpL24depay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpldacpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (asteriskh263, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpmp1sdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpmp2tdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpmp2tpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpmp4vpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpmp4vdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpmp4apay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpmp4adepay, plugin); ++ ret |= 
GST_ELEMENT_REGISTER (rtpmp4gdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpmp4gpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpqcelpdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpqdm2depay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpsbcdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpsbcpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpsirenpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpsirendepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpspeexpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpspeexdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpsv3vdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtptheoradepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtptheorapay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpvorbisdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpvorbispay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpvp8depay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpvp8pay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpvp9depay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpvp9pay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpvrawdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpvrawpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpstreampay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpstreamdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpisacpay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpisacdepay, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpredenc, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpreddec, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpulpfecdec, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpulpfecenc, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpstorage, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtphdrextcolorspace, plugin); ++ ++ return ret; + } + + GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, +diff --git a/gst/rtp/gstrtpL16depay.c b/gst/rtp/gstrtpL16depay.c +index f054a2480..aa7e5d03d 100644 +--- a/gst/rtp/gstrtpL16depay.c ++++ b/gst/rtp/gstrtpL16depay.c +@@ -42,6 +42,7 @@ + + #include + ++#include "gstrtpelements.h" + #include "gstrtpL16depay.h" + #include "gstrtpchannels.h" + #include "gstrtputils.h" +@@ -81,6 +82,8 @@ static GstStaticPadTemplate gst_rtp_L16_depay_sink_template = + + #define gst_rtp_L16_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpL16Depay, gst_rtp_L16_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpL16depay, "rtpL16depay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_L16_DEPAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_L16_depay_setcaps (GstRTPBaseDepayload * depayload, + GstCaps * caps); +@@ -291,10 +294,3 @@ reorder_failed: + return NULL; + } + } +- +-gboolean +-gst_rtp_L16_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpL16depay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_L16_DEPAY); +-} +diff --git a/gst/rtp/gstrtpL16depay.h b/gst/rtp/gstrtpL16depay.h +index 125d4cd53..fac933bb1 100644 +--- a/gst/rtp/gstrtpL16depay.h ++++ b/gst/rtp/gstrtpL16depay.h +@@ -60,8 +60,6 @@ struct _GstRtpL16DepayClass + + GType gst_rtp_L16_depay_get_type (void); + +-gboolean gst_rtp_L16_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_L16_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpL16pay.c b/gst/rtp/gstrtpL16pay.c +index 7e358d3d0..41a7c0544 100644 +--- a/gst/rtp/gstrtpL16pay.c ++++ b/gst/rtp/gstrtpL16pay.c +@@ -42,6 +42,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpL16pay.h" + #include "gstrtpchannels.h" + +@@ -90,6 +91,8 @@ gst_rtp_L16_pay_handle_buffer (GstRTPBasePayload * basepayload, + + #define 
gst_rtp_L16_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpL16Pay, gst_rtp_L16_pay, GST_TYPE_RTP_BASE_AUDIO_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpL16pay, "rtpL16pay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_L16_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_L16_pay_class_init (GstRtpL16PayClass * klass) +@@ -254,10 +257,3 @@ gst_rtp_L16_pay_handle_buffer (GstRTPBasePayload * basepayload, + return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->handle_buffer (basepayload, + buffer); + } +- +-gboolean +-gst_rtp_L16_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpL16pay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_L16_PAY); +-} +diff --git a/gst/rtp/gstrtpL16pay.h b/gst/rtp/gstrtpL16pay.h +index f4f3702e1..b3078db05 100644 +--- a/gst/rtp/gstrtpL16pay.h ++++ b/gst/rtp/gstrtpL16pay.h +@@ -56,8 +56,6 @@ struct _GstRtpL16PayClass + + GType gst_rtp_L16_pay_get_type (void); + +-gboolean gst_rtp_L16_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_L16_PAY_H__ */ +diff --git a/gst/rtp/gstrtpL24depay.c b/gst/rtp/gstrtpL24depay.c +index 448dac022..e39fe7c2f 100644 +--- a/gst/rtp/gstrtpL24depay.c ++++ b/gst/rtp/gstrtpL24depay.c +@@ -42,6 +42,7 @@ + + #include + ++#include "gstrtpelements.h" + #include "gstrtpL24depay.h" + #include "gstrtpchannels.h" + #include "gstrtputils.h" +@@ -70,6 +71,8 @@ GST_STATIC_PAD_TEMPLATE ("sink", + + #define gst_rtp_L24_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpL24Depay, gst_rtp_L24_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpL24depay, "rtpL24depay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_L24_DEPAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_L24_depay_setcaps (GstRTPBaseDepayload * depayload, + GstCaps * caps); +@@ -256,10 +259,3 @@ reorder_failed: + return NULL; + } + } +- +-gboolean +-gst_rtp_L24_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpL24depay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_L24_DEPAY); +-} +diff --git a/gst/rtp/gstrtpL24depay.h b/gst/rtp/gstrtpL24depay.h +index c4e00e6f2..411adf910 100644 +--- a/gst/rtp/gstrtpL24depay.h ++++ b/gst/rtp/gstrtpL24depay.h +@@ -60,8 +60,6 @@ struct _GstRtpL24DepayClass + + GType gst_rtp_L24_depay_get_type (void); + +-gboolean gst_rtp_L24_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_L24_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpL24pay.c b/gst/rtp/gstrtpL24pay.c +index d2ad72535..aa8fc2273 100644 +--- a/gst/rtp/gstrtpL24pay.c ++++ b/gst/rtp/gstrtpL24pay.c +@@ -42,6 +42,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpL24pay.h" + #include "gstrtpchannels.h" + +@@ -79,6 +80,8 @@ gst_rtp_L24_pay_handle_buffer (GstRTPBasePayload * basepayload, + + #define gst_rtp_L24_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpL24Pay, gst_rtp_L24_pay, GST_TYPE_RTP_BASE_AUDIO_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpL24pay, "rtpL24pay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_L24_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_L24_pay_class_init (GstRtpL24PayClass * klass) +@@ -235,10 +238,3 @@ gst_rtp_L24_pay_handle_buffer (GstRTPBasePayload * basepayload, + return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->handle_buffer (basepayload, + buffer); + } +- +-gboolean +-gst_rtp_L24_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpL24pay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_L24_PAY); +-} +diff --git a/gst/rtp/gstrtpL24pay.h b/gst/rtp/gstrtpL24pay.h 
+index 47395ad16..4dd64dbef 100644 +--- a/gst/rtp/gstrtpL24pay.h ++++ b/gst/rtp/gstrtpL24pay.h +@@ -56,8 +56,6 @@ struct _GstRtpL24PayClass + + GType gst_rtp_L24_pay_get_type (void); + +-gboolean gst_rtp_L24_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_L24_PAY_H__ */ +diff --git a/gst/rtp/gstrtpL8depay.c b/gst/rtp/gstrtpL8depay.c +index 121a749af..f1c0fe302 100644 +--- a/gst/rtp/gstrtpL8depay.c ++++ b/gst/rtp/gstrtpL8depay.c +@@ -42,6 +42,7 @@ + + #include + ++#include "gstrtpelements.h" + #include "gstrtpL8depay.h" + #include "gstrtpchannels.h" + +@@ -73,6 +74,10 @@ static GstStaticPadTemplate gst_rtp_L8_depay_sink_template = + #define gst_rtp_L8_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpL8Depay, gst_rtp_L8_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); + ++ ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpL8depay, "rtpL8depay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_L8_DEPAY, rtp_element_init (plugin)); ++ + static gboolean gst_rtp_L8_depay_setcaps (GstRTPBaseDepayload * depayload, + GstCaps * caps); + static GstBuffer *gst_rtp_L8_depay_process (GstRTPBaseDepayload * depayload, +@@ -258,10 +263,3 @@ reorder_failed: + return NULL; + } + } +- +-gboolean +-gst_rtp_L8_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpL8depay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_L8_DEPAY); +-} +diff --git a/gst/rtp/gstrtpL8depay.h b/gst/rtp/gstrtpL8depay.h +index a2d9bec59..589e9fb27 100644 +--- a/gst/rtp/gstrtpL8depay.h ++++ b/gst/rtp/gstrtpL8depay.h +@@ -58,8 +58,6 @@ struct _GstRtpL8DepayClass + + GType gst_rtp_L8_depay_get_type (void); + +-gboolean gst_rtp_L8_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_L8_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpL8pay.c b/gst/rtp/gstrtpL8pay.c +index 6662cda91..b7a39e89f 100644 +--- a/gst/rtp/gstrtpL8pay.c ++++ b/gst/rtp/gstrtpL8pay.c +@@ -42,6 +42,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpL8pay.h" + #include "gstrtpchannels.h" + +@@ -80,6 +81,10 @@ gst_rtp_L8_pay_handle_buffer (GstRTPBasePayload * basepayload, + #define gst_rtp_L8_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpL8Pay, gst_rtp_L8_pay, GST_TYPE_RTP_BASE_AUDIO_PAYLOAD); + ++ ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpL8pay, "rtpL8pay", GST_RANK_SECONDARY, ++ GST_TYPE_RTP_L8_PAY, rtp_element_init (plugin)); ++ + static void + gst_rtp_L8_pay_class_init (GstRtpL8PayClass * klass) + { +@@ -234,10 +239,3 @@ gst_rtp_L8_pay_handle_buffer (GstRTPBasePayload * basepayload, + return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->handle_buffer (basepayload, + buffer); + } +- +-gboolean +-gst_rtp_L8_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpL8pay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_L8_PAY); +-} +diff --git a/gst/rtp/gstrtpL8pay.h b/gst/rtp/gstrtpL8pay.h +index 183eb2fcf..bebd724f7 100644 +--- a/gst/rtp/gstrtpL8pay.h ++++ b/gst/rtp/gstrtpL8pay.h +@@ -57,8 +57,6 @@ struct _GstRtpL8PayClass + + GType gst_rtp_L8_pay_get_type (void); + +-gboolean gst_rtp_L8_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_L8_PAY_H__ */ +diff --git a/gst/rtp/gstrtpac3depay.c b/gst/rtp/gstrtpac3depay.c +index 54339f343..5bb607bdc 100644 +--- a/gst/rtp/gstrtpac3depay.c ++++ b/gst/rtp/gstrtpac3depay.c +@@ -41,6 +41,7 @@ + #include + + #include ++#include "gstrtpelements.h" + #include "gstrtpac3depay.h" + #include "gstrtputils.h" + +@@ -51,7 +52,7 @@ static GstStaticPadTemplate gst_rtp_ac3_depay_src_template = + 
GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, +- GST_STATIC_CAPS ("audio/ac3") ++ GST_STATIC_CAPS ("audio/x-ac3") + ); + + static GstStaticPadTemplate gst_rtp_ac3_depay_sink_template = +@@ -65,6 +66,8 @@ GST_STATIC_PAD_TEMPLATE ("sink", + ); + + G_DEFINE_TYPE (GstRtpAC3Depay, gst_rtp_ac3_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpac3depay, "rtpac3depay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_AC3_DEPAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_ac3_depay_setcaps (GstRTPBaseDepayload * depayload, + GstCaps * caps); +@@ -117,7 +120,7 @@ gst_rtp_ac3_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps) + clock_rate = 90000; /* default */ + depayload->clock_rate = clock_rate; + +- srccaps = gst_caps_new_empty_simple ("audio/ac3"); ++ srccaps = gst_caps_new_empty_simple ("audio/x-ac3"); + res = gst_pad_set_caps (depayload->srcpad, srccaps); + gst_caps_unref (srccaps); + +@@ -171,10 +174,3 @@ empty_packet: + return NULL; + } + } +- +-gboolean +-gst_rtp_ac3_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpac3depay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_AC3_DEPAY); +-} +diff --git a/gst/rtp/gstrtpac3depay.h b/gst/rtp/gstrtpac3depay.h +index 294bb12c4..62478f9c8 100644 +--- a/gst/rtp/gstrtpac3depay.h ++++ b/gst/rtp/gstrtpac3depay.h +@@ -51,8 +51,6 @@ struct _GstRtpAC3DepayClass + + GType gst_rtp_ac3_depay_get_type (void); + +-gboolean gst_rtp_ac3_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_AC3_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpac3pay.c b/gst/rtp/gstrtpac3pay.c +index 1feccedf4..478c4a378 100644 +--- a/gst/rtp/gstrtpac3pay.c ++++ b/gst/rtp/gstrtpac3pay.c +@@ -42,6 +42,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpac3pay.h" + #include "gstrtputils.h" + +@@ -81,6 +82,8 @@ static GstFlowReturn gst_rtp_ac3_pay_handle_buffer (GstRTPBasePayload * payload, + + #define gst_rtp_ac3_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpAC3Pay, gst_rtp_ac3_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpac3pay, "rtpac3pay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_AC3_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_ac3_pay_class_init (GstRtpAC3PayClass * klass) +@@ -319,8 +322,10 @@ gst_rtp_ac3_pay_flush (GstRtpAC3Pay * rtpac3pay) + payload[1] = NF; + payload_len -= 2; + +- if (avail == payload_len) ++ if (avail == payload_len) { + gst_rtp_buffer_set_marker (&rtp, TRUE); ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); ++ } + gst_rtp_buffer_unmap (&rtp); + + payload_buffer = +@@ -470,10 +475,3 @@ gst_rtp_ac3_pay_change_state (GstElement * element, GstStateChange transition) + } + return ret; + } +- +-gboolean +-gst_rtp_ac3_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpac3pay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_AC3_PAY); +-} +diff --git a/gst/rtp/gstrtpac3pay.h b/gst/rtp/gstrtpac3pay.h +index c131eac13..918a250e4 100644 +--- a/gst/rtp/gstrtpac3pay.h ++++ b/gst/rtp/gstrtpac3pay.h +@@ -57,8 +57,6 @@ struct _GstRtpAC3PayClass + + GType gst_rtp_ac3_pay_get_type (void); + +-gboolean gst_rtp_ac3_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_AC3_PAY_H__ */ +diff --git a/gst/rtp/gstrtpamrdepay.c b/gst/rtp/gstrtpamrdepay.c +index de0275801..08e64e518 100644 +--- a/gst/rtp/gstrtpamrdepay.c ++++ b/gst/rtp/gstrtpamrdepay.c +@@ -48,6 +48,7 @@ + + #include + #include ++#include "gstrtpelements.h" + #include 
"gstrtpamrdepay.h" + #include "gstrtputils.h" + +@@ -130,6 +131,8 @@ static GstBuffer *gst_rtp_amr_depay_process (GstRTPBaseDepayload * depayload, + + #define gst_rtp_amr_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpAMRDepay, gst_rtp_amr_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpamrdepay, "rtpamrdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_AMR_DEPAY, rtp_element_init (plugin)); + + static void + gst_rtp_amr_depay_class_init (GstRtpAMRDepayClass * klass) +@@ -469,10 +472,3 @@ bad_packet: + return NULL; + } + } +- +-gboolean +-gst_rtp_amr_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpamrdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_AMR_DEPAY); +-} +diff --git a/gst/rtp/gstrtpamrdepay.h b/gst/rtp/gstrtpamrdepay.h +index 0b806345d..b1ebe4f8f 100644 +--- a/gst/rtp/gstrtpamrdepay.h ++++ b/gst/rtp/gstrtpamrdepay.h +@@ -70,8 +70,6 @@ struct _GstRtpAMRDepayClass + + GType gst_rtp_amr_depay_get_type (void); + +-gboolean gst_rtp_amr_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_AMR_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpamrpay.c b/gst/rtp/gstrtpamrpay.c +index 828a7189f..147348555 100644 +--- a/gst/rtp/gstrtpamrpay.c ++++ b/gst/rtp/gstrtpamrpay.c +@@ -55,6 +55,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpamrpay.h" + #include "gstrtputils.h" + +@@ -113,6 +114,8 @@ gst_rtp_amr_pay_change_state (GstElement * element, GstStateChange transition); + + #define gst_rtp_amr_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpAMRPay, gst_rtp_amr_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpamrpay, "rtpamrpay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_AMR_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_amr_pay_class_init (GstRtpAMRPayClass * klass) +@@ -330,6 +333,7 @@ gst_rtp_amr_pay_handle_buffer (GstRTPBasePayload * basepayload, + if (GST_BUFFER_IS_DISCONT (buffer)) { + GST_DEBUG_OBJECT (basepayload, "discont, setting marker bit"); + GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT); ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + gst_rtp_buffer_set_marker (&rtp, TRUE); + gst_rtp_amr_pay_recalc_rtp_time (rtpamrpay, timestamp); + } +@@ -456,10 +460,3 @@ gst_rtp_amr_pay_change_state (GstElement * element, GstStateChange transition) + + return ret; + } +- +-gboolean +-gst_rtp_amr_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpamrpay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_AMR_PAY); +-} +diff --git a/gst/rtp/gstrtpamrpay.h b/gst/rtp/gstrtpamrpay.h +index a3df1ce63..b6e21483e 100644 +--- a/gst/rtp/gstrtpamrpay.h ++++ b/gst/rtp/gstrtpamrpay.h +@@ -63,8 +63,6 @@ struct _GstRtpAMRPayClass + + GType gst_rtp_amr_pay_get_type (void); + +-gboolean gst_rtp_amr_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_AMR_PAY_H__ */ +diff --git a/gst/rtp/gstrtpbvdepay.c b/gst/rtp/gstrtpbvdepay.c +index 625bb3731..98cff92fd 100644 +--- a/gst/rtp/gstrtpbvdepay.c ++++ b/gst/rtp/gstrtpbvdepay.c +@@ -35,6 +35,7 @@ + + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpbvdepay.h" + #include "gstrtputils.h" + +@@ -65,6 +66,8 @@ static gboolean gst_rtp_bv_depay_setcaps (GstRTPBaseDepayload * depayload, + + #define gst_rtp_bv_depay_parent_class parent_class + G_DEFINE_TYPE (GstRTPBVDepay, gst_rtp_bv_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpbvdepay, "rtpbvdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_BV_DEPAY, 
rtp_element_init (plugin)); + + static void + gst_rtp_bv_depay_class_init (GstRTPBVDepayClass * klass) +@@ -182,10 +185,3 @@ gst_rtp_bv_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp) + + return outbuf; + } +- +-gboolean +-gst_rtp_bv_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpbvdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_BV_DEPAY); +-} +diff --git a/gst/rtp/gstrtpbvdepay.h b/gst/rtp/gstrtpbvdepay.h +index f130682c0..9a9ea7c40 100644 +--- a/gst/rtp/gstrtpbvdepay.h ++++ b/gst/rtp/gstrtpbvdepay.h +@@ -53,8 +53,6 @@ struct _GstRTPBVDepayClass + + GType gst_rtp_bv_depay_get_type (void); + +-gboolean gst_rtp_bv_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_BV_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpbvpay.c b/gst/rtp/gstrtpbvpay.c +index a396d2642..a70f3c237 100644 +--- a/gst/rtp/gstrtpbvpay.c ++++ b/gst/rtp/gstrtpbvpay.c +@@ -34,6 +34,7 @@ + #include + + #include ++#include "gstrtpelements.h" + #include "gstrtpbvpay.h" + + GST_DEBUG_CATEGORY_STATIC (rtpbvpay_debug); +@@ -69,6 +70,8 @@ static gboolean gst_rtp_bv_pay_sink_setcaps (GstRTPBasePayload * payload, + + #define gst_rtp_bv_pay_parent_class parent_class + G_DEFINE_TYPE (GstRTPBVPay, gst_rtp_bv_pay, GST_TYPE_RTP_BASE_AUDIO_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpbvpay, "rtpbvpay", GST_RANK_SECONDARY, ++ GST_TYPE_RTP_BV_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_bv_pay_class_init (GstRTPBVPayClass * klass) +@@ -231,10 +234,3 @@ gst_rtp_bv_pay_sink_getcaps (GstRTPBasePayload * rtppayload, GstPad * pad, + + return caps; + } +- +-gboolean +-gst_rtp_bv_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpbvpay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_BV_PAY); +-} +diff --git a/gst/rtp/gstrtpbvpay.h b/gst/rtp/gstrtpbvpay.h +index 09766ccaa..afb3485b9 100644 +--- a/gst/rtp/gstrtpbvpay.h ++++ b/gst/rtp/gstrtpbvpay.h +@@ -53,8 +53,6 @@ struct _GstRTPBVPayClass + + GType gst_rtp_bv_pay_get_type (void); + +-gboolean gst_rtp_bv_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_BV_PAY_H__ */ +diff --git a/gst/rtp/gstrtpceltdepay.c b/gst/rtp/gstrtpceltdepay.c +index 97c17f013..9054af7c6 100644 +--- a/gst/rtp/gstrtpceltdepay.c ++++ b/gst/rtp/gstrtpceltdepay.c +@@ -26,6 +26,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpceltdepay.h" + #include "gstrtputils.h" + +@@ -74,7 +75,8 @@ static gboolean gst_rtp_celt_depay_setcaps (GstRTPBaseDepayload * depayload, + #define gst_rtp_celt_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpCELTDepay, gst_rtp_celt_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); +- ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpceltdepay, "rtpceltdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_CELT_DEPAY, rtp_element_init (plugin)); + static void + gst_rtp_celt_depay_class_init (GstRtpCELTDepayClass * klass) + { +@@ -267,10 +269,3 @@ gst_rtp_celt_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp) + + return NULL; + } +- +-gboolean +-gst_rtp_celt_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpceltdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_CELT_DEPAY); +-} +diff --git a/gst/rtp/gstrtpceltdepay.h b/gst/rtp/gstrtpceltdepay.h +index 0905c681d..eb4bc3099 100644 +--- a/gst/rtp/gstrtpceltdepay.h ++++ b/gst/rtp/gstrtpceltdepay.h +@@ -47,8 +47,6 @@ struct _GstRtpCELTDepayClass + + GType gst_rtp_celt_depay_get_type (void); + +-gboolean gst_rtp_celt_depay_plugin_init (GstPlugin * plugin); 
+- + G_END_DECLS + + #endif /* __GST_RTP_CELT_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpceltpay.c b/gst/rtp/gstrtpceltpay.c +index a29d23b17..18ef55640 100644 +--- a/gst/rtp/gstrtpceltpay.c ++++ b/gst/rtp/gstrtpceltpay.c +@@ -26,6 +26,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpceltpay.h" + #include "gstrtputils.h" + +@@ -66,6 +67,8 @@ static GstFlowReturn gst_rtp_celt_pay_handle_buffer (GstRTPBasePayload * + + #define gst_rtp_celt_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpCELTPay, gst_rtp_celt_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpceltpay, "rtpceltpay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_CELT_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_celt_pay_class_init (GstRtpCELTPayClass * klass) +@@ -495,10 +498,3 @@ gst_rtp_celt_pay_change_state (GstElement * element, GstStateChange transition) + } + return ret; + } +- +-gboolean +-gst_rtp_celt_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpceltpay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_CELT_PAY); +-} +diff --git a/gst/rtp/gstrtpceltpay.h b/gst/rtp/gstrtpceltpay.h +index dcdd0ecc5..452c12452 100644 +--- a/gst/rtp/gstrtpceltpay.h ++++ b/gst/rtp/gstrtpceltpay.h +@@ -55,8 +55,6 @@ struct _GstRtpCELTPayClass + + GType gst_rtp_celt_pay_get_type (void); + +-gboolean gst_rtp_celt_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_CELT_PAY_H__ */ +diff --git a/gst/rtp/gstrtpdvdepay.c b/gst/rtp/gstrtpdvdepay.c +index 12297f004..6558dddd1 100644 +--- a/gst/rtp/gstrtpdvdepay.c ++++ b/gst/rtp/gstrtpdvdepay.c +@@ -33,6 +33,8 @@ + #include + + #include "gstrtpdvdepay.h" ++ ++#include "gstrtpelements.h" + #include "gstrtputils.h" + + GST_DEBUG_CATEGORY (rtpdvdepay_debug); +@@ -82,7 +84,8 @@ static gboolean gst_rtp_dv_depay_setcaps (GstRTPBaseDepayload * depayload, + + #define gst_rtp_dv_depay_parent_class parent_class + G_DEFINE_TYPE (GstRTPDVDepay, gst_rtp_dv_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); +- ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpdvdepay, "rtpdvdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_DV_DEPAY, rtp_element_init (plugin)); + + static void + gst_rtp_dv_depay_class_init (GstRTPDVDepayClass * klass) +@@ -416,10 +419,3 @@ gst_rtp_dv_depay_change_state (GstElement * element, GstStateChange transition) + } + return ret; + } +- +-gboolean +-gst_rtp_dv_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpdvdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_DV_DEPAY); +-} +diff --git a/gst/rtp/gstrtpdvdepay.h b/gst/rtp/gstrtpdvdepay.h +index 1ce5b9715..3cd921460 100644 +--- a/gst/rtp/gstrtpdvdepay.h ++++ b/gst/rtp/gstrtpdvdepay.h +@@ -59,8 +59,6 @@ struct _GstRTPDVDepayClass + + GType gst_rtp_dv_depay_get_type (void); + +-gboolean gst_rtp_dv_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GSTRTPDVDEPAY_H__ */ +diff --git a/gst/rtp/gstrtpdvpay.c b/gst/rtp/gstrtpdvpay.c +index 540d68566..24bb9b41c 100644 +--- a/gst/rtp/gstrtpdvpay.c ++++ b/gst/rtp/gstrtpdvpay.c +@@ -26,6 +26,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpdvpay.h" + #include "gstrtputils.h" + +@@ -97,6 +98,8 @@ static void gst_dv_pay_get_property (GObject * object, + + #define gst_rtp_dv_pay_parent_class parent_class + G_DEFINE_TYPE (GstRTPDVPay, gst_rtp_dv_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpdvpay, "rtpdvpay", GST_RANK_SECONDARY, ++ GST_TYPE_RTP_DV_PAY, rtp_element_init (plugin)); + + static void + 
gst_rtp_dv_pay_class_init (GstRTPDVPayClass * klass) +@@ -332,7 +335,9 @@ gst_rtp_dv_pay_handle_buffer (GstRTPBasePayload * basepayload, + while (size >= 80) { + /* Allocate a new buffer, set the timestamp */ + if (outbuf == NULL) { +- outbuf = gst_rtp_buffer_new_allocate (max_payload_size, 0, 0); ++ outbuf = ++ gst_rtp_base_payload_allocate_output_buffer (basepayload, ++ max_payload_size, 0, 0); + GST_BUFFER_PTS (outbuf) = GST_BUFFER_PTS (buffer); + + if (!gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp)) { +@@ -367,6 +372,7 @@ gst_rtp_dv_pay_handle_buffer (GstRTPBasePayload * basepayload, + + /* set marker */ + gst_rtp_buffer_set_marker (&rtp, TRUE); ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + + /* shrink buffer to last packet */ + hlen = gst_rtp_buffer_get_header_len (&rtp); +@@ -390,10 +396,3 @@ beach: + + return ret; + } +- +-gboolean +-gst_rtp_dv_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpdvpay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_DV_PAY); +-} +diff --git a/gst/rtp/gstrtpdvpay.h b/gst/rtp/gstrtpdvpay.h +index 4c250a840..def525b2c 100644 +--- a/gst/rtp/gstrtpdvpay.h ++++ b/gst/rtp/gstrtpdvpay.h +@@ -62,8 +62,6 @@ struct _GstRTPDVPayClass + + GType gst_rtp_dv_pay_get_type (void); + +-gboolean gst_rtp_dv_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GSTRTPDVPAY_H__ */ +diff --git a/gst/rtp/gstrtpelement.c b/gst/rtp/gstrtpelement.c +new file mode 100644 +index 000000000..cd6d883b0 +--- /dev/null ++++ b/gst/rtp/gstrtpelement.c +@@ -0,0 +1,46 @@ ++/* GStreamer ++ * Copyright (C) <1999> Erik Walthinsen ++ * Copyright (C) 2020 Huawei Technologies Co., Ltd. ++ * @Author: Julian Bouzas ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include ++#include ++#include ++ ++#include "gstrtpelements.h" ++#include "gstrtputils.h" ++ ++ ++void ++rtp_element_init (GstPlugin * plugin) ++{ ++ static gsize res = FALSE; ++ if (g_once_init_enter (&res)) { ++ gst_tag_image_type_get_type (); ++ rtp_quark_meta_tag_video = ++ g_quark_from_static_string (GST_META_TAG_VIDEO_STR); ++ rtp_quark_meta_tag_audio = ++ g_quark_from_static_string (GST_META_TAG_AUDIO_STR); ++ g_once_init_leave (&res, TRUE); ++ } ++} +diff --git a/gst/rtp/gstrtpelements.h b/gst/rtp/gstrtpelements.h +new file mode 100644 +index 000000000..9321d3530 +--- /dev/null ++++ b/gst/rtp/gstrtpelements.h +@@ -0,0 +1,134 @@ ++/* GStreamer ++ * Copyright (C) <1999> Erik Walthinsen ++ * Copyright (C) 2020 Huawei Technologies Co., Ltd. 
++ * @Author: Julian Bouzas ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the Free ++ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA. ++ */ ++ ++#ifndef __GST_RTP_ELEMENTS_H__ ++#define __GST_RTP_ELEMENTS_H__ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include ++ ++G_BEGIN_DECLS ++ ++void rtp_element_init (GstPlugin * plugin); ++ ++GST_ELEMENT_REGISTER_DECLARE (rtpac3depay); ++GST_ELEMENT_REGISTER_DECLARE (rtpac3pay); ++GST_ELEMENT_REGISTER_DECLARE (rtpbvdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpbvpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpceltdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpceltpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpdvdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpdvpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpgstdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpgstpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpilbcpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpilbcdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpg722depay); ++GST_ELEMENT_REGISTER_DECLARE (rtpg722pay); ++GST_ELEMENT_REGISTER_DECLARE (rtpg723depay); ++GST_ELEMENT_REGISTER_DECLARE (rtpg723pay); ++GST_ELEMENT_REGISTER_DECLARE (rtpg726depay); ++GST_ELEMENT_REGISTER_DECLARE (rtpg726pay); ++GST_ELEMENT_REGISTER_DECLARE (rtpg729depay); ++GST_ELEMENT_REGISTER_DECLARE (rtpg729pay); ++GST_ELEMENT_REGISTER_DECLARE (rtpgsmdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpgsmpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpamrdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpamrpay); ++GST_ELEMENT_REGISTER_DECLARE (rtppcmadepay); ++GST_ELEMENT_REGISTER_DECLARE (rtppcmudepay); ++GST_ELEMENT_REGISTER_DECLARE (rtppcmupay); ++GST_ELEMENT_REGISTER_DECLARE (rtppcmapay); ++GST_ELEMENT_REGISTER_DECLARE (rtpmpadepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpmpapay); ++GST_ELEMENT_REGISTER_DECLARE (rtpmparobustdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpmpvdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpmpvpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpopusdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpopuspay); ++GST_ELEMENT_REGISTER_DECLARE (rtph261pay); ++GST_ELEMENT_REGISTER_DECLARE (rtph261depay); ++GST_ELEMENT_REGISTER_DECLARE (rtph263ppay); ++GST_ELEMENT_REGISTER_DECLARE (rtph263pdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtph263depay); ++GST_ELEMENT_REGISTER_DECLARE (rtph263pay); ++GST_ELEMENT_REGISTER_DECLARE (rtph264depay); ++GST_ELEMENT_REGISTER_DECLARE (rtph264pay); ++GST_ELEMENT_REGISTER_DECLARE (rtph265depay); ++GST_ELEMENT_REGISTER_DECLARE (rtph265pay); ++GST_ELEMENT_REGISTER_DECLARE (rtpj2kdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpj2kpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpjpegdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpjpegpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpklvdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpklvpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpL8pay); ++GST_ELEMENT_REGISTER_DECLARE (rtpL8depay); ++GST_ELEMENT_REGISTER_DECLARE (rtpL16pay); ++GST_ELEMENT_REGISTER_DECLARE (rtpL16depay); ++GST_ELEMENT_REGISTER_DECLARE 
(rtpL24pay); ++GST_ELEMENT_REGISTER_DECLARE (rtpL24depay); ++GST_ELEMENT_REGISTER_DECLARE (rtpldacpay); ++GST_ELEMENT_REGISTER_DECLARE (asteriskh263); ++GST_ELEMENT_REGISTER_DECLARE (rtpmp1sdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpmp2tdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpmp2tpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpmp4vpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpmp4vdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpmp4apay); ++GST_ELEMENT_REGISTER_DECLARE (rtpmp4adepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpmp4gdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpmp4gpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpqcelpdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpqdm2depay); ++GST_ELEMENT_REGISTER_DECLARE (rtpsbcdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpsbcpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpsirenpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpsirendepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpspeexpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpspeexdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpsv3vdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtptheoradepay); ++GST_ELEMENT_REGISTER_DECLARE (rtptheorapay); ++GST_ELEMENT_REGISTER_DECLARE (rtpvorbisdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpvorbispay); ++GST_ELEMENT_REGISTER_DECLARE (rtpvp8depay); ++GST_ELEMENT_REGISTER_DECLARE (rtpvp8pay); ++GST_ELEMENT_REGISTER_DECLARE (rtpvp9depay); ++GST_ELEMENT_REGISTER_DECLARE (rtpvp9pay); ++GST_ELEMENT_REGISTER_DECLARE (rtpvrawdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpvrawpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpstreampay); ++GST_ELEMENT_REGISTER_DECLARE (rtpstreamdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpisacpay); ++GST_ELEMENT_REGISTER_DECLARE (rtpisacdepay); ++GST_ELEMENT_REGISTER_DECLARE (rtpredenc); ++GST_ELEMENT_REGISTER_DECLARE (rtpreddec); ++GST_ELEMENT_REGISTER_DECLARE (rtpulpfecdec); ++GST_ELEMENT_REGISTER_DECLARE (rtpulpfecenc); ++GST_ELEMENT_REGISTER_DECLARE (rtpstorage); ++GST_ELEMENT_REGISTER_DECLARE (rtphdrextcolorspace); ++ ++G_END_DECLS ++ ++#endif /* __GST_RTP_ELEMENTS_H__ */ +diff --git a/gst/rtp/gstrtpg722depay.c b/gst/rtp/gstrtpg722depay.c +index 5a25eef52..060c7e1c8 100644 +--- a/gst/rtp/gstrtpg722depay.c ++++ b/gst/rtp/gstrtpg722depay.c +@@ -26,6 +26,7 @@ + + #include + ++#include "gstrtpelements.h" + #include "gstrtpg722depay.h" + #include "gstrtpchannels.h" + #include "gstrtputils.h" +@@ -63,6 +64,8 @@ static GstStaticPadTemplate gst_rtp_g722_depay_sink_template = + #define gst_rtp_g722_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpG722Depay, gst_rtp_g722_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg722depay, "rtpg722depay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_G722_DEPAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_g722_depay_setcaps (GstRTPBaseDepayload * depayload, + GstCaps * caps); +@@ -253,10 +256,3 @@ empty_packet: + return NULL; + } + } +- +-gboolean +-gst_rtp_g722_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpg722depay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_G722_DEPAY); +-} +diff --git a/gst/rtp/gstrtpg722depay.h b/gst/rtp/gstrtpg722depay.h +index 8b6ffa046..2acdf28b2 100644 +--- a/gst/rtp/gstrtpg722depay.h ++++ b/gst/rtp/gstrtpg722depay.h +@@ -57,8 +57,6 @@ struct _GstRtpG722DepayClass + + GType gst_rtp_g722_depay_get_type (void); + +-gboolean gst_rtp_g722_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_G722_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpg722pay.c b/gst/rtp/gstrtpg722pay.c +index a383e0120..8afc2ebe0 100644 +--- a/gst/rtp/gstrtpg722pay.c ++++ 
b/gst/rtp/gstrtpg722pay.c +@@ -26,6 +26,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpg722pay.h" + #include "gstrtpchannels.h" + +@@ -64,6 +65,8 @@ static GstCaps *gst_rtp_g722_pay_getcaps (GstRTPBasePayload * rtppayload, + #define gst_rtp_g722_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpG722Pay, gst_rtp_g722_pay, + GST_TYPE_RTP_BASE_AUDIO_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg722pay, "rtpg722pay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_G722_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_g722_pay_class_init (GstRtpG722PayClass * klass) +@@ -227,10 +230,3 @@ gst_rtp_g722_pay_getcaps (GstRTPBasePayload * rtppayload, GstPad * pad, + + return caps; + } +- +-gboolean +-gst_rtp_g722_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpg722pay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_G722_PAY); +-} +diff --git a/gst/rtp/gstrtpg722pay.h b/gst/rtp/gstrtpg722pay.h +index f238286ea..1211ca0a0 100644 +--- a/gst/rtp/gstrtpg722pay.h ++++ b/gst/rtp/gstrtpg722pay.h +@@ -54,8 +54,6 @@ struct _GstRtpG722PayClass + + GType gst_rtp_g722_pay_get_type (void); + +-gboolean gst_rtp_g722_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_G722_PAY_H__ */ +diff --git a/gst/rtp/gstrtpg723depay.c b/gst/rtp/gstrtpg723depay.c +index 901d9ae06..e4f416ea2 100644 +--- a/gst/rtp/gstrtpg723depay.c ++++ b/gst/rtp/gstrtpg723depay.c +@@ -26,6 +26,7 @@ + + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpg723depay.h" + + GST_DEBUG_CATEGORY_STATIC (rtpg723depay_debug); +@@ -80,6 +81,8 @@ static GstBuffer *gst_rtp_g723_depay_process (GstRTPBaseDepayload * depayload, + #define gst_rtp_g723_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpG723Depay, gst_rtp_g723_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg723depay, "rtpg723depay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_G723_DEPAY, rtp_element_init (plugin)); + + static void + gst_rtp_g723_depay_class_init (GstRtpG723DepayClass * klass) +@@ -214,10 +217,3 @@ bad_packet: + return NULL; + } + } +- +-gboolean +-gst_rtp_g723_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpg723depay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_G723_DEPAY); +-} +diff --git a/gst/rtp/gstrtpg723depay.h b/gst/rtp/gstrtpg723depay.h +index dd942b3b6..673a13766 100644 +--- a/gst/rtp/gstrtpg723depay.h ++++ b/gst/rtp/gstrtpg723depay.h +@@ -52,8 +52,6 @@ struct _GstRtpG723DepayClass + + GType gst_rtp_g723_depay_get_type (void); + +-gboolean gst_rtp_g723_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_G723_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpg723pay.c b/gst/rtp/gstrtpg723pay.c +index 18e294a3f..0c5fd1bdd 100644 +--- a/gst/rtp/gstrtpg723pay.c ++++ b/gst/rtp/gstrtpg723pay.c +@@ -28,6 +28,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpg723pay.h" + #include "gstrtputils.h" + +@@ -68,6 +69,8 @@ static GstStateChangeReturn gst_rtp_g723_pay_change_state (GstElement * element, + + #define gst_rtp_g723_pay_parent_class parent_class + G_DEFINE_TYPE (GstRTPG723Pay, gst_rtp_g723_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg723pay, "rtpg723pay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_G723_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_g723_pay_class_init (GstRTPG723PayClass * klass) +@@ -161,6 +164,7 @@ gst_rtp_g723_pay_flush (GstRTPG723Pay * pay) + /* set discont and marker */ + if (pay->discont) { + 
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT); ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + gst_rtp_buffer_set_marker (&rtp, TRUE); + pay->discont = FALSE; + } +@@ -298,11 +302,3 @@ gst_rtp_g723_pay_change_state (GstElement * element, GstStateChange transition) + + return ret; + } +- +-/*Plugin init functions*/ +-gboolean +-gst_rtp_g723_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpg723pay", GST_RANK_SECONDARY, +- gst_rtp_g723_pay_get_type ()); +-} +diff --git a/gst/rtp/gstrtpg723pay.h b/gst/rtp/gstrtpg723pay.h +index 37807412a..03bff5002 100644 +--- a/gst/rtp/gstrtpg723pay.h ++++ b/gst/rtp/gstrtpg723pay.h +@@ -57,8 +57,6 @@ struct _GstRTPG723PayClass + + GType gst_rtp_g723_pay_get_type (void); + +-gboolean gst_rtp_g723_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_G723_PAY_H__ */ +diff --git a/gst/rtp/gstrtpg726depay.c b/gst/rtp/gstrtpg726depay.c +index 7af1928aa..d4bdc4f27 100644 +--- a/gst/rtp/gstrtpg726depay.c ++++ b/gst/rtp/gstrtpg726depay.c +@@ -30,6 +30,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpg726depay.h" + #include "gstrtputils.h" + +@@ -91,6 +92,8 @@ static gboolean gst_rtp_g726_depay_setcaps (GstRTPBaseDepayload * depayload, + #define gst_rtp_g726_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpG726Depay, gst_rtp_g726_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg726depay, "rtpg726depay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_G726_DEPAY, rtp_element_init (plugin)); + + static void + gst_rtp_g726_depay_class_init (GstRtpG726DepayClass * klass) +@@ -282,7 +285,7 @@ gst_rtp_g726_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp) + ((tmp & 0x1c) << 1) | ((tmp & 0x03) << 6); + tmp = *in++; + *out++ = ((tmp & 0x80) >> 7) | +- ((tmp & 0x70) >> 3) | ((tmp & 0x0e) << 4) | ((tmp & 0x01) << 7); ++ ((tmp & 0x70) >> 3) | ((tmp & 0x0e) << 3) | ((tmp & 0x01) << 7); + tmp = *in++; + *out++ = ((tmp & 0xc0) >> 6) | + ((tmp & 0x38) >> 1) | ((tmp & 0x07) << 5); +@@ -384,10 +387,3 @@ gst_rtp_g726_depay_get_property (GObject * object, guint prop_id, + break; + } + } +- +-gboolean +-gst_rtp_g726_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpg726depay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_G726_DEPAY); +-} +diff --git a/gst/rtp/gstrtpg726depay.h b/gst/rtp/gstrtpg726depay.h +index 45fd2edbd..04ceec2f8 100644 +--- a/gst/rtp/gstrtpg726depay.h ++++ b/gst/rtp/gstrtpg726depay.h +@@ -52,7 +52,5 @@ struct _GstRtpG726DepayClass + + GType gst_rtp_g726_depay_get_type (void); + +-gboolean gst_rtp_g726_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + #endif /* __GST_RTP_G726_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpg726pay.c b/gst/rtp/gstrtpg726pay.c +index d251b4995..76dad4053 100644 +--- a/gst/rtp/gstrtpg726pay.c ++++ b/gst/rtp/gstrtpg726pay.c +@@ -28,6 +28,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpg726pay.h" + + GST_DEBUG_CATEGORY_STATIC (rtpg726pay_debug); +@@ -77,6 +78,8 @@ static GstFlowReturn gst_rtp_g726_pay_handle_buffer (GstRTPBasePayload * + #define gst_rtp_g726_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpG726Pay, gst_rtp_g726_pay, + GST_TYPE_RTP_BASE_AUDIO_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg726pay, "rtpg726pay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_G726_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_g726_pay_class_init (GstRtpG726PayClass * klass) +@@ -413,10 +416,3 @@ 
gst_rtp_g726_pay_get_property (GObject * object, guint prop_id, + break; + } + } +- +-gboolean +-gst_rtp_g726_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpg726pay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_G726_PAY); +-} +diff --git a/gst/rtp/gstrtpg726pay.h b/gst/rtp/gstrtpg726pay.h +index d9dbfa436..fd3078c2f 100644 +--- a/gst/rtp/gstrtpg726pay.h ++++ b/gst/rtp/gstrtpg726pay.h +@@ -49,7 +49,5 @@ struct _GstRtpG726PayClass + + GType gst_rtp_g726_pay_get_type (void); + +-gboolean gst_rtp_g726_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + #endif /* __GST_RTP_G726_PAY_H__ */ +diff --git a/gst/rtp/gstrtpg729depay.c b/gst/rtp/gstrtpg729depay.c +index 69c1c4dfc..0fb0bcd13 100644 +--- a/gst/rtp/gstrtpg729depay.c ++++ b/gst/rtp/gstrtpg729depay.c +@@ -25,6 +25,7 @@ + + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpg729depay.h" + #include "gstrtputils.h" + +@@ -80,6 +81,8 @@ static GstBuffer *gst_rtp_g729_depay_process (GstRTPBaseDepayload * depayload, + #define gst_rtp_g729_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpG729Depay, gst_rtp_g729_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg729depay, "rtpg729depay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_G729_DEPAY, rtp_element_init (plugin)); + + static void + gst_rtp_g729_depay_class_init (GstRtpG729DepayClass * klass) +@@ -216,10 +219,3 @@ bad_packet: + return NULL; + } + } +- +-gboolean +-gst_rtp_g729_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpg729depay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_G729_DEPAY); +-} +diff --git a/gst/rtp/gstrtpg729depay.h b/gst/rtp/gstrtpg729depay.h +index a23562e56..355ee5068 100644 +--- a/gst/rtp/gstrtpg729depay.h ++++ b/gst/rtp/gstrtpg729depay.h +@@ -54,8 +54,6 @@ struct _GstRtpG729DepayClass + + GType gst_rtp_g729_depay_get_type (void); + +-gboolean gst_rtp_g729_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_G729_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpg729pay.c b/gst/rtp/gstrtpg729pay.c +index 78097805e..bc158c1f8 100644 +--- a/gst/rtp/gstrtpg729pay.c ++++ b/gst/rtp/gstrtpg729pay.c +@@ -34,6 +34,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpg729pay.h" + #include "gstrtputils.h" + +@@ -78,6 +79,8 @@ static GstStaticPadTemplate gst_rtp_g729_pay_src_template = + + #define gst_rtp_g729_pay_parent_class parent_class + G_DEFINE_TYPE (GstRTPG729Pay, gst_rtp_g729_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg729pay, "rtpg729pay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_G729_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_g729_pay_finalize (GObject * object) +@@ -186,6 +189,7 @@ gst_rtp_g729_pay_push (GstRTPG729Pay * rtpg729pay, GstBuffer * buf) + if (G_UNLIKELY (rtpg729pay->discont)) { + GST_DEBUG_OBJECT (basepayload, "discont, setting marker bit"); + GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT); ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + gst_rtp_buffer_set_marker (&rtp, TRUE); + rtpg729pay->discont = FALSE; + } +@@ -389,10 +393,3 @@ gst_rtp_g729_pay_change_state (GstElement * element, GstStateChange transition) + + return ret; + } +- +-gboolean +-gst_rtp_g729_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpg729pay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_G729_PAY); +-} +diff --git a/gst/rtp/gstrtpg729pay.h b/gst/rtp/gstrtpg729pay.h +index 1b9246050..c9e76c27b 100644 +--- a/gst/rtp/gstrtpg729pay.h ++++ 
b/gst/rtp/gstrtpg729pay.h +@@ -59,8 +59,6 @@ struct _GstRTPG729PayClass + + GType gst_rtp_g729_pay_get_type (void); + +-gboolean gst_rtp_g729_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_G729_PAY_H__ */ +diff --git a/gst/rtp/gstrtpgsmdepay.c b/gst/rtp/gstrtpgsmdepay.c +index c87c61852..313acb545 100644 +--- a/gst/rtp/gstrtpgsmdepay.c ++++ b/gst/rtp/gstrtpgsmdepay.c +@@ -25,6 +25,7 @@ + #include + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpgsmdepay.h" + #include "gstrtputils.h" + +@@ -65,6 +66,8 @@ static gboolean gst_rtp_gsm_depay_setcaps (GstRTPBaseDepayload * _depayload, + + #define gst_rtp_gsm_depay_parent_class parent_class + G_DEFINE_TYPE (GstRTPGSMDepay, gst_rtp_gsm_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpgsmdepay, "rtpgsmdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_GSM_DEPAY, rtp_element_init (plugin)); + + static void + gst_rtp_gsm_depay_class_init (GstRTPGSMDepayClass * klass) +@@ -143,10 +146,3 @@ gst_rtp_gsm_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp) + + return outbuf; + } +- +-gboolean +-gst_rtp_gsm_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpgsmdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_GSM_DEPAY); +-} +diff --git a/gst/rtp/gstrtpgsmdepay.h b/gst/rtp/gstrtpgsmdepay.h +index e428aa09f..166f55869 100644 +--- a/gst/rtp/gstrtpgsmdepay.h ++++ b/gst/rtp/gstrtpgsmdepay.h +@@ -51,8 +51,6 @@ struct _GstRTPGSMDepayClass + + GType gst_rtp_gsm_depay_get_type (void); + +-gboolean gst_rtp_gsm_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_GSM_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpgsmpay.c b/gst/rtp/gstrtpgsmpay.c +index 25fa0fa0a..b3197cb31 100644 +--- a/gst/rtp/gstrtpgsmpay.c ++++ b/gst/rtp/gstrtpgsmpay.c +@@ -27,6 +27,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpgsmpay.h" + #include "gstrtputils.h" + +@@ -61,6 +62,8 @@ static GstFlowReturn gst_rtp_gsm_pay_handle_buffer (GstRTPBasePayload * payload, + + #define gst_rtp_gsm_pay_parent_class parent_class + G_DEFINE_TYPE (GstRTPGSMPay, gst_rtp_gsm_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpgsmpay, "rtpgsmpay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_GSM_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_gsm_pay_class_init (GstRTPGSMPayClass * klass) +@@ -172,10 +175,3 @@ too_big: + return GST_FLOW_ERROR; + } + } +- +-gboolean +-gst_rtp_gsm_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpgsmpay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_GSM_PAY); +-} +diff --git a/gst/rtp/gstrtpgsmpay.h b/gst/rtp/gstrtpgsmpay.h +index b6437f516..357a7c695 100644 +--- a/gst/rtp/gstrtpgsmpay.h ++++ b/gst/rtp/gstrtpgsmpay.h +@@ -52,7 +52,6 @@ struct _GstRTPGSMPayClass + + GType gst_rtp_gsm_pay_get_type (void); + +-gboolean gst_rtp_gsm_pay_plugin_init (GstPlugin * plugin); + + G_END_DECLS + +diff --git a/gst/rtp/gstrtpgstdepay.c b/gst/rtp/gstrtpgstdepay.c +index 0f081776e..ebf838296 100644 +--- a/gst/rtp/gstrtpgstdepay.c ++++ b/gst/rtp/gstrtpgstdepay.c +@@ -24,6 +24,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpgstdepay.h" + #include "gstrtputils.h" + +@@ -49,6 +50,8 @@ GST_STATIC_PAD_TEMPLATE ("sink", + + #define gst_rtp_gst_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpGSTDepay, gst_rtp_gst_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpgstdepay, "rtpgstdepay", ++ GST_RANK_MARGINAL, 
GST_TYPE_RTP_GST_DEPAY, rtp_element_init (plugin)); + + static void gst_rtp_gst_depay_finalize (GObject * object); + +@@ -602,11 +605,3 @@ gst_rtp_gst_depay_change_state (GstElement * element, GstStateChange transition) + } + return ret; + } +- +- +-gboolean +-gst_rtp_gst_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpgstdepay", +- GST_RANK_MARGINAL, GST_TYPE_RTP_GST_DEPAY); +-} +diff --git a/gst/rtp/gstrtpgstdepay.h b/gst/rtp/gstrtpgstdepay.h +index 9ea9ec6c9..41528792e 100644 +--- a/gst/rtp/gstrtpgstdepay.h ++++ b/gst/rtp/gstrtpgstdepay.h +@@ -59,8 +59,6 @@ struct _GstRtpGSTDepayClass + + GType gst_rtp_gst_depay_get_type (void); + +-gboolean gst_rtp_gst_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_GST_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpgstpay.c b/gst/rtp/gstrtpgstpay.c +index 15b281bd7..13fb7d1b1 100644 +--- a/gst/rtp/gstrtpgstpay.c ++++ b/gst/rtp/gstrtpgstpay.c +@@ -26,6 +26,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpgstpay.h" + #include "gstrtputils.h" + +@@ -102,6 +103,8 @@ static gboolean gst_rtp_gst_pay_src_event (GstRTPBasePayload * payload, + + #define gst_rtp_gst_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpGSTPay, gst_rtp_gst_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpgstpay, "rtpgstpay", GST_RANK_NONE, ++ GST_TYPE_RTP_GST_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_gst_pay_class_init (GstRtpGSTPayClass * klass) +@@ -168,8 +171,7 @@ gst_rtp_gst_pay_reset (GstRtpGSTPay * rtpgstpay, gboolean full) + rtpgstpay->flags &= 0x70; + rtpgstpay->etype = 0; + if (rtpgstpay->pending_buffers) +- g_list_free_full (rtpgstpay->pending_buffers, +- (GDestroyNotify) gst_buffer_list_unref); ++ gst_buffer_list_unref (rtpgstpay->pending_buffers); + rtpgstpay->pending_buffers = NULL; + if (full) { + if (rtpgstpay->taglist) +@@ -268,7 +270,6 @@ gst_rtp_gst_pay_create_from_adapter (GstRtpGSTPay * rtpgstpay, + { + guint avail, mtu; + guint frag_offset; +- GstBufferList *list; + + avail = gst_adapter_available (rtpgstpay->adapter); + if (avail == 0) +@@ -276,7 +277,9 @@ gst_rtp_gst_pay_create_from_adapter (GstRtpGSTPay * rtpgstpay, + + mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpgstpay); + +- list = gst_buffer_list_new_sized ((avail / (mtu - (RTP_HEADER_LEN + 8))) + 1); ++ if (!rtpgstpay->pending_buffers) ++ rtpgstpay->pending_buffers = ++ gst_buffer_list_new_sized ((avail / (mtu - (RTP_HEADER_LEN + 8))) + 1); + frag_offset = 0; + + while (avail) { +@@ -332,8 +335,10 @@ gst_rtp_gst_pay_create_from_adapter (GstRtpGSTPay * rtpgstpay, + frag_offset += payload_len; + avail -= payload_len; + +- if (avail == 0) ++ if (avail == 0) { + gst_rtp_buffer_set_marker (&rtp, TRUE); ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); ++ } + + gst_rtp_buffer_unmap (&rtp); + +@@ -351,12 +356,21 @@ gst_rtp_gst_pay_create_from_adapter (GstRtpGSTPay * rtpgstpay, + GST_BUFFER_PTS (outbuf) = timestamp; + + /* and add to list */ +- gst_buffer_list_insert (list, -1, outbuf); ++ gst_buffer_list_insert (rtpgstpay->pending_buffers, -1, outbuf); + } + + rtpgstpay->flags &= 0x70; + rtpgstpay->etype = 0; +- rtpgstpay->pending_buffers = g_list_append (rtpgstpay->pending_buffers, list); ++ ++ return TRUE; ++} ++ ++static gboolean ++retimestamp_buffer (GstBuffer ** buffer, guint idx, gpointer user_data) ++{ ++ GstClockTime *timestamp = user_data; ++ ++ GST_BUFFER_PTS (*buffer) = *timestamp; + + return TRUE; + } +@@ -365,22 +379,21 @@ static GstFlowReturn + 
gst_rtp_gst_pay_flush (GstRtpGSTPay * rtpgstpay, GstClockTime timestamp)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+- GList *iter;
+
+ gst_rtp_gst_pay_create_from_adapter (rtpgstpay, timestamp);
+
+- iter = rtpgstpay->pending_buffers;
+- while (iter) {
+- GstBufferList *list = iter->data;
++ if (rtpgstpay->pending_buffers) {
++ // make sure all buffers in the buffer list have the correct timestamp.
++ // If we created packets based on an event they would have
++ // GST_CLOCK_TIME_NONE as PTS.
+
+- rtpgstpay->pending_buffers = iter =
+- g_list_delete_link (rtpgstpay->pending_buffers, iter);
++ gst_buffer_list_foreach (rtpgstpay->pending_buffers, retimestamp_buffer,
++ &timestamp);
+
+ /* push the whole buffer list at once */
+ ret = gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtpgstpay),
+- list);
+- if (ret != GST_FLOW_OK)
+- break;
++ rtpgstpay->pending_buffers);
++ rtpgstpay->pending_buffers = NULL;
+ }
+
+ return ret;
+@@ -526,6 +539,11 @@ gst_rtp_gst_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
+ g_atomic_int_set (&rtpgstpay->force_config, TRUE);
+ }
+
++ if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
++ // We must flush at this point, as no next input frame is expected
++ gst_rtp_gst_pay_flush (rtpgstpay, GST_CLOCK_TIME_NONE);
++ }
++
+ ret =
+ GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload,
+ gst_event_ref (event));
+@@ -581,12 +599,10 @@ gst_rtp_gst_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
+ GST_DEBUG_OBJECT (rtpgstpay, "make event type %d for %s",
+ etype, GST_EVENT_TYPE_NAME (event));
+ gst_rtp_gst_pay_send_event (rtpgstpay, etype, event);
+- /* Do not send stream-start right away since caps/new-segment were not yet
+- sent, so our data would be considered invalid */
+- if (etype != 4) {
+- /* flush the adapter immediately */
+- gst_rtp_gst_pay_flush (rtpgstpay, GST_CLOCK_TIME_NONE);
+- }
++ // do not flush events here yet as they would get no timestamp at all or
++ // the timestamp of the previous buffer, both of which are bogus. We need
++ // to wait until the next actual input frame to know the timestamp that
++ // applies to the event.
+ } + + gst_event_unref (event); +@@ -694,10 +710,3 @@ gst_rtp_gst_pay_handle_buffer (GstRTPBasePayload * basepayload, + + return ret; + } +- +-gboolean +-gst_rtp_gst_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpgstpay", +- GST_RANK_NONE, GST_TYPE_RTP_GST_PAY); +-} +diff --git a/gst/rtp/gstrtpgstpay.h b/gst/rtp/gstrtpgstpay.h +index 6625a4dbc..3db3d6169 100644 +--- a/gst/rtp/gstrtpgstpay.h ++++ b/gst/rtp/gstrtpgstpay.h +@@ -44,7 +44,7 @@ struct _GstRtpGSTPay + { + GstRTPBasePayload payload; + +- GList *pending_buffers; /* GstBufferList */ ++ GstBufferList *pending_buffers; + GstAdapter *adapter; + guint8 flags; + guint8 etype; +@@ -66,8 +66,6 @@ struct _GstRtpGSTPayClass + + GType gst_rtp_gst_pay_get_type (void); + +-gboolean gst_rtp_gst_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_GST_PAY_H__ */ +diff --git a/gst/rtp/gstrtph261depay.c b/gst/rtp/gstrtph261depay.c +index 164d2f04d..23a888a4e 100644 +--- a/gst/rtp/gstrtph261depay.c ++++ b/gst/rtp/gstrtph261depay.c +@@ -46,6 +46,7 @@ + + #include + #include ++#include "gstrtpelements.h" + #include "gstrtph261depay.h" + #include "gstrtph261pay.h" /* GstRtpH261PayHeader */ + #include "gstrtputils.h" +@@ -75,10 +76,11 @@ static GstStaticPadTemplate gst_rtp_h261_depay_sink_template = + "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", " + "clock-rate = (int) 90000, " "encoding-name = (string) \"H261\"") + ); +- ++#define parent_class gst_rtp_h261_depay_parent_class + G_DEFINE_TYPE (GstRtpH261Depay, gst_rtp_h261_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); +-#define parent_class gst_rtp_h261_depay_parent_class ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph261depay, "rtph261depay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_H261_DEPAY, rtp_element_init (plugin)); + + static GstBuffer * + gst_rtp_h261_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp) +@@ -285,10 +287,3 @@ gst_rtp_h261_depay_init (GstRtpH261Depay * depay) + depay->adapter = gst_adapter_new (); + depay->leftover = NO_LEFTOVER; + } +- +-gboolean +-gst_rtp_h261_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtph261depay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_H261_DEPAY); +-} +diff --git a/gst/rtp/gstrtph261depay.h b/gst/rtp/gstrtph261depay.h +index f87f81773..821eff993 100644 +--- a/gst/rtp/gstrtph261depay.h ++++ b/gst/rtp/gstrtph261depay.h +@@ -54,7 +54,5 @@ struct _GstRtpH261DepayClass + + GType gst_rtp_h261_depay_get_type (void); + +-gboolean gst_rtp_h261_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + #endif /* __GST_RTP_H261_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtph261pay.c b/gst/rtp/gstrtph261pay.c +index 9b8c0d922..a7c9033bb 100644 +--- a/gst/rtp/gstrtph261pay.c ++++ b/gst/rtp/gstrtph261pay.c +@@ -48,6 +48,7 @@ + # include "config.h" + #endif + ++#include "gstrtpelements.h" + #include "gstrtph261pay.h" + #include "gstrtputils.h" + #include +@@ -82,8 +83,10 @@ static GstStaticPadTemplate gst_rtp_h261_pay_src_template = + "clock-rate = (int) 90000, " "encoding-name = (string) \"H261\"") + ); + +-G_DEFINE_TYPE (GstRtpH261Pay, gst_rtp_h261_pay, GST_TYPE_RTP_BASE_PAYLOAD); + #define parent_class gst_rtp_h261_pay_parent_class ++G_DEFINE_TYPE (GstRtpH261Pay, gst_rtp_h261_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph261pay, "rtph261pay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_H261_PAY, rtp_element_init (plugin)); + + typedef struct + { +@@ -813,8 +816,9 @@ gst_rtp_h261_pay_fragment_push (GstRtpH261Pay * pay, GstBuffer * buffer, + + 
nbytes = bitrange_to_bytes (start, end); + +- outbuf = gst_rtp_buffer_new_allocate (nbytes + +- GST_RTP_H261_PAYLOAD_HEADER_LEN, 0, 0); ++ outbuf = ++ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD (pay), ++ nbytes + GST_RTP_H261_PAYLOAD_HEADER_LEN, 0, 0); + gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp); + payload = gst_rtp_buffer_get_payload (&rtp); + header = (GstRtpH261PayHeader *) payload; +@@ -840,6 +844,8 @@ gst_rtp_h261_pay_fragment_push (GstRtpH261Pay * pay, GstBuffer * buffer, + bits + GST_ROUND_DOWN_8 (start) / 8, nbytes); + + GST_BUFFER_TIMESTAMP (outbuf) = pay->timestamp; ++ if (marker) ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + gst_rtp_buffer_set_marker (&rtp, marker); + pay->offset = end & 7; + +@@ -1062,10 +1068,3 @@ gst_rtp_h261_pay_class_init (GstRtpH261PayClass * klass) + GST_DEBUG_CATEGORY_INIT (rtph261pay_debug, "rtph261pay", 0, + "H261 RTP Payloader"); + } +- +-gboolean +-gst_rtp_h261_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtph261pay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_H261_PAY); +-} +diff --git a/gst/rtp/gstrtph261pay.h b/gst/rtp/gstrtph261pay.h +index eae4bf2db..1052d01be 100644 +--- a/gst/rtp/gstrtph261pay.h ++++ b/gst/rtp/gstrtph261pay.h +@@ -94,7 +94,5 @@ typedef struct _GstRtpH261PayHeader + + GType gst_rtp_h261_pay_get_type (void); + +-gboolean gst_rtp_h261_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + #endif /* __GST_RTP_H261_PAY_H__ */ +diff --git a/gst/rtp/gstrtph263depay.c b/gst/rtp/gstrtph263depay.c +index 296f91787..f6b41a5bc 100644 +--- a/gst/rtp/gstrtph263depay.c ++++ b/gst/rtp/gstrtph263depay.c +@@ -31,6 +31,7 @@ + + #include + #include ++#include "gstrtpelements.h" + #include "gstrtph263depay.h" + #include "gstrtputils.h" + +@@ -72,6 +73,8 @@ static GstStaticPadTemplate gst_rtp_h263_depay_sink_template = + #define gst_rtp_h263_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpH263Depay, gst_rtp_h263_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph263depay, "rtph263depay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_H263_DEPAY, rtp_element_init (plugin)); + + static void gst_rtp_h263_depay_finalize (GObject * object); + +@@ -438,10 +441,3 @@ gst_rtp_h263_depay_change_state (GstElement * element, + } + return ret; + } +- +-gboolean +-gst_rtp_h263_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtph263depay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_H263_DEPAY); +-} +diff --git a/gst/rtp/gstrtph263depay.h b/gst/rtp/gstrtph263depay.h +index 2d9ca55c7..485fc9dfa 100644 +--- a/gst/rtp/gstrtph263depay.h ++++ b/gst/rtp/gstrtph263depay.h +@@ -58,8 +58,6 @@ struct _GstRtpH263DepayClass + + GType gst_rtp_h263_depay_get_type (void); + +-gboolean gst_rtp_h263_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_H263_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtph263pay.c b/gst/rtp/gstrtph263pay.c +index a8def87aa..f89843a5b 100644 +--- a/gst/rtp/gstrtph263pay.c ++++ b/gst/rtp/gstrtph263pay.c +@@ -29,6 +29,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtph263pay.h" + #include "gstrtputils.h" + +@@ -399,6 +400,8 @@ static void gst_rtp_h263_pay_package_destroy (GstRtpH263PayPackage * pack); + + #define gst_rtp_h263_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpH263Pay, gst_rtp_h263_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph263pay, "rtph263pay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_H263_PAY, rtp_element_init (plugin)); + + 
static void + gst_rtp_h263_pay_class_init (GstRtpH263PayClass * klass) +@@ -1308,8 +1311,10 @@ gst_rtp_h263_pay_push (GstRtpH263Pay * rtph263pay, + GST_BUFFER_PTS (package->outbuf) = rtph263pay->first_ts; + + gst_rtp_buffer_set_marker (&rtp, package->marker); +- if (package->marker) ++ if (package->marker) { ++ GST_BUFFER_FLAG_SET (package->outbuf, GST_BUFFER_FLAG_MARKER); + GST_DEBUG_OBJECT (rtph263pay, "Marker set!"); ++ } + + gst_rtp_buffer_unmap (&rtp); + +@@ -1355,7 +1360,9 @@ gst_rtp_h263_pay_A_fragment_push (GstRtpH263Pay * rtph263pay, + + pack->gobn = context->gobs[first]->gobn; + pack->mode = GST_RTP_H263_PAYLOAD_HEADER_MODE_A; +- pack->outbuf = gst_rtp_buffer_new_allocate (pack->mode, 0, 0); ++ pack->outbuf = ++ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD ++ (rtph263pay), pack->mode, 0, 0); + + GST_DEBUG_OBJECT (rtph263pay, "Sending len:%d data to push function", + pack->payload_len); +@@ -1412,7 +1419,9 @@ gst_rtp_h263_pay_B_fragment_push (GstRtpH263Pay * rtph263pay, + } + + pack->payload_len = pack->payload_end - pack->payload_start + 1; +- pack->outbuf = gst_rtp_buffer_new_allocate (pack->mode, 0, 0); ++ pack->outbuf = ++ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD ++ (rtph263pay), pack->mode, 0, 0); + + return gst_rtp_h263_pay_push (rtph263pay, context, pack); + } +@@ -1637,7 +1646,8 @@ gst_rtp_h263_send_entire_frame (GstRtpH263Pay * rtph263pay, + rtph263pay->available_data); + + pack->outbuf = +- gst_rtp_buffer_new_allocate (GST_RTP_H263_PAYLOAD_HEADER_MODE_A, 0, 0); ++ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD ++ (rtph263pay), GST_RTP_H263_PAYLOAD_HEADER_MODE_A, 0, 0); + + return gst_rtp_h263_pay_push (rtph263pay, context, pack); + } +@@ -1860,10 +1870,3 @@ gst_rtp_h263_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer) + + return ret; + } +- +-gboolean +-gst_rtp_h263_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtph263pay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_H263_PAY); +-} +diff --git a/gst/rtp/gstrtph263pay.h b/gst/rtp/gstrtph263pay.h +index 2abfc871e..c77e6ac15 100644 +--- a/gst/rtp/gstrtph263pay.h ++++ b/gst/rtp/gstrtph263pay.h +@@ -409,7 +409,5 @@ struct _GstRtpH263PayPackage + + GType gst_rtp_h263_pay_get_type (void); + +-gboolean gst_rtp_h263_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + #endif /* __GST_RTP_H263_PAY_H__ */ +diff --git a/gst/rtp/gstrtph263pdepay.c b/gst/rtp/gstrtph263pdepay.c +index a1fc40c4a..8b371ba92 100644 +--- a/gst/rtp/gstrtph263pdepay.c ++++ b/gst/rtp/gstrtph263pdepay.c +@@ -25,6 +25,7 @@ + + #include + #include ++#include "gstrtpelements.h" + #include "gstrtph263pdepay.h" + #include "gstrtputils.h" + +@@ -84,6 +85,8 @@ static GstStaticPadTemplate gst_rtp_h263p_depay_sink_template = + #define gst_rtp_h263p_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpH263PDepay, gst_rtp_h263p_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph263pdepay, "rtph263pdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_H263P_DEPAY, rtp_element_init (plugin)); + + static void gst_rtp_h263p_depay_finalize (GObject * object); + +@@ -231,6 +234,98 @@ no_caps: + } + } + ++static void ++gst_rtp_h263p_depay_decorate_output_buffer (GstRtpH263PDepay * rtph263pdepay, ++ GstBuffer * outbuf) ++{ ++ gboolean is_intra = FALSE; ++ GstBitReader bits; ++ guint8 pic_hdr[16]; ++ gsize pic_hdr_len = 0; ++ guint32 psc, ptype, mpptype; ++ guint8 ufep; ++ ++ pic_hdr_len = gst_buffer_extract (outbuf, 0, pic_hdr, sizeof 
(pic_hdr)); ++ ++ GST_MEMDUMP_OBJECT (rtph263pdepay, "pic_hdr", pic_hdr, pic_hdr_len); ++ ++#if 0 ++ if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= GST_LEVEL_MEMDUMP) { ++ gchar bit_str[1 + sizeof (pic_hdr) * 8] = { 0, }; ++ guint8 b; ++ ++ gst_bit_reader_init (&bits, pic_hdr, pic_hdr_len); ++ while ((gst_bit_reader_get_bits_uint8 (&bits, &b, 1))) { ++ g_strlcat (bit_str, b ? "1" : "0", sizeof (bit_str)); ++ } ++ GST_TRACE_OBJECT (rtph263pdepay, "pic_hdr bits: %s", bit_str); ++ } ++#endif ++ ++ gst_bit_reader_init (&bits, pic_hdr, pic_hdr_len); ++ ++ /* PSC - Picture Start Code: 22 bits: 0000 0000 0000 0000 10 0000 */ ++ if (!gst_bit_reader_get_bits_uint32 (&bits, &psc, 22) || psc != 0x20) { ++ GST_WARNING_OBJECT (rtph263pdepay, "No picture start code"); ++ return; ++ } ++ ++ /* TR - Temporal Reference: 8 bits */ ++ if (!gst_bit_reader_skip (&bits, 8)) { ++ GST_WARNING_OBJECT (rtph263pdepay, "Short picture header: no TR"); ++ return; ++ } ++ ++ /* PTYPE (first 8 bits) */ ++ if (!gst_bit_reader_get_bits_uint32 (&bits, &ptype, 8) || (ptype >> 6) != 2) { ++ GST_WARNING_OBJECT (rtph263pdepay, "Short picture header: no PTYPE"); ++ return; ++ } ++ ++ /* PTYPE: check for extended PTYPE (bits 6-8 = 111) */ ++ if ((ptype & 7) != 7) { ++ /* No extended PTYPE, read remaining 5 bits */ ++ if (!gst_bit_reader_get_bits_uint32 (&bits, &ptype, 5)) { ++ GST_WARNING_OBJECT (rtph263pdepay, "Short picture header: no PTYPE"); ++ return; ++ } ++ is_intra = (ptype & 0x10) == 0; ++ goto done; ++ } ++ ++ /* UFEP - Update Full Extended PTYPE */ ++ ufep = 0; ++ if (!gst_bit_reader_get_bits_uint8 (&bits, &ufep, 3) || ufep > 1) { ++ GST_WARNING_OBJECT (rtph263pdepay, "Short picture header: no PLUSPTYPE, %d", ++ ufep); ++ return; ++ } ++ ++ /* Skip optional part of PLUSPTYPE (OPPTYPE) */ ++ if (ufep == 1 && !gst_bit_reader_skip (&bits, 18)) { ++ GST_WARNING_OBJECT (rtph263pdepay, "Short picture header: no OPPTYPE"); ++ return; ++ } ++ ++ /* Mandatory part of PLUSPTYPE (MPPTYPE) */ ++ if (!gst_bit_reader_get_bits_uint32 (&bits, &mpptype, 9) ++ || (mpptype & 7) != 1) { ++ GST_WARNING_OBJECT (rtph263pdepay, "Short picture header: no MPPTYPE"); ++ return; ++ } ++ ++ is_intra = (mpptype >> 6) == 0; ++ ++done: ++ ++ if (is_intra) { ++ GST_LOG_OBJECT (rtph263pdepay, "I-frame"); ++ GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT); ++ } else { ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT); ++ } ++} ++ + static GstBuffer * + gst_rtp_h263p_depay_process (GstRTPBaseDepayload * depayload, + GstRTPBuffer * rtp) +@@ -333,6 +428,8 @@ gst_rtp_h263p_depay_process (GstRTPBaseDepayload * depayload, + + gst_rtp_drop_non_video_meta (rtph263pdepay, outbuf); + ++ gst_rtp_h263p_depay_decorate_output_buffer (rtph263pdepay, outbuf); ++ + return outbuf; + } else { + /* frame not completed: store in adapter */ +@@ -394,10 +491,3 @@ gst_rtp_h263p_depay_change_state (GstElement * element, + } + return ret; + } +- +-gboolean +-gst_rtp_h263p_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtph263pdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_H263P_DEPAY); +-} +diff --git a/gst/rtp/gstrtph263pdepay.h b/gst/rtp/gstrtph263pdepay.h +index bbdb2b0a4..bdcb826a9 100644 +--- a/gst/rtp/gstrtph263pdepay.h ++++ b/gst/rtp/gstrtph263pdepay.h +@@ -55,8 +55,6 @@ struct _GstRtpH263PDepayClass + + GType gst_rtp_h263p_depay_get_type (void); + +-gboolean gst_rtp_h263p_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_H263P_DEPAY_H__ */ +diff --git 
a/gst/rtp/gstrtph263ppay.c b/gst/rtp/gstrtph263ppay.c +index 3b1ee0512..b6d8040ae 100644 +--- a/gst/rtp/gstrtph263ppay.c ++++ b/gst/rtp/gstrtph263ppay.c +@@ -28,6 +28,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtph263ppay.h" + #include "gstrtputils.h" + +@@ -112,6 +113,8 @@ static GstFlowReturn gst_rtp_h263p_pay_handle_buffer (GstRTPBasePayload * + + #define gst_rtp_h263p_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpH263PPay, gst_rtp_h263p_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph263ppay, "rtph263ppay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_H263P_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_h263p_pay_class_init (GstRtpH263PPayClass * klass) +@@ -732,11 +735,15 @@ gst_rtp_h263p_pay_flush (GstRtpH263PPay * rtph263ppay) + if (next_gop > 0) + towrite = MIN (next_gop, towrite); + +- outbuf = gst_rtp_buffer_new_allocate (header_len, 0, 0); ++ outbuf = ++ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD ++ (rtph263ppay), header_len, 0, 0); + + gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp); + /* last fragment gets the marker bit set */ + gst_rtp_buffer_set_marker (&rtp, avail > towrite ? 0 : 1); ++ if (avail <= towrite) ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + + payload = gst_rtp_buffer_get_payload (&rtp); + +@@ -807,10 +814,3 @@ gst_rtp_h263p_pay_handle_buffer (GstRTPBasePayload * payload, + + return ret; + } +- +-gboolean +-gst_rtp_h263p_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtph263ppay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_H263P_PAY); +-} +diff --git a/gst/rtp/gstrtph263ppay.h b/gst/rtp/gstrtph263ppay.h +index 23ec8b8e9..7197b86e5 100644 +--- a/gst/rtp/gstrtph263ppay.h ++++ b/gst/rtp/gstrtph263ppay.h +@@ -63,8 +63,6 @@ struct _GstRtpH263PPayClass + + GType gst_rtp_h263p_pay_get_type (void); + +-gboolean gst_rtp_h263p_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_H263P_PAY_H__ */ +diff --git a/gst/rtp/gstrtph264depay.c b/gst/rtp/gstrtph264depay.c +index cc92c9a14..9cef347c2 100644 +--- a/gst/rtp/gstrtph264depay.c ++++ b/gst/rtp/gstrtph264depay.c +@@ -28,6 +28,7 @@ + #include + #include + #include ++#include "gstrtpelements.h" + #include "gstrtph264depay.h" + #include "gstrtputils.h" + +@@ -38,6 +39,16 @@ GST_DEBUG_CATEGORY_STATIC (rtph264depay_debug); + * expressed a restriction or preference via caps */ + #define DEFAULT_BYTE_STREAM TRUE + #define DEFAULT_ACCESS_UNIT FALSE ++#define DEFAULT_WAIT_FOR_KEYFRAME FALSE ++#define DEFAULT_REQUEST_KEYFRAME FALSE ++ ++enum ++{ ++ PROP_0, ++ PROP_WAIT_FOR_KEYFRAME, ++ PROP_REQUEST_KEYFRAME, ++}; ++ + + /* 3 zero bytes syncword */ + static const guint8 sync_bytes[] = { 0, 0, 0, 1 }; +@@ -79,8 +90,11 @@ GST_STATIC_PAD_TEMPLATE ("sink", + ); + + #define gst_rtp_h264_depay_parent_class parent_class +-G_DEFINE_TYPE (GstRtpH264Depay, gst_rtp_h264_depay, +- GST_TYPE_RTP_BASE_DEPAYLOAD); ++G_DEFINE_TYPE_WITH_CODE (GstRtpH264Depay, gst_rtp_h264_depay, ++ GST_TYPE_RTP_BASE_DEPAYLOAD, GST_DEBUG_CATEGORY_INIT (rtph264depay_debug, ++ "rtph264depay", 0, "H264 Video RTP Depayloader")); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph264depay, "rtph264depay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_H264_DEPAY, rtp_element_init (plugin)); + + static void gst_rtp_h264_depay_finalize (GObject * object); + +@@ -99,6 +113,44 @@ static void gst_rtp_h264_depay_push (GstRtpH264Depay * rtph264depay, + GstBuffer * outbuf, gboolean keyframe, GstClockTime timestamp, + gboolean marker); + 
++static void ++gst_rtp_h264_depay_set_property (GObject * object, guint prop_id, ++ const GValue * value, GParamSpec * pspec) ++{ ++ GstRtpH264Depay *self = GST_RTP_H264_DEPAY (object); ++ ++ switch (prop_id) { ++ case PROP_WAIT_FOR_KEYFRAME: ++ self->wait_for_keyframe = g_value_get_boolean (value); ++ break; ++ case PROP_REQUEST_KEYFRAME: ++ self->request_keyframe = g_value_get_boolean (value); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_rtp_h264_depay_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec) ++{ ++ GstRtpH264Depay *self = GST_RTP_H264_DEPAY (object); ++ ++ switch (prop_id) { ++ case PROP_WAIT_FOR_KEYFRAME: ++ g_value_set_boolean (value, self->wait_for_keyframe); ++ break; ++ case PROP_REQUEST_KEYFRAME: ++ g_value_set_boolean (value, self->request_keyframe); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ + static void + gst_rtp_h264_depay_class_init (GstRtpH264DepayClass * klass) + { +@@ -111,6 +163,36 @@ gst_rtp_h264_depay_class_init (GstRtpH264DepayClass * klass) + gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass; + + gobject_class->finalize = gst_rtp_h264_depay_finalize; ++ gobject_class->set_property = gst_rtp_h264_depay_set_property; ++ gobject_class->get_property = gst_rtp_h264_depay_get_property; ++ ++ /** ++ * GstRtpH264Depay:wait-for-keyframe: ++ * ++ * Wait for the next keyframe after packet loss, ++ * meaningful only when outputting access units ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_WAIT_FOR_KEYFRAME, ++ g_param_spec_boolean ("wait-for-keyframe", "Wait for Keyframe", ++ "Wait for the next keyframe after packet loss, meaningful only when " ++ "outputting access units", ++ DEFAULT_WAIT_FOR_KEYFRAME, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstRtpH264Depay:request-keyframe: ++ * ++ * Request new keyframe when packet loss is detected ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_REQUEST_KEYFRAME, ++ g_param_spec_boolean ("request-keyframe", "Request Keyframe", ++ "Request new keyframe when packet loss is detected", ++ DEFAULT_REQUEST_KEYFRAME, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + gst_element_class_add_static_pad_template (gstelement_class, + &gst_rtp_h264_depay_src_template); +@@ -139,6 +221,8 @@ gst_rtp_h264_depay_init (GstRtpH264Depay * rtph264depay) + (GDestroyNotify) gst_buffer_unref); + rtph264depay->pps = g_ptr_array_new_with_free_func ( + (GDestroyNotify) gst_buffer_unref); ++ rtph264depay->wait_for_keyframe = DEFAULT_WAIT_FOR_KEYFRAME; ++ rtph264depay->request_keyframe = DEFAULT_REQUEST_KEYFRAME; + } + + static void +@@ -146,6 +230,7 @@ gst_rtp_h264_depay_reset (GstRtpH264Depay * rtph264depay, gboolean hard) + { + gst_adapter_clear (rtph264depay->adapter); + rtph264depay->wait_start = TRUE; ++ rtph264depay->waiting_for_keyframe = rtph264depay->wait_for_keyframe; + gst_adapter_clear (rtph264depay->picture_adapter); + rtph264depay->picture_start = FALSE; + rtph264depay->last_keyframe = FALSE; +@@ -351,7 +436,7 @@ gst_rtp_h264_set_src_caps (GstRtpH264Depay * rtph264depay) + guint8 *data; + guint len; + guint new_size; +- guint i; ++ guint i, first_sps, num_sps, first_pps, num_pps; + guchar level = 0; + guchar profile_compat = G_MAXUINT8; + +@@ -392,11 +477,22 @@ gst_rtp_h264_set_src_caps (GstRtpH264Depay * rtph264depay) + + /* 6 bits reserved | 2 bits 
lengthSizeMinusOn */ + *data++ = 0xff; ++ ++ if (rtph264depay->sps->len > 31) { ++ GST_WARNING_OBJECT (rtph264depay, ++ "Too many SPS to put in codec_data. Sending the most recent 31"); ++ num_sps = 31; ++ first_sps = rtph264depay->sps->len - 31; ++ } else { ++ num_sps = rtph264depay->sps->len; ++ first_sps = 0; ++ } ++ + /* 3 bits reserved | 5 bits numOfSequenceParameterSets */ +- *data++ = 0xe0 | (rtph264depay->sps->len & 0x1f); ++ *data++ = 0xe0 | (num_sps & 0x1f); + + /* copy all SPS */ +- for (i = 0; i < rtph264depay->sps->len; i++) { ++ for (i = first_sps; i < rtph264depay->sps->len; i++) { + gst_buffer_map (g_ptr_array_index (rtph264depay->sps, i), &nalmap, + GST_MAP_READ); + +@@ -409,10 +505,21 @@ gst_rtp_h264_set_src_caps (GstRtpH264Depay * rtph264depay) + gst_buffer_unmap (g_ptr_array_index (rtph264depay->sps, i), &nalmap); + } + ++ if (rtph264depay->pps->len > 255) { ++ GST_WARNING_OBJECT (rtph264depay, ++ "Too many PPS to put in codec_data. Sending the most recent 255"); ++ num_pps = 255; ++ first_pps = rtph264depay->pps->len - 255; ++ } else { ++ num_pps = rtph264depay->pps->len; ++ first_pps = 0; ++ } ++ + /* 8 bits numOfPictureParameterSets */ +- *data++ = rtph264depay->pps->len; ++ *data++ = num_pps; ++ + /* copy all PPS */ +- for (i = 0; i < rtph264depay->pps->len; i++) { ++ for (i = first_pps; i < rtph264depay->pps->len; i++) { + gst_buffer_map (g_ptr_array_index (rtph264depay->pps, i), &nalmap, + GST_MAP_READ); + +@@ -559,14 +666,16 @@ gst_rtp_h264_add_sps_pps (GstElement * rtph264, GPtrArray * sps_array, + parse_sps (&spsmap, &tmp_sps_id); + + if (sps_id == tmp_sps_id) { +- if (map.size == spsmap.size && ++ /* If this is already the most recent SPS and unchanged, nothing to do */ ++ if (i == (sps_array->len - 1) && map.size == spsmap.size && + memcmp (map.data, spsmap.data, spsmap.size) == 0) { +- GST_LOG_OBJECT (rtph264, "Unchanged SPS %u, not updating", sps_id); ++ GST_LOG_OBJECT (rtph264, ++ "Unchanged SPS %u already most recent, not updating", sps_id); + gst_buffer_unmap (sps, &spsmap); + goto drop; + } else { + gst_buffer_unmap (sps, &spsmap); +- g_ptr_array_remove_index_fast (sps_array, i); ++ g_ptr_array_remove_index (sps_array, i); + g_ptr_array_add (sps_array, nal); + GST_LOG_OBJECT (rtph264, "Modified SPS %u, replacing", sps_id); + goto done; +@@ -597,15 +706,17 @@ gst_rtp_h264_add_sps_pps (GstElement * rtph264, GPtrArray * sps_array, + parse_pps (&ppsmap, &tmp_sps_id, &tmp_pps_id); + + if (pps_id == tmp_pps_id) { +- if (map.size == ppsmap.size && ++ /* If this is already the most recent PPS and unchanged, nothing to do */ ++ if (i == (pps_array->len - 1) && map.size == ppsmap.size && + memcmp (map.data, ppsmap.data, ppsmap.size) == 0) { +- GST_LOG_OBJECT (rtph264, "Unchanged PPS %u:%u, not updating", sps_id, ++ GST_LOG_OBJECT (rtph264, ++ "Unchanged PPS %u:%u already most recent, not updating", sps_id, + pps_id); + gst_buffer_unmap (pps, &ppsmap); + goto drop; + } else { + gst_buffer_unmap (pps, &ppsmap); +- g_ptr_array_remove_index_fast (pps_array, i); ++ g_ptr_array_remove_index (pps_array, i); + g_ptr_array_add (pps_array, nal); + GST_LOG_OBJECT (rtph264, "Modified PPS %u:%u, replacing", + sps_id, pps_id); +@@ -926,31 +1037,31 @@ gst_rtp_h264_depay_handle_nal (GstRtpH264Depay * rtph264depay, GstBuffer * nal, + if (rtph264depay->merge) { + gboolean start = FALSE, complete = FALSE; + +- /* marker bit isn't mandatory so in the following code we try to guess +- * an AU boundary by detecting a new picture start */ +- if (!marker) { +- /* consider a coded slices 
(IDR or not) to start a picture, +- * (so ending the previous one) if first_mb_in_slice == 0 +- * (non-0 is part of previous one) */ +- /* NOTE this is not entirely according to Access Unit specs in 7.4.1.2.4, +- * but in practice it works in sane cases, needs not much parsing, +- * and also works with broken frame_num in NAL (where spec-wise would fail) */ +- /* FIXME: this code isn't correct for interlaced content as AUs should be +- * constructed with pairs of fields and the guess here will just push out +- * AUs with a single field in it */ +- if (nal_type == 1 || nal_type == 2 || nal_type == 5) { +- /* we have a picture start */ +- start = TRUE; +- if (map.data[5] & 0x80) { +- /* first_mb_in_slice == 0 completes a picture */ +- complete = TRUE; +- } +- } else if (nal_type >= 6 && nal_type <= 9) { +- /* SEI, SPS, PPS, AU terminate picture */ ++ /* consider a coded slices (IDR or not) to start a picture, ++ * (so ending the previous one) if first_mb_in_slice == 0 ++ * (non-0 is part of previous one) */ ++ /* NOTE this is not entirely according to Access Unit specs in 7.4.1.2.4, ++ * but in practice it works in sane cases, needs not much parsing, ++ * and also works with broken frame_num in NAL (where spec-wise would fail) */ ++ /* FIXME: this code isn't correct for interlaced content as AUs should be ++ * constructed with pairs of fields and the guess here will just push out ++ * AUs with a single field in it */ ++ if (nal_type == 1 || nal_type == 2 || nal_type == 5) { ++ /* we have a picture start */ ++ start = TRUE; ++ if (map.data[5] & 0x80) { ++ /* first_mb_in_slice == 0 completes a picture */ + complete = TRUE; + } +- GST_DEBUG_OBJECT (depayload, "start %d, complete %d", start, complete); ++ } else if (nal_type >= 6 && nal_type <= 9) { ++ /* SEI, SPS, PPS, AU terminate picture */ ++ complete = TRUE; ++ } ++ GST_DEBUG_OBJECT (depayload, "start %d, complete %d", start, complete); + ++ /* marker bit isn't mandatory so in the following code we try to guess ++ * an AU boundary by detecting a new picture start */ ++ if (!marker) { + if (complete && rtph264depay->picture_start) + outbuf = gst_rtp_h264_complete_au (rtph264depay, &out_timestamp, + &out_keyframe); +@@ -958,6 +1069,9 @@ gst_rtp_h264_depay_handle_nal (GstRtpH264Depay * rtph264depay, GstBuffer * nal, + /* add to adapter */ + gst_buffer_unmap (nal, &map); + ++ if (!rtph264depay->picture_start && start && out_keyframe) ++ rtph264depay->waiting_for_keyframe = FALSE; ++ + GST_DEBUG_OBJECT (depayload, "adding NAL to picture adapter"); + gst_adapter_push (rtph264depay->picture_adapter, nal); + rtph264depay->last_ts = in_timestamp; +@@ -975,8 +1089,15 @@ gst_rtp_h264_depay_handle_nal (GstRtpH264Depay * rtph264depay, GstBuffer * nal, + } + + if (outbuf) { +- gst_rtp_h264_depay_push (rtph264depay, outbuf, out_keyframe, out_timestamp, +- marker); ++ if (!rtph264depay->waiting_for_keyframe) { ++ gst_rtp_h264_depay_push (rtph264depay, outbuf, out_keyframe, ++ out_timestamp, marker); ++ } else { ++ GST_LOG_OBJECT (depayload, ++ "Dropping %" GST_PTR_FORMAT ", we are waiting for a keyframe", ++ outbuf); ++ gst_buffer_unref (outbuf); ++ } + } + + return; +@@ -1030,12 +1151,25 @@ gst_rtp_h264_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp) + + rtph264depay = GST_RTP_H264_DEPAY (depayload); + ++ if (!rtph264depay->merge) ++ rtph264depay->waiting_for_keyframe = FALSE; ++ + /* flush remaining data on discont */ + if (GST_BUFFER_IS_DISCONT (rtp->buffer)) { + gst_adapter_clear (rtph264depay->adapter); + rtph264depay->wait_start = 
TRUE; + rtph264depay->current_fu_type = 0; + rtph264depay->last_fu_seqnum = 0; ++ ++ if (rtph264depay->merge && rtph264depay->wait_for_keyframe) { ++ rtph264depay->waiting_for_keyframe = TRUE; ++ } ++ ++ ++ if (rtph264depay->request_keyframe) ++ gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depayload), ++ gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE, ++ TRUE, 0)); + } + + { +@@ -1376,13 +1510,3 @@ gst_rtp_h264_depay_change_state (GstElement * element, + } + return ret; + } +- +-gboolean +-gst_rtp_h264_depay_plugin_init (GstPlugin * plugin) +-{ +- GST_DEBUG_CATEGORY_INIT (rtph264depay_debug, "rtph264depay", 0, +- "H264 Video RTP Depayloader"); +- +- return gst_element_register (plugin, "rtph264depay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_H264_DEPAY); +-} +diff --git a/gst/rtp/gstrtph264depay.h b/gst/rtp/gstrtph264depay.h +index 2cb2167a4..8ca7381be 100644 +--- a/gst/rtp/gstrtph264depay.h ++++ b/gst/rtp/gstrtph264depay.h +@@ -71,6 +71,10 @@ struct _GstRtpH264Depay + /* downstream allocator */ + GstAllocator *allocator; + GstAllocationParams params; ++ ++ gboolean wait_for_keyframe; ++ gboolean request_keyframe; ++ gboolean waiting_for_keyframe; + }; + + struct _GstRtpH264DepayClass +@@ -80,8 +84,6 @@ struct _GstRtpH264DepayClass + + GType gst_rtp_h264_depay_get_type (void); + +-gboolean gst_rtp_h264_depay_plugin_init (GstPlugin * plugin); +- + gboolean gst_rtp_h264_add_sps_pps (GstElement * rtph264, GPtrArray * sps, + GPtrArray * pps, GstBuffer * nal); + +diff --git a/gst/rtp/gstrtph264pay.c b/gst/rtp/gstrtph264pay.c +index 67353283e..860913bfe 100644 +--- a/gst/rtp/gstrtph264pay.c ++++ b/gst/rtp/gstrtph264pay.c +@@ -31,6 +31,7 @@ + /* Included to not duplicate gst_rtp_h264_add_sps_pps () */ + #include "gstrtph264depay.h" + ++#include "gstrtpelements.h" + #include "gstrtph264pay.h" + #include "gstrtputils.h" + #include "gstbuffermemory.h" +@@ -133,6 +134,8 @@ static void gst_rtp_h264_pay_reset_bundle (GstRtpH264Pay * rtph264pay); + + #define gst_rtp_h264_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpH264Pay, gst_rtp_h264_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph264pay, "rtph264pay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_H264_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_h264_pay_class_init (GstRtpH264PayClass * klass) +@@ -220,7 +223,7 @@ static void + gst_rtp_h264_pay_init (GstRtpH264Pay * rtph264pay) + { + rtph264pay->queue = g_array_new (FALSE, FALSE, sizeof (guint)); +- rtph264pay->profile = 0; ++ rtph264pay->profile_level = 0; + rtph264pay->sps = g_ptr_array_new_with_free_func ( + (GDestroyNotify) gst_buffer_unref); + rtph264pay->pps = g_ptr_array_new_with_free_func ( +@@ -318,7 +321,7 @@ gst_rtp_h264_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad, + for (i = 0; i < gst_caps_get_size (allowed_caps); i++) { + GstStructure *s = gst_caps_get_structure (allowed_caps, i); + GstStructure *new_s = gst_structure_new_empty ("video/x-h264"); +- const gchar *profile_level_id; ++ const gchar *profile_level_id, *profile; + + profile_level_id = gst_structure_get_string (s, "profile-level-id"); + +@@ -340,9 +343,9 @@ gst_rtp_h264_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad, + GST_LOG_OBJECT (payload, "In caps, have profile %s and level %s", + profile, level); + +- if (!strcmp (profile, "constrained-baseline")) ++ if (!strcmp (profile, "constrained-baseline")) { + gst_structure_set (new_s, "profile", G_TYPE_STRING, profile, NULL); +- else { ++ } else { + GValue val = { 0, }; + GValue profiles = { 0, }; + 
+@@ -382,6 +385,8 @@ gst_rtp_h264_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad, + gst_structure_set (new_s, + "profile", G_TYPE_STRING, "constrained-baseline", NULL); + } ++ } else if ((profile = gst_structure_get_string (s, "profile"))) { ++ gst_structure_set (new_s, "profile", G_TYPE_STRING, profile, NULL); + } else { + /* No profile-level-id means baseline or unrestricted */ + +@@ -454,14 +459,13 @@ gst_rtp_h264_pay_src_query (GstPad * pad, GstObject * parent, GstQuery * query) + return gst_pad_query_default (pad, parent, query); + } + +- + /* take the currently configured SPS and PPS lists and set them on the caps as + * sprop-parameter-sets */ + static gboolean + gst_rtp_h264_pay_set_sps_pps (GstRTPBasePayload * basepayload) + { ++ GstStructure *s = gst_structure_new_empty ("unused"); + GstRtpH264Pay *payloader = GST_RTP_H264_PAY (basepayload); +- gchar *profile; + gchar *set; + GString *sprops; + guint count; +@@ -499,24 +503,34 @@ gst_rtp_h264_pay_set_sps_pps (GstRTPBasePayload * basepayload) + } + + if (G_LIKELY (count)) { +- if (payloader->profile != 0) { +- /* profile is 24 bit. Force it to respect the limit */ +- profile = g_strdup_printf ("%06x", payloader->profile & 0xffffff); +- /* combine into output caps */ +- res = gst_rtp_base_payload_set_outcaps (basepayload, +- "packetization-mode", G_TYPE_STRING, "1", +- "profile-level-id", G_TYPE_STRING, profile, +- "sprop-parameter-sets", G_TYPE_STRING, sprops->str, NULL); +- g_free (profile); +- } else { +- res = gst_rtp_base_payload_set_outcaps (basepayload, +- "packetization-mode", G_TYPE_STRING, "1", +- "sprop-parameter-sets", G_TYPE_STRING, sprops->str, NULL); ++ gchar *profile_level; ++ ++ gst_structure_set (s, ++ "packetization-mode", G_TYPE_STRING, "1", ++ "sprop-parameter-sets", G_TYPE_STRING, sprops->str, NULL); ++ ++ if (payloader->profile_level != 0) { ++ guint8 sps[2] = { ++ payloader->profile_level >> 16, ++ payloader->profile_level >> 8, ++ }; ++ ++ profile_level = ++ g_strdup_printf ("%06x", payloader->profile_level & 0xffffff); ++ gst_structure_set (s, ++ "profile-level-id", G_TYPE_STRING, profile_level, ++ "profile", G_TYPE_STRING, gst_codec_utils_h264_get_profile (sps, 2), ++ NULL); ++ ++ g_free (profile_level); + } + ++ /* combine into output caps */ ++ res = gst_rtp_base_payload_set_outcaps_structure (basepayload, s); + } else { + res = gst_rtp_base_payload_set_outcaps (basepayload, NULL); + } ++ gst_structure_free (s); + g_string_free (sprops, TRUE); + + return res; +@@ -588,8 +602,8 @@ gst_rtp_h264_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps) + /* AVCProfileIndication */ + /* profile_compat */ + /* AVCLevelIndication */ +- rtph264pay->profile = (data[1] << 16) | (data[2] << 8) | data[3]; +- GST_DEBUG_OBJECT (rtph264pay, "profile %06x", rtph264pay->profile); ++ rtph264pay->profile_level = (data[1] << 16) | (data[2] << 8) | data[3]; ++ GST_DEBUG_OBJECT (rtph264pay, "profile %06x", rtph264pay->profile_level); + + /* 6 bits reserved | 2 bits lengthSizeMinusOne */ + /* this is the number of bytes in front of the NAL units to mark their +@@ -1060,7 +1074,7 @@ gst_rtp_h264_pay_payload_nal_fragment (GstRTPBasePayload * basepayload, + /* use buffer lists + * create buffer without payload containing only the RTP header + * (memory block at index 0) */ +- outbuf = gst_rtp_buffer_new_allocate (2, 0, 0); ++ outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 2, 0, 0); + + gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp); + +@@ -1071,6 +1085,8 @@ gst_rtp_h264_pay_payload_nal_fragment 
(GstRTPBasePayload * basepayload, + /* If it's the last fragment and the end of this au, mark the end of + * slice */ + gst_rtp_buffer_set_marker (&rtp, last_fragment && end_of_au); ++ if (last_fragment && end_of_au) ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + + /* FU indicator */ + payload[0] = (nal_header & 0x60) | FU_A_TYPE_ID; +@@ -1122,12 +1138,14 @@ gst_rtp_h264_pay_payload_nal_single (GstRTPBasePayload * basepayload, + + /* create buffer without payload containing only the RTP header + * (memory block at index 0) */ +- outbuf = gst_rtp_buffer_new_allocate (0, 0, 0); ++ outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0); + + gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp); + + /* Mark the end of a frame */ + gst_rtp_buffer_set_marker (&rtp, end_of_au); ++ if (end_of_au) ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + + /* timestamp the outbuffer */ + GST_BUFFER_PTS (outbuf) = pts; +@@ -1236,7 +1254,7 @@ gst_rtp_h264_pay_send_bundle (GstRtpH264Pay * rtph264pay, gboolean end_of_au) + end_of_au, delta, discont); + } + +-static gboolean ++static GstFlowReturn + gst_rtp_h264_pay_payload_nal_bundle (GstRTPBasePayload * basepayload, + GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au, + gboolean delta_unit, gboolean discont, guint8 nal_header) +@@ -1809,10 +1827,3 @@ gst_rtp_h264_pay_get_property (GObject * object, guint prop_id, + break; + } + } +- +-gboolean +-gst_rtp_h264_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtph264pay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_H264_PAY); +-} +diff --git a/gst/rtp/gstrtph264pay.h b/gst/rtp/gstrtph264pay.h +index 879394bfa..c983a9251 100644 +--- a/gst/rtp/gstrtph264pay.h ++++ b/gst/rtp/gstrtph264pay.h +@@ -65,7 +65,7 @@ struct _GstRtpH264Pay + { + GstRTPBasePayload payload; + +- guint profile; ++ guint profile_level; + GPtrArray *sps, *pps; + + GstH264StreamFormat stream_format; +@@ -104,8 +104,6 @@ struct _GstRtpH264PayClass + + GType gst_rtp_h264_pay_get_type (void); + +-gboolean gst_rtp_h264_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_H264_PAY_H__ */ +diff --git a/gst/rtp/gstrtph265depay.c b/gst/rtp/gstrtph265depay.c +index 46553f0e1..41d2762ff 100644 +--- a/gst/rtp/gstrtph265depay.c ++++ b/gst/rtp/gstrtph265depay.c +@@ -28,6 +28,7 @@ + #include + #include + #include ++#include "gstrtpelements.h" + #include "gstrtph265depay.h" + #include "gstrtputils.h" + +@@ -95,8 +96,11 @@ GST_STATIC_PAD_TEMPLATE ("sink", + ); + + #define gst_rtp_h265_depay_parent_class parent_class +-G_DEFINE_TYPE (GstRtpH265Depay, gst_rtp_h265_depay, +- GST_TYPE_RTP_BASE_DEPAYLOAD); ++G_DEFINE_TYPE_WITH_CODE (GstRtpH265Depay, gst_rtp_h265_depay, ++ GST_TYPE_RTP_BASE_DEPAYLOAD, GST_DEBUG_CATEGORY_INIT (rtph265depay_debug, ++ "rtph265depay", 0, "H265 Video RTP Depayloader")); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph265depay, "rtph265depay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_H265_DEPAY, rtp_element_init (plugin)); + + static void gst_rtp_h265_depay_finalize (GObject * object); + +@@ -460,7 +464,6 @@ gst_rtp_h265_set_src_caps (GstRtpH265Depay * rtph265depay) + gst_rtp_read_golomb (&br, &chroma_format_idc); /* chroma_format_idc */ + + if (chroma_format_idc == 3) +- + gst_bit_reader_get_bits_uint8 (&br, &tmp8, 1); /* separate_colour_plane_flag */ + + gst_rtp_read_golomb (&br, &tmp); /* pic_width_in_luma_samples */ +@@ -1634,13 +1637,3 @@ gst_rtp_h265_depay_change_state (GstElement * element, + } + return ret; + } +- +-gboolean 
+-gst_rtp_h265_depay_plugin_init (GstPlugin * plugin) +-{ +- GST_DEBUG_CATEGORY_INIT (rtph265depay_debug, "rtph265depay", 0, +- "H265 Video RTP Depayloader"); +- +- return gst_element_register (plugin, "rtph265depay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_H265_DEPAY); +-} +diff --git a/gst/rtp/gstrtph265depay.h b/gst/rtp/gstrtph265depay.h +index b3b55ee7b..505bdb5cd 100644 +--- a/gst/rtp/gstrtph265depay.h ++++ b/gst/rtp/gstrtph265depay.h +@@ -108,8 +108,6 @@ typedef struct + + GType gst_rtp_h265_depay_get_type (void); + +-gboolean gst_rtp_h265_depay_plugin_init (GstPlugin * plugin); +- + gboolean gst_rtp_h265_add_vps_sps_pps (GstElement * rtph265, GPtrArray * vps, + GPtrArray * sps, GPtrArray * pps, GstBuffer * nal); + +diff --git a/gst/rtp/gstrtph265pay.c b/gst/rtp/gstrtph265pay.c +index 3793ad613..e06c928e4 100644 +--- a/gst/rtp/gstrtph265pay.c ++++ b/gst/rtp/gstrtph265pay.c +@@ -32,6 +32,7 @@ + /* Included to not duplicate gst_rtp_h265_add_vps_sps_pps () */ + #include "gstrtph265depay.h" + ++#include "gstrtpelements.h" + #include "gstrtph265pay.h" + #include "gstrtputils.h" + #include "gstbuffermemory.h" +@@ -166,6 +167,8 @@ static void gst_rtp_h265_pay_reset_bundle (GstRtpH265Pay * rtph265pay); + + #define gst_rtp_h265_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpH265Pay, gst_rtp_h265_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph265pay, "rtph265pay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_H265_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_h265_pay_class_init (GstRtpH265PayClass * klass) +@@ -919,21 +922,24 @@ gst_rtp_h265_pay_decode_nal (GstRtpH265Pay * payloader, + } + + static GstFlowReturn gst_rtp_h265_pay_payload_nal (GstRTPBasePayload * +- basepayload, GPtrArray * paybufs, GstClockTime dts, GstClockTime pts); ++ basepayload, GPtrArray * paybufs, GstClockTime dts, GstClockTime pts, ++ gboolean delta_unit); + static GstFlowReturn gst_rtp_h265_pay_payload_nal_single (GstRTPBasePayload * + basepayload, GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, +- gboolean marker); ++ gboolean marker, gboolean delta_unit); + static GstFlowReturn gst_rtp_h265_pay_payload_nal_fragment (GstRTPBasePayload * + basepayload, GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, +- gboolean marker, guint mtu, guint8 nal_type, const guint8 * nal_header, +- int size); ++ gboolean marker, gboolean delta_unit, guint mtu, guint8 nal_type, ++ const guint8 * nal_header, int size); + static GstFlowReturn gst_rtp_h265_pay_payload_nal_bundle (GstRTPBasePayload * + basepayload, GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, +- gboolean marker, guint8 nal_type, const guint8 * nal_header, int size); ++ gboolean marker, gboolean delta_unit, guint8 nal_type, ++ const guint8 * nal_header, int size); + + static GstFlowReturn + gst_rtp_h265_pay_send_vps_sps_pps (GstRTPBasePayload * basepayload, +- GstRtpH265Pay * rtph265pay, GstClockTime dts, GstClockTime pts) ++ GstRtpH265Pay * rtph265pay, GstClockTime dts, GstClockTime pts, ++ gboolean delta_unit) + { + GstFlowReturn ret = GST_FLOW_OK; + gboolean sent_all_vps_sps_pps = TRUE; +@@ -964,7 +970,7 @@ gst_rtp_h265_pay_send_vps_sps_pps (GstRTPBasePayload * basepayload, + g_ptr_array_add (bufs, gst_buffer_ref (pps_buf)); + } + +- ret = gst_rtp_h265_pay_payload_nal (basepayload, bufs, dts, pts); ++ ret = gst_rtp_h265_pay_payload_nal (basepayload, bufs, dts, pts, FALSE); + if (ret != GST_FLOW_OK) { + /* not critical but warn */ + GST_WARNING_OBJECT (basepayload, "failed pushing VPS/SPS/PPS"); +@@ -990,7 +996,8 @@ 
gst_rtp_h265_pay_reset_bundle (GstRtpH265Pay * rtph265pay) + + static GstFlowReturn + gst_rtp_h265_pay_payload_nal (GstRTPBasePayload * basepayload, +- GPtrArray * paybufs, GstClockTime dts, GstClockTime pts) ++ GPtrArray * paybufs, GstClockTime dts, GstClockTime pts, ++ gboolean delta_unit) + { + GstRtpH265Pay *rtph265pay; + guint mtu; +@@ -1100,7 +1107,8 @@ gst_rtp_h265_pay_payload_nal (GstRTPBasePayload * basepayload, + sent_ps = TRUE; + GST_DEBUG_OBJECT (rtph265pay, "sending VPS/SPS/PPS before current frame"); + ret = +- gst_rtp_h265_pay_send_vps_sps_pps (basepayload, rtph265pay, dts, pts); ++ gst_rtp_h265_pay_send_vps_sps_pps (basepayload, rtph265pay, dts, pts, ++ delta_unit); + if (ret != GST_FLOW_OK) { + gst_buffer_unref (paybuf); + continue; +@@ -1109,10 +1117,10 @@ gst_rtp_h265_pay_payload_nal (GstRTPBasePayload * basepayload, + + if (rtph265pay->aggregate_mode != GST_RTP_H265_AGGREGATE_NONE) + ret = gst_rtp_h265_pay_payload_nal_bundle (basepayload, paybuf, dts, pts, +- marker, nal_type, nal_header, size); ++ marker, delta_unit, nal_type, nal_header, size); + else + ret = gst_rtp_h265_pay_payload_nal_fragment (basepayload, paybuf, dts, +- pts, marker, mtu, nal_type, nal_header, size); ++ pts, marker, delta_unit, mtu, nal_type, nal_header, size); + } + + g_ptr_array_free (paybufs, TRUE); +@@ -1122,7 +1130,8 @@ gst_rtp_h265_pay_payload_nal (GstRTPBasePayload * basepayload, + + static GstFlowReturn + gst_rtp_h265_pay_payload_nal_single (GstRTPBasePayload * basepayload, +- GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean marker) ++ GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean marker, ++ gboolean delta_unit) + { + GstBufferList *outlist; + GstBuffer *outbuf; +@@ -1131,12 +1140,16 @@ gst_rtp_h265_pay_payload_nal_single (GstRTPBasePayload * basepayload, + /* use buffer lists + * create buffer without payload containing only the RTP header + * (memory block at index 0) */ +- outbuf = gst_rtp_buffer_new_allocate (0, 0, 0); ++ outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0); + + gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp); + + /* Mark the end of a frame */ + gst_rtp_buffer_set_marker (&rtp, marker); ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); ++ ++ if (delta_unit) ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT); + + /* timestamp the outbuffer */ + GST_BUFFER_PTS (outbuf) = pts; +@@ -1160,6 +1173,7 @@ gst_rtp_h265_pay_payload_nal_single (GstRTPBasePayload * basepayload, + static GstFlowReturn + gst_rtp_h265_pay_payload_nal_fragment (GstRTPBasePayload * basepayload, + GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean marker, ++ gboolean delta_unit, + guint mtu, guint8 nal_type, const guint8 * nal_header, int size) + { + GstRtpH265Pay *rtph265pay = (GstRtpH265Pay *) basepayload; +@@ -1175,7 +1189,7 @@ gst_rtp_h265_pay_payload_nal_fragment (GstRTPBasePayload * basepayload, + "NAL Unit fit in one packet datasize=%d mtu=%d", size, mtu); + /* will fit in one packet */ + return gst_rtp_h265_pay_payload_nal_single (basepayload, paybuf, dts, pts, +- marker); ++ marker, delta_unit); + } + + GST_DEBUG_OBJECT (basepayload, +@@ -1206,7 +1220,7 @@ gst_rtp_h265_pay_payload_nal_fragment (GstRTPBasePayload * basepayload, + /* use buffer lists + * create buffer without payload containing only the RTP header + * (memory block at index 0), and with space for PayloadHdr and FU header */ +- outbuf = gst_rtp_buffer_new_allocate (3, 0, 0); ++ outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 
3, 0, 0); + + gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp); + +@@ -1221,6 +1235,8 @@ gst_rtp_h265_pay_payload_nal_fragment (GstRTPBasePayload * basepayload, + /* If it's the last fragment and the end of this au, mark the end of + * slice */ + gst_rtp_buffer_set_marker (&rtp, last_fragment && marker); ++ if (last_fragment && marker) ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + + /* FU Header */ + payload[2] = (first_fragment << 7) | (last_fragment << 6) | +@@ -1232,6 +1248,12 @@ gst_rtp_h265_pay_payload_nal_fragment (GstRTPBasePayload * basepayload, + gst_rtp_copy_video_meta (rtph265pay, outbuf, paybuf); + gst_buffer_copy_into (outbuf, paybuf, GST_BUFFER_COPY_MEMORY, pos, + fragment_size); ++ if (!delta_unit) ++ /* only the first packet sent should not have the flag */ ++ delta_unit = TRUE; ++ else ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT); ++ + /* add the buffer to the buffer list */ + gst_buffer_list_add (outlist, outbuf); + } +@@ -1250,6 +1272,7 @@ gst_rtp_h265_pay_send_bundle (GstRtpH265Pay * rtph265pay, gboolean marker) + guint length, bundle_size; + GstBuffer *first, *outbuf; + GstClockTime dts, pts; ++ gboolean delta_unit; + + bundle_size = rtph265pay->bundle_size; + +@@ -1265,6 +1288,7 @@ gst_rtp_h265_pay_send_bundle (GstRtpH265Pay * rtph265pay, gboolean marker) + first = gst_buffer_list_get (bundle, 0); + dts = GST_BUFFER_DTS (first); + pts = GST_BUFFER_PTS (first); ++ delta_unit = GST_BUFFER_FLAG_IS_SET (first, GST_BUFFER_FLAG_DELTA_UNIT); + + if (length == 1) { + /* Push unaggregated NALU */ +@@ -1324,13 +1348,14 @@ gst_rtp_h265_pay_send_bundle (GstRtpH265Pay * rtph265pay, gboolean marker) + + gst_rtp_h265_pay_reset_bundle (rtph265pay); + return gst_rtp_h265_pay_payload_nal_single (basepayload, outbuf, dts, pts, +- marker); ++ marker, delta_unit); + } + +-static gboolean ++static GstFlowReturn + gst_rtp_h265_pay_payload_nal_bundle (GstRTPBasePayload * basepayload, + GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, +- gboolean marker, guint8 nal_type, const guint8 * nal_header, int size) ++ gboolean marker, gboolean delta_unit, guint8 nal_type, ++ const guint8 * nal_header, int size) + { + GstRtpH265Pay *rtph265pay; + GstFlowReturn ret; +@@ -1380,7 +1405,7 @@ gst_rtp_h265_pay_payload_nal_bundle (GstRTPBasePayload * basepayload, + goto out; + + return gst_rtp_h265_pay_payload_nal_fragment (basepayload, paybuf, dts, pts, +- marker, mtu, nal_type, nal_header, size); ++ marker, delta_unit, mtu, nal_type, nal_header, size); + } + + bundle_size = rtph265pay->bundle_size + pay_size; +@@ -1412,6 +1437,11 @@ gst_rtp_h265_pay_payload_nal_bundle (GstRTPBasePayload * basepayload, + GST_BUFFER_PTS (paybuf) = pts; + GST_BUFFER_DTS (paybuf) = dts; + ++ if (delta_unit) ++ GST_BUFFER_FLAG_SET (paybuf, GST_BUFFER_FLAG_DELTA_UNIT); ++ else ++ GST_BUFFER_FLAG_UNSET (paybuf, GST_BUFFER_FLAG_DELTA_UNIT); ++ + gst_buffer_list_add (bundle, gst_buffer_ref (paybuf)); + rtph265pay->bundle_size += pay_size; + ret = GST_FLOW_OK; +@@ -1446,6 +1476,7 @@ gst_rtp_h265_pay_handle_buffer (GstRTPBasePayload * basepayload, + gboolean hevc; + GstBuffer *paybuf = NULL; + gsize skip; ++ gboolean delayed_not_delta_unit = FALSE; + gboolean marker = FALSE; + gboolean discont = FALSE; + gboolean draining = (buffer == NULL); +@@ -1463,8 +1494,14 @@ gst_rtp_h265_pay_handle_buffer (GstRTPBasePayload * basepayload, + return GST_FLOW_OK; + } else { + if (buffer) { +- if (gst_adapter_available (rtph265pay->adapter) == 0) +- discont = GST_BUFFER_FLAG_IS_SET (buffer, 
GST_BUFFER_FLAG_DISCONT); ++ if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) { ++ if (gst_adapter_available (rtph265pay->adapter) == 0) ++ rtph265pay->delta_unit = FALSE; ++ else ++ delayed_not_delta_unit = TRUE; ++ } ++ ++ discont = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT); + marker = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_MARKER); + gst_adapter_push (rtph265pay->adapter, buffer); + buffer = NULL; +@@ -1500,6 +1537,8 @@ gst_rtp_h265_pay_handle_buffer (GstRTPBasePayload * basepayload, + + pts = GST_BUFFER_PTS (buffer); + dts = GST_BUFFER_DTS (buffer); ++ rtph265pay->delta_unit = GST_BUFFER_FLAG_IS_SET (buffer, ++ GST_BUFFER_FLAG_DELTA_UNIT); + marker = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_MARKER); + GST_DEBUG_OBJECT (basepayload, "got %" G_GSIZE_FORMAT " bytes", + remaining_buffer_size); +@@ -1553,7 +1592,13 @@ gst_rtp_h265_pay_handle_buffer (GstRTPBasePayload * basepayload, + offset += nal_len; + remaining_buffer_size -= nal_len; + } +- ret = gst_rtp_h265_pay_payload_nal (basepayload, paybufs, dts, pts); ++ ret = ++ gst_rtp_h265_pay_payload_nal (basepayload, paybufs, dts, pts, ++ rtph265pay->delta_unit); ++ ++ if (!rtph265pay->delta_unit) ++ /* only the first outgoing packet doesn't have the DELTA_UNIT flag */ ++ rtph265pay->delta_unit = TRUE; + + gst_buffer_memory_unmap (&memory); + gst_buffer_unref (buffer); +@@ -1668,12 +1713,22 @@ gst_rtp_h265_pay_handle_buffer (GstRTPBasePayload * basepayload, + discont = FALSE; + } + ++ if (delayed_not_delta_unit) { ++ rtph265pay->delta_unit = FALSE; ++ delayed_not_delta_unit = FALSE; ++ } else { ++ /* only the first outgoing packet doesn't have the DELTA_UNIT flag */ ++ rtph265pay->delta_unit = TRUE; ++ } ++ + /* move to next NAL packet */ + /* Skips the trailing zeros */ + gst_adapter_flush (rtph265pay->adapter, nal_len - size); + } + /* put the data in one or more RTP packets */ +- ret = gst_rtp_h265_pay_payload_nal (basepayload, paybufs, dts, pts); ++ ret = ++ gst_rtp_h265_pay_payload_nal (basepayload, paybufs, dts, pts, ++ rtph265pay->delta_unit); + g_array_set_size (nal_queue, 0); + } + +@@ -1821,10 +1876,3 @@ gst_rtp_h265_pay_get_property (GObject * object, guint prop_id, + break; + } + } +- +-gboolean +-gst_rtp_h265_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtph265pay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_H265_PAY); +-} +diff --git a/gst/rtp/gstrtph265pay.h b/gst/rtp/gstrtph265pay.h +index 6d1409558..f2829d0a9 100644 +--- a/gst/rtp/gstrtph265pay.h ++++ b/gst/rtp/gstrtph265pay.h +@@ -73,6 +73,9 @@ struct _GstRtpH265Pay + gboolean send_vps_sps_pps; + GstClockTime last_vps_sps_pps; + ++ /* TRUE if the next NALU processed should have the DELTA_UNIT flag */ ++ gboolean delta_unit; ++ + /* aggregate buffers with AP */ + GstBufferList *bundle; + guint bundle_size; +@@ -87,7 +90,5 @@ struct _GstRtpH265PayClass + + GType gst_rtp_h265_pay_get_type (void); + +-gboolean gst_rtp_h265_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + #endif /* __GST_RTP_H265_PAY_H__ */ +diff --git a/gst/rtp/gstrtphdrext-colorspace.c b/gst/rtp/gstrtphdrext-colorspace.c +new file mode 100644 +index 000000000..5a77e3d8f +--- /dev/null ++++ b/gst/rtp/gstrtphdrext-colorspace.c +@@ -0,0 +1,483 @@ ++/* GStreamer ++ * Copyright (C) 2020-2021 Collabora Ltd. 
++ * @author: Jakub Adam ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, ++ * Boston, MA 02111-1307, USA. ++ */ ++ ++/** ++ * SECTION:rtphdrextcolorspace ++ * @title: GstRtphdrext-Colorspace ++ * @short_description: Helper methods for dealing with Color Space RTP header ++ * extension as defined in http://www.webrtc.org/experiments/rtp-hdrext/color-space ++ * @see_also: #GstRTPHeaderExtension, #GstRTPBasePayload, #GstRTPBaseDepayload ++ * ++ * Since: 1.20 ++ */ ++ ++#include "gstrtphdrext-colorspace.h" ++ ++#include "gstrtpelements.h" ++ ++#include ++#include ++#include ++ ++GST_DEBUG_CATEGORY_STATIC (rtphdrext_colorspace_debug); ++#define GST_CAT_DEFAULT (rtphdrext_colorspace_debug) ++ ++/** ++ * GstRTPHeaderExtensionColorspace: ++ * @parent: the parent #GstRTPHeaderExtension ++ * ++ * Instance struct for Color Space RTP header extension. ++ * ++ * http://www.webrtc.org/experiments/rtp-hdrext/color-space ++ */ ++struct _GstRTPHeaderExtensionColorspace ++{ ++ GstRTPHeaderExtension parent; ++ ++ GstVideoColorimetry colorimetry; ++ GstVideoChromaSite chroma_site; ++ GstVideoMasteringDisplayInfo mdi; ++ GstVideoContentLightLevel cll; ++ gboolean has_hdr_meta; ++}; ++ ++G_DEFINE_TYPE_WITH_CODE (GstRTPHeaderExtensionColorspace, ++ gst_rtp_header_extension_colorspace, GST_TYPE_RTP_HEADER_EXTENSION, ++ GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "rtphdrextcolorspace", 0, ++ "RTP Color Space Header Extension"); ++ ); ++GST_ELEMENT_REGISTER_DEFINE (rtphdrextcolorspace, "rtphdrextcolorspace", ++ GST_RANK_MARGINAL, GST_TYPE_RTP_HEADER_EXTENSION_COLORSPACE); ++ ++static void ++gst_rtp_header_extension_colorspace_init (GstRTPHeaderExtensionColorspace * ++ self) ++{ ++} ++ ++static GstRTPHeaderExtensionFlags ++gst_rtp_header_extension_colorspace_get_supported_flags (GstRTPHeaderExtension * ++ ext) ++{ ++ GstRTPHeaderExtensionColorspace *self = ++ GST_RTP_HEADER_EXTENSION_COLORSPACE (ext); ++ ++ return self->has_hdr_meta ? ++ GST_RTP_HEADER_EXTENSION_TWO_BYTE : GST_RTP_HEADER_EXTENSION_ONE_BYTE; ++} ++ ++static gsize ++gst_rtp_header_extension_colorspace_get_max_size (GstRTPHeaderExtension * ext, ++ const GstBuffer * buffer) ++{ ++ GstRTPHeaderExtensionColorspace *self = ++ GST_RTP_HEADER_EXTENSION_COLORSPACE (ext); ++ ++ return self->has_hdr_meta ? 
++ GST_RTP_HDREXT_COLORSPACE_WITH_HDR_META_SIZE : ++ GST_RTP_HDREXT_COLORSPACE_SIZE; ++} ++ ++static gssize ++gst_rtp_header_extension_colorspace_write (GstRTPHeaderExtension * ext, ++ const GstBuffer * input_meta, GstRTPHeaderExtensionFlags write_flags, ++ GstBuffer * output, guint8 * data, gsize size) ++{ ++ GstRTPHeaderExtensionColorspace *self = ++ GST_RTP_HEADER_EXTENSION_COLORSPACE (ext); ++ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ gboolean is_frame_last_buffer; ++ guint8 *ptr = data; ++ guint8 range; ++ guint8 horizontal_site; ++ guint8 vertical_site; ++ ++ g_return_val_if_fail (size >= ++ gst_rtp_header_extension_colorspace_get_max_size (ext, NULL), -1); ++ g_return_val_if_fail (write_flags & ++ gst_rtp_header_extension_colorspace_get_supported_flags (ext), -1); ++ ++ if (self->colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_UNKNOWN && ++ self->colorimetry.primaries == GST_VIDEO_COLOR_PRIMARIES_UNKNOWN && ++ self->colorimetry.range == GST_VIDEO_COLOR_RANGE_UNKNOWN && ++ self->colorimetry.transfer == GST_VIDEO_TRANSFER_UNKNOWN) { ++ /* Nothing to write. */ ++ return 0; ++ } ++ ++ gst_rtp_buffer_map (output, GST_MAP_READ, &rtp); ++ is_frame_last_buffer = gst_rtp_buffer_get_marker (&rtp); ++ gst_rtp_buffer_unmap (&rtp); ++ ++ if (!is_frame_last_buffer) { ++ /* Only a video frame's final packet should carry color space info. */ ++ return 0; ++ } ++ ++ *ptr++ = gst_video_color_primaries_to_iso (self->colorimetry.primaries); ++ *ptr++ = gst_video_transfer_function_to_iso (self->colorimetry.transfer); ++ *ptr++ = gst_video_color_matrix_to_iso (self->colorimetry.matrix); ++ ++ switch (self->colorimetry.range) { ++ case GST_VIDEO_COLOR_RANGE_0_255: ++ range = 2; ++ break; ++ case GST_VIDEO_COLOR_RANGE_16_235: ++ range = 1; ++ break; ++ default: ++ range = 0; ++ break; ++ } ++ ++ if (self->chroma_site & GST_VIDEO_CHROMA_SITE_H_COSITED) { ++ horizontal_site = 1; ++ } else if (self->chroma_site & GST_VIDEO_CHROMA_SITE_NONE) { ++ horizontal_site = 2; ++ } else { ++ horizontal_site = 0; ++ } ++ ++ if (self->chroma_site & GST_VIDEO_CHROMA_SITE_V_COSITED) { ++ vertical_site = 1; ++ } else if (self->chroma_site & GST_VIDEO_CHROMA_SITE_NONE) { ++ vertical_site = 2; ++ } else { ++ vertical_site = 0; ++ } ++ ++ *ptr++ = (range << 4) + (horizontal_site << 2) + vertical_site; ++ ++ if (self->has_hdr_meta) { ++ guint i; ++ ++ GST_WRITE_UINT16_BE (ptr, ++ self->mdi.max_display_mastering_luminance / 10000); ++ ptr += 2; ++ GST_WRITE_UINT16_BE (ptr, self->mdi.min_display_mastering_luminance); ++ ptr += 2; ++ ++ for (i = 0; i < 3; ++i) { ++ GST_WRITE_UINT16_BE (ptr, self->mdi.display_primaries[i].x); ++ ptr += 2; ++ GST_WRITE_UINT16_BE (ptr, self->mdi.display_primaries[i].y); ++ ptr += 2; ++ } ++ ++ GST_WRITE_UINT16_BE (ptr, self->mdi.white_point.x); ++ ptr += 2; ++ GST_WRITE_UINT16_BE (ptr, self->mdi.white_point.y); ++ ptr += 2; ++ ++ GST_WRITE_UINT16_BE (ptr, self->cll.max_content_light_level); ++ ptr += 2; ++ GST_WRITE_UINT16_BE (ptr, self->cll.max_frame_average_light_level); ++ ptr += 2; ++ } ++ ++ return ptr - data; ++} ++ ++static gboolean ++parse_colorspace (GstByteReader * reader, GstVideoColorimetry * colorimetry, ++ GstVideoChromaSite * chroma_site) ++{ ++ guint8 val; ++ ++ g_return_val_if_fail (reader != NULL, FALSE); ++ g_return_val_if_fail (colorimetry != NULL, FALSE); ++ g_return_val_if_fail (chroma_site != NULL, FALSE); ++ ++ if (gst_byte_reader_get_remaining (reader) < GST_RTP_HDREXT_COLORSPACE_SIZE) { ++ return FALSE; ++ } ++ ++ if (!gst_byte_reader_get_uint8 (reader, &val)) { ++ return 
FALSE; ++ } ++ colorimetry->primaries = gst_video_color_primaries_from_iso (val); ++ ++ if (!gst_byte_reader_get_uint8 (reader, &val)) { ++ return FALSE; ++ } ++ colorimetry->transfer = gst_video_transfer_function_from_iso (val); ++ ++ if (!gst_byte_reader_get_uint8 (reader, &val)) { ++ return FALSE; ++ } ++ colorimetry->matrix = gst_video_color_matrix_from_iso (val); ++ ++ *chroma_site = GST_VIDEO_CHROMA_SITE_UNKNOWN; ++ ++ if (!gst_byte_reader_get_uint8 (reader, &val)) { ++ return FALSE; ++ } ++ switch ((val >> 2) & 0x03) { ++ case 1: ++ *chroma_site |= GST_VIDEO_CHROMA_SITE_H_COSITED; ++ break; ++ case 2: ++ *chroma_site |= GST_VIDEO_CHROMA_SITE_NONE; ++ break; ++ } ++ ++ switch (val & 0x03) { ++ case 1: ++ *chroma_site |= GST_VIDEO_CHROMA_SITE_V_COSITED; ++ break; ++ case 2: ++ *chroma_site |= GST_VIDEO_CHROMA_SITE_NONE; ++ break; ++ } ++ ++ switch (val >> 4) { ++ case 1: ++ colorimetry->range = GST_VIDEO_COLOR_RANGE_16_235; ++ break; ++ case 2: ++ colorimetry->range = GST_VIDEO_COLOR_RANGE_0_255; ++ break; ++ default: ++ colorimetry->range = GST_VIDEO_COLOR_RANGE_UNKNOWN; ++ break; ++ } ++ ++ return TRUE; ++} ++ ++static gboolean ++parse_colorspace_with_hdr_meta (GstByteReader * reader, ++ GstVideoColorimetry * colorimetry, ++ GstVideoChromaSite * chroma_site, ++ GstVideoMasteringDisplayInfo * mastering_display_info, ++ GstVideoContentLightLevel * content_light_level) ++{ ++ guint i; ++ guint16 val16; ++ ++ g_return_val_if_fail (reader != NULL, FALSE); ++ g_return_val_if_fail (mastering_display_info != NULL, FALSE); ++ g_return_val_if_fail (content_light_level != NULL, FALSE); ++ ++ if (gst_byte_reader_get_remaining (reader) < ++ GST_RTP_HDREXT_COLORSPACE_WITH_HDR_META_SIZE) { ++ return FALSE; ++ } ++ ++ if (!parse_colorspace (reader, colorimetry, chroma_site)) { ++ return FALSE; ++ } ++ ++ if (!gst_byte_reader_get_uint16_be (reader, &val16)) { ++ return FALSE; ++ } ++ mastering_display_info->max_display_mastering_luminance = val16 * 10000; ++ ++ if (!gst_byte_reader_get_uint16_be (reader, &val16)) { ++ return FALSE; ++ } ++ mastering_display_info->min_display_mastering_luminance = val16; ++ ++ for (i = 0; i < 3; ++i) { ++ if (!gst_byte_reader_get_uint16_be (reader, ++ &mastering_display_info->display_primaries[i].x)) { ++ return FALSE; ++ } ++ ++ if (!gst_byte_reader_get_uint16_be (reader, ++ &mastering_display_info->display_primaries[i].y)) { ++ return FALSE; ++ } ++ } ++ ++ if (!gst_byte_reader_get_uint16_be (reader, ++ &mastering_display_info->white_point.x)) { ++ return FALSE; ++ } ++ if (!gst_byte_reader_get_uint16_be (reader, ++ &mastering_display_info->white_point.y)) { ++ return FALSE; ++ } ++ ++ if (!gst_byte_reader_get_uint16_be (reader, ++ &content_light_level->max_content_light_level)) { ++ return FALSE; ++ } ++ if (!gst_byte_reader_get_uint16_be (reader, ++ &content_light_level->max_frame_average_light_level)) { ++ return FALSE; ++ } ++ ++ return TRUE; ++} ++ ++static gboolean ++gst_rtp_header_extension_colorspace_read (GstRTPHeaderExtension * ext, ++ GstRTPHeaderExtensionFlags read_flags, const guint8 * data, gsize size, ++ GstBuffer * buffer) ++{ ++ GstRTPHeaderExtensionColorspace *self = ++ GST_RTP_HEADER_EXTENSION_COLORSPACE (ext); ++ gboolean has_hdr_meta; ++ GstByteReader *reader; ++ GstVideoColorimetry colorimetry; ++ GstVideoChromaSite chroma_site; ++ GstVideoMasteringDisplayInfo mdi; ++ GstVideoContentLightLevel cll; ++ gboolean caps_update_needed; ++ gboolean result; ++ ++ if (size != GST_RTP_HDREXT_COLORSPACE_SIZE && ++ size != 
GST_RTP_HDREXT_COLORSPACE_WITH_HDR_META_SIZE) { ++ GST_WARNING_OBJECT (ext, "Invalid Color Space header extension size %" ++ G_GSIZE_FORMAT, size); ++ return FALSE; ++ } ++ ++ has_hdr_meta = size == GST_RTP_HDREXT_COLORSPACE_WITH_HDR_META_SIZE; ++ ++ reader = gst_byte_reader_new (data, size); ++ ++ if (has_hdr_meta) { ++ result = parse_colorspace_with_hdr_meta (reader, &colorimetry, &chroma_site, ++ &mdi, &cll); ++ } else { ++ result = parse_colorspace (reader, &colorimetry, &chroma_site); ++ } ++ ++ g_clear_pointer (&reader, gst_byte_reader_free); ++ ++ if (!gst_video_colorimetry_is_equal (&self->colorimetry, &colorimetry)) { ++ caps_update_needed = TRUE; ++ self->colorimetry = colorimetry; ++ } ++ ++ if (self->chroma_site != chroma_site) { ++ caps_update_needed = TRUE; ++ self->chroma_site = chroma_site; ++ } ++ ++ if (self->has_hdr_meta != has_hdr_meta) { ++ caps_update_needed = TRUE; ++ self->has_hdr_meta = has_hdr_meta; ++ } ++ ++ if (has_hdr_meta) { ++ if (!gst_video_mastering_display_info_is_equal (&self->mdi, &mdi)) { ++ caps_update_needed = TRUE; ++ self->mdi = mdi; ++ } ++ if (!gst_video_content_light_level_is_equal (&self->cll, &cll)) { ++ caps_update_needed = TRUE; ++ self->cll = cll; ++ } ++ } ++ ++ if (caps_update_needed) { ++ gst_rtp_header_extension_set_wants_update_non_rtp_src_caps (ext, TRUE); ++ } ++ ++ return result; ++} ++ ++static gboolean ++ gst_rtp_header_extension_colorspace_set_non_rtp_sink_caps ++ (GstRTPHeaderExtension * ext, const GstCaps * caps) ++{ ++ GstRTPHeaderExtensionColorspace *self = ++ GST_RTP_HEADER_EXTENSION_COLORSPACE (ext); ++ GstStructure *s; ++ const gchar *colorimetry; ++ const gchar *chroma_site; ++ ++ s = gst_caps_get_structure (caps, 0); ++ ++ colorimetry = gst_structure_get_string (s, "colorimetry"); ++ if (colorimetry) { ++ gst_video_colorimetry_from_string (&self->colorimetry, colorimetry); ++ ++ self->has_hdr_meta = ++ gst_video_mastering_display_info_from_caps (&self->mdi, caps); ++ ++ gst_video_content_light_level_from_caps (&self->cll, caps); ++ } ++ ++ chroma_site = gst_structure_get_string (s, "chroma-site"); ++ if (chroma_site) { ++ self->chroma_site = gst_video_chroma_from_string (chroma_site); ++ } ++ ++ return TRUE; ++} ++ ++static gboolean ++ gst_rtp_header_extension_colorspace_update_non_rtp_src_caps ++ (GstRTPHeaderExtension * ext, GstCaps * caps) ++{ ++ GstRTPHeaderExtensionColorspace *self = ++ GST_RTP_HEADER_EXTENSION_COLORSPACE (ext); ++ ++ gchar *color_str; ++ ++ gst_structure_remove_fields (gst_caps_get_structure (caps, 0), ++ "mastering-display-info", "content-light-level", NULL); ++ ++ if ((color_str = gst_video_colorimetry_to_string (&self->colorimetry))) { ++ gst_caps_set_simple (caps, "colorimetry", G_TYPE_STRING, color_str, NULL); ++ g_free (color_str); ++ } ++ if (self->chroma_site != GST_VIDEO_CHROMA_SITE_UNKNOWN) { ++ gst_caps_set_simple (caps, "chroma-site", G_TYPE_STRING, ++ gst_video_chroma_to_string (self->chroma_site), NULL); ++ } ++ if (self->has_hdr_meta) { ++ gst_video_mastering_display_info_add_to_caps (&self->mdi, caps); ++ gst_video_content_light_level_add_to_caps (&self->cll, caps); ++ } ++ ++ return TRUE; ++} ++ ++static void ++ gst_rtp_header_extension_colorspace_class_init ++ (GstRTPHeaderExtensionColorspaceClass * klass) ++{ ++ GstRTPHeaderExtensionClass *rtp_hdr_class = ++ GST_RTP_HEADER_EXTENSION_CLASS (klass); ++ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass); ++ ++ rtp_hdr_class->get_supported_flags = ++ gst_rtp_header_extension_colorspace_get_supported_flags; ++ 
rtp_hdr_class->get_max_size = ++ gst_rtp_header_extension_colorspace_get_max_size; ++ rtp_hdr_class->write = gst_rtp_header_extension_colorspace_write; ++ rtp_hdr_class->read = gst_rtp_header_extension_colorspace_read; ++ rtp_hdr_class->set_non_rtp_sink_caps = ++ gst_rtp_header_extension_colorspace_set_non_rtp_sink_caps; ++ rtp_hdr_class->update_non_rtp_src_caps = ++ gst_rtp_header_extension_colorspace_update_non_rtp_src_caps; ++ ++ gst_element_class_set_static_metadata (gstelement_class, ++ "Color Space", GST_RTP_HDREXT_ELEMENT_CLASS, ++ "Extends RTP packets with color space and high dynamic range (HDR) information.", ++ "Jakub Adam "); ++ gst_rtp_header_extension_class_set_uri (rtp_hdr_class, ++ GST_RTP_HDREXT_COLORSPACE_URI); ++} +diff --git a/gst/rtp/gstrtphdrext-colorspace.h b/gst/rtp/gstrtphdrext-colorspace.h +new file mode 100644 +index 000000000..c451cc63f +--- /dev/null ++++ b/gst/rtp/gstrtphdrext-colorspace.h +@@ -0,0 +1,41 @@ ++/* GStreamer ++ * Copyright (C) 2020-2021 Collabora Ltd. ++ * @author: Jakub Adam ++ * ++ * gstrtphdrext-colorspace.h: Color Space RTP header extension ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, ++ * Boston, MA 02111-1307, USA. 
++ */ ++ ++#ifndef __GST_RTPHDREXT_COLORSPACE_H__ ++#define __GST_RTPHDREXT_COLORSPACE_H__ ++ ++#include ++ ++G_BEGIN_DECLS ++ ++#define GST_RTP_HDREXT_COLORSPACE_SIZE 4 ++#define GST_RTP_HDREXT_COLORSPACE_WITH_HDR_META_SIZE 28 ++#define GST_RTP_HDREXT_COLORSPACE_URI "http://www.webrtc.org/experiments/rtp-hdrext/color-space" ++ ++#define GST_TYPE_RTP_HEADER_EXTENSION_COLORSPACE (gst_rtp_header_extension_colorspace_get_type()) ++ ++G_DECLARE_FINAL_TYPE (GstRTPHeaderExtensionColorspace, gst_rtp_header_extension_colorspace, ++ GST, RTP_HEADER_EXTENSION_COLORSPACE, GstRTPHeaderExtension) ++ ++G_END_DECLS ++ ++#endif /* __GST_RTPHDREXT_COLORSPACE_H__ */ +diff --git a/gst/rtp/gstrtpilbcdepay.c b/gst/rtp/gstrtpilbcdepay.c +index 0b020afd7..043e065d7 100644 +--- a/gst/rtp/gstrtpilbcdepay.c ++++ b/gst/rtp/gstrtpilbcdepay.c +@@ -25,6 +25,7 @@ + #include + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpilbcdepay.h" + #include "gstrtputils.h" + +@@ -74,6 +75,8 @@ static gboolean gst_rtp_ilbc_depay_setcaps (GstRTPBaseDepayload * depayload, + #define gst_rtp_ilbc_depay_parent_class parent_class + G_DEFINE_TYPE (GstRTPiLBCDepay, gst_rtp_ilbc_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpilbcdepay, "rtpilbcdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_ILBC_DEPAY, rtp_element_init (plugin)); + + #define GST_TYPE_ILBC_MODE (gst_ilbc_mode_get_type()) + static GType +@@ -230,10 +233,3 @@ gst_ilbc_depay_get_property (GObject * object, + break; + } + } +- +-gboolean +-gst_rtp_ilbc_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpilbcdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_ILBC_DEPAY); +-} +diff --git a/gst/rtp/gstrtpilbcdepay.h b/gst/rtp/gstrtpilbcdepay.h +index 01fd225ea..b016004e4 100644 +--- a/gst/rtp/gstrtpilbcdepay.h ++++ b/gst/rtp/gstrtpilbcdepay.h +@@ -58,8 +58,6 @@ struct _GstRTPiLBCDepayClass + + GType gst_rtp_ilbc_depay_get_type (void); + +-gboolean gst_rtp_ilbc_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_ILBC_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpilbcpay.c b/gst/rtp/gstrtpilbcpay.c +index cbc7d93de..0048045ae 100644 +--- a/gst/rtp/gstrtpilbcpay.c ++++ b/gst/rtp/gstrtpilbcpay.c +@@ -23,6 +23,7 @@ + + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpilbcpay.h" + + GST_DEBUG_CATEGORY_STATIC (rtpilbcpay_debug); +@@ -56,6 +57,8 @@ static gboolean gst_rtp_ilbc_pay_sink_setcaps (GstRTPBasePayload * payload, + #define gst_rtp_ilbc_pay_parent_class parent_class + G_DEFINE_TYPE (GstRTPILBCPay, gst_rtp_ilbc_pay, + GST_TYPE_RTP_BASE_AUDIO_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpilbcpay, "rtpilbcpay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_ILBC_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_ilbc_pay_class_init (GstRTPILBCPayClass * klass) +@@ -219,10 +222,3 @@ gst_rtp_ilbc_pay_sink_getcaps (GstRTPBasePayload * rtppayload, GstPad * pad, + + return caps; + } +- +-gboolean +-gst_rtp_ilbc_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpilbcpay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_ILBC_PAY); +-} +diff --git a/gst/rtp/gstrtpilbcpay.h b/gst/rtp/gstrtpilbcpay.h +index 14363c077..d30c11239 100644 +--- a/gst/rtp/gstrtpilbcpay.h ++++ b/gst/rtp/gstrtpilbcpay.h +@@ -53,8 +53,6 @@ struct _GstRTPILBCPayClass + + GType gst_rtp_ilbc_pay_get_type (void); + +-gboolean gst_rtp_ilbc_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_ILBC_PAY_H__ */ +diff --git a/gst/rtp/gstrtpisacdepay.c 
b/gst/rtp/gstrtpisacdepay.c +new file mode 100644 +index 000000000..bac1fa048 +--- /dev/null ++++ b/gst/rtp/gstrtpisacdepay.c +@@ -0,0 +1,147 @@ ++/* GStreamer ++ * Copyright (C) 2020 Collabora Ltd. ++ * Author: Guillaume Desmottes , Collabora Ltd. ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++/** ++ * SECTION:element-rtpisacdepay ++ * @title: rtpisacdepay ++ * @short_description: iSAC RTP Depayloader ++ * ++ * Since: 1.20 ++ * ++ */ ++ ++#ifdef HAVE_CONFIG_H ++# include "config.h" ++#endif ++ ++#include ++#include ++#include ++#include ++ ++#include "gstrtpelements.h" ++#include "gstrtpisacdepay.h" ++#include "gstrtputils.h" ++ ++GST_DEBUG_CATEGORY_STATIC (rtpisacdepay_debug); ++#define GST_CAT_DEFAULT (rtpisacdepay_debug) ++ ++static GstStaticPadTemplate gst_rtp_isac_depay_sink_template = ++GST_STATIC_PAD_TEMPLATE ("sink", ++ GST_PAD_SINK, ++ GST_PAD_ALWAYS, ++ GST_STATIC_CAPS ("application/x-rtp, " ++ "media = (string) \"audio\", " ++ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", " ++ "clock-rate = (int) { 16000, 32000 }, " ++ "encoding-name = (string) \"ISAC\"") ++ ); ++ ++static GstStaticPadTemplate gst_rtp_isac_depay_src_template = ++GST_STATIC_PAD_TEMPLATE ("src", ++ GST_PAD_SRC, ++ GST_PAD_ALWAYS, ++ GST_STATIC_CAPS ("audio/isac, " ++ "rate = (int) { 16000, 32000 }, " "channels = (int) 1") ++ ); ++ ++struct _GstRtpIsacDepay ++{ ++ /*< private > */ ++ GstRTPBaseDepayload parent; ++ ++ guint64 packet; ++}; ++ ++#define gst_rtp_isac_depay_parent_class parent_class ++G_DEFINE_TYPE (GstRtpIsacDepay, gst_rtp_isac_depay, ++ GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpisacdepay, "rtpisacdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_ISAC_DEPAY, rtp_element_init (plugin)); ++ ++static gboolean ++gst_rtp_isac_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps) ++{ ++ GstCaps *src_caps; ++ GstStructure *s; ++ gint rate; ++ gboolean ret; ++ ++ GST_DEBUG_OBJECT (depayload, "sink caps: %" GST_PTR_FORMAT, caps); ++ ++ s = gst_caps_get_structure (caps, 0); ++ if (!gst_structure_get_int (s, "clock-rate", &rate)) { ++ GST_ERROR_OBJECT (depayload, "Missing 'clock-rate' in caps"); ++ return FALSE; ++ } ++ ++ src_caps = gst_caps_new_simple ("audio/isac", ++ "channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, rate, NULL); ++ ++ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), src_caps); ++ ++ GST_DEBUG_OBJECT (depayload, ++ "set caps on source: %" GST_PTR_FORMAT " (ret=%d)", src_caps, ret); ++ gst_caps_unref (src_caps); ++ ++ return ret; ++} ++ ++static GstBuffer * ++gst_rtp_isac_depay_process (GstRTPBaseDepayload * depayload, ++ GstRTPBuffer * rtp_buffer) ++{ ++ GstBuffer *outbuf; ++ ++ outbuf = gst_rtp_buffer_get_payload_buffer (rtp_buffer); ++ ++ gst_rtp_drop_non_audio_meta (depayload, outbuf); ++ ++ return outbuf; ++} 
++ ++static void ++gst_rtp_isac_depay_class_init (GstRtpIsacDepayClass * klass) ++{ ++ GstElementClass *gstelement_class = (GstElementClass *) klass; ++ GstRTPBaseDepayloadClass *depayload_class = ++ (GstRTPBaseDepayloadClass *) klass; ++ ++ depayload_class->set_caps = gst_rtp_isac_depay_setcaps; ++ depayload_class->process_rtp_packet = gst_rtp_isac_depay_process; ++ ++ gst_element_class_add_static_pad_template (gstelement_class, ++ &gst_rtp_isac_depay_sink_template); ++ gst_element_class_add_static_pad_template (gstelement_class, ++ &gst_rtp_isac_depay_src_template); ++ ++ gst_element_class_set_static_metadata (gstelement_class, ++ "RTP iSAC depayloader", "Codec/Depayloader/Network/RTP", ++ "Extracts iSAC audio from RTP packets", ++ "Guillaume Desmottes "); ++ ++ GST_DEBUG_CATEGORY_INIT (rtpisacdepay_debug, "rtpisacdepay", 0, ++ "iSAC RTP Depayloader"); ++} ++ ++static void ++gst_rtp_isac_depay_init (GstRtpIsacDepay * rtpisacdepay) ++{ ++} +diff --git a/gst/rtp/gstrtpisacdepay.h b/gst/rtp/gstrtpisacdepay.h +new file mode 100644 +index 000000000..f5ab28954 +--- /dev/null ++++ b/gst/rtp/gstrtpisacdepay.h +@@ -0,0 +1,31 @@ ++/* GStreamer ++ * Copyright (C) 2020 Collabora Ltd. ++ * Author: Guillaume Desmottes , Collabora Ltd. ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more ++ */ ++ ++ ++#ifndef __GST_RTP_ISAC_DEPAY_H__ ++#define __GST_RTP_ISAC_DEPAY_H__ ++ ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++#define GST_TYPE_RTP_ISAC_DEPAY gst_rtp_isac_depay_get_type () ++ ++G_DECLARE_FINAL_TYPE (GstRtpIsacDepay, gst_rtp_isac_depay, GST, RTP_ISAC_DEPAY, ++ GstRTPBaseDepayload); ++ ++G_END_DECLS ++#endif /* __GST_RTP_ISAC_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpisacpay.c b/gst/rtp/gstrtpisacpay.c +new file mode 100644 +index 000000000..ad03a190f +--- /dev/null ++++ b/gst/rtp/gstrtpisacpay.c +@@ -0,0 +1,183 @@ ++/* GStreamer ++ * Copyright (C) 2020 Collabora Ltd. ++ * Author: Guillaume Desmottes , Collabora Ltd. ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++/** ++ * SECTION:element-rtpisacpay ++ * @title: rtpisacpay ++ * @short_description: iSAC RTP Payloader ++ * ++ * Since: 1.20 ++ * ++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include ++ ++#include "gstrtpelements.h" ++#include "gstrtpisacpay.h" ++#include "gstrtputils.h" ++ ++GST_DEBUG_CATEGORY_STATIC (rtpisacpay_debug); ++#define GST_CAT_DEFAULT (rtpisacpay_debug) ++ ++static GstStaticPadTemplate gst_rtp_isac_pay_sink_template = ++GST_STATIC_PAD_TEMPLATE ("sink", ++ GST_PAD_SINK, ++ GST_PAD_ALWAYS, ++ GST_STATIC_CAPS ("audio/isac, " ++ "rate = (int) { 16000, 32000 }, " "channels = (int) 1") ++ ); ++ ++static GstStaticPadTemplate gst_rtp_isac_pay_src_template = ++GST_STATIC_PAD_TEMPLATE ("src", ++ GST_PAD_SRC, ++ GST_PAD_ALWAYS, ++ GST_STATIC_CAPS ("application/x-rtp, " ++ "media = (string) \"audio\", " ++ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", " ++ "clock-rate = (int) { 16000, 32000 }, " ++ "encoding-name = (string) \"ISAC\", " ++ "encoding-params = (string) \"1\"") ++ ); ++ ++struct _GstRtpIsacPay ++{ ++ /*< private > */ ++ GstRTPBasePayload parent; ++}; ++ ++#define gst_rtp_isac_pay_parent_class parent_class ++G_DEFINE_TYPE (GstRtpIsacPay, gst_rtp_isac_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpisacpay, "rtpisacpay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_ISAC_PAY, rtp_element_init (plugin)); ++ ++static GstCaps * ++gst_rtp_isac_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad, ++ GstCaps * filter) ++{ ++ GstCaps *otherpadcaps; ++ GstCaps *caps; ++ ++ otherpadcaps = gst_pad_get_allowed_caps (payload->srcpad); ++ caps = gst_pad_get_pad_template_caps (pad); ++ ++ if (otherpadcaps) { ++ if (!gst_caps_is_empty (otherpadcaps)) { ++ GstStructure *ps; ++ GstStructure *s; ++ const GValue *v; ++ ++ ps = gst_caps_get_structure (otherpadcaps, 0); ++ caps = gst_caps_make_writable (caps); ++ s = gst_caps_get_structure (caps, 0); ++ ++ v = gst_structure_get_value (ps, "clock-rate"); ++ if (v) ++ gst_structure_set_value (s, "rate", v); ++ } ++ gst_caps_unref (otherpadcaps); ++ } ++ ++ if (filter) { ++ GstCaps *tcaps = caps; ++ ++ caps = gst_caps_intersect_full (filter, tcaps, GST_CAPS_INTERSECT_FIRST); ++ gst_caps_unref (tcaps); ++ } ++ ++ GST_DEBUG_OBJECT (payload, "%" GST_PTR_FORMAT, caps); ++ ++ return caps; ++} ++ ++static gboolean ++gst_rtp_isac_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps) ++{ ++ GstStructure *s; ++ gint rate; ++ ++ GST_DEBUG_OBJECT (payload, "%" GST_PTR_FORMAT, caps); ++ ++ s = gst_caps_get_structure (caps, 0); ++ if (!gst_structure_get_int (s, "rate", &rate)) { ++ GST_ERROR_OBJECT (payload, "Missing 'rate' in caps"); ++ return FALSE; ++ } ++ ++ gst_rtp_base_payload_set_options (payload, "audio", TRUE, "ISAC", rate); ++ ++ return gst_rtp_base_payload_set_outcaps (payload, NULL); ++} ++ ++static GstFlowReturn ++gst_rtp_isac_pay_handle_buffer (GstRTPBasePayload * basepayload, ++ GstBuffer * buffer) ++{ ++ GstBuffer *outbuf; ++ GstClockTime pts, dts, duration; ++ ++ pts = GST_BUFFER_PTS (buffer); ++ dts = GST_BUFFER_DTS (buffer); ++ duration = GST_BUFFER_DURATION (buffer); ++ ++ outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0); ++ ++ gst_rtp_copy_audio_meta (basepayload, outbuf, buffer); ++ ++ outbuf = gst_buffer_append (outbuf, buffer); ++ ++ GST_BUFFER_PTS (outbuf) = pts; ++ GST_BUFFER_DTS (outbuf) = dts; ++ GST_BUFFER_DURATION (outbuf) = duration; ++ ++ return gst_rtp_base_payload_push (basepayload, outbuf); ++} ++ ++static void 
++gst_rtp_isac_pay_class_init (GstRtpIsacPayClass * klass) ++{ ++ GstElementClass *gstelement_class = (GstElementClass *) klass; ++ GstRTPBasePayloadClass *payload_class = (GstRTPBasePayloadClass *) klass; ++ ++ payload_class->get_caps = gst_rtp_isac_pay_getcaps; ++ payload_class->set_caps = gst_rtp_isac_pay_setcaps; ++ payload_class->handle_buffer = gst_rtp_isac_pay_handle_buffer; ++ ++ gst_element_class_add_static_pad_template (gstelement_class, ++ &gst_rtp_isac_pay_sink_template); ++ gst_element_class_add_static_pad_template (gstelement_class, ++ &gst_rtp_isac_pay_src_template); ++ ++ gst_element_class_set_static_metadata (gstelement_class, ++ "RTP iSAC payloader", "Codec/Payloader/Network/RTP", ++ "Payload-encodes iSAC audio into a RTP packet", ++ "Guillaume Desmottes "); ++ ++ GST_DEBUG_CATEGORY_INIT (rtpisacpay_debug, "rtpisacpay", 0, ++ "iSAC RTP Payloader"); ++} ++ ++static void ++gst_rtp_isac_pay_init (GstRtpIsacPay * rtpisacpay) ++{ ++} +diff --git a/gst/rtp/gstrtpisacpay.h b/gst/rtp/gstrtpisacpay.h +new file mode 100644 +index 000000000..82c072bdd +--- /dev/null ++++ b/gst/rtp/gstrtpisacpay.h +@@ -0,0 +1,31 @@ ++/* GStreamer ++ * Copyright (C) 2020 Collabora Ltd. ++ * Author: Guillaume Desmottes , Collabora Ltd. ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more ++ */ ++ ++ ++#ifndef __GST_RTP_ISAC_PAY_H__ ++#define __GST_RTP_ISAC_PAY_H__ ++ ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++#define GST_TYPE_RTP_ISAC_PAY gst_rtp_isac_pay_get_type () ++ ++G_DECLARE_FINAL_TYPE(GstRtpIsacPay, gst_rtp_isac_pay, GST, RTP_ISAC_PAY, GstRTPBasePayload); ++ ++G_END_DECLS ++ ++#endif /* __GST_RTP_ISAC_PAY_H__ */ +diff --git a/gst/rtp/gstrtpj2kdepay.c b/gst/rtp/gstrtpj2kdepay.c +index 132bcf5df..4456b3dd8 100644 +--- a/gst/rtp/gstrtpj2kdepay.c ++++ b/gst/rtp/gstrtpj2kdepay.c +@@ -37,6 +37,7 @@ + #include + + #include ++#include "gstrtpelements.h" + #include "gstrtpj2kcommon.h" + #include "gstrtpj2kdepay.h" + #include "gstrtputils.h" +@@ -74,6 +75,8 @@ enum + + #define gst_rtp_j2k_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpJ2KDepay, gst_rtp_j2k_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpj2kdepay, "rtpj2kdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_J2K_DEPAY, rtp_element_init (plugin)); + + static void gst_rtp_j2k_depay_finalize (GObject * object); + +@@ -659,10 +662,3 @@ gst_rtp_j2k_depay_change_state (GstElement * element, GstStateChange transition) + } + return ret; + } +- +-gboolean +-gst_rtp_j2k_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpj2kdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_J2K_DEPAY); +-} +diff --git a/gst/rtp/gstrtpj2kdepay.h b/gst/rtp/gstrtpj2kdepay.h +index ebc743397..5f499393d 100644 +--- a/gst/rtp/gstrtpj2kdepay.h ++++ b/gst/rtp/gstrtpj2kdepay.h +@@ -66,7 +66,6 @@ struct _GstRtpJ2KDepayClass + + GType gst_rtp_j2k_depay_get_type (void); + +-gboolean gst_rtp_j2k_depay_plugin_init (GstPlugin * plugin); + + G_END_DECLS + #endif /* __GST_RTP_J2K_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpj2kpay.c b/gst/rtp/gstrtpj2kpay.c 
+index 9ead8a21a..90075d06c 100644 +--- a/gst/rtp/gstrtpj2kpay.c ++++ b/gst/rtp/gstrtpj2kpay.c +@@ -40,6 +40,7 @@ + #include + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpj2kcommon.h" + #include "gstrtpj2kpay.h" + #include "gstrtputils.h" +@@ -97,6 +98,8 @@ static GstFlowReturn gst_rtp_j2k_pay_handle_buffer (GstRTPBasePayload * pad, + + #define gst_rtp_j2k_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpJ2KPay, gst_rtp_j2k_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpj2kpay, "rtpj2kpay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_J2K_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_j2k_pay_class_init (GstRtpJ2KPayClass * klass) +@@ -440,7 +443,9 @@ gst_rtp_j2k_pay_handle_buffer (GstRTPBasePayload * basepayload, + data_size = payload_size - GST_RTP_J2K_HEADER_SIZE; + + /* make buffer for header */ +- outbuf = gst_rtp_buffer_new_allocate (GST_RTP_J2K_HEADER_SIZE, 0, 0); ++ outbuf = ++ gst_rtp_base_payload_allocate_output_buffer (basepayload, ++ GST_RTP_J2K_HEADER_SIZE, 0, 0); + + GST_BUFFER_PTS (outbuf) = timestamp; + +@@ -454,6 +459,7 @@ gst_rtp_j2k_pay_handle_buffer (GstRTPBasePayload * basepayload, + /* reached the end of a packetization unit */ + if (pu_size == 0 && end >= map.size) { + gst_rtp_buffer_set_marker (&rtp, TRUE); ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + } + /* If we were processing a header, see if all fits in one RTP packet + or if we have to fragment it */ +@@ -560,10 +566,3 @@ gst_rtp_j2k_pay_get_property (GObject * object, guint prop_id, + break; + } + } +- +-gboolean +-gst_rtp_j2k_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpj2kpay", GST_RANK_SECONDARY, +- GST_TYPE_RTP_J2K_PAY); +-} +diff --git a/gst/rtp/gstrtpj2kpay.h b/gst/rtp/gstrtpj2kpay.h +index 14f529595..e5474938b 100644 +--- a/gst/rtp/gstrtpj2kpay.h ++++ b/gst/rtp/gstrtpj2kpay.h +@@ -52,7 +52,5 @@ struct _GstRtpJ2KPayClass + + GType gst_rtp_j2k_pay_get_type (void); + +-gboolean gst_rtp_j2k_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + #endif /* __GST_RTP_J2K_PAY_H__ */ +diff --git a/gst/rtp/gstrtpjpegdepay.c b/gst/rtp/gstrtpjpegdepay.c +index 5cd542828..b85b7fbf1 100644 +--- a/gst/rtp/gstrtpjpegdepay.c ++++ b/gst/rtp/gstrtpjpegdepay.c +@@ -28,6 +28,7 @@ + #include + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpjpegdepay.h" + #include "gstrtputils.h" + +@@ -70,6 +71,8 @@ static GstStaticPadTemplate gst_rtp_jpeg_depay_sink_template = + #define gst_rtp_jpeg_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpJPEGDepay, gst_rtp_jpeg_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpjpegdepay, "rtpjpegdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_JPEG_DEPAY, rtp_element_init (plugin)); + + static void gst_rtp_jpeg_depay_finalize (GObject * object); + +@@ -702,7 +705,7 @@ gst_rtp_jpeg_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp) + * marker */ + gst_adapter_copy (rtpjpegdepay->adapter, end, avail - 2, 2); + +- if (end[0] != 0xff && end[1] != 0xd9) { ++ if (GST_READ_UINT16_BE (end) != 0xffd9) { + GST_DEBUG_OBJECT (rtpjpegdepay, "no EOI marker, adding one"); + + /* no EOI marker, add one */ +@@ -790,11 +793,3 @@ gst_rtp_jpeg_depay_change_state (GstElement * element, + } + return ret; + } +- +- +-gboolean +-gst_rtp_jpeg_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpjpegdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_JPEG_DEPAY); +-} +diff --git a/gst/rtp/gstrtpjpegdepay.h 
b/gst/rtp/gstrtpjpegdepay.h +index cb74f12d4..3f7aea219 100644 +--- a/gst/rtp/gstrtpjpegdepay.h ++++ b/gst/rtp/gstrtpjpegdepay.h +@@ -63,7 +63,6 @@ struct _GstRtpJPEGDepayClass + + GType gst_rtp_jpeg_depay_get_type (void); + +-gboolean gst_rtp_jpeg_depay_plugin_init (GstPlugin * plugin); + + G_END_DECLS + +diff --git a/gst/rtp/gstrtpjpegpay.c b/gst/rtp/gstrtpjpegpay.c +index 7ba1f51eb..1f7b8d8c9 100644 +--- a/gst/rtp/gstrtpjpegpay.c ++++ b/gst/rtp/gstrtpjpegpay.c +@@ -41,6 +41,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpjpegpay.h" + #include "gstrtputils.h" + #include "gstbuffermemory.h" +@@ -248,6 +249,8 @@ static GstFlowReturn gst_rtp_jpeg_pay_handle_buffer (GstRTPBasePayload * pad, + + #define gst_rtp_jpeg_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpJPEGPay, gst_rtp_jpeg_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpjpegpay, "rtpjpegpay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_JPEG_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_jpeg_pay_class_init (GstRtpJPEGPayClass * klass) +@@ -886,7 +889,9 @@ gst_rtp_jpeg_pay_handle_buffer (GstRTPBasePayload * basepayload, + if (dri_found) + header_size += sizeof (restart_marker_header); + +- outbuf = gst_rtp_buffer_new_allocate (header_size, 0, 0); ++ outbuf = ++ gst_rtp_base_payload_allocate_output_buffer (basepayload, header_size, ++ 0, 0); + + gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp); + +@@ -894,6 +899,7 @@ gst_rtp_jpeg_pay_handle_buffer (GstRTPBasePayload * basepayload, + GST_LOG_OBJECT (pay, "last packet of frame"); + frame_done = TRUE; + gst_rtp_buffer_set_marker (&rtp, 1); ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + } + + payload = gst_rtp_buffer_get_payload (&rtp); +@@ -1046,10 +1052,3 @@ gst_rtp_jpeg_pay_get_property (GObject * object, guint prop_id, + break; + } + } +- +-gboolean +-gst_rtp_jpeg_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpjpegpay", GST_RANK_SECONDARY, +- GST_TYPE_RTP_JPEG_PAY); +-} +diff --git a/gst/rtp/gstrtpjpegpay.h b/gst/rtp/gstrtpjpegpay.h +index 4d65ea71e..696dc39e3 100644 +--- a/gst/rtp/gstrtpjpegpay.h ++++ b/gst/rtp/gstrtpjpegpay.h +@@ -57,7 +57,5 @@ struct _GstRtpJPEGPayClass + + GType gst_rtp_jpeg_pay_get_type (void); + +-gboolean gst_rtp_jpeg_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + #endif /* __GST_RTP_JPEG_PAY_H__ */ +diff --git a/gst/rtp/gstrtpklvdepay.c b/gst/rtp/gstrtpklvdepay.c +index a5026712b..20673c8fb 100644 +--- a/gst/rtp/gstrtpklvdepay.c ++++ b/gst/rtp/gstrtpklvdepay.c +@@ -37,6 +37,7 @@ + #include "config.h" + #endif + ++#include "gstrtpelements.h" + #include "gstrtpklvdepay.h" + + #include +@@ -59,6 +60,8 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink", + + #define gst_rtp_klv_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpKlvDepay, gst_rtp_klv_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpklvdepay, "rtpklvdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_KLV_DEPAY, rtp_element_init (plugin)); + + static void gst_rtp_klv_depay_finalize (GObject * object); + +@@ -327,8 +330,8 @@ gst_rtp_klv_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp) + } + } + +- /* If this is the first packet and looks like a start, clear resync flag */ +- if (klvdepay->resync && klvdepay->last_marker_seq == -1 && start) ++ /* If this looks like a start, clear the resync flag */ ++ if (klvdepay->resync && start) + klvdepay->resync = FALSE; + + if (marker) +@@ -388,10 +391,3 @@ 
gst_rtp_klv_depay_change_state (GstElement * element, GstStateChange transition) + } + return ret; + } +- +-gboolean +-gst_rtp_klv_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpklvdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_KLV_DEPAY); +-} +diff --git a/gst/rtp/gstrtpklvdepay.h b/gst/rtp/gstrtpklvdepay.h +index 71a256d2b..e1042a736 100644 +--- a/gst/rtp/gstrtpklvdepay.h ++++ b/gst/rtp/gstrtpklvdepay.h +@@ -58,8 +58,6 @@ struct _GstRtpKlvDepayClass + + G_GNUC_INTERNAL GType gst_rtp_klv_depay_get_type (void); + +-G_GNUC_INTERNAL gboolean gst_rtp_klv_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_KLV_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpklvpay.c b/gst/rtp/gstrtpklvpay.c +index f24d29f3c..00f1f4321 100644 +--- a/gst/rtp/gstrtpklvpay.c ++++ b/gst/rtp/gstrtpklvpay.c +@@ -37,6 +37,7 @@ + #include "config.h" + #endif + ++#include "gstrtpelements.h" + #include "gstrtpklvpay.h" + #include "gstrtputils.h" + +@@ -60,6 +61,8 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink", + + #define gst_rtp_klv_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpKlvPay, gst_rtp_klv_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpklvpay, "rtpklvpay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_KLV_PAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_klv_pay_setcaps (GstRTPBasePayload * pay, + GstCaps * caps); +@@ -147,13 +150,14 @@ gst_rtp_klv_pay_handle_buffer (GstRTPBasePayload * basepayload, GstBuffer * buf) + bytes_left = map.size - offset; + payload_size = MIN (bytes_left, max_payload_size); + +- outbuf = gst_rtp_buffer_new_allocate (0, 0, 0); ++ outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0); + + if (payload_size == bytes_left) { + GST_LOG_OBJECT (pay, "last packet of KLV unit"); + gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp); + gst_rtp_buffer_set_marker (&rtp, 1); + gst_rtp_buffer_unmap (&rtp); ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + } + + GST_LOG_OBJECT (pay, "packet with payload size %u", payload_size); +@@ -195,10 +199,3 @@ bad_input: + goto done; + } + } +- +-gboolean +-gst_rtp_klv_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpklvpay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_KLV_PAY); +-} +diff --git a/gst/rtp/gstrtpklvpay.h b/gst/rtp/gstrtpklvpay.h +index 9ee813429..41187d6cd 100644 +--- a/gst/rtp/gstrtpklvpay.h ++++ b/gst/rtp/gstrtpklvpay.h +@@ -53,8 +53,6 @@ struct _GstRtpKlvPayClass + + G_GNUC_INTERNAL GType gst_rtp_klv_pay_get_type (void); + +-G_GNUC_INTERNAL gboolean gst_rtp_klv_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_KLV_PAY_H__ */ +diff --git a/gst/rtp/gstrtpldacpay.c b/gst/rtp/gstrtpldacpay.c +new file mode 100644 +index 000000000..aa30673e7 +--- /dev/null ++++ b/gst/rtp/gstrtpldacpay.c +@@ -0,0 +1,228 @@ ++/* GStreamer RTP LDAC payloader ++ * Copyright (C) 2020 Asymptotic ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. 
++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++/** ++ * SECTION:element-rtpldacpay ++ * @title: rtpldacpay ++ * ++ * Payload LDAC encoded audio into RTP packets. ++ * ++ * LDAC does not have a public specification and concerns itself only with ++ * bluetooth transmission. Due to the unavailability of a specification, we ++ * consider the encoding-name as X-GST-LDAC. ++ * ++ * The best reference is [libldac](https://android.googlesource.com/platform/external/libldac/) ++ * and the A2DP LDAC implementation in Android's bluetooth stack [Flouride] ++ * (https://android.googlesource.com/platform/system/bt/+/refs/heads/master/stack/a2dp/a2dp_vendor_ldac_encoder.cc). ++ * ++ * ## Example pipeline ++ * |[ ++ * gst-launch-1.0 -v audiotestsrc ! ldacenc ! rtpldacpay mtu=679 ! avdtpsink ++ * ]| This example pipeline will payload LDAC encoded audio. ++ * ++ * Since: 1.20 ++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include ++#endif ++ ++#include ++#include "gstrtpelements.h" ++#include "gstrtpldacpay.h" ++#include "gstrtputils.h" ++ ++#define GST_RTP_LDAC_PAYLOAD_HEADER_SIZE 1 ++/* MTU size required for LDAC A2DP streaming */ ++#define GST_LDAC_MTU_REQUIRED 679 ++ ++GST_DEBUG_CATEGORY_STATIC (gst_rtp_ldac_pay_debug); ++#define GST_CAT_DEFAULT gst_rtp_ldac_pay_debug ++ ++#define parent_class gst_rtp_ldac_pay_parent_class ++G_DEFINE_TYPE (GstRtpLdacPay, gst_rtp_ldac_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpldacpay, "rtpldacpay", GST_RANK_NONE, ++ GST_TYPE_RTP_LDAC_PAY, rtp_element_init (plugin)); ++ ++static GstStaticPadTemplate gst_rtp_ldac_pay_sink_factory = ++GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, ++ GST_STATIC_CAPS ("audio/x-ldac, " ++ "channels = (int) [ 1, 2 ], " ++ "eqmid = (int) { 0, 1, 2 }, " ++ "rate = (int) { 44100, 48000, 88200, 96000 }") ++ ); ++ ++static GstStaticPadTemplate gst_rtp_ldac_pay_src_factory = ++GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS, ++ GST_STATIC_CAPS ("application/x-rtp, " ++ "media = (string) audio," ++ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", " ++ "clock-rate = (int) { 44100, 48000, 88200, 96000 }," ++ "encoding-name = (string) \"X-GST-LDAC\"") ++ ); ++ ++static gboolean gst_rtp_ldac_pay_set_caps (GstRTPBasePayload * payload, ++ GstCaps * caps); ++static GstFlowReturn gst_rtp_ldac_pay_handle_buffer (GstRTPBasePayload * ++ payload, GstBuffer * buffer); ++ ++/** ++ * gst_rtp_ldac_pay_get_num_frames ++ * @eqmid: Encode Quality Mode Index ++ * @channels: Number of channels ++ * ++ * Returns: Number of LDAC frames per packet. 
++ */ ++static guint8 ++gst_rtp_ldac_pay_get_num_frames (gint eqmid, gint channels) ++{ ++ g_assert (channels == 1 || channels == 2); ++ ++ switch (eqmid) { ++ /* Encode setting for High Quality */ ++ case 0: ++ return 4 / channels; ++ /* Encode setting for Standard Quality */ ++ case 1: ++ return 6 / channels; ++ /* Encode setting for Mobile use Quality */ ++ case 2: ++ return 12 / channels; ++ default: ++ break; ++ } ++ ++ g_assert_not_reached (); ++ ++ /* If assertion gets compiled out */ ++ return 6 / channels; ++} ++ ++static void ++gst_rtp_ldac_pay_class_init (GstRtpLdacPayClass * klass) ++{ ++ GstRTPBasePayloadClass *payload_class = GST_RTP_BASE_PAYLOAD_CLASS (klass); ++ GstElementClass *element_class = GST_ELEMENT_CLASS (klass); ++ ++ payload_class->set_caps = GST_DEBUG_FUNCPTR (gst_rtp_ldac_pay_set_caps); ++ payload_class->handle_buffer = ++ GST_DEBUG_FUNCPTR (gst_rtp_ldac_pay_handle_buffer); ++ ++ gst_element_class_add_static_pad_template (element_class, ++ &gst_rtp_ldac_pay_sink_factory); ++ gst_element_class_add_static_pad_template (element_class, ++ &gst_rtp_ldac_pay_src_factory); ++ ++ gst_element_class_set_static_metadata (element_class, "RTP packet payloader", ++ "Codec/Payloader/Network", "Payload LDAC audio as RTP packets", ++ "Sanchayan Maity "); ++ ++ GST_DEBUG_CATEGORY_INIT (gst_rtp_ldac_pay_debug, "rtpldacpay", 0, ++ "RTP LDAC payloader"); ++} ++ ++static void ++gst_rtp_ldac_pay_init (GstRtpLdacPay * self) ++{ ++ ++} ++ ++static gboolean ++gst_rtp_ldac_pay_set_caps (GstRTPBasePayload * payload, GstCaps * caps) ++{ ++ GstRtpLdacPay *ldacpay = GST_RTP_LDAC_PAY (payload); ++ GstStructure *structure; ++ gint channels, eqmid, rate; ++ ++ if (GST_RTP_BASE_PAYLOAD_MTU (ldacpay) < GST_LDAC_MTU_REQUIRED) { ++ GST_ERROR_OBJECT (ldacpay, "Invalid MTU %d, should be >= %d", ++ GST_RTP_BASE_PAYLOAD_MTU (ldacpay), GST_LDAC_MTU_REQUIRED); ++ return FALSE; ++ } ++ ++ structure = gst_caps_get_structure (caps, 0); ++ if (!gst_structure_get_int (structure, "rate", &rate)) { ++ GST_ERROR_OBJECT (ldacpay, "Failed to get audio rate from caps"); ++ return FALSE; ++ } ++ ++ if (!gst_structure_get_int (structure, "channels", &channels)) { ++ GST_ERROR_OBJECT (ldacpay, "Failed to get audio rate from caps"); ++ return FALSE; ++ } ++ ++ if (!gst_structure_get_int (structure, "eqmid", &eqmid)) { ++ GST_ERROR_OBJECT (ldacpay, "Failed to get eqmid from caps"); ++ return FALSE; ++ } ++ ++ ldacpay->frame_count = gst_rtp_ldac_pay_get_num_frames (eqmid, channels); ++ ++ gst_rtp_base_payload_set_options (payload, "audio", TRUE, "X-GST-LDAC", rate); ++ ++ return gst_rtp_base_payload_set_outcaps (payload, NULL); ++} ++ ++/* ++ * LDAC encoder does not handle split frames. Currently, the encoder will ++ * always emit 660 bytes worth of payload encapsulating multiple LDAC frames. ++ * This is as per eqmid and GST_LDAC_MTU_REQUIRED passed for configuring the ++ * encoder upstream. Since the encoder always emit full frames and we do not ++ * need to handle frame splitting, we do not use an adapter and also push out ++ * the buffer as it is received. 
++ */ ++static GstFlowReturn ++gst_rtp_ldac_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer) ++{ ++ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ GstRtpLdacPay *ldacpay = GST_RTP_LDAC_PAY (payload); ++ GstBuffer *outbuf; ++ GstClockTime outbuf_frame_duration, outbuf_pts; ++ guint8 *payload_data; ++ gsize buf_sz; ++ ++ outbuf = ++ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD ++ (ldacpay), GST_RTP_LDAC_PAYLOAD_HEADER_SIZE, 0, 0); ++ ++ /* Get payload */ ++ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp); ++ ++ /* Write header and copy data into payload */ ++ payload_data = gst_rtp_buffer_get_payload (&rtp); ++ /* Upper 3 fragment bits not used, ref A2DP v13, 4.3.4 */ ++ payload_data[0] = ldacpay->frame_count & 0x0f; ++ ++ gst_rtp_buffer_unmap (&rtp); ++ ++ outbuf_pts = GST_BUFFER_PTS (buffer); ++ outbuf_frame_duration = GST_BUFFER_DURATION (buffer); ++ buf_sz = gst_buffer_get_size (buffer); ++ ++ gst_rtp_copy_audio_meta (ldacpay, outbuf, buffer); ++ outbuf = gst_buffer_append (outbuf, buffer); ++ ++ GST_BUFFER_PTS (outbuf) = outbuf_pts; ++ GST_BUFFER_DURATION (outbuf) = outbuf_frame_duration; ++ GST_DEBUG_OBJECT (ldacpay, ++ "Pushing %" G_GSIZE_FORMAT " bytes: %" GST_TIME_FORMAT, buf_sz, ++ GST_TIME_ARGS (GST_BUFFER_PTS (outbuf))); ++ ++ return gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (ldacpay), outbuf); ++} +diff --git a/gst/rtp/gstrtpldacpay.h b/gst/rtp/gstrtpldacpay.h +new file mode 100644 +index 000000000..013449175 +--- /dev/null ++++ b/gst/rtp/gstrtpldacpay.h +@@ -0,0 +1,56 @@ ++/* GStreamer RTP LDAC payloader ++ * Copyright (C) 2020 Asymptotic ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++#include ++#include ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++#define GST_TYPE_RTP_LDAC_PAY \ ++ (gst_rtp_ldac_pay_get_type()) ++#define GST_RTP_LDAC_PAY(obj) \ ++ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_LDAC_PAY,\ ++ GstRtpLdacPay)) ++#define GST_RTP_LDAC_PAY_CLASS(klass) \ ++ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_LDAC_PAY,\ ++ GstRtpLdacPayClass)) ++#define GST_IS_RTP_LDAC_PAY(obj) \ ++ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_LDAC_PAY)) ++#define GST_IS_RTP_LDAC_PAY_CLASS(obj) \ ++ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_LDAC_PAY)) ++ ++typedef struct _GstRtpLdacPay GstRtpLdacPay; ++typedef struct _GstRtpLdacPayClass GstRtpLdacPayClass; ++ ++struct _GstRtpLdacPay { ++ GstRTPBasePayload base; ++ guint8 frame_count; ++}; ++ ++struct _GstRtpLdacPayClass { ++ GstRTPBasePayloadClass parent_class; ++}; ++ ++GType gst_rtp_ldac_pay_get_type(void); ++ ++gboolean gst_rtp_ldac_pay_plugin_init (GstPlugin * plugin); ++ ++G_END_DECLS +diff --git a/gst/rtp/gstrtpmp1sdepay.c b/gst/rtp/gstrtpmp1sdepay.c +index 31a3108d2..e07dc2cd3 100644 +--- a/gst/rtp/gstrtpmp1sdepay.c ++++ b/gst/rtp/gstrtpmp1sdepay.c +@@ -24,6 +24,7 @@ + #include + + #include ++#include "gstrtpelements.h" + #include "gstrtpmp1sdepay.h" + #include "gstrtputils.h" + +@@ -63,6 +64,8 @@ static GstStaticPadTemplate gst_rtp_mp1s_depay_sink_template = + + G_DEFINE_TYPE (GstRtpMP1SDepay, gst_rtp_mp1s_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp1sdepay, "rtpmp1sdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_MP1S_DEPAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_mp1s_depay_setcaps (GstRTPBaseDepayload * depayload, + GstCaps * caps); +@@ -134,10 +137,3 @@ gst_rtp_mp1s_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp) + + return outbuf; + } +- +-gboolean +-gst_rtp_mp1s_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpmp1sdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_MP1S_DEPAY); +-} +diff --git a/gst/rtp/gstrtpmp1sdepay.h b/gst/rtp/gstrtpmp1sdepay.h +index 582933bad..e2e582c52 100644 +--- a/gst/rtp/gstrtpmp1sdepay.h ++++ b/gst/rtp/gstrtpmp1sdepay.h +@@ -51,8 +51,6 @@ struct _GstRtpMP1SDepayClass + + GType gst_rtp_mp1s_depay_get_type (void); + +-gboolean gst_rtp_mp1s_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_MP1S_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpmp2tdepay.c b/gst/rtp/gstrtpmp2tdepay.c +index f8e0d0c7b..7acf3f819 100644 +--- a/gst/rtp/gstrtpmp2tdepay.c ++++ b/gst/rtp/gstrtpmp2tdepay.c +@@ -24,6 +24,7 @@ + #include + + #include ++#include "gstrtpelements.h" + #include "gstrtpmp2tdepay.h" + #include "gstrtputils.h" + +@@ -71,6 +72,8 @@ static GstStaticPadTemplate gst_rtp_mp2t_depay_sink_template = + + G_DEFINE_TYPE (GstRtpMP2TDepay, gst_rtp_mp2t_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp2tdepay, "rtpmp2tdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_MP2T_DEPAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_mp2t_depay_setcaps (GstRTPBaseDepayload * depayload, + GstCaps * caps); +@@ -234,10 +237,3 @@ gst_rtp_mp2t_depay_get_property (GObject * object, guint prop_id, + break; + } + } +- +-gboolean +-gst_rtp_mp2t_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpmp2tdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_MP2T_DEPAY); +-} +diff --git a/gst/rtp/gstrtpmp2tdepay.h b/gst/rtp/gstrtpmp2tdepay.h +index aa936dca9..f1e5d0829 100644 +--- a/gst/rtp/gstrtpmp2tdepay.h ++++ 
b/gst/rtp/gstrtpmp2tdepay.h +@@ -53,8 +53,6 @@ struct _GstRtpMP2TDepayClass + + GType gst_rtp_mp2t_depay_get_type (void); + +-gboolean gst_rtp_mp2t_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_MP2T_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpmp2tpay.c b/gst/rtp/gstrtpmp2tpay.c +index 8dac50b9a..ecde5a298 100644 +--- a/gst/rtp/gstrtpmp2tpay.c ++++ b/gst/rtp/gstrtpmp2tpay.c +@@ -25,6 +25,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpmp2tpay.h" + #include "gstrtputils.h" + +@@ -59,6 +60,8 @@ static void gst_rtp_mp2t_pay_finalize (GObject * object); + + #define gst_rtp_mp2t_pay_parent_class parent_class + G_DEFINE_TYPE (GstRTPMP2TPay, gst_rtp_mp2t_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp2tpay, "rtpmp2tpay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_MP2T_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_mp2t_pay_class_init (GstRTPMP2TPayClass * klass) +@@ -230,10 +233,3 @@ again: + return ret; + + } +- +-gboolean +-gst_rtp_mp2t_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpmp2tpay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_MP2T_PAY); +-} +diff --git a/gst/rtp/gstrtpmp2tpay.h b/gst/rtp/gstrtpmp2tpay.h +index 12f49599f..9ed02d998 100644 +--- a/gst/rtp/gstrtpmp2tpay.h ++++ b/gst/rtp/gstrtpmp2tpay.h +@@ -57,8 +57,6 @@ struct _GstRTPMP2TPayClass + + GType gst_rtp_mp2t_pay_get_type (void); + +-gboolean gst_rtp_mp2t_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_MP2T_PAY_H__ */ +diff --git a/gst/rtp/gstrtpmp4adepay.c b/gst/rtp/gstrtpmp4adepay.c +index 9177d7c3d..f278fc598 100644 +--- a/gst/rtp/gstrtpmp4adepay.c ++++ b/gst/rtp/gstrtpmp4adepay.c +@@ -26,6 +26,7 @@ + #include + + #include ++#include "gstrtpelements.h" + #include "gstrtpmp4adepay.h" + #include "gstrtputils.h" + +@@ -60,6 +61,8 @@ GST_STATIC_PAD_TEMPLATE ("sink", + #define gst_rtp_mp4a_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpMP4ADepay, gst_rtp_mp4a_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp4adepay, "rtpmp4adepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_MP4A_DEPAY, rtp_element_init (plugin)); + + static void gst_rtp_mp4a_depay_finalize (GObject * object); + +@@ -457,10 +460,3 @@ gst_rtp_mp4a_depay_change_state (GstElement * element, + } + return ret; + } +- +-gboolean +-gst_rtp_mp4a_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpmp4adepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_MP4A_DEPAY); +-} +diff --git a/gst/rtp/gstrtpmp4adepay.h b/gst/rtp/gstrtpmp4adepay.h +index 31eaf560e..c5aaaa349 100644 +--- a/gst/rtp/gstrtpmp4adepay.h ++++ b/gst/rtp/gstrtpmp4adepay.h +@@ -56,8 +56,6 @@ struct _GstRtpMP4ADepayClass + + GType gst_rtp_mp4a_depay_get_type (void); + +-gboolean gst_rtp_mp4a_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_MP4A_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpmp4apay.c b/gst/rtp/gstrtpmp4apay.c +index ab1eeb55a..f94bb723d 100644 +--- a/gst/rtp/gstrtpmp4apay.c ++++ b/gst/rtp/gstrtpmp4apay.c +@@ -26,6 +26,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpmp4apay.h" + #include "gstrtputils.h" + +@@ -68,6 +69,8 @@ static GstFlowReturn gst_rtp_mp4a_pay_handle_buffer (GstRTPBasePayload * + + #define gst_rtp_mp4a_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpMP4APay, gst_rtp_mp4a_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp4apay, "rtpmp4apay", ++ GST_RANK_SECONDARY, 
GST_TYPE_RTP_MP4A_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_mp4a_pay_class_init (GstRtpMP4APayClass * klass) +@@ -429,6 +432,8 @@ gst_rtp_mp4a_pay_handle_buffer (GstRTPBasePayload * basepayload, + + /* marker only if the packet is complete */ + gst_rtp_buffer_set_marker (&rtp, size == payload_len); ++ if (size == payload_len) ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + + gst_rtp_buffer_unmap (&rtp); + +@@ -456,10 +461,3 @@ gst_rtp_mp4a_pay_handle_buffer (GstRTPBasePayload * basepayload, + + return ret; + } +- +-gboolean +-gst_rtp_mp4a_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpmp4apay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_MP4A_PAY); +-} +diff --git a/gst/rtp/gstrtpmp4apay.h b/gst/rtp/gstrtpmp4apay.h +index 49d9b650d..997aa3541 100644 +--- a/gst/rtp/gstrtpmp4apay.h ++++ b/gst/rtp/gstrtpmp4apay.h +@@ -58,8 +58,6 @@ struct _GstRtpMP4APayClass + + GType gst_rtp_mp4a_pay_get_type (void); + +-gboolean gst_rtp_mp4a_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_MP4A_PAY_H__ */ +diff --git a/gst/rtp/gstrtpmp4gdepay.c b/gst/rtp/gstrtpmp4gdepay.c +index a734be065..8ee094d5b 100644 +--- a/gst/rtp/gstrtpmp4gdepay.c ++++ b/gst/rtp/gstrtpmp4gdepay.c +@@ -25,6 +25,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpmp4gdepay.h" + #include "gstrtputils.h" + +@@ -129,6 +130,8 @@ gst_bs_parse_read (GstBsParse * bs, guint n) + #define gst_rtp_mp4g_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpMP4GDepay, gst_rtp_mp4g_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp4gdepay, "rtpmp4gdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_MP4G_DEPAY, rtp_element_init (plugin)); + + static void gst_rtp_mp4g_depay_finalize (GObject * object); + +@@ -806,10 +809,3 @@ gst_rtp_mp4g_depay_change_state (GstElement * element, + } + return ret; + } +- +-gboolean +-gst_rtp_mp4g_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpmp4gdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_MP4G_DEPAY); +-} +diff --git a/gst/rtp/gstrtpmp4gdepay.h b/gst/rtp/gstrtpmp4gdepay.h +index 843732813..a6a88a0d4 100644 +--- a/gst/rtp/gstrtpmp4gdepay.h ++++ b/gst/rtp/gstrtpmp4gdepay.h +@@ -82,8 +82,6 @@ struct _GstRtpMP4GDepayClass + + GType gst_rtp_mp4g_depay_get_type (void); + +-gboolean gst_rtp_mp4g_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_MP4G_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpmp4gpay.c b/gst/rtp/gstrtpmp4gpay.c +index 532e2de25..2434d9035 100644 +--- a/gst/rtp/gstrtpmp4gpay.c ++++ b/gst/rtp/gstrtpmp4gpay.c +@@ -26,6 +26,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpmp4gpay.h" + #include "gstrtputils.h" + +@@ -88,6 +89,8 @@ static gboolean gst_rtp_mp4g_pay_sink_event (GstRTPBasePayload * payload, + + #define gst_rtp_mp4g_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpMP4GPay, gst_rtp_mp4g_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp4gpay, "rtpmp4gpay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_MP4G_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_mp4g_pay_class_init (GstRtpMP4GPayClass * klass) +@@ -531,6 +534,8 @@ gst_rtp_mp4g_pay_flush (GstRtpMP4GPay * rtpmp4gpay) + + /* marker only if the packet is complete */ + gst_rtp_buffer_set_marker (&rtp, avail <= payload_len); ++ if (avail <= payload_len) ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + + gst_rtp_buffer_unmap (&rtp); + +@@ -632,10 +637,3 @@ 
gst_rtp_mp4g_pay_change_state (GstElement * element, GstStateChange transition) + + return ret; + } +- +-gboolean +-gst_rtp_mp4g_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpmp4gpay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_MP4G_PAY); +-} +diff --git a/gst/rtp/gstrtpmp4gpay.h b/gst/rtp/gstrtpmp4gpay.h +index 14fd68ab1..6e7a62584 100644 +--- a/gst/rtp/gstrtpmp4gpay.h ++++ b/gst/rtp/gstrtpmp4gpay.h +@@ -65,8 +65,6 @@ struct _GstRtpMP4GPayClass + + GType gst_rtp_mp4g_pay_get_type (void); + +-gboolean gst_rtp_mp4g_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_MP4G_PAY_H__ */ +diff --git a/gst/rtp/gstrtpmp4vdepay.c b/gst/rtp/gstrtpmp4vdepay.c +index c860f88b8..204828c47 100644 +--- a/gst/rtp/gstrtpmp4vdepay.c ++++ b/gst/rtp/gstrtpmp4vdepay.c +@@ -25,6 +25,7 @@ + #include + + #include ++#include "gstrtpelements.h" + #include "gstrtpmp4vdepay.h" + #include "gstrtputils.h" + +@@ -57,6 +58,8 @@ GST_STATIC_PAD_TEMPLATE ("sink", + #define gst_rtp_mp4v_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpMP4VDepay, gst_rtp_mp4v_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp4vdepay, "rtpmp4vdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_MP4V_DEPAY, rtp_element_init (plugin)); + + static void gst_rtp_mp4v_depay_finalize (GObject * object); + +@@ -218,10 +221,3 @@ gst_rtp_mp4v_depay_change_state (GstElement * element, + } + return ret; + } +- +-gboolean +-gst_rtp_mp4v_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpmp4vdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_MP4V_DEPAY); +-} +diff --git a/gst/rtp/gstrtpmp4vdepay.h b/gst/rtp/gstrtpmp4vdepay.h +index 436e0db31..0eecdf07f 100644 +--- a/gst/rtp/gstrtpmp4vdepay.h ++++ b/gst/rtp/gstrtpmp4vdepay.h +@@ -54,8 +54,6 @@ struct _GstRtpMP4VDepayClass + + GType gst_rtp_mp4v_depay_get_type (void); + +-gboolean gst_rtp_mp4v_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_MP4V_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpmp4vpay.c b/gst/rtp/gstrtpmp4vpay.c +index 29803392a..530df853d 100644 +--- a/gst/rtp/gstrtpmp4vpay.c ++++ b/gst/rtp/gstrtpmp4vpay.c +@@ -26,6 +26,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpmp4vpay.h" + #include "gstrtputils.h" + +@@ -81,6 +82,12 @@ static gboolean gst_rtp_mp4v_pay_sink_event (GstRTPBasePayload * pay, + + #define gst_rtp_mp4v_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpMP4VPay, gst_rtp_mp4v_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++/* Note: This element is marked at a "+1" rank to make sure that ++ * auto-plugging of payloaders for MPEG4 elementary streams don't ++ * end up using the 'rtpmp4gpay' element (generic mpeg4) which isn't ++ * as well supported as this RFC */ ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp4vpay, "rtpmp4vpay", ++ GST_RANK_SECONDARY + 1, GST_TYPE_RTP_MP4V_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_mp4v_pay_class_init (GstRtpMP4VPayClass * klass) +@@ -278,7 +285,9 @@ gst_rtp_mp4v_pay_flush (GstRtpMP4VPay * rtpmp4vpay) + + /* create buffer without payload. The payload will be put + * in next buffer instead. 
Both buffers will be merged */ +- outbuf = gst_rtp_buffer_new_allocate (0, 0, 0); ++ outbuf = ++ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD ++ (rtpmp4vpay), 0, 0, 0); + + /* Take buffer with the payload from the adapter */ + outbuf_data = gst_adapter_take_buffer_fast (rtpmp4vpay->adapter, +@@ -288,6 +297,8 @@ gst_rtp_mp4v_pay_flush (GstRtpMP4VPay * rtpmp4vpay) + + gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp); + gst_rtp_buffer_set_marker (&rtp, avail == 0); ++ if (avail == 0) ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + gst_rtp_buffer_unmap (&rtp); + gst_rtp_copy_video_meta (rtpmp4vpay, outbuf, outbuf_data); + outbuf = gst_buffer_append (outbuf, outbuf_data); +@@ -629,14 +640,3 @@ gst_rtp_mp4v_pay_get_property (GObject * object, guint prop_id, + break; + } + } +- +-gboolean +-gst_rtp_mp4v_pay_plugin_init (GstPlugin * plugin) +-{ +- /* Note: This element is marked at a "+1" rank to make sure that +- * auto-plugging of payloaders for MPEG4 elementary streams don't +- * end up using the 'rtpmp4gpay' element (generic mpeg4) which isn't +- * as well supported as this RFC */ +- return gst_element_register (plugin, "rtpmp4vpay", +- GST_RANK_SECONDARY + 1, GST_TYPE_RTP_MP4V_PAY); +-} +diff --git a/gst/rtp/gstrtpmp4vpay.h b/gst/rtp/gstrtpmp4vpay.h +index 26c0ba0dd..1d906fd6f 100644 +--- a/gst/rtp/gstrtpmp4vpay.h ++++ b/gst/rtp/gstrtpmp4vpay.h +@@ -67,8 +67,6 @@ struct _GstRtpMP4VPayClass + + GType gst_rtp_mp4v_pay_get_type (void); + +-gboolean gst_rtp_mp4v_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_MP4V_PAY_H__ */ +diff --git a/gst/rtp/gstrtpmpadepay.c b/gst/rtp/gstrtpmpadepay.c +index 18506db25..afa852f32 100644 +--- a/gst/rtp/gstrtpmpadepay.c ++++ b/gst/rtp/gstrtpmpadepay.c +@@ -25,6 +25,7 @@ + #include + + #include ++#include "gstrtpelements.h" + #include "gstrtpmpadepay.h" + #include "gstrtputils.h" + +@@ -53,6 +54,8 @@ static GstStaticPadTemplate gst_rtp_mpa_depay_sink_template = + + #define gst_rtp_mpa_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpMPADepay, gst_rtp_mpa_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmpadepay, "rtpmpadepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_MPA_DEPAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_mpa_depay_setcaps (GstRTPBaseDepayload * depayload, + GstCaps * caps); +@@ -172,10 +175,3 @@ empty_packet: + return NULL; + } + } +- +-gboolean +-gst_rtp_mpa_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpmpadepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_MPA_DEPAY); +-} +diff --git a/gst/rtp/gstrtpmpadepay.h b/gst/rtp/gstrtpmpadepay.h +index 1070d779b..9c06df345 100644 +--- a/gst/rtp/gstrtpmpadepay.h ++++ b/gst/rtp/gstrtpmpadepay.h +@@ -51,8 +51,6 @@ struct _GstRtpMPADepayClass + + GType gst_rtp_mpa_depay_get_type (void); + +-gboolean gst_rtp_mpa_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_MPA_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpmpapay.c b/gst/rtp/gstrtpmpapay.c +index 62639f712..2c59a85e6 100644 +--- a/gst/rtp/gstrtpmpapay.c ++++ b/gst/rtp/gstrtpmpapay.c +@@ -26,6 +26,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpmpapay.h" + #include "gstrtputils.h" + +@@ -68,6 +69,8 @@ static GstFlowReturn gst_rtp_mpa_pay_handle_buffer (GstRTPBasePayload * payload, + + #define gst_rtp_mpa_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpMPAPay, gst_rtp_mpa_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmpapay, 
"rtpmpapay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_MPA_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_mpa_pay_class_init (GstRtpMPAPayClass * klass) +@@ -239,8 +242,10 @@ gst_rtp_mpa_pay_flush (GstRtpMPAPay * rtpmpapay) + avail -= payload_len; + frag_offset += payload_len; + +- if (avail == 0) ++ if (avail == 0) { + gst_rtp_buffer_set_marker (&rtp, TRUE); ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); ++ } + + gst_rtp_buffer_unmap (&rtp); + +@@ -336,10 +341,3 @@ gst_rtp_mpa_pay_change_state (GstElement * element, GstStateChange transition) + } + return ret; + } +- +-gboolean +-gst_rtp_mpa_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpmpapay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_MPA_PAY); +-} +diff --git a/gst/rtp/gstrtpmpapay.h b/gst/rtp/gstrtpmpapay.h +index db298528e..759ce87e8 100644 +--- a/gst/rtp/gstrtpmpapay.h ++++ b/gst/rtp/gstrtpmpapay.h +@@ -56,8 +56,6 @@ struct _GstRtpMPAPayClass + + GType gst_rtp_mpa_pay_get_type (void); + +-gboolean gst_rtp_mpa_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_MPA_PAY_H__ */ +diff --git a/gst/rtp/gstrtpmparobustdepay.c b/gst/rtp/gstrtpmparobustdepay.c +index 702d2b0ea..ca7f1f19a 100644 +--- a/gst/rtp/gstrtpmparobustdepay.c ++++ b/gst/rtp/gstrtpmparobustdepay.c +@@ -26,6 +26,7 @@ + + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpmparobustdepay.h" + + GST_DEBUG_CATEGORY_STATIC (rtpmparobustdepay_debug); +@@ -70,6 +71,9 @@ typedef struct _GstADUFrame + #define gst_rtp_mpa_robust_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpMPARobustDepay, gst_rtp_mpa_robust_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmparobustdepay, "rtpmparobustdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_MPA_ROBUST_DEPAY, ++ rtp_element_init (plugin)); + + static GstStateChangeReturn gst_rtp_mpa_robust_change_state (GstElement * + element, GstStateChange transition); +@@ -802,10 +806,3 @@ gst_rtp_mpa_robust_change_state (GstElement * element, + + return ret; + } +- +-gboolean +-gst_rtp_mpa_robust_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpmparobustdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_MPA_ROBUST_DEPAY); +-} +diff --git a/gst/rtp/gstrtpmparobustdepay.h b/gst/rtp/gstrtpmparobustdepay.h +index fc9ec0b69..8a3f51bc5 100644 +--- a/gst/rtp/gstrtpmparobustdepay.h ++++ b/gst/rtp/gstrtpmparobustdepay.h +@@ -71,8 +71,6 @@ struct _GstRtpMPARobustDepayClass + + GType gst_rtp_mpa_robust_depay_get_type (void); + +-gboolean gst_rtp_mpa_robust_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_MPA_ROBUST_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpmpvdepay.c b/gst/rtp/gstrtpmpvdepay.c +index 973194605..71c071251 100644 +--- a/gst/rtp/gstrtpmpvdepay.c ++++ b/gst/rtp/gstrtpmpvdepay.c +@@ -25,6 +25,7 @@ + #include + + #include ++#include "gstrtpelements.h" + #include "gstrtpmpvdepay.h" + #include "gstrtputils.h" + +@@ -55,6 +56,8 @@ static GstStaticPadTemplate gst_rtp_mpv_depay_sink_template = + ); + + G_DEFINE_TYPE (GstRtpMPVDepay, gst_rtp_mpv_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmpvdepay, "rtpmpvdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_MPV_DEPAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_mpv_depay_setcaps (GstRTPBaseDepayload * depayload, + GstCaps * caps); +@@ -189,10 +192,3 @@ empty_packet: + return NULL; + } + } +- +-gboolean +-gst_rtp_mpv_depay_plugin_init (GstPlugin * plugin) +-{ +- return 
gst_element_register (plugin, "rtpmpvdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_MPV_DEPAY); +-} +diff --git a/gst/rtp/gstrtpmpvdepay.h b/gst/rtp/gstrtpmpvdepay.h +index 80f6c4332..158c2dcd4 100644 +--- a/gst/rtp/gstrtpmpvdepay.h ++++ b/gst/rtp/gstrtpmpvdepay.h +@@ -51,8 +51,6 @@ struct _GstRtpMPVDepayClass + + GType gst_rtp_mpv_depay_get_type (void); + +-gboolean gst_rtp_mpv_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_MPV_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpmpvpay.c b/gst/rtp/gstrtpmpvpay.c +index eb73af25e..9d3f30f1c 100644 +--- a/gst/rtp/gstrtpmpvpay.c ++++ b/gst/rtp/gstrtpmpvpay.c +@@ -26,6 +26,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpmpvpay.h" + #include "gstrtputils.h" + +@@ -69,6 +70,8 @@ static gboolean gst_rtp_mpv_pay_sink_event (GstRTPBasePayload * payload, + + #define gst_rtp_mpv_pay_parent_class parent_class + G_DEFINE_TYPE (GstRTPMPVPay, gst_rtp_mpv_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmpvpay, "rtpmpvpay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_MPV_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_mpv_pay_class_init (GstRTPMPVPayClass * klass) +@@ -204,7 +207,9 @@ gst_rtp_mpv_pay_flush (GstRTPMPVPay * rtpmpvpay) + + payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0); + +- outbuf = gst_rtp_buffer_new_allocate (4, 0, 0); ++ outbuf = ++ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD ++ (rtpmpvpay), 4, 0, 0); + + payload_len -= 4; + +@@ -229,6 +234,8 @@ gst_rtp_mpv_pay_flush (GstRTPMPVPay * rtpmpvpay) + avail -= payload_len; + + gst_rtp_buffer_set_marker (&rtp, avail == 0); ++ if (avail == 0) ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); + gst_rtp_buffer_unmap (&rtp); + + paybuf = gst_adapter_take_buffer_fast (rtpmpvpay->adapter, payload_len); +@@ -325,11 +332,3 @@ gst_rtp_mpv_pay_change_state (GstElement * element, GstStateChange transition) + } + return ret; + } +- +- +-gboolean +-gst_rtp_mpv_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpmpvpay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_MPV_PAY); +-} +diff --git a/gst/rtp/gstrtpmpvpay.h b/gst/rtp/gstrtpmpvpay.h +index bcebad932..a909ce92a 100644 +--- a/gst/rtp/gstrtpmpvpay.h ++++ b/gst/rtp/gstrtpmpvpay.h +@@ -57,8 +57,6 @@ struct _GstRTPMPVPayClass + + GType gst_rtp_mpv_pay_get_type (void); + +-gboolean gst_rtp_mpv_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_MPV_PAY_H__ */ +diff --git a/gst/rtp/gstrtpopusdepay.c b/gst/rtp/gstrtpopusdepay.c +index f672339f4..220f5c0ae 100644 +--- a/gst/rtp/gstrtpopusdepay.c ++++ b/gst/rtp/gstrtpopusdepay.c +@@ -27,6 +27,7 @@ + #include + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpopusdepay.h" + #include "gstrtputils.h" + +@@ -41,14 +42,14 @@ GST_STATIC_PAD_TEMPLATE ("sink", + "media = (string) \"audio\", " + "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING "," + "clock-rate = (int) 48000, " +- "encoding-name = (string) { \"OPUS\", \"X-GST-OPUS-DRAFT-SPITTKA-00\" }") ++ "encoding-name = (string) { \"OPUS\", \"X-GST-OPUS-DRAFT-SPITTKA-00\", \"MULTIOPUS\" }") + ); + + static GstStaticPadTemplate gst_rtp_opus_depay_src_template = + GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, +- GST_STATIC_CAPS ("audio/x-opus, channel-mapping-family = (int) 0") ++ GST_STATIC_CAPS ("audio/x-opus, channel-mapping-family = (int) [ 0, 1 ]") + ); + + static GstBuffer *gst_rtp_opus_depay_process (GstRTPBaseDepayload * depayload, +@@ -58,6 +59,8 @@ 
static gboolean gst_rtp_opus_depay_setcaps (GstRTPBaseDepayload * depayload, + + G_DEFINE_TYPE (GstRTPOpusDepay, gst_rtp_opus_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpopusdepay, "rtpopusdepay", ++ GST_RANK_PRIMARY, GST_TYPE_RTP_OPUS_DEPAY, rtp_element_init (plugin)); + + static void + gst_rtp_opus_depay_class_init (GstRTPOpusDepayClass * klass) +@@ -96,38 +99,140 @@ gst_rtp_opus_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps) + GstCaps *srccaps; + GstStructure *s; + gboolean ret; +- const gchar *sprop_stereo, *sprop_maxcapturerate; ++ const gchar *sprop_maxcapturerate; ++ /* Default unless overridden by sprop_maxcapturerate */ ++ gint rate = 48000; + +- srccaps = +- gst_caps_new_simple ("audio/x-opus", "channel-mapping-family", G_TYPE_INT, +- 0, NULL); ++ srccaps = gst_caps_new_empty_simple ("audio/x-opus"); + + s = gst_caps_get_structure (caps, 0); +- if ((sprop_stereo = gst_structure_get_string (s, "sprop-stereo"))) { +- if (strcmp (sprop_stereo, "0") == 0) +- gst_caps_set_simple (srccaps, "channels", G_TYPE_INT, 1, NULL); +- else if (strcmp (sprop_stereo, "1") == 0) ++ ++ if (g_str_equal (gst_structure_get_string (s, "encoding-name"), "MULTIOPUS")) { ++ gint channels; ++ gint stream_count; ++ gint coupled_count; ++ const gchar *encoding_params; ++ const gchar *num_streams; ++ const gchar *coupled_streams; ++ const gchar *channel_mapping; ++ gchar *endptr; ++ ++ if (!gst_structure_has_field_typed (s, "encoding-params", G_TYPE_STRING) || ++ !gst_structure_has_field_typed (s, "num_streams", G_TYPE_STRING) || ++ !gst_structure_has_field_typed (s, "coupled_streams", G_TYPE_STRING) || ++ !gst_structure_has_field_typed (s, "channel_mapping", G_TYPE_STRING)) { ++ GST_WARNING_OBJECT (depayload, "Encoding name 'MULTIOPUS' requires " ++ "encoding-params, num_streams, coupled_streams and channel_mapping " ++ "as string fields in caps."); ++ goto reject_caps; ++ } ++ ++ gst_caps_set_simple (srccaps, "channel-mapping-family", G_TYPE_INT, 1, ++ NULL); ++ ++ encoding_params = gst_structure_get_string (s, "encoding-params"); ++ channels = g_ascii_strtoull (encoding_params, &endptr, 10); ++ if (*endptr != '\0' || channels > 255) { ++ GST_WARNING_OBJECT (depayload, "Invalid encoding-params value '%s'", ++ encoding_params); ++ goto reject_caps; ++ } ++ gst_caps_set_simple (srccaps, "channels", G_TYPE_INT, channels, NULL); ++ ++ num_streams = gst_structure_get_string (s, "num_streams"); ++ stream_count = g_ascii_strtoull (num_streams, &endptr, 10); ++ if (*endptr != '\0' || stream_count > channels) { ++ GST_WARNING_OBJECT (depayload, "Invalid num_streams value '%s'", ++ num_streams); ++ goto reject_caps; ++ } ++ gst_caps_set_simple (srccaps, "stream-count", G_TYPE_INT, stream_count, ++ NULL); ++ ++ coupled_streams = gst_structure_get_string (s, "coupled_streams"); ++ coupled_count = g_ascii_strtoull (coupled_streams, &endptr, 10); ++ if (*endptr != '\0' || coupled_count > stream_count) { ++ GST_WARNING_OBJECT (depayload, "Invalid coupled_streams value '%s'", ++ coupled_streams); ++ goto reject_caps; ++ } ++ gst_caps_set_simple (srccaps, "coupled-count", G_TYPE_INT, coupled_count, ++ NULL); ++ ++ channel_mapping = gst_structure_get_string (s, "channel_mapping"); ++ { ++ gchar **split; ++ gchar **ptr; ++ GValue mapping = G_VALUE_INIT; ++ GValue v = G_VALUE_INIT; ++ ++ split = g_strsplit (channel_mapping, ",", -1); ++ ++ g_value_init (&mapping, GST_TYPE_ARRAY); ++ g_value_init (&v, G_TYPE_INT); ++ ++ for (ptr = split; *ptr; ++ptr) { ++ gint channel = 
g_ascii_strtoull (*ptr, &endptr, 10); ++ if (*endptr != '\0' || channel > channels) { ++ GST_WARNING_OBJECT (depayload, "Invalid channel_mapping value '%s'", ++ channel_mapping); ++ g_value_unset (&mapping); ++ break; ++ } ++ g_value_set_int (&v, channel); ++ gst_value_array_append_value (&mapping, &v); ++ } ++ ++ g_value_unset (&v); ++ g_strfreev (split); ++ ++ if (G_IS_VALUE (&mapping)) { ++ gst_caps_set_value (srccaps, "channel-mapping", &mapping); ++ g_value_unset (&mapping); ++ } else { ++ goto reject_caps; ++ } ++ } ++ } else { ++ const gchar *sprop_stereo; ++ ++ gst_caps_set_simple (srccaps, "channel-mapping-family", G_TYPE_INT, 0, ++ NULL); ++ ++ if ((sprop_stereo = gst_structure_get_string (s, "sprop-stereo"))) { ++ if (strcmp (sprop_stereo, "0") == 0) ++ gst_caps_set_simple (srccaps, "channels", G_TYPE_INT, 1, NULL); ++ else if (strcmp (sprop_stereo, "1") == 0) ++ gst_caps_set_simple (srccaps, "channels", G_TYPE_INT, 2, NULL); ++ else ++ GST_WARNING_OBJECT (depayload, "Unknown sprop-stereo value '%s'", ++ sprop_stereo); ++ } else { ++ /* Although sprop-stereo defaults to mono as per RFC 7587, this just means ++ that the signal is likely mono and can be safely downmixed, it may ++ still be stereo at times. */ + gst_caps_set_simple (srccaps, "channels", G_TYPE_INT, 2, NULL); +- else +- GST_WARNING_OBJECT (depayload, "Unknown sprop-stereo value '%s'", +- sprop_stereo); ++ } + } + + if ((sprop_maxcapturerate = + gst_structure_get_string (s, "sprop-maxcapturerate"))) { +- gulong rate; + gchar *tailptr; ++ gulong tmp_rate; + +- rate = strtoul (sprop_maxcapturerate, &tailptr, 10); +- if (rate > INT_MAX || *tailptr != '\0') { ++ tmp_rate = strtoul (sprop_maxcapturerate, &tailptr, 10); ++ if (tmp_rate > INT_MAX || *tailptr != '\0') { + GST_WARNING_OBJECT (depayload, + "Failed to parse sprop-maxcapturerate value '%s'", + sprop_maxcapturerate); + } else { +- gst_caps_set_simple (srccaps, "rate", G_TYPE_INT, rate, NULL); ++ /* Valid rate from sprop, let's use it */ ++ rate = tmp_rate; + } + } + ++ gst_caps_set_simple (srccaps, "rate", G_TYPE_INT, rate, NULL); ++ + ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps); + + GST_DEBUG_OBJECT (depayload, +@@ -137,6 +242,11 @@ gst_rtp_opus_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps) + depayload->clock_rate = 48000; + + return ret; ++ ++reject_caps: ++ gst_caps_unref (srccaps); ++ ++ return FALSE; + } + + static GstBuffer * +@@ -151,10 +261,3 @@ gst_rtp_opus_depay_process (GstRTPBaseDepayload * depayload, + + return outbuf; + } +- +-gboolean +-gst_rtp_opus_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpopusdepay", +- GST_RANK_PRIMARY, GST_TYPE_RTP_OPUS_DEPAY); +-} +diff --git a/gst/rtp/gstrtpopusdepay.h b/gst/rtp/gstrtpopusdepay.h +index 38cc85108..7890eb19d 100644 +--- a/gst/rtp/gstrtpopusdepay.h ++++ b/gst/rtp/gstrtpopusdepay.h +@@ -53,7 +53,5 @@ struct _GstRTPOpusDepayClass + + GType gst_rtp_opus_depay_get_type (void); + +-gboolean gst_rtp_opus_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + #endif /* __GST_RTP_OPUS_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpopuspay.c b/gst/rtp/gstrtpopuspay.c +index 871a18248..e62868c37 100644 +--- a/gst/rtp/gstrtpopuspay.c ++++ b/gst/rtp/gstrtpopuspay.c +@@ -19,6 +19,29 @@ + * Boston, MA 02110-1301, USA. + */ + ++/** ++ * SECTION:element-rtpopuspay ++ * @title: rtpopuspay ++ * ++ * rtpopuspay encapsulates Opus-encoded audio data into RTP packets following ++ * the payload format described in RFC 7587. 
++ * ++ * In addition to the RFC, which assumes only mono and stereo payload, ++ * the element supports multichannel Opus audio streams using a non-standardized ++ * SDP config and "MULTIOPUS" codec developed by Google for libwebrtc. When the ++ * input data have more than 2 channels, rtpopuspay will add extra fields to ++ * output caps that can be used to generate SDP in the syntax understood by ++ * libwebrtc. For example in the case of 5.1 audio: ++ * ++ * |[ ++ * a=rtpmap:96 multiopus/48000/6 ++ * a=fmtp:96 num_streams=4;coupled_streams=2;channel_mapping=0,4,1,2,3,5 ++ * ]| ++ * ++ * See https://webrtc-review.googlesource.com/c/src/+/129768 for more details on ++ * multichannel Opus in libwebrtc. ++ */ ++ + #ifdef HAVE_CONFIG_H + # include "config.h" + #endif +@@ -28,18 +51,28 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpopuspay.h" + #include "gstrtputils.h" + + GST_DEBUG_CATEGORY_STATIC (rtpopuspay_debug); + #define GST_CAT_DEFAULT (rtpopuspay_debug) + ++enum ++{ ++ PROP_0, ++ PROP_DTX, ++}; ++ ++#define DEFAULT_DTX FALSE + + static GstStaticPadTemplate gst_rtp_opus_pay_sink_template = +-GST_STATIC_PAD_TEMPLATE ("sink", ++ GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, +- GST_STATIC_CAPS ("audio/x-opus, channel-mapping-family = (int) 0") ++ GST_STATIC_CAPS ("audio/x-opus, channel-mapping-family = (int) 0;" ++ "audio/x-opus, channel-mapping-family = (int) 0, channels = (int) [1, 2];" ++ "audio/x-opus, channel-mapping-family = (int) 1, channels = (int) [3, 255]") + ); + + static GstStaticPadTemplate gst_rtp_opus_pay_src_template = +@@ -50,8 +83,7 @@ GST_STATIC_PAD_TEMPLATE ("src", + "media = (string) \"audio\", " + "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", " + "clock-rate = (int) 48000, " +- "encoding-params = (string) \"2\", " +- "encoding-name = (string) { \"OPUS\", \"X-GST-OPUS-DRAFT-SPITTKA-00\" }") ++ "encoding-name = (string) { \"OPUS\", \"X-GST-OPUS-DRAFT-SPITTKA-00\", \"MULTIOPUS\" }") + ); + + static gboolean gst_rtp_opus_pay_setcaps (GstRTPBasePayload * payload, +@@ -62,25 +94,108 @@ static GstFlowReturn gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload * + payload, GstBuffer * buffer); + + G_DEFINE_TYPE (GstRtpOPUSPay, gst_rtp_opus_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpopuspay, "rtpopuspay", ++ GST_RANK_PRIMARY, GST_TYPE_RTP_OPUS_PAY, rtp_element_init (plugin)); ++ ++#define GST_RTP_OPUS_PAY_CAST(obj) ((GstRtpOPUSPay *)(obj)) ++ ++static void ++gst_rtp_opus_pay_set_property (GObject * object, ++ guint prop_id, const GValue * value, GParamSpec * pspec) ++{ ++ GstRtpOPUSPay *self = GST_RTP_OPUS_PAY (object); ++ ++ switch (prop_id) { ++ case PROP_DTX: ++ self->dtx = g_value_get_boolean (value); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_rtp_opus_pay_get_property (GObject * object, ++ guint prop_id, GValue * value, GParamSpec * pspec) ++{ ++ GstRtpOPUSPay *self = GST_RTP_OPUS_PAY (object); ++ ++ switch (prop_id) { ++ case PROP_DTX: ++ g_value_set_boolean (value, self->dtx); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static GstStateChangeReturn ++gst_rtp_opus_pay_change_state (GstElement * element, GstStateChange transition) ++{ ++ GstRtpOPUSPay *self = GST_RTP_OPUS_PAY (element); ++ GstStateChangeReturn ret; ++ ++ switch (transition) { ++ case GST_STATE_CHANGE_READY_TO_PAUSED: ++ self->marker = TRUE; ++ break; ++ default: ++ break; ++ 
} ++ ++ ret = ++ GST_ELEMENT_CLASS (gst_rtp_opus_pay_parent_class)->change_state (element, ++ transition); ++ ++ switch (transition) { ++ default: ++ break; ++ } ++ ++ return ret; ++} + + static void + gst_rtp_opus_pay_class_init (GstRtpOPUSPayClass * klass) + { + GstRTPBasePayloadClass *gstbasertppayload_class; + GstElementClass *element_class; ++ GObjectClass *gobject_class; + + gstbasertppayload_class = (GstRTPBasePayloadClass *) klass; + element_class = GST_ELEMENT_CLASS (klass); ++ gobject_class = (GObjectClass *) klass; ++ ++ element_class->change_state = gst_rtp_opus_pay_change_state; + + gstbasertppayload_class->set_caps = gst_rtp_opus_pay_setcaps; + gstbasertppayload_class->get_caps = gst_rtp_opus_pay_getcaps; + gstbasertppayload_class->handle_buffer = gst_rtp_opus_pay_handle_buffer; + ++ gobject_class->set_property = gst_rtp_opus_pay_set_property; ++ gobject_class->get_property = gst_rtp_opus_pay_get_property; ++ + gst_element_class_add_static_pad_template (element_class, + &gst_rtp_opus_pay_src_template); + gst_element_class_add_static_pad_template (element_class, + &gst_rtp_opus_pay_sink_template); + ++ /** ++ * GstRtpOPUSPay:dtx: ++ * ++ * If enabled, the payloader will not transmit empty packets. ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_DTX, ++ g_param_spec_boolean ("dtx", "Discontinuous Transmission", ++ "If enabled, the payloader will not transmit empty packets", ++ DEFAULT_DTX, ++ G_PARAM_READWRITE | GST_PARAM_MUTABLE_PLAYING | ++ G_PARAM_STATIC_STRINGS)); ++ + gst_element_class_set_static_metadata (element_class, + "RTP Opus payloader", + "Codec/Payloader/Network/RTP", +@@ -94,6 +209,7 @@ gst_rtp_opus_pay_class_init (GstRtpOPUSPayClass * klass) + static void + gst_rtp_opus_pay_init (GstRtpOPUSPay * rtpopuspay) + { ++ rtpopuspay->dtx = DEFAULT_DTX; + } + + static gboolean +@@ -101,11 +217,13 @@ gst_rtp_opus_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps) + { + gboolean res; + GstCaps *src_caps; +- GstStructure *s; ++ GstStructure *s, *outcaps; + const char *encoding_name = "OPUS"; +- gint channels, rate; +- const char *sprop_stereo = NULL; +- char *sprop_maxcapturerate = NULL; ++ gint channels = 2; ++ gint rate; ++ gchar *encoding_params; ++ ++ outcaps = gst_structure_new_empty ("unused"); + + src_caps = gst_pad_get_allowed_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload)); + if (src_caps) { +@@ -130,41 +248,75 @@ gst_rtp_opus_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps) + s = gst_caps_get_structure (caps, 0); + if (gst_structure_get_int (s, "channels", &channels)) { + if (channels > 2) { +- GST_ERROR_OBJECT (payload, +- "More than 2 channels with channel-mapping-family=0 is invalid"); +- return FALSE; +- } else if (channels == 2) { +- sprop_stereo = "1"; ++ /* Implies channel-mapping-family = 1. */ ++ ++ gint stream_count, coupled_count; ++ const GValue *channel_mapping_array; ++ ++ /* libwebrtc only supports "multiopus" when channels > 2. Mono and stereo ++ * sound must always be payloaded according to RFC 7587. 
*/ ++ encoding_name = "MULTIOPUS"; ++ ++ if (gst_structure_get_int (s, "stream-count", &stream_count)) { ++ char *num_streams = g_strdup_printf ("%d", stream_count); ++ gst_structure_set (outcaps, "num_streams", G_TYPE_STRING, num_streams, ++ NULL); ++ g_free (num_streams); ++ } ++ if (gst_structure_get_int (s, "coupled-count", &coupled_count)) { ++ char *coupled_streams = g_strdup_printf ("%d", coupled_count); ++ gst_structure_set (outcaps, "coupled_streams", G_TYPE_STRING, ++ coupled_streams, NULL); ++ g_free (coupled_streams); ++ } ++ ++ channel_mapping_array = gst_structure_get_value (s, "channel-mapping"); ++ if (GST_VALUE_HOLDS_ARRAY (channel_mapping_array)) { ++ GString *str = g_string_new (NULL); ++ guint i; ++ ++ for (i = 0; i < gst_value_array_get_size (channel_mapping_array); ++i) { ++ if (i != 0) { ++ g_string_append_c (str, ','); ++ } ++ g_string_append_printf (str, "%d", ++ g_value_get_int (gst_value_array_get_value (channel_mapping_array, ++ i))); ++ } ++ ++ gst_structure_set (outcaps, "channel_mapping", G_TYPE_STRING, str->str, ++ NULL); ++ ++ g_string_free (str, TRUE); ++ } + } else { +- sprop_stereo = "0"; ++ gst_structure_set (outcaps, "sprop-stereo", G_TYPE_STRING, ++ (channels == 2) ? "1" : "0", NULL); ++ /* RFC 7587 requires the number of channels always be 2. */ ++ channels = 2; + } + } + ++ encoding_params = g_strdup_printf ("%d", channels); ++ gst_structure_set (outcaps, "encoding-params", G_TYPE_STRING, ++ encoding_params, NULL); ++ g_free (encoding_params); ++ + if (gst_structure_get_int (s, "rate", &rate)) { +- sprop_maxcapturerate = g_strdup_printf ("%d", rate); ++ gchar *sprop_maxcapturerate = g_strdup_printf ("%d", rate); ++ ++ gst_structure_set (outcaps, "sprop-maxcapturerate", G_TYPE_STRING, ++ sprop_maxcapturerate, NULL); ++ ++ g_free (sprop_maxcapturerate); + } + + gst_rtp_base_payload_set_options (payload, "audio", FALSE, + encoding_name, 48000); + +- if (sprop_maxcapturerate && sprop_stereo) { +- res = +- gst_rtp_base_payload_set_outcaps (payload, "sprop-maxcapturerate", +- G_TYPE_STRING, sprop_maxcapturerate, "sprop-stereo", G_TYPE_STRING, +- sprop_stereo, NULL); +- } else if (sprop_maxcapturerate) { +- res = +- gst_rtp_base_payload_set_outcaps (payload, "sprop-maxcapturerate", +- G_TYPE_STRING, sprop_maxcapturerate, NULL); +- } else if (sprop_stereo) { +- res = +- gst_rtp_base_payload_set_outcaps (payload, "sprop-stereo", +- G_TYPE_STRING, sprop_stereo, NULL); +- } else { +- res = gst_rtp_base_payload_set_outcaps (payload, NULL); +- } ++ res = gst_rtp_base_payload_set_outcaps_structure (payload, outcaps); + +- g_free (sprop_maxcapturerate); ++ gst_structure_free (outcaps); + + return res; + } +@@ -173,9 +325,19 @@ static GstFlowReturn + gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload * basepayload, + GstBuffer * buffer) + { ++ GstRtpOPUSPay *self = GST_RTP_OPUS_PAY_CAST (basepayload); + GstBuffer *outbuf; + GstClockTime pts, dts, duration; + ++ /* DTX packets are zero-length frames, with a 1 or 2-bytes header */ ++ if (self->dtx && gst_buffer_get_size (buffer) <= 2) { ++ GST_LOG_OBJECT (self, ++ "discard empty buffer as DTX is enabled: %" GST_PTR_FORMAT, buffer); ++ self->marker = TRUE; ++ gst_buffer_unref (buffer); ++ return GST_FLOW_OK; ++ } ++ + pts = GST_BUFFER_PTS (buffer); + dts = GST_BUFFER_DTS (buffer); + duration = GST_BUFFER_DURATION (buffer); +@@ -190,6 +352,17 @@ gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload * basepayload, + GST_BUFFER_DTS (outbuf) = dts; + GST_BUFFER_DURATION (outbuf) = duration; + ++ if (self->marker) { ++ 
GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ ++ gst_rtp_buffer_map (outbuf, GST_MAP_READWRITE, &rtp); ++ gst_rtp_buffer_set_marker (&rtp, TRUE); ++ gst_rtp_buffer_unmap (&rtp); ++ ++ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER); ++ self->marker = FALSE; ++ } ++ + /* Push out */ + return gst_rtp_base_payload_push (basepayload, outbuf); + } +@@ -198,9 +371,13 @@ static GstCaps * + gst_rtp_opus_pay_getcaps (GstRTPBasePayload * payload, + GstPad * pad, GstCaps * filter) + { +- GstCaps *caps, *peercaps, *tcaps; + GstStructure *s; +- const gchar *stereo; ++ int channel_mapping_family = 0; ++ GstCaps *caps, *peercaps, *tcaps, *tempcaps; ++ static GstStaticCaps opus_static_caps = GST_STATIC_CAPS ("application/x-rtp, " ++ "encoding-name=(string) { \"OPUS\", \"X-GST-OPUS-DRAFT-SPITTKA-00\"}"); ++ static GstStaticCaps multiopus_static_caps = ++ GST_STATIC_CAPS ("application/x-rtp, encoding-name=(string)MULTIOPUS"); + + if (pad == GST_RTP_BASE_PAYLOAD_SRCPAD (payload)) + return +@@ -221,23 +398,59 @@ gst_rtp_opus_pay_getcaps (GstRTPBasePayload * payload, + + caps = gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload)); + +- s = gst_caps_get_structure (peercaps, 0); +- stereo = gst_structure_get_string (s, "stereo"); +- if (stereo != NULL) { +- caps = gst_caps_make_writable (caps); ++ tempcaps = gst_static_caps_get (&opus_static_caps); ++ if (!gst_caps_can_intersect (peercaps, tempcaps)) { ++ GstCaps *multiopuscaps = gst_caps_new_simple ("audio/x-opus", ++ "channel-mapping-family", G_TYPE_INT, 1, ++ "channels", GST_TYPE_INT_RANGE, 3, 255, ++ NULL); ++ GstCaps *intersect_caps; + +- if (!strcmp (stereo, "1")) { +- GstCaps *caps2 = gst_caps_copy (caps); ++ intersect_caps = gst_caps_intersect_full (caps, multiopuscaps, ++ GST_CAPS_INTERSECT_FIRST); ++ gst_caps_unref (caps); ++ gst_caps_unref (multiopuscaps); ++ caps = intersect_caps; ++ } ++ gst_caps_unref (tempcaps); + +- gst_caps_set_simple (caps, "channels", G_TYPE_INT, 2, NULL); +- gst_caps_set_simple (caps2, "channels", G_TYPE_INT, 1, NULL); +- caps = gst_caps_merge (caps, caps2); +- } else if (!strcmp (stereo, "0")) { +- GstCaps *caps2 = gst_caps_copy (caps); ++ tempcaps = gst_static_caps_get (&multiopus_static_caps); ++ if (!gst_caps_can_intersect (peercaps, tempcaps)) { ++ GstCaps *opuscaps = gst_caps_new_simple ("audio/x-opus", ++ "channel-mapping-family", G_TYPE_INT, 0, ++ "channels", GST_TYPE_INT_RANGE, 1, 2, ++ NULL); ++ GstCaps *intersect_caps; + +- gst_caps_set_simple (caps, "channels", G_TYPE_INT, 1, NULL); +- gst_caps_set_simple (caps2, "channels", G_TYPE_INT, 2, NULL); +- caps = gst_caps_merge (caps, caps2); ++ intersect_caps = gst_caps_intersect_full (caps, opuscaps, ++ GST_CAPS_INTERSECT_FIRST); ++ gst_caps_unref (caps); ++ gst_caps_unref (opuscaps); ++ caps = intersect_caps; ++ } ++ gst_caps_unref (tempcaps); ++ ++ s = gst_caps_get_structure (caps, 0); ++ gst_structure_get_int (s, "channel-mapping-family", &channel_mapping_family); ++ if (channel_mapping_family == 0) { ++ GstStructure *sp = gst_caps_get_structure (peercaps, 0); ++ const gchar *stereo = gst_structure_get_string (sp, "stereo"); ++ ++ if (stereo != NULL) { ++ guint channels = 0; ++ ++ if (!strcmp (stereo, "1")) ++ channels = 2; ++ else if (!strcmp (stereo, "0")) ++ channels = 1; ++ ++ if (channels) { ++ GstCaps *caps2 = gst_caps_copy_nth (caps, 0); ++ ++ gst_caps_set_simple (caps2, "channels", G_TYPE_INT, channels, NULL); ++ caps = gst_caps_make_writable (caps); ++ caps = gst_caps_merge (caps2, caps); ++ } + } + } + gst_caps_unref (peercaps); +@@ 
-252,10 +465,3 @@ gst_rtp_opus_pay_getcaps (GstRTPBasePayload * payload, + GST_DEBUG_OBJECT (payload, "Returning caps: %" GST_PTR_FORMAT, caps); + return caps; + } +- +-gboolean +-gst_rtp_opus_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpopuspay", +- GST_RANK_PRIMARY, GST_TYPE_RTP_OPUS_PAY); +-} +diff --git a/gst/rtp/gstrtpopuspay.h b/gst/rtp/gstrtpopuspay.h +index 45f40d85e..b862913af 100644 +--- a/gst/rtp/gstrtpopuspay.h ++++ b/gst/rtp/gstrtpopuspay.h +@@ -44,6 +44,11 @@ typedef struct _GstRtpOPUSPayClass GstRtpOPUSPayClass; + struct _GstRtpOPUSPay + { + GstRTPBasePayload payload; ++ ++ gboolean dtx; ++ ++ /* if the next produced buffer should have the MARKER flag */ ++ gboolean marker; + }; + + struct _GstRtpOPUSPayClass +@@ -53,8 +58,6 @@ struct _GstRtpOPUSPayClass + + GType gst_rtp_opus_pay_get_type (void); + +-gboolean gst_rtp_opus_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_OPUS_PAY_H__ */ +diff --git a/gst/rtp/gstrtppcmadepay.c b/gst/rtp/gstrtppcmadepay.c +index 476a20aad..82727fbe9 100644 +--- a/gst/rtp/gstrtppcmadepay.c ++++ b/gst/rtp/gstrtppcmadepay.c +@@ -26,6 +26,7 @@ + #include + #include + #include ++#include "gstrtpelements.h" + #include "gstrtppcmadepay.h" + #include "gstrtputils.h" + +@@ -69,6 +70,8 @@ static gboolean gst_rtp_pcma_depay_setcaps (GstRTPBaseDepayload * depayload, + #define gst_rtp_pcma_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpPcmaDepay, gst_rtp_pcma_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtppcmadepay, "rtppcmadepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_PCMA_DEPAY, rtp_element_init (plugin)); + + static void + gst_rtp_pcma_depay_class_init (GstRtpPcmaDepayClass * klass) +@@ -155,10 +158,3 @@ gst_rtp_pcma_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp) + + return outbuf; + } +- +-gboolean +-gst_rtp_pcma_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtppcmadepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_PCMA_DEPAY); +-} +diff --git a/gst/rtp/gstrtppcmadepay.h b/gst/rtp/gstrtppcmadepay.h +index e664a2d3d..c74bd0531 100644 +--- a/gst/rtp/gstrtppcmadepay.h ++++ b/gst/rtp/gstrtppcmadepay.h +@@ -46,8 +46,6 @@ struct _GstRtpPcmaDepayClass + + GType gst_rtp_pcma_depay_get_type (void); + +-gboolean gst_rtp_pcma_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_PCMA_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtppcmapay.c b/gst/rtp/gstrtppcmapay.c +index fde1b647c..c51639633 100644 +--- a/gst/rtp/gstrtppcmapay.c ++++ b/gst/rtp/gstrtppcmapay.c +@@ -27,6 +27,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtppcmapay.h" + + static GstStaticPadTemplate gst_rtp_pcma_pay_sink_template = +@@ -56,6 +57,8 @@ static gboolean gst_rtp_pcma_pay_setcaps (GstRTPBasePayload * payload, + #define gst_rtp_pcma_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpPcmaPay, gst_rtp_pcma_pay, + GST_TYPE_RTP_BASE_AUDIO_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtppcmapay, "rtppcmapay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_PCMA_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_pcma_pay_class_init (GstRtpPcmaPayClass * klass) +@@ -107,10 +110,3 @@ gst_rtp_pcma_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps) + + return res; + } +- +-gboolean +-gst_rtp_pcma_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtppcmapay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_PCMA_PAY); +-} +diff --git a/gst/rtp/gstrtppcmapay.h 
b/gst/rtp/gstrtppcmapay.h +index 4e084ad09..7f67dffcd 100644 +--- a/gst/rtp/gstrtppcmapay.h ++++ b/gst/rtp/gstrtppcmapay.h +@@ -47,8 +47,6 @@ struct _GstRtpPcmaPayClass + + GType gst_rtp_pcma_pay_get_type (void); + +-gboolean gst_rtp_pcma_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_PCMA_PAY_H__ */ +diff --git a/gst/rtp/gstrtppcmudepay.c b/gst/rtp/gstrtppcmudepay.c +index 1a06d187a..9bd3afb3d 100644 +--- a/gst/rtp/gstrtppcmudepay.c ++++ b/gst/rtp/gstrtppcmudepay.c +@@ -26,6 +26,7 @@ + #include + #include + #include ++#include "gstrtpelements.h" + #include "gstrtppcmudepay.h" + #include "gstrtputils.h" + +@@ -70,6 +71,8 @@ static gboolean gst_rtp_pcmu_depay_setcaps (GstRTPBaseDepayload * depayload, + #define gst_rtp_pcmu_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpPcmuDepay, gst_rtp_pcmu_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtppcmudepay, "rtppcmudepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_PCMU_DEPAY, rtp_element_init (plugin)); + + static void + gst_rtp_pcmu_depay_class_init (GstRtpPcmuDepayClass * klass) +@@ -156,10 +159,3 @@ gst_rtp_pcmu_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp) + + return outbuf; + } +- +-gboolean +-gst_rtp_pcmu_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtppcmudepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_PCMU_DEPAY); +-} +diff --git a/gst/rtp/gstrtppcmudepay.h b/gst/rtp/gstrtppcmudepay.h +index 6a97559f8..a3d017349 100644 +--- a/gst/rtp/gstrtppcmudepay.h ++++ b/gst/rtp/gstrtppcmudepay.h +@@ -46,8 +46,6 @@ struct _GstRtpPcmuDepayClass + + GType gst_rtp_pcmu_depay_get_type (void); + +-gboolean gst_rtp_pcmu_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_PCMU_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtppcmupay.c b/gst/rtp/gstrtppcmupay.c +index 5931f85e2..5662789cb 100644 +--- a/gst/rtp/gstrtppcmupay.c ++++ b/gst/rtp/gstrtppcmupay.c +@@ -27,6 +27,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtppcmupay.h" + + static GstStaticPadTemplate gst_rtp_pcmu_pay_sink_template = +@@ -56,6 +57,8 @@ static gboolean gst_rtp_pcmu_pay_setcaps (GstRTPBasePayload * payload, + #define gst_rtp_pcmu_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpPcmuPay, gst_rtp_pcmu_pay, + GST_TYPE_RTP_BASE_AUDIO_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtppcmupay, "rtppcmupay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_PCMU_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_pcmu_pay_class_init (GstRtpPcmuPayClass * klass) +@@ -107,10 +110,3 @@ gst_rtp_pcmu_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps) + + return res; + } +- +-gboolean +-gst_rtp_pcmu_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtppcmupay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_PCMU_PAY); +-} +diff --git a/gst/rtp/gstrtppcmupay.h b/gst/rtp/gstrtppcmupay.h +index 7529d461c..cdeb3a2bf 100644 +--- a/gst/rtp/gstrtppcmupay.h ++++ b/gst/rtp/gstrtppcmupay.h +@@ -47,8 +47,6 @@ struct _GstRtpPcmuPayClass + + GType gst_rtp_pcmu_pay_get_type (void); + +-gboolean gst_rtp_pcmu_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_PCMU_PAY_H__ */ +diff --git a/gst/rtp/gstrtpqcelpdepay.c b/gst/rtp/gstrtpqcelpdepay.c +index cb5d9eb95..949cf605b 100644 +--- a/gst/rtp/gstrtpqcelpdepay.c ++++ b/gst/rtp/gstrtpqcelpdepay.c +@@ -26,6 +26,7 @@ + + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpqcelpdepay.h" + #include "gstrtputils.h" + +@@ -81,6 
+82,8 @@ static GstBuffer *gst_rtp_qcelp_depay_process (GstRTPBaseDepayload * depayload, + #define gst_rtp_qcelp_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpQCELPDepay, gst_rtp_qcelp_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpqcelpdepay, "rtpqcelpdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_QCELP_DEPAY, rtp_element_init (plugin)); + + static void + gst_rtp_qcelp_depay_class_init (GstRtpQCELPDepayClass * klass) +@@ -423,10 +426,3 @@ invalid_frame: + return NULL; + } + } +- +-gboolean +-gst_rtp_qcelp_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpqcelpdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_QCELP_DEPAY); +-} +diff --git a/gst/rtp/gstrtpqcelpdepay.h b/gst/rtp/gstrtpqcelpdepay.h +index ade274de0..f278a7339 100644 +--- a/gst/rtp/gstrtpqcelpdepay.h ++++ b/gst/rtp/gstrtpqcelpdepay.h +@@ -55,8 +55,6 @@ struct _GstRtpQCELPDepayClass + + GType gst_rtp_qcelp_depay_get_type (void); + +-gboolean gst_rtp_qcelp_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_QCELP_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpqdmdepay.c b/gst/rtp/gstrtpqdmdepay.c +index 898663aa7..3edfb4630 100644 +--- a/gst/rtp/gstrtpqdmdepay.c ++++ b/gst/rtp/gstrtpqdmdepay.c +@@ -25,6 +25,7 @@ + + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpqdmdepay.h" + #include "gstrtputils.h" + +@@ -49,6 +50,12 @@ GST_STATIC_PAD_TEMPLATE ("sink", + #define gst_rtp_qdm2_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpQDM2Depay, gst_rtp_qdm2_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++#define _do_init \ ++ GST_DEBUG_CATEGORY_INIT (rtpqdm2depay_debug, "rtpqdm2depay", 0, \ ++ "RTP QDM2 depayloader"); \ ++ rtp_element_init (plugin) ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpqdm2depay, "rtpqdm2depay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_QDM2_DEPAY, _do_init); + + static const guint8 headheader[20] = { + 0x0, 0x0, 0x0, 0xc, 0x66, 0x72, 0x6d, 0x61, +@@ -402,13 +409,3 @@ gst_rtp_qdm2_depay_change_state (GstElement * element, + } + return ret; + } +- +-gboolean +-gst_rtp_qdm2_depay_plugin_init (GstPlugin * plugin) +-{ +- GST_DEBUG_CATEGORY_INIT (rtpqdm2depay_debug, "rtpqdm2depay", 0, +- "RTP QDM2 depayloader"); +- +- return gst_element_register (plugin, "rtpqdm2depay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_QDM2_DEPAY); +-} +diff --git a/gst/rtp/gstrtpqdmdepay.h b/gst/rtp/gstrtpqdmdepay.h +index 8a6b2b20a..d5d37566e 100644 +--- a/gst/rtp/gstrtpqdmdepay.h ++++ b/gst/rtp/gstrtpqdmdepay.h +@@ -78,8 +78,6 @@ struct _GstRtpQDM2DepayClass + + GType gst_rtp_qdm2_depay_get_type (void); + +-gboolean gst_rtp_qdm2_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_QDM2_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpreddec.c b/gst/rtp/gstrtpreddec.c +index 1e4781727..6c7ba363f 100644 +--- a/gst/rtp/gstrtpreddec.c ++++ b/gst/rtp/gstrtpreddec.c +@@ -48,6 +48,7 @@ + + #include + ++#include "gstrtpelements.h" + #include "rtpredcommon.h" + #include "gstrtpreddec.h" + #include "rtpulpfeccommon.h" +@@ -82,12 +83,15 @@ GST_DEBUG_CATEGORY_STATIC (gst_rtp_red_dec_debug); + #define GST_CAT_DEFAULT gst_rtp_red_dec_debug + + G_DEFINE_TYPE (GstRtpRedDec, gst_rtp_red_dec, GST_TYPE_ELEMENT); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpreddec, "rtpreddec", GST_RANK_NONE, ++ GST_TYPE_RTP_RED_DEC, rtp_element_init (plugin)); + + enum + { + PROP_0, + PROP_PT, +- PROP_RECEIVED ++ PROP_RECEIVED, ++ PROP_PAYLOADS, + }; + + static RTPHistItem * +@@ -120,7 +124,8 @@ gst_rtp_red_history_find_less (gconstpointer item, 
gconstpointer timestamp) + } + + static void +-gst_rtp_red_history_update (GstRtpRedDec * self, GstRTPBuffer * rtp) ++gst_rtp_red_history_update (GstRtpRedDec * self, GQueue * rtp_history, ++ GstRTPBuffer * rtp) + { + RTPHistItem *item; + GList *link, *sibling; +@@ -129,12 +134,12 @@ gst_rtp_red_history_update (GstRtpRedDec * self, GstRTPBuffer * rtp) + * allocate a new link and a new item, + * otherwise reuse the tail (the oldest data) without any reallocations + */ +- if (self->rtp_history->length < RTP_HISTORY_MAX_SIZE) { ++ if (rtp_history->length < RTP_HISTORY_MAX_SIZE) { + item = rtp_hist_item_alloc (); + link = g_list_alloc (); + link->data = item; + } else { +- link = g_queue_pop_tail_link (self->rtp_history); ++ link = g_queue_pop_tail_link (rtp_history); + item = link->data; + } + +@@ -144,11 +149,11 @@ gst_rtp_red_history_update (GstRtpRedDec * self, GstRTPBuffer * rtp) + /* Looking for a place to insert new link. + * The queue has newest to oldest rtp timestamps, so in 99% cases + * it is inserted before the head of the queue */ +- sibling = g_list_find_custom (self->rtp_history->head, ++ sibling = g_list_find_custom (rtp_history->head, + GUINT_TO_POINTER (item->timestamp), + gst_rtp_red_history_find_less_or_equal); +- g_queue_push_nth_link (self->rtp_history, +- g_list_position (self->rtp_history->head, sibling), link); ++ g_queue_push_nth_link (rtp_history, ++ g_list_position (rtp_history->head, sibling), link); + } + + static gboolean +@@ -216,9 +221,9 @@ red_buffer_invalid: + + static gboolean + gst_red_history_lost_seq_num_for_timestamp (GstRtpRedDec * self, +- guint32 timestamp, guint16 * dst_seq_num) ++ GQueue * rtp_history, guint32 timestamp, guint16 * dst_seq_num) + { +- GList *older_sibling = g_list_find_custom (self->rtp_history->head, ++ GList *older_sibling = g_list_find_custom (rtp_history->head, + GUINT_TO_POINTER (timestamp), + gst_rtp_red_history_find_less); + RTPHistItem *older; +@@ -227,19 +232,19 @@ gst_red_history_lost_seq_num_for_timestamp (GstRtpRedDec * self, + gint seq_diff, lost_packet_idx; + + if (NULL == older_sibling) { +- if (self->rtp_history->length == RTP_HISTORY_MAX_SIZE) ++ if (rtp_history->length == RTP_HISTORY_MAX_SIZE) + GST_WARNING_OBJECT (self, "History is too short. " + "Oldest rtp timestamp %u, looking for %u, size %u", +- RTP_HIST_ITEM_TIMESTAMP (self->rtp_history->tail->data), +- timestamp, self->rtp_history->length); ++ RTP_HIST_ITEM_TIMESTAMP (rtp_history->tail->data), ++ timestamp, rtp_history->length); + return FALSE; + } + + if (NULL == older_sibling->prev) { + GST_WARNING_OBJECT (self, "RED block timestamp offset probably wrong. 
" + "Latest rtp timestamp %u, looking for %u, size %u", +- RTP_HIST_ITEM_TIMESTAMP (self->rtp_history->head->data), +- timestamp, self->rtp_history->length); ++ RTP_HIST_ITEM_TIMESTAMP (rtp_history->head->data), ++ timestamp, rtp_history->length); + return FALSE; + } + +@@ -308,12 +313,15 @@ gst_rtp_red_create_packet (GstRtpRedDec * self, GstRTPBuffer * red_rtp, + + /* Timestamps, meta, flags from the RED packet should go to main block packet */ + gst_buffer_copy_into (ret, red_rtp->buffer, GST_BUFFER_COPY_METADATA, 0, -1); ++ if (marker) ++ GST_BUFFER_FLAG_SET (ret, GST_BUFFER_FLAG_MARKER); + return ret; + } + + static GstBuffer * + gst_rtp_red_create_from_redundant_block (GstRtpRedDec * self, +- GstRTPBuffer * red_rtp, gsize * red_hdr_offset, gsize * red_payload_offset) ++ GQueue * rtp_history, GstRTPBuffer * red_rtp, gsize * red_hdr_offset, ++ gsize * red_payload_offset) + { + guint8 *payload = gst_rtp_buffer_get_payload (red_rtp); + guint8 *red_hdr = payload + *red_hdr_offset; +@@ -322,11 +330,12 @@ gst_rtp_red_create_from_redundant_block (GstRtpRedDec * self, + + GstBuffer *ret = NULL; + guint16 lost_seq = 0; +- if (gst_red_history_lost_seq_num_for_timestamp (self, lost_timestamp, +- &lost_seq)) { +- GST_LOG_OBJECT (self, "Recovering from RED packet pt=%u ts=%u seq=%u" +- " len=%u present", rtp_red_block_get_payload_type (red_hdr), +- lost_timestamp, lost_seq, rtp_red_block_get_payload_length (red_hdr)); ++ if (gst_red_history_lost_seq_num_for_timestamp (self, rtp_history, ++ lost_timestamp, &lost_seq)) { ++ GST_LOG_OBJECT (self, ++ "Recovering from RED packet pt=%u ts=%u seq=%u" " len=%u present", ++ rtp_red_block_get_payload_type (red_hdr), lost_timestamp, lost_seq, ++ rtp_red_block_get_payload_length (red_hdr)); + ret = + gst_rtp_red_create_packet (self, red_rtp, FALSE, + rtp_red_block_get_payload_type (red_hdr), lost_seq, lost_timestamp, +@@ -364,13 +373,13 @@ gst_rtp_red_create_from_main_block (GstRtpRedDec * self, + } + + static GstBuffer * +-gst_rtp_red_create_from_block (GstRtpRedDec * self, GstRTPBuffer * red_rtp, +- gsize * red_hdr_offset, gsize * red_payload_offset) ++gst_rtp_red_create_from_block (GstRtpRedDec * self, GQueue * rtp_history, ++ GstRTPBuffer * red_rtp, gsize * red_hdr_offset, gsize * red_payload_offset) + { + guint8 *payload = gst_rtp_buffer_get_payload (red_rtp); + + if (rtp_red_block_is_redundant (payload + (*red_hdr_offset))) +- return gst_rtp_red_create_from_redundant_block (self, red_rtp, ++ return gst_rtp_red_create_from_redundant_block (self, rtp_history, red_rtp, + red_hdr_offset, red_payload_offset); + + return gst_rtp_red_create_from_main_block (self, red_rtp, *red_hdr_offset, +@@ -378,8 +387,8 @@ gst_rtp_red_create_from_block (GstRtpRedDec * self, GstRTPBuffer * red_rtp, + } + + static GstFlowReturn +-gst_rtp_red_process (GstRtpRedDec * self, GstRTPBuffer * red_rtp, +- gsize first_red_payload_offset) ++gst_rtp_red_process (GstRtpRedDec * self, GQueue * rtp_history, ++ GstRTPBuffer * red_rtp, gsize first_red_payload_offset) + { + gsize red_hdr_offset = 0; + gsize red_payload_offset = first_red_payload_offset; +@@ -387,8 +396,8 @@ gst_rtp_red_process (GstRtpRedDec * self, GstRTPBuffer * red_rtp, + GstFlowReturn ret = GST_FLOW_OK; + + do { +- GstBuffer *buf = +- gst_rtp_red_create_from_block (self, red_rtp, &red_hdr_offset, ++ GstBuffer *buf = gst_rtp_red_create_from_block (self, rtp_history, red_rtp, ++ &red_hdr_offset, + &red_payload_offset); + if (buf) + ret = gst_pad_push (self->srcpad, buf); +@@ -397,6 +406,25 @@ gst_rtp_red_process (GstRtpRedDec 
* self, GstRTPBuffer * red_rtp, + return ret; + } + ++static gboolean ++is_red_pt (GstRtpRedDec * self, guint8 pt) ++{ ++ gboolean ret; ++ ++ g_mutex_lock (&self->lock); ++ if (pt == self->pt) { ++ ret = TRUE; ++ goto done; ++ } ++ ++ ret = self->payloads ++ && g_hash_table_contains (self->payloads, GINT_TO_POINTER (pt)); ++ ++done: ++ g_mutex_unlock (&self->lock); ++ return ret; ++} ++ + static GstFlowReturn + gst_rtp_red_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + { +@@ -404,16 +432,27 @@ gst_rtp_red_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + GstRTPBuffer irtp = GST_RTP_BUFFER_INIT; + GstFlowReturn ret = GST_FLOW_OK; + gsize first_red_payload_offset = 0; ++ GQueue *rtp_history; ++ guint32 ssrc; + +- if (self->pt == UNDEF_PT) ++ if (self->pt == UNDEF_PT && self->payloads == NULL) + return gst_pad_push (self->srcpad, buffer); + + if (!gst_rtp_buffer_map (buffer, GST_MAP_READ, &irtp)) + return gst_pad_push (self->srcpad, buffer); + +- gst_rtp_red_history_update (self, &irtp); ++ ssrc = gst_rtp_buffer_get_ssrc (&irtp); ++ ++ if (!(rtp_history = ++ g_hash_table_lookup (self->rtp_histories, GUINT_TO_POINTER (ssrc)))) { ++ rtp_history = g_queue_new (); ++ g_hash_table_insert (self->rtp_histories, GUINT_TO_POINTER (ssrc), ++ rtp_history); ++ } ++ ++ gst_rtp_red_history_update (self, rtp_history, &irtp); + +- if (self->pt != gst_rtp_buffer_get_payload_type (&irtp)) { ++ if (!is_red_pt (self, gst_rtp_buffer_get_payload_type (&irtp))) { + GST_LOG_RTP_PACKET (self, "rtp header (incoming)", &irtp); + + gst_rtp_buffer_unmap (&irtp); +@@ -424,7 +463,9 @@ gst_rtp_red_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + + if (rtp_red_buffer_is_valid (self, &irtp, &first_red_payload_offset)) { + GST_DEBUG_RTP_PACKET (self, "rtp header (red)", &irtp); +- ret = gst_rtp_red_process (self, &irtp, first_red_payload_offset); ++ ret = ++ gst_rtp_red_process (self, rtp_history, &irtp, ++ first_red_payload_offset); + } + + gst_rtp_buffer_unmap (&irtp); +@@ -437,11 +478,23 @@ gst_rtp_red_dec_dispose (GObject * obj) + { + GstRtpRedDec *self = GST_RTP_RED_DEC (obj); + +- g_queue_free_full (self->rtp_history, rtp_hist_item_free); ++ g_hash_table_unref (self->rtp_histories); ++ ++ if (self->payloads) { ++ g_hash_table_unref (self->payloads); ++ } ++ ++ g_mutex_clear (&self->lock); + + G_OBJECT_CLASS (gst_rtp_red_dec_parent_class)->dispose (obj); + } + ++static void ++free_rtp_history (GQueue * rtp_history) ++{ ++ g_queue_free_full (rtp_history, rtp_hist_item_free); ++} ++ + static void + gst_rtp_red_dec_init (GstRtpRedDec * self) + { +@@ -463,10 +516,13 @@ gst_rtp_red_dec_init (GstRtpRedDec * self) + + self->pt = DEFAULT_PT; + self->num_received = 0; +- self->rtp_history = g_queue_new (); ++ self->rtp_histories = ++ g_hash_table_new_full (g_direct_hash, g_direct_equal, NULL, ++ (GDestroyNotify) free_rtp_history); ++ self->payloads = NULL; ++ g_mutex_init (&self->lock); + } + +- + static void + gst_rtp_red_dec_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +@@ -475,14 +531,51 @@ gst_rtp_red_dec_set_property (GObject * object, guint prop_id, + + switch (prop_id) { + case PROP_PT: ++ g_mutex_lock (&self->lock); + self->pt = g_value_get_int (value); ++ g_mutex_unlock (&self->lock); ++ break; ++ case PROP_PAYLOADS: ++ { ++ guint i, n_vals; ++ ++ g_mutex_lock (&self->lock); ++ if (self->payloads) { ++ g_hash_table_unref (self->payloads); ++ self->payloads = NULL; ++ } ++ ++ n_vals = gst_value_array_get_size (value); ++ ++ 
if (n_vals > 0) { ++ self->payloads = g_hash_table_new (g_direct_hash, g_direct_equal); ++ ++ for (i = 0; i < gst_value_array_get_size (value); i++) { ++ const GValue *val = gst_value_array_get_value (value, i); ++ ++ g_hash_table_insert (self->payloads, ++ GINT_TO_POINTER (g_value_get_int (val)), NULL); ++ } ++ } ++ g_mutex_unlock (&self->lock); + break; ++ } + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } + } + ++static void ++append_payload (gpointer key, gpointer value, GValue * array) ++{ ++ GValue v = { 0, }; ++ g_value_init (&v, G_TYPE_INT); ++ g_value_set_int (&v, GPOINTER_TO_INT (key)); ++ gst_value_array_append_value (array, &v); ++ g_value_unset (&v); ++} ++ + static void + gst_rtp_red_dec_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec) +@@ -490,11 +583,22 @@ gst_rtp_red_dec_get_property (GObject * object, guint prop_id, + GstRtpRedDec *self = GST_RTP_RED_DEC (object); + switch (prop_id) { + case PROP_PT: ++ g_mutex_lock (&self->lock); + g_value_set_int (value, self->pt); ++ g_mutex_unlock (&self->lock); + break; + case PROP_RECEIVED: + g_value_set_uint (value, self->num_received); + break; ++ case PROP_PAYLOADS: ++ { ++ g_mutex_lock (&self->lock); ++ if (self->payloads) { ++ g_hash_table_foreach (self->payloads, (GHFunc) append_payload, value); ++ } ++ g_mutex_unlock (&self->lock); ++ break; ++ } + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -538,6 +642,27 @@ gst_rtp_red_dec_class_init (GstRtpRedDecClass * klass) + "Count of received packets", + 0, G_MAXUINT32, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); + ++ /** ++ * rtpreddec:payloads: ++ * ++ * All the RED payloads this decoder may encounter ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (G_OBJECT_CLASS (klass), ++ PROP_PAYLOADS, ++ gst_param_spec_array ("payloads", ++ "RED payloads", ++ "All the RED payloads this decoder may encounter", ++ g_param_spec_int ("pt", ++ "payload type", ++ "A RED payload type", ++ MIN_PT, MAX_PT, ++ DEFAULT_PT, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS), ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS) ++ ); ++ + GST_DEBUG_CATEGORY_INIT (gst_rtp_red_dec_debug, "rtpreddec", 0, + "RTP RED Decoder"); + } +diff --git a/gst/rtp/gstrtpreddec.h b/gst/rtp/gstrtpreddec.h +index 1ab864e2c..a1d89bf05 100644 +--- a/gst/rtp/gstrtpreddec.h ++++ b/gst/rtp/gstrtpreddec.h +@@ -51,7 +51,14 @@ struct _GstRtpRedDec { + gint pt; + guint num_received; + +- GQueue *rtp_history; ++ /* Per ssrc */ ++ GHashTable *rtp_histories; ++ ++ /* To track all FEC payload types */ ++ GHashTable *payloads; ++ ++ /* Protects pt and payloads */ ++ GMutex lock; + }; + + GType gst_rtp_red_dec_get_type (void); +diff --git a/gst/rtp/gstrtpredenc.c b/gst/rtp/gstrtpredenc.c +index f192b7862..94062fea9 100644 +--- a/gst/rtp/gstrtpredenc.c ++++ b/gst/rtp/gstrtpredenc.c +@@ -52,6 +52,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "rtpredcommon.h" + #include "gstrtpredenc.h" + +@@ -78,8 +79,9 @@ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src", + + GST_DEBUG_CATEGORY_STATIC (gst_rtp_red_enc_debug); + #define GST_CAT_DEFAULT (gst_rtp_red_enc_debug) +- + G_DEFINE_TYPE (GstRtpRedEnc, gst_rtp_red_enc, GST_TYPE_ELEMENT); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpredenc, "rtpredenc", GST_RANK_NONE, ++ GST_TYPE_RTP_RED_ENC, rtp_element_init (plugin)); + + enum + { +@@ -155,8 +157,11 @@ _alloc_red_packet_and_fill_headers (GstRtpRedEnc * self, + g_assert_not_reached (); 
+ + /* Copying RTP header of incoming packet */ +- if (gst_rtp_buffer_get_extension (inp_rtp)) +- GST_WARNING_OBJECT (self, "FIXME: Ignoring RTP extension"); ++ if (gst_rtp_buffer_get_extension (inp_rtp) ++ && !self->ignoring_extension_warned) { ++ GST_FIXME_OBJECT (self, "Ignoring RTP extension"); ++ self->ignoring_extension_warned = TRUE; ++ } + + gst_rtp_buffer_set_marker (&red_rtp, gst_rtp_buffer_get_marker (inp_rtp)); + gst_rtp_buffer_set_payload_type (&red_rtp, self->pt); +@@ -183,6 +188,27 @@ _alloc_red_packet_and_fill_headers (GstRtpRedEnc * self, + rtp_red_block_set_payload_type (red_block_header, + gst_rtp_buffer_get_payload_type (inp_rtp)); + ++ /* FIXME: remove that logic once https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/923 ++ * has been addressed. */ ++ if (self->twcc_ext_id != 0) { ++ guint8 appbits; ++ gpointer inp_data; ++ guint inp_size; ++ guint16 data; ++ ++ /* If the input buffer was meant to hold a TWCC seqnum, we also do that ++ * for our wrapper */ ++ if (gst_rtp_buffer_get_extension_onebyte_header (inp_rtp, self->twcc_ext_id, ++ 0, &inp_data, &inp_size)) { ++ gst_rtp_buffer_add_extension_onebyte_header (&red_rtp, self->twcc_ext_id, ++ &data, sizeof (guint16)); ++ } else if (gst_rtp_buffer_get_extension_twobytes_header (inp_rtp, &appbits, ++ self->twcc_ext_id, 0, &inp_data, &inp_size)) { ++ gst_rtp_buffer_add_extension_twobytes_header (&red_rtp, appbits, ++ self->twcc_ext_id, &data, sizeof (guint16)); ++ } ++ } ++ + gst_rtp_buffer_unmap (&red_rtp); + + gst_buffer_copy_into (red, inp_rtp->buffer, GST_BUFFER_COPY_METADATA, 0, -1); +@@ -358,6 +384,31 @@ gst_rtp_red_enc_chain (GstPad G_GNUC_UNUSED * pad, GstObject * parent, + return _push_red_packet (self, &rtp, buffer, redundant_block, distance); + } + ++static guint8 ++_get_extmap_id_for_attribute (const GstStructure * s, const gchar * ext_name) ++{ ++ guint i; ++ guint8 extmap_id = 0; ++ guint n_fields = gst_structure_n_fields (s); ++ ++ for (i = 0; i < n_fields; i++) { ++ const gchar *field_name = gst_structure_nth_field_name (s, i); ++ if (g_str_has_prefix (field_name, "extmap-")) { ++ const gchar *str = gst_structure_get_string (s, field_name); ++ if (str && g_strcmp0 (str, ext_name) == 0) { ++ gint64 id = g_ascii_strtoll (field_name + 7, NULL, 10); ++ if (id > 0 && id < 15) { ++ extmap_id = id; ++ break; ++ } ++ } ++ } ++ } ++ return extmap_id; ++} ++ ++#define TWCC_EXTMAP_STR "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01" ++ + static gboolean + gst_rtp_red_enc_event_sink (GstPad * pad, GstObject * parent, GstEvent * event) + { +@@ -366,12 +417,18 @@ gst_rtp_red_enc_event_sink (GstPad * pad, GstObject * parent, GstEvent * event) + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_CAPS: + { ++ GstCaps *caps; ++ GstStructure *s; + gboolean replace_with_red_caps = + self->is_current_caps_red || self->allow_no_red_blocks; + ++ gst_event_parse_caps (event, &caps); ++ s = gst_caps_get_structure (caps, 0); ++ self->twcc_ext_id = _get_extmap_id_for_attribute (s, TWCC_EXTMAP_STR); ++ ++ GST_INFO_OBJECT (self, "TWCC extension ID: %u", self->twcc_ext_id); ++ + if (replace_with_red_caps) { +- GstCaps *caps; +- gst_event_parse_caps (event, &caps); + gst_event_take (&event, _create_caps_event (caps, self->pt)); + + self->is_current_caps_red = TRUE; +@@ -421,6 +478,7 @@ gst_rtp_red_enc_init (GstRtpRedEnc * self) + self->allow_no_red_blocks = DEFAULT_ALLOW_NO_RED_BLOCKS; + self->num_sent = 0; + self->rtp_history = g_queue_new (); ++ self->ignoring_extension_warned = FALSE; + } + + 
+diff --git a/gst/rtp/gstrtpredenc.h b/gst/rtp/gstrtpredenc.h +index dc2b1ebbc..848aeea67 100644 +--- a/gst/rtp/gstrtpredenc.h ++++ b/gst/rtp/gstrtpredenc.h +@@ -56,6 +56,9 @@ struct _GstRtpRedEnc { + GQueue *rtp_history; + gboolean send_caps; + gboolean is_current_caps_red; ++ guint8 twcc_ext_id; ++ ++ gboolean ignoring_extension_warned; + }; + + GType gst_rtp_red_enc_get_type (void); +diff --git a/gst/rtp/gstrtpsbcdepay.c b/gst/rtp/gstrtpsbcdepay.c +index 9549bb06a..f5dec8b78 100644 +--- a/gst/rtp/gstrtpsbcdepay.c ++++ b/gst/rtp/gstrtpsbcdepay.c +@@ -26,6 +26,7 @@ + + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpsbcdepay.h" + #include "gstrtputils.h" + +@@ -64,6 +65,8 @@ enum + + #define gst_rtp_sbc_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpSbcDepay, gst_rtp_sbc_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpsbcdepay, "rtpsbcdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_SBC_DEPAY, rtp_element_init (plugin)); + + static void gst_rtp_sbc_depay_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec); +@@ -386,10 +389,3 @@ bad_packet: + ("Received invalid RTP payload, dropping"), (NULL)); + goto out; + } +- +-gboolean +-gst_rtp_sbc_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpsbcdepay", GST_RANK_SECONDARY, +- GST_TYPE_RTP_SBC_DEPAY); +-} +diff --git a/gst/rtp/gstrtpsbcdepay.h b/gst/rtp/gstrtpsbcdepay.h +index 9cac3add2..bc5a39672 100644 +--- a/gst/rtp/gstrtpsbcdepay.h ++++ b/gst/rtp/gstrtpsbcdepay.h +@@ -64,7 +64,5 @@ struct _GstRtpSbcDepayClass + + GType gst_rtp_sbc_depay_get_type (void); + +-gboolean gst_rtp_sbc_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + #endif +diff --git a/gst/rtp/gstrtpsbcpay.c b/gst/rtp/gstrtpsbcpay.c +index f2cf849ec..676416bc0 100644 +--- a/gst/rtp/gstrtpsbcpay.c ++++ b/gst/rtp/gstrtpsbcpay.c +@@ -24,6 +24,7 @@ + #endif + + #include ++#include "gstrtpelements.h" + #include "gstrtpsbcpay.h" + #include + #include +@@ -33,42 +34,6 @@ + #define DEFAULT_MIN_FRAMES 0 + #define RTP_SBC_HEADER_TOTAL (12 + RTP_SBC_PAYLOAD_HEADER_SIZE) + +-/* BEGIN: Packing for rtp_payload */ +-#ifdef _MSC_VER +-#pragma pack(push, 1) +-#endif +- +-#if G_BYTE_ORDER == G_LITTLE_ENDIAN +-/* FIXME: this seems all a bit over the top for a single byte.. 
*/ +-struct rtp_payload +-{ +- guint8 frame_count:4; +- guint8 rfa0:1; +- guint8 is_last_fragment:1; +- guint8 is_first_fragment:1; +- guint8 is_fragmented:1; +-} +-#elif G_BYTE_ORDER == G_BIG_ENDIAN +-struct rtp_payload +-{ +- guint8 is_fragmented:1; +- guint8 is_first_fragment:1; +- guint8 is_last_fragment:1; +- guint8 rfa0:1; +- guint8 frame_count:4; +-} +-#else +-#error "Unknown byte order" +-#endif +- +-#ifdef _MSC_VER +-; +-#pragma pack(pop) +-#else +-__attribute__ ((packed)); +-#endif +-/* END: Packing for rtp_payload */ +- + enum + { + PROP_0, +@@ -80,6 +45,8 @@ GST_DEBUG_CATEGORY_STATIC (gst_rtp_sbc_pay_debug); + + #define parent_class gst_rtp_sbc_pay_parent_class + G_DEFINE_TYPE (GstRtpSBCPay, gst_rtp_sbc_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpsbcpay, "rtpsbcpay", GST_RANK_NONE, ++ GST_TYPE_RTP_SBC_PAY, rtp_element_init (plugin)); + + static GstStaticPadTemplate gst_rtp_sbc_pay_sink_factory = + GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, +@@ -106,6 +73,8 @@ static void gst_rtp_sbc_pay_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); + static void gst_rtp_sbc_pay_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); ++static GstStateChangeReturn gst_rtp_sbc_pay_change_state (GstElement * element, ++ GstStateChange transition); + + static gint + gst_rtp_sbc_pay_get_frame_len (gint subbands, gint channels, +@@ -168,7 +137,7 @@ gst_rtp_sbc_pay_set_caps (GstRTPBasePayload * payload, GstCaps * caps) + } + + static GstFlowReturn +-gst_rtp_sbc_pay_flush_buffers (GstRtpSBCPay * sbcpay) ++gst_rtp_sbc_pay_drain_buffers (GstRtpSBCPay * sbcpay) + { + GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; + guint available; +@@ -177,7 +146,6 @@ gst_rtp_sbc_pay_flush_buffers (GstRtpSBCPay * sbcpay) + guint8 *payload_data; + guint frame_count; + guint payload_length; +- struct rtp_payload *payload; + GstFlowReturn res; + + if (sbcpay->frame_length == 0) { +@@ -209,9 +177,8 @@ gst_rtp_sbc_pay_flush_buffers (GstRtpSBCPay * sbcpay) + + /* write header and copy data into payload */ + payload_data = gst_rtp_buffer_get_payload (&rtp); +- payload = (struct rtp_payload *) payload_data; +- memset (payload, 0, sizeof (struct rtp_payload)); +- payload->frame_count = frame_count; ++ /* upper 3 fragment bits not used, ref A2DP v13, 4.3.4 */ ++ payload_data[0] = frame_count & 0x0f; + + gst_rtp_buffer_unmap (&rtp); + +@@ -246,7 +213,7 @@ gst_rtp_sbc_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer) + + if (GST_BUFFER_IS_DISCONT (buffer)) { + /* Try to flush whatever's left */ +- gst_rtp_sbc_pay_flush_buffers (sbcpay); ++ gst_rtp_sbc_pay_drain_buffers (sbcpay); + /* Drop the rest */ + gst_adapter_flush (sbcpay->adapter, + gst_adapter_available (sbcpay->adapter)); +@@ -263,7 +230,7 @@ gst_rtp_sbc_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer) + if (available + RTP_SBC_HEADER_TOTAL >= + GST_RTP_BASE_PAYLOAD_MTU (sbcpay) || + (available > (sbcpay->min_frames * sbcpay->frame_length))) +- return gst_rtp_sbc_pay_flush_buffers (sbcpay); ++ return gst_rtp_sbc_pay_drain_buffers (sbcpay); + + return GST_FLOW_OK; + } +@@ -275,7 +242,13 @@ gst_rtp_sbc_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event) + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_EOS: +- gst_rtp_sbc_pay_flush_buffers (sbcpay); ++ gst_rtp_sbc_pay_drain_buffers (sbcpay); ++ break; ++ case GST_EVENT_FLUSH_STOP: ++ gst_adapter_clear (sbcpay->adapter); ++ break; ++ case 
GST_EVENT_SEGMENT: ++ gst_rtp_sbc_pay_drain_buffers (sbcpay); + break; + default: + break; +@@ -284,6 +257,25 @@ gst_rtp_sbc_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event) + return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event); + } + ++static GstStateChangeReturn ++gst_rtp_sbc_pay_change_state (GstElement * element, GstStateChange transition) ++{ ++ GstStateChangeReturn ret; ++ GstRtpSBCPay *sbcpay = GST_RTP_SBC_PAY (element); ++ ++ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); ++ ++ switch (transition) { ++ case GST_STATE_CHANGE_PAUSED_TO_READY: ++ gst_adapter_clear (sbcpay->adapter); ++ break; ++ default: ++ break; ++ } ++ ++ return ret; ++} ++ + static void + gst_rtp_sbc_pay_finalize (GObject * object) + { +@@ -310,6 +302,8 @@ gst_rtp_sbc_pay_class_init (GstRtpSBCPayClass * klass) + GST_DEBUG_FUNCPTR (gst_rtp_sbc_pay_handle_buffer); + payload_class->sink_event = GST_DEBUG_FUNCPTR (gst_rtp_sbc_pay_sink_event); + ++ element_class->change_state = gst_rtp_sbc_pay_change_state; ++ + /* properties */ + g_object_class_install_property (G_OBJECT_CLASS (klass), + PROP_MIN_FRAMES, +@@ -376,10 +370,3 @@ gst_rtp_sbc_pay_init (GstRtpSBCPay * self) + + self->min_frames = DEFAULT_MIN_FRAMES; + } +- +-gboolean +-gst_rtp_sbc_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpsbcpay", GST_RANK_NONE, +- GST_TYPE_RTP_SBC_PAY); +-} +diff --git a/gst/rtp/gstrtpsbcpay.h b/gst/rtp/gstrtpsbcpay.h +index e57022980..6f42c1e74 100644 +--- a/gst/rtp/gstrtpsbcpay.h ++++ b/gst/rtp/gstrtpsbcpay.h +@@ -60,6 +60,4 @@ struct _GstRtpSBCPayClass { + + GType gst_rtp_sbc_pay_get_type(void); + +-gboolean gst_rtp_sbc_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS +diff --git a/gst/rtp/gstrtpsirendepay.c b/gst/rtp/gstrtpsirendepay.c +index f22726810..86a9dfffe 100644 +--- a/gst/rtp/gstrtpsirendepay.c ++++ b/gst/rtp/gstrtpsirendepay.c +@@ -27,6 +27,7 @@ + #include + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpsirendepay.h" + #include "gstrtputils.h" + +@@ -55,6 +56,8 @@ GST_STATIC_PAD_TEMPLATE ("sink", + + G_DEFINE_TYPE (GstRTPSirenDepay, gst_rtp_siren_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpsirendepay, "rtpsirendepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_SIREN_DEPAY, rtp_element_init (plugin)); + + static void gst_rtp_siren_depay_class_init (GstRTPSirenDepayClass * klass) + { +@@ -116,10 +119,3 @@ gst_rtp_siren_depay_process (GstRTPBaseDepayload * depayload, + + return outbuf; + } +- +-gboolean +-gst_rtp_siren_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpsirendepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_SIREN_DEPAY); +-} +diff --git a/gst/rtp/gstrtpsirendepay.h b/gst/rtp/gstrtpsirendepay.h +index d1ffb11e0..cdc108add 100644 +--- a/gst/rtp/gstrtpsirendepay.h ++++ b/gst/rtp/gstrtpsirendepay.h +@@ -53,7 +53,5 @@ struct _GstRTPSirenDepayClass + + GType gst_rtp_siren_depay_get_type (void); + +-gboolean gst_rtp_siren_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + #endif /* __GST_RTP_SIREN_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpsirenpay.c b/gst/rtp/gstrtpsirenpay.c +index 04f9aa817..93385a43f 100644 +--- a/gst/rtp/gstrtpsirenpay.c ++++ b/gst/rtp/gstrtpsirenpay.c +@@ -23,6 +23,7 @@ + #include "config.h" + #endif + ++#include "gstrtpelements.h" + #include "gstrtpsirenpay.h" + #include + +@@ -53,6 +54,8 @@ static gboolean gst_rtp_siren_pay_setcaps (GstRTPBasePayload * payload, + + G_DEFINE_TYPE 
(GstRTPSirenPay, gst_rtp_siren_pay, + GST_TYPE_RTP_BASE_AUDIO_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpsirenpay, "rtpsirenpay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_SIREN_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_siren_pay_class_init (GstRTPSirenPayClass * klass) +@@ -138,10 +141,3 @@ wrong_caps: + return FALSE; + } + } +- +-gboolean +-gst_rtp_siren_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpsirenpay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_SIREN_PAY); +-} +diff --git a/gst/rtp/gstrtpsirenpay.h b/gst/rtp/gstrtpsirenpay.h +index eaba5b5a4..56fc664ba 100644 +--- a/gst/rtp/gstrtpsirenpay.h ++++ b/gst/rtp/gstrtpsirenpay.h +@@ -51,7 +51,5 @@ struct _GstRTPSirenPayClass + + GType gst_rtp_siren_pay_get_type (void); + +-gboolean gst_rtp_siren_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + #endif /* __GST_RTP_SIREN_PAY_H__ */ +diff --git a/gst/rtp/gstrtpspeexdepay.c b/gst/rtp/gstrtpspeexdepay.c +index a2c987bd6..ca70cd3e2 100644 +--- a/gst/rtp/gstrtpspeexdepay.c ++++ b/gst/rtp/gstrtpspeexdepay.c +@@ -26,6 +26,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpspeexdepay.h" + #include "gstrtputils.h" + +@@ -66,6 +67,8 @@ static gboolean gst_rtp_speex_depay_setcaps (GstRTPBaseDepayload * depayload, + + G_DEFINE_TYPE (GstRtpSPEEXDepay, gst_rtp_speex_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpspeexdepay, "rtpspeexdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_SPEEX_DEPAY, rtp_element_init (plugin)); + + static void + gst_rtp_speex_depay_class_init (GstRtpSPEEXDepayClass * klass) +@@ -217,10 +220,3 @@ gst_rtp_speex_depay_process (GstRTPBaseDepayload * depayload, + + return outbuf; + } +- +-gboolean +-gst_rtp_speex_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpspeexdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_SPEEX_DEPAY); +-} +diff --git a/gst/rtp/gstrtpspeexdepay.h b/gst/rtp/gstrtpspeexdepay.h +index 59ffed030..3961ef1cd 100644 +--- a/gst/rtp/gstrtpspeexdepay.h ++++ b/gst/rtp/gstrtpspeexdepay.h +@@ -46,8 +46,6 @@ struct _GstRtpSPEEXDepayClass + + GType gst_rtp_speex_depay_get_type (void); + +-gboolean gst_rtp_speex_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_SPEEX_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpspeexpay.c b/gst/rtp/gstrtpspeexpay.c +index 6f086b010..17b2aa59e 100644 +--- a/gst/rtp/gstrtpspeexpay.c ++++ b/gst/rtp/gstrtpspeexpay.c +@@ -26,6 +26,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpspeexpay.h" + #include "gstrtputils.h" + +@@ -64,6 +65,8 @@ static GstFlowReturn gst_rtp_speex_pay_handle_buffer (GstRTPBasePayload * + + #define gst_rtp_speex_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpSPEEXPay, gst_rtp_speex_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpspeexpay, "rtpspeexpay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_SPEEX_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_speex_pay_class_init (GstRtpSPEEXPayClass * klass) +@@ -341,10 +344,3 @@ gst_rtp_speex_pay_change_state (GstElement * element, GstStateChange transition) + } + return ret; + } +- +-gboolean +-gst_rtp_speex_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpspeexpay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_SPEEX_PAY); +-} +diff --git a/gst/rtp/gstrtpspeexpay.h b/gst/rtp/gstrtpspeexpay.h +index 0ccaefbae..a89350e4c 100644 +--- a/gst/rtp/gstrtpspeexpay.h ++++ b/gst/rtp/gstrtpspeexpay.h +@@ -49,8 +49,6 @@ 
struct _GstRtpSPEEXPayClass + + GType gst_rtp_speex_pay_get_type (void); + +-gboolean gst_rtp_speex_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_SPEEX_PAY_H__ */ +diff --git a/gst/rtp/gstrtpstorage.c b/gst/rtp/gstrtpstorage.c +index b43eb982c..a3e1593ce 100644 +--- a/gst/rtp/gstrtpstorage.c ++++ b/gst/rtp/gstrtpstorage.c +@@ -53,6 +53,7 @@ + * Since: 1.14 + */ + ++#include "gstrtpelements.h" + #include "gstrtpstorage.h" + + static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink", +@@ -83,6 +84,8 @@ GST_DEBUG_CATEGORY (gst_rtp_storage_debug); + #define GST_CAT_DEFAULT (gst_rtp_storage_debug) + + G_DEFINE_TYPE (GstRtpStorage, gst_rtp_storage, GST_TYPE_ELEMENT); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpstorage, "rtpstorage", GST_RANK_NONE, ++ GST_TYPE_RTP_STORAGE, rtp_element_init (plugin)); + + static GstFlowReturn + gst_rtp_storage_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) +diff --git a/gst/rtp/gstrtpstreamdepay.c b/gst/rtp/gstrtpstreamdepay.c +index bcaec19ea..978c75301 100644 +--- a/gst/rtp/gstrtpstreamdepay.c ++++ b/gst/rtp/gstrtpstreamdepay.c +@@ -36,6 +36,7 @@ + #include "config.h" + #endif + ++#include "gstrtpelements.h" + #include "gstrtpstreamdepay.h" + + GST_DEBUG_CATEGORY (gst_rtp_stream_depay_debug); +@@ -57,6 +58,8 @@ static GstStaticPadTemplate sink_template = + + #define parent_class gst_rtp_stream_depay_parent_class + G_DEFINE_TYPE (GstRtpStreamDepay, gst_rtp_stream_depay, GST_TYPE_BASE_PARSE); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpstreamdepay, "rtpstreamdepay", ++ GST_RANK_NONE, GST_TYPE_RTP_STREAM_DEPAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_stream_depay_set_sink_caps (GstBaseParse * parse, + GstCaps * caps); +@@ -223,10 +226,3 @@ gst_rtp_stream_depay_sink_activate (GstPad * pad, GstObject * parent) + { + return gst_pad_activate_mode (pad, GST_PAD_MODE_PUSH, TRUE); + } +- +-gboolean +-gst_rtp_stream_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpstreamdepay", +- GST_RANK_NONE, GST_TYPE_RTP_STREAM_DEPAY); +-} +diff --git a/gst/rtp/gstrtpstreamdepay.h b/gst/rtp/gstrtpstreamdepay.h +index b6011cb55..32bd6c142 100644 +--- a/gst/rtp/gstrtpstreamdepay.h ++++ b/gst/rtp/gstrtpstreamdepay.h +@@ -50,7 +50,6 @@ struct _GstRtpStreamDepayClass + }; + + GType gst_rtp_stream_depay_get_type (void); +-gboolean gst_rtp_stream_depay_plugin_init (GstPlugin * plugin); + + G_END_DECLS + +diff --git a/gst/rtp/gstrtpstreampay.c b/gst/rtp/gstrtpstreampay.c +index b5758228b..51206ffb9 100644 +--- a/gst/rtp/gstrtpstreampay.c ++++ b/gst/rtp/gstrtpstreampay.c +@@ -37,6 +37,7 @@ + #include "config.h" + #endif + ++#include "gstrtpelements.h" + #include "gstrtpstreampay.h" + + #define GST_CAT_DEFAULT gst_rtp_stream_pay_debug +@@ -58,6 +59,8 @@ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src", + + #define parent_class gst_rtp_stream_pay_parent_class + G_DEFINE_TYPE (GstRtpStreamPay, gst_rtp_stream_pay, GST_TYPE_ELEMENT); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpstreampay, "rtpstreampay", ++ GST_RANK_NONE, GST_TYPE_RTP_STREAM_PAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_stream_pay_sink_query (GstPad * pad, GstObject * parent, + GstQuery * query); +@@ -277,10 +280,3 @@ gst_rtp_stream_pay_sink_chain (GstPad * pad, GstObject * parent, + + return gst_pad_push (self->srcpad, outbuf); + } +- +-gboolean +-gst_rtp_stream_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpstreampay", +- GST_RANK_NONE, 
GST_TYPE_RTP_STREAM_PAY); +-} +diff --git a/gst/rtp/gstrtpstreampay.h b/gst/rtp/gstrtpstreampay.h +index a9436a8fd..b90165af2 100644 +--- a/gst/rtp/gstrtpstreampay.h ++++ b/gst/rtp/gstrtpstreampay.h +@@ -47,8 +47,6 @@ struct _GstRtpStreamPayClass { + + GType gst_rtp_stream_pay_get_type (void); + +-gboolean gst_rtp_stream_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_STREAM_PAY_H__ */ +diff --git a/gst/rtp/gstrtpsv3vdepay.c b/gst/rtp/gstrtpsv3vdepay.c +index 3d091c2b8..bac99458a 100644 +--- a/gst/rtp/gstrtpsv3vdepay.c ++++ b/gst/rtp/gstrtpsv3vdepay.c +@@ -25,6 +25,7 @@ + + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpsv3vdepay.h" + #include "gstrtputils.h" + +@@ -49,8 +50,11 @@ GST_STATIC_PAD_TEMPLATE ("sink", + ); + + #define gst_rtp_sv3v_depay_parent_class parent_class +-G_DEFINE_TYPE (GstRtpSV3VDepay, gst_rtp_sv3v_depay, +- GST_TYPE_RTP_BASE_DEPAYLOAD); ++G_DEFINE_TYPE_WITH_CODE (GstRtpSV3VDepay, gst_rtp_sv3v_depay, ++ GST_TYPE_RTP_BASE_DEPAYLOAD, GST_DEBUG_CATEGORY_INIT (rtpsv3vdepay_debug, ++ "rtpsv3vdepay", 0, "RTP SV3V depayloader")); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpsv3vdepay, "rtpsv3vdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_SV3V_DEPAY, rtp_element_init (plugin)); + + static void gst_rtp_sv3v_depay_finalize (GObject * object); + +@@ -310,13 +314,3 @@ gst_rtp_sv3v_depay_change_state (GstElement * element, + } + return ret; + } +- +-gboolean +-gst_rtp_sv3v_depay_plugin_init (GstPlugin * plugin) +-{ +- GST_DEBUG_CATEGORY_INIT (rtpsv3vdepay_debug, "rtpsv3vdepay", 0, +- "RTP SV3V depayloader"); +- +- return gst_element_register (plugin, "rtpsv3vdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_SV3V_DEPAY); +-} +diff --git a/gst/rtp/gstrtpsv3vdepay.h b/gst/rtp/gstrtpsv3vdepay.h +index 428684a7d..1f3fc7f9b 100644 +--- a/gst/rtp/gstrtpsv3vdepay.h ++++ b/gst/rtp/gstrtpsv3vdepay.h +@@ -60,8 +60,6 @@ struct _GstRtpSV3VDepayClass + + GType gst_rtp_sv3v_depay_get_type (void); + +-gboolean gst_rtp_sv3v_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_SV3V_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtptheoradepay.c b/gst/rtp/gstrtptheoradepay.c +index c8af97495..e7ff9e18c 100644 +--- a/gst/rtp/gstrtptheoradepay.c ++++ b/gst/rtp/gstrtptheoradepay.c +@@ -26,6 +26,7 @@ + #include + + #include ++#include "gstrtpelements.h" + #include "gstrtptheoradepay.h" + #include "gstrtputils.h" + +@@ -64,6 +65,8 @@ GST_STATIC_PAD_TEMPLATE ("src", + #define gst_rtp_theora_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpTheoraDepay, gst_rtp_theora_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtptheoradepay, "rtptheoradepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_THEORA_DEPAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_theora_depay_setcaps (GstRTPBaseDepayload * depayload, + GstCaps * caps); +@@ -680,13 +683,6 @@ gst_rtp_theora_depay_change_state (GstElement * element, + return ret; + } + +-gboolean +-gst_rtp_theora_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtptheoradepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_THEORA_DEPAY); +-} +- + static gboolean + gst_rtp_theora_depay_packet_lost (GstRTPBaseDepayload * depayload, + GstEvent * event) +diff --git a/gst/rtp/gstrtptheoradepay.h b/gst/rtp/gstrtptheoradepay.h +index 2b0b26016..492d33dc0 100644 +--- a/gst/rtp/gstrtptheoradepay.h ++++ b/gst/rtp/gstrtptheoradepay.h +@@ -65,8 +65,6 @@ struct _GstRtpTheoraDepayClass + + GType gst_rtp_theora_depay_get_type (void); + +-gboolean 
gst_rtp_theora_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_THEORA_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtptheorapay.c b/gst/rtp/gstrtptheorapay.c +index 61fb90bc6..57807570e 100644 +--- a/gst/rtp/gstrtptheorapay.c ++++ b/gst/rtp/gstrtptheorapay.c +@@ -26,6 +26,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "fnv1hash.h" + #include "gstrtptheorapay.h" + #include "gstrtputils.h" +@@ -79,6 +80,8 @@ enum + + #define gst_rtp_theora_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpTheoraPay, gst_rtp_theora_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtptheorapay, "rtptheorapay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_THEORA_PAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_theora_pay_setcaps (GstRTPBasePayload * basepayload, + GstCaps * caps); +@@ -281,8 +284,8 @@ gst_rtp_theora_pay_init_packet (GstRtpTheoraPay * rtptheorapay, guint8 TDT, + + /* new packet allocate max packet size */ + rtptheorapay->packet = +- gst_rtp_buffer_new_allocate_len (GST_RTP_BASE_PAYLOAD_MTU +- (rtptheorapay), 0, 0); ++ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD ++ (rtptheorapay), GST_RTP_BASE_PAYLOAD_MTU (rtptheorapay), 0, 0); + gst_rtp_theora_pay_reset_packet (rtptheorapay, TDT); + + GST_BUFFER_PTS (rtptheorapay->packet) = timestamp; +@@ -976,10 +979,3 @@ gst_rtp_theora_pay_get_property (GObject * object, guint prop_id, + break; + } + } +- +-gboolean +-gst_rtp_theora_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtptheorapay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_THEORA_PAY); +-} +diff --git a/gst/rtp/gstrtptheorapay.h b/gst/rtp/gstrtptheorapay.h +index 22bc01df1..d009364fe 100644 +--- a/gst/rtp/gstrtptheorapay.h ++++ b/gst/rtp/gstrtptheorapay.h +@@ -79,8 +79,6 @@ struct _GstRtpTheoraPayClass + + GType gst_rtp_theora_pay_get_type (void); + +-gboolean gst_rtp_theora_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_THEORA_PAY_H__ */ +diff --git a/gst/rtp/gstrtpulpfecdec.c b/gst/rtp/gstrtpulpfecdec.c +index 709309ad1..beb0432ca 100644 +--- a/gst/rtp/gstrtpulpfecdec.c ++++ b/gst/rtp/gstrtpulpfecdec.c +@@ -62,6 +62,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "rtpulpfeccommon.h" + #include "gstrtpulpfecdec.h" + +@@ -84,10 +85,12 @@ enum + PROP_STORAGE, + PROP_RECOVERED, + PROP_UNRECOVERED, ++ PROP_PASSTHROUGH, + N_PROPERTIES + }; + + #define DEFAULT_FEC_PT 0 ++#define DEFAULT_PASSTHROUGH FALSE + + static GParamSpec *klass_properties[N_PROPERTIES] = { NULL, }; + +@@ -95,6 +98,8 @@ GST_DEBUG_CATEGORY (gst_rtp_ulpfec_dec_debug); + #define GST_CAT_DEFAULT (gst_rtp_ulpfec_dec_debug) + + G_DEFINE_TYPE (GstRtpUlpFecDec, gst_rtp_ulpfec_dec, GST_TYPE_ELEMENT); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpulpfecdec, "rtpulpfecdec", ++ GST_RANK_NONE, GST_TYPE_RTP_ULPFEC_DEC, rtp_element_init (plugin)); + + #define RTP_FEC_MAP_INFO_NTH(dec, data) (&g_array_index (\ + ((GstRtpUlpFecDec *)dec)->info_arr, \ +@@ -375,6 +380,7 @@ gst_rtp_ulpfec_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) + + if (G_LIKELY (GST_FLOW_OK == self->chain_return_val)) { + GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ gboolean passthrough; + buf = gst_buffer_make_writable (buf); + + if (G_UNLIKELY (self->unset_discont_flag)) { +@@ -382,8 +388,20 @@ gst_rtp_ulpfec_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) + GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT); + } + ++ GST_OBJECT_LOCK (self); ++ if (G_UNLIKELY 
(self->needs_discont)) { ++ self->needs_discont = FALSE; ++ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT); ++ } ++ passthrough = self->passthrough; ++ GST_OBJECT_UNLOCK (self); ++ + gst_rtp_buffer_map (buf, GST_MAP_WRITE, &rtp); +- gst_rtp_buffer_set_seq (&rtp, self->next_seqnum++); ++ if (passthrough) { ++ self->next_seqnum = gst_rtp_buffer_get_seq (&rtp) + 1; ++ } else { ++ gst_rtp_buffer_set_seq (&rtp, self->next_seqnum++); ++ } + gst_rtp_buffer_unmap (&rtp); + + return gst_pad_push (self->srcpad, buf); +@@ -439,6 +457,13 @@ gst_rtp_ulpfec_dec_handle_packet_loss (GstRtpUlpFecDec * self, guint16 seqnum, + gst_rtp_buffer_set_seq (&rtp, self->next_seqnum++); + gst_rtp_buffer_unmap (&rtp); + ++ GST_OBJECT_LOCK (self); ++ if (G_UNLIKELY (self->needs_discont)) { ++ self->needs_discont = FALSE; ++ GST_BUFFER_FLAG_SET (sent_buffer, GST_BUFFER_FLAG_DISCONT); ++ } ++ GST_OBJECT_UNLOCK (self); ++ + ret = FALSE; + self->unset_discont_flag = TRUE; + self->chain_return_val = gst_pad_push (self->srcpad, sent_buffer); +@@ -478,6 +503,18 @@ gst_rtp_ulpfec_dec_handle_sink_event (GstPad * pad, GstObject * parent, + guint seqnum; + GstClockTime timestamp, duration; + GstStructure *s; ++ gboolean passthrough; ++ ++ GST_OBJECT_LOCK (self); ++ passthrough = self->passthrough; ++ GST_OBJECT_UNLOCK (self); ++ ++ if (passthrough) { ++ GST_TRACE_OBJECT (self, ++ "in passthrough mode, ignoring packet loss event"); ++ forward = TRUE; ++ goto out; ++ } + + event = gst_event_make_writable (event); + s = gst_event_writable_structure (event); +@@ -552,6 +589,7 @@ gst_rtp_ulpfec_dec_handle_sink_event (GstPad * pad, GstObject * parent, + self->caps_pt = caps_pt; + } + ++out: + if (forward) + return gst_pad_push_event (self->srcpad, event); + gst_event_unref (event); +@@ -574,6 +612,7 @@ gst_rtp_ulpfec_dec_init (GstRtpUlpFecDec * self) + gst_element_add_pad (GST_ELEMENT (self), self->sinkpad); + + self->fec_pt = DEFAULT_FEC_PT; ++ self->passthrough = DEFAULT_PASSTHROUGH; + + self->next_seqnum = g_random_int_range (0, G_MAXINT16); + +@@ -639,6 +678,20 @@ gst_rtp_ulpfec_dec_set_property (GObject * object, guint prop_id, + if (self->storage) + g_object_ref (self->storage); + break; ++ case PROP_PASSTHROUGH:{ ++ gboolean newval = g_value_get_boolean (value); ++ GST_OBJECT_LOCK (self); ++ /* if we changing into non-passthrough mode, then the sequence numbers may ++ * be completely different and we need to advertise that with a discont */ ++ GST_INFO_OBJECT (self, "passthrough changing from %u to %u", ++ self->passthrough, newval); ++ if (self->passthrough && !newval) { ++ self->needs_discont = TRUE; ++ } ++ self->passthrough = newval; ++ GST_OBJECT_UNLOCK (self); ++ break; ++ } + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -664,6 +717,9 @@ gst_rtp_ulpfec_dec_get_property (GObject * object, guint prop_id, + case PROP_UNRECOVERED: + g_value_set_uint (value, (guint) self->packets_unrecovered); + break; ++ case PROP_PASSTHROUGH: ++ g_value_set_boolean (value, self->passthrough); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -709,6 +765,19 @@ gst_rtp_ulpfec_dec_class_init (GstRtpUlpFecDecClass * klass) + g_param_spec_uint ("unrecovered", "unrecovered", + "The number of unrecovered packets", 0, G_MAXUINT, 0, + G_PARAM_READABLE | G_PARAM_STATIC_STRINGS); ++ /** ++ * GstRtpUlpFecDec:passthrough: ++ * ++ * Whether to push data through without any modification. If passthrough is ++ * enabled, then no packets will ever be recovered. 
++ * ++ * Since: 1.22 ++ */ ++ klass_properties[PROP_PASSTHROUGH] = ++ g_param_spec_boolean ("passthrough", "Passthrough", ++ "Whether to passthrough all data as-is without modification and " ++ "never attempt to recover packets", DEFAULT_PASSTHROUGH, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + + g_object_class_install_properties (gobject_class, N_PROPERTIES, + klass_properties); +diff --git a/gst/rtp/gstrtpulpfecdec.h b/gst/rtp/gstrtpulpfecdec.h +index f9b10b068..b54e82ee2 100644 +--- a/gst/rtp/gstrtpulpfecdec.h ++++ b/gst/rtp/gstrtpulpfecdec.h +@@ -55,10 +55,12 @@ struct _GstRtpUlpFecDec { + RtpStorage *storage; + gsize packets_recovered; + gsize packets_unrecovered; ++ gboolean passthrough; + + /* internal stuff */ + GstFlowReturn chain_return_val; + gboolean unset_discont_flag; ++ gboolean needs_discont; + gboolean have_caps_ssrc; + gboolean have_caps_pt; + guint32 caps_ssrc; +diff --git a/gst/rtp/gstrtpulpfecenc.c b/gst/rtp/gstrtpulpfecenc.c +index bd2df23bb..c95fe0d89 100644 +--- a/gst/rtp/gstrtpulpfecenc.c ++++ b/gst/rtp/gstrtpulpfecenc.c +@@ -88,6 +88,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "rtpulpfeccommon.h" + #include "gstrtpulpfecenc.h" + +@@ -114,6 +115,8 @@ GST_DEBUG_CATEGORY (gst_rtp_ulpfec_enc_debug); + #define GST_CAT_DEFAULT (gst_rtp_ulpfec_enc_debug) + + G_DEFINE_TYPE (GstRtpUlpFecEnc, gst_rtp_ulpfec_enc, GST_TYPE_ELEMENT); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpulpfecenc, "rtpulpfecenc", ++ GST_RANK_NONE, GST_TYPE_RTP_ULPFEC_ENC, rtp_element_init (plugin)); + + enum + { +@@ -130,6 +133,15 @@ enum + RtpUlpFecMapInfo, \ + GPOINTER_TO_UINT(data))) + ++static void ++dump_stream_ctx_settings (GstRtpUlpFecEncStreamCtx * ctx) ++{ ++ GST_DEBUG_OBJECT (ctx->parent, "rtpulpfec settings for ssrc 0x%x, pt %u, " ++ "percentage %u, percentage important %u, multipacket %u, mux_seq %u", ++ ctx->ssrc, ctx->pt, ctx->percentage, ctx->percentage_important, ++ ctx->multipacket, ctx->mux_seq); ++}; ++ + static void + gst_rtp_ulpfec_enc_stream_ctx_start (GstRtpUlpFecEncStreamCtx * ctx, + GQueue * packets, guint fec_packets) +@@ -328,12 +340,15 @@ gst_rtp_ulpfec_enc_stream_ctx_prepend_to_fec_buffer (GstRtpUlpFecEncStreamCtx * + + static GstFlowReturn + gst_rtp_ulpfec_enc_stream_ctx_push_fec_packets (GstRtpUlpFecEncStreamCtx * ctx, +- guint8 pt, guint16 seq, guint32 timestamp, guint32 ssrc) ++ guint8 pt, guint16 seq, guint32 timestamp, guint32 ssrc, guint8 twcc_ext_id, ++ GstRTPHeaderExtensionFlags twcc_ext_flags, guint8 twcc_appbits) + { + GstFlowReturn ret = GST_FLOW_OK; + guint fec_packets_num = + gst_rtp_ulpfec_enc_stream_ctx_get_fec_packets_num (ctx); + ++ GST_LOG_OBJECT (ctx->parent, "ctx %p have %u fec packets to push", ctx, ++ fec_packets_num); + if (fec_packets_num) { + guint fec_packets_pushed = 0; + GstBuffer *latest_packet = ctx->packets_buf.head->data; +@@ -348,6 +363,33 @@ gst_rtp_ulpfec_enc_stream_ctx_push_fec_packets (GstRtpUlpFecEncStreamCtx * ctx, + gst_buffer_copy_into (fec, latest_packet, GST_BUFFER_COPY_TIMESTAMPS, 0, + -1); + ++ /* If buffers in the stream we are protecting were meant to hold a TWCC seqnum, ++ * we also indicate that our protection buffers need one. 
At this point no seqnum ++ * has actually been set, we thus don't need to rewrite seqnums, simply indicate ++ * to RTPSession that the FEC buffers need one too */ ++ ++ /* FIXME: remove this logic once https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/923 ++ * is addressed */ ++ if (twcc_ext_id != 0) { ++ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ guint16 data; ++ ++ if (!gst_rtp_buffer_map (fec, GST_MAP_READWRITE, &rtp)) ++ g_assert_not_reached (); ++ ++ if (twcc_ext_flags & GST_RTP_HEADER_EXTENSION_ONE_BYTE) { ++ gst_rtp_buffer_add_extension_onebyte_header (&rtp, twcc_ext_id, ++ &data, sizeof (guint16)); ++ } else if (twcc_ext_flags & GST_RTP_HEADER_EXTENSION_TWO_BYTE) { ++ gst_rtp_buffer_add_extension_twobytes_header (&rtp, twcc_appbits, ++ twcc_ext_id, &data, sizeof (guint16)); ++ } ++ ++ gst_rtp_buffer_unmap (&rtp); ++ } ++ ++ GST_LOG_OBJECT (ctx->parent, "ctx %p pushing generated fec buffer %" ++ GST_PTR_FORMAT, ctx, fec); + ret = gst_pad_push (ctx->srcpad, fec); + if (GST_FLOW_OK == ret) + ++fec_packets_pushed; +@@ -381,6 +423,8 @@ gst_rtp_ulpfec_enc_stream_ctx_cache_packet (GstRtpUlpFecEncStreamCtx * ctx, + + *dst_empty_packet_buffer = gst_rtp_buffer_get_marker (rtp); + *dst_push_fec = *dst_empty_packet_buffer; ++ ++ GST_TRACE ("ctx %p pushing fec %u", ctx, *dst_push_fec); + } else { + gboolean push_fec; + +@@ -394,6 +438,8 @@ gst_rtp_ulpfec_enc_stream_ctx_cache_packet (GstRtpUlpFecEncStreamCtx * ctx, + + *dst_push_fec = push_fec; + *dst_empty_packet_buffer = FALSE; ++ ++ GST_TRACE ("ctx %p pushing fec %u", ctx, *dst_push_fec); + } + } + +@@ -420,6 +466,8 @@ gst_rtp_ulpfec_enc_stream_ctx_configure (GstRtpUlpFecEncStreamCtx * ctx, + */ + ctx->budget_inc_important = percentage > percentage_important ? + ctx->budget_inc : percentage_important / 100.; ++ ++ dump_stream_ctx_settings (ctx); + } + + static GstRtpUlpFecEncStreamCtx * +@@ -460,17 +508,19 @@ gst_rtp_ulpfec_enc_stream_ctx_free (GstRtpUlpFecEncStreamCtx * ctx) + g_assert (0 == ctx->info_arr->len); + g_array_free (ctx->info_arr, TRUE); + g_array_free (ctx->scratch_buf, TRUE); +- g_slice_free1 (sizeof (GstRtpUlpFecEncStreamCtx), ctx); ++ g_free (ctx); + } + + static GstFlowReturn + gst_rtp_ulpfec_enc_stream_ctx_process (GstRtpUlpFecEncStreamCtx * ctx, +- GstBuffer * buffer) ++ GstBuffer * buffer, guint8 twcc_ext_id) + { + GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; + GstFlowReturn ret; + gboolean push_fec = FALSE; + gboolean empty_packet_buffer = FALSE; ++ GstRTPHeaderExtensionFlags twcc_ext_flags = 0; ++ guint8 twcc_appbits = 0; + + ctx->num_packets_received++; + +@@ -487,6 +537,21 @@ gst_rtp_ulpfec_enc_stream_ctx_process (GstRtpUlpFecEncStreamCtx * ctx, + g_assert_not_reached (); + } + ++ if (twcc_ext_id != 0) { ++ gpointer data; ++ guint size; ++ ++ if (gst_rtp_buffer_get_extension_onebyte_header (&rtp, twcc_ext_id, 0, ++ &data, &size)) { ++ twcc_ext_flags |= GST_RTP_HEADER_EXTENSION_ONE_BYTE; ++ } else if (gst_rtp_buffer_get_extension_twobytes_header (&rtp, ++ &twcc_appbits, twcc_ext_id, 0, &data, &size)) { ++ twcc_ext_flags |= GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++ } else { ++ twcc_ext_id = 0; ++ } ++ } ++ + gst_rtp_ulpfec_enc_stream_ctx_cache_packet (ctx, &rtp, &empty_packet_buffer, + &push_fec); + +@@ -501,7 +566,7 @@ gst_rtp_ulpfec_enc_stream_ctx_process (GstRtpUlpFecEncStreamCtx * ctx, + if (GST_FLOW_OK == ret) + ret = + gst_rtp_ulpfec_enc_stream_ctx_push_fec_packets (ctx, ctx->pt, fec_seq, +- fec_timestamp, fec_ssrc); ++ fec_timestamp, fec_ssrc, twcc_ext_id, twcc_ext_flags, twcc_appbits); + } else { + 
gst_rtp_buffer_unmap (&rtp); + ret = gst_pad_push (ctx->srcpad, buffer); +@@ -555,7 +620,7 @@ gst_rtp_ulpfec_enc_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + + ctx = gst_rtp_ulpfec_enc_aquire_ctx (fec, ssrc); + +- ret = gst_rtp_ulpfec_enc_stream_ctx_process (ctx, buffer); ++ ret = gst_rtp_ulpfec_enc_stream_ctx_process (ctx, buffer, fec->twcc_ext_id); + + /* FIXME: does not work for multiple ssrcs */ + fec->num_packets_protected = ctx->num_packets_protected; +@@ -574,6 +639,58 @@ gst_rtp_ulpfec_enc_configure_ctx (gpointer key, gpointer value, + fec->percentage, fec->percentage_important, fec->multipacket); + } + ++static guint8 ++_get_extmap_id_for_attribute (const GstStructure * s, const gchar * ext_name) ++{ ++ guint i; ++ guint8 extmap_id = 0; ++ guint n_fields = gst_structure_n_fields (s); ++ ++ for (i = 0; i < n_fields; i++) { ++ const gchar *field_name = gst_structure_nth_field_name (s, i); ++ if (g_str_has_prefix (field_name, "extmap-")) { ++ const gchar *str = gst_structure_get_string (s, field_name); ++ if (str && g_strcmp0 (str, ext_name) == 0) { ++ gint64 id = g_ascii_strtoll (field_name + 7, NULL, 10); ++ if (id > 0 && id < 15) { ++ extmap_id = id; ++ break; ++ } ++ } ++ } ++ } ++ return extmap_id; ++} ++ ++#define TWCC_EXTMAP_STR "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01" ++ ++static gboolean ++gst_rtp_ulpfec_enc_event_sink (GstPad * pad, GstObject * parent, ++ GstEvent * event) ++{ ++ GstRtpUlpFecEnc *self = GST_RTP_ULPFEC_ENC (parent); ++ ++ switch (GST_EVENT_TYPE (event)) { ++ case GST_EVENT_CAPS: ++ { ++ GstCaps *caps; ++ GstStructure *s; ++ ++ gst_event_parse_caps (event, &caps); ++ s = gst_caps_get_structure (caps, 0); ++ self->twcc_ext_id = _get_extmap_id_for_attribute (s, TWCC_EXTMAP_STR); ++ ++ GST_INFO_OBJECT (self, "TWCC extension ID: %u", self->twcc_ext_id); ++ ++ break; ++ } ++ default: ++ break; ++ } ++ ++ return gst_pad_event_default (pad, parent, event); ++} ++ + static void + gst_rtp_ulpfec_enc_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +@@ -636,7 +753,9 @@ gst_rtp_ulpfec_enc_dispose (GObject * obj) + { + GstRtpUlpFecEnc *fec = GST_RTP_ULPFEC_ENC (obj); + +- g_hash_table_destroy (fec->ssrc_to_ctx); ++ if (fec->ssrc_to_ctx) ++ g_hash_table_destroy (fec->ssrc_to_ctx); ++ fec->ssrc_to_ctx = NULL; + + G_OBJECT_CLASS (gst_rtp_ulpfec_enc_parent_class)->dispose (obj); + } +@@ -652,6 +771,8 @@ gst_rtp_ulpfec_enc_init (GstRtpUlpFecEnc * fec) + GST_PAD_SET_PROXY_ALLOCATION (fec->sinkpad); + gst_pad_set_chain_function (fec->sinkpad, + GST_DEBUG_FUNCPTR (gst_rtp_ulpfec_enc_chain)); ++ gst_pad_set_event_function (fec->sinkpad, ++ GST_DEBUG_FUNCPTR (gst_rtp_ulpfec_enc_event_sink)); + gst_element_add_pad (GST_ELEMENT (fec), fec->sinkpad); + + fec->ssrc_to_ctx = g_hash_table_new_full (NULL, NULL, NULL, +diff --git a/gst/rtp/gstrtpulpfecenc.h b/gst/rtp/gstrtpulpfecenc.h +index 885c6ad61..a92fc3d1a 100644 +--- a/gst/rtp/gstrtpulpfecenc.h ++++ b/gst/rtp/gstrtpulpfecenc.h +@@ -47,6 +47,7 @@ struct _GstRtpUlpFecEnc { + GstElement parent; + GstPad *srcpad; + GstPad *sinkpad; ++ guint8 twcc_ext_id; + + GHashTable *ssrc_to_ctx; + +diff --git a/gst/rtp/gstrtpvorbisdepay.c b/gst/rtp/gstrtpvorbisdepay.c +index 556bd76c0..053e64795 100644 +--- a/gst/rtp/gstrtpvorbisdepay.c ++++ b/gst/rtp/gstrtpvorbisdepay.c +@@ -26,6 +26,7 @@ + #include + + #include ++#include "gstrtpelements.h" + #include "gstrtpvorbisdepay.h" + #include "gstrtputils.h" + +@@ -61,6 +62,8 @@ GST_STATIC_PAD_TEMPLATE ("src", + 
#define gst_rtp_vorbis_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpVorbisDepay, gst_rtp_vorbis_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvorbisdepay, "rtpvorbisdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_VORBIS_DEPAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_vorbis_depay_setcaps (GstRTPBaseDepayload * depayload, + GstCaps * caps); +@@ -702,10 +705,3 @@ gst_rtp_vorbis_depay_change_state (GstElement * element, + } + return ret; + } +- +-gboolean +-gst_rtp_vorbis_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpvorbisdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_VORBIS_DEPAY); +-} +diff --git a/gst/rtp/gstrtpvorbisdepay.h b/gst/rtp/gstrtpvorbisdepay.h +index a343d04e1..40f8d5f5f 100644 +--- a/gst/rtp/gstrtpvorbisdepay.h ++++ b/gst/rtp/gstrtpvorbisdepay.h +@@ -63,8 +63,6 @@ struct _GstRtpVorbisDepayClass + + GType gst_rtp_vorbis_depay_get_type (void); + +-gboolean gst_rtp_vorbis_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_VORBIS_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpvorbispay.c b/gst/rtp/gstrtpvorbispay.c +index 8a3c249c0..e54e2a7cd 100644 +--- a/gst/rtp/gstrtpvorbispay.c ++++ b/gst/rtp/gstrtpvorbispay.c +@@ -26,6 +26,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "fnv1hash.h" + #include "gstrtpvorbispay.h" + #include "gstrtputils.h" +@@ -70,6 +71,8 @@ enum + + #define gst_rtp_vorbis_pay_parent_class parent_class + G_DEFINE_TYPE (GstRtpVorbisPay, gst_rtp_vorbis_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvorbispay, "rtpvorbispay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_VORBIS_PAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_vorbis_pay_setcaps (GstRTPBasePayload * basepayload, + GstCaps * caps); +@@ -997,10 +1000,3 @@ gst_rtp_vorbis_pay_get_property (GObject * object, guint prop_id, + break; + } + } +- +-gboolean +-gst_rtp_vorbis_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpvorbispay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_VORBIS_PAY); +-} +diff --git a/gst/rtp/gstrtpvorbispay.h b/gst/rtp/gstrtpvorbispay.h +index a5d693a72..e1625efb0 100644 +--- a/gst/rtp/gstrtpvorbispay.h ++++ b/gst/rtp/gstrtpvorbispay.h +@@ -78,8 +78,6 @@ struct _GstRtpVorbisPayClass + + GType gst_rtp_vorbis_pay_get_type (void); + +-gboolean gst_rtp_vorbis_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_VORBIS_PAY_H__ */ +diff --git a/gst/rtp/gstrtpvp8depay.c b/gst/rtp/gstrtpvp8depay.c +index e71baf816..3428f43f6 100644 +--- a/gst/rtp/gstrtpvp8depay.c ++++ b/gst/rtp/gstrtpvp8depay.c +@@ -22,6 +22,7 @@ + # include "config.h" + #endif + ++#include "gstrtpelements.h" + #include "gstrtpvp8depay.h" + #include "gstrtputils.h" + +@@ -43,8 +44,12 @@ static GstStateChangeReturn gst_rtp_vp8_depay_change_state (GstElement * + element, GstStateChange transition); + static gboolean gst_rtp_vp8_depay_handle_event (GstRTPBaseDepayload * depay, + GstEvent * event); ++static gboolean gst_rtp_vp8_depay_packet_lost (GstRTPBaseDepayload * depay, ++ GstEvent * event); + + G_DEFINE_TYPE (GstRtpVP8Depay, gst_rtp_vp8_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvp8depay, "rtpvp8depay", ++ GST_RANK_MARGINAL, GST_TYPE_RTP_VP8_DEPAY, rtp_element_init (plugin)); + + static GstStaticPadTemplate gst_rtp_vp8_depay_src_template = + GST_STATIC_PAD_TEMPLATE ("src", +@@ -62,19 +67,26 @@ GST_STATIC_PAD_TEMPLATE ("sink", + "encoding-name = (string) { 
\"VP8\", \"VP8-DRAFT-IETF-01\" }")); + + #define DEFAULT_WAIT_FOR_KEYFRAME FALSE ++#define DEFAULT_REQUEST_KEYFRAME FALSE + + enum + { + PROP_0, +- PROP_WAIT_FOR_KEYFRAME ++ PROP_WAIT_FOR_KEYFRAME, ++ PROP_REQUEST_KEYFRAME, + }; + ++#define PICTURE_ID_NONE (UINT_MAX) ++#define IS_PICTURE_ID_15BITS(pid) (((guint)(pid) & 0x8000) != 0) ++ + static void + gst_rtp_vp8_depay_init (GstRtpVP8Depay * self) + { + self->adapter = gst_adapter_new (); + self->started = FALSE; + self->wait_for_keyframe = DEFAULT_WAIT_FOR_KEYFRAME; ++ self->request_keyframe = DEFAULT_REQUEST_KEYFRAME; ++ self->last_pushed_was_lost_event = FALSE; + } + + static void +@@ -105,10 +117,24 @@ gst_rtp_vp8_depay_class_init (GstRtpVP8DepayClass * gst_rtp_vp8_depay_class) + DEFAULT_WAIT_FOR_KEYFRAME, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + ++ /** ++ * GstRtpVP8Depay:request-keyframe: ++ * ++ * Request new keyframe when packet loss is detected ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (object_class, PROP_REQUEST_KEYFRAME, ++ g_param_spec_boolean ("request-keyframe", "Request Keyframe", ++ "Request new keyframe when packet loss is detected", ++ DEFAULT_REQUEST_KEYFRAME, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ + element_class->change_state = gst_rtp_vp8_depay_change_state; + + depay_class->process_rtp_packet = gst_rtp_vp8_depay_process; + depay_class->handle_event = gst_rtp_vp8_depay_handle_event; ++ depay_class->packet_lost = gst_rtp_vp8_depay_packet_lost; + + GST_DEBUG_CATEGORY_INIT (gst_rtp_vp8_depay_debug, "rtpvp8depay", 0, + "VP8 Video RTP Depayloader"); +@@ -139,6 +165,9 @@ gst_rtp_vp8_depay_set_property (GObject * object, guint prop_id, + case PROP_WAIT_FOR_KEYFRAME: + self->wait_for_keyframe = g_value_get_boolean (value); + break; ++ case PROP_REQUEST_KEYFRAME: ++ self->request_keyframe = g_value_get_boolean (value); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -155,31 +184,129 @@ gst_rtp_vp8_depay_get_property (GObject * object, guint prop_id, + case PROP_WAIT_FOR_KEYFRAME: + g_value_set_boolean (value, self->wait_for_keyframe); + break; ++ case PROP_REQUEST_KEYFRAME: ++ g_value_set_boolean (value, self->request_keyframe); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } + } + ++static gint ++picture_id_compare (guint16 id0, guint16 id1) ++{ ++ guint shift = 16 - (IS_PICTURE_ID_15BITS (id1) ? 
15 : 7); ++ id0 = id0 << shift; ++ id1 = id1 << shift; ++ return ((gint16) (id1 - id0)) >> shift; ++} ++ ++static void ++send_last_lost_event (GstRtpVP8Depay * self) ++{ ++ if (self->last_lost_event) { ++ GST_ERROR_OBJECT (self, ++ "Sending the last stopped lost event: %" GST_PTR_FORMAT, ++ self->last_lost_event); ++ GST_RTP_BASE_DEPAYLOAD_CLASS (gst_rtp_vp8_depay_parent_class) ++ ->packet_lost (GST_RTP_BASE_DEPAYLOAD_CAST (self), ++ self->last_lost_event); ++ gst_event_replace (&self->last_lost_event, NULL); ++ self->last_pushed_was_lost_event = TRUE; ++ } ++} ++ ++static void ++send_new_lost_event (GstRtpVP8Depay * self, GstClockTime timestamp, ++ guint new_picture_id, const gchar * reason) ++{ ++ GstEvent *event; ++ ++ if (!GST_CLOCK_TIME_IS_VALID (timestamp)) { ++ GST_WARNING_OBJECT (self, "Can't create lost event with invalid timestmap"); ++ return; ++ } ++ ++ event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, ++ gst_structure_new ("GstRTPPacketLost", ++ "timestamp", G_TYPE_UINT64, timestamp, ++ "duration", G_TYPE_UINT64, G_GUINT64_CONSTANT (0), NULL)); ++ ++ GST_DEBUG_OBJECT (self, "Pushing lost event " ++ "(picids 0x%x 0x%x, reason \"%s\"): %" GST_PTR_FORMAT, ++ self->last_picture_id, new_picture_id, reason, event); ++ ++ GST_RTP_BASE_DEPAYLOAD_CLASS (gst_rtp_vp8_depay_parent_class) ++ ->packet_lost (GST_RTP_BASE_DEPAYLOAD_CAST (self), event); ++ ++ gst_event_unref (event); ++} ++ ++static void ++send_last_lost_event_if_needed (GstRtpVP8Depay * self, guint new_picture_id) ++{ ++ if (self->last_picture_id == PICTURE_ID_NONE) ++ return; ++ ++ if (self->last_lost_event) { ++ gboolean send_lost_event = FALSE; ++ if (new_picture_id == PICTURE_ID_NONE) { ++ GST_DEBUG_OBJECT (self, "Dropping the last stopped lost event " ++ "(picture id does not exist): %" GST_PTR_FORMAT, ++ self->last_lost_event); ++ } else if (IS_PICTURE_ID_15BITS (self->last_picture_id) && ++ !IS_PICTURE_ID_15BITS (new_picture_id)) { ++ GST_DEBUG_OBJECT (self, "Dropping the last stopped lost event " ++ "(picture id has less bits than before): %" GST_PTR_FORMAT, ++ self->last_lost_event); ++ } else if (picture_id_compare (self->last_picture_id, new_picture_id) != 1) { ++ GstStructure *s = gst_event_writable_structure (self->last_lost_event); ++ ++ GST_DEBUG_OBJECT (self, "Sending the last stopped lost event " ++ "(gap in picture id %u %u): %" GST_PTR_FORMAT, ++ self->last_picture_id, new_picture_id, self->last_lost_event); ++ send_lost_event = TRUE; ++ /* Prevent rtpbasedepayload from dropping the event now ++ * that we have made sure the lost packet was not FEC */ ++ gst_structure_remove_field (s, "might-have-been-fec"); ++ } ++ if (send_lost_event) ++ GST_RTP_BASE_DEPAYLOAD_CLASS (gst_rtp_vp8_depay_parent_class) ++ ->packet_lost (GST_RTP_BASE_DEPAYLOAD_CAST (self), ++ self->last_lost_event); ++ ++ gst_event_replace (&self->last_lost_event, NULL); ++ } ++} ++ + static GstBuffer * + gst_rtp_vp8_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp) + { + GstRtpVP8Depay *self = GST_RTP_VP8_DEPAY (depay); + GstBuffer *payload; + guint8 *data; +- guint hdrsize; +- guint size; ++ guint hdrsize = 1; ++ guint picture_id = PICTURE_ID_NONE; ++ guint size = gst_rtp_buffer_get_payload_len (rtp); ++ guint s_bit; ++ guint part_id; ++ gboolean frame_start; ++ gboolean sent_lost_event = FALSE; + + if (G_UNLIKELY (GST_BUFFER_IS_DISCONT (rtp->buffer))) { +- GST_LOG_OBJECT (self, "Discontinuity, flushing adapter"); ++ GST_DEBUG_OBJECT (self, "Discontinuity, flushing adapter"); + gst_adapter_clear (self->adapter); + 
self->started = FALSE; + + if (self->wait_for_keyframe) + self->waiting_for_keyframe = TRUE; +- } + +- size = gst_rtp_buffer_get_payload_len (rtp); ++ if (self->request_keyframe) ++ gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depay), ++ gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE, ++ TRUE, 0)); ++ } + + /* At least one header and one vp8 byte */ + if (G_UNLIKELY (size < 2)) +@@ -187,16 +314,9 @@ gst_rtp_vp8_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp) + + data = gst_rtp_buffer_get_payload (rtp); + +- if (G_UNLIKELY (!self->started)) { +- /* Check if this is the start of a VP8 frame, otherwise bail */ +- /* S=1 and PartID= 0 */ +- if ((data[0] & 0x17) != 0x10) +- goto done; ++ s_bit = (data[0] >> 4) & 0x1; ++ part_id = (data[0] >> 0) & 0x7; + +- self->started = TRUE; +- } +- +- hdrsize = 1; + /* Check X optional header */ + if ((data[0] & 0x80) != 0) { + hdrsize++; +@@ -206,8 +326,13 @@ gst_rtp_vp8_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp) + goto too_small; + hdrsize++; + /* Check for 16 bits PictureID */ +- if ((data[2] & 0x80) != 0) ++ picture_id = data[2]; ++ if ((data[2] & 0x80) != 0) { ++ if (G_UNLIKELY (size < 4)) ++ goto too_small; + hdrsize++; ++ picture_id = (picture_id << 8) | data[3]; ++ } + } + /* Check L optional header */ + if ((data[1] & 0x40) != 0) +@@ -216,19 +341,73 @@ gst_rtp_vp8_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp) + if ((data[1] & 0x20) != 0 || (data[1] & 0x10) != 0) + hdrsize++; + } +- GST_DEBUG_OBJECT (depay, "hdrsize %u, size %u", hdrsize, size); +- ++ GST_LOG_OBJECT (depay, ++ "hdrsize %u, size %u, picture id 0x%x, s %u, part_id %u", hdrsize, size, ++ picture_id, s_bit, part_id); + if (G_UNLIKELY (hdrsize >= size)) + goto too_small; + ++ frame_start = (s_bit == 1) && (part_id == 0); ++ if (frame_start) { ++ if (G_UNLIKELY (self->started)) { ++ GST_DEBUG_OBJECT (depay, "Incomplete frame, flushing adapter"); ++ gst_adapter_clear (self->adapter); ++ self->started = FALSE; ++ ++ if (self->wait_for_keyframe) ++ self->waiting_for_keyframe = TRUE; ++ if (self->request_keyframe) ++ gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depay), ++ gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE, ++ TRUE, 0)); ++ ++ send_new_lost_event (self, GST_BUFFER_PTS (rtp->buffer), picture_id, ++ "Incomplete frame detected"); ++ sent_lost_event = TRUE; ++ } ++ } ++ ++ if (!self->started) { ++ if (G_UNLIKELY (!frame_start)) { ++ GST_DEBUG_OBJECT (depay, ++ "The frame is missing the first packet, ignoring the packet"); ++ if (self->stop_lost_events && !sent_lost_event) { ++ send_last_lost_event (self); ++ self->stop_lost_events = FALSE; ++ } ++ ++ if (self->wait_for_keyframe) ++ self->waiting_for_keyframe = TRUE; ++ if (self->request_keyframe) ++ gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depay), ++ gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE, ++ TRUE, 0)); ++ ++ goto done; ++ } ++ ++ GST_LOG_OBJECT (depay, "Found the start of the frame"); ++ ++ if (self->stop_lost_events && !sent_lost_event) { ++ send_last_lost_event_if_needed (self, picture_id); ++ self->stop_lost_events = FALSE; ++ } ++ ++ self->started = TRUE; ++ } ++ + payload = gst_rtp_buffer_get_payload_subbuffer (rtp, hdrsize, -1); + gst_adapter_push (self->adapter, payload); ++ self->last_picture_id = picture_id; + + /* Marker indicates that it was the last rtp packet for this frame */ + if (gst_rtp_buffer_get_marker (rtp)) { + GstBuffer *out; + guint8 header[10]; + ++ GST_LOG_OBJECT 
(depay, ++ "Found the end of the frame (%" G_GSIZE_FORMAT " bytes)", ++ gst_adapter_available (self->adapter)); + if (gst_adapter_available (self->adapter) < 10) + goto too_small; + gst_adapter_copy (self->adapter, &header, 0, 10); +@@ -257,6 +436,7 @@ gst_rtp_vp8_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp) + guint profile, width, height; + + GST_BUFFER_FLAG_UNSET (out, GST_BUFFER_FLAG_DELTA_UNIT); ++ GST_DEBUG_OBJECT (self, "Processed keyframe"); + + profile = (header[0] & 0x0e) >> 1; + width = GST_READ_UINT16_LE (header + 6) & 0x3fff; +@@ -284,6 +464,11 @@ gst_rtp_vp8_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp) + self->waiting_for_keyframe = FALSE; + } + ++ if (picture_id != PICTURE_ID_NONE) ++ self->stop_lost_events = TRUE; ++ ++ self->last_pushed_was_lost_event = FALSE; ++ + return out; + } + +@@ -291,7 +476,7 @@ done: + return NULL; + + too_small: +- GST_LOG_OBJECT (self, "Invalid rtp packet (too small), ignoring"); ++ GST_DEBUG_OBJECT (self, "Invalid rtp packet (too small), ignoring"); + gst_adapter_clear (self->adapter); + self->started = FALSE; + +@@ -309,6 +494,10 @@ gst_rtp_vp8_depay_change_state (GstElement * element, GstStateChange transition) + self->last_height = -1; + self->last_width = -1; + self->waiting_for_keyframe = TRUE; ++ self->caps_sent = FALSE; ++ self->last_picture_id = PICTURE_ID_NONE; ++ gst_event_replace (&self->last_lost_event, NULL); ++ self->stop_lost_events = FALSE; + break; + default: + break; +@@ -329,6 +518,9 @@ gst_rtp_vp8_depay_handle_event (GstRTPBaseDepayload * depay, GstEvent * event) + self->last_profile = -1; + self->last_height = -1; + self->last_width = -1; ++ self->last_picture_id = PICTURE_ID_NONE; ++ gst_event_replace (&self->last_lost_event, NULL); ++ self->stop_lost_events = FALSE; + break; + default: + break; +@@ -339,9 +531,48 @@ gst_rtp_vp8_depay_handle_event (GstRTPBaseDepayload * depay, GstEvent * event) + (gst_rtp_vp8_depay_parent_class)->handle_event (depay, event); + } + +-gboolean +-gst_rtp_vp8_depay_plugin_init (GstPlugin * plugin) ++static gboolean ++gst_rtp_vp8_depay_packet_lost (GstRTPBaseDepayload * depay, GstEvent * event) + { +- return gst_element_register (plugin, "rtpvp8depay", +- GST_RANK_MARGINAL, GST_TYPE_RTP_VP8_DEPAY); ++ GstRtpVP8Depay *self = GST_RTP_VP8_DEPAY (depay); ++ const GstStructure *s; ++ gboolean might_have_been_fec; ++ gboolean unref_event = FALSE; ++ gboolean ret; ++ ++ s = gst_event_get_structure (event); ++ ++ if (self->stop_lost_events) { ++ if (gst_structure_get_boolean (s, "might-have-been-fec", ++ &might_have_been_fec) ++ && might_have_been_fec) { ++ GST_DEBUG_OBJECT (depay, "Stopping lost event %" GST_PTR_FORMAT, event); ++ gst_event_replace (&self->last_lost_event, event); ++ return TRUE; ++ } ++ } else if (self->last_picture_id != PICTURE_ID_NONE) { ++ GstStructure *s; ++ ++ if (!gst_event_is_writable (event)) { ++ event = gst_event_copy (event); ++ unref_event = TRUE; ++ } ++ ++ s = gst_event_writable_structure (event); ++ ++ /* We are currently processing a picture, let's make sure the ++ * base depayloader doesn't drop this lost event */ ++ gst_structure_remove_field (s, "might-have-been-fec"); ++ } ++ ++ self->last_pushed_was_lost_event = TRUE; ++ ++ ret = ++ GST_RTP_BASE_DEPAYLOAD_CLASS ++ (gst_rtp_vp8_depay_parent_class)->packet_lost (depay, event); ++ ++ if (unref_event) ++ gst_event_unref (event); ++ ++ return ret; + } +diff --git a/gst/rtp/gstrtpvp8depay.h b/gst/rtp/gstrtpvp8depay.h +index cde8e5ea8..76be52adc 100644 +--- 
a/gst/rtp/gstrtpvp8depay.h ++++ b/gst/rtp/gstrtpvp8depay.h +@@ -54,18 +54,28 @@ struct _GstRtpVP8Depay + GstAdapter *adapter; + gboolean started; + ++ gboolean caps_sent; ++ /* In between pictures, we might store GstRTPPacketLost events instead ++ * of forwarding them immediately, we check upon reception of a new ++ * picture id whether a gap was introduced, in which case we do forward ++ * the event. This is to avoid forwarding spurious lost events for FEC ++ * packets. ++ */ ++ gboolean stop_lost_events; ++ GstEvent *last_lost_event; + gboolean waiting_for_keyframe; + gint last_profile; + gint last_width; + gint last_height; ++ guint last_picture_id; + + gboolean wait_for_keyframe; ++ gboolean request_keyframe; ++ gboolean last_pushed_was_lost_event; + }; + + GType gst_rtp_vp8_depay_get_type (void); + +-gboolean gst_rtp_vp8_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* #ifndef __GST_RTP_VP8_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpvp8pay.c b/gst/rtp/gstrtpvp8pay.c +index 4d6d20f7f..f4b4031d6 100644 +--- a/gst/rtp/gstrtpvp8pay.c ++++ b/gst/rtp/gstrtpvp8pay.c +@@ -31,6 +31,7 @@ + #include + #include + #include ++#include "gstrtpelements.h" + #include "dboolhuff.h" + #include "gstrtpvp8pay.h" + #include "gstrtputils.h" +@@ -39,11 +40,13 @@ GST_DEBUG_CATEGORY_STATIC (gst_rtp_vp8_pay_debug); + #define GST_CAT_DEFAULT gst_rtp_vp8_pay_debug + + #define DEFAULT_PICTURE_ID_MODE VP8_PAY_NO_PICTURE_ID ++#define DEFAULT_PICTURE_ID_OFFSET (-1) + + enum + { + PROP_0, +- PROP_PICTURE_ID_MODE ++ PROP_PICTURE_ID_MODE, ++ PROP_PICTURE_ID_OFFSET + }; + + #define GST_TYPE_RTP_VP8_PAY_PICTURE_ID_MODE (gst_rtp_vp8_pay_picture_id_mode_get_type()) +@@ -77,6 +80,8 @@ static gboolean gst_rtp_vp8_pay_set_caps (GstRTPBasePayload * payload, + GstCaps * caps); + + G_DEFINE_TYPE (GstRtpVP8Pay, gst_rtp_vp8_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvp8pay, "rtpvp8pay", ++ GST_RANK_MARGINAL, GST_TYPE_RTP_VP8_PAY, rtp_element_init (plugin)); + + static GstStaticPadTemplate gst_rtp_vp8_pay_src_template = + GST_STATIC_PAD_TEMPLATE ("src", +@@ -92,14 +97,59 @@ GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("video/x-vp8")); + ++static gint ++picture_id_field_len (PictureIDMode mode) ++{ ++ if (VP8_PAY_NO_PICTURE_ID == mode) ++ return 0; ++ if (VP8_PAY_PICTURE_ID_7BITS == mode) ++ return 7; ++ return 15; ++} ++ ++static void ++gst_rtp_vp8_pay_picture_id_reset (GstRtpVP8Pay * obj) ++{ ++ gint nbits; ++ ++ if (obj->picture_id_offset == -1) ++ obj->picture_id = g_random_int (); ++ else ++ obj->picture_id = obj->picture_id_offset; ++ ++ nbits = picture_id_field_len (obj->picture_id_mode); ++ obj->picture_id &= (1 << nbits) - 1; ++} ++ ++static void ++gst_rtp_vp8_pay_picture_id_increment (GstRtpVP8Pay * obj) ++{ ++ gint nbits; ++ ++ if (obj->picture_id_mode == VP8_PAY_NO_PICTURE_ID) ++ return; ++ ++ nbits = picture_id_field_len (obj->picture_id_mode); ++ obj->picture_id++; ++ obj->picture_id &= (1 << nbits) - 1; ++} ++ ++static void ++gst_rtp_vp8_pay_reset (GstRtpVP8Pay * obj) ++{ ++ gst_rtp_vp8_pay_picture_id_reset (obj); ++ /* tl0picidx MAY start at a random value, but there's no point. 
Initialize ++ * so that first packet will use 0 for convenience */ ++ obj->tl0picidx = -1; ++ obj->temporal_scalability_fields_present = FALSE; ++} ++ + static void + gst_rtp_vp8_pay_init (GstRtpVP8Pay * obj) + { + obj->picture_id_mode = DEFAULT_PICTURE_ID_MODE; +- if (obj->picture_id_mode == VP8_PAY_PICTURE_ID_7BITS) +- obj->picture_id = g_random_int_range (0, G_MAXUINT8) & 0x7F; +- else if (obj->picture_id_mode == VP8_PAY_PICTURE_ID_15BITS) +- obj->picture_id = g_random_int_range (0, G_MAXUINT16) & 0x7FFF; ++ obj->picture_id_offset = DEFAULT_PICTURE_ID_OFFSET; ++ gst_rtp_vp8_pay_reset (obj); + } + + static void +@@ -118,6 +168,18 @@ gst_rtp_vp8_pay_class_init (GstRtpVP8PayClass * gst_rtp_vp8_pay_class) + "The picture ID mode for payloading", + GST_TYPE_RTP_VP8_PAY_PICTURE_ID_MODE, DEFAULT_PICTURE_ID_MODE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ /** ++ * rtpvp8pay:picture-id-offset: ++ * ++ * Offset to add to the initial picture-id (-1 = random) ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_PICTURE_ID_OFFSET, ++ g_param_spec_int ("picture-id-offset", "Picture ID offset", ++ "Offset to add to the initial picture-id (-1 = random)", ++ -1, 0x7FFF, DEFAULT_PICTURE_ID_OFFSET, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + gst_element_class_add_static_pad_template (element_class, + &gst_rtp_vp8_pay_sink_template); +@@ -147,10 +209,11 @@ gst_rtp_vp8_pay_set_property (GObject * object, + switch (prop_id) { + case PROP_PICTURE_ID_MODE: + rtpvp8pay->picture_id_mode = g_value_get_enum (value); +- if (rtpvp8pay->picture_id_mode == VP8_PAY_PICTURE_ID_7BITS) +- rtpvp8pay->picture_id = g_random_int_range (0, G_MAXUINT8) & 0x7F; +- else if (rtpvp8pay->picture_id_mode == VP8_PAY_PICTURE_ID_15BITS) +- rtpvp8pay->picture_id = g_random_int_range (0, G_MAXUINT16) & 0x7FFF; ++ gst_rtp_vp8_pay_picture_id_reset (rtpvp8pay); ++ break; ++ case PROP_PICTURE_ID_OFFSET: ++ rtpvp8pay->picture_id_offset = g_value_get_int (value); ++ gst_rtp_vp8_pay_picture_id_reset (rtpvp8pay); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); +@@ -168,6 +231,9 @@ gst_rtp_vp8_pay_get_property (GObject * object, + case PROP_PICTURE_ID_MODE: + g_value_set_enum (value, rtpvp8pay->picture_id_mode); + break; ++ case PROP_PICTURE_ID_OFFSET: ++ g_value_set_int (value, rtpvp8pay->picture_id_offset); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -339,6 +405,10 @@ gst_rtp_vp8_pay_parse_frame (GstRtpVP8Pay * self, GstBuffer * buffer, + self->partition_offset[i + 1] = size; + + gst_buffer_unmap (buffer, &map); ++ ++ if (keyframe) ++ GST_DEBUG_OBJECT (self, "Parsed keyframe"); ++ + return TRUE; + + error: +@@ -365,47 +435,121 @@ gst_rtp_vp8_offset_to_partition (GstRtpVP8Pay * self, guint offset) + static gsize + gst_rtp_vp8_calc_header_len (GstRtpVP8Pay * self) + { ++ gsize len; ++ + switch (self->picture_id_mode) { + case VP8_PAY_PICTURE_ID_7BITS: +- return 3; ++ len = 1; ++ break; + case VP8_PAY_PICTURE_ID_15BITS: +- return 4; ++ len = 2; ++ break; + case VP8_PAY_NO_PICTURE_ID: + default: +- return 1; ++ len = 0; ++ break; ++ } ++ ++ if (self->temporal_scalability_fields_present) { ++ /* Add on space for TL0PICIDX and TID/Y/KEYIDX */ ++ len += 2; ++ } ++ ++ if (len > 0) { ++ /* All fields above are extension, so allocate space for the ECB field */ ++ len++; + } ++ ++ return len + 1; /* computed + fixed size header */ + } + + /* When growing the vp8 header keep max payload len calculation in sync */ + static 
GstBuffer * + gst_rtp_vp8_create_header_buffer (GstRtpVP8Pay * self, guint8 partid, +- gboolean start, gboolean mark, GstBuffer * in) ++ gboolean start, gboolean mark, GstBuffer * in, GstCustomMeta * meta) + { + GstBuffer *out; + guint8 *p; + GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT; + +- out = gst_rtp_buffer_new_allocate (gst_rtp_vp8_calc_header_len (self), 0, 0); ++ out = ++ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD_CAST ++ (self), gst_rtp_vp8_calc_header_len (self), 0, 0); + gst_rtp_buffer_map (out, GST_MAP_READWRITE, &rtpbuffer); + p = gst_rtp_buffer_get_payload (&rtpbuffer); ++ + /* X=0,R=0,N=0,S=start,PartID=partid */ + p[0] = (start << 4) | partid; +- if (self->picture_id_mode != VP8_PAY_NO_PICTURE_ID) { ++ if (GST_BUFFER_FLAG_IS_SET (in, GST_BUFFER_FLAG_DROPPABLE)) { ++ /* Enable N=1 */ ++ p[0] |= 0x20; ++ } ++ ++ if (self->picture_id_mode != VP8_PAY_NO_PICTURE_ID || ++ self->temporal_scalability_fields_present) { ++ gint index; ++ + /* Enable X=1 */ + p[0] |= 0x80; +- /* X: I=1,L=0,T=0,K=0,RSV=0 */ +- p[1] = 0x80; ++ ++ /* X: I=0,L=0,T=0,K=0,RSV=0 */ ++ p[1] = 0x00; ++ if (self->picture_id_mode != VP8_PAY_NO_PICTURE_ID) { ++ /* Set I bit */ ++ p[1] |= 0x80; ++ } ++ if (self->temporal_scalability_fields_present) { ++ /* Set L and T bits */ ++ p[1] |= 0x60; ++ } ++ ++ /* Insert picture ID */ + if (self->picture_id_mode == VP8_PAY_PICTURE_ID_7BITS) { + /* I: 7 bit picture_id */ + p[2] = self->picture_id & 0x7F; +- } else { ++ index = 3; ++ } else if (self->picture_id_mode == VP8_PAY_PICTURE_ID_15BITS) { + /* I: 15 bit picture_id */ + p[2] = 0x80 | ((self->picture_id & 0x7FFF) >> 8); + p[3] = self->picture_id & 0xFF; ++ index = 4; ++ } else { ++ index = 2; ++ } ++ ++ /* Insert TL0PICIDX and TID/Y/KEYIDX */ ++ if (self->temporal_scalability_fields_present) { ++ /* The meta contains tl0picidx from the encoder, but we need to ensure ++ * that tl0picidx is increasing correctly. The encoder may reset it's ++ * state and counter, but we cannot. 
Therefore, we cannot simply copy ++ * the value into the header.*/ ++ guint temporal_layer = 0; ++ gboolean layer_sync = FALSE; ++ gboolean use_temporal_scaling = FALSE; ++ ++ if (meta) { ++ GstStructure *s = gst_custom_meta_get_structure (meta); ++ gst_structure_get_boolean (s, "use-temporal-scaling", ++ &use_temporal_scaling); ++ ++ if (use_temporal_scaling) ++ gst_structure_get (s, "layer-id", G_TYPE_UINT, &temporal_layer, ++ "layer-sync", G_TYPE_BOOLEAN, &layer_sync, NULL); ++ } ++ ++ /* FIXME: Support a prediction structure where higher layers don't ++ * necessarily refer to the last base layer frame, ie they use an older ++ * tl0picidx as signalled in the meta */ ++ if (temporal_layer == 0 && start) ++ self->tl0picidx++; ++ p[index] = self->tl0picidx & 0xFF; ++ p[index + 1] = ((temporal_layer << 6) | (layer_sync << 5)) & 0xFF; + } + } + + gst_rtp_buffer_set_marker (&rtpbuffer, mark); ++ if (mark) ++ GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_MARKER); + + gst_rtp_buffer_unmap (&rtpbuffer); + +@@ -415,15 +559,38 @@ gst_rtp_vp8_create_header_buffer (GstRtpVP8Pay * self, guint8 partid, + return out; + } + ++static gboolean ++foreach_metadata_drop (GstBuffer * buf, GstMeta ** meta, gpointer user_data) ++{ ++ GstElement *element = user_data; ++ const GstMetaInfo *info = (*meta)->info; ++ ++ if (gst_meta_info_is_custom (info) && ++ gst_custom_meta_has_name ((GstCustomMeta *) * meta, "GstVP8Meta")) { ++ GST_DEBUG_OBJECT (element, "dropping GstVP8Meta"); ++ *meta = NULL; ++ } ++ ++ return TRUE; ++} ++ ++static void ++gst_rtp_vp8_drop_vp8_meta (gpointer element, GstBuffer * buf) ++{ ++ gst_buffer_foreach_meta (buf, foreach_metadata_drop, element); ++} ++ + static guint + gst_rtp_vp8_payload_next (GstRtpVP8Pay * self, GstBufferList * list, +- guint offset, GstBuffer * buffer, gsize buffer_size, gsize max_payload_len) ++ guint offset, GstBuffer * buffer, gsize buffer_size, gsize max_payload_len, ++ GstCustomMeta * meta) + { + guint partition; + GstBuffer *header; + GstBuffer *sub; + GstBuffer *out; + gboolean mark; ++ gboolean start; + gsize remaining; + gsize available; + +@@ -432,16 +599,27 @@ gst_rtp_vp8_payload_next (GstRtpVP8Pay * self, GstBufferList * list, + if (available > remaining) + available = remaining; + +- partition = gst_rtp_vp8_offset_to_partition (self, offset); +- g_assert (partition < self->n_partitions); ++ if (meta) { ++ /* If meta is present, then we have no partition offset information, ++ * so always emit PID 0 and set the start bit for the first packet ++ * of a frame only (c.f. 
RFC7741 $4.4) ++ */ ++ partition = 0; ++ start = (offset == 0); ++ } else { ++ partition = gst_rtp_vp8_offset_to_partition (self, offset); ++ g_assert (partition < self->n_partitions); ++ start = (offset == self->partition_offset[partition]); ++ } + + mark = (remaining == available); + /* whole set of partitions, payload them and done */ + header = gst_rtp_vp8_create_header_buffer (self, partition, +- offset == self->partition_offset[partition], mark, buffer); ++ start, mark, buffer, meta); + sub = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset, available); + + gst_rtp_copy_video_meta (self, header, buffer); ++ gst_rtp_vp8_drop_vp8_meta (self, header); + + out = gst_buffer_append (header, sub); + +@@ -457,37 +635,48 @@ gst_rtp_vp8_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer) + GstRtpVP8Pay *self = GST_RTP_VP8_PAY (payload); + GstFlowReturn ret; + GstBufferList *list; ++ GstCustomMeta *meta; + gsize size, max_paylen; + guint offset, mtu, vp8_hdr_len; + + size = gst_buffer_get_size (buffer); +- ++ meta = gst_buffer_get_custom_meta (buffer, "GstVP8Meta"); + if (G_UNLIKELY (!gst_rtp_vp8_pay_parse_frame (self, buffer, size))) { + GST_ELEMENT_ERROR (self, STREAM, ENCODE, (NULL), + ("Failed to parse VP8 frame")); + return GST_FLOW_ERROR; + } + ++ if (meta) { ++ GstStructure *s = gst_custom_meta_get_structure (meta); ++ gboolean use_temporal_scaling; ++ /* For interop it's most likely better to keep the temporal scalability ++ * fields present if the stream previously had them present. Alternating ++ * whether these fields are present or not may confuse the receiver. */ ++ ++ gst_structure_get_boolean (s, "use-temporal-scaling", ++ &use_temporal_scaling); ++ if (use_temporal_scaling) ++ self->temporal_scalability_fields_present = TRUE; ++ } ++ + mtu = GST_RTP_BASE_PAYLOAD_MTU (payload); + vp8_hdr_len = gst_rtp_vp8_calc_header_len (self); +- max_paylen = gst_rtp_buffer_calc_payload_len (mtu - vp8_hdr_len, 0, 0); ++ max_paylen = gst_rtp_buffer_calc_payload_len (mtu - vp8_hdr_len, 0, ++ gst_rtp_base_payload_get_source_count (payload, buffer)); + + list = gst_buffer_list_new_sized ((size / max_paylen) + 1); + + offset = 0; + while (offset < size) { + offset += +- gst_rtp_vp8_payload_next (self, list, offset, buffer, size, max_paylen); ++ gst_rtp_vp8_payload_next (self, list, offset, buffer, size, ++ max_paylen, meta); + } + + ret = gst_rtp_base_payload_push_list (payload, list); + +- /* Incremenent and wrap the picture id if it overflows */ +- if ((self->picture_id_mode == VP8_PAY_PICTURE_ID_7BITS && +- ++self->picture_id >= 0x80) || +- (self->picture_id_mode == VP8_PAY_PICTURE_ID_15BITS && +- ++self->picture_id >= 0x8000)) +- self->picture_id = 0; ++ gst_rtp_vp8_pay_picture_id_increment (self); + + gst_buffer_unref (buffer); + +@@ -500,10 +689,7 @@ gst_rtp_vp8_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event) + GstRtpVP8Pay *self = GST_RTP_VP8_PAY (payload); + + if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_START) { +- if (self->picture_id_mode == VP8_PAY_PICTURE_ID_7BITS) +- self->picture_id = g_random_int_range (0, G_MAXUINT8) & 0x7F; +- else if (self->picture_id_mode == VP8_PAY_PICTURE_ID_15BITS) +- self->picture_id = g_random_int_range (0, G_MAXUINT16) & 0x7FFF; ++ gst_rtp_vp8_pay_reset (self); + } + + return GST_RTP_BASE_PAYLOAD_CLASS (gst_rtp_vp8_pay_parent_class)->sink_event +@@ -541,10 +727,3 @@ gst_rtp_vp8_pay_set_caps (GstRTPBasePayload * payload, GstCaps * caps) + + return gst_rtp_base_payload_set_outcaps (payload, NULL); + } +- +-gboolean 
+-gst_rtp_vp8_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpvp8pay", +- GST_RANK_MARGINAL, GST_TYPE_RTP_VP8_PAY); +-} +diff --git a/gst/rtp/gstrtpvp8pay.h b/gst/rtp/gstrtpvp8pay.h +index 247206067..30ad99a67 100644 +--- a/gst/rtp/gstrtpvp8pay.h ++++ b/gst/rtp/gstrtpvp8pay.h +@@ -62,13 +62,14 @@ struct _GstRtpVP8Pay + guint partition_offset[10]; + guint partition_size[9]; + PictureIDMode picture_id_mode; ++ gint picture_id_offset; + guint16 picture_id; ++ gboolean temporal_scalability_fields_present; ++ guint8 tl0picidx; + }; + + GType gst_rtp_vp8_pay_get_type (void); + +-gboolean gst_rtp_vp8_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* #ifndef __GST_RTP_VP8_PAY_H__ */ +diff --git a/gst/rtp/gstrtpvp9depay.c b/gst/rtp/gstrtpvp9depay.c +index c61affca5..94348c5c8 100644 +--- a/gst/rtp/gstrtpvp9depay.c ++++ b/gst/rtp/gstrtpvp9depay.c +@@ -23,6 +23,7 @@ + # include "config.h" + #endif + ++#include "gstrtpelements.h" + #include "gstrtpvp9depay.h" + #include "gstrtputils.h" + +@@ -34,14 +35,22 @@ GST_DEBUG_CATEGORY_STATIC (gst_rtp_vp9_depay_debug); + #define GST_CAT_DEFAULT gst_rtp_vp9_depay_debug + + static void gst_rtp_vp9_depay_dispose (GObject * object); ++static void gst_rtp_vp9_depay_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec); ++static void gst_rtp_vp9_depay_set_property (GObject * object, guint prop_id, ++ const GValue * value, GParamSpec * pspec); + static GstBuffer *gst_rtp_vp9_depay_process (GstRTPBaseDepayload * depayload, + GstRTPBuffer * rtp); + static GstStateChangeReturn gst_rtp_vp9_depay_change_state (GstElement * + element, GstStateChange transition); + static gboolean gst_rtp_vp9_depay_handle_event (GstRTPBaseDepayload * depay, + GstEvent * event); ++static gboolean gst_rtp_vp9_depay_packet_lost (GstRTPBaseDepayload * depay, ++ GstEvent * event); + + G_DEFINE_TYPE (GstRtpVP9Depay, gst_rtp_vp9_depay, GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvp9depay, "rtpvp9depay", ++ GST_RANK_MARGINAL, GST_TYPE_RTP_VP9_DEPAY, rtp_element_init (plugin)); + + static GstStaticPadTemplate gst_rtp_vp9_depay_src_template = + GST_STATIC_PAD_TEMPLATE ("src", +@@ -58,11 +67,27 @@ GST_STATIC_PAD_TEMPLATE ("sink", + "media = (string) \"video\"," + "encoding-name = (string) { \"VP9\", \"VP9-DRAFT-IETF-01\" }")); + ++#define DEFAULT_WAIT_FOR_KEYFRAME FALSE ++#define DEFAULT_REQUEST_KEYFRAME FALSE ++ ++enum ++{ ++ PROP_0, ++ PROP_WAIT_FOR_KEYFRAME, ++ PROP_REQUEST_KEYFRAME, ++}; ++ ++#define PICTURE_ID_NONE (UINT_MAX) ++#define IS_PICTURE_ID_15BITS(pid) (((guint)(pid) & 0x8000) != 0) ++ + static void + gst_rtp_vp9_depay_init (GstRtpVP9Depay * self) + { + self->adapter = gst_adapter_new (); + self->started = FALSE; ++ self->inter_picture = FALSE; ++ self->wait_for_keyframe = DEFAULT_WAIT_FOR_KEYFRAME; ++ self->request_keyframe = DEFAULT_REQUEST_KEYFRAME; + } + + static void +@@ -84,11 +109,40 @@ gst_rtp_vp9_depay_class_init (GstRtpVP9DepayClass * gst_rtp_vp9_depay_class) + "Extracts VP9 video from RTP packets)", "Stian Selnes "); + + object_class->dispose = gst_rtp_vp9_depay_dispose; ++ object_class->set_property = gst_rtp_vp9_depay_set_property; ++ object_class->get_property = gst_rtp_vp9_depay_get_property; ++ ++ /** ++ * GstRtpVP9Depay:wait-for-keyframe: ++ * ++ * Wait for the next keyframe after packet loss ++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (object_class, PROP_WAIT_FOR_KEYFRAME, ++ g_param_spec_boolean ("wait-for-keyframe", "Wait for 
Keyframe", ++ "Wait for the next keyframe after packet loss", ++ DEFAULT_WAIT_FOR_KEYFRAME, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstRtpVP9Depay:request-keyframe: ++ * ++ * Request new keyframe when packet loss is detected ++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (object_class, PROP_REQUEST_KEYFRAME, ++ g_param_spec_boolean ("request-keyframe", "Request Keyframe", ++ "Request new keyframe when packet loss is detected", ++ DEFAULT_REQUEST_KEYFRAME, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + element_class->change_state = gst_rtp_vp9_depay_change_state; + + depay_class->process_rtp_packet = gst_rtp_vp9_depay_process; + depay_class->handle_event = gst_rtp_vp9_depay_handle_event; ++ depay_class->packet_lost = gst_rtp_vp9_depay_packet_lost; + + GST_DEBUG_CATEGORY_INIT (gst_rtp_vp9_depay_debug, "rtpvp9depay", 0, + "VP9 Video RTP Depayloader"); +@@ -109,6 +163,106 @@ gst_rtp_vp9_depay_dispose (GObject * object) + G_OBJECT_CLASS (gst_rtp_vp9_depay_parent_class)->dispose (object); + } + ++static void ++gst_rtp_vp9_depay_set_property (GObject * object, guint prop_id, ++ const GValue * value, GParamSpec * pspec) ++{ ++ GstRtpVP9Depay *self = GST_RTP_VP9_DEPAY (object); ++ ++ switch (prop_id) { ++ case PROP_WAIT_FOR_KEYFRAME: ++ self->wait_for_keyframe = g_value_get_boolean (value); ++ break; ++ case PROP_REQUEST_KEYFRAME: ++ self->request_keyframe = g_value_get_boolean (value); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_rtp_vp9_depay_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec) ++{ ++ GstRtpVP9Depay *self = GST_RTP_VP9_DEPAY (object); ++ ++ switch (prop_id) { ++ case PROP_WAIT_FOR_KEYFRAME: ++ g_value_set_boolean (value, self->wait_for_keyframe); ++ break; ++ case PROP_REQUEST_KEYFRAME: ++ g_value_set_boolean (value, self->request_keyframe); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++ ++static gint ++picture_id_compare (guint16 id0, guint16 id1) ++{ ++ guint shift = 16 - (IS_PICTURE_ID_15BITS (id1) ? 
15 : 7); ++ id0 = id0 << shift; ++ id1 = id1 << shift; ++ return ((gint16) (id1 - id0)) >> shift; ++} ++ ++static void ++send_last_lost_event (GstRtpVP9Depay * self) ++{ ++ if (self->last_lost_event) { ++ GST_DEBUG_OBJECT (self, ++ "Sending the last stopped lost event: %" GST_PTR_FORMAT, ++ self->last_lost_event); ++ GST_RTP_BASE_DEPAYLOAD_CLASS (gst_rtp_vp9_depay_parent_class) ++ ->packet_lost (GST_RTP_BASE_DEPAYLOAD_CAST (self), ++ self->last_lost_event); ++ gst_event_replace (&self->last_lost_event, NULL); ++ } ++} ++ ++static void ++send_last_lost_event_if_needed (GstRtpVP9Depay * self, guint new_picture_id) ++{ ++ if (self->last_picture_id == PICTURE_ID_NONE || ++ self->last_picture_id == new_picture_id) ++ return; ++ ++ if (self->last_lost_event) { ++ gboolean send_lost_event = FALSE; ++ if (new_picture_id == PICTURE_ID_NONE) { ++ GST_DEBUG_OBJECT (self, "Dropping the last stopped lost event " ++ "(picture id does not exist): %" GST_PTR_FORMAT, ++ self->last_lost_event); ++ } else if (IS_PICTURE_ID_15BITS (self->last_picture_id) && ++ !IS_PICTURE_ID_15BITS (new_picture_id)) { ++ GST_DEBUG_OBJECT (self, "Dropping the last stopped lost event " ++ "(picture id has less bits than before): %" GST_PTR_FORMAT, ++ self->last_lost_event); ++ } else if (picture_id_compare (self->last_picture_id, new_picture_id) != 1) { ++ GstStructure *s = gst_event_writable_structure (self->last_lost_event); ++ ++ GST_DEBUG_OBJECT (self, "Sending the last stopped lost event " ++ "(gap in picture id %u %u): %" GST_PTR_FORMAT, ++ self->last_picture_id, new_picture_id, self->last_lost_event); ++ send_lost_event = TRUE; ++ /* Prevent rtpbasedepayload from dropping the event now ++ * that we have made sure the lost packet was not FEC */ ++ gst_structure_remove_field (s, "might-have-been-fec"); ++ } ++ if (send_lost_event) ++ GST_RTP_BASE_DEPAYLOAD_CLASS (gst_rtp_vp9_depay_parent_class) ++ ->packet_lost (GST_RTP_BASE_DEPAYLOAD_CAST (self), ++ self->last_lost_event); ++ ++ gst_event_replace (&self->last_lost_event, NULL); ++ } ++} ++ + static GstBuffer * + gst_rtp_vp9_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp) + { +@@ -118,12 +272,16 @@ gst_rtp_vp9_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp) + guint hdrsize = 1; + guint size; + gint spatial_layer = 0; +- gboolean i_bit, p_bit, l_bit, f_bit, b_bit, e_bit, v_bit; ++ guint picture_id = PICTURE_ID_NONE; ++ gboolean i_bit, p_bit, l_bit, f_bit, b_bit, e_bit, v_bit, d_bit = 0; ++ gboolean is_start_of_picture; ++ gboolean flushed_adapter = FALSE; + + if (G_UNLIKELY (GST_BUFFER_IS_DISCONT (rtp->buffer))) { + GST_LOG_OBJECT (self, "Discontinuity, flushing adapter"); + gst_adapter_clear (self->adapter); + self->started = FALSE; ++ flushed_adapter = TRUE; + } + + size = gst_rtp_buffer_get_payload_len (rtp); +@@ -141,14 +299,6 @@ gst_rtp_vp9_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp) + e_bit = (data[0] & 0x04) != 0; + v_bit = (data[0] & 0x02) != 0; + +- if (G_UNLIKELY (!self->started)) { +- /* Check if this is the start of a VP9 layer frame, otherwise bail */ +- if (!b_bit) +- goto done; +- +- self->started = TRUE; +- } +- + GST_TRACE_OBJECT (self, "IPLFBEV : %d%d%d%d%d%d%d", i_bit, p_bit, l_bit, + f_bit, b_bit, e_bit, v_bit); + +@@ -157,16 +307,31 @@ gst_rtp_vp9_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp) + hdrsize++; + if (G_UNLIKELY (size < hdrsize + 1)) + goto too_small; ++ picture_id = data[1]; + /* Check M for 15 bits PictureID */ + if ((data[1] & 0x80) != 0) { + hdrsize++; + if (G_UNLIKELY 
(size < hdrsize + 1)) + goto too_small; ++ picture_id = (picture_id << 8) | data[2]; + } + } + + /* Check L optional header layer indices */ + if (l_bit) { ++ spatial_layer = (data[hdrsize] >> 1) & 0x07; ++ d_bit = (data[hdrsize] >> 0) & 0x01; ++ GST_TRACE_OBJECT (self, "TID=%d, U=%d, SID=%d, D=%d", ++ (data[hdrsize] >> 5) & 0x07, (data[hdrsize] >> 4) & 0x01, ++ (data[hdrsize] >> 1) & 0x07, (data[hdrsize] >> 0) & 0x01); ++ ++ if (spatial_layer == 0 && d_bit != 0) { ++ /* Invalid according to draft-ietf-payload-vp9-06, but firefox 61 and ++ * chrome 66 sends enchanment layers with SID=0, so let's not drop the ++ * packet. */ ++ GST_LOG_OBJECT (self, "Invalid inter-layer dependency for base layer"); ++ } ++ + hdrsize++; + /* Check TL0PICIDX temporal layer zero index (non-flexible mode) */ + if (!f_bit) +@@ -245,24 +410,82 @@ gst_rtp_vp9_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp) + hdrsize += sssize; + } + +- GST_DEBUG_OBJECT (depay, "hdrsize %u, size %u", hdrsize, size); ++ GST_DEBUG_OBJECT (depay, "hdrsize %u, size %u, picture id 0x%x", ++ hdrsize, size, picture_id); + + if (G_UNLIKELY (hdrsize >= size)) + goto too_small; + ++ is_start_of_picture = b_bit && (!l_bit || !d_bit); ++ /* If this is a start frame AND we are already processing a frame, we need to flush and wait for next start frame */ ++ if (is_start_of_picture) { ++ if (G_UNLIKELY (self->started)) { ++ GST_DEBUG_OBJECT (depay, "Incomplete frame, flushing adapter"); ++ gst_adapter_clear (self->adapter); ++ self->started = FALSE; ++ flushed_adapter = TRUE; ++ } ++ } ++ ++ if (G_UNLIKELY (!self->started)) { ++ self->inter_picture = FALSE; ++ ++ /* We have flushed the adapter and this packet does not ++ * start a keyframe, request one if needed */ ++ if (flushed_adapter && (!b_bit || p_bit)) { ++ if (self->wait_for_keyframe) { ++ GST_DEBUG_OBJECT (self, "Waiting for keyframe after flushing adapter"); ++ self->waiting_for_keyframe = TRUE; ++ } ++ ++ if (self->request_keyframe) { ++ GST_DEBUG_OBJECT (self, "Requesting keyframe after flushing adapter"); ++ gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depay), ++ gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE, ++ TRUE, 0)); ++ } ++ } ++ ++ /* Check if this is the start of a VP9 layer frame, otherwise bail */ ++ if (!b_bit) { ++ GST_DEBUG_OBJECT (depay, ++ "The layer is missing the first packets, ignoring the packet"); ++ if (self->stop_lost_events) { ++ send_last_lost_event (self); ++ self->stop_lost_events = FALSE; ++ } ++ goto done; ++ } ++ ++ GST_DEBUG_OBJECT (depay, "Found the start of the layer"); ++ if (self->stop_lost_events) { ++ send_last_lost_event_if_needed (self, picture_id); ++ self->stop_lost_events = FALSE; ++ } ++ self->started = TRUE; ++ } ++ + payload = gst_rtp_buffer_get_payload_subbuffer (rtp, hdrsize, -1); +- { ++ if (GST_LEVEL_MEMDUMP <= gst_debug_category_get_threshold (GST_CAT_DEFAULT)) { + GstMapInfo map; + gst_buffer_map (payload, &map, GST_MAP_READ); + GST_MEMDUMP_OBJECT (self, "vp9 payload", map.data, 16); + gst_buffer_unmap (payload, &map); + } + gst_adapter_push (self->adapter, payload); ++ self->last_picture_id = picture_id; ++ self->inter_picture |= p_bit; + +- /* Marker indicates that it was the last rtp packet for this frame */ ++ /* Marker indicates that it was the last rtp packet for this picture. Note ++ * that if spatial scalability is used, e_bit will be set for the last ++ * packet of a frame while the marker bit is not set until the last packet ++ * of the picture. 
*/ + if (gst_rtp_buffer_get_marker (rtp)) { + GstBuffer *out; +- gboolean key_frame_first_layer = !p_bit && spatial_layer == 0; ++ ++ GST_DEBUG_OBJECT (depay, ++ "Found the end of the frame (%" G_GSIZE_FORMAT " bytes)", ++ gst_adapter_available (self->adapter)); + + if (gst_adapter_available (self->adapter) < 10) + goto too_small; +@@ -276,10 +499,10 @@ gst_rtp_vp9_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp) + out = gst_buffer_make_writable (out); + /* Filter away all metas that are not sensible to copy */ + gst_rtp_drop_non_video_meta (self, out); +- if (!key_frame_first_layer) { ++ if (self->inter_picture) { + GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DELTA_UNIT); + +- if (!self->caps_sent) { ++ if (self->waiting_for_keyframe) { + gst_buffer_unref (out); + out = NULL; + GST_INFO_OBJECT (self, "Dropping inter-frame before intra-frame"); +@@ -309,14 +532,17 @@ gst_rtp_vp9_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp) + gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), srccaps); + gst_caps_unref (srccaps); + +- self->caps_sent = TRUE; + self->last_width = self->ss_width; + self->last_height = self->ss_height; + self->ss_width = 0; + self->ss_height = 0; + } ++ ++ self->waiting_for_keyframe = FALSE; + } + ++ if (picture_id != PICTURE_ID_NONE) ++ self->stop_lost_events = TRUE; + return out; + } + +@@ -327,7 +553,6 @@ too_small: + GST_LOG_OBJECT (self, "Invalid rtp packet (too small), ignoring"); + gst_adapter_clear (self->adapter); + self->started = FALSE; +- + goto done; + } + +@@ -340,7 +565,10 @@ gst_rtp_vp9_depay_change_state (GstElement * element, GstStateChange transition) + case GST_STATE_CHANGE_READY_TO_PAUSED: + self->last_width = -1; + self->last_height = -1; +- self->caps_sent = FALSE; ++ self->last_picture_id = PICTURE_ID_NONE; ++ gst_event_replace (&self->last_lost_event, NULL); ++ self->stop_lost_events = FALSE; ++ self->waiting_for_keyframe = TRUE; + break; + default: + break; +@@ -360,6 +588,9 @@ gst_rtp_vp9_depay_handle_event (GstRTPBaseDepayload * depay, GstEvent * event) + case GST_EVENT_FLUSH_STOP: + self->last_width = -1; + self->last_height = -1; ++ self->last_picture_id = PICTURE_ID_NONE; ++ gst_event_replace (&self->last_lost_event, NULL); ++ self->stop_lost_events = FALSE; + break; + default: + break; +@@ -370,9 +601,32 @@ gst_rtp_vp9_depay_handle_event (GstRTPBaseDepayload * depay, GstEvent * event) + (gst_rtp_vp9_depay_parent_class)->handle_event (depay, event); + } + +-gboolean +-gst_rtp_vp9_depay_plugin_init (GstPlugin * plugin) ++static gboolean ++gst_rtp_vp9_depay_packet_lost (GstRTPBaseDepayload * depay, GstEvent * event) + { +- return gst_element_register (plugin, "rtpvp9depay", +- GST_RANK_MARGINAL, GST_TYPE_RTP_VP9_DEPAY); ++ GstRtpVP9Depay *self = GST_RTP_VP9_DEPAY (depay); ++ const GstStructure *s; ++ gboolean might_have_been_fec; ++ ++ s = gst_event_get_structure (event); ++ ++ if (self->stop_lost_events) { ++ if (gst_structure_get_boolean (s, "might-have-been-fec", ++ &might_have_been_fec) ++ && might_have_been_fec) { ++ GST_DEBUG_OBJECT (depay, "Stopping lost event %" GST_PTR_FORMAT, event); ++ gst_event_replace (&self->last_lost_event, event); ++ return TRUE; ++ } ++ } else if (self->last_picture_id != PICTURE_ID_NONE) { ++ GstStructure *s = gst_event_writable_structure (self->last_lost_event); ++ ++ /* We are currently processing a picture, let's make sure the ++ * base depayloader doesn't drop this lost event */ ++ gst_structure_remove_field (s, "might-have-been-fec"); ++ } ++ ++ return ++ 
GST_RTP_BASE_DEPAYLOAD_CLASS ++ (gst_rtp_vp9_depay_parent_class)->packet_lost (depay, event); + } +diff --git a/gst/rtp/gstrtpvp9depay.h b/gst/rtp/gstrtpvp9depay.h +index 6f783ab65..8c98a5bf1 100644 +--- a/gst/rtp/gstrtpvp9depay.h ++++ b/gst/rtp/gstrtpvp9depay.h +@@ -59,13 +59,26 @@ struct _GstRtpVP9Depay + gint ss_height; + gint last_width; + gint last_height; +- gboolean caps_sent; ++ guint last_picture_id; ++ GstEvent *last_lost_event; ++ /* In between pictures, we might store GstRTPPacketLost events instead ++ * of forwarding them immediately, we check upon reception of a new ++ * picture id whether a gap was introduced, in which case we do forward ++ * the event. This is to avoid forwarding spurious lost events for FEC ++ * packets. ++ */ ++ gboolean stop_lost_events; ++ gboolean inter_picture; ++ ++ gboolean waiting_for_keyframe; ++ ++ /* Properties */ ++ gboolean wait_for_keyframe; ++ gboolean request_keyframe; + }; + + GType gst_rtp_vp9_depay_get_type (void); + +-gboolean gst_rtp_vp9_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* #ifndef __GST_RTP_VP9_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpvp9pay.c b/gst/rtp/gstrtpvp9pay.c +index ce79c6463..0d29ac992 100644 +--- a/gst/rtp/gstrtpvp9pay.c ++++ b/gst/rtp/gstrtpvp9pay.c +@@ -32,6 +32,7 @@ + #include + #include + #include ++#include "gstrtpelements.h" + #include "dboolhuff.h" + #include "gstrtpvp9pay.h" + #include "gstrtputils.h" +@@ -78,6 +79,8 @@ static gboolean gst_rtp_vp9_pay_set_caps (GstRTPBasePayload * payload, + GstCaps * caps); + + G_DEFINE_TYPE (GstRtpVP9Pay, gst_rtp_vp9_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvp9pay, "rtpvp9pay", ++ GST_RANK_MARGINAL, GST_TYPE_RTP_VP9_PAY, rtp_element_init (plugin)); + + static GstStaticPadTemplate gst_rtp_vp9_pay_src_template = + GST_STATIC_PAD_TEMPLATE ("src", +@@ -386,7 +389,9 @@ gst_rtp_vp9_create_header_buffer (GstRtpVP9Pay * self, + guint off = 1; + guint hdrlen = gst_rtp_vp9_calc_header_len (self, start); + +- out = gst_rtp_buffer_new_allocate (hdrlen, 0, 0); ++ out = ++ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD (self), ++ hdrlen, 0, 0); + gst_rtp_buffer_map (out, GST_MAP_READWRITE, &rtpbuffer); + p = gst_rtp_buffer_get_payload (&rtpbuffer); + p[0] = 0x0; +@@ -427,6 +432,8 @@ gst_rtp_vp9_create_header_buffer (GstRtpVP9Pay * self, + g_assert_cmpint (off, ==, hdrlen); + + gst_rtp_buffer_set_marker (&rtpbuffer, mark); ++ if (mark) ++ GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_MARKER); + + gst_rtp_buffer_unmap (&rtpbuffer); + +@@ -556,10 +563,3 @@ gst_rtp_vp9_pay_set_caps (GstRTPBasePayload * payload, GstCaps * caps) + + return gst_rtp_base_payload_set_outcaps (payload, NULL); + } +- +-gboolean +-gst_rtp_vp9_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpvp9pay", +- GST_RANK_MARGINAL, GST_TYPE_RTP_VP9_PAY); +-} +diff --git a/gst/rtp/gstrtpvp9pay.h b/gst/rtp/gstrtpvp9pay.h +index fc2aa8efd..407e3e08c 100644 +--- a/gst/rtp/gstrtpvp9pay.h ++++ b/gst/rtp/gstrtpvp9pay.h +@@ -65,8 +65,6 @@ struct _GstRtpVP9Pay + + GType gst_rtp_vp9_pay_get_type (void); + +-gboolean gst_rtp_vp9_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* #ifndef __GST_RTP_VP9_PAY_H__ */ +diff --git a/gst/rtp/gstrtpvrawdepay.c b/gst/rtp/gstrtpvrawdepay.c +index d679f6879..d3bb5af05 100644 +--- a/gst/rtp/gstrtpvrawdepay.c ++++ b/gst/rtp/gstrtpvrawdepay.c +@@ -26,6 +26,7 @@ + + #include + #include ++#include "gstrtpelements.h" + #include "gstrtpvrawdepay.h" + #include "gstrtputils.h" + 
+@@ -60,6 +61,8 @@ GST_STATIC_PAD_TEMPLATE ("sink", + #define gst_rtp_vraw_depay_parent_class parent_class + G_DEFINE_TYPE (GstRtpVRawDepay, gst_rtp_vraw_depay, + GST_TYPE_RTP_BASE_DEPAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvrawdepay, "rtpvrawdepay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_VRAW_DEPAY, rtp_element_init (plugin)); + + static gboolean gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload, + GstCaps * caps); +@@ -658,10 +661,3 @@ gst_rtp_vraw_depay_change_state (GstElement * element, + } + return ret; + } +- +-gboolean +-gst_rtp_vraw_depay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpvrawdepay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_VRAW_DEPAY); +-} +diff --git a/gst/rtp/gstrtpvrawdepay.h b/gst/rtp/gstrtpvrawdepay.h +index 18ca755d3..736da7583 100644 +--- a/gst/rtp/gstrtpvrawdepay.h ++++ b/gst/rtp/gstrtpvrawdepay.h +@@ -64,8 +64,6 @@ struct _GstRtpVRawDepayClass + + GType gst_rtp_vraw_depay_get_type (void); + +-gboolean gst_rtp_vraw_depay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_VRAW_DEPAY_H__ */ +diff --git a/gst/rtp/gstrtpvrawpay.c b/gst/rtp/gstrtpvrawpay.c +index fa43ac87a..b2f807867 100644 +--- a/gst/rtp/gstrtpvrawpay.c ++++ b/gst/rtp/gstrtpvrawpay.c +@@ -26,6 +26,7 @@ + #include + #include + ++#include "gstrtpelements.h" + #include "gstrtpvrawpay.h" + #include "gstrtputils.h" + +@@ -85,6 +86,8 @@ static void gst_rtp_vraw_pay_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); + + G_DEFINE_TYPE (GstRtpVRawPay, gst_rtp_vraw_pay, GST_TYPE_RTP_BASE_PAYLOAD); ++GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvrawpay, "rtpvrawpay", ++ GST_RANK_SECONDARY, GST_TYPE_RTP_VRAW_PAY, rtp_element_init (plugin)); + + static void + gst_rtp_vraw_pay_class_init (GstRtpVRawPayClass * klass) +@@ -353,7 +356,7 @@ gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer) + + /* get the max allowed payload length size, we try to fill the complete MTU */ + left = gst_rtp_buffer_calc_payload_len (mtu, 0, 0); +- out = gst_rtp_buffer_new_allocate (left, 0, 0); ++ out = gst_rtp_base_payload_allocate_output_buffer (payload, left, 0, 0); + + if (discont) { + GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DISCONT); +@@ -563,6 +566,7 @@ gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer) + if (line >= height) { + GST_LOG_OBJECT (rtpvrawpay, "field/frame complete, set marker"); + gst_rtp_buffer_set_marker (&rtp, TRUE); ++ GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_MARKER); + complete = TRUE; + } + gst_rtp_buffer_unmap (&rtp); +@@ -656,10 +660,3 @@ gst_rtp_vraw_pay_get_property (GObject * object, guint prop_id, + break; + } + } +- +-gboolean +-gst_rtp_vraw_pay_plugin_init (GstPlugin * plugin) +-{ +- return gst_element_register (plugin, "rtpvrawpay", +- GST_RANK_SECONDARY, GST_TYPE_RTP_VRAW_PAY); +-} +diff --git a/gst/rtp/gstrtpvrawpay.h b/gst/rtp/gstrtpvrawpay.h +index 3fd2442d6..008cbee93 100644 +--- a/gst/rtp/gstrtpvrawpay.h ++++ b/gst/rtp/gstrtpvrawpay.h +@@ -60,8 +60,6 @@ struct _GstRtpVRawPayClass + + GType gst_rtp_vraw_pay_get_type (void); + +-gboolean gst_rtp_vraw_pay_plugin_init (GstPlugin * plugin); +- + G_END_DECLS + + #endif /* __GST_RTP_VRAW_PAY_H__ */ +diff --git a/gst/rtp/meson.build b/gst/rtp/meson.build +index d57a195ae..aa76523ec 100644 +--- a/gst/rtp/meson.build ++++ b/gst/rtp/meson.build +@@ -2,6 +2,7 @@ rtp_sources = [ + 'dboolhuff.c', + 'fnv1hash.c', + 'gstbuffermemory.c', ++ 'gstrtpelement.c', + 'gstrtp.c', + 
'gstrtpchannels.c', + 'gstrtpac3depay.c', +@@ -41,6 +42,7 @@ rtp_sources = [ + 'gstrtpgsmpay.c', + 'gstrtpamrdepay.c', + 'gstrtpamrpay.c', ++ 'gstrtphdrext-colorspace.c', + 'gstrtph261depay.c', + 'gstrtph261pay.c', + 'gstrtph263pdepay.c', +@@ -61,6 +63,7 @@ rtp_sources = [ + 'gstrtpL16pay.c', + 'gstrtpL24depay.c', + 'gstrtpL24pay.c', ++ 'gstrtpldacpay.c', + 'gstasteriskh263.c', + 'gstrtpmp1sdepay.c', + 'gstrtpmp2tdepay.c', +@@ -102,6 +105,8 @@ rtp_sources = [ + 'rtpstorage.c', + 'rtpstoragestream.c', + 'gstrtpstorage.c', ++ 'gstrtpisacdepay.c', ++ 'gstrtpisacpay.c', + ] + + rtp_args = [ +@@ -119,5 +124,4 @@ gstrtp = library('gstrtp', + install : true, + install_dir : plugins_install_dir, + ) +-pkgconfig.generate(gstrtp, install_dir : plugins_pkgconfig_install_dir) + plugins += [gstrtp] +diff --git a/gst/rtp/rtpulpfeccommon.c b/gst/rtp/rtpulpfeccommon.c +index 7c6bf5876..10687fc12 100644 +--- a/gst/rtp/rtpulpfeccommon.c ++++ b/gst/rtp/rtpulpfeccommon.c +@@ -123,7 +123,13 @@ _xor_mem (guint8 * restrict dst, const guint8 * restrict src, gsize length) + guint i; + + for (i = 0; i < (length / sizeof (guint64)); ++i) { +- *((guint64 *) dst) ^= *((const guint64 *) src); ++#if G_BYTE_ORDER == G_LITTLE_ENDIAN ++ GST_WRITE_UINT64_LE (dst, ++ GST_READ_UINT64_LE (dst) ^ GST_READ_UINT64_LE (src)); ++#else ++ GST_WRITE_UINT64_BE (dst, ++ GST_READ_UINT64_BE (dst) ^ GST_READ_UINT64_BE (src)); ++#endif + dst += sizeof (guint64); + src += sizeof (guint64); + } +@@ -304,6 +310,8 @@ rtp_ulpfec_bitstring_to_fec_rtp_buffer (GArray * arr, + g_assert_not_reached (); + + gst_rtp_buffer_set_marker (&rtp, marker); ++ if (marker) ++ GST_BUFFER_FLAG_SET (ret, GST_BUFFER_FLAG_MARKER); + gst_rtp_buffer_set_payload_type (&rtp, pt); + gst_rtp_buffer_set_seq (&rtp, seq); + gst_rtp_buffer_set_timestamp (&rtp, timestamp); +diff --git a/gst/rtpmanager/gstrtpbin.c b/gst/rtpmanager/gstrtpbin.c +index 1fb98ffc7..70f609a49 100644 +--- a/gst/rtpmanager/gstrtpbin.c ++++ b/gst/rtpmanager/gstrtpbin.c +@@ -151,6 +151,7 @@ + #include "rtpsession.h" + #include "gstrtpsession.h" + #include "gstrtpjitterbuffer.h" ++#include "gstrtputils.h" + + #include + +@@ -165,6 +166,40 @@ static GstStaticPadTemplate rtpbin_recv_rtp_sink_template = + GST_STATIC_CAPS ("application/x-rtp;application/x-srtp") + ); + ++/** ++ * GstRtpBin!recv_fec_sink_%u_%u: ++ * ++ * Sink template for receiving Forward Error Correction packets, ++ * in the form recv_fec_sink__ ++ * ++ * See #GstRTPST_2022_1_FecDec for example usage ++ * ++ * Since: 1.20 ++ */ ++static GstStaticPadTemplate rtpbin_recv_fec_sink_template = ++GST_STATIC_PAD_TEMPLATE ("recv_fec_sink_%u_%u", ++ GST_PAD_SINK, ++ GST_PAD_REQUEST, ++ GST_STATIC_CAPS ("application/x-rtp") ++ ); ++ ++/** ++ * GstRtpBin!send_fec_src_%u_%u: ++ * ++ * Src template for sending Forward Error Correction packets, ++ * in the form send_fec_src__ ++ * ++ * See #GstRTPST_2022_1_FecEnc for example usage ++ * ++ * Since: 1.20 ++ */ ++static GstStaticPadTemplate rtpbin_send_fec_src_template = ++GST_STATIC_PAD_TEMPLATE ("send_fec_src_%u_%u", ++ GST_PAD_SRC, ++ GST_PAD_SOMETIMES, ++ GST_STATIC_CAPS ("application/x-rtp") ++ ); ++ + static GstStaticPadTemplate rtpbin_recv_rtcp_sink_template = + GST_STATIC_PAD_TEMPLATE ("recv_rtcp_sink_%u", + GST_PAD_SINK, +@@ -226,7 +261,7 @@ G_STMT_START { \ + + /* Minimum time offset to apply. 
This compensates for rounding errors in NTP to + * RTP timestamp conversions */ +-#define MIN_TS_OFFSET (4 * GST_MSECOND) ++#define MIN_TS_OFFSET_ROUND_OFF_COMP (4 * GST_MSECOND) + + struct _GstRtpBinPrivate + { +@@ -258,6 +293,7 @@ enum + SIGNAL_GET_INTERNAL_SESSION, + SIGNAL_GET_STORAGE, + SIGNAL_GET_INTERNAL_STORAGE, ++ SIGNAL_CLEAR_SSRC, + + SIGNAL_ON_NEW_SSRC, + SIGNAL_ON_SSRC_COLLISION, +@@ -276,6 +312,7 @@ enum + SIGNAL_REQUEST_RTCP_DECODER, + + SIGNAL_REQUEST_FEC_DECODER, ++ SIGNAL_REQUEST_FEC_DECODER_FULL, + SIGNAL_REQUEST_FEC_ENCODER, + + SIGNAL_REQUEST_JITTERBUFFER, +@@ -314,9 +351,13 @@ enum + #define DEFAULT_MAX_DROPOUT_TIME 60000 + #define DEFAULT_MAX_MISORDER_TIME 2000 + #define DEFAULT_RFC7273_SYNC FALSE ++#define DEFAULT_ADD_REFERENCE_TIMESTAMP_META FALSE + #define DEFAULT_MAX_STREAMS G_MAXUINT + #define DEFAULT_MAX_TS_OFFSET_ADJUSTMENT G_GUINT64_CONSTANT(0) + #define DEFAULT_MAX_TS_OFFSET G_GINT64_CONSTANT(3000000000) ++#define DEFAULT_MIN_TS_OFFSET MIN_TS_OFFSET_ROUND_OFF_COMP ++#define DEFAULT_TS_OFFSET_SMOOTHING_FACTOR 0 ++#define DEFAULT_UPDATE_NTP64_HEADER_EXT TRUE + + enum + { +@@ -341,9 +382,15 @@ enum + PROP_MAX_DROPOUT_TIME, + PROP_MAX_MISORDER_TIME, + PROP_RFC7273_SYNC, ++ PROP_ADD_REFERENCE_TIMESTAMP_META, + PROP_MAX_STREAMS, + PROP_MAX_TS_OFFSET_ADJUSTMENT, + PROP_MAX_TS_OFFSET, ++ PROP_MIN_TS_OFFSET, ++ PROP_TS_OFFSET_SMOOTHING_FACTOR, ++ PROP_FEC_DECODERS, ++ PROP_FEC_ENCODERS, ++ PROP_UPDATE_NTP64_HEADER_EXT, + }; + + #define GST_RTP_BIN_RTCP_SYNC_TYPE (gst_rtp_bin_rtcp_sync_get_type()) +@@ -377,7 +424,9 @@ static void payload_type_change (GstElement * element, guint pt, + GstRtpBinSession * session); + static void remove_recv_rtp (GstRtpBin * rtpbin, GstRtpBinSession * session); + static void remove_recv_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session); ++static void remove_recv_fec (GstRtpBin * rtpbin, GstRtpBinSession * session); + static void remove_send_rtp (GstRtpBin * rtpbin, GstRtpBinSession * session); ++static void remove_send_fec (GstRtpBin * rtpbin, GstRtpBinSession * session); + static void remove_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session); + static void free_client (GstRtpBinClient * client, GstRtpBin * bin); + static void free_stream (GstRtpBinStream * stream, GstRtpBin * bin); +@@ -429,6 +478,8 @@ struct _GstRtpBinStream + /* mapping to local RTP and NTP time */ + gint64 rt_delta; + gint64 rtp_delta; ++ gint64 avg_ts_offset; ++ gboolean is_initialized; + /* base rtptime in gst time */ + gint64 clock_base; + }; +@@ -486,6 +537,15 @@ struct _GstRtpBinSession + GstPad *send_rtp_src_ghost; + GstPad *send_rtcp_src; + GstPad *send_rtcp_src_ghost; ++ ++ GSList *recv_fec_sinks; ++ GSList *recv_fec_sink_ghosts; ++ /* fec decoder placed before the rtpjitterbuffer but after the rtpssrcdemux. ++ * XXX: This does not yet support multiple ssrc's in the same rtp session ++ */ ++ GstElement *early_fec_decoder; ++ ++ GSList *send_fec_src_ghosts; + }; + + /* Manages the RTP streams that come from one client and should therefore be +@@ -517,6 +577,12 @@ find_session_by_id (GstRtpBin * rtpbin, gint id) + return NULL; + } + ++static gboolean ++pad_is_recv_fec (GstRtpBinSession * session, GstPad * pad) ++{ ++ return g_slist_find (session->recv_fec_sink_ghosts, pad) != NULL; ++} ++ + /* find a session with the given request pad. 
Must be called with RTP_BIN_LOCK */ + static GstRtpBinSession * + find_session_by_pad (GstRtpBin * rtpbin, GstPad * pad) +@@ -528,8 +594,8 @@ find_session_by_pad (GstRtpBin * rtpbin, GstPad * pad) + + if ((sess->recv_rtp_sink_ghost == pad) || + (sess->recv_rtcp_sink_ghost == pad) || +- (sess->send_rtp_sink_ghost == pad) +- || (sess->send_rtcp_src_ghost == pad)) ++ (sess->send_rtp_sink_ghost == pad) || ++ (sess->send_rtcp_src_ghost == pad) || pad_is_recv_fec (sess, pad)) + return sess; + } + return NULL; +@@ -713,6 +779,10 @@ create_session (GstRtpBin * rtpbin, gint id) + + g_object_set (session, "max-dropout-time", rtpbin->max_dropout_time, + "max-misorder-time", rtpbin->max_misorder_time, NULL); ++ ++ g_object_set (session, "update-ntp64-header-ext", ++ rtpbin->update_ntp64_header_ext, NULL); ++ + GST_OBJECT_UNLOCK (rtpbin); + + /* provide clock_rate to the session manager when needed */ +@@ -850,7 +920,9 @@ free_session (GstRtpBinSession * sess, GstRtpBin * bin) + + remove_recv_rtp (bin, sess); + remove_recv_rtcp (bin, sess); ++ remove_recv_fec (bin, sess); + remove_send_rtp (bin, sess); ++ remove_send_fec (bin, sess); + remove_rtcp (bin, sess); + + gst_bin_remove (GST_BIN_CAST (bin), sess->session); +@@ -973,6 +1045,8 @@ gst_rtp_bin_reset_sync (GstRtpBin * rtpbin) + * lip-sync */ + stream->have_sync = FALSE; + stream->rt_delta = 0; ++ stream->avg_ts_offset = 0; ++ stream->is_initialized = FALSE; + stream->rtp_delta = 0; + stream->clock_base = -100 * GST_SECOND; + } +@@ -1086,6 +1160,25 @@ gst_rtp_bin_get_internal_storage (GstRtpBin * bin, guint session_id) + return internal_storage; + } + ++static void ++gst_rtp_bin_clear_ssrc (GstRtpBin * bin, guint session_id, guint32 ssrc) ++{ ++ GstRtpBinSession *session; ++ GstElement *demux = NULL; ++ ++ GST_RTP_BIN_LOCK (bin); ++ GST_DEBUG_OBJECT (bin, "clearing ssrc %u for session %u", ssrc, session_id); ++ session = find_session_by_id (bin, (gint) session_id); ++ if (session) ++ demux = gst_object_ref (session->demux); ++ GST_RTP_BIN_UNLOCK (bin); ++ ++ if (demux) { ++ g_signal_emit_by_name (demux, "clear-ssrc", ssrc, NULL); ++ gst_object_unref (demux); ++ } ++} ++ + static GstElement * + gst_rtp_bin_request_encoder (GstRtpBin * bin, guint session_id) + { +@@ -1150,7 +1243,8 @@ gst_rtp_bin_propagate_property_to_session (GstRtpBin * bin, + + /* get a client with the given SDES name. 
Must be called with RTP_BIN_LOCK */ + static GstRtpBinClient * +-get_client (GstRtpBin * bin, guint8 len, guint8 * data, gboolean * created) ++get_client (GstRtpBin * bin, guint8 len, const guint8 * data, ++ gboolean * created) + { + GstRtpBinClient *result = NULL; + GSList *walk; +@@ -1211,7 +1305,7 @@ get_current_times (GstRtpBin * bin, GstClockTime * running_time, + if (bin->use_pipeline_clock) { + ntpns = rt; + /* add constant to convert from 1970 based time to 1900 based time */ +- ntpns += (2208988800LL * GST_SECOND); ++ ntpns += (GST_RTP_NTP_UNIX_OFFSET * GST_SECOND); + } else { + switch (bin->ntp_time_source) { + case GST_RTP_NTP_TIME_SOURCE_NTP: +@@ -1221,7 +1315,7 @@ get_current_times (GstRtpBin * bin, GstClockTime * running_time, + + /* add constant to convert from 1970 based time to 1900 based time */ + if (bin->ntp_time_source == GST_RTP_NTP_TIME_SOURCE_NTP) +- ntpns += (2208988800LL * GST_SECOND); ++ ntpns += (GST_RTP_NTP_UNIX_OFFSET * GST_SECOND); + break; + } + case GST_RTP_NTP_TIME_SOURCE_RUNNING_TIME: +@@ -1251,7 +1345,7 @@ get_current_times (GstRtpBin * bin, GstClockTime * running_time, + + static void + stream_set_ts_offset (GstRtpBin * bin, GstRtpBinStream * stream, +- gint64 ts_offset, gint64 max_ts_offset, gint64 min_ts_offset, ++ gint64 ts_offset, gint64 max_ts_offset, guint64 min_ts_offset, + gboolean allow_positive_ts_offset) + { + gint64 prev_ts_offset; +@@ -1265,17 +1359,51 @@ stream_set_ts_offset (GstRtpBin * bin, GstRtpBinStream * stream, + return; + } + ++ if (bin->ts_offset_smoothing_factor > 0) { ++ if (!stream->is_initialized) { ++ stream->avg_ts_offset = ts_offset; ++ stream->is_initialized = TRUE; ++ } else { ++ /* RMA algorithm using smoothing factor is following, but split into ++ * parts to check for overflows: ++ * stream->avg_ts_offset = ++ * ((bin->ts_offset_smoothing_factor - 1) * stream->avg_ts_offset ++ * + ts_offset) / bin->ts_offset_smoothing_factor ++ */ ++ guint64 max_possible_smoothing_factor = G_MAXUINT64; ++ gint64 cur_avg_product = ++ (bin->ts_offset_smoothing_factor - 1) * stream->avg_ts_offset; ++ if (stream->avg_ts_offset != 0) ++ max_possible_smoothing_factor = ++ G_MAXINT64 / ABS (stream->avg_ts_offset); ++ ++ if ((max_possible_smoothing_factor < bin->ts_offset_smoothing_factor) || ++ (cur_avg_product > 0 && G_MAXINT64 - cur_avg_product < ts_offset) || ++ (cur_avg_product < 0 && G_MININT64 - cur_avg_product > ts_offset)) { ++ GST_WARNING_OBJECT (bin, ++ "ts-offset-smoothing-factor calculation overflow, fallback to using ts-offset directly"); ++ stream->avg_ts_offset = ts_offset; ++ } else { ++ stream->avg_ts_offset = ++ (cur_avg_product + ts_offset) / bin->ts_offset_smoothing_factor; ++ } ++ } ++ } else { ++ stream->avg_ts_offset = ts_offset; ++ } ++ + g_object_get (stream->buffer, "ts-offset", &prev_ts_offset, NULL); + + /* delta changed, see how much */ +- if (prev_ts_offset != ts_offset) { ++ if (prev_ts_offset != stream->avg_ts_offset) { + gint64 diff; + +- diff = prev_ts_offset - ts_offset; ++ diff = prev_ts_offset - stream->avg_ts_offset; + + GST_DEBUG_OBJECT (bin, + "ts-offset %" G_GINT64_FORMAT ", prev %" G_GINT64_FORMAT +- ", diff: %" G_GINT64_FORMAT, ts_offset, prev_ts_offset, diff); ++ ", diff: %" G_GINT64_FORMAT, stream->avg_ts_offset, prev_ts_offset, ++ diff); + + /* ignore minor offsets */ + if (ABS (diff) < min_ts_offset) { +@@ -1285,21 +1413,21 @@ stream_set_ts_offset (GstRtpBin * bin, GstRtpBinStream * stream, + + /* sanity check offset */ + if (max_ts_offset > 0) { +- if (ts_offset > 0 && !allow_positive_ts_offset) 
{ ++ if (stream->avg_ts_offset > 0 && !allow_positive_ts_offset) { + GST_DEBUG_OBJECT (bin, + "offset is positive (clocks are out of sync), ignoring"); + return; + } +- if (ABS (ts_offset) > max_ts_offset) { ++ if (ABS (stream->avg_ts_offset) > max_ts_offset) { + GST_DEBUG_OBJECT (bin, "offset too large, ignoring"); + return; + } + } + +- g_object_set (stream->buffer, "ts-offset", ts_offset, NULL); ++ g_object_set (stream->buffer, "ts-offset", stream->avg_ts_offset, NULL); + } + GST_DEBUG_OBJECT (bin, "stream SSRC %08x, delta %" G_GINT64_FORMAT, +- stream->ssrc, ts_offset); ++ stream->ssrc, stream->avg_ts_offset); + } + + static void +@@ -1326,7 +1454,7 @@ gst_rtp_bin_send_sync_event (GstRtpBinStream * stream) + * Must be called with GST_RTP_BIN_LOCK */ + static void + gst_rtp_bin_associate (GstRtpBin * bin, GstRtpBinStream * stream, guint8 len, +- guint8 * data, guint64 ntptime, guint64 last_extrtptime, ++ const guint8 * data, guint64 ntpnstime, guint64 last_extrtptime, + guint64 base_rtptime, guint64 base_time, guint clock_rate, + gint64 rtp_clock_base) + { +@@ -1334,7 +1462,6 @@ gst_rtp_bin_associate (GstRtpBin * bin, GstRtpBinStream * stream, guint8 len, + gboolean created; + GSList *walk; + GstClockTime running_time, running_time_rtp; +- guint64 ntpnstime; + + /* first find or create the CNAME */ + client = get_client (bin, len, data, &created); +@@ -1392,10 +1519,6 @@ gst_rtp_bin_associate (GstRtpBin * bin, GstRtpBinStream * stream, guint8 len, + gst_util_uint64_scale_int (running_time_rtp, GST_SECOND, clock_rate); + running_time += base_time; + +- /* convert ntptime to nanoseconds */ +- ntpnstime = gst_util_uint64_scale (ntptime, GST_SECOND, +- (G_GINT64_CONSTANT (1) << 32)); +- + stream->have_sync = TRUE; + + GST_DEBUG_OBJECT (bin, +@@ -1437,9 +1560,9 @@ gst_rtp_bin_associate (GstRtpBin * bin, GstRtpBinStream * stream, guint8 len, + stream->rt_delta = rtdiff - ntpdiff; + + stream_set_ts_offset (bin, stream, stream->rt_delta, bin->max_ts_offset, +- 0, FALSE); ++ bin->min_ts_offset, FALSE); + } else { +- gint64 min, rtp_min, clock_base = stream->clock_base; ++ gint64 min, rtp_min, clock_base; + gboolean all_sync, use_rtp; + gboolean rtcp_sync = g_atomic_int_get (&bin->rtcp_sync); + +@@ -1465,27 +1588,25 @@ gst_rtp_bin_associate (GstRtpBin * bin, GstRtpBinStream * stream, guint8 len, + min = rtp_min = G_MAXINT64; + use_rtp = FALSE; + if (rtcp_sync == GST_RTP_BIN_RTCP_SYNC_RTP) { +- guint64 ext_base; ++ guint64 ext_base = -1; ++ gint64 rtp_delta = 0; + + use_rtp = TRUE; +- /* signed version for convenience */ +- clock_base = base_rtptime; +- /* deal with possible wrap-around */ +- ext_base = base_rtptime; ++ /* convert to extended RTP time */ + rtp_clock_base = gst_rtp_buffer_ext_timestamp (&ext_base, rtp_clock_base); + /* sanity check; base rtp and provided clock_base should be close */ +- if (rtp_clock_base >= clock_base) { +- if (rtp_clock_base - clock_base < 10 * clock_rate) { +- rtp_clock_base = base_time + +- gst_util_uint64_scale_int (rtp_clock_base - clock_base, ++ if (rtp_clock_base >= base_rtptime) { ++ if (rtp_clock_base - base_rtptime < 10 * clock_rate) { ++ rtp_delta = base_time + ++ gst_util_uint64_scale_int (rtp_clock_base - base_rtptime, + GST_SECOND, clock_rate); + } else { + use_rtp = FALSE; + } + } else { +- if (clock_base - rtp_clock_base < 10 * clock_rate) { +- rtp_clock_base = base_time - +- gst_util_uint64_scale_int (clock_base - rtp_clock_base, ++ if (base_rtptime - rtp_clock_base < 10 * clock_rate) { ++ rtp_delta = base_time - ++ gst_util_uint64_scale_int 
(base_rtptime - rtp_clock_base, + GST_SECOND, clock_rate); + } else { + use_rtp = FALSE; +@@ -1497,11 +1618,11 @@ gst_rtp_bin_associate (GstRtpBin * bin, GstRtpBinStream * stream, guint8 len, + return; + } + /* store to track changes */ +- clock_base = rtp_clock_base; ++ clock_base = rtp_delta; + /* generate a fake as before, + * now equating rtptime obtained from RTP-Info, + * where the large time represent the otherwise irrelevant npt/ntp time */ +- stream->rtp_delta = (GST_SECOND << 28) - rtp_clock_base; ++ stream->rtp_delta = (GST_SECOND << 28) - rtp_delta; + } else { + clock_base = rtp_clock_base; + } +@@ -1590,7 +1711,7 @@ gst_rtp_bin_associate (GstRtpBin * bin, GstRtpBinStream * stream, guint8 len, + ts_offset = ostream->rt_delta - min; + + stream_set_ts_offset (bin, ostream, ts_offset, bin->max_ts_offset, +- MIN_TS_OFFSET, TRUE); ++ bin->min_ts_offset, TRUE); + } + } + gst_rtp_bin_send_sync_event (stream); +@@ -1617,15 +1738,16 @@ gst_rtp_bin_handle_sync (GstElement * jitterbuffer, GstStructure * s, + GstRtpBin *bin; + GstRTCPPacket packet; + guint32 ssrc; +- guint64 ntptime; +- gboolean have_sr, have_sdes; ++ guint64 ntpnstime, inband_ntpnstime; ++ gboolean have_sr; + gboolean more; + guint64 base_rtptime; + guint64 base_time; + guint clock_rate; + guint64 clock_base; +- guint64 extrtptime; ++ guint64 extrtptime, inband_ext_rtptime; + GstBuffer *buffer; ++ const gchar *cname; + GstRTCPBuffer rtcp = { NULL, }; + + bin = stream->bin; +@@ -1636,17 +1758,59 @@ gst_rtp_bin_handle_sync (GstElement * jitterbuffer, GstStructure * s, + * timestamps. We get this info directly from the jitterbuffer which + * constructs gstreamer timestamps from rtp timestamps and so it know exactly + * what the current situation is. */ +- base_rtptime = +- g_value_get_uint64 (gst_structure_get_value (s, "base-rtptime")); +- base_time = g_value_get_uint64 (gst_structure_get_value (s, "base-time")); +- clock_rate = g_value_get_uint (gst_structure_get_value (s, "clock-rate")); +- clock_base = g_value_get_uint64 (gst_structure_get_value (s, "clock-base")); +- extrtptime = +- g_value_get_uint64 (gst_structure_get_value (s, "sr-ext-rtptime")); ++ if (!gst_structure_get_uint64 (s, "base-rtptime", &base_rtptime) || ++ !gst_structure_get_uint64 (s, "base-time", &base_time) || ++ !gst_structure_get_uint (s, "clock-rate", &clock_rate) || ++ !gst_structure_get_uint64 (s, "clock-base", &clock_base)) { ++ /* invalid structure */ ++ return; ++ } ++ ++ cname = gst_structure_get_string (s, "cname"); ++ ++ /* if the jitterbuffer directly got the NTP timestamp then don't work ++ * through the RTCP SR, otherwise extract it from there */ ++ if (gst_structure_get_uint64 (s, "inband-ntpnstime", &inband_ntpnstime) ++ && gst_structure_get_uint64 (s, "inband-ext-rtptime", &inband_ext_rtptime) ++ && (cname = gst_structure_get_string (s, "cname")) ++ && gst_structure_get_uint (s, "ssrc", &ssrc)) { ++ GST_DEBUG_OBJECT (bin, ++ "handle sync from inband NTP-64 information for SSRC %08x", ssrc); ++ ++ if (ssrc != stream->ssrc) ++ return; ++ ++ GST_RTP_BIN_LOCK (bin); ++ gst_rtp_bin_associate (bin, stream, strlen (cname), (const guint8 *) cname, ++ inband_ntpnstime, inband_ext_rtptime, base_rtptime, base_time, ++ clock_rate, clock_base); ++ GST_RTP_BIN_UNLOCK (bin); ++ return; ++ } ++ ++ if (!gst_structure_get_uint64 (s, "sr-ext-rtptime", &extrtptime) ++ || !gst_structure_has_field_typed (s, "sr-buffer", GST_TYPE_BUFFER)) { ++ /* invalid structure */ ++ return; ++ } ++ ++ GST_DEBUG_OBJECT (bin, "handle sync from RTCP SR information"); ++ ++ 
/* get RTCP SR ntpnstime if available */ ++ if (gst_structure_get_uint64 (s, "sr-ntpnstime", &ntpnstime) && cname) { ++ GST_RTP_BIN_LOCK (bin); ++ /* associate the stream to CNAME */ ++ gst_rtp_bin_associate (bin, stream, strlen (cname), ++ (const guint8 *) cname, ntpnstime, extrtptime, base_rtptime, ++ base_time, clock_rate, clock_base); ++ GST_RTP_BIN_UNLOCK (bin); ++ return; ++ } ++ ++ /* otherwise parse the RTCP packet */ + buffer = gst_value_get_buffer (gst_structure_get_value (s, "sr-buffer")); + + have_sr = FALSE; +- have_sdes = FALSE; + + gst_rtcp_buffer_map (buffer, GST_MAP_READ, &rtcp); + +@@ -1655,31 +1819,51 @@ gst_rtp_bin_handle_sync (GstElement * jitterbuffer, GstStructure * s, + switch (gst_rtcp_packet_get_type (&packet)) { + case GST_RTCP_TYPE_SR: + /* only parse first. There is only supposed to be one SR in the packet +- * but we will deal with malformed packets gracefully */ ++ * but we will deal with malformed packets gracefully by trying the ++ * next RTCP packet. */ + if (have_sr) +- break; +- /* get NTP and RTP times */ +- gst_rtcp_packet_sr_get_sender_info (&packet, &ssrc, &ntptime, NULL, ++ continue; ++ ++ /* get NTP time */ ++ gst_rtcp_packet_sr_get_sender_info (&packet, &ssrc, &ntpnstime, NULL, + NULL, NULL); + ++ /* convert ntptime to nanoseconds */ ++ ntpnstime = gst_util_uint64_scale (ntpnstime, GST_SECOND, ++ (G_GINT64_CONSTANT (1) << 32)); ++ + GST_DEBUG_OBJECT (bin, "received sync packet from SSRC %08x", ssrc); +- /* ignore SR that is not ours */ ++ ++ /* ignore SR that is not ours and check the next RTCP packet */ + if (ssrc != stream->ssrc) + continue; + + have_sr = TRUE; ++ ++ /* If we already have the CNAME don't require parsing SDES */ ++ if (cname) { ++ GST_RTP_BIN_LOCK (bin); ++ /* associate the stream to CNAME */ ++ gst_rtp_bin_associate (bin, stream, strlen (cname), ++ (const guint8 *) cname, ntpnstime, extrtptime, base_rtptime, ++ base_time, clock_rate, clock_base); ++ GST_RTP_BIN_UNLOCK (bin); ++ ++ goto out; ++ } ++ + break; + case GST_RTCP_TYPE_SDES: + { +- gboolean more_items, more_entries; ++ gboolean more_items; + +- /* only deal with first SDES, there is only supposed to be one SDES in +- * the RTCP packet but we deal with bad packets gracefully. Also bail +- * out if we have not seen an SR item yet. */ +- if (have_sdes || !have_sr) +- break; ++ /* Bail out if we have not seen an SR item yet. */ ++ if (!have_sr) ++ goto out; + + GST_RTCP_SDES_FOR_ITEMS (more_items, &packet) { ++ gboolean more_entries; ++ + /* skip items that are not about the SSRC of the sender */ + if (gst_rtcp_packet_sdes_get_ssrc (&packet) != ssrc) + continue; +@@ -1688,28 +1872,34 @@ gst_rtp_bin_handle_sync (GstElement * jitterbuffer, GstStructure * s, + GST_RTCP_SDES_FOR_ENTRIES (more_entries, &packet) { + GstRTCPSDESType type; + guint8 len; +- guint8 *data; ++ const guint8 *data; + +- gst_rtcp_packet_sdes_get_entry (&packet, &type, &len, &data); ++ gst_rtcp_packet_sdes_get_entry (&packet, &type, &len, ++ (guint8 **) & data); + + if (type == GST_RTCP_SDES_CNAME) { + GST_RTP_BIN_LOCK (bin); + /* associate the stream to CNAME */ + gst_rtp_bin_associate (bin, stream, len, data, +- ntptime, extrtptime, base_rtptime, base_time, clock_rate, ++ ntpnstime, extrtptime, base_rtptime, base_time, clock_rate, + clock_base); + GST_RTP_BIN_UNLOCK (bin); ++ ++ goto out; + } + } + } +- have_sdes = TRUE; +- break; ++ ++ /* only deal with first SDES, there is only supposed to be one SDES in ++ * the RTCP packet but we deal with bad packets gracefully. 
*/ ++ goto out; + } + default: + /* we can ignore these packets */ + break; + } + } ++out: + gst_rtcp_buffer_unmap (&rtcp); + } + +@@ -1747,6 +1937,8 @@ create_stream (GstRtpBinSession * session, guint32 ssrc) + + stream->have_sync = FALSE; + stream->rt_delta = 0; ++ stream->avg_ts_offset = 0; ++ stream->is_initialized = FALSE; + stream->rtp_delta = 0; + stream->percent = 100; + stream->clock_base = -100 * GST_SECOND; +@@ -1787,9 +1979,14 @@ create_stream (GstRtpBinSession * session, guint32 ssrc) + g_object_set (buffer, "max-misorder-time", rtpbin->max_misorder_time, NULL); + if (g_object_class_find_property (jb_class, "rfc7273-sync")) + g_object_set (buffer, "rfc7273-sync", rtpbin->rfc7273_sync, NULL); ++ if (g_object_class_find_property (jb_class, "add-reference-timestamp-meta")) ++ g_object_set (buffer, "add-reference-timestamp-meta", ++ rtpbin->add_reference_timestamp_meta, NULL); + if (g_object_class_find_property (jb_class, "max-ts-offset-adjustment")) + g_object_set (buffer, "max-ts-offset-adjustment", + rtpbin->max_ts_offset_adjustment, NULL); ++ if (g_object_class_find_property (jb_class, "sync-interval")) ++ g_object_set (buffer, "sync-interval", rtpbin->rtcp_sync_interval, NULL); + + g_signal_emit (rtpbin, gst_rtp_bin_signals[SIGNAL_NEW_JITTERBUFFER], 0, + buffer, session->id, ssrc); +@@ -1928,6 +2125,7 @@ static void gst_rtp_bin_handle_message (GstBin * bin, GstMessage * message); + + #define gst_rtp_bin_parent_class parent_class + G_DEFINE_TYPE_WITH_PRIVATE (GstRtpBin, gst_rtp_bin, GST_TYPE_BIN); ++GST_ELEMENT_REGISTER_DEFINE (rtpbin, "rtpbin", GST_RANK_NONE, GST_TYPE_RTP_BIN); + + static gboolean + _gst_element_accumulator (GSignalInvocationHint * ihint, +@@ -1938,8 +2136,7 @@ _gst_element_accumulator (GSignalInvocationHint * ihint, + element = g_value_get_object (handler_return); + GST_DEBUG ("got element %" GST_PTR_FORMAT, element); + +- if (!(ihint->run_type & G_SIGNAL_RUN_CLEANUP)) +- g_value_set_object (return_accu, element); ++ g_value_set_object (return_accu, element); + + /* stop emission if we have an element */ + return (element == NULL); +@@ -1954,8 +2151,7 @@ _gst_caps_accumulator (GSignalInvocationHint * ihint, + caps = g_value_get_boxed (handler_return); + GST_DEBUG ("got caps %" GST_PTR_FORMAT, caps); + +- if (!(ihint->run_type & G_SIGNAL_RUN_CLEANUP)) +- g_value_set_boxed (return_accu, caps); ++ g_value_set_boxed (return_accu, caps); + + /* stop emission if we have a caps */ + return (caps == NULL); +@@ -2099,6 +2295,24 @@ gst_rtp_bin_class_init (GstRtpBinClass * klass) + G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass, + get_storage), NULL, NULL, NULL, GST_TYPE_ELEMENT, 1, G_TYPE_UINT); + ++ /** ++ * GstRtpBin::clear-ssrc: ++ * @rtpbin: the object which received the signal ++ * @id: the session id ++ * @ssrc: the ssrc ++ * ++ * Remove all pads from rtpssrcdemux element associated with the specified ++ * ssrc. This delegate the action signal to the rtpssrcdemux element ++ * associated with the specified session. ++ * ++ * Since: 1.20 ++ */ ++ gst_rtp_bin_signals[SIGNAL_CLEAR_SSRC] = ++ g_signal_new ("clear-ssrc", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass, ++ clear_ssrc), NULL, NULL, NULL, G_TYPE_NONE, 2, ++ G_TYPE_UINT, G_TYPE_UINT); ++ + /** + * GstRtpBin::on-new-ssrc: + * @rtpbin: the object which received the signal +@@ -2399,10 +2613,15 @@ gst_rtp_bin_class_init (GstRtpBinClass * klass) + * @session: the session index + * + * Request a FEC decoder element for the given @session. 
The element +- * will be added to the bin after the pt demuxer. ++ * will be added to the bin after the pt demuxer. If there are multiple ++ * ssrc's and pt's in @session, this signal may be called multiple times for ++ * the same @session each corresponding to a newly discovered ssrc. + * + * If no handler is connected, no FEC decoder will be used. + * ++ * Warning: usage of this signal is not appropriate for the BUNDLE case, ++ * connect to #GstRtpBin::request-fec-decoder-full instead. ++ * + * Since: 1.14 + */ + gst_rtp_bin_signals[SIGNAL_REQUEST_FEC_DECODER] = +@@ -2411,6 +2630,29 @@ gst_rtp_bin_class_init (GstRtpBinClass * klass) + request_fec_decoder), _gst_element_accumulator, NULL, NULL, + GST_TYPE_ELEMENT, 1, G_TYPE_UINT); + ++ /** ++ * GstRtpBin::request-fec-decoder-full: ++ * @rtpbin: the object which received the signal ++ * @session: the session index ++ * @ssrc: the ssrc of the stream ++ * @pt: the payload type ++ * ++ * Request a FEC decoder element for the given @session. The element ++ * will be added to the bin after the pt demuxer. If there are multiple ++ * ssrc's and pt's in @session, this signal may be called multiple times for ++ * the same @session each corresponding to a newly discovered ssrc and payload ++ * type, those are provided as parameters. ++ * ++ * If no handler is connected, no FEC decoder will be used. ++ * ++ * Since: 1.20 ++ */ ++ gst_rtp_bin_signals[SIGNAL_REQUEST_FEC_DECODER_FULL] = ++ g_signal_new ("request-fec-decoder-full", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, ++ request_fec_decoder), _gst_element_accumulator, NULL, NULL, ++ GST_TYPE_ELEMENT, 3, G_TYPE_UINT, G_TYPE_UINT, G_TYPE_UINT); ++ + /** + * GstRtpBin::request-fec-encoder: + * @rtpbin: the object which received the signal +@@ -2523,11 +2765,12 @@ gst_rtp_bin_class_init (GstRtpBinClass * klass) + /** + * GstRtpBin:rtcp-sync-interval: + * +- * Determines how often to sync streams using RTCP data. ++ * Determines how often to sync streams using RTCP data or inband NTP-64 ++ * header extensions. + */ + g_object_class_install_property (gobject_class, PROP_RTCP_SYNC_INTERVAL, + g_param_spec_uint ("rtcp-sync-interval", "RTCP Sync Interval", +- "RTCP SR interval synchronization (ms) (0 = always)", ++ "RTCP SR / NTP-64 interval synchronization (ms) (0 = always)", + 0, G_MAXUINT, DEFAULT_RTCP_SYNC_INTERVAL, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + +@@ -2600,6 +2843,25 @@ gst_rtp_bin_class_init (GstRtpBinClass * klass) + "(requires clock and offset to be provided)", DEFAULT_RFC7273_SYNC, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + ++ /** ++ * GstRtpBin:add-reference-timestamp-meta: ++ * ++ * When syncing to a RFC7273 clock or after clock synchronization via RTCP or ++ * inband NTP-64 header extensions has happened, add #GstReferenceTimestampMeta ++ * to buffers with the original reconstructed reference clock timestamp. 
++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_ADD_REFERENCE_TIMESTAMP_META, ++ g_param_spec_boolean ("add-reference-timestamp-meta", ++ "Add Reference Timestamp Meta", ++ "Add Reference Timestamp Meta to buffers with the original clock timestamp " ++ "before any adjustments when syncing to an RFC7273 clock or after clock " ++ "synchronization via RTCP or inband NTP-64 header extensions has happened.", ++ DEFAULT_ADD_REFERENCE_TIMESTAMP_META, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ + g_object_class_install_property (gobject_class, PROP_MAX_STREAMS, + g_param_spec_uint ("max-streams", "Max Streams", + "The maximum number of streams to create for one session", +@@ -2640,6 +2902,104 @@ gst_rtp_bin_class_init (GstRtpBinClass * klass) + "changed to 0 (no limit)", 0, G_MAXINT64, DEFAULT_MAX_TS_OFFSET, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + ++ /** ++ * GstRtpBin:min-ts-offset: ++ * ++ * Used to set an lower limit for when a time offset is deemed large enough ++ * to be useful for sync corrections. ++ * ++ * When streaming for instance audio, even very small ts_offsets cause ++ * audible glitches. This property is used for controlling how sensitive the ++ * adjustments should be to small deviations in ts_offset, occurring for ++ * instance due to jittery network conditions or system load. ++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, PROP_MIN_TS_OFFSET, ++ g_param_spec_uint64 ("min-ts-offset", "Min TS Offset", ++ "The minimum absolute value of the time offset in (nanoseconds). " ++ "Used to set an lower limit for when a time offset is deemed large " ++ "enough to be useful for sync corrections." ++ "Note, if the ntp-sync parameter is set the default value is " ++ "changed to 0 (no limit)", 0, G_MAXUINT64, DEFAULT_MIN_TS_OFFSET, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstRtpBin:ts-offset-smoothing-factor: ++ * ++ * Controls the weighting between previous and current timestamp offsets in ++ * a running moving average (RMA): ++ * ts_offset_average(n) = ++ * ((ts-offset-smoothing-factor - 1) * ts_offset_average(n - 1) + ts_offset(n)) / ++ * ts-offset-smoothing-factor ++ * ++ * This can stabilize the timestamp offset and prevent unnecessary skew ++ * corrections due to jitter introduced by network or system load. ++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_TS_OFFSET_SMOOTHING_FACTOR, ++ g_param_spec_uint ("ts-offset-smoothing-factor", ++ "Timestamp Offset Smoothing Factor", ++ "Sets a smoothing factor for the timestamp offset in number of " ++ "values for a calculated running moving average. " ++ "(0 = no smoothing factor)", 0, G_MAXUINT, ++ DEFAULT_TS_OFFSET_SMOOTHING_FACTOR, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstRtpBin:fec-decoders: ++ * ++ * Used to provide a factory used to build the FEC decoder for a ++ * given session, as a command line alternative to ++ * #GstRtpBin::request-fec-decoder. 
++ * ++ * Expects a GstStructure in the form session_id (gint) -> factory (string) ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_FEC_DECODERS, ++ g_param_spec_boxed ("fec-decoders", "Fec Decoders", ++ "GstStructure mapping from session index to FEC decoder " ++ "factory, eg " ++ "fec-decoders='fec,0=\"rtpst2022-1-fecdec\\ size-time\\=1000000000\";'", ++ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstRtpBin:fec-encoders: ++ * ++ * Used to provide a factory used to build the FEC encoder for a ++ * given session, as a command line alternative to ++ * #GstRtpBin::request-fec-encoder. ++ * ++ * Expects a GstStructure in the form session_id (gint) -> factory (string) ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_FEC_ENCODERS, ++ g_param_spec_boxed ("fec-encoders", "Fec Encoders", ++ "GstStructure mapping from session index to FEC encoder " ++ "factory, eg " ++ "fec-encoders='fec,0=\"rtpst2022-1-fecenc\\ rows\\=5\\ columns\\=5\";'", ++ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * GstRtpBin:update-ntp64-header-ext: ++ * ++ * Whether RTP NTP header extension should be updated with actual ++ * NTP time. If not, use the NTP time from buffer timestamp metadata ++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_UPDATE_NTP64_HEADER_EXT, ++ g_param_spec_boolean ("update-ntp64-header-ext", ++ "Update NTP-64 RTP Header Extension", ++ "Whether RTP NTP header extension should be updated with actual NTP time", ++ DEFAULT_UPDATE_NTP64_HEADER_EXT, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ + gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_rtp_bin_change_state); + gstelement_class->request_new_pad = + GST_DEBUG_FUNCPTR (gst_rtp_bin_request_new_pad); +@@ -2648,6 +3008,8 @@ gst_rtp_bin_class_init (GstRtpBinClass * klass) + /* sink pads */ + gst_element_class_add_static_pad_template (gstelement_class, + &rtpbin_recv_rtp_sink_template); ++ gst_element_class_add_static_pad_template (gstelement_class, ++ &rtpbin_recv_fec_sink_template); + gst_element_class_add_static_pad_template (gstelement_class, + &rtpbin_recv_rtcp_sink_template); + gst_element_class_add_static_pad_template (gstelement_class, +@@ -2660,6 +3022,8 @@ gst_rtp_bin_class_init (GstRtpBinClass * klass) + &rtpbin_send_rtcp_src_template); + gst_element_class_add_static_pad_template (gstelement_class, + &rtpbin_send_rtp_src_template); ++ gst_element_class_add_static_pad_template (gstelement_class, ++ &rtpbin_send_fec_src_template); + + gst_element_class_set_static_metadata (gstelement_class, "RTP Bin", + "Filter/Network/RTP", +@@ -2676,6 +3040,7 @@ gst_rtp_bin_class_init (GstRtpBinClass * klass) + klass->get_storage = GST_DEBUG_FUNCPTR (gst_rtp_bin_get_storage); + klass->get_internal_storage = + GST_DEBUG_FUNCPTR (gst_rtp_bin_get_internal_storage); ++ klass->clear_ssrc = GST_DEBUG_FUNCPTR (gst_rtp_bin_clear_ssrc); + klass->request_rtp_encoder = GST_DEBUG_FUNCPTR (gst_rtp_bin_request_encoder); + klass->request_rtp_decoder = GST_DEBUG_FUNCPTR (gst_rtp_bin_request_decoder); + klass->request_rtcp_encoder = GST_DEBUG_FUNCPTR (gst_rtp_bin_request_encoder); +@@ -2717,15 +3082,24 @@ gst_rtp_bin_init (GstRtpBin * rtpbin) + rtpbin->max_dropout_time = DEFAULT_MAX_DROPOUT_TIME; + rtpbin->max_misorder_time = DEFAULT_MAX_MISORDER_TIME; + rtpbin->rfc7273_sync = DEFAULT_RFC7273_SYNC; ++ rtpbin->add_reference_timestamp_meta = DEFAULT_ADD_REFERENCE_TIMESTAMP_META; + 
rtpbin->max_streams = DEFAULT_MAX_STREAMS; + rtpbin->max_ts_offset_adjustment = DEFAULT_MAX_TS_OFFSET_ADJUSTMENT; + rtpbin->max_ts_offset = DEFAULT_MAX_TS_OFFSET; + rtpbin->max_ts_offset_is_set = FALSE; ++ rtpbin->min_ts_offset = DEFAULT_MIN_TS_OFFSET; ++ rtpbin->min_ts_offset_is_set = FALSE; ++ rtpbin->ts_offset_smoothing_factor = DEFAULT_TS_OFFSET_SMOOTHING_FACTOR; ++ rtpbin->update_ntp64_header_ext = DEFAULT_UPDATE_NTP64_HEADER_EXT; + + /* some default SDES entries */ + cname = g_strdup_printf ("user%u@host-%x", g_random_int (), g_random_int ()); + rtpbin->sdes = gst_structure_new ("application/x-rtp-source-sdes", + "cname", G_TYPE_STRING, cname, "tool", G_TYPE_STRING, "GStreamer", NULL); ++ rtpbin->fec_decoders = ++ gst_structure_new_empty ("application/x-rtp-fec-decoders"); ++ rtpbin->fec_encoders = ++ gst_structure_new_empty ("application/x-rtp-fec-encoders"); + g_free (cname); + } + +@@ -2756,6 +3130,12 @@ gst_rtp_bin_finalize (GObject * object) + if (rtpbin->sdes) + gst_structure_free (rtpbin->sdes); + ++ if (rtpbin->fec_decoders) ++ gst_structure_free (rtpbin->fec_decoders); ++ ++ if (rtpbin->fec_encoders) ++ gst_structure_free (rtpbin->fec_encoders); ++ + g_mutex_clear (&rtpbin->priv->bin_lock); + g_mutex_clear (&rtpbin->priv->dyn_lock); + +@@ -2788,6 +3168,44 @@ gst_rtp_bin_set_sdes_struct (GstRtpBin * bin, const GstStructure * sdes) + GST_RTP_BIN_UNLOCK (bin); + } + ++static void ++gst_rtp_bin_set_fec_decoders_struct (GstRtpBin * bin, ++ const GstStructure * decoders) ++{ ++ if (decoders == NULL) ++ return; ++ ++ GST_RTP_BIN_LOCK (bin); ++ ++ GST_OBJECT_LOCK (bin); ++ if (bin->fec_decoders) ++ gst_structure_free (bin->fec_decoders); ++ bin->fec_decoders = gst_structure_copy (decoders); ++ ++ GST_OBJECT_UNLOCK (bin); ++ ++ GST_RTP_BIN_UNLOCK (bin); ++} ++ ++static void ++gst_rtp_bin_set_fec_encoders_struct (GstRtpBin * bin, ++ const GstStructure * encoders) ++{ ++ if (encoders == NULL) ++ return; ++ ++ GST_RTP_BIN_LOCK (bin); ++ ++ GST_OBJECT_LOCK (bin); ++ if (bin->fec_encoders) ++ gst_structure_free (bin->fec_encoders); ++ bin->fec_encoders = gst_structure_copy (encoders); ++ ++ GST_OBJECT_UNLOCK (bin); ++ ++ GST_RTP_BIN_UNLOCK (bin); ++} ++ + static GstStructure * + gst_rtp_bin_get_sdes_struct (GstRtpBin * bin) + { +@@ -2800,6 +3218,30 @@ gst_rtp_bin_get_sdes_struct (GstRtpBin * bin) + return result; + } + ++static GstStructure * ++gst_rtp_bin_get_fec_decoders_struct (GstRtpBin * bin) ++{ ++ GstStructure *result; ++ ++ GST_OBJECT_LOCK (bin); ++ result = gst_structure_copy (bin->fec_decoders); ++ GST_OBJECT_UNLOCK (bin); ++ ++ return result; ++} ++ ++static GstStructure * ++gst_rtp_bin_get_fec_encoders_struct (GstRtpBin * bin) ++{ ++ GstStructure *result; ++ ++ GST_OBJECT_LOCK (bin); ++ result = gst_structure_copy (bin->fec_encoders); ++ GST_OBJECT_UNLOCK (bin); ++ ++ return result; ++} ++ + static void + gst_rtp_bin_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +@@ -2845,6 +3287,13 @@ gst_rtp_bin_set_property (GObject * object, guint prop_id, + rtpbin->max_ts_offset = DEFAULT_MAX_TS_OFFSET; + } + } ++ if (!rtpbin->min_ts_offset_is_set) { ++ if (rtpbin->ntp_sync) { ++ rtpbin->min_ts_offset = 0; ++ } else { ++ rtpbin->min_ts_offset = DEFAULT_MIN_TS_OFFSET; ++ } ++ } + break; + case PROP_RTCP_SYNC: + g_atomic_int_set (&rtpbin->rtcp_sync, g_value_get_enum (value)); +@@ -2951,6 +3400,11 @@ gst_rtp_bin_set_property (GObject * object, guint prop_id, + gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin, + "rfc7273-sync", value); + 
break; ++ case PROP_ADD_REFERENCE_TIMESTAMP_META: ++ rtpbin->add_reference_timestamp_meta = g_value_get_boolean (value); ++ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin, ++ "add-reference-timestamp-meta", value); ++ break; + case PROP_MAX_STREAMS: + rtpbin->max_streams = g_value_get_uint (value); + break; +@@ -2963,6 +3417,26 @@ gst_rtp_bin_set_property (GObject * object, guint prop_id, + rtpbin->max_ts_offset = g_value_get_int64 (value); + rtpbin->max_ts_offset_is_set = TRUE; + break; ++ case PROP_MIN_TS_OFFSET: ++ rtpbin->min_ts_offset = g_value_get_uint64 (value); ++ rtpbin->min_ts_offset_is_set = TRUE; ++ break; ++ case PROP_TS_OFFSET_SMOOTHING_FACTOR: ++ rtpbin->ts_offset_smoothing_factor = g_value_get_uint (value); ++ break; ++ case PROP_FEC_DECODERS: ++ gst_rtp_bin_set_fec_decoders_struct (rtpbin, g_value_get_boxed (value)); ++ break; ++ case PROP_FEC_ENCODERS: ++ gst_rtp_bin_set_fec_encoders_struct (rtpbin, g_value_get_boxed (value)); ++ break; ++ case PROP_UPDATE_NTP64_HEADER_EXT: ++ GST_RTP_BIN_LOCK (rtpbin); ++ rtpbin->update_ntp64_header_ext = g_value_get_boolean (value); ++ GST_RTP_BIN_UNLOCK (rtpbin); ++ gst_rtp_bin_propagate_property_to_session (rtpbin, ++ "update-ntp64-header-ext", value); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -3048,6 +3522,9 @@ gst_rtp_bin_get_property (GObject * object, guint prop_id, + case PROP_RFC7273_SYNC: + g_value_set_boolean (value, rtpbin->rfc7273_sync); + break; ++ case PROP_ADD_REFERENCE_TIMESTAMP_META: ++ g_value_set_boolean (value, rtpbin->add_reference_timestamp_meta); ++ break; + case PROP_MAX_STREAMS: + g_value_set_uint (value, rtpbin->max_streams); + break; +@@ -3057,6 +3534,21 @@ gst_rtp_bin_get_property (GObject * object, guint prop_id, + case PROP_MAX_TS_OFFSET: + g_value_set_int64 (value, rtpbin->max_ts_offset); + break; ++ case PROP_MIN_TS_OFFSET: ++ g_value_set_uint64 (value, rtpbin->min_ts_offset); ++ break; ++ case PROP_TS_OFFSET_SMOOTHING_FACTOR: ++ g_value_set_uint (value, rtpbin->ts_offset_smoothing_factor); ++ break; ++ case PROP_FEC_DECODERS: ++ g_value_take_boxed (value, gst_rtp_bin_get_fec_decoders_struct (rtpbin)); ++ break; ++ case PROP_FEC_ENCODERS: ++ g_value_take_boxed (value, gst_rtp_bin_get_fec_encoders_struct (rtpbin)); ++ break; ++ case PROP_UPDATE_NTP64_HEADER_EXT: ++ g_value_set_boolean (value, rtpbin->update_ntp64_header_ext); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -3308,6 +3800,32 @@ gst_rtp_bin_change_state (GstElement * element, GstStateChange transition) + return res; + } + ++static GstElement * ++session_request_element_full (GstRtpBinSession * session, guint signal, ++ guint ssrc, guint8 pt) ++{ ++ GstElement *element = NULL; ++ GstRtpBin *bin = session->bin; ++ ++ g_signal_emit (bin, gst_rtp_bin_signals[signal], 0, session->id, ssrc, pt, ++ &element); ++ ++ if (element) { ++ if (!bin_manage_element (bin, element)) ++ goto manage_failed; ++ session->elements = g_slist_prepend (session->elements, element); ++ } ++ return element; ++ ++ /* ERRORS */ ++manage_failed: ++ { ++ GST_WARNING_OBJECT (bin, "unable to manage element"); ++ gst_object_unref (element); ++ return NULL; ++ } ++} ++ + static GstElement * + session_request_element (GstRtpBinSession * session, guint signal) + { +@@ -3343,6 +3861,46 @@ copy_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data) + return TRUE; + } + ++static gboolean ++ensure_early_fec_decoder (GstRtpBin * rtpbin, GstRtpBinSession * session) 
++{ ++ const gchar *factory; ++ gchar *sess_id_str; ++ ++ if (session->early_fec_decoder) ++ goto done; ++ ++ sess_id_str = g_strdup_printf ("%u", session->id); ++ factory = gst_structure_get_string (rtpbin->fec_decoders, sess_id_str); ++ g_free (sess_id_str); ++ ++ /* First try the property */ ++ if (factory) { ++ GError *err = NULL; ++ ++ session->early_fec_decoder = ++ gst_parse_bin_from_description_full (factory, TRUE, NULL, ++ GST_PARSE_FLAG_NO_SINGLE_ELEMENT_BINS | GST_PARSE_FLAG_FATAL_ERRORS, ++ &err); ++ if (!session->early_fec_decoder) { ++ GST_ERROR_OBJECT (rtpbin, "Failed to build decoder from factory: %s", ++ err->message); ++ } ++ ++ bin_manage_element (session->bin, session->early_fec_decoder); ++ session->elements = ++ g_slist_prepend (session->elements, session->early_fec_decoder); ++ GST_INFO_OBJECT (rtpbin, "Built FEC decoder: %" GST_PTR_FORMAT ++ " for session %u", session->early_fec_decoder, session->id); ++ } ++ ++ /* Do not fallback to the signal as the signal expects a fec decoder to ++ * be placed at a different place in the pipeline */ ++ ++done: ++ return session->early_fec_decoder != NULL; ++} ++ + static void + expose_recv_src_pad (GstRtpBin * rtpbin, GstPad * pad, GstRtpBinStream * stream, + guint8 pt) +@@ -3355,9 +3913,19 @@ expose_recv_src_pad (GstRtpBin * rtpbin, GstPad * pad, GstRtpBinStream * stream, + gst_object_ref (pad); + + if (stream->session->storage) { ++ /* First try the legacy signal, with no ssrc and pt as parameters. ++ * This will likely cause issues for the BUNDLE case. */ + GstElement *fec_decoder = + session_request_element (stream->session, SIGNAL_REQUEST_FEC_DECODER); + ++ /* Now try the new signal, where the application can provide a FEC ++ * decoder according to ssrc and pt. */ ++ if (!fec_decoder) { ++ fec_decoder = ++ session_request_element_full (stream->session, ++ SIGNAL_REQUEST_FEC_DECODER_FULL, stream->ssrc, pt); ++ } ++ + if (fec_decoder) { + GstPad *sinkpad, *srcpad; + GstPadLinkReturn ret; +@@ -3594,12 +4162,22 @@ new_ssrc_pad_found (GstElement * element, guint ssrc, GstPad * pad, + padname = g_strdup_printf ("src_%u", ssrc); + srcpad = gst_element_get_static_pad (element, padname); + g_free (padname); ++ ++ if (session->early_fec_decoder) { ++ GST_DEBUG_OBJECT (rtpbin, "linking fec decoder"); ++ sinkpad = gst_element_get_static_pad (session->early_fec_decoder, "sink"); ++ gst_pad_link_full (srcpad, sinkpad, GST_PAD_LINK_CHECK_NOTHING); ++ gst_object_unref (sinkpad); ++ gst_object_unref (srcpad); ++ srcpad = gst_element_get_static_pad (session->early_fec_decoder, "src"); ++ } ++ + sinkpad = gst_element_get_static_pad (stream->buffer, "sink"); + gst_pad_link_full (srcpad, sinkpad, GST_PAD_LINK_CHECK_NOTHING); + gst_object_unref (sinkpad); + gst_object_unref (srcpad); + +- sinkpad = gst_element_get_request_pad (stream->buffer, "sink_rtcp"); ++ sinkpad = gst_element_request_pad_simple (stream->buffer, "sink_rtcp"); + if (sinkpad) { + GST_DEBUG_OBJECT (rtpbin, "linking jitterbuffer RTCP"); + padname = g_strdup_printf ("rtcp_src_%u", ssrc); +@@ -3678,7 +4256,7 @@ complete_session_sink (GstRtpBin * rtpbin, GstRtpBinSession * session) + + /* get recv_rtp pad and store */ + session->recv_rtp_sink = +- gst_element_get_request_pad (session->session, "recv_rtp_sink"); ++ gst_element_request_pad_simple (session->session, "recv_rtp_sink"); + if (session->recv_rtp_sink == NULL) + goto pad_failed; + +@@ -3879,8 +4457,6 @@ create_recv_rtp (GstRtpBin * rtpbin, GstPadTemplate * templ, const gchar * name) + session->recv_rtp_sink_ghost = + 
gst_ghost_pad_new_from_template (name, recv_rtp_sink, templ); + gst_object_unref (recv_rtp_sink); +- gst_pad_set_active (session->recv_rtp_sink_ghost, TRUE); +- gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), session->recv_rtp_sink_ghost); + + complete_session_receiver (rtpbin, session, sessid); + +@@ -3933,6 +4509,68 @@ remove_recv_rtp (GstRtpBin * rtpbin, GstRtpBinSession * session) + } + } + ++static gint ++fec_sinkpad_find (const GValue * item, gchar * padname) ++{ ++ GstPad *pad = g_value_get_object (item); ++ return g_strcmp0 (GST_PAD_NAME (pad), padname); ++} ++ ++static GstPad * ++complete_session_fec (GstRtpBin * rtpbin, GstRtpBinSession * session, ++ guint fec_idx) ++{ ++ gboolean have_static_pad; ++ gchar *padname; ++ ++ GstPad *ret; ++ GstIterator *it; ++ GValue item = { 0, }; ++ ++ if (!ensure_early_fec_decoder (rtpbin, session)) ++ goto no_decoder; ++ ++ padname = g_strdup_printf ("fec_%u", fec_idx); ++ ++ GST_DEBUG_OBJECT (rtpbin, "getting FEC sink pad %s", padname); ++ ++ /* First try to find the decoder static pad that matches the padname */ ++ it = gst_element_iterate_sink_pads (session->early_fec_decoder); ++ have_static_pad = ++ gst_iterator_find_custom (it, (GCompareFunc) fec_sinkpad_find, &item, ++ padname); ++ ++ if (have_static_pad) { ++ ret = g_value_get_object (&item); ++ gst_object_ref (ret); ++ g_value_unset (&item); ++ } else { ++ ret = gst_element_request_pad_simple (session->early_fec_decoder, padname); ++ } ++ ++ g_free (padname); ++ gst_iterator_free (it); ++ ++ if (ret == NULL) ++ goto pad_failed; ++ ++ session->recv_fec_sinks = g_slist_prepend (session->recv_fec_sinks, ret); ++ ++ return ret; ++ ++pad_failed: ++ { ++ g_warning ("rtpbin: failed to get decoder fec pad"); ++ return NULL; ++ } ++no_decoder: ++ { ++ g_warning ("rtpbin: failed to build FEC decoder for session %u", ++ session->id); ++ return NULL; ++ } ++} ++ + static GstPad * + complete_session_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session, + guint sessid) +@@ -3944,7 +4582,7 @@ complete_session_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session, + /* get recv_rtp pad and store */ + GST_DEBUG_OBJECT (rtpbin, "getting RTCP sink pad"); + session->recv_rtcp_sink = +- gst_element_get_request_pad (session->session, "recv_rtcp_sink"); ++ gst_element_request_pad_simple (session->session, "recv_rtcp_sink"); + if (session->recv_rtcp_sink == NULL) + goto pad_failed; + +@@ -4056,9 +4694,6 @@ create_recv_rtcp (GstRtpBin * rtpbin, GstPadTemplate * templ, + session->recv_rtcp_sink_ghost = + gst_ghost_pad_new_from_template (name, decsink, templ); + gst_object_unref (decsink); +- gst_pad_set_active (session->recv_rtcp_sink_ghost, TRUE); +- gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), +- session->recv_rtcp_sink_ghost); + + return session->recv_rtcp_sink_ghost; + +@@ -4076,6 +4711,64 @@ create_error: + } + } + ++static GstPad * ++create_recv_fec (GstRtpBin * rtpbin, GstPadTemplate * templ, const gchar * name) ++{ ++ guint sessid, fec_idx; ++ GstRtpBinSession *session; ++ GstPad *decsink = NULL; ++ GstPad *ghost; ++ ++ /* first get the session number */ ++ if (name == NULL ++ || sscanf (name, "recv_fec_sink_%u_%u", &sessid, &fec_idx) != 2) ++ goto no_name; ++ ++ if (fec_idx > 1) ++ goto invalid_idx; ++ ++ GST_DEBUG_OBJECT (rtpbin, "finding session %u", sessid); ++ ++ /* get or create the session */ ++ session = find_session_by_id (rtpbin, sessid); ++ if (!session) { ++ GST_DEBUG_OBJECT (rtpbin, "creating session %u", sessid); ++ /* create session now */ ++ session = create_session (rtpbin, sessid); ++ 
if (session == NULL) ++ goto create_error; ++ } ++ ++ decsink = complete_session_fec (rtpbin, session, fec_idx); ++ if (!decsink) ++ goto create_error; ++ ++ ghost = gst_ghost_pad_new_from_template (name, decsink, templ); ++ session->recv_fec_sink_ghosts = ++ g_slist_prepend (session->recv_fec_sink_ghosts, ghost); ++ gst_object_unref (decsink); ++ ++ return ghost; ++ ++ /* ERRORS */ ++no_name: ++ { ++ g_warning ("rtpbin: cannot find session id for pad: %s", ++ GST_STR_NULL (name)); ++ return NULL; ++ } ++invalid_idx: ++ { ++ g_warning ("rtpbin: invalid FEC index: %s", GST_STR_NULL (name)); ++ return NULL; ++ } ++create_error: ++ { ++ /* create_session already warned */ ++ return NULL; ++ } ++} ++ + static void + remove_recv_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session) + { +@@ -4097,6 +4790,64 @@ remove_recv_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session) + } + } + ++static void ++remove_recv_fec_for_pad (GstRtpBin * rtpbin, GstRtpBinSession * session, ++ GstPad * ghost) ++{ ++ GSList *item; ++ GstPad *target; ++ ++ target = gst_ghost_pad_get_target (GST_GHOST_PAD (ghost)); ++ ++ if (target) { ++ item = g_slist_find (session->recv_fec_sinks, target); ++ if (item) { ++ GstPadTemplate *templ; ++ GstPad *pad; ++ ++ pad = item->data; ++ templ = gst_pad_get_pad_template (pad); ++ ++ if (GST_PAD_TEMPLATE_PRESENCE (templ) == GST_PAD_REQUEST) { ++ GST_DEBUG_OBJECT (rtpbin, ++ "Releasing FEC decoder pad %" GST_PTR_FORMAT, pad); ++ gst_element_release_request_pad (session->early_fec_decoder, pad); ++ } else { ++ gst_object_unref (pad); ++ } ++ ++ session->recv_fec_sinks = ++ g_slist_delete_link (session->recv_fec_sinks, item); ++ ++ gst_object_unref (templ); ++ } ++ gst_object_unref (target); ++ } ++ ++ item = g_slist_find (session->recv_fec_sink_ghosts, ghost); ++ if (item) ++ session->recv_fec_sink_ghosts = ++ g_slist_delete_link (session->recv_fec_sink_ghosts, item); ++ ++ gst_pad_set_active (ghost, FALSE); ++ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin), ghost); ++} ++ ++static void ++remove_recv_fec (GstRtpBin * rtpbin, GstRtpBinSession * session) ++{ ++ GSList *copy; ++ GSList *tmp; ++ ++ copy = g_slist_copy (session->recv_fec_sink_ghosts); ++ ++ for (tmp = copy; tmp; tmp = tmp->next) { ++ remove_recv_fec_for_pad (rtpbin, session, (GstPad *) tmp->data); ++ } ++ ++ g_slist_free (copy); ++} ++ + static gboolean + complete_session_src (GstRtpBin * rtpbin, GstRtpBinSession * session) + { +@@ -4222,7 +4973,7 @@ setup_aux_sender_fold (const GValue * item, GValue * result, gpointer user_data) + + /* get send_rtp pad and store */ + newsess->send_rtp_sink = +- gst_element_get_request_pad (newsess->session, "send_rtp_sink"); ++ gst_element_request_pad_simple (newsess->session, "send_rtp_sink"); + if (newsess->send_rtp_sink == NULL) + goto pad_failed; + +@@ -4285,6 +5036,146 @@ setup_aux_sender (GstRtpBin * rtpbin, GstRtpBinSession * session, + return res == GST_ITERATOR_DONE; + } + ++static void ++fec_encoder_add_pad_unlocked (GstPad * pad, GstRtpBinSession * session) ++{ ++ GstElementClass *klass; ++ gchar *gname; ++ GstPadTemplate *templ; ++ guint fec_idx; ++ GstPad *ghost; ++ ++ if (sscanf (GST_PAD_NAME (pad), "fec_%u", &fec_idx) != 1) { ++ GST_WARNING_OBJECT (session->bin, ++ "FEC encoder added pad with name not matching fec_%%u (%s)", ++ GST_PAD_NAME (pad)); ++ goto done; ++ } ++ ++ GST_INFO_OBJECT (session->bin, "FEC encoder for session %u exposed new pad", ++ session->id); ++ ++ klass = GST_ELEMENT_GET_CLASS (session->bin); ++ gname = g_strdup_printf ("send_fec_src_%u_%u", 
session->id, fec_idx); ++ templ = gst_element_class_get_pad_template (klass, "send_fec_src_%u_%u"); ++ ghost = gst_ghost_pad_new_from_template (gname, pad, templ); ++ session->send_fec_src_ghosts = ++ g_slist_prepend (session->send_fec_src_ghosts, ghost); ++ gst_pad_set_active (ghost, TRUE); ++ gst_pad_sticky_events_foreach (pad, copy_sticky_events, ghost); ++ gst_element_add_pad (GST_ELEMENT (session->bin), ghost); ++ g_free (gname); ++ ++done: ++ return; ++} ++ ++static void ++fec_encoder_add_pad (GstPad * pad, GstRtpBinSession * session) ++{ ++ GST_RTP_BIN_LOCK (session->bin); ++ fec_encoder_add_pad_unlocked (pad, session); ++ GST_RTP_BIN_UNLOCK (session->bin); ++} ++ ++static gint ++fec_srcpad_iterator_filter (const GValue * item, GValue * unused) ++{ ++ guint fec_idx; ++ GstPad *pad = g_value_get_object (item); ++ GstPadTemplate *templ = gst_pad_get_pad_template (pad); ++ ++ gint have_static_pad = ++ (GST_PAD_TEMPLATE_PRESENCE (templ) == GST_PAD_ALWAYS) && ++ (sscanf (GST_PAD_NAME (pad), "fec_%u", &fec_idx) == 1); ++ ++ gst_object_unref (templ); ++ ++ /* return 0 to retain pad in filtered iterator */ ++ return !have_static_pad; ++} ++ ++static void ++fec_srcpad_iterator_foreach (const GValue * item, GstRtpBinSession * session) ++{ ++ GstPad *pad = g_value_get_object (item); ++ fec_encoder_add_pad_unlocked (pad, session); ++} ++ ++static void ++fec_encoder_pad_added_cb (GstElement * encoder, GstPad * pad, ++ GstRtpBinSession * session) ++{ ++ fec_encoder_add_pad (pad, session); ++} ++ ++static GstElement * ++request_fec_encoder (GstRtpBin * rtpbin, GstRtpBinSession * session, ++ guint sessid) ++{ ++ GstElement *ret = NULL; ++ const gchar *factory; ++ gchar *sess_id_str; ++ ++ sess_id_str = g_strdup_printf ("%u", sessid); ++ factory = gst_structure_get_string (rtpbin->fec_encoders, sess_id_str); ++ g_free (sess_id_str); ++ ++ /* First try the property */ ++ if (factory) { ++ GError *err = NULL; ++ ++ ret = ++ gst_parse_bin_from_description_full (factory, TRUE, NULL, ++ GST_PARSE_FLAG_NO_SINGLE_ELEMENT_BINS | GST_PARSE_FLAG_FATAL_ERRORS, ++ &err); ++ if (!ret) { ++ GST_ERROR_OBJECT (rtpbin, "Failed to build encoder from factory: %s", ++ err->message); ++ goto done; ++ } ++ ++ bin_manage_element (session->bin, ret); ++ session->elements = g_slist_prepend (session->elements, ret); ++ GST_INFO_OBJECT (rtpbin, "Built FEC encoder: %" GST_PTR_FORMAT ++ " for session %u", ret, sessid); ++ } ++ ++ /* Fallback to the signal */ ++ if (!ret) ++ ret = session_request_element (session, SIGNAL_REQUEST_FEC_ENCODER); ++ ++ if (ret) { ++ /* First, add encoder pads that match fec_% template and are already present */ ++ GstIterator *it, *filter; ++ GstIteratorResult it_ret = GST_ITERATOR_OK; ++ ++ it = gst_element_iterate_src_pads (ret); ++ filter = ++ gst_iterator_filter (it, (GCompareFunc) fec_srcpad_iterator_filter, ++ NULL); ++ ++ while (it_ret == GST_ITERATOR_OK || it_ret == GST_ITERATOR_RESYNC) { ++ it_ret = ++ gst_iterator_foreach (filter, ++ (GstIteratorForeachFunction) fec_srcpad_iterator_foreach, session); ++ ++ if (it_ret == GST_ITERATOR_RESYNC) ++ gst_iterator_resync (filter); ++ } ++ ++ gst_iterator_free (filter); ++ ++ /* Finally, connect to pad-added signal if any of the encoder pads are ++ * added later */ ++ g_signal_connect (ret, "pad-added", G_CALLBACK (fec_encoder_pad_added_cb), ++ session); ++ } ++ ++done: ++ return ret; ++} ++ + /* Create a pad for sending RTP for the session in @name. Must be called with + * RTP_BIN_LOCK. 
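The send_fec_src_%u_%u ghost pads created in fec_encoder_add_pad_unlocked() above only appear once the configured FEC encoder exposes its fec_%u source pads, so an application normally picks them up from rtpbin's pad-added signal. A minimal sketch under that assumption (session 0, FEC stream 0; fec_sink is a hypothetical downstream element):

    static void
    on_rtpbin_pad_added (GstElement * rtpbin, GstPad * pad, GstElement * fec_sink)
    {
      /* protection packets for session 0, FEC stream 0 */
      if (g_str_has_prefix (GST_PAD_NAME (pad), "send_fec_src_0_0")) {
        GstPad *sinkpad = gst_element_get_static_pad (fec_sink, "sink");
        gst_pad_link (pad, sinkpad);
        gst_object_unref (sinkpad);
      }
    }

    /* ... after creating rtpbin and fec_sink ... */
    g_signal_connect (rtpbin, "pad-added",
        G_CALLBACK (on_rtpbin_pad_added), fec_sink);

On the receive side the matching recv_fec_sink_%u_%u pads are ordinary request pads (see create_recv_fec() above) and can be obtained with gst_element_request_pad_simple (rtpbin, "recv_fec_sink_0_0").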
+ */ +@@ -4320,7 +5211,7 @@ create_send_rtp (GstRtpBin * rtpbin, GstPadTemplate * templ, const gchar * name) + if (session->send_rtp_sink != NULL) + goto existing_session; + +- encoder = session_request_element (session, SIGNAL_REQUEST_FEC_ENCODER); ++ encoder = request_fec_encoder (rtpbin, session, sessid); + + if (encoder) { + GST_DEBUG_OBJECT (rtpbin, "Linking FEC encoder"); +@@ -4359,12 +5250,13 @@ create_send_rtp (GstRtpBin * rtpbin, GstPadTemplate * templ, const gchar * name) + if (ret != GST_PAD_LINK_OK) { + goto aux_link_failed; + } ++ gst_object_unref (sinkpad); + } + prev = aux; + } else { + /* get send_rtp pad and store */ + session->send_rtp_sink = +- gst_element_get_request_pad (session->session, "send_rtp_sink"); ++ gst_element_request_pad_simple (session->session, "send_rtp_sink"); + if (session->send_rtp_sink == NULL) + goto pad_failed; + +@@ -4387,8 +5279,6 @@ create_send_rtp (GstRtpBin * rtpbin, GstPadTemplate * templ, const gchar * name) + session->send_rtp_sink_ghost = + gst_ghost_pad_new_from_template (name, send_rtp_sink, templ); + gst_object_unref (send_rtp_sink); +- gst_pad_set_active (session->send_rtp_sink_ghost, TRUE); +- gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), session->send_rtp_sink_ghost); + + return session->send_rtp_sink_ghost; + +@@ -4472,6 +5362,21 @@ remove_send_rtp (GstRtpBin * rtpbin, GstRtpBinSession * session) + } + } + ++static void ++remove_send_fec (GstRtpBin * rtpbin, GstRtpBinSession * session) ++{ ++ GSList *tmp; ++ ++ for (tmp = session->send_fec_src_ghosts; tmp; tmp = tmp->next) { ++ GstPad *ghost = GST_PAD (tmp->data); ++ gst_pad_set_active (ghost, FALSE); ++ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin), ghost); ++ } ++ ++ g_slist_free (session->send_fec_src_ghosts); ++ session->send_fec_src_ghosts = NULL; ++} ++ + /* Create a pad for sending RTCP for the session in @name. Must be called with + * RTP_BIN_LOCK. 
+ */ +@@ -4504,7 +5409,7 @@ create_send_rtcp (GstRtpBin * rtpbin, GstPadTemplate * templ, + + /* get rtcp_src pad and store */ + session->send_rtcp_src = +- gst_element_get_request_pad (session->session, "send_rtcp_src"); ++ gst_element_request_pad_simple (session->session, "send_rtcp_src"); + if (session->send_rtcp_src == NULL) + goto pad_failed; + +@@ -4542,8 +5447,6 @@ create_send_rtcp (GstRtpBin * rtpbin, GstPadTemplate * templ, + session->send_rtcp_src_ghost = + gst_ghost_pad_new_from_template (name, encsrc, templ); + gst_object_unref (encsrc); +- gst_pad_set_active (session->send_rtcp_src_ghost, TRUE); +- gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), session->send_rtcp_src_ghost); + + return session->send_rtcp_src_ghost; + +@@ -4699,12 +5602,20 @@ gst_rtp_bin_request_new_pad (GstElement * element, + } else if (templ == gst_element_class_get_pad_template (klass, + "send_rtcp_src_%u")) { + result = create_send_rtcp (rtpbin, templ, pad_name); ++ } else if (templ == gst_element_class_get_pad_template (klass, ++ "recv_fec_sink_%u_%u")) { ++ result = create_recv_fec (rtpbin, templ, pad_name); + } else + goto wrong_template; + + g_free (pad_name); + GST_RTP_BIN_UNLOCK (rtpbin); + ++ if (result) { ++ gst_pad_set_active (result, TRUE); ++ gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), result); ++ } ++ + return result; + + /* ERRORS */ +@@ -4743,13 +5654,16 @@ gst_rtp_bin_release_pad (GstElement * element, GstPad * pad) + remove_send_rtp (rtpbin, session); + } else if (session->send_rtcp_src_ghost == pad) { + remove_rtcp (rtpbin, session); ++ } else if (pad_is_recv_fec (session, pad)) { ++ remove_recv_fec_for_pad (rtpbin, session, pad); + } + + /* no more request pads, free the complete session */ + if (session->recv_rtp_sink_ghost == NULL + && session->recv_rtcp_sink_ghost == NULL + && session->send_rtp_sink_ghost == NULL +- && session->send_rtcp_src_ghost == NULL) { ++ && session->send_rtcp_src_ghost == NULL ++ && session->recv_fec_sink_ghosts == NULL) { + GST_DEBUG_OBJECT (rtpbin, "no more pads for session %p", session); + rtpbin->sessions = g_slist_remove (rtpbin->sessions, session); + free_session (session, rtpbin); +diff --git a/gst/rtpmanager/gstrtpbin.h b/gst/rtpmanager/gstrtpbin.h +index fcea7cec8..eb98eb28e 100644 +--- a/gst/rtpmanager/gstrtpbin.h ++++ b/gst/rtpmanager/gstrtpbin.h +@@ -74,10 +74,14 @@ struct _GstRtpBin { + guint32 max_dropout_time; + guint32 max_misorder_time; + gboolean rfc7273_sync; ++ gboolean add_reference_timestamp_meta; + guint max_streams; + guint64 max_ts_offset_adjustment; + gint64 max_ts_offset; + gboolean max_ts_offset_is_set; ++ guint64 min_ts_offset; ++ gboolean min_ts_offset_is_set; ++ guint ts_offset_smoothing_factor; + + /* a list of session */ + GSList *sessions; +@@ -88,6 +92,14 @@ struct _GstRtpBin { + /* the default SDES items for sessions */ + GstStructure *sdes; + ++ /* the default FEC decoder factories for sessions */ ++ GstStructure *fec_decoders; ++ ++ /* the default FEC encoder factories for sessions */ ++ GstStructure *fec_encoders; ++ ++ gboolean update_ntp64_header_ext; ++ + /*< private >*/ + GstRtpBinPrivate *priv; + }; +@@ -111,6 +123,7 @@ struct _GstRtpBinClass { + RTPSession* (*get_internal_session) (GstRtpBin *rtpbin, guint session); + GstElement* (*get_storage) (GstRtpBin *rtpbin, guint session); + GObject* (*get_internal_storage) (GstRtpBin *rtpbin, guint session); ++ void (*clear_ssrc) (GstRtpBin *rtpbin, guint session, guint32 ssrc); + + /* session manager signals */ + void (*on_new_ssrc) (GstRtpBin *rtpbin, guint session, 
guint32 ssrc); +@@ -143,4 +156,6 @@ struct _GstRtpBinClass { + + GType gst_rtp_bin_get_type (void); + ++GST_ELEMENT_REGISTER_DECLARE (rtpbin); ++ + #endif /* __GST_RTP_BIN_H__ */ +diff --git a/gst/rtpmanager/gstrtpdtmfmux.c b/gst/rtpmanager/gstrtpdtmfmux.c +index cff68e518..b55c06724 100644 +--- a/gst/rtpmanager/gstrtpdtmfmux.c ++++ b/gst/rtpmanager/gstrtpdtmfmux.c +@@ -68,7 +68,11 @@ static gboolean gst_rtp_dtmf_mux_accept_buffer_locked (GstRTPMux * rtp_mux, + static gboolean gst_rtp_dtmf_mux_src_event (GstRTPMux * rtp_mux, + GstEvent * event); + +-G_DEFINE_TYPE (GstRTPDTMFMux, gst_rtp_dtmf_mux, GST_TYPE_RTP_MUX); ++G_DEFINE_TYPE_WITH_CODE (GstRTPDTMFMux, gst_rtp_dtmf_mux, GST_TYPE_RTP_MUX, ++ GST_DEBUG_CATEGORY_INIT (gst_rtp_dtmf_mux_debug, "rtpdtmfmux", 0, ++ "rtp dtmf muxer");); ++GST_ELEMENT_REGISTER_DEFINE (rtpdtmfmux, "rtpdtmfmux", GST_RANK_NONE, ++ GST_TYPE_RTP_DTMF_MUX); + + static void + gst_rtp_dtmf_mux_init (GstRTPDTMFMux * mux) +@@ -222,13 +226,3 @@ gst_rtp_dtmf_mux_change_state (GstElement * element, GstStateChange transition) + + return ret; + } +- +-gboolean +-gst_rtp_dtmf_mux_plugin_init (GstPlugin * plugin) +-{ +- GST_DEBUG_CATEGORY_INIT (gst_rtp_dtmf_mux_debug, "rtpdtmfmux", 0, +- "rtp dtmf muxer"); +- +- return gst_element_register (plugin, "rtpdtmfmux", GST_RANK_NONE, +- GST_TYPE_RTP_DTMF_MUX); +-} +diff --git a/gst/rtpmanager/gstrtpdtmfmux.h b/gst/rtpmanager/gstrtpdtmfmux.h +index 651e9ece7..df4eebd64 100644 +--- a/gst/rtpmanager/gstrtpdtmfmux.h ++++ b/gst/rtpmanager/gstrtpdtmfmux.h +@@ -61,7 +61,8 @@ struct _GstRTPDTMFMuxClass + }; + + GType gst_rtp_dtmf_mux_get_type (void); +-gboolean gst_rtp_dtmf_mux_plugin_init (GstPlugin * plugin); ++ ++GST_ELEMENT_REGISTER_DECLARE (rtpdtmfmux); + + G_END_DECLS + #endif /* __GST_RTP_DTMF_MUX_H__ */ +diff --git a/gst/rtpmanager/gstrtpfunnel.c b/gst/rtpmanager/gstrtpfunnel.c +index 7638b3497..841b55b00 100644 +--- a/gst/rtpmanager/gstrtpfunnel.c ++++ b/gst/rtpmanager/gstrtpfunnel.c +@@ -67,14 +67,14 @@ + #endif + + #include ++#include + + #include "gstrtpfunnel.h" ++#include "gstrtputils.h" + + GST_DEBUG_CATEGORY_STATIC (gst_rtp_funnel_debug); + #define GST_CAT_DEFAULT gst_rtp_funnel_debug + +-#define TWCC_EXTMAP_STR "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01" +- + /**************** GstRTPFunnelPad ****************/ + + struct _GstRtpFunnelPadClass +@@ -90,6 +90,8 @@ struct _GstRtpFunnelPad + }; + + G_DEFINE_TYPE (GstRtpFunnelPad, gst_rtp_funnel_pad, GST_TYPE_PAD); ++GST_ELEMENT_REGISTER_DEFINE (rtpfunnel, "rtpfunnel", GST_RANK_NONE, ++ GST_TYPE_RTP_FUNNEL); + + static void + gst_rtp_funnel_pad_class_init (G_GNUC_UNUSED GstRtpFunnelPadClass * klass) +@@ -127,9 +129,8 @@ struct _GstRtpFunnel + /* The last pad data was chained on */ + GstPad *current_pad; + +- guint8 twcc_ext_id; /* the negotiated twcc extmap id */ +- guint16 twcc_seqnum; /* our internal twcc seqnum */ + guint twcc_pads; /* numer of sinkpads with negotiated twcc */ ++ GstRTPHeaderExtension *twcc_ext; + + /* properties */ + gint common_ts_offset; +@@ -189,18 +190,28 @@ done: + static void + gst_rtp_funnel_forward_segment (GstRtpFunnel * funnel, GstPad * pad) + { +- GstEvent *segment; ++ GstEvent *event; ++ guint i; + + if (pad == funnel->current_pad) { + goto done; + } + +- segment = gst_pad_get_sticky_event (pad, GST_EVENT_SEGMENT, 0); +- if (segment && !gst_pad_push_event (funnel->srcpad, segment)) { ++ event = gst_pad_get_sticky_event (pad, GST_EVENT_SEGMENT, 0); ++ if (event && !gst_pad_push_event (funnel->srcpad, event)) { + 
GST_ERROR_OBJECT (funnel, "Could not push segment"); + goto done; + } + ++ for (i = 0;; i++) { ++ event = gst_pad_get_sticky_event (pad, GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, ++ i); ++ if (event == NULL) ++ break; ++ if (!gst_pad_push_event (funnel->srcpad, event)) ++ GST_ERROR_OBJECT (funnel, "Could not push custom event"); ++ } ++ + funnel->current_pad = pad; + + done: +@@ -212,36 +223,40 @@ gst_rtp_funnel_set_twcc_seqnum (GstRtpFunnel * funnel, + GstPad * pad, GstBuffer ** buf) + { + GstRtpFunnelPad *fpad = GST_RTP_FUNNEL_PAD_CAST (pad); ++ guint8 twcc_seq[2] = { 0, }; + GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ guint ext_id = gst_rtp_header_extension_get_id (funnel->twcc_ext); ++ guint8 *existing; ++ guint size; + +- if (!funnel->twcc_ext_id || !fpad->has_twcc) ++ if (!funnel->twcc_ext || !fpad->has_twcc) + return; + + *buf = gst_buffer_make_writable (*buf); + +- if (gst_rtp_buffer_map (*buf, GST_MAP_READWRITE, &rtp)) { +- gpointer data; ++ gst_rtp_header_extension_write (funnel->twcc_ext, *buf, ++ GST_RTP_HEADER_EXTENSION_ONE_BYTE, *buf, twcc_seq, sizeof (twcc_seq)); + +- /* if there already is a twcc-seqnum inside the packet */ +- if (gst_rtp_buffer_get_extension_onebyte_header (&rtp, funnel->twcc_ext_id, +- 0, &data, NULL)) { ++ if (!gst_rtp_buffer_map (*buf, GST_MAP_READWRITE, &rtp)) ++ goto map_failed; + +- /* with only one pad, we read the twcc-seqnum instead of writing it */ +- if (funnel->twcc_pads == 1) { +- funnel->twcc_seqnum = GST_READ_UINT16_BE (data); +- } else { +- GST_WRITE_UINT16_BE (data, funnel->twcc_seqnum); +- } +- } else { +- guint16 seq_be; +- GST_WRITE_UINT16_BE (&seq_be, funnel->twcc_seqnum); +- gst_rtp_buffer_add_extension_onebyte_header (&rtp, funnel->twcc_ext_id, +- &seq_be, 2); ++ if (gst_rtp_buffer_get_extension_onebyte_header (&rtp, ext_id, ++ 0, (gpointer) & existing, &size)) { ++ if (size >= gst_rtp_header_extension_get_max_size (funnel->twcc_ext, *buf)) { ++ existing[0] = twcc_seq[0]; ++ existing[1] = twcc_seq[1]; + } + } ++ /* TODO: two-byte variant */ ++ + gst_rtp_buffer_unmap (&rtp); + +- funnel->twcc_seqnum++; ++ return; ++ ++map_failed: ++ { ++ GST_ERROR ("failed to map buffer %p", *buf); ++ } + } + + static GstFlowReturn +@@ -292,48 +307,28 @@ static void + gst_rtp_funnel_set_twcc_ext_id (GstRtpFunnel * funnel, guint8 twcc_ext_id) + { + gchar *name; ++ guint current_ext_id; ++ ++ current_ext_id = gst_rtp_header_extension_get_id (funnel->twcc_ext); ++ g_object_set (funnel->twcc_ext, "n-streams", funnel->twcc_pads, NULL); + +- if (funnel->twcc_ext_id == twcc_ext_id) ++ if (current_ext_id == twcc_ext_id) + return; + + name = g_strdup_printf ("extmap-%u", twcc_ext_id); + +- GST_OBJECT_LOCK (funnel); +- gst_caps_set_simple (funnel->srccaps, name, G_TYPE_STRING, TWCC_EXTMAP_STR, +- NULL); +- GST_OBJECT_UNLOCK (funnel); ++ gst_caps_set_simple (funnel->srccaps, name, G_TYPE_STRING, ++ gst_rtp_header_extension_get_uri (funnel->twcc_ext), NULL); + + g_free (name); + + /* make sure we update the sticky with the new caps */ + funnel->send_sticky_events = TRUE; + +- GST_INFO_OBJECT (funnel, "Setting twcc-ext-id to %u", twcc_ext_id); +- funnel->twcc_ext_id = twcc_ext_id; ++ gst_rtp_header_extension_set_id (funnel->twcc_ext, twcc_ext_id); + } + +-static guint8 +-_get_extmap_id_for_attribute (const GstStructure * s, const gchar * ext_name) +-{ +- guint i; +- guint8 extmap_id = 0; +- guint n_fields = gst_structure_n_fields (s); +- +- for (i = 0; i < n_fields; i++) { +- const gchar *field_name = gst_structure_nth_field_name (s, i); +- if (g_str_has_prefix 
(field_name, "extmap-")) { +- const gchar *str = gst_structure_get_string (s, field_name); +- if (str && g_strcmp0 (str, ext_name) == 0) { +- gint64 id = g_ascii_strtoll (field_name + 7, NULL, 10); +- if (id > 0 && id < 15) { +- extmap_id = id; +- break; +- } +- } +- } +- } +- return extmap_id; +-} ++#define TWCC_EXTMAP_STR "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01" + + static gboolean + gst_rtp_funnel_sink_event (GstPad * pad, GstObject * parent, GstEvent * event) +@@ -357,31 +352,37 @@ gst_rtp_funnel_sink_event (GstPad * pad, GstObject * parent, GstEvent * event) + GstStructure *s; + guint ssrc; + guint8 ext_id; ++ GstCaps *rtpcaps = gst_caps_new_empty_simple (RTP_CAPS); + + gst_event_parse_caps (event, &caps); + + GST_OBJECT_LOCK (funnel); +- if (!gst_caps_can_intersect (funnel->srccaps, caps)) { ++ if (!gst_caps_can_intersect (rtpcaps, caps)) { + GST_ERROR_OBJECT (funnel, "Can't intersect with caps %" GST_PTR_FORMAT, + caps); + g_assert_not_reached (); + } +- GST_OBJECT_UNLOCK (funnel); ++ ++ gst_caps_unref (rtpcaps); + + s = gst_caps_get_structure (caps, 0); + if (gst_structure_get_uint (s, "ssrc", &ssrc)) { + fpad->ssrc = ssrc; + GST_DEBUG_OBJECT (pad, "Got ssrc: %u", ssrc); +- GST_OBJECT_LOCK (funnel); + g_hash_table_insert (funnel->ssrc_to_pad, GUINT_TO_POINTER (ssrc), pad); +- GST_OBJECT_UNLOCK (funnel); + } +- ext_id = _get_extmap_id_for_attribute (s, TWCC_EXTMAP_STR); ++ ++ if (!funnel->twcc_ext) ++ funnel->twcc_ext = ++ gst_rtp_header_extension_create_from_uri (TWCC_EXTMAP_STR); ++ ++ ext_id = gst_rtp_get_extmap_id_for_attribute (s, TWCC_EXTMAP_STR); + if (ext_id > 0) { + fpad->has_twcc = TRUE; + funnel->twcc_pads++; + gst_rtp_funnel_set_twcc_ext_id (funnel, ext_id); + } ++ GST_OBJECT_UNLOCK (funnel); + + forward = FALSE; + break; +@@ -410,15 +411,17 @@ gst_rtp_funnel_sink_query (GstPad * pad, GstObject * parent, GstQuery * query) + { + GstCaps *filter_caps; + GstCaps *new_caps; ++ GstCaps *rtpcaps = gst_caps_new_empty_simple (RTP_CAPS); + + gst_query_parse_caps (query, &filter_caps); + + GST_OBJECT_LOCK (funnel); + if (filter_caps) { +- new_caps = gst_caps_intersect_full (funnel->srccaps, filter_caps, ++ new_caps = gst_caps_intersect_full (rtpcaps, filter_caps, + GST_CAPS_INTERSECT_FIRST); ++ gst_caps_unref (rtpcaps); + } else { +- new_caps = gst_caps_copy (funnel->srccaps); ++ new_caps = rtpcaps; + } + GST_OBJECT_UNLOCK (funnel); + +@@ -440,7 +443,7 @@ gst_rtp_funnel_sink_query (GstPad * pad, GstObject * parent, GstQuery * query) + gst_query_parse_accept_caps (query, &caps); + + GST_OBJECT_LOCK (funnel); +- result = gst_caps_is_subset (caps, funnel->srccaps); ++ result = gst_caps_can_intersect (caps, funnel->srccaps); + if (!result) { + GST_ERROR_OBJECT (pad, + "caps: %" GST_PTR_FORMAT " were not compatible with: %" +@@ -615,6 +618,8 @@ gst_rtp_funnel_finalize (GObject * object) + gst_caps_unref (funnel->srccaps); + g_hash_table_destroy (funnel->ssrc_to_pad); + ++ gst_clear_object (&funnel->twcc_ext); ++ + G_OBJECT_CLASS (parent_class)->finalize (object); + } + +diff --git a/gst/rtpmanager/gstrtpfunnel.h b/gst/rtpmanager/gstrtpfunnel.h +index 6fb16d07b..fb436734d 100644 +--- a/gst/rtpmanager/gstrtpfunnel.h ++++ b/gst/rtpmanager/gstrtpfunnel.h +@@ -35,6 +35,8 @@ typedef struct _GstRtpFunnel GstRtpFunnel; + + GType gst_rtp_funnel_get_type (void); + ++GST_ELEMENT_REGISTER_DECLARE (rtpfunnel); ++ + typedef struct _GstRtpFunnelPadClass GstRtpFunnelPadClass; + typedef struct _GstRtpFunnelPad GstRtpFunnelPad; + +diff --git 
a/gst/rtpmanager/gstrtphdrext-clientaudiolevel.c b/gst/rtpmanager/gstrtphdrext-clientaudiolevel.c +new file mode 100644 +index 000000000..d6f98c24b +--- /dev/null ++++ b/gst/rtpmanager/gstrtphdrext-clientaudiolevel.c +@@ -0,0 +1,268 @@ ++/* GStreamer ++ * Copyright (C) <2018> Havard Graff ++ * Copyright (C) <2020-2021> Guillaume Desmottes ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more ++ */ ++ ++/** ++ * SECTION:element-rtphdrextclientaudiolevel ++ * @title: rtphdrextclientaudiolevel ++ * @short_description: Client-to-Mixer Audio Level Indication (RFC6464) RTP Header Extension ++ * ++ * Client-to-Mixer Audio Level Indication (RFC6464) RTP Header Extension. ++ * The extension should be automatically created by payloader and depayloaders, ++ * if their `auto-header-extension` property is enabled, if the extension ++ * is part of the RTP caps. ++ * ++ * ## Example pipeline ++ * |[ ++ * gst-launch-1.0 pulsesrc ! level audio-level-meta=true ! audiconvert ! ++ * rtpL16pay ! application/x-rtp, ++ * extmap-1=(string)\< \"\", urn:ietf:params:rtp-hdrext:ssrc-audio-level, ++ * \"vad=on\" \> ! udpsink ++ * ]| ++ * ++ * Since: 1.20 ++ * ++ */ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include "gstrtphdrext-clientaudiolevel.h" ++ ++#include ++ ++#define CLIENT_AUDIO_LEVEL_HDR_EXT_URI GST_RTP_HDREXT_BASE"ssrc-audio-level" ++ ++GST_DEBUG_CATEGORY_STATIC (rtphdrclient_audio_level_debug); ++#define GST_CAT_DEFAULT (rtphdrclient_audio_level_debug) ++ ++#define DEFAULT_VAD TRUE ++ ++enum ++{ ++ PROP_0, ++ PROP_VAD, ++}; ++ ++struct _GstRTPHeaderExtensionClientAudioLevel ++{ ++ GstRTPHeaderExtension parent; ++ ++ gboolean vad; ++}; ++ ++G_DEFINE_TYPE_WITH_CODE (GstRTPHeaderExtensionClientAudioLevel, ++ gst_rtp_header_extension_client_audio_level, GST_TYPE_RTP_HEADER_EXTENSION, ++ GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "rtphdrextclientaudiolevel", 0, ++ "RTP RFC 6464 Header Extensions");); ++GST_ELEMENT_REGISTER_DEFINE (rtphdrextclientaudiolevel, ++ "rtphdrextclientaudiolevel", GST_RANK_MARGINAL, ++ GST_TYPE_RTP_HEADER_EXTENSION_CLIENT_AUDIO_LEVEL); ++ ++static void ++gst_rtp_header_extension_client_audio_level_get_property (GObject * object, ++ guint prop_id, GValue * value, GParamSpec * pspec) ++{ ++ GstRTPHeaderExtensionClientAudioLevel *self = ++ GST_RTP_HEADER_EXTENSION_CLIENT_AUDIO_LEVEL (object); ++ ++ switch (prop_id) { ++ case PROP_VAD: ++ g_value_set_boolean (value, self->vad); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static GstRTPHeaderExtensionFlags ++ gst_rtp_header_extension_client_audio_level_get_supported_flags ++ (GstRTPHeaderExtension * ext) ++{ ++ return GST_RTP_HEADER_EXTENSION_ONE_BYTE | GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++} ++ ++static gsize ++gst_rtp_header_extension_client_audio_level_get_max_size (GstRTPHeaderExtension ++ * ext, const GstBuffer * input_meta) ++{ ++ return 2; ++} ++ ++static void ++set_vad (GstRTPHeaderExtension * ext, gboolean vad) ++{ ++ GstRTPHeaderExtensionClientAudioLevel *self = ++ 
GST_RTP_HEADER_EXTENSION_CLIENT_AUDIO_LEVEL (ext); ++ ++ if (self->vad == vad) ++ return; ++ ++ GST_DEBUG_OBJECT (ext, "vad: %d", vad); ++ self->vad = vad; ++ g_object_notify (G_OBJECT (self), "vad"); ++} ++ ++static gboolean ++ gst_rtp_header_extension_client_audio_level_set_attributes ++ (GstRTPHeaderExtension * ext, GstRTPHeaderExtensionDirection direction, ++ const gchar * attributes) ++{ ++ if (g_str_equal (attributes, "vad=on") || g_str_equal (attributes, "")) { ++ set_vad (ext, TRUE); ++ } else if (g_str_equal (attributes, "vad=off")) { ++ set_vad (ext, FALSE); ++ } else { ++ GST_WARNING_OBJECT (ext, "Invalid attribute: %s", attributes); ++ return FALSE; ++ } ++ ++ return TRUE; ++} ++ ++static gboolean ++ gst_rtp_header_extension_client_audio_level_set_caps_from_attributes ++ (GstRTPHeaderExtension * ext, GstCaps * caps) ++{ ++ GstRTPHeaderExtensionClientAudioLevel *self = ++ GST_RTP_HEADER_EXTENSION_CLIENT_AUDIO_LEVEL (ext); ++ const gchar *vad; ++ ++ if (self->vad) ++ vad = "vad=on"; ++ else ++ vad = "vad=off"; ++ ++ return gst_rtp_header_extension_set_caps_from_attributes_helper (ext, caps, ++ vad); ++} ++ ++static gssize ++gst_rtp_header_extension_client_audio_level_write (GstRTPHeaderExtension * ext, ++ const GstBuffer * input_meta, GstRTPHeaderExtensionFlags write_flags, ++ GstBuffer * output, guint8 * data, gsize size) ++{ ++ GstAudioLevelMeta *meta; ++ guint level; ++ ++ g_return_val_if_fail (size >= ++ gst_rtp_header_extension_client_audio_level_get_max_size (ext, NULL), -1); ++ g_return_val_if_fail (write_flags & ++ gst_rtp_header_extension_client_audio_level_get_supported_flags (ext), ++ -1); ++ ++ meta = gst_buffer_get_audio_level_meta ((GstBuffer *) input_meta); ++ if (!meta) { ++ GST_LOG_OBJECT (ext, "no meta"); ++ return 0; ++ } ++ ++ level = meta->level; ++ if (level > 127) { ++ GST_LOG_OBJECT (ext, "level from meta is higher than 127: %d, cropping", ++ meta->level); ++ level = 127; ++ } ++ ++ GST_LOG_OBJECT (ext, "writing ext (level: %d voice: %d)", level, ++ meta->voice_activity); ++ ++ /* Both one & two byte use the same format, the second byte being padding */ ++ data[0] = (level & 0x7F) | (meta->voice_activity << 7); ++ if (write_flags & GST_RTP_HEADER_EXTENSION_ONE_BYTE) { ++ return 1; ++ } ++ data[1] = 0; ++ return 2; ++} ++ ++static gboolean ++gst_rtp_header_extension_client_audio_level_read (GstRTPHeaderExtension * ext, ++ GstRTPHeaderExtensionFlags read_flags, const guint8 * data, gsize size, ++ GstBuffer * buffer) ++{ ++ guint8 level; ++ gboolean voice_activity; ++ ++ g_return_val_if_fail (read_flags & ++ gst_rtp_header_extension_client_audio_level_get_supported_flags (ext), ++ -1); ++ ++ /* Both one & two byte use the same format, the second byte being padding */ ++ level = data[0] & 0x7F; ++ voice_activity = (data[0] & 0x80) >> 7; ++ ++ GST_LOG_OBJECT (ext, "reading ext (level: %d voice: %d)", level, ++ voice_activity); ++ ++ gst_buffer_add_audio_level_meta (buffer, level, voice_activity); ++ ++ return TRUE; ++} ++ ++static void ++ gst_rtp_header_extension_client_audio_level_class_init ++ (GstRTPHeaderExtensionClientAudioLevelClass * klass) ++{ ++ GstRTPHeaderExtensionClass *rtp_hdr_class; ++ GstElementClass *gstelement_class; ++ GObjectClass *gobject_class; ++ ++ rtp_hdr_class = GST_RTP_HEADER_EXTENSION_CLASS (klass); ++ gobject_class = (GObjectClass *) klass; ++ gstelement_class = GST_ELEMENT_CLASS (klass); ++ ++ gobject_class->get_property = ++ gst_rtp_header_extension_client_audio_level_get_property; ++ ++ /** ++ * rtphdrextclientaudiolevel:vad: ++ * 
++ * If the vad extension attribute is enabled or not, default to %FALSE. ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_VAD, ++ g_param_spec_boolean ("vad", "vad", ++ "If the vad extension attribute is enabled or not", ++ DEFAULT_VAD, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ ++ rtp_hdr_class->get_supported_flags = ++ gst_rtp_header_extension_client_audio_level_get_supported_flags; ++ rtp_hdr_class->get_max_size = ++ gst_rtp_header_extension_client_audio_level_get_max_size; ++ rtp_hdr_class->set_attributes = ++ gst_rtp_header_extension_client_audio_level_set_attributes; ++ rtp_hdr_class->set_caps_from_attributes = ++ gst_rtp_header_extension_client_audio_level_set_caps_from_attributes; ++ rtp_hdr_class->write = gst_rtp_header_extension_client_audio_level_write; ++ rtp_hdr_class->read = gst_rtp_header_extension_client_audio_level_read; ++ ++ gst_element_class_set_static_metadata (gstelement_class, ++ "Client-to-Mixer Audio Level Indication (RFC6464) RTP Header Extension", ++ GST_RTP_HDREXT_ELEMENT_CLASS, ++ "Client-to-Mixer Audio Level Indication (RFC6464) RTP Header Extension", ++ "Guillaume Desmottes "); ++ gst_rtp_header_extension_class_set_uri (rtp_hdr_class, ++ CLIENT_AUDIO_LEVEL_HDR_EXT_URI); ++} ++ ++static void ++ gst_rtp_header_extension_client_audio_level_init ++ (GstRTPHeaderExtensionClientAudioLevel * self) ++{ ++ GST_DEBUG_OBJECT (self, "creating element"); ++ self->vad = DEFAULT_VAD; ++} +diff --git a/gst/rtpmanager/gstrtphdrext-clientaudiolevel.h b/gst/rtpmanager/gstrtphdrext-clientaudiolevel.h +new file mode 100644 +index 000000000..3ca688850 +--- /dev/null ++++ b/gst/rtpmanager/gstrtphdrext-clientaudiolevel.h +@@ -0,0 +1,32 @@ ++/* GStreamer ++ * Copyright (C) <2018> Havard Graff ++ * Copyright (C) <2020-2021> Guillaume Desmottes ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more ++ */ ++ ++#ifndef __GST_RTPHDREXT_CLIENT_AUDIO_LEVEL_H__ ++#define __GST_RTPHDREXT_CLIENT_AUDIO_LEVEL_H__ ++ ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++#define GST_TYPE_RTP_HEADER_EXTENSION_CLIENT_AUDIO_LEVEL (gst_rtp_header_extension_client_audio_level_get_type()) ++ ++G_DECLARE_FINAL_TYPE (GstRTPHeaderExtensionClientAudioLevel, gst_rtp_header_extension_client_audio_level, GST, RTP_HEADER_EXTENSION_CLIENT_AUDIO_LEVEL, GstRTPHeaderExtension) ++ ++GST_ELEMENT_REGISTER_DECLARE (rtphdrextclientaudiolevel); ++ ++G_END_DECLS ++ ++#endif /* __GST_RTPHDREXT_CLIENT_AUDIO_LEVEL_H__ */ +diff --git a/gst/rtpmanager/gstrtphdrext-mid.c b/gst/rtpmanager/gstrtphdrext-mid.c +new file mode 100644 +index 000000000..8f861503b +--- /dev/null ++++ b/gst/rtpmanager/gstrtphdrext-mid.c +@@ -0,0 +1,318 @@ ++/* GStreamer ++ * Copyright (C) <2021> Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. 
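The rtphdrextclientaudiolevel element defined above is normally instantiated automatically by payloaders and depayloaders when their auto-header-extension property is enabled and the extension shows up in the caps, as the SECTION comment notes. As a hedged illustration only, it can also be attached by hand through the generic header-extension API and the payloader's add-extension action signal (payloader is a hypothetical, already-created audio payloader such as rtpL16pay):

    GstRTPHeaderExtension *ext = gst_rtp_header_extension_create_from_uri
        ("urn:ietf:params:rtp-hdrext:ssrc-audio-level");

    /* pick an extmap id and hand the extension to the payloader,
     * which takes its own reference */
    gst_rtp_header_extension_set_id (ext, 1);
    g_signal_emit_by_name (payloader, "add-extension", ext);
    gst_clear_object (&ext);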
++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, ++ * Boston, MA 02111-1307, USA. ++ */ ++ ++/** ++ * SECTION:element-rtphdrextmid ++ * @title: rtphdrextmid ++ * @short_description: RTP Bundle Media Identification (MID) RTP Header ++ * Extension (RFC8843) ++ * @see_also: #GstRTPHeaderExtension, #GstRTPBasePayload, #GstRTPBaseDepayload, gstrtpbuffer ++ * ++ * Since: 1.22 ++ */ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include ++ ++#include "gstrtphdrext-mid.h" ++ ++GST_DEBUG_CATEGORY_STATIC (rtphdrext_mid_debug); ++#define GST_CAT_DEFAULT (rtphdrext_mid_debug) ++ ++#define MID_EXTMAP_STR GST_RTP_HDREXT_BASE "sdes:mid" ++ ++enum ++{ ++ PROP_0, ++ PROP_MID, ++}; ++ ++struct _GstRTPHeaderExtensionMid ++{ ++ GstRTPHeaderExtension parent; ++ ++ char *mid; ++}; ++ ++#define parent_class gst_rtp_header_extension_mid_parent_class ++G_DEFINE_TYPE_WITH_CODE (GstRTPHeaderExtensionMid, ++ gst_rtp_header_extension_mid, GST_TYPE_RTP_HEADER_EXTENSION, ++ GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "rtphdrext-mid", 0, ++ "RTP BUNDLE MID Header Extensions (RFC8843)") ++ ); ++GST_ELEMENT_REGISTER_DEFINE (rtphdrextmid, "rtphdrextmid", ++ GST_RANK_MARGINAL, GST_TYPE_RTP_HEADER_EXTENSION_MID); ++ ++static GstRTPHeaderExtensionFlags ++gst_rtp_header_extension_mid_get_supported_flags (GstRTPHeaderExtension * ext) ++{ ++ GstRTPHeaderExtensionMid *self = GST_RTP_HEADER_EXTENSION_MID (ext); ++ GstRTPHeaderExtensionFlags flags = ++ GST_RTP_HEADER_EXTENSION_ONE_BYTE | GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++ gssize mid_len = -1; ++ ++ GST_OBJECT_LOCK (ext); ++ if (self->mid) ++ mid_len = strlen (self->mid); ++ GST_OBJECT_UNLOCK (ext); ++ if (mid_len > 16) ++ flags = GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++ ++ return flags; ++} ++ ++static gsize ++gst_rtp_header_extension_mid_get_max_size (GstRTPHeaderExtension * ext, ++ const GstBuffer * buffer) ++{ ++ if (gst_rtp_header_extension_mid_get_supported_flags (ext) & ++ GST_RTP_HEADER_EXTENSION_ONE_BYTE) ++ return 16; ++ else ++ return 255; ++} ++ ++static gssize ++gst_rtp_header_extension_mid_write (GstRTPHeaderExtension * ext, ++ const GstBuffer * input_meta, GstRTPHeaderExtensionFlags write_flags, ++ GstBuffer * output, guint8 * data, gsize size) ++{ ++ GstRTPHeaderExtensionMid *self = GST_RTP_HEADER_EXTENSION_MID (ext); ++ gsize len = 0; ++ ++ g_return_val_if_fail (size >= ++ gst_rtp_header_extension_mid_get_max_size (ext, NULL), -1); ++ g_return_val_if_fail (write_flags & ++ gst_rtp_header_extension_mid_get_supported_flags (ext), -1); ++ ++ GST_OBJECT_LOCK (ext); ++ if (!self->mid) { ++ GST_LOG_OBJECT (self, "no mid to write"); ++ goto out; ++ } ++ ++ /* TODO: we don't need to always add mid, we can selectively omit it from e.g. ++ * non-video-keyframes or some percentage of the produced frames, e.g. 
RFC8843 ++ * mentions possibly using packet-loss as a indication of how often to add mid ++ * to packets */ ++ len = strlen (self->mid); ++ if ((write_flags & GST_RTP_HEADER_EXTENSION_TWO_BYTE) == 0 && len > 16) { ++ GST_DEBUG_OBJECT (self, "cannot write a mid of size %" G_GSIZE_FORMAT ++ " without using the two byte extension format", len); ++ len = 0; ++ goto out; ++ } ++ if (len > 0) { ++ GST_LOG_OBJECT (self, "writing mid \'%s\'", self->mid); ++ memcpy (data, self->mid, len); ++ } ++ ++out: ++ GST_OBJECT_UNLOCK (ext); ++ return len; ++} ++ ++static gboolean ++gst_rtp_header_extension_mid_read (GstRTPHeaderExtension * ext, ++ GstRTPHeaderExtensionFlags read_flags, const guint8 * data, gsize size, ++ GstBuffer * buffer) ++{ ++ GstRTPHeaderExtensionMid *self = GST_RTP_HEADER_EXTENSION_MID (ext); ++ gboolean notify = FALSE; ++ ++ if (!data || size == 0) ++ return TRUE; ++ ++ if (read_flags & GST_RTP_HEADER_EXTENSION_ONE_BYTE && (size < 1 || size > 16)) { ++ GST_ERROR_OBJECT (ext, ++ "one-byte header extensions must be between 1 and 16 bytes inculusive"); ++ return FALSE; ++ } ++ ++ GST_OBJECT_LOCK (self); ++ if (!self->mid || strncmp ((const char *) data, self->mid, size) != 0) { ++ g_clear_pointer (&self->mid, g_free); ++ self->mid = g_strndup ((const char *) data, size); ++ notify = TRUE; ++ } ++ GST_OBJECT_UNLOCK (self); ++ ++ if (notify) ++ g_object_notify ((GObject *) self, "mid"); ++ ++ return TRUE; ++} ++ ++static void ++gst_rtp_header_extension_mid_get_property (GObject * object, ++ guint prop_id, GValue * value, GParamSpec * pspec) ++{ ++ GstRTPHeaderExtensionMid *self = GST_RTP_HEADER_EXTENSION_MID (object); ++ ++ switch (prop_id) { ++ case PROP_MID: ++ GST_OBJECT_LOCK (self); ++ g_value_set_string (value, self->mid); ++ GST_OBJECT_UNLOCK (self); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static gboolean ++validate_mid (const char *rid) ++{ ++ const char *iter; ++ ++ /* For avoidance of doubt, the only allowed byte values for ++ * these IDs are decimal 48 through 57, 65 through 90, and 97 through ++ * 122. ++ */ ++ for (iter = rid; iter && iter[0]; iter++) { ++ if (iter[0] < 48) ++ return FALSE; ++ if (iter[0] > 122) ++ return FALSE; ++ if (iter[0] > 57 && iter[0] < 65) ++ return FALSE; ++ if (iter[0] > 90 && iter[0] < 97) ++ return FALSE; ++ } ++ ++ return TRUE; ++} ++ ++static void ++gst_rtp_header_extension_mid_set_property (GObject * object, ++ guint prop_id, const GValue * value, GParamSpec * pspec) ++{ ++ GstRTPHeaderExtensionMid *self = GST_RTP_HEADER_EXTENSION_MID (object); ++ ++ switch (prop_id) { ++ case PROP_MID:{ ++ const char *mid; ++ GST_OBJECT_LOCK (self); ++ mid = g_value_get_string (value); ++ if (!validate_mid (mid)) { ++ GST_WARNING_OBJECT (self, "Could not set mid \'%s\'. 
Validation failed", ++ mid); ++ } else { ++ g_clear_pointer (&self->mid, g_free); ++ self->mid = g_strdup (mid); ++ } ++ GST_OBJECT_UNLOCK (self); ++ break; ++ } ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_rtp_header_extension_mid_finalize (GObject * object) ++{ ++ GstRTPHeaderExtensionMid *self = GST_RTP_HEADER_EXTENSION_MID (object); ++ ++ g_clear_pointer (&self->mid, g_free); ++ ++ G_OBJECT_CLASS (parent_class)->finalize (object); ++} ++ ++static gboolean ++gst_rtp_header_extension_mid_set_caps_from_attributes (GstRTPHeaderExtension ++ * ext, GstCaps * caps) ++{ ++ GstRTPHeaderExtensionMid *self = GST_RTP_HEADER_EXTENSION_MID (ext); ++ GstStructure *s = gst_caps_get_structure (caps, 0); ++ ++ if (!GST_RTP_HEADER_EXTENSION_CLASS (parent_class)->set_caps_from_attributes ++ (ext, caps)) ++ return FALSE; ++ ++ GST_OBJECT_LOCK (ext); ++ if (self->mid) { ++ if (gst_structure_has_field (s, "a-mid")) { ++ GST_WARNING_OBJECT (ext, "caps already contain the \'a-mid\' field!"); ++ goto error; ++ } ++ gst_structure_set (s, "a-mid", G_TYPE_STRING, self->mid, NULL); ++ GST_DEBUG_OBJECT (ext, "set mid \'%s\' on output caps", self->mid); ++ } ++ GST_OBJECT_UNLOCK (ext); ++ ++ return TRUE; ++ ++error: ++ GST_OBJECT_UNLOCK (ext); ++ return FALSE; ++} ++ ++static void ++gst_rtp_header_extension_mid_class_init (GstRTPHeaderExtensionMidClass * klass) ++{ ++ GstRTPHeaderExtensionClass *rtp_hdr_class; ++ GstElementClass *gstelement_class; ++ GObjectClass *gobject_class; ++ ++ rtp_hdr_class = (GstRTPHeaderExtensionClass *) klass; ++ gobject_class = (GObjectClass *) klass; ++ gstelement_class = (GstElementClass *) klass; ++ ++ gobject_class->set_property = gst_rtp_header_extension_mid_set_property; ++ gobject_class->get_property = gst_rtp_header_extension_mid_get_property; ++ gobject_class->finalize = gst_rtp_header_extension_mid_finalize; ++ ++ /** ++ * rtphdrextmid:mid: ++ * ++ * The Media Identification (MID) value either last retrieved from the RTP ++ * Header extension, or to set on outgoing RTP packets. 
++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, PROP_MID, ++ g_param_spec_string ("mid", "mid", ++ "The Media Identification (MID) value last read or to write from/to " ++ "RTP buffers", NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ rtp_hdr_class->get_supported_flags = ++ gst_rtp_header_extension_mid_get_supported_flags; ++ rtp_hdr_class->get_max_size = gst_rtp_header_extension_mid_get_max_size; ++ rtp_hdr_class->write = gst_rtp_header_extension_mid_write; ++ rtp_hdr_class->read = gst_rtp_header_extension_mid_read; ++ rtp_hdr_class->set_caps_from_attributes = ++ gst_rtp_header_extension_mid_set_caps_from_attributes; ++ ++ gst_element_class_set_static_metadata (gstelement_class, ++ "RTP BUNDLE MID Header Extension", GST_RTP_HDREXT_ELEMENT_CLASS, ++ "Extends RTP packets to add or retrieve a Media Identification (MID) " ++ "value as specified in RFC8843", ++ "Matthew Waters "); ++ gst_rtp_header_extension_class_set_uri (rtp_hdr_class, MID_EXTMAP_STR); ++} ++ ++static void ++gst_rtp_header_extension_mid_init (GstRTPHeaderExtensionMid * self) ++{ ++} +diff --git a/gst/rtpmanager/gstrtphdrext-mid.h b/gst/rtpmanager/gstrtphdrext-mid.h +new file mode 100644 +index 000000000..9e58598fd +--- /dev/null ++++ b/gst/rtpmanager/gstrtphdrext-mid.h +@@ -0,0 +1,36 @@ ++/* GStreamer ++ * Copyright (C) 2021 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, ++ * Boston, MA 02111-1307, USA. ++ */ ++ ++#ifndef __GST_RTPHDREXT_MID_H__ ++#define __GST_RTPHDREXT_MID_H__ ++ ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++#define GST_TYPE_RTP_HEADER_EXTENSION_MID (gst_rtp_header_extension_mid_get_type()) ++ ++G_DECLARE_FINAL_TYPE (GstRTPHeaderExtensionMid, gst_rtp_header_extension_mid, GST, RTP_HEADER_EXTENSION_MID, GstRTPHeaderExtension); ++ ++GST_ELEMENT_REGISTER_DECLARE (rtphdrextmid); ++ ++G_END_DECLS ++ ++#endif /* __GST_RTPHDREXT_MID_H__ */ +diff --git a/gst/rtpmanager/gstrtphdrext-ntp.c b/gst/rtpmanager/gstrtphdrext-ntp.c +new file mode 100644 +index 000000000..a027138d5 +--- /dev/null ++++ b/gst/rtpmanager/gstrtphdrext-ntp.c +@@ -0,0 +1,269 @@ ++/* GStreamer ++ * Copyright (C) <2022> Sebastian Dröge ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. 
++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, ++ * Boston, MA 02111-1307, USA. ++ */ ++ ++/** ++ * SECTION:element-rtphdrextntp64 ++ * @title: rtphdrextntp64 ++ * @short_description: RTP Header Extension for RFC6051 64-bit NTP timestamps for rapid synchronization. ++ * @see_also: #GstRTPHeaderExtension, #GstRTPBasePayload, #GstRTPBaseDepayload, gstrtpbuffer ++ * ++ * Since: 1.22 ++ */ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include ++ ++#include "gstrtphdrext-ntp.h" ++ ++GST_DEBUG_CATEGORY_STATIC (rtphdrext_ntp_debug); ++#define GST_CAT_DEFAULT (rtphdrext_ntp_debug) ++ ++enum ++{ ++ PROP_0, ++ PROP_INTERVAL, ++ PROP_EVERY_PACKET, ++}; ++ ++#define DEFAULT_INTERVAL 0 ++#define DEFAULT_EVERY_PACKET FALSE ++ ++static GstStaticCaps ntp_reference_timestamp_caps = ++GST_STATIC_CAPS ("timestamp/x-ntp"); ++ ++struct _GstRTPHeaderExtensionNtp64 ++{ ++ GstRTPHeaderExtension parent; ++ ++ GstClockTime last_pts; ++ ++ GstClockTime interval; ++ gboolean every_packet; ++}; ++ ++G_DEFINE_TYPE_WITH_CODE (GstRTPHeaderExtensionNtp64, ++ gst_rtp_header_extension_ntp_64, ++ GST_TYPE_RTP_HEADER_EXTENSION, GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, ++ "rtphdrextntp", 0, "RTP RFC6051 NTP Timestamps Header Extension"); ++ ); ++ ++GST_ELEMENT_REGISTER_DEFINE (rtphdrextntp64, ++ "rtphdrextntp64", GST_RANK_MARGINAL, GST_TYPE_RTP_HEADER_EXTENSION_NTP_64); ++ ++static GstRTPHeaderExtensionFlags ++ gst_rtp_header_extension_ntp_get_supported_flags ++ (GstRTPHeaderExtension * ext) ++{ ++ return GST_RTP_HEADER_EXTENSION_ONE_BYTE | GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++} ++ ++static gsize ++ gst_rtp_header_extension_ntp_64_get_max_size ++ (GstRTPHeaderExtension * ext, const GstBuffer * buffer) ++{ ++ return 8; ++} ++ ++static gssize ++gst_rtp_header_extension_ntp_64_write (GstRTPHeaderExtension ++ * ext, const GstBuffer * input_meta, GstRTPHeaderExtensionFlags write_flags, ++ GstBuffer * output, guint8 * data, gsize size) ++{ ++ GstRTPHeaderExtensionNtp64 *self = GST_RTP_HEADER_EXTENSION_NTP_64 (ext); ++ ++ g_return_val_if_fail (size >= ++ gst_rtp_header_extension_ntp_64_get_max_size (ext, NULL), -1); ++ g_return_val_if_fail (write_flags & ++ gst_rtp_header_extension_ntp_get_supported_flags (ext), -1); ++ ++ if (self->every_packet ++ || self->last_pts == GST_CLOCK_TIME_NONE ++ || !GST_BUFFER_PTS_IS_VALID (input_meta) ++ || (self->last_pts != GST_BUFFER_PTS (input_meta) ++ && (GST_BUFFER_IS_DISCONT (input_meta) ++ || (GST_BUFFER_PTS (input_meta) >= self->last_pts ++ && GST_BUFFER_PTS (input_meta) - self->last_pts >= ++ self->interval)))) { ++ GstCaps *caps; ++ GstReferenceTimestampMeta *meta; ++ ++ caps = gst_static_caps_get (&ntp_reference_timestamp_caps); ++ meta = ++ gst_buffer_get_reference_timestamp_meta ((GstBuffer *) input_meta, ++ caps); ++ if (meta) { ++ guint64 ntptime = ++ gst_util_uint64_scale (meta->timestamp, G_GUINT64_CONSTANT (1) << 32, ++ GST_SECOND); ++ ++ GST_WRITE_UINT64_BE (data, ntptime); ++ } else { ++ memset (data, 0, 8); ++ } ++ gst_caps_unref (caps); ++ self->last_pts = GST_BUFFER_PTS (input_meta); ++ return 8; ++ } else { ++ return 0; ++ } ++} ++ ++static gboolean ++gst_rtp_header_extension_ntp_64_read (GstRTPHeaderExtension ++ * ext, GstRTPHeaderExtensionFlags read_flags, const guint8 * data, ++ gsize size, GstBuffer * buffer) ++{ ++ GstCaps *caps; ++ guint64 ntptime; ++ GstClockTime timestamp; ++ ++ if (size < 8) ++ return 
FALSE; ++ ++ caps = gst_static_caps_get (&ntp_reference_timestamp_caps); ++ ++ ntptime = GST_READ_UINT64_BE (data); ++ timestamp = ++ gst_util_uint64_scale (ntptime, GST_SECOND, G_GUINT64_CONSTANT (1) << 32); ++ ++ gst_buffer_add_reference_timestamp_meta (buffer, caps, timestamp, ++ GST_CLOCK_TIME_NONE); ++ ++ gst_caps_unref (caps); ++ ++ return TRUE; ++} ++ ++ ++static void ++gst_rtp_header_extension_ntp_64_get_property (GObject * ++ object, guint prop_id, GValue * value, GParamSpec * pspec) ++{ ++ GstRTPHeaderExtensionNtp64 *self = GST_RTP_HEADER_EXTENSION_NTP_64 (object); ++ ++ switch (prop_id) { ++ case PROP_INTERVAL: ++ GST_OBJECT_LOCK (self); ++ g_value_set_uint64 (value, self->interval); ++ GST_OBJECT_UNLOCK (self); ++ break; ++ case PROP_EVERY_PACKET: ++ GST_OBJECT_LOCK (self); ++ g_value_set_boolean (value, self->every_packet); ++ GST_OBJECT_UNLOCK (self); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_rtp_header_extension_ntp_64_set_property (GObject * ++ object, guint prop_id, const GValue * value, GParamSpec * pspec) ++{ ++ GstRTPHeaderExtensionNtp64 *self = GST_RTP_HEADER_EXTENSION_NTP_64 (object); ++ ++ switch (prop_id) { ++ case PROP_INTERVAL: ++ GST_OBJECT_LOCK (self); ++ self->interval = g_value_get_uint64 (value); ++ GST_OBJECT_UNLOCK (self); ++ break; ++ case PROP_EVERY_PACKET: ++ GST_OBJECT_LOCK (self); ++ self->every_packet = g_value_get_boolean (value); ++ GST_OBJECT_UNLOCK (self); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++ gst_rtp_header_extension_ntp_64_class_init ++ (GstRTPHeaderExtensionNtp64Class * klass) ++{ ++ GstRTPHeaderExtensionClass *rtp_hdr_class; ++ GstElementClass *gstelement_class; ++ GObjectClass *gobject_class; ++ ++ rtp_hdr_class = (GstRTPHeaderExtensionClass *) klass; ++ gobject_class = (GObjectClass *) klass; ++ gstelement_class = (GstElementClass *) klass; ++ ++ gobject_class->set_property = gst_rtp_header_extension_ntp_64_set_property; ++ gobject_class->get_property = gst_rtp_header_extension_ntp_64_get_property; ++ ++ /** ++ * rtphdrextntp64:interval: ++ * ++ * The minimum interval between packets that get the header extension added. ++ * ++ * On discontinuities the interval will be reset and the next packet gets ++ * the header extension added. ++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, PROP_INTERVAL, ++ g_param_spec_uint64 ("interval", "Interval", ++ "Interval between consecutive packets that get the header extension added", ++ 0, G_MAXUINT64, DEFAULT_INTERVAL, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ /** ++ * rtphdrextntp64:every-packet: ++ * ++ * If set to %TRUE the header extension will be added to every packet, ++ * independent of its timestamp. By default only the first packet with a ++ * given timestamp will get the header extension added. 
++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, PROP_EVERY_PACKET, ++ g_param_spec_boolean ("every-packet", "Every Packet", ++ "Add the header extension to every packet", DEFAULT_EVERY_PACKET, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ rtp_hdr_class->get_supported_flags = ++ gst_rtp_header_extension_ntp_get_supported_flags; ++ rtp_hdr_class->get_max_size = gst_rtp_header_extension_ntp_64_get_max_size; ++ rtp_hdr_class->write = gst_rtp_header_extension_ntp_64_write; ++ rtp_hdr_class->read = gst_rtp_header_extension_ntp_64_read; ++ ++ gst_element_class_set_static_metadata (gstelement_class, ++ "RTP Header Extension RFC6051 64-bit NTP timestamp", ++ GST_RTP_HDREXT_ELEMENT_CLASS, ++ "Extends RTP packets to add or retrieve a 64-bit NTP " ++ "timestamp as specified in RFC6501", ++ "Sebastian Dröge "); ++ gst_rtp_header_extension_class_set_uri (rtp_hdr_class, ++ GST_RTP_HDREXT_BASE GST_RTP_HDREXT_NTP_64); ++} ++ ++static void ++gst_rtp_header_extension_ntp_64_init (GstRTPHeaderExtensionNtp64 * self) ++{ ++ self->last_pts = GST_CLOCK_TIME_NONE; ++ self->interval = DEFAULT_INTERVAL; ++ self->every_packet = DEFAULT_EVERY_PACKET; ++} +diff --git a/gst/rtpmanager/gstrtphdrext-ntp.h b/gst/rtpmanager/gstrtphdrext-ntp.h +new file mode 100644 +index 000000000..0e468058b +--- /dev/null ++++ b/gst/rtpmanager/gstrtphdrext-ntp.h +@@ -0,0 +1,36 @@ ++/* GStreamer ++ * Copyright (C) <2022> Sebastian Dröge ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, ++ * Boston, MA 02111-1307, USA. ++ */ ++ ++#ifndef __GST_RTPHDREXT_NTP_H__ ++#define __GST_RTPHDREXT_NTP_H__ ++ ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++#define GST_TYPE_RTP_HEADER_EXTENSION_NTP_64 (gst_rtp_header_extension_ntp_64_get_type()) ++ ++G_DECLARE_FINAL_TYPE (GstRTPHeaderExtensionNtp64, gst_rtp_header_extension_ntp_64, GST, RTP_HEADER_EXTENSION_NTP_64, GstRTPHeaderExtension); ++ ++GST_ELEMENT_REGISTER_DECLARE (rtphdrextntp64); ++ ++G_END_DECLS ++ ++#endif /* __GST_RTPHDREXT_NTP_H__ */ +diff --git a/gst/rtpmanager/gstrtphdrext-repairedstreamid.c b/gst/rtpmanager/gstrtphdrext-repairedstreamid.c +new file mode 100644 +index 000000000..706176952 +--- /dev/null ++++ b/gst/rtpmanager/gstrtphdrext-repairedstreamid.c +@@ -0,0 +1,307 @@ ++/* GStreamer ++ * Copyright (C) <2021> Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. 
++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, ++ * Boston, MA 02111-1307, USA. ++ */ ++ ++/** ++ * SECTION:element-rtphdrextrepairedstreamid ++ * @title: rtphdrextrepairedstreamid ++ * @short_description: RTP SDES Header Extension for RFC8852 RepairedRtpStreamId ++ * Extension ++ * @see_also: #GstRTPHeaderExtension, #GstRTPBasePayload, #GstRTPBaseDepayload, gstrtpbuffer ++ * ++ * Since: 1.22 ++ */ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include ++ ++#include "gstrtphdrext-repairedstreamid.h" ++ ++GST_DEBUG_CATEGORY_STATIC (rtphdrext_repaired_stream_id_debug); ++#define GST_CAT_DEFAULT (rtphdrext_repaired_stream_id_debug) ++ ++#define REPAIRED_RID_EXTMAP_STR GST_RTP_HDREXT_BASE "sdes:repaired-rtp-stream-id" ++ ++enum ++{ ++ PROP_0, ++ PROP_RID, ++}; ++ ++struct _GstRTPHeaderExtensionRepairedStreamId ++{ ++ GstRTPHeaderExtension parent; ++ ++ char *rid; ++}; ++ ++#define gst_rtp_header_extension_repaired_stream_id_parent_class parent_class ++G_DEFINE_TYPE_WITH_CODE (GstRTPHeaderExtensionRepairedStreamId, ++ gst_rtp_header_extension_repaired_stream_id, ++ GST_TYPE_RTP_HEADER_EXTENSION, GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, ++ "rtphdrextrepairedstreamid", 0, ++ "RTP RFC8852 RepairedRtpStreamId Header Extensions"); ++ ); ++GST_ELEMENT_REGISTER_DEFINE (rtphdrextrepairedstreamid, ++ "rtphdrextrepairedstreamid", GST_RANK_MARGINAL, ++ GST_TYPE_RTP_HEADER_EXTENSION_REPAIRED_STREAM_ID); ++ ++static GstRTPHeaderExtensionFlags ++ gst_rtp_header_extension_repaired_stream_id_get_supported_flags ++ (GstRTPHeaderExtension * ext) ++{ ++ GstRTPHeaderExtensionRepairedStreamId *self = ++ GST_RTP_HEADER_EXTENSION_REPAIRED_STREAM_ID (ext); ++ GstRTPHeaderExtensionFlags flags = ++ GST_RTP_HEADER_EXTENSION_ONE_BYTE | GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++ gssize rid_len = -1; ++ ++ GST_OBJECT_LOCK (ext); ++ if (self->rid) ++ rid_len = strlen (self->rid); ++ GST_OBJECT_UNLOCK (ext); ++ /* One byte extensions only support [1, 16] bytes */ ++ if (rid_len > 16) ++ flags = GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++ ++ return flags; ++} ++ ++static gsize ++ gst_rtp_header_extension_repaired_stream_id_get_max_size ++ (GstRTPHeaderExtension * ext, const GstBuffer * buffer) ++{ ++ if (gst_rtp_header_extension_repaired_stream_id_get_supported_flags ++ (ext) & GST_RTP_HEADER_EXTENSION_ONE_BYTE) ++ return 16; ++ else ++ return 255; ++} ++ ++static gssize ++gst_rtp_header_extension_repaired_stream_id_write (GstRTPHeaderExtension ++ * ext, const GstBuffer * input_meta, GstRTPHeaderExtensionFlags write_flags, ++ GstBuffer * output, guint8 * data, gsize size) ++{ ++ GstRTPHeaderExtensionRepairedStreamId *self = ++ GST_RTP_HEADER_EXTENSION_REPAIRED_STREAM_ID (ext); ++ gsize len = 0; ++ ++ g_return_val_if_fail (size >= ++ gst_rtp_header_extension_repaired_stream_id_get_max_size (ext, NULL), -1); ++ g_return_val_if_fail (write_flags & ++ gst_rtp_header_extension_repaired_stream_id_get_supported_flags ++ (ext), -1); ++ ++ GST_OBJECT_LOCK (ext); ++ if (!self->rid) { ++ GST_LOG_OBJECT (self, "no rid to write"); ++ goto out; ++ } ++ ++ /* TODO: we don't need to always add rid, we can selectively omit it from e.g. ++ * non-video-keyframes or some percentage of the produced frames, e.g. 
RFC8852 ++ * mentions possibly using packet-loss as a indication of how often to add rid ++ * to packets */ ++ GST_LOG_OBJECT (self, "writing rid \'%s\'", self->rid); ++ len = strlen (self->rid); ++ if ((write_flags & GST_RTP_HEADER_EXTENSION_TWO_BYTE) == 0 && len > 16) { ++ GST_DEBUG_OBJECT (self, "cannot write a rid of size %" G_GSIZE_FORMAT ++ " without using the two byte extension format", len); ++ len = 0; ++ goto out; ++ } ++ if (len > 0) { ++ GST_LOG_OBJECT (self, "writing rid \'%s\'", self->rid); ++ memcpy (data, self->rid, len); ++ } ++ ++out: ++ GST_OBJECT_UNLOCK (ext); ++ return len; ++} ++ ++static gboolean ++gst_rtp_header_extension_repaired_stream_id_read (GstRTPHeaderExtension ++ * ext, GstRTPHeaderExtensionFlags read_flags, const guint8 * data, ++ gsize size, GstBuffer * buffer) ++{ ++ GstRTPHeaderExtensionRepairedStreamId *self = ++ GST_RTP_HEADER_EXTENSION_REPAIRED_STREAM_ID (ext); ++ gboolean notify = FALSE; ++ ++ if (!data || size == 0) ++ return TRUE; ++ ++ if (read_flags & GST_RTP_HEADER_EXTENSION_ONE_BYTE && (size < 1 || size > 16)) { ++ GST_ERROR_OBJECT (ext, ++ "one-byte header extensions must be between 1 and 16 bytes inculusive"); ++ return FALSE; ++ } ++ ++ GST_OBJECT_LOCK (self); ++ if (!self->rid || strncmp ((const char *) data, self->rid, size) != 0) { ++ g_clear_pointer (&self->rid, g_free); ++ self->rid = g_strndup ((const char *) data, size); ++ notify = TRUE; ++ } ++ GST_OBJECT_UNLOCK (self); ++ ++ if (notify) ++ g_object_notify ((GObject *) self, "rid"); ++ ++ return TRUE; ++} ++ ++static void ++gst_rtp_header_extension_repaired_stream_id_get_property (GObject * ++ object, guint prop_id, GValue * value, GParamSpec * pspec) ++{ ++ GstRTPHeaderExtensionRepairedStreamId *self = ++ GST_RTP_HEADER_EXTENSION_REPAIRED_STREAM_ID (object); ++ ++ switch (prop_id) { ++ case PROP_RID: ++ GST_OBJECT_LOCK (self); ++ g_value_set_string (value, self->rid); ++ GST_OBJECT_UNLOCK (self); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static gboolean ++validate_rid (const char *rid) ++{ ++ const char *iter; ++ ++ /* For avoidance of doubt, the only allowed byte values for ++ * these IDs are decimal 48 through 57, 65 through 90, and 97 through ++ * 122. ++ */ ++ for (iter = rid; iter && iter[0]; iter++) { ++ if (iter[0] < 48) ++ return FALSE; ++ if (iter[0] > 122) ++ return FALSE; ++ if (iter[0] > 57 && iter[0] < 65) ++ return FALSE; ++ if (iter[0] > 90 && iter[0] < 97) ++ return FALSE; ++ } ++ ++ return TRUE; ++} ++ ++static void ++gst_rtp_header_extension_repaired_stream_id_set_property (GObject * ++ object, guint prop_id, const GValue * value, GParamSpec * pspec) ++{ ++ GstRTPHeaderExtensionRepairedStreamId *self = ++ GST_RTP_HEADER_EXTENSION_REPAIRED_STREAM_ID (object); ++ ++ switch (prop_id) { ++ case PROP_RID:{ ++ const char *rid; ++ GST_OBJECT_LOCK (self); ++ rid = g_value_get_string (value); ++ if (!validate_rid (rid)) { ++ GST_WARNING_OBJECT (self, "Could not set rid \'%s\'. 
Validation failed", ++ rid); ++ } else { ++ g_clear_pointer (&self->rid, g_free); ++ self->rid = g_strdup (rid); ++ } ++ GST_OBJECT_UNLOCK (self); ++ break; ++ } ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_rtp_header_extension_repaired_stream_id_finalize (GObject * object) ++{ ++ GstRTPHeaderExtensionRepairedStreamId *self = ++ GST_RTP_HEADER_EXTENSION_REPAIRED_STREAM_ID (object); ++ ++ g_clear_pointer (&self->rid, g_free); ++ ++ G_OBJECT_CLASS (parent_class)->finalize (object); ++} ++ ++static void ++ gst_rtp_header_extension_repaired_stream_id_class_init ++ (GstRTPHeaderExtensionRepairedStreamIdClass * klass) ++{ ++ GstRTPHeaderExtensionClass *rtp_hdr_class; ++ GstElementClass *gstelement_class; ++ GObjectClass *gobject_class; ++ ++ rtp_hdr_class = (GstRTPHeaderExtensionClass *) klass; ++ gobject_class = (GObjectClass *) klass; ++ gstelement_class = (GstElementClass *) klass; ++ ++ gobject_class->set_property = ++ gst_rtp_header_extension_repaired_stream_id_set_property; ++ gobject_class->get_property = ++ gst_rtp_header_extension_repaired_stream_id_get_property; ++ gobject_class->finalize = ++ gst_rtp_header_extension_repaired_stream_id_finalize; ++ ++ /** ++ * rtphdrextrepairedstreamid:rid: ++ * ++ * The RepairedRtpStreamID (RID) value either last retrieved from the RTP ++ * Header extension, or to set on outgoing RTP packets. ++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, PROP_RID, ++ g_param_spec_string ("rid", "rid", ++ "The RepairedRtpStreamId (RID) value last read or to write from/to " ++ "RTP buffers", NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ rtp_hdr_class->get_supported_flags = ++ gst_rtp_header_extension_repaired_stream_id_get_supported_flags; ++ rtp_hdr_class->get_max_size = ++ gst_rtp_header_extension_repaired_stream_id_get_max_size; ++ rtp_hdr_class->write = gst_rtp_header_extension_repaired_stream_id_write; ++ rtp_hdr_class->read = gst_rtp_header_extension_repaired_stream_id_read; ++ ++ gst_element_class_set_static_metadata (gstelement_class, ++ "RTP Header Extension Repaired RFC8852 Stream ID", ++ GST_RTP_HDREXT_ELEMENT_CLASS, ++ "Extends RTP packets to add or retrieve a RepairedStreamId (RID) " ++ "value as specified in RFC8852", ++ "Matthew Waters "); ++ gst_rtp_header_extension_class_set_uri (rtp_hdr_class, ++ REPAIRED_RID_EXTMAP_STR); ++} ++ ++static void ++ gst_rtp_header_extension_repaired_stream_id_init ++ (GstRTPHeaderExtensionRepairedStreamId * self) ++{ ++} +diff --git a/gst/rtpmanager/gstrtphdrext-repairedstreamid.h b/gst/rtpmanager/gstrtphdrext-repairedstreamid.h +new file mode 100644 +index 000000000..7697c537c +--- /dev/null ++++ b/gst/rtpmanager/gstrtphdrext-repairedstreamid.h +@@ -0,0 +1,36 @@ ++/* GStreamer ++ * Copyright (C) 2021 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. 
++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, ++ * Boston, MA 02111-1307, USA. ++ */ ++ ++#ifndef __GST_RTPHDREXT_REPAIRED_STREAM_ID_H__ ++#define __GST_RTPHDREXT_REPAIRED_STREAM_ID_H__ ++ ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++#define GST_TYPE_RTP_HEADER_EXTENSION_REPAIRED_STREAM_ID (gst_rtp_header_extension_repaired_stream_id_get_type()) ++ ++G_DECLARE_FINAL_TYPE (GstRTPHeaderExtensionRepairedStreamId, gst_rtp_header_extension_repaired_stream_id, GST, RTP_HEADER_EXTENSION_REPAIRED_STREAM_ID, GstRTPHeaderExtension); ++ ++GST_ELEMENT_REGISTER_DECLARE (rtphdrextrepairedstreamid); ++ ++G_END_DECLS ++ ++#endif /* __GST_RTPHDREXT_REPAIRED_STREAM_ID_H__ */ +diff --git a/gst/rtpmanager/gstrtphdrext-streamid.c b/gst/rtpmanager/gstrtphdrext-streamid.c +new file mode 100644 +index 000000000..1705e1f98 +--- /dev/null ++++ b/gst/rtpmanager/gstrtphdrext-streamid.c +@@ -0,0 +1,297 @@ ++/* GStreamer ++ * Copyright (C) <2021> Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, ++ * Boston, MA 02111-1307, USA. 
++ */ ++ ++/** ++ * SECTION:element-rtphdrextstreamid ++ * @title: rtphdrextstreamid ++ * @short_description: RTP SDES Header Extension for RFC8852 RtpStreamId ++ * Extension ++ * @see_also: #GstRTPHeaderExtension, #GstRTPBasePayload, #GstRTPBaseDepayload, gstrtpbuffer ++ * ++ * Since: 1.22 ++ */ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include ++ ++#include "gstrtphdrext-streamid.h" ++ ++GST_DEBUG_CATEGORY_STATIC (rtphdrext_stream_id_debug); ++#define GST_CAT_DEFAULT (rtphdrext_stream_id_debug) ++ ++#define RID_EXTMAP_STR GST_RTP_HDREXT_BASE "sdes:rtp-stream-id" ++ ++enum ++{ ++ PROP_0, ++ PROP_RID, ++}; ++ ++struct _GstRTPHeaderExtensionStreamId ++{ ++ GstRTPHeaderExtension parent; ++ ++ char *rid; ++}; ++ ++#define gst_rtp_header_extension_stream_id_parent_class parent_class ++G_DEFINE_TYPE_WITH_CODE (GstRTPHeaderExtensionStreamId, ++ gst_rtp_header_extension_stream_id, GST_TYPE_RTP_HEADER_EXTENSION, ++ GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "rtphdrextstreamid", 0, ++ "RTP RFC8852 RtpStreamId Header Extensions"); ++ ); ++GST_ELEMENT_REGISTER_DEFINE (rtphdrextstreamid, ++ "rtphdrextstreamid", GST_RANK_MARGINAL, ++ GST_TYPE_RTP_HEADER_EXTENSION_STREAM_ID); ++ ++static GstRTPHeaderExtensionFlags ++ gst_rtp_header_extension_stream_id_get_supported_flags ++ (GstRTPHeaderExtension * ext) ++{ ++ GstRTPHeaderExtensionStreamId *self = ++ GST_RTP_HEADER_EXTENSION_STREAM_ID (ext); ++ GstRTPHeaderExtensionFlags flags = ++ GST_RTP_HEADER_EXTENSION_ONE_BYTE | GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++ gssize rid_len = -1; ++ ++ GST_OBJECT_LOCK (ext); ++ if (self->rid) ++ rid_len = strlen (self->rid); ++ GST_OBJECT_UNLOCK (ext); ++ if (rid_len > 16) ++ flags = GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++ ++ return flags; ++} ++ ++static gsize ++gst_rtp_header_extension_stream_id_get_max_size (GstRTPHeaderExtension * ++ ext, const GstBuffer * buffer) ++{ ++ if (gst_rtp_header_extension_stream_id_get_supported_flags (ext) & ++ GST_RTP_HEADER_EXTENSION_ONE_BYTE) ++ return 16; ++ else ++ return 255; ++} ++ ++static gssize ++gst_rtp_header_extension_stream_id_write (GstRTPHeaderExtension * ext, ++ const GstBuffer * input_meta, GstRTPHeaderExtensionFlags write_flags, ++ GstBuffer * output, guint8 * data, gsize size) ++{ ++ GstRTPHeaderExtensionStreamId *self = ++ GST_RTP_HEADER_EXTENSION_STREAM_ID (ext); ++ gsize len = 0; ++ ++ g_return_val_if_fail (size >= ++ gst_rtp_header_extension_stream_id_get_max_size (ext, NULL), -1); ++ g_return_val_if_fail (write_flags & ++ gst_rtp_header_extension_stream_id_get_supported_flags (ext), -1); ++ ++ GST_OBJECT_LOCK (ext); ++ if (!self->rid) { ++ GST_LOG_OBJECT (self, "no rid to write"); ++ goto out; ++ } ++ ++ /* TODO: we don't need to always add rid, we can selectively omit it from e.g. ++ * non-video-keyframes or some percentage of the produced frames, e.g. 
RFC8852 ++ * mentions possibly using packet-loss as a indication of how often to add mid ++ * to packets */ ++ len = strlen (self->rid); ++ if ((write_flags & GST_RTP_HEADER_EXTENSION_TWO_BYTE) == 0 && len > 16) { ++ GST_DEBUG_OBJECT (self, "cannot write a rid of size %" G_GSIZE_FORMAT ++ " without using the two byte extension format", len); ++ len = 0; ++ goto out; ++ } ++ if (len > 0) { ++ GST_LOG_OBJECT (self, "writing rid \'%s\'", self->rid); ++ memcpy (data, self->rid, len); ++ } ++ ++out: ++ GST_OBJECT_UNLOCK (ext); ++ return len; ++} ++ ++static gboolean ++gst_rtp_header_extension_stream_id_read (GstRTPHeaderExtension * ext, ++ GstRTPHeaderExtensionFlags read_flags, const guint8 * data, gsize size, ++ GstBuffer * buffer) ++{ ++ GstRTPHeaderExtensionStreamId *self = ++ GST_RTP_HEADER_EXTENSION_STREAM_ID (ext); ++ gboolean notify = FALSE; ++ ++ if (!data || size == 0) ++ return TRUE; ++ ++ if (read_flags & GST_RTP_HEADER_EXTENSION_ONE_BYTE && (size < 1 || size > 16)) { ++ GST_ERROR_OBJECT (ext, ++ "one-byte header extensions must be between 1 and 16 bytes inculusive"); ++ return FALSE; ++ } ++ ++ GST_OBJECT_LOCK (self); ++ if (!self->rid || strncmp ((const char *) data, self->rid, size) != 0) { ++ g_clear_pointer (&self->rid, g_free); ++ self->rid = g_strndup ((const char *) data, size); ++ notify = TRUE; ++ } ++ GST_OBJECT_UNLOCK (self); ++ ++ if (notify) ++ g_object_notify ((GObject *) self, "rid"); ++ ++ return TRUE; ++} ++ ++static void ++gst_rtp_header_extension_stream_id_get_property (GObject * object, ++ guint prop_id, GValue * value, GParamSpec * pspec) ++{ ++ GstRTPHeaderExtensionStreamId *self = ++ GST_RTP_HEADER_EXTENSION_STREAM_ID (object); ++ ++ switch (prop_id) { ++ case PROP_RID: ++ GST_OBJECT_LOCK (self); ++ g_value_set_string (value, self->rid); ++ GST_OBJECT_UNLOCK (self); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static gboolean ++validate_rid (const char *rid) ++{ ++ const char *iter; ++ ++ /* For avoidance of doubt, the only allowed byte values for ++ * these IDs are decimal 48 through 57, 65 through 90, and 97 through ++ * 122. ++ */ ++ for (iter = rid; iter && iter[0]; iter++) { ++ if (iter[0] < 48) ++ return FALSE; ++ if (iter[0] > 122) ++ return FALSE; ++ if (iter[0] > 57 && iter[0] < 65) ++ return FALSE; ++ if (iter[0] > 90 && iter[0] < 97) ++ return FALSE; ++ } ++ ++ return TRUE; ++} ++ ++static void ++gst_rtp_header_extension_stream_id_set_property (GObject * object, ++ guint prop_id, const GValue * value, GParamSpec * pspec) ++{ ++ GstRTPHeaderExtensionStreamId *self = ++ GST_RTP_HEADER_EXTENSION_STREAM_ID (object); ++ ++ switch (prop_id) { ++ case PROP_RID:{ ++ const char *rid; ++ GST_OBJECT_LOCK (self); ++ rid = g_value_get_string (value); ++ if (!validate_rid (rid)) { ++ GST_WARNING_OBJECT (self, "Could not set rid \'%s\'. 
Validation failed", ++ rid); ++ } else { ++ g_clear_pointer (&self->rid, g_free); ++ self->rid = g_strdup (rid); ++ } ++ GST_OBJECT_UNLOCK (self); ++ break; ++ } ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_rtp_header_extension_stream_id_finalize (GObject * object) ++{ ++ GstRTPHeaderExtensionStreamId *self = ++ GST_RTP_HEADER_EXTENSION_STREAM_ID (object); ++ ++ g_clear_pointer (&self->rid, g_free); ++ ++ G_OBJECT_CLASS (parent_class)->finalize (object); ++} ++ ++static void ++ gst_rtp_header_extension_stream_id_class_init ++ (GstRTPHeaderExtensionStreamIdClass * klass) ++{ ++ GstRTPHeaderExtensionClass *rtp_hdr_class; ++ GstElementClass *gstelement_class; ++ GObjectClass *gobject_class; ++ ++ rtp_hdr_class = (GstRTPHeaderExtensionClass *) klass; ++ gobject_class = (GObjectClass *) klass; ++ gstelement_class = (GstElementClass *) klass; ++ ++ gobject_class->set_property = gst_rtp_header_extension_stream_id_set_property; ++ gobject_class->get_property = gst_rtp_header_extension_stream_id_get_property; ++ gobject_class->finalize = gst_rtp_header_extension_stream_id_finalize; ++ ++ /** ++ * rtphdrextstreamid:rid: ++ * ++ * The RtpStreamID (RID) value either last retrieved from the RTP ++ * Header extension, or to set on outgoing RTP packets. ++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, PROP_RID, ++ g_param_spec_string ("rid", "rid", ++ "The RtpStreamId (RID) value last read or to write from/to " ++ "RTP buffers", NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ rtp_hdr_class->get_supported_flags = ++ gst_rtp_header_extension_stream_id_get_supported_flags; ++ rtp_hdr_class->get_max_size = gst_rtp_header_extension_stream_id_get_max_size; ++ rtp_hdr_class->write = gst_rtp_header_extension_stream_id_write; ++ rtp_hdr_class->read = gst_rtp_header_extension_stream_id_read; ++ ++ gst_element_class_set_static_metadata (gstelement_class, ++ "RTP Header Extension RFC8852 Stream ID", GST_RTP_HDREXT_ELEMENT_CLASS, ++ "Extends RTP packets to add or retrieve a RtpStreamId (RID) " ++ "value as specified in RFC8852", ++ "Matthew Waters "); ++ gst_rtp_header_extension_class_set_uri (rtp_hdr_class, RID_EXTMAP_STR); ++} ++ ++static void ++ gst_rtp_header_extension_stream_id_init ++ (GstRTPHeaderExtensionStreamId * self) ++{ ++} +diff --git a/gst/rtpmanager/gstrtphdrext-streamid.h b/gst/rtpmanager/gstrtphdrext-streamid.h +new file mode 100644 +index 000000000..17771321e +--- /dev/null ++++ b/gst/rtpmanager/gstrtphdrext-streamid.h +@@ -0,0 +1,36 @@ ++/* GStreamer ++ * Copyright (C) 2021 Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, ++ * Boston, MA 02111-1307, USA. 
++ */ ++ ++#ifndef __GST_RTPHDREXT_STREAM_ID_H__ ++#define __GST_RTPHDREXT_STREAM_ID_H__ ++ ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++#define GST_TYPE_RTP_HEADER_EXTENSION_STREAM_ID (gst_rtp_header_extension_stream_id_get_type()) ++ ++G_DECLARE_FINAL_TYPE (GstRTPHeaderExtensionStreamId, gst_rtp_header_extension_stream_id, GST, RTP_HEADER_EXTENSION_STREAM_ID, GstRTPHeaderExtension); ++ ++GST_ELEMENT_REGISTER_DECLARE (rtphdrextstreamid); ++ ++G_END_DECLS ++ ++#endif /* __GST_RTPHDREXT_STREAM_ID_H__ */ +diff --git a/gst/rtpmanager/gstrtphdrext-twcc.c b/gst/rtpmanager/gstrtphdrext-twcc.c +new file mode 100644 +index 000000000..2457337d0 +--- /dev/null ++++ b/gst/rtpmanager/gstrtphdrext-twcc.c +@@ -0,0 +1,230 @@ ++/* GStreamer ++ * Copyright (C) <2020> Matthew Waters ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, ++ * Boston, MA 02111-1307, USA. ++ */ ++ ++/** ++ * SECTION:rtphdrexttwcc ++ * @title: GstRtphdrext-TWCC ++ * @short_description: Helper methods for dealing with RTP header extensions ++ * in the Audio/Video RTP Profile for transport-wide-cc ++ * @see_also: #GstRTPHeaderExtension, #GstRTPBasePayload, #GstRTPBaseDepayload, gstrtpbuffer ++ * ++ * Since: 1.20 ++ */ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include ++ ++#include "gstrtphdrext-twcc.h" ++ ++GST_DEBUG_CATEGORY_STATIC (rtphdrext_twcc_debug); ++#define GST_CAT_DEFAULT (rtphdrext_twcc_debug) ++ ++#define gst_gl_base_filter_parent_class parent_class ++G_DEFINE_TYPE_WITH_CODE (GstRTPHeaderExtensionTWCC, ++ gst_rtp_header_extension_twcc, GST_TYPE_RTP_HEADER_EXTENSION, ++ GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "rtphdrexttwcc", 0, ++ "RTP TWCC Header Extensions"); ++ ); ++GST_ELEMENT_REGISTER_DEFINE (rtphdrexttwcc, "rtphdrexttwcc", GST_RANK_MARGINAL, ++ GST_TYPE_RTP_HEADER_EXTENSION_TWCC); ++ ++#define TWCC_EXTMAP_STR "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01" ++ ++static void gst_rtp_header_extension_twcc_set_property (GObject * object, ++ guint prop_id, const GValue * value, GParamSpec * pspec); ++static void gst_rtp_header_extension_twcc_get_property (GObject * object, ++ guint prop_id, GValue * value, GParamSpec * pspec); ++ ++static GstRTPHeaderExtensionFlags ++gst_rtp_header_extension_twcc_get_supported_flags (GstRTPHeaderExtension * ext); ++static gsize gst_rtp_header_extension_twcc_get_max_size (GstRTPHeaderExtension * ++ ext, const GstBuffer * buffer); ++static gssize gst_rtp_header_extension_twcc_write (GstRTPHeaderExtension * ext, ++ const GstBuffer * input_meta, GstRTPHeaderExtensionFlags write_flags, ++ GstBuffer * output, guint8 * data, gsize size); ++static gboolean gst_rtp_header_extension_twcc_read (GstRTPHeaderExtension * ext, ++ GstRTPHeaderExtensionFlags read_flags, const guint8 * data, gsize size, ++ GstBuffer * buffer); ++ ++enum ++{ ++ PROP_0, ++ PROP_N_STREAMS, ++}; ++ 
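The header-extension elements introduced by this patch (rtphdrextmid, rtphdrextstreamid, rtphdrextrepairedstreamid, rtphdrextntp64 and the rtphdrexttwcc element whose properties are declared just above) are not placed in a pipeline directly; an application hands them to an RTP payloader or depayloader through the GstRTPHeaderExtension API. The following is a minimal sketch only, assuming the plugin is registered and that extension ID 1 and the mid value "0" match whatever is negotiated in the SDP — those two values are illustrative and not taken from the patch:

#include <gst/gst.h>
#include <gst/rtp/rtp.h>

/* Attach the BUNDLE MID extension (element "rtphdrextmid" above) to an
 * already-created RTP payloader.  The extension could equally be created by
 * factory name with gst_element_factory_make ("rtphdrextmid", NULL). */
static void
add_mid_extension (GstElement * payloader)
{
  GstRTPHeaderExtension *ext;

  ext = gst_rtp_header_extension_create_from_uri
      (GST_RTP_HDREXT_BASE "sdes:mid");

  /* ID 1 is an example; it must match the a=extmap ID signalled for
   * urn:ietf:params:rtp-hdrext:sdes:mid. */
  gst_rtp_header_extension_set_id (ext, 1);

  /* "mid" is the property installed by gstrtphdrext-mid.c above; "0" is an
   * illustrative media identification value. */
  g_object_set (ext, "mid", "0", NULL);

  /* "add-extension" is the action signal on GstRTPBasePayload /
   * GstRTPBaseDepayload (available since GStreamer 1.20). */
  g_signal_emit_by_name (payloader, "add-extension", ext);
}
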
++static void ++gst_rtp_header_extension_twcc_class_init (GstRTPHeaderExtensionTWCCClass * ++ klass) ++{ ++ GstRTPHeaderExtensionClass *rtp_hdr_class; ++ GstElementClass *gstelement_class; ++ GObjectClass *gobject_class; ++ ++ rtp_hdr_class = (GstRTPHeaderExtensionClass *) klass; ++ gobject_class = (GObjectClass *) klass; ++ gstelement_class = (GstElementClass *) klass; ++ ++ gobject_class->set_property = gst_rtp_header_extension_twcc_set_property; ++ gobject_class->get_property = gst_rtp_header_extension_twcc_get_property; ++ ++ /** ++ * rtphdrexttwcc:n-streams: ++ * ++ * The number of independant RTP streams that are being used for the transport ++ * wide counter for TWCC. If set to 1 (the default), then any existing ++ * transport wide counter is kept. ++ * ++ * Since: 1.20 ++ */ ++ g_object_class_install_property (gobject_class, PROP_N_STREAMS, ++ g_param_spec_uint ("n-streams", "N Streams", ++ "The number of separate RTP streams this header applies to", ++ 1, G_MAXUINT32, 1, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ ++ rtp_hdr_class->get_supported_flags = ++ gst_rtp_header_extension_twcc_get_supported_flags; ++ rtp_hdr_class->get_max_size = gst_rtp_header_extension_twcc_get_max_size; ++ rtp_hdr_class->write = gst_rtp_header_extension_twcc_write; ++ rtp_hdr_class->read = gst_rtp_header_extension_twcc_read; ++ ++ gst_element_class_set_static_metadata (gstelement_class, ++ "Transport Wide Congestion Control", GST_RTP_HDREXT_ELEMENT_CLASS, ++ "Extends RTP packets to add sequence number transport wide.", ++ "Matthew Waters "); ++ gst_rtp_header_extension_class_set_uri (rtp_hdr_class, TWCC_EXTMAP_STR); ++} ++ ++static void ++gst_rtp_header_extension_twcc_init (GstRTPHeaderExtensionTWCC * twcc) ++{ ++ twcc->n_streams = 1; ++} ++ ++static void ++gst_rtp_header_extension_twcc_set_property (GObject * object, guint prop_id, ++ const GValue * value, GParamSpec * pspec) ++{ ++ GstRTPHeaderExtensionTWCC *twcc = GST_RTP_HEADER_EXTENSION_TWCC (object); ++ ++ switch (prop_id) { ++ case PROP_N_STREAMS: ++ twcc->n_streams = g_value_get_uint (value); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_rtp_header_extension_twcc_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec) ++{ ++ GstRTPHeaderExtensionTWCC *twcc = GST_RTP_HEADER_EXTENSION_TWCC (object); ++ ++ switch (prop_id) { ++ case PROP_N_STREAMS: ++ g_value_set_uint (value, twcc->n_streams); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static GstRTPHeaderExtensionFlags ++gst_rtp_header_extension_twcc_get_supported_flags (GstRTPHeaderExtension * ext) ++{ ++ return GST_RTP_HEADER_EXTENSION_ONE_BYTE; ++} ++ ++static gsize ++gst_rtp_header_extension_twcc_get_max_size (GstRTPHeaderExtension * ext, ++ const GstBuffer * buffer) ++{ ++ return 2; ++} ++ ++static gssize ++gst_rtp_header_extension_twcc_write (GstRTPHeaderExtension * ext, ++ const GstBuffer * input_meta, GstRTPHeaderExtensionFlags write_flags, ++ GstBuffer * output, guint8 * data, gsize size) ++{ ++ GstRTPHeaderExtensionTWCC *twcc = GST_RTP_HEADER_EXTENSION_TWCC (ext); ++ GstRTPBuffer rtp = { NULL, }; ++ gpointer ext_data; ++ guint ext_size; ++ gsize written = 0; ++ ++ g_return_val_if_fail (size >= gst_rtp_header_extension_twcc_get_max_size (ext, ++ NULL), -1); ++ g_return_val_if_fail (write_flags & ++ gst_rtp_header_extension_twcc_get_supported_flags (ext), -1); ++ ++ if (!gst_rtp_buffer_map (output, 
GST_MAP_READWRITE, &rtp)) ++ goto map_failed; ++ ++ /* if there already is a twcc-seqnum inside the packet */ ++ if (gst_rtp_buffer_get_extension_onebyte_header (&rtp, ++ gst_rtp_header_extension_get_id (ext), 0, &ext_data, &ext_size)) { ++ if (ext_size < gst_rtp_header_extension_twcc_get_max_size (ext, NULL)) ++ goto existing_too_small; ++ ++ /* with only one stream, we read the twcc-seqnum */ ++ if (twcc->n_streams == 1) ++ twcc->seqnum = GST_READ_UINT16_BE (ext_data); ++ } else { ++ /* with only one stream, we read the existing seqnum */ ++ if (twcc->n_streams == 1) ++ twcc->seqnum = gst_rtp_buffer_get_seq (&rtp); ++ ++ written = 2; ++ } ++ GST_WRITE_UINT16_BE (data, twcc->seqnum); ++ ++ gst_rtp_buffer_unmap (&rtp); ++ ++ twcc->seqnum++; ++ ++ return written; ++ ++ /* ERRORS */ ++map_failed: ++ { ++ GST_ERROR ("failed to map buffer %p", output); ++ return -1; ++ } ++ ++existing_too_small: ++ { ++ GST_ERROR ("Cannot rewrite twcc data of smaller size (%u)", ext_size); ++ return 0; ++ } ++} ++ ++static gboolean ++gst_rtp_header_extension_twcc_read (GstRTPHeaderExtension * ext, ++ GstRTPHeaderExtensionFlags read_flags, const guint8 * data, gsize size, ++ GstBuffer * buffer) ++{ ++ /* TODO: does this need an extra GstMeta? */ ++ return TRUE; ++} +diff --git a/gst/rtpmanager/gstrtphdrext-twcc.h b/gst/rtpmanager/gstrtphdrext-twcc.h +new file mode 100644 +index 000000000..3df0249a6 +--- /dev/null ++++ b/gst/rtpmanager/gstrtphdrext-twcc.h +@@ -0,0 +1,83 @@ ++/* GStreamer ++ * Copyright (C) 2020 Matthew Waters ++ * ++ * gstrtphdrexttwcc.h: transport-wide-cc RTP header extensions for the ++ * Audio/Video RTP Profile ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, ++ * Boston, MA 02111-1307, USA. 
++ */ ++ ++#ifndef __GST_RTPHDREXT_TWCC_H__ ++#define __GST_RTPHDREXT_TWCC_H__ ++ ++#include ++#include ++ ++G_BEGIN_DECLS ++ ++GType gst_rtp_header_extension_twcc_get_type (void); ++#define GST_TYPE_RTP_HEADER_EXTENSION_TWCC (gst_rtp_header_extension_twcc_get_type()) ++#define GST_RTP_HEADER_EXTENSION_TWCC(obj) \ ++ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_HEADER_EXTENSION_TWCC,GstRTPHeaderExtensionTWCC)) ++#define GST_RTP_HEADER_EXTENSION_TWCC_CLASS(klass) \ ++ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_HEADER_EXTENSION_TWCC,GstRTPHeaderExtensionTWCCClass)) ++#define GST_RTP_HEADER_EXTENSION_TWCC_GET_CLASS(obj) \ ++ (G_TYPE_INSTANCE_GET_CLASS ((obj),GST_TYPE_RTP_HEADER_EXTENSION_TWCC,GstRTPHeaderExtensionTWCCClass)) ++#define GST_IS_RTP_HEADER_EXTENSION_TWCC(obj) \ ++ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_HEADER_EXTENSION_TWCC)) ++#define GST_IS_RTP_HEADER_EXTENSION_TWCC_CLASS(klass) \ ++ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_HEADER_EXTENSION_TWCC)) ++#define GST_RTP_HEADER_EXTENSION_TWCC_CAST(obj) ((GstRTPHeaderExtensionTWCC *)(obj)) ++ ++typedef struct _GstRTPHeaderExtensionTWCC GstRTPHeaderExtensionTWCC; ++typedef struct _GstRTPHeaderExtensionTWCCClass GstRTPHeaderExtensionTWCCClass; ++ ++/** ++ * GstRTPHeaderExtensionTWCC: ++ * @parent: the parent #GstRTPHeaderExtension ++ * ++ * Instance struct for a transport-wide-cc RTP Audio/Video header extension. ++ * ++ * http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01 ++ */ ++struct _GstRTPHeaderExtensionTWCC ++{ ++ GstRTPHeaderExtension parent; ++ ++ guint16 seqnum; ++ guint n_streams; ++ ++ /*< private >*/ ++ gpointer _gst_reserved[GST_PADDING]; ++}; ++ ++/** ++ * GstRTPHeaderExtensionTWCCClass: ++ * @parent_class: the parent class ++ */ ++struct _GstRTPHeaderExtensionTWCCClass ++{ ++ GstRTPHeaderExtensionClass parent_class; ++ ++ /*< private >*/ ++ gpointer _gst_reserved[GST_PADDING]; ++}; ++ ++GST_ELEMENT_REGISTER_DECLARE (rtphdrexttwcc); ++ ++G_END_DECLS ++ ++#endif /* __GST_RTPHDREXT_TWCC_H__ */ +diff --git a/gst/rtpmanager/gstrtpjitterbuffer.c b/gst/rtpmanager/gstrtpjitterbuffer.c +index 5bd15b3f4..32fe63934 100644 +--- a/gst/rtpmanager/gstrtpjitterbuffer.c ++++ b/gst/rtpmanager/gstrtpjitterbuffer.c +@@ -106,12 +106,14 @@ + #include + #include + #include ++#include + #include + + #include "gstrtpjitterbuffer.h" + #include "rtpjitterbuffer.h" + #include "rtpstats.h" + #include "rtptimerqueue.h" ++#include "gstrtputils.h" + + #include + +@@ -153,7 +155,9 @@ enum + #define DEFAULT_MAX_DROPOUT_TIME 60000 + #define DEFAULT_MAX_MISORDER_TIME 2000 + #define DEFAULT_RFC7273_SYNC FALSE ++#define DEFAULT_ADD_REFERENCE_TIMESTAMP_META FALSE + #define DEFAULT_FASTSTART_MIN_PACKETS 0 ++#define DEFAULT_SYNC_INTERVAL 0 + + #define DEFAULT_AUTO_RTX_DELAY (20 * GST_MSECOND) + #define DEFAULT_AUTO_RTX_TIMEOUT (40 * GST_MSECOND) +@@ -186,7 +190,9 @@ enum + PROP_MAX_DROPOUT_TIME, + PROP_MAX_MISORDER_TIME, + PROP_RFC7273_SYNC, +- PROP_FASTSTART_MIN_PACKETS ++ PROP_ADD_REFERENCE_TIMESTAMP_META, ++ PROP_FASTSTART_MIN_PACKETS, ++ PROP_SYNC_INTERVAL, + }; + + #define JBUF_LOCK(priv) G_STMT_START { \ +@@ -226,6 +232,13 @@ enum + (priv)->waiting_timer--; \ + GST_DEBUG ("waiting timer done"); \ + } G_STMT_END ++#define JBUF_WAIT_TIMER_CHECK(priv, label) G_STMT_START { \ ++ if (G_UNLIKELY (priv->srcresult != GST_FLOW_OK)) \ ++ goto label; \ ++ JBUF_WAIT_TIMER (priv); \ ++ if (G_UNLIKELY (priv->srcresult != GST_FLOW_OK)) \ ++ goto label; \ ++ } G_STMT_END + #define JBUF_SIGNAL_TIMER(priv) G_STMT_START { \ + if 
(G_UNLIKELY ((priv)->waiting_timer)) { \ + GST_DEBUG ("signal timer, %d waiters", (priv)->waiting_timer); \ +@@ -273,18 +286,6 @@ enum + #define GST_BUFFER_IS_RETRANSMISSION(buffer) \ + GST_BUFFER_FLAG_IS_SET (buffer, GST_RTP_BUFFER_FLAG_RETRANSMISSION) + +-#if !GLIB_CHECK_VERSION(2, 60, 0) +-#define g_queue_clear_full queue_clear_full +-static void +-queue_clear_full (GQueue * queue, GDestroyNotify free_func) +-{ +- gpointer data; +- +- while ((data = g_queue_pop_head (queue)) != NULL) +- free_func (data); +-} +-#endif +- + struct _GstRtpJitterBufferPrivate + { + GstPad *sinkpad, *srcpad; +@@ -334,6 +335,17 @@ struct _GstRtpJitterBufferPrivate + guint32 max_dropout_time; + guint32 max_misorder_time; + guint faststart_min_packets; ++ gboolean add_reference_timestamp_meta; ++ guint sync_interval; ++ ++ /* Reference for GstReferenceTimestampMeta */ ++ GstCaps *reference_timestamp_caps; ++ ++ /* RTP header extension ID for RFC6051 64-bit NTP timestamps */ ++ guint8 ntp64_ext_id; ++ ++ /* Known CNAME / SSRC mappings */ ++ GList *cname_ssrc_mappings; + + /* the last seqnum we pushed out */ + guint32 last_popped_seqnum; +@@ -374,6 +386,7 @@ struct _GstRtpJitterBufferPrivate + + /* clock rate and rtp timestamp offset */ + gint last_pt; ++ guint32 last_ssrc; + gint32 clock_rate; + gint64 clock_base; + gint64 ts_offset_remainder; +@@ -390,8 +403,13 @@ struct _GstRtpJitterBufferPrivate + /* the latency of the upstream peer, we have to take this into account when + * synchronizing the buffers. */ + GstClockTime peer_latency; +- guint64 ext_rtptime; ++ guint64 last_sr_ext_rtptime; + GstBuffer *last_sr; ++ guint32 last_sr_ssrc; ++ GstClockTime last_sr_ntpnstime; ++ ++ GstClockTime last_known_ntpnstime; ++ guint64 last_known_ext_rtptime; + + /* some accounting */ + guint64 num_pushed; +@@ -409,6 +427,7 @@ struct _GstRtpJitterBufferPrivate + GstClockTime last_dts; + GstClockTime last_pts; + guint64 last_rtptime; ++ GstClockTime last_ntpnstime; + GstClockTime avg_jitter; + + /* for dropped packet messages */ +@@ -423,6 +442,47 @@ typedef enum + REASON_DROP_ON_LATENCY + } DropMessageReason; + ++typedef struct ++{ ++ gchar *cname; ++ guint32 ssrc; ++} CNameSSRCMapping; ++ ++static void ++cname_ssrc_mapping_free (CNameSSRCMapping * mapping) ++{ ++ g_free (mapping->cname); ++ g_free (mapping); ++} ++ ++static void ++insert_cname_ssrc_mapping (GstRtpJitterBuffer * jbuf, const gchar * cname, ++ guint32 ssrc) ++{ ++ CNameSSRCMapping *map; ++ GList *l; ++ ++ GST_DEBUG_OBJECT (jbuf, "Adding SSRC %08x to CNAME %s", ssrc, cname); ++ ++ for (l = jbuf->priv->cname_ssrc_mappings; l; l = l->next) { ++ map = l->data; ++ ++ if (map->ssrc == ssrc) { ++ if (strcmp (cname, map->cname) != 0) { ++ g_free (map->cname); ++ map->cname = g_strdup (cname); ++ } ++ return; ++ } ++ } ++ ++ map = g_new0 (CNameSSRCMapping, 1); ++ map->cname = g_strdup (cname); ++ map->ssrc = ssrc; ++ jbuf->priv->cname_ssrc_mappings = ++ g_list_prepend (jbuf->priv->cname_ssrc_mappings, map); ++} ++ + static GstStaticPadTemplate gst_rtp_jitter_buffer_sink_template = + GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, +@@ -457,6 +517,8 @@ static guint gst_rtp_jitter_buffer_signals[LAST_SIGNAL] = { 0 }; + #define gst_rtp_jitter_buffer_parent_class parent_class + G_DEFINE_TYPE_WITH_PRIVATE (GstRtpJitterBuffer, gst_rtp_jitter_buffer, + GST_TYPE_ELEMENT); ++GST_ELEMENT_REGISTER_DEFINE (rtpjitterbuffer, "rtpjitterbuffer", GST_RANK_NONE, ++ GST_TYPE_RTP_JITTER_BUFFER); + + /* object overrides */ + static void gst_rtp_jitter_buffer_set_property (GObject * object, 
+@@ -512,6 +574,8 @@ static GstClockTime + gst_rtp_jitter_buffer_set_active (GstRtpJitterBuffer * jitterbuffer, + gboolean active, guint64 base_time); + static void do_handle_sync (GstRtpJitterBuffer * jitterbuffer); ++static void do_handle_sync_inband (GstRtpJitterBuffer * jitterbuffer, ++ guint64 ntpnstime); + + static void unschedule_current_timer (GstRtpJitterBuffer * jitterbuffer); + +@@ -883,6 +947,25 @@ gst_rtp_jitter_buffer_class_init (GstRtpJitterBufferClass * klass) + "(requires clock and offset to be provided)", DEFAULT_RFC7273_SYNC, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + ++ /** ++ * GstRtpJitterBuffer:add-reference-timestamp-meta: ++ * ++ * When syncing to a RFC7273 clock or after clock synchronization via RTCP or ++ * inband NTP-64 header extensions has happened, add #GstReferenceTimestampMeta ++ * to buffers with the original reconstructed reference clock timestamp. ++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_ADD_REFERENCE_TIMESTAMP_META, ++ g_param_spec_boolean ("add-reference-timestamp-meta", ++ "Add Reference Timestamp Meta", ++ "Add Reference Timestamp Meta to buffers with the original clock timestamp " ++ "before any adjustments when syncing to an RFC7273 clock or after clock " ++ "synchronization via RTCP or inband NTP-64 header extensions has happened.", ++ DEFAULT_ADD_REFERENCE_TIMESTAMP_META, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ + /** + * GstRtpJitterBuffer:faststart-min-packets + * +@@ -900,6 +983,20 @@ gst_rtp_jitter_buffer_class_init (GstRtpJitterBufferClass * klass) + 0, G_MAXUINT, DEFAULT_FASTSTART_MIN_PACKETS, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + ++ /** ++ * GstRtpJitterBuffer:sync-interval: ++ * ++ * Determines how often to sync streams using RTCP data or inband NTP-64 ++ * header extensions. ++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, PROP_SYNC_INTERVAL, ++ g_param_spec_uint ("sync-interval", "Sync Interval", ++ "RTCP SR / NTP-64 interval synchronization (ms) (0 = always)", ++ 0, G_MAXUINT, DEFAULT_SYNC_INTERVAL, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ + /** + * GstRtpJitterBuffer::request-pt-map: + * @buffer: the object which received the signal +@@ -928,7 +1025,7 @@ gst_rtp_jitter_buffer_class_init (GstRtpJitterBufferClass * klass) + * GstRtpJitterBuffer::on-npt-stop: + * @buffer: the object which received the signal + * +- * Signal that the jitterbufer has pushed the RTP packet that corresponds to ++ * Signal that the jitterbuffer has pushed the RTP packet that corresponds to + * the npt-stop position. 
+ */ + gst_rtp_jitter_buffer_signals[SIGNAL_ON_NPT_STOP] = +@@ -1029,11 +1126,16 @@ gst_rtp_jitter_buffer_init (GstRtpJitterBuffer * jitterbuffer) + priv->max_dropout_time = DEFAULT_MAX_DROPOUT_TIME; + priv->max_misorder_time = DEFAULT_MAX_MISORDER_TIME; + priv->faststart_min_packets = DEFAULT_FASTSTART_MIN_PACKETS; ++ priv->add_reference_timestamp_meta = DEFAULT_ADD_REFERENCE_TIMESTAMP_META; ++ priv->sync_interval = DEFAULT_SYNC_INTERVAL; + + priv->ts_offset_remainder = 0; + priv->last_dts = -1; + priv->last_pts = -1; + priv->last_rtptime = -1; ++ priv->last_ntpnstime = -1; ++ priv->last_known_ext_rtptime = -1; ++ priv->last_known_ntpnstime = -1; + priv->avg_jitter = 0; + priv->last_drop_msg_timestamp = GST_CLOCK_TIME_NONE; + priv->num_too_late = 0; +@@ -1119,6 +1221,9 @@ gst_rtp_jitter_buffer_finalize (GObject * object) + g_cond_clear (&priv->jbuf_query); + + rtp_jitter_buffer_flush (priv->jbuf, NULL, NULL); ++ g_list_free_full (priv->cname_ssrc_mappings, ++ (GDestroyNotify) cname_ssrc_mapping_free); ++ priv->cname_ssrc_mappings = NULL; + g_queue_foreach (&priv->gap_packets, (GFunc) gst_buffer_unref, NULL); + g_queue_clear (&priv->gap_packets); + g_object_unref (priv->jbuf); +@@ -1372,23 +1477,28 @@ gst_rtp_jitter_buffer_getcaps (GstPad * pad, GstCaps * filter) + return caps; + } + +-/* g_ascii_string_to_unsigned is available since 2.54. Get rid of this wrapper +- * when we bump the version in 1.18 */ +-#if !GLIB_CHECK_VERSION(2,54,0) +-#define g_ascii_string_to_unsigned _gst_jitter_buffer_ascii_string_to_unsigned +-static gboolean +-_gst_jitter_buffer_ascii_string_to_unsigned (const gchar * str, guint base, +- guint64 min, guint64 max, guint64 * out_num, GError ** error) ++static void ++_get_cname_ssrc_mappings (GstRtpJitterBuffer * jitterbuffer, ++ const GstStructure * s) + { +- gchar *endptr = NULL; +- *out_num = g_ascii_strtoull (str, &endptr, base); +- if (errno) +- return FALSE; +- if (endptr == str) +- return FALSE; +- return TRUE; ++ guint i; ++ guint n_fields = gst_structure_n_fields (s); ++ ++ for (i = 0; i < n_fields; i++) { ++ const gchar *field_name = gst_structure_nth_field_name (s, i); ++ if (g_str_has_prefix (field_name, "ssrc-") ++ && g_str_has_suffix (field_name, "-cname")) { ++ const gchar *str = gst_structure_get_string (s, field_name); ++ gchar *endptr; ++ guint32 ssrc = g_ascii_strtoll (field_name + 5, &endptr, 10); ++ ++ if (!endptr || *endptr != '-') ++ continue; ++ ++ insert_cname_ssrc_mapping (jitterbuffer, str, ssrc); ++ } ++ } + } +-#endif + + /* + * Must be called with JBUF_LOCK held +@@ -1404,6 +1514,7 @@ gst_jitter_buffer_sink_parse_caps (GstRtpJitterBuffer * jitterbuffer, + gint payload = -1; + GstClockTime tval; + const gchar *ts_refclk, *mediaclk; ++ GstCaps *ts_meta_ref = NULL; + + priv = jitterbuffer->priv; + +@@ -1438,14 +1549,15 @@ gst_jitter_buffer_sink_parse_caps (GstRtpJitterBuffer * jitterbuffer, + + gst_rtp_packet_rate_ctx_reset (&priv->packet_rate_ctx, priv->clock_rate); + +- /* The clock base is the RTP timestamp corrsponding to the npt-start value. We ++ /* The clock base is the RTP timestamp corresponding to the npt-start value. We + * can use this to track the amount of time elapsed on the sender. 
*/ +- if (gst_structure_get_uint (caps_struct, "clock-base", &val)) +- priv->clock_base = val; +- else ++ priv->ext_timestamp = -1; ++ if (gst_structure_get_uint (caps_struct, "clock-base", &val)) { ++ priv->clock_base = gst_rtp_buffer_ext_timestamp (&priv->ext_timestamp, val); ++ priv->ext_timestamp = priv->clock_base; ++ } else { + priv->clock_base = -1; +- +- priv->ext_timestamp = priv->clock_base; ++ } + + GST_DEBUG_OBJECT (jitterbuffer, "got clock-base %" G_GINT64_FORMAT, + priv->clock_base); +@@ -1519,6 +1631,10 @@ gst_jitter_buffer_sink_parse_caps (GstRtpJitterBuffer * jitterbuffer, + hostname = g_strdup (host); + + clock = gst_ntp_clock_new (NULL, hostname, port, 0); ++ ++ ts_meta_ref = gst_caps_new_simple ("timestamp/x-ntp", ++ "host", G_TYPE_STRING, hostname, "port", G_TYPE_INT, port, NULL); ++ + g_free (hostname); + } + } else if (g_str_has_prefix (ts_refclk, "ptp=IEEE1588-2008:")) { +@@ -1530,6 +1646,12 @@ gst_jitter_buffer_sink_parse_caps (GstRtpJitterBuffer * jitterbuffer, + domain = 0; + + clock = gst_ptp_clock_new (NULL, domain); ++ ++ ts_meta_ref = gst_caps_new_simple ("timestamp/x-ptp", ++ "version", G_TYPE_STRING, "IEEE1588-2008", ++ "domain", G_TYPE_INT, domain, NULL); ++ } else if (!g_strcmp0 (ts_refclk, "local")) { ++ ts_meta_ref = gst_caps_new_empty_simple ("timestamp/x-ntp"); + } else { + GST_FIXME_OBJECT (jitterbuffer, "Unsupported timestamp reference clock"); + } +@@ -1550,8 +1672,16 @@ gst_jitter_buffer_sink_parse_caps (GstRtpJitterBuffer * jitterbuffer, + rtp_jitter_buffer_set_media_clock (priv->jbuf, clock, clock_offset); + } else { + rtp_jitter_buffer_set_media_clock (priv->jbuf, NULL, -1); ++ ts_meta_ref = gst_caps_new_empty_simple ("timestamp/x-ntp"); + } + ++ gst_caps_take (&priv->reference_timestamp_caps, ts_meta_ref); ++ ++ _get_cname_ssrc_mappings (jitterbuffer, caps_struct); ++ priv->ntp64_ext_id = ++ gst_rtp_get_extmap_id_for_attribute (caps_struct, ++ GST_RTP_HDREXT_BASE GST_RTP_HDREXT_NTP_64); ++ + return TRUE; + + /* ERRORS */ +@@ -1582,6 +1712,7 @@ gst_rtp_jitter_buffer_flush_start (GstRtpJitterBuffer * jitterbuffer) + JBUF_SIGNAL_EVENT (priv); + JBUF_SIGNAL_QUERY (priv, FALSE); + JBUF_SIGNAL_QUEUE (priv); ++ JBUF_SIGNAL_TIMER (priv); + JBUF_UNLOCK (priv); + } + +@@ -1606,7 +1737,9 @@ gst_rtp_jitter_buffer_flush_stop (GstRtpJitterBuffer * jitterbuffer) + priv->packet_spacing = 0; + priv->next_in_seqnum = -1; + priv->clock_rate = -1; ++ priv->ntp64_ext_id = 0; + priv->last_pt = -1; ++ priv->last_ssrc = -1; + priv->eos = FALSE; + priv->estimated_eos = -1; + priv->last_elapsed = 0; +@@ -1614,12 +1747,18 @@ gst_rtp_jitter_buffer_flush_stop (GstRtpJitterBuffer * jitterbuffer) + priv->avg_jitter = 0; + priv->last_dts = -1; + priv->last_rtptime = -1; ++ priv->last_ntpnstime = -1; ++ priv->last_known_ext_rtptime = -1; ++ priv->last_known_ntpnstime = -1; + priv->last_in_pts = 0; + priv->equidistant = 0; + priv->segment_seqnum = GST_SEQNUM_INVALID; + priv->last_drop_msg_timestamp = GST_CLOCK_TIME_NONE; + priv->num_too_late = 0; + priv->num_drop_on_latency = 0; ++ g_list_free_full (priv->cname_ssrc_mappings, ++ (GDestroyNotify) cname_ssrc_mapping_free); ++ priv->cname_ssrc_mappings = NULL; + GST_DEBUG_OBJECT (jitterbuffer, "flush and reset jitterbuffer"); + rtp_jitter_buffer_flush (priv->jbuf, NULL, NULL); + rtp_jitter_buffer_disable_buffering (priv->jbuf, FALSE); +@@ -1687,6 +1826,11 @@ gst_rtp_jitter_buffer_change_state (GstElement * element, + priv->clock_base = -1; + priv->peer_latency = 0; + priv->last_pt = -1; ++ priv->last_ssrc = -1; ++ 
priv->ntp64_ext_id = 0; ++ g_list_free_full (priv->cname_ssrc_mappings, ++ (GDestroyNotify) cname_ssrc_mapping_free); ++ priv->cname_ssrc_mappings = NULL; + /* block until we go to PLAYING */ + priv->blocked = TRUE; + priv->timer_running = TRUE; +@@ -1737,6 +1881,10 @@ gst_rtp_jitter_buffer_change_state (GstElement * element, + JBUF_UNLOCK (priv); + g_thread_join (priv->timer_thread); + priv->timer_thread = NULL; ++ gst_clear_caps (&priv->reference_timestamp_caps); ++ g_list_free_full (priv->cname_ssrc_mappings, ++ (GDestroyNotify) cname_ssrc_mapping_free); ++ priv->cname_ssrc_mappings = NULL; + break; + case GST_STATE_CHANGE_READY_TO_NULL: + break; +@@ -2127,10 +2275,9 @@ update_timer_offsets (GstRtpJitterBuffer * jitterbuffer) + test->timeout = GST_CLOCK_TIME_NONE; + test->offset = 0; + } +- /* as we apply the offset on all timers, the order of timers won't +- * change and we can skip updating the timer queue */ + } + ++ rtp_timer_queue_reschedule (priv->timers, test); + test = rtp_timer_get_next (test); + } + } +@@ -2270,7 +2417,7 @@ get_rtx_delay (GstRtpJitterBufferPrivate * priv) + * had for this packet. + */ + static void +-update_timers (GstRtpJitterBuffer * jitterbuffer, guint16 seqnum, ++update_rtx_timers (GstRtpJitterBuffer * jitterbuffer, guint16 seqnum, + GstClockTime dts, GstClockTime pts, gboolean do_next_seqnum, + gboolean is_rtx, RtpTimer * timer) + { +@@ -2315,7 +2462,7 @@ update_timers (GstRtpJitterBuffer * jitterbuffer, guint16 seqnum, + } + + do_next_seqnum = do_next_seqnum && priv->packet_spacing > 0 +- && priv->do_retransmission && priv->rtx_next_seqnum; ++ && priv->rtx_next_seqnum; + + if (timer && timer->type != RTP_TIMER_DEADLINE) { + if (timer->num_rtx_retry > 0) { +@@ -2342,27 +2489,27 @@ update_timers (GstRtpJitterBuffer * jitterbuffer, guint16 seqnum, + } + + if (do_next_seqnum && pts != GST_CLOCK_TIME_NONE) { +- GstClockTime expected, delay; ++ GstClockTime next_expected_pts, delay; + + /* calculate expected arrival time of the next seqnum */ +- expected = pts + priv->packet_spacing; ++ next_expected_pts = pts + priv->packet_spacing; + + delay = get_rtx_delay (priv); + + /* and update/install timer for next seqnum */ +- GST_DEBUG_OBJECT (jitterbuffer, "Add RTX timer #%d, expected %" +- GST_TIME_FORMAT ", delay %" GST_TIME_FORMAT ", packet-spacing %" ++ GST_DEBUG_OBJECT (jitterbuffer, "Add RTX timer #%d, next_expected_pts %" ++ GST_TIME_FORMAT ", delay %" GST_TIME_FORMAT ", est packet duration %" + GST_TIME_FORMAT ", jitter %" GST_TIME_FORMAT, priv->next_in_seqnum, +- GST_TIME_ARGS (expected), GST_TIME_ARGS (delay), ++ GST_TIME_ARGS (next_expected_pts), GST_TIME_ARGS (delay), + GST_TIME_ARGS (priv->packet_spacing), GST_TIME_ARGS (priv->avg_jitter)); + + if (timer && !is_stats_timer) { + timer->type = RTP_TIMER_EXPECTED; + rtp_timer_queue_update_timer (priv->timers, timer, priv->next_in_seqnum, +- expected, delay, 0, TRUE); ++ next_expected_pts, delay, 0, TRUE); + } else { + rtp_timer_queue_set_expected (priv->timers, priv->next_in_seqnum, +- expected, delay, priv->packet_spacing); ++ next_expected_pts, delay, priv->packet_spacing); + } + } else if (timer && timer->type != RTP_TIMER_DEADLINE && !is_stats_timer) { + /* if we had a timer, remove it, we don't know when to expect the next +@@ -2459,130 +2606,186 @@ insert_lost_event (GstRtpJitterBuffer * jitterbuffer, + } + + static void +-calculate_expected (GstRtpJitterBuffer * jitterbuffer, guint32 expected, +- guint16 seqnum, GstClockTime pts, gint gap) ++gst_rtp_jitter_buffer_handle_missing_packets 
(GstRtpJitterBuffer * jitterbuffer, ++ guint32 missing_seqnum, guint16 current_seqnum, GstClockTime pts, gint gap, ++ GstClockTime now) + { + GstRtpJitterBufferPrivate *priv = jitterbuffer->priv; +- GstClockTime duration, expected_pts; ++ GstClockTime est_pkt_duration, est_pts; + gboolean equidistant = priv->equidistant > 0; + GstClockTime last_in_pts = priv->last_in_pts; ++ GstClockTimeDiff offset = timeout_offset (jitterbuffer); ++ GstClockTime rtx_delay = get_rtx_delay (priv); ++ guint16 remaining_gap; ++ GstClockTimeDiff remaining_duration; ++ GstClockTimeDiff remainder_duration; ++ guint i; + + GST_DEBUG_OBJECT (jitterbuffer, +- "pts %" GST_TIME_FORMAT ", last %" GST_TIME_FORMAT, +- GST_TIME_ARGS (pts), GST_TIME_ARGS (last_in_pts)); +- +- if (pts == GST_CLOCK_TIME_NONE) { +- GST_WARNING_OBJECT (jitterbuffer, "Have no PTS"); +- return; +- } ++ "Missing packets: (#%u->#%u), gap %d, pts %" GST_TIME_FORMAT ++ ", last-pts %" GST_TIME_FORMAT, ++ missing_seqnum, current_seqnum - 1, gap, GST_TIME_ARGS (pts), ++ GST_TIME_ARGS (last_in_pts)); + + if (equidistant) { +- GstClockTime total_duration; ++ GstClockTimeDiff total_duration; ++ gboolean too_late; ++ + /* the total duration spanned by the missing packets */ +- if (pts >= last_in_pts) +- total_duration = pts - last_in_pts; +- else +- total_duration = 0; ++ total_duration = MAX (0, GST_CLOCK_DIFF (last_in_pts, pts)); + + /* interpolate between the current time and the last time based on + * number of packets we are missing, this is the estimated duration + * for the missing packet based on equidistant packet spacing. */ +- duration = total_duration / (gap + 1); ++ est_pkt_duration = total_duration / (gap + 1); ++ ++ /* if we have valid packet-spacing, use that */ ++ if (total_duration > 0 && priv->packet_spacing) { ++ est_pkt_duration = priv->packet_spacing; ++ } ++ ++ est_pts = last_in_pts + est_pkt_duration; ++ GST_DEBUG_OBJECT (jitterbuffer, "estimated missing packet pts %" ++ GST_TIME_FORMAT " and duration %" GST_TIME_FORMAT, ++ GST_TIME_ARGS (est_pts), GST_TIME_ARGS (est_pkt_duration)); + +- GST_DEBUG_OBJECT (jitterbuffer, "duration %" GST_TIME_FORMAT, +- GST_TIME_ARGS (duration)); ++ /* a packet is considered too late if our estimated pts plus all ++ applicable offsets are in the past */ ++ too_late = now > (est_pts + offset); + +- if (total_duration > priv->latency_ns) { +- GstClockTime gap_time; ++ /* Here we optimistically try to save any packets that could potentially ++ be saved by making sure we create lost/rtx timers for them, and for ++ the rest that could not possibly be saved, we create a "multi-lost" ++ event immediately containing the missing duration and sequence numbers */ ++ if (too_late) { + guint lost_packets; ++ GstClockTime lost_duration; ++ GstClockTimeDiff gap_time; ++ guint max_saveable_packets = 0; ++ GstClockTime max_saveable_duration; ++ GstClockTime saveable_duration; + +- if (duration > 0) { +- GstClockTime gap_dur = gap * duration; +- if (gap_dur > priv->latency_ns) +- gap_time = gap_dur - priv->latency_ns; +- else +- gap_time = 0; +- lost_packets = gap_time / duration; +- } else { +- gap_time = total_duration - priv->latency_ns; +- lost_packets = gap; +- } ++ /* gap time represents the total duration of all missing packets */ ++ gap_time = MAX (0, GST_CLOCK_DIFF (est_pts, pts)); + +- /* too many lost packets, some of the missing packets are already +- * too late and we can generate lost packet events for them. 
*/ +- GST_INFO_OBJECT (jitterbuffer, +- "lost packets (%d, #%d->#%d) duration too large %" GST_TIME_FORMAT +- " > %" GST_TIME_FORMAT ", consider %u lost (%" GST_TIME_FORMAT ")", +- gap, expected, seqnum - 1, GST_TIME_ARGS (total_duration), +- GST_TIME_ARGS (priv->latency_ns), lost_packets, +- GST_TIME_ARGS (gap_time)); ++ /* based on the estimated packet duration, we ++ can figure out how many packets we could possibly save */ ++ if (est_pkt_duration && offset > 0) ++ max_saveable_packets = offset / est_pkt_duration; ++ ++ /* and say that the amount of lost packet is the sequence-number ++ gap minus these saveable packets, but at least 1 */ ++ lost_packets = MAX (1, (gint) gap - (gint) max_saveable_packets); ++ ++ /* now we know how many packets we can possibly save */ ++ max_saveable_packets = gap - lost_packets; ++ ++ /* we convert that to time */ ++ max_saveable_duration = max_saveable_packets * est_pkt_duration; ++ ++ /* determine the actual amount of time we can save */ ++ saveable_duration = MIN (max_saveable_duration, gap_time); ++ ++ /* and we now have the duration we need to fill */ ++ lost_duration = GST_CLOCK_DIFF (saveable_duration, gap_time); + + /* this multi-lost-packet event will be inserted directly into the packet-queue + for immediate processing */ + if (lost_packets > 0) { + RtpTimer *timer; +- GstClockTime timestamp = +- apply_offset (jitterbuffer, last_in_pts + duration); +- insert_lost_event (jitterbuffer, expected, lost_packets, timestamp, +- gap_time, 0); ++ GstClockTime timestamp = apply_offset (jitterbuffer, est_pts); ++ ++ GST_INFO_OBJECT (jitterbuffer, "lost event for %d packet(s) (#%d->#%d) " ++ "for duration %" GST_TIME_FORMAT, lost_packets, missing_seqnum, ++ missing_seqnum + lost_packets - 1, GST_TIME_ARGS (lost_duration)); + +- timer = rtp_timer_queue_find (priv->timers, expected); +- if (timer && timer->type == RTP_TIMER_EXPECTED) { ++ insert_lost_event (jitterbuffer, missing_seqnum, lost_packets, ++ timestamp, lost_duration, 0); ++ ++ timer = rtp_timer_queue_find (priv->timers, missing_seqnum); ++ if (timer && timer->type != RTP_TIMER_DEADLINE) { + if (timer->queued) + rtp_timer_queue_unschedule (priv->timers, timer); + GST_DEBUG_OBJECT (jitterbuffer, "removing timer for seqnum #%u", +- expected); ++ missing_seqnum); + rtp_timer_free (timer); + } + +- expected += lost_packets; +- last_in_pts += gap_time; ++ missing_seqnum += lost_packets; ++ est_pts += lost_duration; + } + } + +- expected_pts = last_in_pts + duration; + } else { + /* If we cannot assume equidistant packet spacing, the only thing we now + * for sure is that the missing packets have expected pts not later than + * the last received pts. */ +- duration = 0; +- expected_pts = pts; ++ est_pkt_duration = 0; ++ est_pts = pts; + } + +- if (priv->do_retransmission) { +- RtpTimer *timer = rtp_timer_queue_find (priv->timers, expected); +- GstClockTime rtx_delay = get_rtx_delay (priv); +- +- /* if we had a timer for the first missing packet, update it. */ +- if (timer && timer->type == RTP_TIMER_EXPECTED) { +- GstClockTime timeout = timer->timeout; +- GstClockTime delay = MAX (rtx_delay, pts - expected_pts); +- +- timer->duration = duration; +- if (timeout > (expected_pts + delay) && timer->num_rtx_retry == 0) { +- rtp_timer_queue_update_timer (priv->timers, timer, timer->seqnum, +- expected_pts, delay, 0, TRUE); ++ /* Figure out how many more packets we are missing. 
*/ ++ remaining_gap = current_seqnum - missing_seqnum; ++ /* and how much time these packets represent */ ++ remaining_duration = MAX (0, GST_CLOCK_DIFF (est_pts, pts)); ++ /* Given the calculated packet-duration (packet spacing when equidistant), ++ the remainder is what we are left with after subtracting the ideal time ++ for the gap */ ++ remainder_duration = ++ MAX (0, GST_CLOCK_DIFF (est_pkt_duration * remaining_gap, ++ remaining_duration)); ++ ++ GST_DEBUG_OBJECT (jitterbuffer, "remaining gap of %u, with " ++ "duration %" GST_TIME_FORMAT " gives remainder duration %" ++ GST_STIME_FORMAT, remaining_gap, GST_TIME_ARGS (remaining_duration), ++ GST_STIME_ARGS (remainder_duration)); ++ ++ for (i = 0; i < remaining_gap; i++) { ++ GstClockTime duration = est_pkt_duration; ++ /* we add the remainder on the first packet */ ++ if (i == 0) ++ duration += remainder_duration; ++ ++ /* clip duration to what is actually left */ ++ remaining_duration = MAX (0, GST_CLOCK_DIFF (est_pts, pts)); ++ duration = MIN (duration, remaining_duration); ++ ++ if (priv->do_retransmission) { ++ RtpTimer *timer = rtp_timer_queue_find (priv->timers, missing_seqnum); ++ ++ /* if we had a timer for the missing packet, update it. */ ++ if (timer && timer->type == RTP_TIMER_EXPECTED) { ++ timer->duration = duration; ++ if (timer->timeout > (est_pts + rtx_delay) && timer->num_rtx_retry == 0) { ++ rtp_timer_queue_update_timer (priv->timers, timer, timer->seqnum, ++ est_pts, rtx_delay, 0, TRUE); ++ GST_DEBUG_OBJECT (jitterbuffer, "Update RTX timer(s) #%u, " ++ "pts %" GST_TIME_FORMAT ", delay %" GST_TIME_FORMAT ++ ", duration %" GST_TIME_FORMAT, ++ missing_seqnum, GST_TIME_ARGS (est_pts), ++ GST_TIME_ARGS (rtx_delay), GST_TIME_ARGS (duration)); ++ } ++ } else { ++ GST_DEBUG_OBJECT (jitterbuffer, "Add RTX timer(s) #%u, " ++ "pts %" GST_TIME_FORMAT ", delay %" GST_TIME_FORMAT ++ ", duration %" GST_TIME_FORMAT, ++ missing_seqnum, GST_TIME_ARGS (est_pts), ++ GST_TIME_ARGS (rtx_delay), GST_TIME_ARGS (duration)); ++ rtp_timer_queue_set_expected (priv->timers, missing_seqnum, est_pts, ++ rtx_delay, duration); + } +- expected++; +- expected_pts += duration; ++ } else { ++ GST_INFO_OBJECT (jitterbuffer, ++ "Add Lost timer for #%u, pts %" GST_TIME_FORMAT ++ ", duration %" GST_TIME_FORMAT ", offset %" GST_STIME_FORMAT, ++ missing_seqnum, GST_TIME_ARGS (est_pts), ++ GST_TIME_ARGS (duration), GST_STIME_ARGS (offset)); ++ rtp_timer_queue_set_lost (priv->timers, missing_seqnum, est_pts, ++ duration, offset); + } + +- while (gst_rtp_buffer_compare_seqnum (expected, seqnum) > 0) { +- /* minimum delay the expected-timer has "waited" is the elapsed time +- * since expected arrival of the missing packet */ +- GstClockTime delay = MAX (rtx_delay, pts - expected_pts); +- rtp_timer_queue_set_expected (priv->timers, expected, expected_pts, +- delay, duration); +- expected_pts += duration; +- expected++; +- } +- } else { +- while (gst_rtp_buffer_compare_seqnum (expected, seqnum) > 0) { +- rtp_timer_queue_set_lost (priv->timers, expected, expected_pts, +- duration, timeout_offset (jitterbuffer)); +- expected_pts += duration; +- expected++; +- } ++ missing_seqnum++; ++ est_pts += duration; + } + } + +@@ -2863,6 +3066,34 @@ gst_rtp_jitter_buffer_fast_start (GstRtpJitterBuffer * jitterbuffer) + return FALSE; + } + ++static GstClockTime ++_get_inband_ntp_time (GstRtpJitterBuffer * jitterbuffer, GstRTPBuffer * rtp) ++{ ++ GstRtpJitterBufferPrivate *priv = jitterbuffer->priv; ++ guint8 *data; ++ guint size; ++ guint64 ntptime; ++ GstClockTime ntpnstime; 
++ ++ if (priv->ntp64_ext_id == 0) ++ return GST_CLOCK_TIME_NONE; ++ ++ if (!gst_rtp_buffer_get_extension_onebyte_header (rtp, priv->ntp64_ext_id, 0, ++ (gpointer *) & data, &size) ++ && !gst_rtp_buffer_get_extension_twobytes_header (rtp, NULL, ++ priv->ntp64_ext_id, 0, (gpointer *) & data, &size)) ++ return GST_CLOCK_TIME_NONE; ++ ++ if (size != 8) ++ return GST_CLOCK_TIME_NONE; ++ ++ ntptime = GST_READ_UINT64_BE (data); ++ ntpnstime = ++ gst_util_uint64_scale (ntptime, GST_SECOND, G_GUINT64_CONSTANT (1) << 32); ++ ++ return ntpnstime; ++} ++ + static GstFlowReturn + gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + GstBuffer * buffer) +@@ -2872,12 +3103,16 @@ gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + guint16 seqnum; + guint32 expected, rtptime; + GstFlowReturn ret = GST_FLOW_OK; ++ GstClockTime now; + GstClockTime dts, pts; ++ GstClockTime ntp_time; ++ GstClockTime inband_ntp_time; + guint64 latency_ts; + gboolean head; + gboolean duplicate; + gint percent = -1; + guint8 pt; ++ guint32 ssrc; + GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; + gboolean do_next_seqnum = FALSE; + GstMessage *msg = NULL; +@@ -2897,9 +3132,12 @@ gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + pt = gst_rtp_buffer_get_payload_type (&rtp); + seqnum = gst_rtp_buffer_get_seq (&rtp); + rtptime = gst_rtp_buffer_get_timestamp (&rtp); ++ inband_ntp_time = _get_inband_ntp_time (jitterbuffer, &rtp); ++ ssrc = gst_rtp_buffer_get_ssrc (&rtp); + gst_rtp_buffer_unmap (&rtp); + + is_rtx = GST_BUFFER_IS_RETRANSMISSION (buffer); ++ now = get_current_running_time (jitterbuffer); + + /* make sure we have PTS and DTS set */ + pts = GST_BUFFER_PTS (buffer); +@@ -2912,7 +3150,7 @@ gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + if (dts == -1) { + /* If we have no DTS here, i.e. no capture time, get one from the + * clock now to have something to calculate with in the future. */ +- dts = get_current_running_time (jitterbuffer); ++ dts = now; + pts = dts; + + /* Remember that we estimated the DTS if we are running already +@@ -2931,8 +3169,10 @@ gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + } + + GST_DEBUG_OBJECT (jitterbuffer, +- "Received packet #%d at time %" GST_TIME_FORMAT ", discont %d, rtx %d", +- seqnum, GST_TIME_ARGS (dts), GST_BUFFER_IS_DISCONT (buffer), is_rtx); ++ "Received packet #%d at time %" GST_TIME_FORMAT ++ ", discont %d, rtx %d, inband NTP time %" GST_TIME_FORMAT, seqnum, ++ GST_TIME_ARGS (dts), GST_BUFFER_IS_DISCONT (buffer), is_rtx, ++ GST_TIME_ARGS (inband_ntp_time)); + + JBUF_LOCK_CHECK (priv, out_flushing); + +@@ -2946,6 +3186,9 @@ gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + /* reset clock-rate so that we get a new one */ + priv->clock_rate = -1; + ++ priv->last_known_ext_rtptime = -1; ++ priv->last_known_ntpnstime = -1; ++ + /* Try to get the clock-rate from the caps first if we can. If there are no + * caps we must fire the signal to get the clock-rate. 
*/ + if ((caps = gst_pad_get_current_caps (pad))) { +@@ -2964,6 +3207,16 @@ gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + goto no_clock_rate; + + gst_rtp_packet_rate_ctx_reset (&priv->packet_rate_ctx, priv->clock_rate); ++ priv->last_known_ext_rtptime = -1; ++ priv->last_known_ntpnstime = -1; ++ } ++ ++ if (G_UNLIKELY (priv->last_ssrc != ssrc)) { ++ GST_DEBUG_OBJECT (jitterbuffer, "SSRC changed from %u to %u", ++ priv->last_ssrc, ssrc); ++ priv->last_ssrc = ssrc; ++ priv->last_known_ext_rtptime = -1; ++ priv->last_known_ntpnstime = -1; + } + + /* don't accept more data on EOS */ +@@ -3031,7 +3284,7 @@ gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + pts = + rtp_jitter_buffer_calculate_pts (priv->jbuf, dts, estimated_dts, + rtptime, gst_element_get_base_time (GST_ELEMENT_CAST (jitterbuffer)), +- 0, FALSE); ++ 0, FALSE, &ntp_time); + + if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (pts))) { + /* A valid timestamp cannot be calculated, discard packet */ +@@ -3094,7 +3347,7 @@ gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + pts = + rtp_jitter_buffer_calculate_pts (priv->jbuf, dts, estimated_dts, + rtptime, gst_element_get_base_time (GST_ELEMENT_CAST (jitterbuffer)), +- gap, is_rtx); ++ gap, is_rtx, &ntp_time); + + if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (pts))) { + /* A valid timestamp cannot be calculated, discard packet */ +@@ -3111,11 +3364,22 @@ gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + if (gap > 0) { + GST_DEBUG_OBJECT (jitterbuffer, "%d missing packets", gap); + /* fill in the gap with EXPECTED timers */ +- calculate_expected (jitterbuffer, expected, seqnum, pts, gap); ++ gst_rtp_jitter_buffer_handle_missing_packets (jitterbuffer, expected, ++ seqnum, pts, gap, now); + do_next_seqnum = TRUE; + } else { + GST_DEBUG_OBJECT (jitterbuffer, "old packet received"); + do_next_seqnum = FALSE; ++ ++ /* If an out of order packet arrives before its lost timer has expired ++ * remove it to avoid false positive statistics. If this is an RTX ++ * packet then the timer will be updated later as part of update_rtx_timers() */ ++ if (!is_rtx && timer && timer->type == RTP_TIMER_LOST) { ++ rtp_timer_queue_unschedule (priv->timers, timer); ++ GST_DEBUG_OBJECT (jitterbuffer, ++ "removing lost timer for late seqnum #%u", seqnum); ++ rtp_timer_free (g_steal_pointer (&timer)); ++ } + } + + /* reset spacing estimation when gap */ +@@ -3129,8 +3393,22 @@ gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + priv->next_in_seqnum = (seqnum + 1) & 0xffff; + } + +- if (is_rtx) ++ if (inband_ntp_time != GST_CLOCK_TIME_NONE) { ++ guint64 ext_rtptime; ++ ++ ext_rtptime = priv->jbuf->ext_rtptime; ++ ext_rtptime = gst_rtp_buffer_ext_timestamp (&ext_rtptime, rtptime); ++ ++ priv->last_known_ext_rtptime = ext_rtptime; ++ priv->last_known_ntpnstime = inband_ntp_time; ++ } ++ ++ if (is_rtx) { ++ /* For RTX there must be a corresponding timer or it would be an ++ * unsolicited RTX packet that would be dropped */ ++ g_assert (timer != NULL); + timer->num_rtx_received++; ++ } + + /* At 2^15, we would detect a seqnum rollover too early, therefore + * limit the queue size. But let's not limit it to a number that is +@@ -3138,12 +3416,12 @@ gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + * sequence number, let's allow at least 10k packets in any case. 
*/ + while (rtp_jitter_buffer_is_full (priv->jbuf) && + priv->srcresult == GST_FLOW_OK) { +- RtpTimer *timer = rtp_timer_queue_peek_earliest (priv->timers); +- while (timer) { +- timer->timeout = -1; +- if (timer->type == RTP_TIMER_DEADLINE) ++ RtpTimer *earliest_timer = rtp_timer_queue_peek_earliest (priv->timers); ++ while (earliest_timer) { ++ earliest_timer->timeout = -1; ++ if (earliest_timer->type == RTP_TIMER_DEADLINE) + break; +- timer = rtp_timer_get_next (timer); ++ earliest_timer = rtp_timer_get_next (earliest_timer); + } + + update_current_timer (jitterbuffer); +@@ -3162,7 +3440,11 @@ gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + /* priv->last_popped_seqnum >= seqnum, we're too late. */ + if (G_UNLIKELY (gap <= 0)) { + if (priv->do_retransmission) { +- if (is_rtx && timer) { ++ if (is_rtx) { ++ /* For RTX there must be a corresponding timer or it would be an ++ * unsolicited RTX packet that would be dropped */ ++ g_assert (timer != NULL); ++ + update_rtx_stats (jitterbuffer, timer, dts, FALSE); + /* Only count the retranmitted packet too late if it has been + * considered lost. If the original packet arrived before the +@@ -3205,6 +3487,39 @@ gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + JBUF_SIGNAL_EVENT (priv); + } + } ++ // If we can calculate a NTP time based solely on the Sender Report, or ++ // inband NTP header extension do that so that we can still add a reference ++ // timestamp meta to the buffer ++ if (!GST_CLOCK_TIME_IS_VALID (ntp_time) && ++ GST_CLOCK_TIME_IS_VALID (priv->last_known_ntpnstime) && ++ priv->last_known_ext_rtptime != -1) { ++ guint64 ext_time = priv->last_known_ext_rtptime; ++ ++ ext_time = gst_rtp_buffer_ext_timestamp (&ext_time, rtptime); ++ ++ if (ext_time >= priv->last_known_ext_rtptime) { ++ ntp_time = ++ priv->last_known_ntpnstime + gst_util_uint64_scale (ext_time - ++ priv->last_known_ext_rtptime, GST_SECOND, priv->clock_rate); ++ } else { ++ ntp_time = ++ priv->last_known_ntpnstime - ++ gst_util_uint64_scale (priv->last_known_ext_rtptime - ext_time, ++ GST_SECOND, priv->clock_rate); ++ } ++ } ++ ++ if (priv->add_reference_timestamp_meta && GST_CLOCK_TIME_IS_VALID (ntp_time) ++ && priv->reference_timestamp_caps != NULL) { ++ buffer = gst_buffer_make_writable (buffer); ++ ++ GST_TRACE_OBJECT (jitterbuffer, ++ "adding NTP time reference meta: %" GST_TIME_FORMAT, ++ GST_TIME_ARGS (ntp_time)); ++ ++ gst_buffer_add_reference_timestamp_meta (buffer, ++ priv->reference_timestamp_caps, ntp_time, GST_CLOCK_TIME_NONE); ++ } + + /* If we estimated the DTS, don't consider it in the clock skew calculations + * later. The code above always sets dts to pts or the other way around if +@@ -3218,8 +3533,12 @@ gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + * FALSE if a packet with the same seqnum was already in the queue, meaning we + * have a duplicate. 
*/ + if (G_UNLIKELY (duplicate)) { +- if (is_rtx && timer) ++ if (is_rtx) { ++ /* For RTX there must be a corresponding timer or it would be an ++ * unsolicited RTX packet that would be dropped */ ++ g_assert (timer != NULL); + update_rtx_stats (jitterbuffer, timer, dts, FALSE); ++ } + goto duplicate; + } + +@@ -3227,13 +3546,18 @@ gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent, + if (gst_rtp_jitter_buffer_fast_start (jitterbuffer)) + head = TRUE; + +- /* update timers */ +- update_timers (jitterbuffer, seqnum, dts, pts, do_next_seqnum, is_rtx, timer); ++ /* update rtx timers */ ++ if (priv->do_retransmission) ++ update_rtx_timers (jitterbuffer, seqnum, dts, pts, do_next_seqnum, is_rtx, ++ g_steal_pointer (&timer)); + + /* we had an unhandled SR, handle it now */ + if (priv->last_sr) + do_handle_sync (jitterbuffer); + ++ if (inband_ntp_time != GST_CLOCK_TIME_NONE) ++ do_handle_sync_inband (jitterbuffer, inband_ntp_time); ++ + if (G_UNLIKELY (head)) { + /* signal addition of new buffer when the _loop is waiting. */ + if (G_LIKELY (priv->active)) +@@ -3535,7 +3859,7 @@ pop_and_push_next (GstRtpJitterBuffer * jitterbuffer, guint seqnum) + while (rtp_timer_queue_length (priv->timers) > 0) { + /* Stopping timers */ + unschedule_current_timer (jitterbuffer); +- JBUF_WAIT_TIMER (priv); ++ JBUF_WAIT_TIMER_CHECK (priv, out_flushing_wait); + } + } + +@@ -3602,6 +3926,12 @@ out_flushing: + { + return priv->srcresult; + } ++ ++out_flushing_wait: ++ { ++ rtp_jitter_buffer_free_item (item); ++ return priv->srcresult; ++ } + } + + #define GST_FLOW_WAIT GST_FLOW_CUSTOM_SUCCESS +@@ -3828,14 +4158,16 @@ do_expected_timeout (GstRtpJitterBuffer * jitterbuffer, RtpTimer * timer, + GstClockTime rtx_retry_timeout; + GstClock *clock; + GstClockTimeDiff offset = 0; ++ GstClockTime timeout; + +- GST_DEBUG_OBJECT (jitterbuffer, "expected %d didn't arrive, now %" ++ GST_DEBUG_OBJECT (jitterbuffer, "expected #%d didn't arrive, now %" + GST_TIME_FORMAT, timer->seqnum, GST_TIME_ARGS (now)); + + rtx_retry_timeout = get_rtx_retry_timeout (priv); + rtx_retry_period = get_rtx_retry_period (priv, rtx_retry_timeout); + +- delay = timer->rtx_delay + timer->rtx_retry; ++ /* delay expresses how late this packet is currently */ ++ delay = now - timer->rtx_base; + + delay_ms = GST_TIME_AS_MSECONDS (delay); + rtx_retry_timeout_ms = GST_TIME_AS_MSECONDS (rtx_retry_timeout); +@@ -3870,28 +4202,34 @@ do_expected_timeout (GstRtpJitterBuffer * jitterbuffer, RtpTimer * timer, + } + GST_OBJECT_UNLOCK (jitterbuffer); + +- /* calculate the timeout for the next retransmission attempt */ +- timer->rtx_retry += rtx_retry_timeout; +- GST_DEBUG_OBJECT (jitterbuffer, "timer #%i base %" GST_TIME_FORMAT ", delay %" +- GST_TIME_FORMAT ", retry %" GST_TIME_FORMAT ", num_retry %u", +- timer->seqnum, GST_TIME_ARGS (timer->rtx_base), +- GST_TIME_ARGS (timer->rtx_delay), GST_TIME_ARGS (timer->rtx_retry), +- timer->num_rtx_retry); ++ /* ++ Calculate the timeout for the next retransmission attempt: ++ We have just successfully sent one RTX request, and we need to ++ find out when to schedule the next one. 
++ ++ The rtx_retry_timeout tells us the logical timeout between RTX ++ requests based on things like round-trip time, jitter and packet spacing, ++ and is how long we are going to wait before attempting another RTX packet ++ */ ++ timeout = timer->rtx_last + rtx_retry_timeout; ++ GST_DEBUG_OBJECT (jitterbuffer, ++ "timer #%i new timeout %" GST_TIME_FORMAT ", rtx retry timeout %" ++ GST_TIME_FORMAT ", num_retry %u", timer->seqnum, GST_TIME_ARGS (timeout), ++ GST_TIME_ARGS (rtx_retry_timeout), timer->num_rtx_retry); + if ((priv->rtx_max_retries != -1 + && timer->num_rtx_retry >= priv->rtx_max_retries) +- || (timer->rtx_retry + timer->rtx_delay > rtx_retry_period) +- || (timer->rtx_base + rtx_retry_period < now)) { +- GST_DEBUG_OBJECT (jitterbuffer, "reschedule #%i as LOST timer", +- timer->seqnum); ++ || (timeout > timer->rtx_base + rtx_retry_period)) { + /* too many retransmission request, we now convert the timer + * to a lost timer, leave the num_rtx_retry as it is for stats */ + timer->type = RTP_TIMER_LOST; +- timer->rtx_delay = 0; +- timer->rtx_retry = 0; ++ timeout = timer->rtx_base; + offset = timeout_offset (jitterbuffer); ++ GST_DEBUG_OBJECT (jitterbuffer, "reschedule #%i as LOST timer for %" ++ GST_TIME_FORMAT, timer->seqnum, ++ GST_TIME_ARGS (timer->rtx_base + offset)); + } + rtp_timer_queue_update_timer (priv->timers, timer, timer->seqnum, +- timer->rtx_base + timer->rtx_retry, timer->rtx_delay, offset, FALSE); ++ timeout, 0, offset, FALSE); + + return FALSE; + } +@@ -4205,6 +4543,72 @@ pause: + } + } + ++static void ++do_handle_sync_inband (GstRtpJitterBuffer * jitterbuffer, guint64 ntpnstime) ++{ ++ GstRtpJitterBufferPrivate *priv; ++ GstStructure *s; ++ guint64 base_rtptime, base_time; ++ guint32 clock_rate; ++ guint64 last_rtptime; ++ const gchar *cname = NULL; ++ GList *l; ++ ++ priv = jitterbuffer->priv; ++ ++ /* get the last values from the jitterbuffer */ ++ rtp_jitter_buffer_get_sync (priv->jbuf, &base_rtptime, &base_time, ++ &clock_rate, &last_rtptime); ++ ++ for (l = priv->cname_ssrc_mappings; l; l = l->next) { ++ const CNameSSRCMapping *map = l->data; ++ ++ if (map->ssrc == priv->last_ssrc) { ++ cname = map->cname; ++ break; ++ } ++ } ++ ++ GST_DEBUG_OBJECT (jitterbuffer, ++ "inband NTP-64 %" GST_TIME_FORMAT " rtptime %" G_GUINT64_FORMAT ", base %" ++ G_GUINT64_FORMAT ", clock-rate %" G_GUINT32_FORMAT ", clock-base %" ++ G_GUINT64_FORMAT ", CNAME %s", GST_TIME_ARGS (ntpnstime), last_rtptime, ++ base_rtptime, clock_rate, priv->clock_base, GST_STR_NULL (cname)); ++ ++ /* no CNAME known yet for this ssrc */ ++ if (cname == NULL) { ++ GST_DEBUG_OBJECT (jitterbuffer, "no CNAME for this packet known yet"); ++ return; ++ } ++ ++ if (priv->last_ntpnstime != GST_CLOCK_TIME_NONE ++ && ntpnstime - priv->last_ntpnstime < priv->sync_interval * GST_MSECOND) { ++ GST_DEBUG_OBJECT (jitterbuffer, ++ "discarding RTCP sender packet for sync; " ++ "previous sender info too recent " "(previous NTP %" G_GUINT64_FORMAT ++ ")", priv->last_ntpnstime); ++ return; ++ } ++ priv->last_ntpnstime = ntpnstime; ++ ++ s = gst_structure_new ("application/x-rtp-sync", ++ "base-rtptime", G_TYPE_UINT64, base_rtptime, ++ "base-time", G_TYPE_UINT64, base_time, ++ "clock-rate", G_TYPE_UINT, clock_rate, ++ "clock-base", G_TYPE_UINT64, priv->clock_base & G_MAXUINT32, ++ "cname", G_TYPE_STRING, cname, ++ "ssrc", G_TYPE_UINT, priv->last_ssrc, ++ "inband-ext-rtptime", G_TYPE_UINT64, last_rtptime, ++ "inband-ntpnstime", G_TYPE_UINT64, ntpnstime, NULL); ++ ++ GST_DEBUG_OBJECT (jitterbuffer, "signaling sync"); ++ 
JBUF_UNLOCK (priv); ++ g_signal_emit (jitterbuffer, ++ gst_rtp_jitter_buffer_signals[SIGNAL_HANDLE_SYNC], 0, s); ++ JBUF_LOCK (priv); ++ gst_structure_free (s); ++} ++ + /* collect the info from the latest RTCP packet and the jitterbuffer sync, do + * some sanity checks and then emit the handle-sync signal with the parameters. + * This function must be called with the LOCK */ +@@ -4226,12 +4630,14 @@ do_handle_sync (GstRtpJitterBuffer * jitterbuffer) + &clock_rate, &last_rtptime); + + clock_base = priv->clock_base; +- ext_rtptime = priv->ext_rtptime; ++ ext_rtptime = priv->last_sr_ext_rtptime; + +- GST_DEBUG_OBJECT (jitterbuffer, "ext SR %" G_GUINT64_FORMAT ", base %" +- G_GUINT64_FORMAT ", clock-rate %" G_GUINT32_FORMAT +- ", clock-base %" G_GUINT64_FORMAT ", last-rtptime %" G_GUINT64_FORMAT, +- ext_rtptime, base_rtptime, clock_rate, clock_base, last_rtptime); ++ GST_DEBUG_OBJECT (jitterbuffer, ++ "ext SR %" G_GUINT64_FORMAT ", NTP %" G_GUINT64_FORMAT ", base %" ++ G_GUINT64_FORMAT ", clock-rate %" G_GUINT32_FORMAT ", clock-base %" ++ G_GUINT64_FORMAT ", last-rtptime %" G_GUINT64_FORMAT, ext_rtptime, ++ priv->last_sr_ntpnstime, base_rtptime, clock_rate, clock_base, ++ last_rtptime); + + if (base_rtptime == -1 || clock_rate == -1 || base_time == -1) { + /* we keep this SR packet for later. When we get a valid RTP packet the +@@ -4271,15 +4677,27 @@ do_handle_sync (GstRtpJitterBuffer * jitterbuffer) + GST_DEBUG_OBJECT (jitterbuffer, "keeping RTCP packet for later"); + } else if (valid) { + GstStructure *s; ++ GList *l; + + s = gst_structure_new ("application/x-rtp-sync", + "base-rtptime", G_TYPE_UINT64, base_rtptime, + "base-time", G_TYPE_UINT64, base_time, + "clock-rate", G_TYPE_UINT, clock_rate, +- "clock-base", G_TYPE_UINT64, clock_base, ++ "clock-base", G_TYPE_UINT64, priv->clock_base & G_MAXUINT32, ++ "ssrc", G_TYPE_UINT, priv->last_sr_ssrc, + "sr-ext-rtptime", G_TYPE_UINT64, ext_rtptime, ++ "sr-ntpnstime", G_TYPE_UINT64, priv->last_sr_ntpnstime, + "sr-buffer", GST_TYPE_BUFFER, priv->last_sr, NULL); + ++ for (l = priv->cname_ssrc_mappings; l; l = l->next) { ++ const CNameSSRCMapping *map = l->data; ++ ++ if (map->ssrc == priv->last_ssrc) { ++ gst_structure_set (s, "cname", G_TYPE_STRING, map->cname, NULL); ++ break; ++ } ++ } ++ + GST_DEBUG_OBJECT (jitterbuffer, "signaling sync"); + gst_buffer_replace (&priv->last_sr, NULL); + JBUF_UNLOCK (priv); +@@ -4293,6 +4711,18 @@ do_handle_sync (GstRtpJitterBuffer * jitterbuffer) + } + } + ++#define GST_RTCP_BUFFER_FOR_PACKETS(b,buffer,packet) \ ++ for ((b) = gst_rtcp_buffer_get_first_packet ((buffer), (packet)); (b); \ ++ (b) = gst_rtcp_packet_move_to_next ((packet))) ++ ++#define GST_RTCP_SDES_FOR_ITEMS(b,packet) \ ++ for ((b) = gst_rtcp_packet_sdes_first_item ((packet)); (b); \ ++ (b) = gst_rtcp_packet_sdes_next_item ((packet))) ++ ++#define GST_RTCP_SDES_FOR_ENTRIES(b,packet) \ ++ for ((b) = gst_rtcp_packet_sdes_first_entry ((packet)); (b); \ ++ (b) = gst_rtcp_packet_sdes_next_entry ((packet))) ++ + static GstFlowReturn + gst_rtp_jitter_buffer_chain_rtcp (GstPad * pad, GstObject * parent, + GstBuffer * buffer) +@@ -4302,9 +4732,13 @@ gst_rtp_jitter_buffer_chain_rtcp (GstPad * pad, GstObject * parent, + GstFlowReturn ret = GST_FLOW_OK; + guint32 ssrc; + GstRTCPPacket packet; +- guint64 ext_rtptime; ++ guint64 ext_rtptime, ntptime; ++ GstClockTime ntpnstime = GST_CLOCK_TIME_NONE; + guint32 rtptime; + GstRTCPBuffer rtcp = { NULL, }; ++ gchar *cname = NULL; ++ gboolean have_sr = FALSE; ++ gboolean more; + + jitterbuffer = GST_RTP_JITTER_BUFFER 
(parent); + +@@ -4315,35 +4749,109 @@ gst_rtp_jitter_buffer_chain_rtcp (GstPad * pad, GstObject * parent, + + gst_rtcp_buffer_map (buffer, GST_MAP_READ, &rtcp); + +- if (!gst_rtcp_buffer_get_first_packet (&rtcp, &packet)) +- goto empty_buffer; ++ GST_RTCP_BUFFER_FOR_PACKETS (more, &rtcp, &packet) { ++ /* first packet must be SR or RR or else the validate would have failed */ ++ switch (gst_rtcp_packet_get_type (&packet)) { ++ case GST_RTCP_TYPE_SR: ++ /* only parse first. There is only supposed to be one SR in the packet ++ * but we will deal with malformed packets gracefully by trying the ++ * next RTCP packet */ ++ if (have_sr) ++ continue; + +- /* first packet must be SR or RR or else the validate would have failed */ +- switch (gst_rtcp_packet_get_type (&packet)) { +- case GST_RTCP_TYPE_SR: +- gst_rtcp_packet_sr_get_sender_info (&packet, &ssrc, NULL, &rtptime, +- NULL, NULL); +- break; +- default: +- goto ignore_buffer; ++ /* get NTP and RTP times */ ++ gst_rtcp_packet_sr_get_sender_info (&packet, &ssrc, &ntptime, &rtptime, ++ NULL, NULL); ++ ++ /* convert ntptime to nanoseconds */ ++ ntpnstime = ++ gst_util_uint64_scale (ntptime, GST_SECOND, ++ G_GUINT64_CONSTANT (1) << 32); ++ ++ have_sr = TRUE; ++ ++ break; ++ case GST_RTCP_TYPE_SDES: ++ { ++ gboolean more_items; ++ ++ /* Bail out if we have not seen an SR item yet. */ ++ if (!have_sr) ++ goto ignore_buffer; ++ ++ GST_RTCP_SDES_FOR_ITEMS (more_items, &packet) { ++ gboolean more_entries; ++ ++ /* skip items that are not about the SSRC of the sender */ ++ if (gst_rtcp_packet_sdes_get_ssrc (&packet) != ssrc) ++ continue; ++ ++ /* find the CNAME entry */ ++ GST_RTCP_SDES_FOR_ENTRIES (more_entries, &packet) { ++ GstRTCPSDESType type; ++ guint8 len; ++ const guint8 *data; ++ ++ gst_rtcp_packet_sdes_get_entry (&packet, &type, &len, ++ (guint8 **) & data); ++ ++ if (type == GST_RTCP_SDES_CNAME) { ++ cname = g_strndup ((const gchar *) data, len); ++ goto out; ++ } ++ } ++ } ++ ++ /* only deal with first SDES, there is only supposed to be one SDES in ++ * the RTCP packet but we deal with bad packets gracefully. 
*/ ++ goto out; ++ } ++ default: ++ /* we can ignore these packets */ ++ break; ++ } + } ++out: + gst_rtcp_buffer_unmap (&rtcp); + +- GST_DEBUG_OBJECT (jitterbuffer, "received RTCP of SSRC %08x", ssrc); ++ GST_DEBUG_OBJECT (jitterbuffer, "received RTCP of SSRC %08x from CNAME %s", ++ ssrc, GST_STR_NULL (cname)); ++ ++ if (!have_sr) ++ goto empty_buffer; + + JBUF_LOCK (priv); ++ if (cname) ++ insert_cname_ssrc_mapping (jitterbuffer, cname, ssrc); ++ + /* convert the RTP timestamp to our extended timestamp, using the same offset + * we used in the jitterbuffer */ + ext_rtptime = priv->jbuf->ext_rtptime; + ext_rtptime = gst_rtp_buffer_ext_timestamp (&ext_rtptime, rtptime); + +- priv->ext_rtptime = ext_rtptime; +- gst_buffer_replace (&priv->last_sr, buffer); ++ priv->last_sr_ext_rtptime = ext_rtptime; ++ priv->last_sr_ssrc = ssrc; ++ priv->last_sr_ntpnstime = ntpnstime; ++ ++ priv->last_known_ext_rtptime = ext_rtptime; ++ priv->last_known_ntpnstime = ntpnstime; ++ ++ if (priv->last_ntpnstime != GST_CLOCK_TIME_NONE ++ && ntpnstime - priv->last_ntpnstime < priv->sync_interval * GST_MSECOND) { ++ gst_buffer_replace (&priv->last_sr, NULL); ++ GST_DEBUG_OBJECT (jitterbuffer, "discarding RTCP sender packet for sync; " ++ "previous sender info too recent " ++ "(previous NTP %" G_GUINT64_FORMAT ")", priv->last_ntpnstime); ++ } else { ++ gst_buffer_replace (&priv->last_sr, buffer); ++ do_handle_sync (jitterbuffer); ++ priv->last_ntpnstime = ntpnstime; ++ } + +- do_handle_sync (jitterbuffer); + JBUF_UNLOCK (priv); + + done: ++ g_free (cname); + gst_buffer_unref (buffer); + + return ret; +@@ -4688,6 +5196,16 @@ gst_rtp_jitter_buffer_set_property (GObject * object, + priv->faststart_min_packets = g_value_get_uint (value); + JBUF_UNLOCK (priv); + break; ++ case PROP_ADD_REFERENCE_TIMESTAMP_META: ++ JBUF_LOCK (priv); ++ priv->add_reference_timestamp_meta = g_value_get_boolean (value); ++ JBUF_UNLOCK (priv); ++ break; ++ case PROP_SYNC_INTERVAL: ++ JBUF_LOCK (priv); ++ priv->sync_interval = g_value_get_uint (value); ++ JBUF_UNLOCK (priv); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -4844,6 +5362,16 @@ gst_rtp_jitter_buffer_get_property (GObject * object, + g_value_set_uint (value, priv->faststart_min_packets); + JBUF_UNLOCK (priv); + break; ++ case PROP_ADD_REFERENCE_TIMESTAMP_META: ++ JBUF_LOCK (priv); ++ g_value_set_boolean (value, priv->add_reference_timestamp_meta); ++ JBUF_UNLOCK (priv); ++ break; ++ case PROP_SYNC_INTERVAL: ++ JBUF_LOCK (priv); ++ g_value_set_uint (value, priv->sync_interval); ++ JBUF_UNLOCK (priv); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +diff --git a/gst/rtpmanager/gstrtpjitterbuffer.h b/gst/rtpmanager/gstrtpjitterbuffer.h +index debb13b3a..9605e7ba2 100644 +--- a/gst/rtpmanager/gstrtpjitterbuffer.h ++++ b/gst/rtpmanager/gstrtpjitterbuffer.h +@@ -82,6 +82,8 @@ struct _GstRtpJitterBufferClass + + GType gst_rtp_jitter_buffer_get_type (void); + ++GST_ELEMENT_REGISTER_DECLARE (rtpjitterbuffer); ++ + G_END_DECLS + + #endif /* __GST_RTP_JITTER_BUFFER_H__ */ +diff --git a/gst/rtpmanager/gstrtpmanager.c b/gst/rtpmanager/gstrtpmanager.c +index 4ba624fba..59a9eeb3a 100644 +--- a/gst/rtpmanager/gstrtpmanager.c ++++ b/gst/rtpmanager/gstrtpmanager.c +@@ -32,53 +32,43 @@ + #include "gstrtpdtmfmux.h" + #include "gstrtpmux.h" + #include "gstrtpfunnel.h" ++#include "gstrtpst2022-1-fecdec.h" ++#include "gstrtpst2022-1-fecenc.h" ++#include "gstrtphdrext-twcc.h" ++#include 
"gstrtphdrext-clientaudiolevel.h" ++#include "gstrtphdrext-mid.h" ++#include "gstrtphdrext-ntp.h" ++#include "gstrtphdrext-repairedstreamid.h" ++#include "gstrtphdrext-streamid.h" + + static gboolean + plugin_init (GstPlugin * plugin) + { +- if (!gst_element_register (plugin, "rtpbin", GST_RANK_NONE, GST_TYPE_RTP_BIN)) +- return FALSE; ++ gboolean ret = FALSE; + +- if (!gst_element_register (plugin, "rtpjitterbuffer", GST_RANK_NONE, +- GST_TYPE_RTP_JITTER_BUFFER)) +- return FALSE; ++ ret |= GST_ELEMENT_REGISTER (rtpbin, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpjitterbuffer, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpptdemux, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpsession, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtprtxqueue, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtprtxreceive, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtprtxsend, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpssrcdemux, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpmux, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpdtmfmux, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpfunnel, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpst2022_1_fecdec, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtpst2022_1_fecenc, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtphdrexttwcc, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtphdrextclientaudiolevel, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtphdrextmid, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtphdrextntp64, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtphdrextstreamid, plugin); ++ ret |= GST_ELEMENT_REGISTER (rtphdrextrepairedstreamid, plugin); + +- if (!gst_element_register (plugin, "rtpptdemux", GST_RANK_NONE, +- GST_TYPE_RTP_PT_DEMUX)) +- return FALSE; +- +- if (!gst_element_register (plugin, "rtpsession", GST_RANK_NONE, +- GST_TYPE_RTP_SESSION)) +- return FALSE; +- +- if (!gst_rtp_rtx_queue_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_rtx_receive_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_rtx_send_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_element_register (plugin, "rtpssrcdemux", GST_RANK_NONE, +- GST_TYPE_RTP_SSRC_DEMUX)) +- return FALSE; +- +- if (!gst_rtp_mux_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_rtp_dtmf_mux_plugin_init (plugin)) +- return FALSE; +- +- if (!gst_element_register (plugin, "rtpfunnel", GST_RANK_NONE, +- GST_TYPE_RTP_FUNNEL)) +- return FALSE; +- +- return TRUE; ++ return ret; + } + +-GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, +- GST_VERSION_MINOR, +- rtpmanager, +- "RTP session management plugin library", +- plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN) ++GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, GST_VERSION_MINOR, rtpmanager, ++ "RTP session management plugin library", plugin_init, VERSION, "LGPL", ++ GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN) +diff --git a/gst/rtpmanager/gstrtpmux.c b/gst/rtpmanager/gstrtpmux.c +index 4d16d5d3c..6387c655c 100644 +--- a/gst/rtpmanager/gstrtpmux.c ++++ b/gst/rtpmanager/gstrtpmux.c +@@ -112,8 +112,9 @@ static void gst_rtp_mux_dispose (GObject * object); + static gboolean gst_rtp_mux_src_event_real (GstRTPMux * rtp_mux, + GstEvent * event); + +-G_DEFINE_TYPE (GstRTPMux, gst_rtp_mux, GST_TYPE_ELEMENT); +- ++G_DEFINE_TYPE_WITH_CODE (GstRTPMux, gst_rtp_mux, GST_TYPE_ELEMENT, ++ GST_DEBUG_CATEGORY_INIT (gst_rtp_mux_debug, "rtpmux", 0, "rtp muxer")); ++GST_ELEMENT_REGISTER_DEFINE (rtpmux, "rtpmux", GST_RANK_NONE, GST_TYPE_RTP_MUX); + + static void + gst_rtp_mux_class_init (GstRTPMuxClass * klass) +@@ -1013,12 +1014,3 @@ gst_rtp_mux_change_state (GstElement * element, GstStateChange transition) 
+ + return ret; + } +- +-gboolean +-gst_rtp_mux_plugin_init (GstPlugin * plugin) +-{ +- GST_DEBUG_CATEGORY_INIT (gst_rtp_mux_debug, "rtpmux", 0, "rtp muxer"); +- +- return gst_element_register (plugin, "rtpmux", GST_RANK_NONE, +- GST_TYPE_RTP_MUX); +-} +diff --git a/gst/rtpmanager/gstrtpmux.h b/gst/rtpmanager/gstrtpmux.h +index 65343da3a..82d00c7d3 100644 +--- a/gst/rtpmanager/gstrtpmux.h ++++ b/gst/rtpmanager/gstrtpmux.h +@@ -88,9 +88,9 @@ struct _GstRTPMuxClass + gboolean (*src_event) (GstRTPMux *rtp_mux, GstEvent *event); + }; + +- + GType gst_rtp_mux_get_type (void); +-gboolean gst_rtp_mux_plugin_init (GstPlugin * plugin); ++ ++GST_ELEMENT_REGISTER_DECLARE (rtpmux); + + G_END_DECLS + #endif /* __GST_RTP_MUX_H__ */ +diff --git a/gst/rtpmanager/gstrtpptdemux.c b/gst/rtpmanager/gstrtpptdemux.c +index 56ce75161..7a310ca66 100644 +--- a/gst/rtpmanager/gstrtpptdemux.c ++++ b/gst/rtpmanager/gstrtpptdemux.c +@@ -122,6 +122,8 @@ enum + + #define gst_rtp_pt_demux_parent_class parent_class + G_DEFINE_TYPE (GstRtpPtDemux, gst_rtp_pt_demux, GST_TYPE_ELEMENT); ++GST_ELEMENT_REGISTER_DEFINE (rtpptdemux, "rtpptdemux", GST_RANK_NONE, ++ GST_TYPE_RTP_PT_DEMUX); + + static void gst_rtp_pt_demux_finalize (GObject * object); + +@@ -475,8 +477,12 @@ gst_rtp_pt_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) + if (!caps) + goto no_caps; + +- if (gst_rtp_pt_demux_pt_is_ignored (rtpdemux, pt)) ++ /* must be after the get_caps() call as get_caps() may cause external code ++ * (e.g. rtpbin) to update the ignored-pt list */ ++ if (gst_rtp_pt_demux_pt_is_ignored (rtpdemux, pt)) { ++ gst_clear_caps (&caps); + goto ignored; ++ } + + klass = GST_ELEMENT_GET_CLASS (rtpdemux); + templ = gst_element_class_get_pad_template (klass, "src_%u"); +@@ -499,8 +505,31 @@ gst_rtp_pt_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) + gst_pad_set_active (srcpad, TRUE); + + /* First push the stream-start event, it must always come first */ +- gst_pad_push_event (srcpad, +- gst_pad_get_sticky_event (rtpdemux->sink, GST_EVENT_STREAM_START, 0)); ++ { ++ gchar *stream_id; ++ GstEvent *sink_event, *event; ++ guint group_id; ++ GstStreamFlags flags; ++ ++ sink_event = ++ gst_pad_get_sticky_event (rtpdemux->sink, GST_EVENT_STREAM_START, 0); ++ ++ stream_id = ++ gst_pad_create_stream_id_printf (srcpad, GST_ELEMENT_CAST (rtpdemux), ++ "%u", pt); ++ ++ event = gst_event_new_stream_start (stream_id); ++ if (gst_event_parse_group_id (sink_event, &group_id)) { ++ gst_event_set_group_id (event, group_id); ++ } ++ gst_event_parse_stream_flags (sink_event, &flags); ++ gst_event_set_stream_flags (event, flags); ++ ++ gst_pad_push_event (srcpad, event); ++ ++ gst_event_unref (sink_event); ++ g_free (stream_id); ++ } + + /* Then caps event is sent */ + gst_pad_set_caps (srcpad, caps); +@@ -561,7 +590,7 @@ invalid_buffer: + GST_ELEMENT_WARNING (rtpdemux, STREAM, DEMUX, (NULL), + ("Dropping invalid RTP payload")); + gst_buffer_unref (buf); +- return GST_FLOW_ERROR; ++ return GST_FLOW_OK; + } + no_caps: + { +diff --git a/gst/rtpmanager/gstrtpptdemux.h b/gst/rtpmanager/gstrtpptdemux.h +index 4beae863b..9ba4f7767 100644 +--- a/gst/rtpmanager/gstrtpptdemux.h ++++ b/gst/rtpmanager/gstrtpptdemux.h +@@ -60,4 +60,6 @@ struct _GstRtpPtDemuxClass + + GType gst_rtp_pt_demux_get_type (void); + ++GST_ELEMENT_REGISTER_DECLARE (rtpptdemux); ++ + #endif /* __GST_RTP_PT_DEMUX_H__ */ +diff --git a/gst/rtpmanager/gstrtprtxqueue.c b/gst/rtpmanager/gstrtprtxqueue.c +index 97678eb35..e098eb9c2 100644 +--- a/gst/rtpmanager/gstrtprtxqueue.c ++++ 
b/gst/rtpmanager/gstrtprtxqueue.c +@@ -122,7 +122,11 @@ static void gst_rtp_rtx_queue_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); + static void gst_rtp_rtx_queue_finalize (GObject * object); + +-G_DEFINE_TYPE (GstRTPRtxQueue, gst_rtp_rtx_queue, GST_TYPE_ELEMENT); ++G_DEFINE_TYPE_WITH_CODE (GstRTPRtxQueue, gst_rtp_rtx_queue, GST_TYPE_ELEMENT, ++ GST_DEBUG_CATEGORY_INIT (gst_rtp_rtx_queue_debug, "rtprtxqueue", 0, ++ "rtp retransmission queue")); ++GST_ELEMENT_REGISTER_DEFINE (rtprtxqueue, "rtprtxqueue", GST_RANK_NONE, ++ GST_TYPE_RTP_RTX_QUEUE); + + static void + gst_rtp_rtx_queue_class_init (GstRTPRtxQueueClass * klass) +@@ -514,13 +518,3 @@ gst_rtp_rtx_queue_change_state (GstElement * element, GstStateChange transition) + + return ret; + } +- +-gboolean +-gst_rtp_rtx_queue_plugin_init (GstPlugin * plugin) +-{ +- GST_DEBUG_CATEGORY_INIT (gst_rtp_rtx_queue_debug, "rtprtxqueue", 0, +- "rtp retransmission queue"); +- +- return gst_element_register (plugin, "rtprtxqueue", GST_RANK_NONE, +- GST_TYPE_RTP_RTX_QUEUE); +-} +diff --git a/gst/rtpmanager/gstrtprtxqueue.h b/gst/rtpmanager/gstrtprtxqueue.h +index bee8d44af..92269732e 100644 +--- a/gst/rtpmanager/gstrtprtxqueue.h ++++ b/gst/rtpmanager/gstrtprtxqueue.h +@@ -72,9 +72,9 @@ struct _GstRTPRtxQueueClass + GstElementClass parent_class; + }; + +- + GType gst_rtp_rtx_queue_get_type (void); +-gboolean gst_rtp_rtx_queue_plugin_init (GstPlugin * plugin); ++ ++GST_ELEMENT_REGISTER_DECLARE (rtprtxqueue); + + G_END_DECLS + #endif /* __GST_RTP_RTX_QUEUE_H__ */ +diff --git a/gst/rtpmanager/gstrtprtxreceive.c b/gst/rtpmanager/gstrtprtxreceive.c +index 77fe78493..274ceed22 100644 +--- a/gst/rtpmanager/gstrtprtxreceive.c ++++ b/gst/rtpmanager/gstrtprtxreceive.c +@@ -145,7 +145,7 @@ + #endif + + #include +-#include ++#include + #include + #include + +@@ -159,12 +159,26 @@ GST_DEBUG_CATEGORY_STATIC (gst_rtp_rtx_receive_debug); + enum + { + PROP_0, ++ PROP_SSRC_MAP, + PROP_PAYLOAD_TYPE_MAP, + PROP_NUM_RTX_REQUESTS, + PROP_NUM_RTX_PACKETS, + PROP_NUM_RTX_ASSOC_PACKETS + }; + ++enum ++{ ++ SIGNAL_0, ++ SIGNAL_ADD_EXTENSION, ++ SIGNAL_CLEAR_EXTENSIONS, ++ LAST_SIGNAL ++}; ++ ++static guint gst_rtp_rtx_receive_signals[LAST_SIGNAL] = { 0, }; ++ ++#define RTPHDREXT_STREAM_ID GST_RTP_HDREXT_BASE "sdes:rtp-stream-id" ++#define RTPHDREXT_REPAIRED_STREAM_ID GST_RTP_HDREXT_BASE "sdes:repaired-rtp-stream-id" ++ + static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, +@@ -191,7 +205,45 @@ static void gst_rtp_rtx_receive_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); + static void gst_rtp_rtx_receive_finalize (GObject * object); + +-G_DEFINE_TYPE (GstRtpRtxReceive, gst_rtp_rtx_receive, GST_TYPE_ELEMENT); ++G_DEFINE_TYPE_WITH_CODE (GstRtpRtxReceive, gst_rtp_rtx_receive, ++ GST_TYPE_ELEMENT, GST_DEBUG_CATEGORY_INIT (gst_rtp_rtx_receive_debug, ++ "rtprtxreceive", 0, "rtp retransmission receiver")); ++GST_ELEMENT_REGISTER_DEFINE (rtprtxreceive, "rtprtxreceive", GST_RANK_NONE, ++ GST_TYPE_RTP_RTX_RECEIVE); ++ ++static void ++gst_rtp_rtx_receive_add_extension (GstRtpRtxReceive * rtx, ++ GstRTPHeaderExtension * ext) ++{ ++ g_return_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext)); ++ g_return_if_fail (gst_rtp_header_extension_get_id (ext) > 0); ++ ++ GST_OBJECT_LOCK (rtx); ++ if (g_strcmp0 (gst_rtp_header_extension_get_uri (ext), ++ RTPHDREXT_STREAM_ID) == 0) { ++ gst_clear_object (&rtx->rid_stream); ++ rtx->rid_stream = gst_object_ref (ext); ++ } else if 
(g_strcmp0 (gst_rtp_header_extension_get_uri (ext), ++ RTPHDREXT_REPAIRED_STREAM_ID) == 0) { ++ gst_clear_object (&rtx->rid_repaired); ++ rtx->rid_repaired = gst_object_ref (ext); ++ } else { ++ g_warning ("rtprtxsend (%s) doesn't know how to deal with the " ++ "RTP Header Extension with URI \'%s\'", GST_OBJECT_NAME (rtx), ++ gst_rtp_header_extension_get_uri (ext)); ++ } ++ /* XXX: check for other duplicate ids? */ ++ GST_OBJECT_UNLOCK (rtx); ++} ++ ++static void ++gst_rtp_rtx_receive_clear_extensions (GstRtpRtxReceive * rtx) ++{ ++ GST_OBJECT_LOCK (rtx); ++ gst_clear_object (&rtx->rid_stream); ++ gst_clear_object (&rtx->rid_repaired); ++ GST_OBJECT_UNLOCK (rtx); ++} + + static void + gst_rtp_rtx_receive_class_init (GstRtpRtxReceiveClass * klass) +@@ -206,6 +258,22 @@ gst_rtp_rtx_receive_class_init (GstRtpRtxReceiveClass * klass) + gobject_class->set_property = gst_rtp_rtx_receive_set_property; + gobject_class->finalize = gst_rtp_rtx_receive_finalize; + ++ /** ++ * GstRtpRtxReceive:ssrc-map: ++ * ++ * Map of SSRCs to their retransmission SSRCs for SSRC-multiplexed mode. ++ * ++ * If an application know this information already (WebRTC signals this ++ * in their SDP), it can allow the rtxreceive element to know a packet ++ * is a "valid" RTX packet even if it has not been requested. ++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, PROP_SSRC_MAP, ++ g_param_spec_boxed ("ssrc-map", "SSRC Map", ++ "Map of SSRCs to their retransmission SSRCs for SSRC-multiplexed mode", ++ GST_TYPE_STRUCTURE, G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS)); ++ + g_object_class_install_property (gobject_class, PROP_PAYLOAD_TYPE_MAP, + g_param_spec_boxed ("payload-type-map", "Payload Type Map", + "Map of original payload types to their retransmission payload types", +@@ -227,6 +295,38 @@ gst_rtp_rtx_receive_class_init (GstRtpRtxReceiveClass * klass) + "correctly associated with retransmission requests", 0, G_MAXUINT, + 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); + ++ /** ++ * rtprtxreceive::add-extension: ++ * ++ * Add @ext as an extension for writing part of an RTP header extension onto ++ * outgoing RTP packets. Currently only supports using the following ++ * extension URIs. All other RTP header extensions are copied as-is. ++ * - "urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id": will be removed ++ * - "urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id": will be ++ * written instead of the "rtp-stream-id" header extension. ++ * ++ * Since: 1.22 ++ */ ++ gst_rtp_rtx_receive_signals[SIGNAL_ADD_EXTENSION] = ++ g_signal_new_class_handler ("add-extension", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, ++ G_CALLBACK (gst_rtp_rtx_receive_add_extension), NULL, NULL, NULL, ++ G_TYPE_NONE, 1, GST_TYPE_RTP_HEADER_EXTENSION); ++ ++ /** ++ * rtprtxreceive::clear-extensions: ++ * @object: the #GstRTPBasePayload ++ * ++ * Clear all RTP header extensions used by rtprtxreceive. 
++ * ++ * Since: 1.22 ++ */ ++ gst_rtp_rtx_receive_signals[SIGNAL_CLEAR_EXTENSIONS] = ++ g_signal_new_class_handler ("clear-extensions", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, ++ G_CALLBACK (gst_rtp_rtx_receive_clear_extensions), NULL, NULL, NULL, ++ G_TYPE_NONE, 0); ++ + gst_element_class_add_static_pad_template (gstelement_class, &src_factory); + gst_element_class_add_static_pad_template (gstelement_class, &sink_factory); + +@@ -254,14 +354,21 @@ gst_rtp_rtx_receive_reset (GstRtpRtxReceive * rtx) + static void + gst_rtp_rtx_receive_finalize (GObject * object) + { +- GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE (object); ++ GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE_CAST (object); + + g_hash_table_unref (rtx->ssrc2_ssrc1_map); ++ if (rtx->external_ssrc_map) ++ gst_structure_free (rtx->external_ssrc_map); + g_hash_table_unref (rtx->seqnum_ssrc1_map); + g_hash_table_unref (rtx->rtx_pt_map); + if (rtx->rtx_pt_map_structure) + gst_structure_free (rtx->rtx_pt_map_structure); + ++ gst_clear_object (&rtx->rid_stream); ++ gst_clear_object (&rtx->rid_repaired); ++ ++ gst_clear_buffer (&rtx->dummy_writable); ++ + G_OBJECT_CLASS (gst_rtp_rtx_receive_parent_class)->finalize (object); + } + +@@ -316,13 +423,15 @@ gst_rtp_rtx_receive_init (GstRtpRtxReceive * rtx) + NULL, (GDestroyNotify) ssrc_assoc_free); + + rtx->rtx_pt_map = g_hash_table_new (g_direct_hash, g_direct_equal); ++ ++ rtx->dummy_writable = gst_buffer_new (); + } + + static gboolean + gst_rtp_rtx_receive_src_event (GstPad * pad, GstObject * parent, + GstEvent * event) + { +- GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE (parent); ++ GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE_CAST (parent); + gboolean res; + + switch (GST_EVENT_TYPE (event)) { +@@ -442,13 +551,169 @@ gst_rtp_rtx_receive_src_event (GstPad * pad, GstObject * parent, + return res; + } + ++static GstMemory * ++rewrite_header_extensions (GstRtpRtxReceive * rtx, GstRTPBuffer * rtp) ++{ ++ gsize out_size = rtp->size[1] + 32; ++ guint16 bit_pattern; ++ guint8 *pdata; ++ guint wordlen; ++ GstMemory *mem; ++ GstMapInfo map; ++ ++ mem = gst_allocator_alloc (NULL, out_size, NULL); ++ ++ gst_memory_map (mem, &map, GST_MAP_READWRITE); ++ ++ if (gst_rtp_buffer_get_extension_data (rtp, &bit_pattern, (gpointer) & pdata, ++ &wordlen)) { ++ GstRTPHeaderExtensionFlags ext_flags = 0; ++ gsize bytelen = wordlen * 4; ++ guint hdr_unit_bytes; ++ gsize read_offset = 0, write_offset = 4; ++ ++ if (bit_pattern == 0xBEDE) { ++ /* one byte extensions */ ++ hdr_unit_bytes = 1; ++ ext_flags |= GST_RTP_HEADER_EXTENSION_ONE_BYTE; ++ } else if (bit_pattern >> 4 == 0x100) { ++ /* two byte extensions */ ++ hdr_unit_bytes = 2; ++ ext_flags |= GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++ } else { ++ GST_DEBUG_OBJECT (rtx, "unknown extension bit pattern 0x%02x%02x", ++ bit_pattern >> 8, bit_pattern & 0xff); ++ goto copy_as_is; ++ } ++ ++ GST_WRITE_UINT16_BE (map.data, bit_pattern); ++ ++ while (TRUE) { ++ guint8 read_id, read_len; ++ ++ if (read_offset + hdr_unit_bytes >= bytelen) ++ /* not enough remaning data */ ++ break; ++ ++ if (ext_flags & GST_RTP_HEADER_EXTENSION_ONE_BYTE) { ++ read_id = GST_READ_UINT8 (pdata + read_offset) >> 4; ++ read_len = (GST_READ_UINT8 (pdata + read_offset) & 0x0F) + 1; ++ read_offset += 1; ++ ++ if (read_id == 0) ++ /* padding */ ++ continue; ++ ++ if (read_id == 15) ++ /* special id for possible future expansion */ ++ break; ++ } else { ++ read_id = GST_READ_UINT8 (pdata + read_offset); ++ read_offset += 1; ++ ++ if (read_id == 0) ++ /* padding */ ++ continue; 
++ ++ read_len = GST_READ_UINT8 (pdata + read_offset); ++ read_offset += 1; ++ } ++ GST_TRACE_OBJECT (rtx, "found rtp header extension with id %u and " ++ "length %u", read_id, read_len); ++ ++ /* Ignore extension headers where the size does not fit */ ++ if (read_offset + read_len > bytelen) { ++ GST_WARNING_OBJECT (rtx, "Extension length extends past the " ++ "size of the extension data"); ++ break; ++ } ++ ++ /* rewrite the rtp-stream-id into a repaired-stream-id */ ++ if (rtx->rid_stream ++ && read_id == gst_rtp_header_extension_get_id (rtx->rid_repaired)) { ++ if (!gst_rtp_header_extension_read (rtx->rid_repaired, ext_flags, ++ &pdata[read_offset], read_len, rtx->dummy_writable)) { ++ GST_WARNING_OBJECT (rtx, "RTP header extension (%s) could " ++ "not read payloaded data", GST_OBJECT_NAME (rtx->rid_stream)); ++ goto copy_as_is; ++ } ++ if (rtx->rid_repaired) { ++ guint8 write_id = gst_rtp_header_extension_get_id (rtx->rid_stream); ++ gsize written; ++ char *rid; ++ ++ g_object_get (rtx->rid_repaired, "rid", &rid, NULL); ++ g_object_set (rtx->rid_stream, "rid", rid, NULL); ++ g_clear_pointer (&rid, g_free); ++ ++ written = ++ gst_rtp_header_extension_write (rtx->rid_stream, rtp->buffer, ++ ext_flags, rtx->dummy_writable, ++ &map.data[write_offset + hdr_unit_bytes], ++ map.size - write_offset - hdr_unit_bytes); ++ GST_TRACE_OBJECT (rtx->rid_repaired, "wrote %" G_GSIZE_FORMAT, ++ written); ++ if (written <= 0) { ++ GST_WARNING_OBJECT (rtx, "Failed to rewrite RID for RTX"); ++ goto copy_as_is; ++ } else { ++ if (ext_flags & GST_RTP_HEADER_EXTENSION_ONE_BYTE) { ++ map.data[write_offset] = ++ ((write_id & 0x0F) << 4) | ((written - 1) & 0x0F); ++ } else if (ext_flags & GST_RTP_HEADER_EXTENSION_TWO_BYTE) { ++ map.data[write_offset] = write_id & 0xFF; ++ map.data[write_offset + 1] = written & 0xFF; ++ } else { ++ g_assert_not_reached (); ++ goto copy_as_is; ++ } ++ write_offset += written + hdr_unit_bytes; ++ } ++ } ++ } else { ++ /* TODO: may need to write mid at different times to the original ++ * buffer to account for the difference in timing of acknowledgement ++ * of the rtx ssrc from the original ssrc. This may add extra data to ++ * the header extension space that needs to be accounted for. ++ */ ++ memcpy (&map.data[write_offset], ++ &pdata[read_offset - hdr_unit_bytes], read_len + hdr_unit_bytes); ++ write_offset += read_len + hdr_unit_bytes; ++ } ++ ++ read_offset += read_len; ++ } ++ ++ /* subtract the ext header */ ++ wordlen = write_offset / 4 + ((write_offset % 4) ? 1 : 0); ++ ++ /* wordlen in the ext data doesn't include the 4-byte header */ ++ GST_WRITE_UINT16_BE (map.data + 2, wordlen - 1); ++ ++ if (wordlen * 4 > write_offset) ++ memset (&map.data[write_offset], 0, wordlen * 4 - write_offset); ++ ++ GST_MEMDUMP_OBJECT (rtx, "generated ext data", map.data, wordlen * 4); ++ } else { ++ copy_as_is: ++ wordlen = rtp->size[1] / 4; ++ memcpy (map.data, rtp->data[1], rtp->size[1]); ++ GST_LOG_OBJECT (rtx, "copying data as-is"); ++ } ++ ++ gst_memory_unmap (mem, &map); ++ gst_memory_resize (mem, 0, wordlen * 4); ++ ++ return mem; ++} ++ + /* Copy fixed header and extension. Replace current ssrc by ssrc1, + * remove OSN and replace current seq num by OSN. + * Copy memory to avoid to manually copy each rtp buffer field. 
+ */ + static GstBuffer * +-_gst_rtp_buffer_new_from_rtx (GstRTPBuffer * rtp, guint32 ssrc1, +- guint16 orign_seqnum, guint8 origin_payload_type) ++_gst_rtp_buffer_new_from_rtx (GstRtpRtxReceive * rtx, GstRTPBuffer * rtp, ++ guint32 ssrc1, guint16 orign_seqnum, guint8 origin_payload_type) + { + GstMemory *mem = NULL; + GstRTPBuffer new_rtp = GST_RTP_BUFFER_INIT; +@@ -463,18 +728,17 @@ _gst_rtp_buffer_new_from_rtx (GstRTPBuffer * rtp, guint32 ssrc1, + + /* copy extension if any */ + if (rtp->size[1]) { +- mem = gst_memory_copy (rtp->map[1].memory, +- (guint8 *) rtp->data[1] - rtp->map[1].data, rtp->size[1]); ++ mem = rewrite_header_extensions (rtx, rtp); + gst_buffer_append_memory (new_buffer, mem); + } + + /* copy payload and remove OSN */ ++ g_assert_cmpint (rtp->size[2], >, 1); + payload_len = rtp->size[2] - 2; + mem = gst_allocator_alloc (NULL, payload_len, NULL); + + gst_memory_map (mem, &map, GST_MAP_WRITE); +- if (rtp->size[2]) +- memcpy (map.data, (guint8 *) rtp->data[2] + 2, payload_len); ++ memcpy (map.data, (guint8 *) rtp->data[2] + 2, payload_len); + gst_memory_unmap (mem, &map); + gst_buffer_append_memory (new_buffer, mem); + +@@ -511,7 +775,7 @@ _gst_rtp_buffer_new_from_rtx (GstRTPBuffer * rtp, guint32 ssrc1, + static GstFlowReturn + gst_rtp_rtx_receive_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + { +- GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE (parent); ++ GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE_CAST (parent); + GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; + GstFlowReturn ret = GST_FLOW_OK; + GstBuffer *new_buffer = NULL; +@@ -526,10 +790,17 @@ gst_rtp_rtx_receive_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + gboolean is_rtx; + gboolean drop = FALSE; + ++ if (rtx->rtx_pt_map_structure == NULL) ++ goto no_map; ++ + /* map current rtp packet to parse its header */ + if (!gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp)) + goto invalid_buffer; + ++ GST_MEMDUMP_OBJECT (rtx, "rtp header", rtp.map[0].data, rtp.map[0].size); ++ GST_MEMDUMP_OBJECT (rtx, "rtp ext", rtp.map[1].data, rtp.map[1].size); ++ GST_MEMDUMP_OBJECT (rtx, "rtp payload", rtp.map[2].data, rtp.map[2].size); ++ + ssrc = gst_rtp_buffer_get_ssrc (&rtp); + seqnum = gst_rtp_buffer_get_seq (&rtp); + payload_type = gst_rtp_buffer_get_payload_type (&rtp); +@@ -575,67 +846,76 @@ gst_rtp_rtx_receive_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + /* increase our statistic */ + ++rtx->num_rtx_packets; + +- /* read OSN in the rtx payload */ +- orign_seqnum = GST_READ_UINT16_BE (gst_rtp_buffer_get_payload (&rtp)); +- origin_payload_type = +- GPOINTER_TO_UINT (g_hash_table_lookup (rtx->rtx_pt_map, +- GUINT_TO_POINTER (payload_type))); +- +- GST_DEBUG_OBJECT (rtx, "Got rtx packet: rtx seqnum %u, rtx ssrc %X, " +- "rtx pt %u, orig seqnum %u, orig pt %u", seqnum, ssrc, payload_type, +- orign_seqnum, origin_payload_type); +- +- /* first we check if we already have associated this retransmission stream +- * to a master stream */ +- if (g_hash_table_lookup_extended (rtx->ssrc2_ssrc1_map, +- GUINT_TO_POINTER (ssrc), NULL, &ssrc1)) { +- GST_TRACE_OBJECT (rtx, +- "packet is from retransmission stream %X already associated to " +- "master stream %X", ssrc, GPOINTER_TO_UINT (ssrc1)); +- ssrc2 = ssrc; +- } else { +- SsrcAssoc *assoc; +- +- /* the current retransmitted packet has its rtx stream not already +- * associated to a master stream, so retrieve it from our request +- * history */ +- if (g_hash_table_lookup_extended (rtx->seqnum_ssrc1_map, +- GUINT_TO_POINTER (orign_seqnum), NULL, 
(gpointer *) & assoc)) { +- GST_LOG_OBJECT (rtx, +- "associating retransmitted stream %X to master stream %X thanks " +- "to rtx packet %u (orig seqnum %u)", ssrc, assoc->ssrc, seqnum, +- orign_seqnum); +- ssrc1 = GUINT_TO_POINTER (assoc->ssrc); ++ /* check if there enough data to read OSN from the paylaod, ++ we need at least two bytes ++ */ ++ if (gst_rtp_buffer_get_payload_len (&rtp) > 1) { ++ /* read OSN in the rtx payload */ ++ orign_seqnum = GST_READ_UINT16_BE (gst_rtp_buffer_get_payload (&rtp)); ++ origin_payload_type = ++ GPOINTER_TO_UINT (g_hash_table_lookup (rtx->rtx_pt_map, ++ GUINT_TO_POINTER (payload_type))); ++ ++ GST_DEBUG_OBJECT (rtx, "Got rtx packet: rtx seqnum %u, rtx ssrc %X, " ++ "rtx pt %u, orig seqnum %u, orig pt %u", seqnum, ssrc, payload_type, ++ orign_seqnum, origin_payload_type); ++ ++ /* first we check if we already have associated this retransmission stream ++ * to a master stream */ ++ if (g_hash_table_lookup_extended (rtx->ssrc2_ssrc1_map, ++ GUINT_TO_POINTER (ssrc), NULL, &ssrc1)) { ++ GST_TRACE_OBJECT (rtx, ++ "packet is from retransmission stream %X already associated to " ++ "master stream %X", ssrc, GPOINTER_TO_UINT (ssrc1)); + ssrc2 = ssrc; +- +- /* just put a guard */ +- if (GPOINTER_TO_UINT (ssrc1) == ssrc2) +- GST_WARNING_OBJECT (rtx, "RTX receiver ssrc2_ssrc1_map bad state, " +- "master and rtx SSRCs are the same (%X)\n", ssrc); +- +- /* free the spot so that this seqnum can be used to do another +- * association */ +- g_hash_table_remove (rtx->seqnum_ssrc1_map, +- GUINT_TO_POINTER (orign_seqnum)); +- +- /* actually do the association between rtx stream and master stream */ +- g_hash_table_insert (rtx->ssrc2_ssrc1_map, GUINT_TO_POINTER (ssrc2), +- ssrc1); +- +- /* also do the association between master stream and rtx stream +- * every ssrc are unique so we can use the same hash table +- * for both retrieving the ssrc1 from ssrc2 and also ssrc2 from ssrc1 +- */ +- g_hash_table_insert (rtx->ssrc2_ssrc1_map, ssrc1, +- GUINT_TO_POINTER (ssrc2)); +- + } else { +- /* we are not able to associate this rtx packet with a master stream */ +- GST_INFO_OBJECT (rtx, +- "dropping rtx packet %u because its orig seqnum (%u) is not in our" +- " pending retransmission requests", seqnum, orign_seqnum); +- drop = TRUE; ++ SsrcAssoc *assoc; ++ ++ /* the current retransmitted packet has its rtx stream not already ++ * associated to a master stream, so retrieve it from our request ++ * history */ ++ if (g_hash_table_lookup_extended (rtx->seqnum_ssrc1_map, ++ GUINT_TO_POINTER (orign_seqnum), NULL, (gpointer *) & assoc)) { ++ GST_LOG_OBJECT (rtx, ++ "associating retransmitted stream %X to master stream %X thanks " ++ "to rtx packet %u (orig seqnum %u)", ssrc, assoc->ssrc, seqnum, ++ orign_seqnum); ++ ssrc1 = GUINT_TO_POINTER (assoc->ssrc); ++ ssrc2 = ssrc; ++ ++ /* just put a guard */ ++ if (GPOINTER_TO_UINT (ssrc1) == ssrc2) ++ GST_WARNING_OBJECT (rtx, "RTX receiver ssrc2_ssrc1_map bad state, " ++ "master and rtx SSRCs are the same (%X)\n", ssrc); ++ ++ /* free the spot so that this seqnum can be used to do another ++ * association */ ++ g_hash_table_remove (rtx->seqnum_ssrc1_map, ++ GUINT_TO_POINTER (orign_seqnum)); ++ ++ /* actually do the association between rtx stream and master stream */ ++ g_hash_table_insert (rtx->ssrc2_ssrc1_map, GUINT_TO_POINTER (ssrc2), ++ ssrc1); ++ ++ /* also do the association between master stream and rtx stream ++ * every ssrc are unique so we can use the same hash table ++ * for both retrieving the ssrc1 from ssrc2 and also ssrc2 from ssrc1 
++ */ ++ g_hash_table_insert (rtx->ssrc2_ssrc1_map, ssrc1, ++ GUINT_TO_POINTER (ssrc2)); ++ ++ } else { ++ /* we are not able to associate this rtx packet with a master stream */ ++ GST_INFO_OBJECT (rtx, ++ "dropping rtx packet %u because its orig seqnum (%u) is not in our" ++ " pending retransmission requests", seqnum, orign_seqnum); ++ drop = TRUE; ++ } + } ++ } else { ++ /* the rtx packet is empty */ ++ GST_DEBUG_OBJECT (rtx, "drop rtx packet because it is empty"); ++ drop = TRUE; + } + } + +@@ -655,7 +935,7 @@ gst_rtp_rtx_receive_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + /* create the retransmission packet */ + if (is_rtx) + new_buffer = +- _gst_rtp_buffer_new_from_rtx (&rtp, GPOINTER_TO_UINT (ssrc1), ++ _gst_rtp_buffer_new_from_rtx (rtx, &rtp, GPOINTER_TO_UINT (ssrc1), + orign_seqnum, origin_payload_type); + + gst_rtp_buffer_unmap (&rtp); +@@ -675,10 +955,14 @@ gst_rtp_rtx_receive_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + + return ret; + ++no_map: ++ { ++ GST_DEBUG_OBJECT (pad, "No map set, passthrough"); ++ return gst_pad_push (rtx->srcpad, buffer); ++ } + invalid_buffer: + { +- GST_ELEMENT_WARNING (rtx, STREAM, DECODE, (NULL), +- ("Received invalid RTP payload, dropping")); ++ GST_INFO_OBJECT (pad, "Received invalid RTP payload, dropping"); + gst_buffer_unref (buffer); + return GST_FLOW_OK; + } +@@ -688,7 +972,7 @@ static void + gst_rtp_rtx_receive_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec) + { +- GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE (object); ++ GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE_CAST (object); + + switch (prop_id) { + case PROP_PAYLOAD_TYPE_MAP: +@@ -738,9 +1022,19 @@ static void + gst_rtp_rtx_receive_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec) + { +- GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE (object); ++ GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE_CAST (object); + + switch (prop_id) { ++ case PROP_SSRC_MAP: ++ GST_OBJECT_LOCK (rtx); ++ if (rtx->external_ssrc_map) ++ gst_structure_free (rtx->external_ssrc_map); ++ rtx->external_ssrc_map = g_value_dup_boxed (value); ++ g_hash_table_remove_all (rtx->ssrc2_ssrc1_map); ++ gst_structure_foreach (rtx->external_ssrc_map, ++ structure_to_hash_table_inv, rtx->ssrc2_ssrc1_map); ++ GST_OBJECT_UNLOCK (rtx); ++ break; + case PROP_PAYLOAD_TYPE_MAP: + GST_OBJECT_LOCK (rtx); + if (rtx->rtx_pt_map_structure) +@@ -764,7 +1058,7 @@ gst_rtp_rtx_receive_change_state (GstElement * element, + GstStateChangeReturn ret; + GstRtpRtxReceive *rtx; + +- rtx = GST_RTP_RTX_RECEIVE (element); ++ rtx = GST_RTP_RTX_RECEIVE_CAST (element); + + switch (transition) { + default: +@@ -785,13 +1079,3 @@ gst_rtp_rtx_receive_change_state (GstElement * element, + + return ret; + } +- +-gboolean +-gst_rtp_rtx_receive_plugin_init (GstPlugin * plugin) +-{ +- GST_DEBUG_CATEGORY_INIT (gst_rtp_rtx_receive_debug, "rtprtxreceive", 0, +- "rtp retransmission receiver"); +- +- return gst_element_register (plugin, "rtprtxreceive", GST_RANK_NONE, +- GST_TYPE_RTP_RTX_RECEIVE); +-} +diff --git a/gst/rtpmanager/gstrtprtxreceive.h b/gst/rtpmanager/gstrtprtxreceive.h +index cf6c4a9f5..833c0a2fb 100644 +--- a/gst/rtpmanager/gstrtprtxreceive.h ++++ b/gst/rtpmanager/gstrtprtxreceive.h +@@ -25,17 +25,19 @@ + #define __GST_RTP_RTX_RECEIVE_H__ + + #include +-#include ++#include + + G_BEGIN_DECLS ++typedef struct _GstRtpRtxReceive GstRtpRtxReceive; ++typedef struct _GstRtpRtxReceiveClass GstRtpRtxReceiveClass; ++ + #define GST_TYPE_RTP_RTX_RECEIVE 
(gst_rtp_rtx_receive_get_type()) + #define GST_RTP_RTX_RECEIVE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_RTX_RECEIVE, GstRtpRtxReceive)) + #define GST_RTP_RTX_RECEIVE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_RTX_RECEIVE, GstRtpRtxReceiveClass)) + #define GST_RTP_RTX_RECEIVE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_RTP_RTX_RECEIVE, GstRtpRtxReceiveClass)) + #define GST_IS_RTP_RTX_RECEIVE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_RTX_RECEIVE)) + #define GST_IS_RTP_RTX_RECEIVE_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_RTX_RECEIVE)) +-typedef struct _GstRtpRtxReceive GstRtpRtxReceive; +-typedef struct _GstRtpRtxReceiveClass GstRtpRtxReceiveClass; ++#define GST_RTP_RTX_RECEIVE_CAST(obj) ((GstRtpRtxReceive *)(obj)) + + struct _GstRtpRtxReceive + { +@@ -50,6 +52,8 @@ struct _GstRtpRtxReceive + * as we make sure all ssrc are unique */ + GHashTable *ssrc2_ssrc1_map; + ++ GstStructure *external_ssrc_map; ++ + /* contains seqnum of request packets of whom their ssrc have + * not been associated to a rtx stream yet */ + GHashTable *seqnum_ssrc1_map; +@@ -65,6 +69,11 @@ struct _GstRtpRtxReceive + guint num_rtx_assoc_packets; + + GstClockTime last_time; ++ ++ GstRTPHeaderExtension *rid_stream; ++ GstRTPHeaderExtension *rid_repaired; ++ ++ GstBuffer *dummy_writable; + }; + + struct _GstRtpRtxReceiveClass +@@ -74,7 +83,8 @@ struct _GstRtpRtxReceiveClass + + + GType gst_rtp_rtx_receive_get_type (void); +-gboolean gst_rtp_rtx_receive_plugin_init (GstPlugin * plugin); ++ ++GST_ELEMENT_REGISTER_DECLARE (rtprtxreceive); + + G_END_DECLS + #endif /* __GST_RTP_RTX_RECEIVE_H__ */ +diff --git a/gst/rtpmanager/gstrtprtxsend.c b/gst/rtpmanager/gstrtprtxsend.c +index bfa5db809..349bb169b 100644 +--- a/gst/rtpmanager/gstrtprtxsend.c ++++ b/gst/rtpmanager/gstrtprtxsend.c +@@ -41,7 +41,6 @@ + #endif + + #include +-#include + #include + #include + +@@ -66,6 +65,20 @@ enum + PROP_CLOCK_RATE_MAP, + }; + ++enum ++{ ++ SIGNAL_0, ++ SIGNAL_ADD_EXTENSION, ++ SIGNAL_CLEAR_EXTENSIONS, ++ LAST_SIGNAL ++}; ++ ++static guint gst_rtp_rtx_send_signals[LAST_SIGNAL] = { 0, }; ++ ++#define RTPHDREXT_BUNDLE_MID GST_RTP_HDREXT_BASE "sdes:mid" ++#define RTPHDREXT_STREAM_ID GST_RTP_HDREXT_BASE "sdes:rtp-stream-id" ++#define RTPHDREXT_REPAIRED_STREAM_ID GST_RTP_HDREXT_BASE "sdes:repaired-rtp-stream-id" ++ + static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, +@@ -103,7 +116,47 @@ static void gst_rtp_rtx_send_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); + static void gst_rtp_rtx_send_finalize (GObject * object); + +-G_DEFINE_TYPE (GstRtpRtxSend, gst_rtp_rtx_send, GST_TYPE_ELEMENT); ++static void ++gst_rtp_rtx_send_add_extension (GstRtpRtxSend * rtx, ++ GstRTPHeaderExtension * ext) ++{ ++ g_return_if_fail (GST_IS_RTP_HEADER_EXTENSION (ext)); ++ g_return_if_fail (gst_rtp_header_extension_get_id (ext) > 0); ++ ++ GST_OBJECT_LOCK (rtx); ++ if (g_strcmp0 (gst_rtp_header_extension_get_uri (ext), ++ RTPHDREXT_STREAM_ID) == 0) { ++ gst_clear_object (&rtx->rid_stream); ++ rtx->rid_stream = gst_object_ref (ext); ++ } else if (g_strcmp0 (gst_rtp_header_extension_get_uri (ext), ++ RTPHDREXT_REPAIRED_STREAM_ID) == 0) { ++ gst_clear_object (&rtx->rid_repaired); ++ rtx->rid_repaired = gst_object_ref (ext); ++ } else { ++ g_warning ("rtprtxsend (%s) doesn't know how to deal with the " ++ "RTP Header Extension with URI \'%s\'", GST_OBJECT_NAME (rtx), ++ gst_rtp_header_extension_get_uri (ext)); ++ 
} ++ /* XXX: check for other duplicate ids? */ ++ GST_OBJECT_UNLOCK (rtx); ++} ++ ++static void ++gst_rtp_rtx_send_clear_extensions (GstRtpRtxSend * rtx) ++{ ++ GST_OBJECT_LOCK (rtx); ++ gst_clear_object (&rtx->rid_stream); ++ gst_clear_object (&rtx->rid_repaired); ++ GST_OBJECT_UNLOCK (rtx); ++} ++ ++G_DEFINE_TYPE_WITH_CODE (GstRtpRtxSend, gst_rtp_rtx_send, GST_TYPE_ELEMENT, ++ GST_DEBUG_CATEGORY_INIT (gst_rtp_rtx_send_debug, "rtprtxsend", 0, ++ "rtp retransmission sender")); ++GST_ELEMENT_REGISTER_DEFINE (rtprtxsend, "rtprtxsend", GST_RANK_NONE, ++ GST_TYPE_RTP_RTX_SEND); ++ ++#define IS_RTX_ENABLED(rtx) (g_hash_table_size ((rtx)->rtx_pt_map) > 0) + + typedef struct + { +@@ -148,6 +201,60 @@ ssrc_rtx_data_free (SSRCRtxData * data) + g_slice_free (SSRCRtxData, data); + } + ++typedef enum ++{ ++ RTX_TASK_START, ++ RTX_TASK_PAUSE, ++ RTX_TASK_STOP, ++} RtxTaskState; ++ ++static void ++gst_rtp_rtx_send_set_flushing (GstRtpRtxSend * rtx, gboolean flush) ++{ ++ GST_OBJECT_LOCK (rtx); ++ gst_data_queue_set_flushing (rtx->queue, flush); ++ gst_data_queue_flush (rtx->queue); ++ GST_OBJECT_UNLOCK (rtx); ++} ++ ++static gboolean ++gst_rtp_rtx_send_set_task_state (GstRtpRtxSend * rtx, RtxTaskState task_state) ++{ ++ GstTask *task = GST_PAD_TASK (rtx->srcpad); ++ GstPadMode mode = GST_PAD_MODE (rtx->srcpad); ++ gboolean ret = TRUE; ++ ++ switch (task_state) { ++ case RTX_TASK_START: ++ { ++ gboolean active = task && GST_TASK_STATE (task) == GST_TASK_STARTED; ++ if (IS_RTX_ENABLED (rtx) && mode != GST_PAD_MODE_NONE && !active) { ++ GST_DEBUG_OBJECT (rtx, "Starting RTX task"); ++ gst_rtp_rtx_send_set_flushing (rtx, FALSE); ++ ret = gst_pad_start_task (rtx->srcpad, ++ (GstTaskFunction) gst_rtp_rtx_send_src_loop, rtx, NULL); ++ } ++ break; ++ } ++ case RTX_TASK_PAUSE: ++ if (task) { ++ GST_DEBUG_OBJECT (rtx, "Pausing RTX task"); ++ gst_rtp_rtx_send_set_flushing (rtx, TRUE); ++ ret = gst_pad_pause_task (rtx->srcpad); ++ } ++ break; ++ case RTX_TASK_STOP: ++ if (task) { ++ GST_DEBUG_OBJECT (rtx, "Stopping RTX task"); ++ gst_rtp_rtx_send_set_flushing (rtx, TRUE); ++ ret = gst_pad_stop_task (rtx->srcpad); ++ } ++ break; ++ } ++ ++ return ret; ++} ++ + static void + gst_rtp_rtx_send_class_init (GstRtpRtxSendClass * klass) + { +@@ -198,6 +305,38 @@ gst_rtp_rtx_send_class_init (GstRtpRtxSendClass * klass) + "Map of payload types to their clock rates", + GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + ++ /** ++ * rtprtxsend::add-extension: ++ * ++ * Add @ext as an extension for writing part of an RTP header extension onto ++ * outgoing RTP packets. Currently only supports using the following ++ * extension URIs. All other RTP header extensions are copied as-is. ++ * - "urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id": will be removed ++ * - "urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id": will be ++ * written instead of the "rtp-stream-id" header extension. ++ * ++ * Since: 1.22 ++ */ ++ gst_rtp_rtx_send_signals[SIGNAL_ADD_EXTENSION] = ++ g_signal_new_class_handler ("add-extension", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, ++ G_CALLBACK (gst_rtp_rtx_send_add_extension), NULL, NULL, NULL, ++ G_TYPE_NONE, 1, GST_TYPE_RTP_HEADER_EXTENSION); ++ ++ /** ++ * rtprtxsend::clear-extensions: ++ * @object: the #GstRTPBasePayload ++ * ++ * Clear all RTP header extensions used by this rtprtxsend. 
++ * ++ * Since: 1.22 ++ */ ++ gst_rtp_rtx_send_signals[SIGNAL_CLEAR_EXTENSIONS] = ++ g_signal_new_class_handler ("clear-extensions", G_TYPE_FROM_CLASS (klass), ++ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, ++ G_CALLBACK (gst_rtp_rtx_send_clear_extensions), NULL, NULL, NULL, ++ G_TYPE_NONE, 0); ++ + gst_element_class_add_static_pad_template (gstelement_class, &src_factory); + gst_element_class_add_static_pad_template (gstelement_class, &sink_factory); + +@@ -225,7 +364,7 @@ gst_rtp_rtx_send_reset (GstRtpRtxSend * rtx) + static void + gst_rtp_rtx_send_finalize (GObject * object) + { +- GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (object); ++ GstRtpRtxSend *rtx = GST_RTP_RTX_SEND_CAST (object); + + g_hash_table_unref (rtx->ssrc_data); + g_hash_table_unref (rtx->rtx_ssrcs); +@@ -239,6 +378,11 @@ gst_rtp_rtx_send_finalize (GObject * object) + gst_structure_free (rtx->clock_rate_map_structure); + g_object_unref (rtx->queue); + ++ gst_clear_object (&rtx->rid_stream); ++ gst_clear_object (&rtx->rid_repaired); ++ ++ gst_clear_buffer (&rtx->dummy_writable); ++ + G_OBJECT_CLASS (gst_rtp_rtx_send_parent_class)->finalize (object); + } + +@@ -281,15 +425,8 @@ gst_rtp_rtx_send_init (GstRtpRtxSend * rtx) + + rtx->max_size_time = DEFAULT_MAX_SIZE_TIME; + rtx->max_size_packets = DEFAULT_MAX_SIZE_PACKETS; +-} + +-static void +-gst_rtp_rtx_send_set_flushing (GstRtpRtxSend * rtx, gboolean flush) +-{ +- GST_OBJECT_LOCK (rtx); +- gst_data_queue_set_flushing (rtx->queue, flush); +- gst_data_queue_flush (rtx->queue); +- GST_OBJECT_UNLOCK (rtx); ++ rtx->dummy_writable = gst_buffer_new (); + } + + static gboolean +@@ -371,6 +508,162 @@ gst_rtp_rtx_send_get_ssrc_data (GstRtpRtxSend * rtx, guint32 ssrc) + return data; + } + ++static GstMemory * ++rewrite_header_extensions (GstRtpRtxSend * rtx, GstRTPBuffer * rtp) ++{ ++ gsize out_size = rtp->size[1] + 32; ++ guint16 bit_pattern; ++ guint8 *pdata; ++ guint wordlen; ++ GstMemory *mem; ++ GstMapInfo map; ++ ++ mem = gst_allocator_alloc (NULL, out_size, NULL); ++ ++ gst_memory_map (mem, &map, GST_MAP_READWRITE); ++ ++ if (gst_rtp_buffer_get_extension_data (rtp, &bit_pattern, (gpointer) & pdata, ++ &wordlen)) { ++ GstRTPHeaderExtensionFlags ext_flags = 0; ++ gsize bytelen = wordlen * 4; ++ guint hdr_unit_bytes; ++ gsize read_offset = 0, write_offset = 4; ++ ++ if (bit_pattern == 0xBEDE) { ++ /* one byte extensions */ ++ hdr_unit_bytes = 1; ++ ext_flags |= GST_RTP_HEADER_EXTENSION_ONE_BYTE; ++ } else if (bit_pattern >> 4 == 0x100) { ++ /* two byte extensions */ ++ hdr_unit_bytes = 2; ++ ext_flags |= GST_RTP_HEADER_EXTENSION_TWO_BYTE; ++ } else { ++ GST_DEBUG_OBJECT (rtx, "unknown extension bit pattern 0x%02x%02x", ++ bit_pattern >> 8, bit_pattern & 0xff); ++ goto copy_as_is; ++ } ++ ++ GST_WRITE_UINT16_BE (map.data, bit_pattern); ++ ++ while (TRUE) { ++ guint8 read_id, read_len; ++ ++ if (read_offset + hdr_unit_bytes >= bytelen) ++ /* not enough remaning data */ ++ break; ++ ++ if (ext_flags & GST_RTP_HEADER_EXTENSION_ONE_BYTE) { ++ read_id = GST_READ_UINT8 (pdata + read_offset) >> 4; ++ read_len = (GST_READ_UINT8 (pdata + read_offset) & 0x0F) + 1; ++ read_offset += 1; ++ ++ if (read_id == 0) ++ /* padding */ ++ continue; ++ ++ if (read_id == 15) ++ /* special id for possible future expansion */ ++ break; ++ } else { ++ read_id = GST_READ_UINT8 (pdata + read_offset); ++ read_offset += 1; ++ ++ if (read_id == 0) ++ /* padding */ ++ continue; ++ ++ read_len = GST_READ_UINT8 (pdata + read_offset); ++ read_offset += 1; ++ } ++ GST_TRACE_OBJECT (rtx, "found rtp header extension with id %u 
and " ++ "length %u", read_id, read_len); ++ ++ /* Ignore extension headers where the size does not fit */ ++ if (read_offset + read_len > bytelen) { ++ GST_WARNING_OBJECT (rtx, "Extension length extends past the " ++ "size of the extension data"); ++ break; ++ } ++ ++ /* rewrite the rtp-stream-id into a repaired-stream-id */ ++ if (rtx->rid_stream ++ && read_id == gst_rtp_header_extension_get_id (rtx->rid_stream)) { ++ if (!gst_rtp_header_extension_read (rtx->rid_stream, ext_flags, ++ &pdata[read_offset], read_len, rtx->dummy_writable)) { ++ GST_WARNING_OBJECT (rtx, "RTP header extension (%s) could " ++ "not read payloaded data", GST_OBJECT_NAME (rtx->rid_stream)); ++ goto copy_as_is; ++ } ++ if (rtx->rid_repaired) { ++ guint8 write_id = gst_rtp_header_extension_get_id (rtx->rid_repaired); ++ gsize written; ++ char *rid; ++ ++ g_object_get (rtx->rid_stream, "rid", &rid, NULL); ++ g_object_set (rtx->rid_repaired, "rid", rid, NULL); ++ g_clear_pointer (&rid, g_free); ++ ++ written = ++ gst_rtp_header_extension_write (rtx->rid_repaired, rtp->buffer, ++ ext_flags, rtx->dummy_writable, ++ &map.data[write_offset + hdr_unit_bytes], ++ map.size - write_offset - hdr_unit_bytes); ++ GST_TRACE_OBJECT (rtx->rid_repaired, "wrote %" G_GSIZE_FORMAT, ++ written); ++ if (written <= 0) { ++ GST_WARNING_OBJECT (rtx, "Failed to rewrite RID for RTX"); ++ goto copy_as_is; ++ } else { ++ if (ext_flags & GST_RTP_HEADER_EXTENSION_ONE_BYTE) { ++ map.data[write_offset] = ++ ((write_id & 0x0F) << 4) | ((written - 1) & 0x0F); ++ } else if (ext_flags & GST_RTP_HEADER_EXTENSION_TWO_BYTE) { ++ map.data[write_offset] = write_id & 0xFF; ++ map.data[write_offset + 1] = written & 0xFF; ++ } else { ++ g_assert_not_reached (); ++ goto copy_as_is; ++ } ++ write_offset += written + hdr_unit_bytes; ++ } ++ } ++ } else { ++ /* TODO: may need to write mid at different times to the original ++ * buffer to account for the difference in timing of acknowledgement ++ * of the rtx ssrc from the original ssrc. This may add extra data to ++ * the header extension space that needs to be accounted for. ++ */ ++ memcpy (&map.data[write_offset], &pdata[read_offset - hdr_unit_bytes], ++ read_len + hdr_unit_bytes); ++ write_offset += read_len + hdr_unit_bytes; ++ } ++ ++ read_offset += read_len; ++ } ++ ++ /* subtract the ext header */ ++ wordlen = write_offset / 4 + ((write_offset % 4) ? 1 : 0); ++ ++ /* wordlen in the ext data doesn't include the 4-byte header */ ++ GST_WRITE_UINT16_BE (map.data + 2, wordlen - 1); ++ ++ if (wordlen * 4 > write_offset) ++ memset (&map.data[write_offset], 0, wordlen * 4 - write_offset); ++ ++ GST_MEMDUMP_OBJECT (rtx, "generated ext data", map.data, wordlen * 4); ++ } else { ++ copy_as_is: ++ wordlen = rtp->size[1] / 4; ++ memcpy (map.data, rtp->data[1], rtp->size[1]); ++ GST_LOG_OBJECT (rtx, "copying data as-is"); ++ } ++ ++ gst_memory_unmap (mem, &map); ++ gst_memory_resize (mem, 0, wordlen * 4); ++ ++ return mem; ++} ++ + /* Copy fixed header and extension. Add OSN before to copy payload + * Copy memory to avoid to manually copy each rtp buffer field. 
+ */ +@@ -411,10 +704,7 @@ gst_rtp_rtx_buffer_new (GstRtpRtxSend * rtx, GstBuffer * buffer) + + /* copy extension if any */ + if (rtp.size[1]) { +- mem = gst_allocator_alloc (NULL, rtp.size[1], NULL); +- gst_memory_map (mem, &map, GST_MAP_WRITE); +- memcpy (map.data, rtp.data[1], rtp.size[1]); +- gst_memory_unmap (mem, &map); ++ mem = rewrite_header_extensions (rtx, &rtp); + gst_buffer_append_memory (new_buffer, mem); + } + +@@ -444,6 +734,9 @@ gst_rtp_rtx_buffer_new (GstRtpRtxSend * rtx, GstBuffer * buffer) + /* Copy over timestamps */ + gst_buffer_copy_into (new_buffer, buffer, GST_BUFFER_COPY_TIMESTAMPS, 0, -1); + ++ /* mark this is a RETRANSMISSION buffer */ ++ GST_BUFFER_FLAG_SET (new_buffer, GST_RTP_BUFFER_FLAG_RETRANSMISSION); ++ + return new_buffer; + } + +@@ -460,7 +753,7 @@ buffer_queue_items_cmp (BufferQueueItem * a, BufferQueueItem * b, + static gboolean + gst_rtp_rtx_send_src_event (GstPad * pad, GstObject * parent, GstEvent * event) + { +- GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (parent); ++ GstRtpRtxSend *rtx = GST_RTP_RTX_SEND_CAST (parent); + gboolean res; + + switch (GST_EVENT_TYPE (event)) { +@@ -600,19 +893,16 @@ gst_rtp_rtx_send_src_event (GstPad * pad, GstObject * parent, GstEvent * event) + static gboolean + gst_rtp_rtx_send_sink_event (GstPad * pad, GstObject * parent, GstEvent * event) + { +- GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (parent); ++ GstRtpRtxSend *rtx = GST_RTP_RTX_SEND_CAST (parent); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_FLUSH_START: + gst_pad_push_event (rtx->srcpad, event); +- gst_rtp_rtx_send_set_flushing (rtx, TRUE); +- gst_pad_pause_task (rtx->srcpad); ++ gst_rtp_rtx_send_set_task_state (rtx, RTX_TASK_PAUSE); + return TRUE; + case GST_EVENT_FLUSH_STOP: + gst_pad_push_event (rtx->srcpad, event); +- gst_rtp_rtx_send_set_flushing (rtx, FALSE); +- gst_pad_start_task (rtx->srcpad, +- (GstTaskFunction) gst_rtp_rtx_send_src_loop, rtx, NULL); ++ gst_rtp_rtx_send_set_task_state (rtx, RTX_TASK_START); + return TRUE; + case GST_EVENT_EOS: + GST_INFO_OBJECT (rtx, "Got EOS - enqueueing it"); +@@ -647,7 +937,8 @@ gst_rtp_rtx_send_sink_event (GstPad * pad, GstObject * parent, GstEvent * event) + GUINT_TO_POINTER (payload), NULL, &rtx_payload)) + rtx_payload = GINT_TO_POINTER (-1); + +- if (GPOINTER_TO_INT (rtx_payload) == -1 && payload != -1) ++ if (rtx->rtx_pt_map_structure && GPOINTER_TO_INT (rtx_payload) == -1 ++ && payload != -1) + GST_WARNING_OBJECT (rtx, "Payload %d not in rtx-pt-map", payload); + + GST_DEBUG_OBJECT (rtx, +@@ -656,14 +947,16 @@ gst_rtp_rtx_send_sink_event (GstPad * pad, GstObject * parent, GstEvent * event) + + gst_structure_get_int (s, "clock-rate", &data->clock_rate); + +- /* The session might need to know the RTX ssrc */ + caps = gst_caps_copy (caps); +- gst_caps_set_simple (caps, "rtx-ssrc", G_TYPE_UINT, data->rtx_ssrc, +- "rtx-seqnum-offset", G_TYPE_UINT, data->seqnum_base, NULL); + +- if (GPOINTER_TO_INT (rtx_payload) != -1) ++ /* The session might need to know the RTX ssrc */ ++ if (GPOINTER_TO_INT (rtx_payload) != -1) { ++ gst_caps_set_simple (caps, "rtx-ssrc", G_TYPE_UINT, data->rtx_ssrc, ++ "rtx-seqnum-offset", G_TYPE_UINT, data->seqnum_base, NULL); ++ + gst_caps_set_simple (caps, "rtx-payload", G_TYPE_INT, + GPOINTER_TO_INT (rtx_payload), NULL); ++ } + + GST_DEBUG_OBJECT (rtx, "got clock-rate from caps: %d for ssrc: %u", + data->clock_rate, ssrc); +@@ -696,18 +989,24 @@ gst_rtp_rtx_send_get_ts_diff (SSRCRtxData * data) + if (!high_buf || !low_buf || high_buf == low_buf) + return 0; + +- high_ts = 
high_buf->timestamp; +- low_ts = low_buf->timestamp; ++ if (data->clock_rate) { ++ high_ts = high_buf->timestamp; ++ low_ts = low_buf->timestamp; + +- /* it needs to work if ts wraps */ +- if (high_ts >= low_ts) { +- result = (guint32) (high_ts - low_ts); ++ /* it needs to work if ts wraps */ ++ if (high_ts >= low_ts) { ++ result = (guint32) (high_ts - low_ts); ++ } else { ++ result = (guint32) (high_ts + G_MAXUINT32 + 1 - low_ts); ++ } ++ result = gst_util_uint64_scale_int (result, 1000, data->clock_rate); + } else { +- result = (guint32) (high_ts + G_MAXUINT32 + 1 - low_ts); ++ high_ts = GST_BUFFER_PTS (high_buf->buffer); ++ low_ts = GST_BUFFER_PTS (low_buf->buffer); ++ result = gst_util_uint64_scale_int_round (high_ts - low_ts, 1, GST_MSECOND); + } + +- /* return value in ms instead of clock ticks */ +- return (guint32) gst_util_uint64_scale_int (result, 1000, data->clock_rate); ++ return result; + } + + /* Must be called with lock */ +@@ -764,11 +1063,12 @@ process_buffer (GstRtpRtxSend * rtx, GstBuffer * buffer) + static GstFlowReturn + gst_rtp_rtx_send_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer) + { +- GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (parent); ++ GstRtpRtxSend *rtx = GST_RTP_RTX_SEND_CAST (parent); + GstFlowReturn ret; + + GST_OBJECT_LOCK (rtx); +- process_buffer (rtx, buffer); ++ if (rtx->rtx_pt_map_structure) ++ process_buffer (rtx, buffer); + GST_OBJECT_UNLOCK (rtx); + ret = gst_pad_push (rtx->srcpad, buffer); + +@@ -786,7 +1086,7 @@ static GstFlowReturn + gst_rtp_rtx_send_chain_list (GstPad * pad, GstObject * parent, + GstBufferList * list) + { +- GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (parent); ++ GstRtpRtxSend *rtx = GST_RTP_RTX_SEND_CAST (parent); + GstFlowReturn ret; + + GST_OBJECT_LOCK (rtx); +@@ -829,7 +1129,7 @@ gst_rtp_rtx_send_src_loop (GstRtpRtxSend * rtx) + data->destroy (data); + } else { + GST_LOG_OBJECT (rtx, "flushing"); +- gst_pad_pause_task (rtx->srcpad); ++ gst_rtp_rtx_send_set_task_state (rtx, RTX_TASK_PAUSE); + } + } + +@@ -837,18 +1137,15 @@ static gboolean + gst_rtp_rtx_send_activate_mode (GstPad * pad, GstObject * parent, + GstPadMode mode, gboolean active) + { +- GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (parent); ++ GstRtpRtxSend *rtx = GST_RTP_RTX_SEND_CAST (parent); + gboolean ret = FALSE; + + switch (mode) { + case GST_PAD_MODE_PUSH: + if (active) { +- gst_rtp_rtx_send_set_flushing (rtx, FALSE); +- ret = gst_pad_start_task (rtx->srcpad, +- (GstTaskFunction) gst_rtp_rtx_send_src_loop, rtx, NULL); ++ ret = gst_rtp_rtx_send_set_task_state (rtx, RTX_TASK_START); + } else { +- gst_rtp_rtx_send_set_flushing (rtx, TRUE); +- ret = gst_pad_stop_task (rtx->srcpad); ++ ret = gst_rtp_rtx_send_set_task_state (rtx, RTX_TASK_STOP); + } + GST_INFO_OBJECT (rtx, "activate_mode: active %d, ret %d", active, ret); + break; +@@ -862,7 +1159,7 @@ static void + gst_rtp_rtx_send_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec) + { +- GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (object); ++ GstRtpRtxSend *rtx = GST_RTP_RTX_SEND_CAST (object); + + switch (prop_id) { + case PROP_PAYLOAD_TYPE_MAP: +@@ -921,7 +1218,7 @@ static void + gst_rtp_rtx_send_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec) + { +- GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (object); ++ GstRtpRtxSend *rtx = GST_RTP_RTX_SEND_CAST (object); + + switch (prop_id) { + case PROP_SSRC_MAP: +@@ -940,6 +1237,12 @@ gst_rtp_rtx_send_set_property (GObject * object, + gst_structure_foreach (rtx->rtx_pt_map_structure, 
structure_to_hash_table, + rtx->rtx_pt_map); + GST_OBJECT_UNLOCK (rtx); ++ ++ if (IS_RTX_ENABLED (rtx)) ++ gst_rtp_rtx_send_set_task_state (rtx, RTX_TASK_START); ++ else ++ gst_rtp_rtx_send_set_task_state (rtx, RTX_TASK_STOP); ++ + break; + case PROP_MAX_SIZE_TIME: + GST_OBJECT_LOCK (rtx); +@@ -973,7 +1276,7 @@ gst_rtp_rtx_send_change_state (GstElement * element, GstStateChange transition) + GstStateChangeReturn ret; + GstRtpRtxSend *rtx; + +- rtx = GST_RTP_RTX_SEND (element); ++ rtx = GST_RTP_RTX_SEND_CAST (element); + + switch (transition) { + default: +@@ -994,13 +1297,3 @@ gst_rtp_rtx_send_change_state (GstElement * element, GstStateChange transition) + + return ret; + } +- +-gboolean +-gst_rtp_rtx_send_plugin_init (GstPlugin * plugin) +-{ +- GST_DEBUG_CATEGORY_INIT (gst_rtp_rtx_send_debug, "rtprtxsend", 0, +- "rtp retransmission sender"); +- +- return gst_element_register (plugin, "rtprtxsend", GST_RANK_NONE, +- GST_TYPE_RTP_RTX_SEND); +-} +diff --git a/gst/rtpmanager/gstrtprtxsend.h b/gst/rtpmanager/gstrtprtxsend.h +index b25a511b3..60a4ec5bb 100644 +--- a/gst/rtpmanager/gstrtprtxsend.h ++++ b/gst/rtpmanager/gstrtprtxsend.h +@@ -25,18 +25,21 @@ + #define __GST_RTP_RTX_SEND_H__ + + #include +-#include ++#include + #include + + G_BEGIN_DECLS ++ ++typedef struct _GstRtpRtxSend GstRtpRtxSend; ++typedef struct _GstRtpRtxSendClass GstRtpRtxSendClass; ++ + #define GST_TYPE_RTP_RTX_SEND (gst_rtp_rtx_send_get_type()) + #define GST_RTP_RTX_SEND(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_RTX_SEND, GstRtpRtxSend)) + #define GST_RTP_RTX_SEND_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_RTX_SEND, GstRtpRtxSendClass)) + #define GST_RTP_RTX_SEND_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_RTP_RTX_SEND, GstRtpRtxSendClass)) + #define GST_IS_RTP_RTX_SEND(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_RTX_SEND)) + #define GST_IS_RTP_RTX_SEND_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_RTX_SEND)) +-typedef struct _GstRtpRtxSend GstRtpRtxSend; +-typedef struct _GstRtpRtxSendClass GstRtpRtxSendClass; ++#define GST_RTP_RTX_SEND_CAST(obj) ((GstRtpRtxSend *)(obj)) + + struct _GstRtpRtxSend + { +@@ -74,6 +77,12 @@ struct _GstRtpRtxSend + /* statistics */ + guint num_rtx_requests; + guint num_rtx_packets; ++ ++ /* list of relevant RTP Header Extensions */ ++ GstRTPHeaderExtension *rid_stream; ++ GstRTPHeaderExtension *rid_repaired; ++ ++ GstBuffer *dummy_writable; + }; + + struct _GstRtpRtxSendClass +@@ -81,9 +90,9 @@ struct _GstRtpRtxSendClass + GstElementClass parent_class; + }; + +- + GType gst_rtp_rtx_send_get_type (void); +-gboolean gst_rtp_rtx_send_plugin_init (GstPlugin * plugin); ++ ++GST_ELEMENT_REGISTER_DECLARE (rtprtxsend); + + G_END_DECLS + #endif /* __GST_RTP_RTX_SEND_H__ */ +diff --git a/gst/rtpmanager/gstrtpsession.c b/gst/rtpmanager/gstrtpsession.c +index 0b9fc3d5d..a4fa9199a 100644 +--- a/gst/rtpmanager/gstrtpsession.c ++++ b/gst/rtpmanager/gstrtpsession.c +@@ -111,6 +111,7 @@ + + #include "gstrtpsession.h" + #include "rtpsession.h" ++#include "gstrtputils.h" + + GST_DEBUG_CATEGORY_STATIC (gst_rtp_session_debug); + #define GST_CAT_DEFAULT gst_rtp_session_debug +@@ -222,6 +223,7 @@ enum + #define DEFAULT_RTP_PROFILE GST_RTP_PROFILE_AVP + #define DEFAULT_NTP_TIME_SOURCE GST_RTP_NTP_TIME_SOURCE_NTP + #define DEFAULT_RTCP_SYNC_SEND_TIME TRUE ++#define DEFAULT_UPDATE_NTP64_HEADER_EXT TRUE + + enum + { +@@ -243,7 +245,8 @@ enum + PROP_TWCC_STATS, + PROP_RTP_PROFILE, + PROP_NTP_TIME_SOURCE, +- PROP_RTCP_SYNC_SEND_TIME ++ 
PROP_RTCP_SYNC_SEND_TIME, ++ PROP_UPDATE_NTP64_HEADER_EXT + }; + + #define GST_RTP_SESSION_LOCK(sess) g_mutex_lock (&(sess)->priv->lock) +@@ -271,6 +274,9 @@ struct _GstRtpSessionPrivate + GHashTable *ptmap; + + GstClockTime send_latency; ++ /* Set if we warned once already that no latency is configured yet but we ++ * need it to calculate correct send running time of the packets */ ++ gboolean warned_latency_once; + + gboolean use_pipeline_clock; + GstRtpNtpTimeSource ntp_time_source; +@@ -297,7 +303,7 @@ static GstFlowReturn gst_rtp_session_send_rtcp (RTPSession * sess, + RTPSource * src, GstBuffer * buffer, gboolean eos, gpointer user_data); + static GstFlowReturn gst_rtp_session_sync_rtcp (RTPSession * sess, + GstBuffer * buffer, gpointer user_data); +-static gint gst_rtp_session_clock_rate (RTPSession * sess, guint8 payload, ++static GstCaps *gst_rtp_session_caps (RTPSession * sess, guint8 payload, + gpointer user_data); + static void gst_rtp_session_reconsider (RTPSession * sess, gpointer user_data); + static void gst_rtp_session_request_key_unit (RTPSession * sess, guint32 ssrc, +@@ -327,7 +333,7 @@ static RTPSessionCallbacks callbacks = { + gst_rtp_session_send_rtp, + gst_rtp_session_sync_rtcp, + gst_rtp_session_send_rtcp, +- gst_rtp_session_clock_rate, ++ gst_rtp_session_caps, + gst_rtp_session_reconsider, + gst_rtp_session_request_key_unit, + gst_rtp_session_request_time, +@@ -351,7 +357,7 @@ static GstPad *gst_rtp_session_request_new_pad (GstElement * element, + GstPadTemplate * templ, const gchar * name, const GstCaps * caps); + static void gst_rtp_session_release_pad (GstElement * element, GstPad * pad); + +-static gboolean gst_rtp_session_sink_setcaps (GstPad * pad, ++static gboolean gst_rtp_session_setcaps_recv_rtp (GstPad * pad, + GstRtpSession * rtpsession, GstCaps * caps); + static gboolean gst_rtp_session_setcaps_send_rtp (GstPad * pad, + GstRtpSession * rtpsession, GstCaps * caps); +@@ -491,6 +497,8 @@ on_notify_stats (RTPSession * session, GParamSpec * spec, + + #define gst_rtp_session_parent_class parent_class + G_DEFINE_TYPE_WITH_PRIVATE (GstRtpSession, gst_rtp_session, GST_TYPE_ELEMENT); ++GST_ELEMENT_REGISTER_DEFINE (rtpsession, "rtpsession", GST_RANK_NONE, ++ GST_TYPE_RTP_SESSION); + + static void + gst_rtp_session_class_init (GstRtpSessionClass * klass) +@@ -772,7 +780,11 @@ gst_rtp_session_class_init (GstRtpSessionClass * klass) + * "packets-sent" G_TYPE_UINT Number of packets sent + * "packets-recv" G_TYPE_UINT Number of packets reported recevied + * "packet-loss-pct" G_TYPE_DOUBLE Packetloss percentage, based on +- * packets reported as lost from the recevier. ++ * packets reported as lost from the receiver. Note: depending on the ++ * implementation of the receiver and due to the nature of the TWCC ++ * RRs being sent with high frequency, out of order packets may not ++ * be fully accounted for and this number could be higher than other ++ * measurement sources of packet loss. + * "avg-delta-of-delta", G_TYPE_INT64 In nanoseconds, a moving window + * average of the difference in inter-packet spacing between + * sender and receiver. A sudden increase in this number can indicate +@@ -803,6 +815,22 @@ gst_rtp_session_class_init (GstRtpSessionClass * klass) + DEFAULT_RTCP_SYNC_SEND_TIME, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + ++ /** ++ * GstRtpSession:update-ntp64-header-ext: ++ * ++ * Whether RTP NTP header extension should be updated with actual ++ * NTP time. 
If not, use the NTP time from buffer timestamp metadata ++ * ++ * Since: 1.22 ++ */ ++ g_object_class_install_property (gobject_class, ++ PROP_UPDATE_NTP64_HEADER_EXT, ++ g_param_spec_boolean ("update-ntp64-header-ext", ++ "Update NTP-64 RTP Header Extension", ++ "Whether RTP NTP header extension should be updated with actual NTP time", ++ DEFAULT_UPDATE_NTP64_HEADER_EXT, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ + gstelement_class->change_state = + GST_DEBUG_FUNCPTR (gst_rtp_session_change_state); + gstelement_class->request_new_pad = +@@ -975,6 +1003,10 @@ gst_rtp_session_set_property (GObject * object, guint prop_id, + case PROP_RTCP_SYNC_SEND_TIME: + priv->rtcp_sync_send_time = g_value_get_boolean (value); + break; ++ case PROP_UPDATE_NTP64_HEADER_EXT: ++ g_object_set_property (G_OBJECT (priv->session), ++ "update-ntp64-header-ext", value); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -1054,6 +1086,10 @@ gst_rtp_session_get_property (GObject * object, guint prop_id, + case PROP_RTCP_SYNC_SEND_TIME: + g_value_set_boolean (value, priv->rtcp_sync_send_time); + break; ++ case PROP_UPDATE_NTP64_HEADER_EXT: ++ g_object_get_property (G_OBJECT (priv->session), ++ "update-ntp64-header-ext", value); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -1095,7 +1131,7 @@ get_current_times (GstRtpSession * rtpsession, GstClockTime * running_time, + if (rtpsession->priv->use_pipeline_clock) { + ntpns = rt; + /* add constant to convert from 1970 based time to 1900 based time */ +- ntpns += (2208988800LL * GST_SECOND); ++ ntpns += (GST_RTP_NTP_UNIX_OFFSET * GST_SECOND); + } else { + switch (rtpsession->priv->ntp_time_source) { + case GST_RTP_NTP_TIME_SOURCE_NTP: +@@ -1105,7 +1141,7 @@ get_current_times (GstRtpSession * rtpsession, GstClockTime * running_time, + + /* add constant to convert from 1970 based time to 1900 based time */ + if (rtpsession->priv->ntp_time_source == GST_RTP_NTP_TIME_SOURCE_NTP) +- ntpns += (2208988800LL * GST_SECOND); ++ ntpns += (GST_RTP_NTP_UNIX_OFFSET * GST_SECOND); + break; + } + case GST_RTP_NTP_TIME_SOURCE_RUNNING_TIME: +@@ -1303,6 +1339,8 @@ gst_rtp_session_change_state (GstElement * element, GstStateChange transition) + case GST_STATE_CHANGE_READY_TO_PAUSED: + GST_RTP_SESSION_LOCK (rtpsession); + rtpsession->priv->wait_send = TRUE; ++ rtpsession->priv->send_latency = GST_CLOCK_TIME_NONE; ++ rtpsession->priv->warned_latency_once = FALSE; + GST_RTP_SESSION_UNLOCK (rtpsession); + break; + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: +@@ -1670,41 +1708,12 @@ no_caps: + } + + /* called when the session manager needs the clock rate */ +-static gint +-gst_rtp_session_clock_rate (RTPSession * sess, guint8 payload, +- gpointer user_data) ++static GstCaps * ++gst_rtp_session_caps (RTPSession * sess, guint8 payload, gpointer user_data) + { +- gint result = -1; +- GstRtpSession *rtpsession; +- GstCaps *caps; +- const GstStructure *s; +- +- rtpsession = GST_RTP_SESSION_CAST (user_data); +- +- caps = gst_rtp_session_get_caps_for_pt (rtpsession, payload); ++ GstRtpSession *rtpsession = GST_RTP_SESSION_CAST (user_data); + +- if (!caps) +- goto done; +- +- s = gst_caps_get_structure (caps, 0); +- if (!gst_structure_get_int (s, "clock-rate", &result)) +- goto no_clock_rate; +- +- gst_caps_unref (caps); +- +- GST_DEBUG_OBJECT (rtpsession, "parsed clock-rate %d", result); +- +-done: +- +- return result; +- +- /* ERRORS */ +-no_clock_rate: +- { +- gst_caps_unref (caps); +- GST_DEBUG_OBJECT 
(rtpsession, "No clock-rate in caps!"); +- goto done; +- } ++ return gst_rtp_session_get_caps_for_pt (rtpsession, payload); + } + + /* called when the session manager asks us to reconsider the timeout */ +@@ -1741,7 +1750,7 @@ gst_rtp_session_event_recv_rtp_sink (GstPad * pad, GstObject * parent, + + /* process */ + gst_event_parse_caps (event, &caps); +- gst_rtp_session_sink_setcaps (pad, rtpsession, caps); ++ gst_rtp_session_setcaps_recv_rtp (pad, rtpsession, caps); + ret = gst_pad_push_event (rtpsession->recv_rtp_src, event); + break; + } +@@ -1959,7 +1968,7 @@ gst_rtp_session_iterate_internal_links (GstPad * pad, GstObject * parent) + } + + static gboolean +-gst_rtp_session_sink_setcaps (GstPad * pad, GstRtpSession * rtpsession, ++gst_rtp_session_setcaps_recv_rtp (GstPad * pad, GstRtpSession * rtpsession, + GstCaps * caps) + { + GST_RTP_SESSION_LOCK (rtpsession); +@@ -2396,6 +2405,8 @@ gst_rtp_session_chain_send_rtp_common (GstRtpSession * rtpsession, + GstFlowReturn ret; + GstClockTime timestamp, running_time; + GstClockTime current_time; ++ guint64 ntpnstime; ++ GstClock *clock; + + priv = rtpsession->priv; + +@@ -2421,16 +2432,104 @@ gst_rtp_session_chain_send_rtp_common (GstRtpSession * rtpsession, + running_time = + gst_segment_to_running_time (&rtpsession->send_rtp_seg, GST_FORMAT_TIME, + timestamp); +- if (priv->rtcp_sync_send_time) +- running_time += priv->send_latency; ++ if (priv->rtcp_sync_send_time) { ++ if (priv->send_latency != GST_CLOCK_TIME_NONE) { ++ running_time += priv->send_latency; ++ } else { ++ if (!priv->warned_latency_once) { ++ priv->warned_latency_once = TRUE; ++ GST_WARNING_OBJECT (rtpsession, ++ "Can't determine running time for this packet without knowing configured latency"); ++ } else { ++ GST_LOG_OBJECT (rtpsession, ++ "Can't determine running time for this packet without knowing configured latency"); ++ } ++ running_time = -1; ++ } ++ } + } else { + /* no timestamp. */ + running_time = -1; + } + + current_time = gst_clock_get_time (priv->sysclock); ++ ++ /* Calculate the NTP time of this packet based on the session configuration ++ * and the running time from above */ ++ GST_OBJECT_LOCK (rtpsession); ++ if (running_time != -1 && (clock = GST_ELEMENT_CLOCK (rtpsession))) { ++ GstClockTime base_time; ++ base_time = GST_ELEMENT_CAST (rtpsession)->base_time; ++ gst_object_ref (clock); ++ GST_OBJECT_UNLOCK (rtpsession); ++ ++ if (rtpsession->priv->use_pipeline_clock) { ++ ntpnstime = running_time; ++ /* add constant to convert from 1970 based time to 1900 based time */ ++ ntpnstime += (GST_RTP_NTP_UNIX_OFFSET * GST_SECOND); ++ } else { ++ switch (rtpsession->priv->ntp_time_source) { ++ case GST_RTP_NTP_TIME_SOURCE_NTP: ++ case GST_RTP_NTP_TIME_SOURCE_UNIX:{ ++ GstClockTime wallclock_now, pipeline_now; ++ ++ /* pipeline clock time for this packet */ ++ ntpnstime = running_time + base_time; ++ ++ /* get current wallclock and pipeline clock time */ ++ wallclock_now = g_get_real_time () * GST_USECOND; ++ pipeline_now = gst_clock_get_time (clock); ++ ++ /* adjust pipeline clock time by the current diff. 
++ * Note that this will include some jitter for each packet */ ++ if (wallclock_now > pipeline_now) { ++ GstClockTime diff = wallclock_now - pipeline_now; ++ ++ ntpnstime += diff; ++ } else { ++ GstClockTime diff = pipeline_now - wallclock_now; ++ ++ if (diff > ntpnstime) { ++ /* This can't really happen unless the clock configuration is ++ * broken */ ++ ntpnstime = GST_CLOCK_TIME_NONE; ++ } else { ++ ntpnstime -= diff; ++ } ++ } ++ ++ /* add constant to convert from 1970 based time to 1900 based time */ ++ if (ntpnstime != GST_CLOCK_TIME_NONE ++ && rtpsession->priv->ntp_time_source == ++ GST_RTP_NTP_TIME_SOURCE_NTP) ++ ntpnstime += (GST_RTP_NTP_UNIX_OFFSET * GST_SECOND); ++ break; ++ } ++ case GST_RTP_NTP_TIME_SOURCE_RUNNING_TIME: ++ ntpnstime = running_time; ++ break; ++ case GST_RTP_NTP_TIME_SOURCE_CLOCK_TIME: ++ ntpnstime = running_time + base_time; ++ break; ++ default: ++ ntpnstime = -1; ++ g_assert_not_reached (); ++ break; ++ } ++ } ++ ++ gst_object_unref (clock); ++ } else { ++ if (!GST_ELEMENT_CLOCK (rtpsession)) { ++ GST_WARNING_OBJECT (rtpsession, ++ "Don't have a clock yet and can't determine NTP time for this packet"); ++ } ++ GST_OBJECT_UNLOCK (rtpsession); ++ ntpnstime = GST_CLOCK_TIME_NONE; ++ } ++ + ret = rtp_session_send_rtp (priv->session, data, is_list, current_time, +- running_time); ++ running_time, ntpnstime); + if (ret != GST_FLOW_OK) + goto push_error; + +diff --git a/gst/rtpmanager/gstrtpsession.h b/gst/rtpmanager/gstrtpsession.h +index 599ec9a39..5a3ecfc1c 100644 +--- a/gst/rtpmanager/gstrtpsession.h ++++ b/gst/rtpmanager/gstrtpsession.h +@@ -80,6 +80,8 @@ struct _GstRtpSessionClass { + + GType gst_rtp_session_get_type (void); + ++GST_ELEMENT_REGISTER_DECLARE (rtpsession); ++ + typedef enum { + GST_RTP_NTP_TIME_SOURCE_NTP, + GST_RTP_NTP_TIME_SOURCE_UNIX, +diff --git a/gst/rtpmanager/gstrtpssrcdemux.c b/gst/rtpmanager/gstrtpssrcdemux.c +index 96d53490d..243449be2 100644 +--- a/gst/rtpmanager/gstrtpssrcdemux.c ++++ b/gst/rtpmanager/gstrtpssrcdemux.c +@@ -83,6 +83,10 @@ GST_STATIC_PAD_TEMPLATE ("rtcp_src_%u", + #define INTERNAL_STREAM_LOCK(obj) (g_rec_mutex_lock (&(obj)->padlock)) + #define INTERNAL_STREAM_UNLOCK(obj) (g_rec_mutex_unlock (&(obj)->padlock)) + ++#define GST_PAD_FLAG_STICKIES_SENT (GST_PAD_FLAG_LAST << 0) ++#define GST_PAD_STICKIES_SENT(pad) (GST_OBJECT_FLAG_IS_SET (pad, GST_PAD_FLAG_STICKIES_SENT)) ++#define GST_PAD_SET_STICKIES_SENT(pad) (GST_OBJECT_FLAG_SET (pad, GST_PAD_FLAG_STICKIES_SENT)) ++ + typedef enum + { + RTP_PAD, +@@ -107,6 +111,8 @@ enum + + #define gst_rtp_ssrc_demux_parent_class parent_class + G_DEFINE_TYPE (GstRtpSsrcDemux, gst_rtp_ssrc_demux, GST_TYPE_ELEMENT); ++GST_ELEMENT_REGISTER_DEFINE (rtpssrcdemux, "rtpssrcdemux", GST_RANK_NONE, ++ GST_TYPE_RTP_SSRC_DEMUX); + + /* GObject vmethods */ + static void gst_rtp_ssrc_demux_dispose (GObject * object); +@@ -143,24 +149,24 @@ static guint gst_rtp_ssrc_demux_signals[LAST_SIGNAL] = { 0 }; + /* + * Item for storing GstPad <-> SSRC pairs. 
+ */ +-struct _GstRtpSsrcDemuxPad ++typedef struct + { + guint32 ssrc; + GstPad *rtp_pad; + GstCaps *caps; + GstPad *rtcp_pad; +-}; ++} GstRtpSsrcDemuxPads; + + /* find a src pad for a given SSRC, returns NULL if the SSRC was not found + * MUST be called with object lock + */ +-static GstRtpSsrcDemuxPad * +-find_demux_pad_for_ssrc (GstRtpSsrcDemux * demux, guint32 ssrc) ++static GstRtpSsrcDemuxPads * ++find_demux_pads_for_ssrc (GstRtpSsrcDemux * demux, guint32 ssrc) + { + GSList *walk; + + for (walk = demux->srcpads; walk; walk = g_slist_next (walk)) { +- GstRtpSsrcDemuxPad *pad = (GstRtpSsrcDemuxPad *) walk->data; ++ GstRtpSsrcDemuxPads *pad = (GstRtpSsrcDemuxPads *) walk->data; + + if (pad->ssrc == ssrc) + return pad; +@@ -172,23 +178,23 @@ find_demux_pad_for_ssrc (GstRtpSsrcDemux * demux, guint32 ssrc) + static GstPad * + get_demux_pad_for_ssrc (GstRtpSsrcDemux * demux, guint32 ssrc, PadType padtype) + { +- GstRtpSsrcDemuxPad *demuxpad; ++ GstRtpSsrcDemuxPads *dpads; + GstPad *retpad; + + GST_OBJECT_LOCK (demux); + +- demuxpad = find_demux_pad_for_ssrc (demux, ssrc); +- if (!demuxpad) { ++ dpads = find_demux_pads_for_ssrc (demux, ssrc); ++ if (!dpads) { + GST_OBJECT_UNLOCK (demux); + return NULL; + } + + switch (padtype) { + case RTP_PAD: +- retpad = gst_object_ref (demuxpad->rtp_pad); ++ retpad = gst_object_ref (dpads->rtp_pad); + break; + case RTCP_PAD: +- retpad = gst_object_ref (demuxpad->rtcp_pad); ++ retpad = gst_object_ref (dpads->rtcp_pad); + break; + default: + retpad = NULL; +@@ -220,6 +226,30 @@ add_ssrc_and_ref (GstEvent * event, guint32 ssrc) + gst_caps_unref (newcaps); + break; + } ++ case GST_EVENT_STREAM_START: ++ { ++ const gchar *stream_id; ++ gchar *new_stream_id; ++ guint group_id; ++ GstStreamFlags flags; ++ GstEvent *new_event; ++ ++ gst_event_parse_stream_start (event, &stream_id); ++ ++ new_stream_id = ++ g_strdup_printf ("%s/%u", stream_id ? 
stream_id : "", ssrc); ++ new_event = gst_event_new_stream_start (new_stream_id); ++ g_free (new_stream_id); ++ ++ if (gst_event_parse_group_id (event, &group_id)) ++ gst_event_set_group_id (new_event, group_id); ++ gst_event_parse_stream_flags (event, &flags); ++ gst_event_set_stream_flags (new_event, flags); ++ ++ event = new_event; ++ ++ break; ++ } + default: + gst_event_ref (event); + break; +@@ -242,13 +272,11 @@ forward_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data) + GstEvent *newevent; + + newevent = add_ssrc_and_ref (*event, data->ssrc); +- + gst_pad_push_event (data->pad, newevent); + + return TRUE; + } + +-/* With internal stream lock held */ + static void + forward_initial_events (GstRtpSsrcDemux * demux, guint32 ssrc, GstPad * pad, + PadType padtype) +@@ -278,7 +306,7 @@ find_or_create_demux_pad_for_ssrc (GstRtpSsrcDemux * demux, guint32 ssrc, + GstElementClass *klass; + GstPadTemplate *templ; + gchar *padname; +- GstRtpSsrcDemuxPad *demuxpad; ++ GstRtpSsrcDemuxPads *dpads; + GstPad *retpad; + guint num_streams; + +@@ -311,16 +339,13 @@ find_or_create_demux_pad_for_ssrc (GstRtpSsrcDemux * demux, guint32 ssrc, + g_free (padname); + + /* wrap in structure and add to list */ +- demuxpad = g_new0 (GstRtpSsrcDemuxPad, 1); +- demuxpad->ssrc = ssrc; +- demuxpad->rtp_pad = rtp_pad; +- demuxpad->rtcp_pad = rtcp_pad; +- +- gst_pad_set_element_private (rtp_pad, demuxpad); +- gst_pad_set_element_private (rtcp_pad, demuxpad); ++ dpads = g_new0 (GstRtpSsrcDemuxPads, 1); ++ dpads->ssrc = ssrc; ++ dpads->rtp_pad = rtp_pad; ++ dpads->rtcp_pad = rtcp_pad; + + GST_OBJECT_LOCK (demux); +- demux->srcpads = g_slist_prepend (demux->srcpads, demuxpad); ++ demux->srcpads = g_slist_prepend (demux->srcpads, dpads); + GST_OBJECT_UNLOCK (demux); + + gst_pad_set_query_function (rtp_pad, gst_rtp_ssrc_demux_src_query); +@@ -336,18 +361,15 @@ find_or_create_demux_pad_for_ssrc (GstRtpSsrcDemux * demux, guint32 ssrc, + gst_pad_use_fixed_caps (rtcp_pad); + gst_pad_set_active (rtcp_pad, TRUE); + +- forward_initial_events (demux, ssrc, rtp_pad, RTP_PAD); +- forward_initial_events (demux, ssrc, rtcp_pad, RTCP_PAD); +- + gst_element_add_pad (GST_ELEMENT_CAST (demux), rtp_pad); + gst_element_add_pad (GST_ELEMENT_CAST (demux), rtcp_pad); + + switch (padtype) { + case RTP_PAD: +- retpad = gst_object_ref (demuxpad->rtp_pad); ++ retpad = gst_object_ref (dpads->rtp_pad); + break; + case RTCP_PAD: +- retpad = gst_object_ref (demuxpad->rtcp_pad); ++ retpad = gst_object_ref (dpads->rtcp_pad); + break; + default: + retpad = NULL; +@@ -514,21 +536,22 @@ gst_rtp_ssrc_demux_init (GstRtpSsrcDemux * demux) + } + + static void +-gst_rtp_ssrc_demux_reset (GstRtpSsrcDemux * demux) ++gst_rtp_ssrc_demux_pads_free (GstRtpSsrcDemuxPads * dpads) + { +- GSList *walk; ++ gst_pad_set_active (dpads->rtp_pad, FALSE); ++ gst_pad_set_active (dpads->rtcp_pad, FALSE); + +- for (walk = demux->srcpads; walk; walk = g_slist_next (walk)) { +- GstRtpSsrcDemuxPad *dpad = (GstRtpSsrcDemuxPad *) walk->data; ++ gst_element_remove_pad (GST_PAD_PARENT (dpads->rtp_pad), dpads->rtp_pad); ++ gst_element_remove_pad (GST_PAD_PARENT (dpads->rtcp_pad), dpads->rtcp_pad); + +- gst_pad_set_active (dpad->rtp_pad, FALSE); +- gst_pad_set_active (dpad->rtcp_pad, FALSE); ++ g_free (dpads); ++} + +- gst_element_remove_pad (GST_ELEMENT_CAST (demux), dpad->rtp_pad); +- gst_element_remove_pad (GST_ELEMENT_CAST (demux), dpad->rtcp_pad); +- g_free (dpad); +- } +- g_slist_free (demux->srcpads); ++static void ++gst_rtp_ssrc_demux_reset (GstRtpSsrcDemux * 
demux) ++{ ++ g_slist_free_full (demux->srcpads, ++ (GDestroyNotify) gst_rtp_ssrc_demux_pads_free); + demux->srcpads = NULL; + } + +@@ -558,31 +581,25 @@ gst_rtp_ssrc_demux_finalize (GObject * object) + static void + gst_rtp_ssrc_demux_clear_ssrc (GstRtpSsrcDemux * demux, guint32 ssrc) + { +- GstRtpSsrcDemuxPad *dpad; ++ GstRtpSsrcDemuxPads *dpads; + + GST_OBJECT_LOCK (demux); +- dpad = find_demux_pad_for_ssrc (demux, ssrc); +- if (dpad == NULL) { ++ dpads = find_demux_pads_for_ssrc (demux, ssrc); ++ if (dpads == NULL) { + GST_OBJECT_UNLOCK (demux); + goto unknown_pad; + } + + GST_DEBUG_OBJECT (demux, "clearing pad for SSRC %08x", ssrc); + +- demux->srcpads = g_slist_remove (demux->srcpads, dpad); ++ demux->srcpads = g_slist_remove (demux->srcpads, dpads); + GST_OBJECT_UNLOCK (demux); + +- gst_pad_set_active (dpad->rtp_pad, FALSE); +- gst_pad_set_active (dpad->rtcp_pad, FALSE); +- + g_signal_emit (G_OBJECT (demux), + gst_rtp_ssrc_demux_signals[SIGNAL_REMOVED_SSRC_PAD], 0, ssrc, +- dpad->rtp_pad); +- +- gst_element_remove_pad (GST_ELEMENT_CAST (demux), dpad->rtp_pad); +- gst_element_remove_pad (GST_ELEMENT_CAST (demux), dpad->rtcp_pad); ++ dpads->rtp_pad); + +- g_free (dpad); ++ gst_rtp_ssrc_demux_pads_free (dpads); + + return; + +@@ -609,12 +626,19 @@ forward_event (GstPad * pad, gpointer user_data) + GSList *walk = NULL; + GstEvent *newevent = NULL; + ++ /* special case for EOS */ ++ if (GST_EVENT_TYPE (fdata->event) == GST_EVENT_EOS) ++ GST_PAD_SET_STICKIES_SENT (pad); ++ ++ if (GST_EVENT_IS_STICKY (fdata->event) && !GST_PAD_STICKIES_SENT (pad)) ++ return FALSE; ++ + GST_OBJECT_LOCK (fdata->demux); + for (walk = fdata->demux->srcpads; walk; walk = walk->next) { +- GstRtpSsrcDemuxPad *dpad = (GstRtpSsrcDemuxPad *) walk->data; ++ GstRtpSsrcDemuxPads *dpads = (GstRtpSsrcDemuxPads *) walk->data; + +- if (pad == dpad->rtp_pad || pad == dpad->rtcp_pad) { +- newevent = add_ssrc_and_ref (fdata->event, dpad->ssrc); ++ if (pad == dpads->rtp_pad || pad == dpads->rtcp_pad) { ++ newevent = add_ssrc_and_ref (fdata->event, dpads->ssrc); + break; + } + } +@@ -671,6 +695,11 @@ gst_rtp_ssrc_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf) + if (srcpad == NULL) + goto create_failed; + ++ if (!GST_PAD_STICKIES_SENT (srcpad)) { ++ forward_initial_events (demux, ssrc, srcpad, RTP_PAD); ++ GST_PAD_SET_STICKIES_SENT (srcpad); ++ } ++ + /* push to srcpad */ + ret = gst_pad_push (srcpad, buf); + +@@ -761,6 +790,11 @@ gst_rtp_ssrc_demux_rtcp_chain (GstPad * pad, GstObject * parent, + if (srcpad == NULL) + goto create_failed; + ++ if (!GST_PAD_STICKIES_SENT (srcpad)) { ++ forward_initial_events (demux, ssrc, srcpad, RTCP_PAD); ++ GST_PAD_SET_STICKIES_SENT (srcpad); ++ } ++ + /* push to srcpad */ + ret = gst_pad_push (srcpad, buf); + +@@ -804,15 +838,15 @@ create_failed: + } + } + +-static GstRtpSsrcDemuxPad * ++static GstRtpSsrcDemuxPads * + find_demux_pad_for_pad (GstRtpSsrcDemux * demux, GstPad * pad) + { + GSList *walk; + + for (walk = demux->srcpads; walk; walk = g_slist_next (walk)) { +- GstRtpSsrcDemuxPad *dpad = (GstRtpSsrcDemuxPad *) walk->data; +- if (dpad->rtp_pad == pad || dpad->rtcp_pad == pad) { +- return dpad; ++ GstRtpSsrcDemuxPads *dpads = (GstRtpSsrcDemuxPads *) walk->data; ++ if (dpads->rtp_pad == pad || dpads->rtcp_pad == pad) { ++ return dpads; + } + } + +@@ -835,14 +869,14 @@ gst_rtp_ssrc_demux_src_event (GstPad * pad, GstObject * parent, + case GST_EVENT_CUSTOM_BOTH_OOB: + s = gst_event_get_structure (event); + if (s && !gst_structure_has_field (s, "ssrc")) { +- 
GstRtpSsrcDemuxPad *dpad = find_demux_pad_for_pad (demux, pad); ++ GstRtpSsrcDemuxPads *dpads = find_demux_pad_for_pad (demux, pad); + +- if (dpad) { ++ if (dpads) { + GstStructure *ws; + + event = gst_event_make_writable (event); + ws = gst_event_writable_structure (event); +- gst_structure_set (ws, "ssrc", G_TYPE_UINT, dpad->ssrc, NULL); ++ gst_structure_set (ws, "ssrc", G_TYPE_UINT, dpads->ssrc, NULL); + } + } + break; +@@ -865,12 +899,12 @@ gst_rtp_ssrc_demux_iterate_internal_links_src (GstPad * pad, GstObject * parent) + + GST_OBJECT_LOCK (demux); + for (current = demux->srcpads; current; current = g_slist_next (current)) { +- GstRtpSsrcDemuxPad *dpad = (GstRtpSsrcDemuxPad *) current->data; ++ GstRtpSsrcDemuxPads *dpads = (GstRtpSsrcDemuxPads *) current->data; + +- if (pad == dpad->rtp_pad) { ++ if (pad == dpads->rtp_pad) { + otherpad = demux->rtp_sink; + break; +- } else if (pad == dpad->rtcp_pad) { ++ } else if (pad == dpads->rtcp_pad) { + otherpad = demux->rtcp_sink; + break; + } +@@ -947,17 +981,12 @@ gst_rtp_ssrc_demux_src_query (GstPad * pad, GstObject * parent, + if ((res = gst_pad_peer_query (demux->rtp_sink, query))) { + gboolean live; + GstClockTime min_latency, max_latency; +- GstRtpSsrcDemuxPad *demuxpad; +- +- demuxpad = gst_pad_get_element_private (pad); + + gst_query_parse_latency (query, &live, &min_latency, &max_latency); + +- GST_DEBUG_OBJECT (demux, "peer min latency %" GST_TIME_FORMAT, ++ GST_DEBUG_OBJECT (pad, "peer min latency %" GST_TIME_FORMAT, + GST_TIME_ARGS (min_latency)); + +- GST_DEBUG_OBJECT (demux, "latency for SSRC %08x", demuxpad->ssrc); +- + gst_query_set_latency (query, live, min_latency, max_latency); + } + break; +diff --git a/gst/rtpmanager/gstrtpssrcdemux.h b/gst/rtpmanager/gstrtpssrcdemux.h +index 3bb210c8e..e7e347d25 100644 +--- a/gst/rtpmanager/gstrtpssrcdemux.h ++++ b/gst/rtpmanager/gstrtpssrcdemux.h +@@ -30,7 +30,6 @@ + + typedef struct _GstRtpSsrcDemux GstRtpSsrcDemux; + typedef struct _GstRtpSsrcDemuxClass GstRtpSsrcDemuxClass; +-typedef struct _GstRtpSsrcDemuxPad GstRtpSsrcDemuxPad; + + struct _GstRtpSsrcDemux + { +@@ -58,4 +57,6 @@ struct _GstRtpSsrcDemuxClass + + GType gst_rtp_ssrc_demux_get_type (void); + ++GST_ELEMENT_REGISTER_DECLARE (rtpssrcdemux); ++ + #endif /* __GST_RTP_SSRC_DEMUX_H__ */ +diff --git a/gst/rtpmanager/gstrtpst2022-1-fecdec.c b/gst/rtpmanager/gstrtpst2022-1-fecdec.c +new file mode 100644 +index 000000000..77b275fb1 +--- /dev/null ++++ b/gst/rtpmanager/gstrtpst2022-1-fecdec.c +@@ -0,0 +1,1013 @@ ++/* GStreamer ++ * Copyright (C) <2020> Mathieu Duponchelle ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++/** ++ * SECTION:element-rtpst2022-1-fecdec ++ * @see_also: #element-rtpst2022-1-fecenc ++ * ++ * This element takes as input a media stream and up to two FEC ++ * streams as described in SMPTE 2022-1: Forward Error Correction ++ * for Real-Time Video/Audio Transport Over IP Networks, and makes ++ * use of the FEC packets to recover media packets that may have ++ * gotten lost. ++ * ++ * ## Design ++ * ++ * The approach picked for this element is to proactively reconstruct missing ++ * packets as soon as possible. When a FEC packet arrives, the element ++ * immediately checks whether a media packet in the row / column it protects ++ * can be reconstructed. ++ * ++ * Similarly, when a media packet comes in, the element checks whether it has ++ * already received a corresponding packet in both the column and row the packet ++ * belongs to, and if so goes through the first step listed above. ++ * ++ * This process is repeated recursively, allowing for recoveries over one ++ * dimension to unblock recoveries over the other. ++ * ++ * In perfect networking conditions, this incurs next to no overhead as FEC ++ * packets will arrive after the media packets, causing no reconstruction to ++ * take place, just a few checks upon chaining. ++ * ++ * ## sender / receiver example ++ * ++ * ``` shell ++ * gst-launch-1.0 \ ++ * rtpbin name=rtp fec-encoders='fec,0="rtpst2022-1-fecenc\ rows\=5\ columns\=5";' \ ++ * uridecodebin uri=file:///path/to/video/file ! x264enc key-int-max=60 tune=zerolatency ! \ ++ * queue ! mpegtsmux ! rtpmp2tpay ssrc=0 ! rtp.send_rtp_sink_0 \ ++ * rtp.send_rtp_src_0 ! udpsink host=127.0.0.1 port=5000 \ ++ * rtp.send_fec_src_0_0 ! udpsink host=127.0.0.1 port=5002 async=false \ ++ * rtp.send_fec_src_0_1 ! udpsink host=127.0.0.1 port=5004 async=false ++ * ``` ++ * ++ * ``` shell ++ * gst-launch-1.0 \ ++ * rtpbin latency=500 fec-decoders='fec,0="rtpst2022-1-fecdec\ size-time\=1000000000";' name=rtp \ ++ * udpsrc address=127.0.0.1 port=5002 caps="application/x-rtp, payload=96" ! queue ! rtp.recv_fec_sink_0_0 \ ++ * udpsrc address=127.0.0.1 port=5004 caps="application/x-rtp, payload=96" ! queue ! rtp.recv_fec_sink_0_1 \ ++ * udpsrc address=127.0.0.1 port=5000 caps="application/x-rtp, media=video, clock-rate=90000, encoding-name=mp2t, payload=33" ! \ ++ * queue ! netsim drop-probability=0.05 ! rtp.recv_rtp_sink_0 \ ++ * rtp. ! decodebin ! videoconvert ! queue ! autovideosink ++ * ``` ++ * ++ * With the above command line, as the media packet size is constant, ++ * the fec overhead can be approximated to the number of fec packets ++ * per 2-d matrix of media packet, here 10 fec packets for each 25 ++ * media packets. ++ * ++ * Increasing the number of rows and columns will decrease the overhead, ++ * but obviously increase the likelihood of recovery failure for lost ++ * packets on the receiver side. 
++ * ++ * Since: 1.20 ++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include ++#include ++ ++#include "gstrtpst2022-1-fecdec.h" ++ ++GST_DEBUG_CATEGORY_STATIC (gst_rtpst_2022_1_fecdec_debug); ++#define GST_CAT_DEFAULT gst_rtpst_2022_1_fecdec_debug ++ ++#define DEFAULT_SIZE_TIME (GST_SECOND) ++ ++typedef struct ++{ ++ guint16 seq; ++ GstBuffer *buffer; ++} Item; ++ ++static GstFlowReturn store_media_item (GstRTPST_2022_1_FecDec * dec, ++ GstRTPBuffer * rtp, Item * item); ++ ++static void ++free_item (Item * item) ++{ ++ gst_buffer_unref (item->buffer); ++ item->buffer = NULL; ++ g_free (item); ++} ++ ++static gint ++cmp_items (Item * a, Item * b, gpointer unused) ++{ ++ return gst_rtp_buffer_compare_seqnum (b->seq, a->seq); ++} ++ ++enum ++{ ++ PROP_0, ++ PROP_SIZE_TIME, ++}; ++ ++struct _GstRTPST_2022_1_FecDecClass ++{ ++ GstElementClass class; ++}; ++ ++struct _GstRTPST_2022_1_FecDec ++{ ++ GstElement element; ++ ++ GstPad *srcpad; ++ GstPad *sinkpad; ++ GList *fec_sinkpads; ++ ++ /* All the following field are protected by the OBJECT_LOCK */ ++ GSequence *packets; ++ GHashTable *column_fec_packets; ++ GSequence *fec_packets[2]; ++ /* N columns */ ++ guint l; ++ /* N rows */ ++ guint d; ++ ++ GstClockTime size_time; ++ GstClockTime max_arrival_time; ++ GstClockTime max_fec_arrival_time[2]; ++}; ++ ++#define RTP_CAPS "application/x-rtp" ++ ++typedef struct ++{ ++ guint16 seq; ++ guint16 len; ++ guint8 E; ++ guint8 pt; ++ guint32 mask; ++ guint32 timestamp; ++ guint8 N; ++ guint8 D; ++ guint8 type; ++ guint8 index; ++ guint8 offset; ++ guint8 NA; ++ guint8 seq_ext; ++ guint8 *payload; ++ guint payload_len; ++ gboolean marker; ++ gboolean padding; ++ gboolean extension; ++} Rtp2DFecHeader; ++ ++static GstStaticPadTemplate fec_sink_template = ++GST_STATIC_PAD_TEMPLATE ("fec_%u", ++ GST_PAD_SINK, ++ GST_PAD_REQUEST, ++ GST_STATIC_CAPS (RTP_CAPS)); ++ ++static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink", ++ GST_PAD_SINK, ++ GST_PAD_ALWAYS, ++ GST_STATIC_CAPS (RTP_CAPS)); ++ ++static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src", ++ GST_PAD_SRC, ++ GST_PAD_ALWAYS, ++ GST_STATIC_CAPS (RTP_CAPS)); ++ ++#define gst_rtpst_2022_1_fecdec_parent_class parent_class ++G_DEFINE_TYPE (GstRTPST_2022_1_FecDec, gst_rtpst_2022_1_fecdec, ++ GST_TYPE_ELEMENT); ++GST_ELEMENT_REGISTER_DEFINE (rtpst2022_1_fecdec, "rtpst2022-1-fecdec", ++ GST_RANK_NONE, GST_TYPE_RTPST_2022_1_FECDEC); ++ ++static void ++trim_items (GstRTPST_2022_1_FecDec * dec) ++{ ++ GSequenceIter *tmp_iter, *iter = NULL; ++ ++ for (tmp_iter = g_sequence_get_begin_iter (dec->packets); ++ tmp_iter; tmp_iter = g_sequence_iter_next (tmp_iter)) { ++ Item *item; ++ ++ if (g_sequence_iter_is_end (tmp_iter)) ++ break; ++ ++ item = g_sequence_get (tmp_iter); ++ ++ if (dec->max_arrival_time - GST_BUFFER_DTS_OR_PTS (item->buffer) < ++ dec->size_time) ++ break; ++ ++ iter = tmp_iter; ++ } ++ ++ if (iter) { ++ Item *item = g_sequence_get (iter); ++ GST_TRACE_OBJECT (dec, ++ "Trimming packets up to %" GST_TIME_FORMAT " (seq: %u)", ++ GST_TIME_ARGS (GST_BUFFER_DTS_OR_PTS (item->buffer)), item->seq); ++ g_sequence_remove_range (g_sequence_get_begin_iter (dec->packets), ++ g_sequence_iter_next (iter)); ++ } ++} ++ ++static void ++trim_fec_items (GstRTPST_2022_1_FecDec * dec, guint D) ++{ ++ GSequenceIter *tmp_iter, *iter = NULL; ++ ++ for (tmp_iter = g_sequence_get_begin_iter (dec->fec_packets[D]); ++ tmp_iter; tmp_iter = g_sequence_iter_next (tmp_iter)) { ++ Item *item; ++ ++ if 
(g_sequence_iter_is_end (tmp_iter)) ++ break; ++ ++ item = g_sequence_get (tmp_iter); ++ ++ if (dec->max_fec_arrival_time[D] - GST_BUFFER_DTS_OR_PTS (item->buffer) < ++ dec->size_time) ++ break; ++ ++ if (!D) { ++ guint i; ++ guint16 seq; ++ ++ for (i = 0; i < dec->d; i++) { ++ seq = item->seq + i * dec->l; ++ g_hash_table_remove (dec->column_fec_packets, GUINT_TO_POINTER (seq)); ++ } ++ } ++ ++ iter = tmp_iter; ++ } ++ ++ if (iter) { ++ Item *item = g_sequence_get (iter); ++ GST_TRACE_OBJECT (dec, ++ "Trimming %s FEC packets up to %" GST_TIME_FORMAT " (seq: %u)", ++ D ? "row" : "column", ++ GST_TIME_ARGS (GST_BUFFER_DTS_OR_PTS (item->buffer)), item->seq); ++ g_sequence_remove_range (g_sequence_get_begin_iter (dec->fec_packets[D]), ++ g_sequence_iter_next (iter)); ++ } ++} ++ ++static Item * ++lookup_media_packet (GstRTPST_2022_1_FecDec * dec, guint16 seqnum) ++{ ++ GSequenceIter *iter; ++ Item *ret = NULL; ++ Item dummy = { seqnum, NULL }; ++ ++ iter = ++ g_sequence_lookup (dec->packets, &dummy, (GCompareDataFunc) cmp_items, ++ NULL); ++ ++ if (iter) ++ ret = g_sequence_get (iter); ++ ++ return ret; ++} ++ ++static gboolean ++parse_header (GstRTPBuffer * rtp, Rtp2DFecHeader * fec) ++{ ++ gboolean ret = FALSE; ++ GstBitReader bits; ++ guint8 *data = gst_rtp_buffer_get_payload (rtp); ++ guint len = gst_rtp_buffer_get_payload_len (rtp); ++ ++ if (len < 16) ++ goto done; ++ ++ gst_bit_reader_init (&bits, data, len); ++ ++ fec->marker = gst_rtp_buffer_get_marker (rtp); ++ fec->padding = gst_rtp_buffer_get_padding (rtp); ++ fec->extension = gst_rtp_buffer_get_extension (rtp); ++ fec->seq = gst_bit_reader_get_bits_uint16_unchecked (&bits, 16); ++ fec->len = gst_bit_reader_get_bits_uint16_unchecked (&bits, 16); ++ fec->E = gst_bit_reader_get_bits_uint8_unchecked (&bits, 1); ++ fec->pt = gst_bit_reader_get_bits_uint8_unchecked (&bits, 7); ++ fec->mask = gst_bit_reader_get_bits_uint32_unchecked (&bits, 24); ++ fec->timestamp = gst_bit_reader_get_bits_uint32_unchecked (&bits, 32); ++ fec->N = gst_bit_reader_get_bits_uint8_unchecked (&bits, 1); ++ fec->D = gst_bit_reader_get_bits_uint8_unchecked (&bits, 1); ++ fec->type = gst_bit_reader_get_bits_uint8_unchecked (&bits, 3); ++ fec->index = gst_bit_reader_get_bits_uint8_unchecked (&bits, 3); ++ fec->offset = gst_bit_reader_get_bits_uint8_unchecked (&bits, 8); ++ fec->NA = gst_bit_reader_get_bits_uint8_unchecked (&bits, 8); ++ fec->seq_ext = gst_bit_reader_get_bits_uint8_unchecked (&bits, 8); ++ fec->payload = data + 16; ++ fec->payload_len = len - 16; ++ ++ ret = TRUE; ++ ++done: ++ return ret; ++} ++ ++static Item * ++get_row_fec (GstRTPST_2022_1_FecDec * dec, guint16 seqnum) ++{ ++ GSequenceIter *iter; ++ Item *ret = NULL; ++ Item dummy = { 0, }; ++ ++ if (dec->l == G_MAXUINT) ++ goto done; ++ ++ /* Potential underflow is intended */ ++ dummy.seq = seqnum - dec->l; ++ ++ iter = ++ g_sequence_search (dec->fec_packets[1], &dummy, ++ (GCompareDataFunc) cmp_items, NULL); ++ ++ if (!g_sequence_iter_is_end (iter)) { ++ gint seqdiff; ++ ret = g_sequence_get (iter); ++ ++ seqdiff = gst_rtp_buffer_compare_seqnum (ret->seq, seqnum); ++ ++ /* Now check whether the fec packet does apply */ ++ if (seqdiff < 0 || seqdiff >= dec->l) ++ ret = NULL; ++ } ++ ++done: ++ return ret; ++} ++ ++static Item * ++get_column_fec (GstRTPST_2022_1_FecDec * dec, guint16 seqnum) ++{ ++ Item *ret = NULL; ++ ++ if (dec->l == G_MAXUINT || dec->d == G_MAXUINT) ++ goto done; ++ ++ ret = ++ g_hash_table_lookup (dec->column_fec_packets, GUINT_TO_POINTER (seqnum)); ++ ++done: ++ return 
ret; ++} ++ ++static void ++_xor_mem (guint8 * restrict dst, const guint8 * restrict src, gsize length) ++{ ++ guint i; ++ ++ for (i = 0; i < (length / sizeof (guint64)); ++i) { ++#if G_BYTE_ORDER == G_LITTLE_ENDIAN ++ GST_WRITE_UINT64_LE (dst, ++ GST_READ_UINT64_LE (dst) ^ GST_READ_UINT64_LE (src)); ++#else ++ GST_WRITE_UINT64_BE (dst, ++ GST_READ_UINT64_BE (dst) ^ GST_READ_UINT64_BE (src)); ++#endif ++ dst += sizeof (guint64); ++ src += sizeof (guint64); ++ } ++ for (i = 0; i < (length % sizeof (guint64)); ++i) ++ dst[i] ^= src[i]; ++} ++ ++static GstFlowReturn ++xor_items (GstRTPST_2022_1_FecDec * dec, Rtp2DFecHeader * fec, GList * packets, ++ guint16 seqnum) ++{ ++ guint8 *xored; ++ guint32 xored_timestamp; ++ guint8 xored_pt; ++ guint16 xored_payload_len; ++ Item *item; ++ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ GList *tmp; ++ GstFlowReturn ret = GST_FLOW_OK; ++ GstBuffer *buffer; ++ gboolean xored_marker; ++ gboolean xored_padding; ++ gboolean xored_extension; ++ ++ /* Figure out the recovered packet length first */ ++ xored_payload_len = fec->len; ++ for (tmp = packets; tmp; tmp = tmp->next) { ++ GstRTPBuffer media_rtp = GST_RTP_BUFFER_INIT; ++ Item *item = (Item *) tmp->data; ++ ++ gst_rtp_buffer_map (item->buffer, GST_MAP_READ, &media_rtp); ++ xored_payload_len ^= gst_rtp_buffer_get_payload_len (&media_rtp); ++ gst_rtp_buffer_unmap (&media_rtp); ++ } ++ ++ if (xored_payload_len > fec->payload_len) { ++ GST_WARNING_OBJECT (dec, "FEC payload len %u < length recovery %u", ++ fec->payload_len, xored_payload_len); ++ goto done; ++ } ++ ++ item = g_malloc0 (sizeof (Item)); ++ item->seq = seqnum; ++ item->buffer = gst_rtp_buffer_new_allocate (xored_payload_len, 0, 0); ++ gst_rtp_buffer_map (item->buffer, GST_MAP_WRITE, &rtp); ++ ++ xored = gst_rtp_buffer_get_payload (&rtp); ++ memcpy (xored, fec->payload, xored_payload_len); ++ xored_timestamp = fec->timestamp; ++ xored_pt = fec->pt; ++ xored_marker = fec->marker; ++ xored_padding = fec->padding; ++ xored_extension = fec->extension; ++ ++ for (tmp = packets; tmp; tmp = tmp->next) { ++ GstRTPBuffer media_rtp = GST_RTP_BUFFER_INIT; ++ Item *item = (Item *) tmp->data; ++ ++ gst_rtp_buffer_map (item->buffer, GST_MAP_READ, &media_rtp); ++ _xor_mem (xored, gst_rtp_buffer_get_payload (&media_rtp), ++ MIN (gst_rtp_buffer_get_payload_len (&media_rtp), xored_payload_len)); ++ xored_timestamp ^= gst_rtp_buffer_get_timestamp (&media_rtp); ++ xored_pt ^= gst_rtp_buffer_get_payload_type (&media_rtp); ++ xored_marker ^= gst_rtp_buffer_get_marker (&media_rtp); ++ xored_padding ^= gst_rtp_buffer_get_padding (&media_rtp); ++ xored_extension ^= gst_rtp_buffer_get_extension (&media_rtp); ++ ++ gst_rtp_buffer_unmap (&media_rtp); ++ } ++ ++ GST_DEBUG_OBJECT (dec, ++ "Recovered buffer through %s FEC with seqnum %u, payload len %u and timestamp %u", ++ fec->D ? 
"row" : "column", seqnum, xored_payload_len, xored_timestamp); ++ ++ GST_BUFFER_DTS (item->buffer) = dec->max_arrival_time; ++ ++ gst_rtp_buffer_set_timestamp (&rtp, xored_timestamp); ++ gst_rtp_buffer_set_seq (&rtp, seqnum); ++ gst_rtp_buffer_set_payload_type (&rtp, xored_pt); ++ gst_rtp_buffer_set_marker (&rtp, xored_marker); ++ gst_rtp_buffer_set_padding (&rtp, xored_padding); ++ gst_rtp_buffer_set_extension (&rtp, xored_extension); ++ ++ gst_rtp_buffer_unmap (&rtp); ++ ++ /* Store a ref on item->buffer as store_media_item may ++ * recurse and call this method again, potentially releasing ++ * the object lock and leaving our item unprotected in ++ * dec->packets ++ */ ++ buffer = gst_buffer_ref (item->buffer); ++ ++ /* It is right that we should celebrate, ++ * for your brother was dead, and is alive again */ ++ gst_rtp_buffer_map (item->buffer, GST_MAP_READ, &rtp); ++ ret = store_media_item (dec, &rtp, item); ++ gst_rtp_buffer_unmap (&rtp); ++ ++ if (ret == GST_FLOW_OK) { ++ /* Unlocking here is safe */ ++ GST_OBJECT_UNLOCK (dec); ++ ret = gst_pad_push (dec->srcpad, buffer); ++ GST_OBJECT_LOCK (dec); ++ } else { ++ gst_buffer_unref (buffer); ++ } ++ ++done: ++ return ret; ++} ++ ++/* Returns a flow value if we should discard the packet, GST_FLOW_CUSTOM_SUCCESS otherwise */ ++static GstFlowReturn ++check_fec (GstRTPST_2022_1_FecDec * dec, Rtp2DFecHeader * fec) ++{ ++ GList *packets = NULL; ++ gint missing_seq = -1; ++ guint n_packets = 0; ++ guint required_n_packets; ++ GstFlowReturn ret = GST_FLOW_OK; ++ ++ if (fec->D) { ++ guint i = 0; ++ ++ required_n_packets = dec->l; ++ ++ for (i = 0; i < dec->l; i++) { ++ Item *item = lookup_media_packet (dec, fec->seq + i); ++ ++ if (item) { ++ packets = g_list_prepend (packets, item); ++ n_packets += 1; ++ } else { ++ missing_seq = fec->seq + i; ++ } ++ } ++ } else { ++ guint i = 0; ++ ++ required_n_packets = dec->d; ++ ++ for (i = 0; i < dec->d; i++) { ++ Item *item = lookup_media_packet (dec, fec->seq + i * dec->l); ++ ++ if (item) { ++ packets = g_list_prepend (packets, item); ++ n_packets += 1; ++ } else { ++ missing_seq = fec->seq + i * dec->l; ++ } ++ } ++ } ++ ++ if (n_packets == required_n_packets) { ++ g_assert (missing_seq == -1); ++ GST_LOG_OBJECT (dec, ++ "All media packets present, we can discard that FEC packet"); ++ } else if (n_packets + 1 == required_n_packets) { ++ g_assert (missing_seq != -1); ++ ret = xor_items (dec, fec, packets, missing_seq); ++ GST_LOG_OBJECT (dec, "We have enough info to reconstruct %u", missing_seq); ++ } else { ++ ret = GST_FLOW_CUSTOM_SUCCESS; ++ GST_LOG_OBJECT (dec, "Too many media packets missing, storing FEC packet"); ++ } ++ g_list_free (packets); ++ ++ return ret; ++} ++ ++static GstFlowReturn ++check_fec_item (GstRTPST_2022_1_FecDec * dec, Item * item) ++{ ++ Rtp2DFecHeader fec; ++ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ GstFlowReturn ret; ++ ++ gst_rtp_buffer_map (item->buffer, GST_MAP_READ, &rtp); ++ ++ parse_header (&rtp, &fec); ++ ++ ret = check_fec (dec, &fec); ++ ++ gst_rtp_buffer_unmap (&rtp); ++ ++ return ret; ++} ++ ++static GstFlowReturn ++store_media_item (GstRTPST_2022_1_FecDec * dec, GstRTPBuffer * rtp, Item * item) ++{ ++ GstFlowReturn ret = GST_FLOW_OK; ++ Item *fec_item; ++ guint16 seq; ++ ++ seq = gst_rtp_buffer_get_seq (rtp); ++ ++ g_sequence_insert_sorted (dec->packets, item, (GCompareDataFunc) cmp_items, ++ NULL); ++ ++ if ((fec_item = get_row_fec (dec, seq))) { ++ ret = check_fec_item (dec, fec_item); ++ if (ret == GST_FLOW_CUSTOM_SUCCESS) ++ ret = GST_FLOW_OK; ++ } ++ ++ 
if (ret == GST_FLOW_OK && (fec_item = get_column_fec (dec, seq))) { ++ ret = check_fec_item (dec, fec_item); ++ if (ret == GST_FLOW_CUSTOM_SUCCESS) ++ ret = GST_FLOW_OK; ++ } ++ ++ return ret; ++} ++ ++static GstFlowReturn ++store_media (GstRTPST_2022_1_FecDec * dec, GstRTPBuffer * rtp, ++ GstBuffer * buffer) ++{ ++ Item *item; ++ guint16 seq; ++ ++ seq = gst_rtp_buffer_get_seq (rtp); ++ item = g_malloc0 (sizeof (Item)); ++ item->buffer = gst_buffer_ref (buffer); ++ item->seq = seq; ++ ++ return store_media_item (dec, rtp, item); ++} ++ ++static GstFlowReturn ++gst_rtpst_2022_1_fecdec_sink_chain_fec (GstPad * pad, GstObject * parent, ++ GstBuffer * buffer) ++{ ++ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (parent); ++ Rtp2DFecHeader fec = { 0, }; ++ guint payload_len; ++ guint8 *payload; ++ GstFlowReturn ret = GST_FLOW_OK; ++ Item *item; ++ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ ++ GST_OBJECT_LOCK (dec); ++ ++ if (!gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp)) { ++ GST_WARNING_OBJECT (pad, "Chained FEC buffer isn't valid RTP"); ++ goto discard; ++ } ++ ++ payload_len = gst_rtp_buffer_get_payload_len (&rtp); ++ payload = gst_rtp_buffer_get_payload (&rtp); ++ ++ if (!parse_header (&rtp, &fec)) { ++ GST_WARNING_OBJECT (pad, "Failed to parse FEC header (payload len: %d)", ++ payload_len); ++ GST_MEMDUMP_OBJECT (pad, "Invalid payload", payload, payload_len); ++ goto discard; ++ } ++ ++ GST_TRACE_OBJECT ++ (pad, ++ "Handling FEC buffer with SNBase / N / D / NA / offset %u / %u / %u / %u / %u", ++ fec.seq, fec.N, fec.D, fec.NA, fec.offset); ++ ++ if (fec.D) { ++ if (dec->l == G_MAXUINT) { ++ dec->l = fec.NA; ++ } else if (fec.NA != dec->l) { ++ GST_WARNING_OBJECT (dec, "2D FEC dimensionality cannot change"); ++ goto discard; ++ } ++ ++ if (fec.offset != 1) { ++ GST_WARNING_OBJECT (pad, "offset must be 1 for row FEC packets"); ++ goto discard; ++ } ++ } else { ++ if (dec->d == G_MAXUINT) { ++ dec->d = fec.NA; ++ } else if (fec.NA != dec->d) { ++ GST_WARNING_OBJECT (dec, "2D FEC dimensionality cannot change"); ++ goto discard; ++ } ++ ++ if (dec->l == G_MAXUINT) { ++ dec->l = fec.offset; ++ } else if (fec.offset != dec->l) { ++ GST_WARNING_OBJECT (dec, "2D FEC dimensionality cannot change"); ++ goto discard; ++ } ++ } ++ ++ dec->max_fec_arrival_time[fec.D] = GST_BUFFER_DTS_OR_PTS (buffer); ++ trim_fec_items (dec, fec.D); ++ ++ ret = check_fec (dec, &fec); ++ ++ if (ret == GST_FLOW_CUSTOM_SUCCESS) { ++ item = g_malloc0 (sizeof (Item)); ++ item->buffer = buffer; ++ item->seq = fec.seq; ++ ++ if (!fec.D) { ++ guint i; ++ guint16 seq; ++ ++ for (i = 0; i < dec->d; i++) { ++ seq = fec.seq + i * dec->l; ++ g_hash_table_insert (dec->column_fec_packets, GUINT_TO_POINTER (seq), ++ item); ++ } ++ } ++ g_sequence_insert_sorted (dec->fec_packets[fec.D], item, ++ (GCompareDataFunc) cmp_items, NULL); ++ ret = GST_FLOW_OK; ++ } else { ++ goto discard; ++ } ++ ++ gst_rtp_buffer_unmap (&rtp); ++ ++done: ++ GST_OBJECT_UNLOCK (dec); ++ return ret; ++ ++discard: ++ if (rtp.buffer != NULL) ++ gst_rtp_buffer_unmap (&rtp); ++ ++ gst_buffer_unref (buffer); ++ ++ goto done; ++} ++ ++static GstFlowReturn ++gst_rtpst_2022_1_fecdec_sink_chain (GstPad * pad, GstObject * parent, ++ GstBuffer * buffer) ++{ ++ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (parent); ++ GstFlowReturn ret = GST_FLOW_OK; ++ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ ++ if (!gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp)) { ++ GST_WARNING_OBJECT (pad, "Chained buffer isn't valid RTP"); ++ goto error; ++ } ++ ++ 
GST_OBJECT_LOCK (dec); ++ dec->max_arrival_time = ++ MAX (dec->max_arrival_time, GST_BUFFER_DTS_OR_PTS (buffer)); ++ trim_items (dec); ++ ret = store_media (dec, &rtp, buffer); ++ GST_OBJECT_UNLOCK (dec); ++ ++ gst_rtp_buffer_unmap (&rtp); ++ ++ if (ret == GST_FLOW_OK) ++ ret = gst_pad_push (dec->srcpad, buffer); ++ ++done: ++ return ret; ++ ++error: ++ gst_buffer_unref (buffer); ++ goto done; ++} ++ ++static gboolean ++gst_rtpst_2022_1_fecdec_src_event (GstPad * pad, GstObject * parent, ++ GstEvent * event) ++{ ++ gboolean handled = FALSE; ++ gboolean ret = TRUE; ++ ++ if (!handled) { ++ gst_pad_event_default (pad, parent, event); ++ } ++ ++ return ret; ++} ++ ++/* Takes the object lock */ ++static void ++gst_rtpst_2022_1_fecdec_reset (GstRTPST_2022_1_FecDec * dec, gboolean allocate) ++{ ++ guint i; ++ ++ GST_OBJECT_LOCK (dec); ++ ++ if (dec->packets) { ++ g_sequence_free (dec->packets); ++ dec->packets = NULL; ++ } ++ ++ if (dec->column_fec_packets) { ++ g_hash_table_unref (dec->column_fec_packets); ++ dec->column_fec_packets = NULL; ++ } ++ ++ if (allocate) { ++ dec->packets = g_sequence_new ((GDestroyNotify) free_item); ++ dec->column_fec_packets = g_hash_table_new (g_direct_hash, g_direct_equal); ++ } ++ ++ for (i = 0; i < 2; i++) { ++ if (dec->fec_packets[i]) { ++ g_sequence_free (dec->fec_packets[i]); ++ dec->fec_packets[i] = NULL; ++ } ++ ++ if (allocate) ++ dec->fec_packets[i] = g_sequence_new ((GDestroyNotify) free_item); ++ } ++ ++ dec->d = G_MAXUINT; ++ dec->l = G_MAXUINT; ++ ++ GST_OBJECT_UNLOCK (dec); ++} ++ ++static GstStateChangeReturn ++gst_rtpst_2022_1_fecdec_change_state (GstElement * element, ++ GstStateChange transition) ++{ ++ GstStateChangeReturn ret; ++ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (element); ++ ++ switch (transition) { ++ case GST_STATE_CHANGE_READY_TO_PAUSED: ++ gst_rtpst_2022_1_fecdec_reset (dec, TRUE); ++ break; ++ case GST_STATE_CHANGE_PAUSED_TO_READY: ++ gst_rtpst_2022_1_fecdec_reset (dec, FALSE); ++ break; ++ default: ++ break; ++ } ++ ++ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); ++ ++ return ret; ++} ++ ++static void ++gst_rtpst_2022_1_fecdec_finalize (GObject * object) ++{ ++ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (object); ++ ++ gst_rtpst_2022_1_fecdec_reset (dec, FALSE); ++ ++ G_OBJECT_CLASS (parent_class)->finalize (object); ++} ++ ++static void ++gst_rtpst_2022_1_fecdec_set_property (GObject * object, guint prop_id, ++ const GValue * value, GParamSpec * pspec) ++{ ++ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (object); ++ ++ switch (prop_id) { ++ case PROP_SIZE_TIME: ++ dec->size_time = g_value_get_uint64 (value); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_rtpst_2022_1_fecdec_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec) ++{ ++ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (object); ++ ++ switch (prop_id) { ++ case PROP_SIZE_TIME: ++ g_value_set_uint64 (value, dec->size_time); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static gboolean ++gst_2d_fec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event) ++{ ++ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (parent); ++ gboolean ret; ++ ++ if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) ++ gst_rtpst_2022_1_fecdec_reset (dec, TRUE); ++ ++ ret = gst_pad_event_default (pad, parent, 
event); ++ ++ return ret; ++} ++ ++static GstIterator * ++gst_rtpst_2022_1_fecdec_iterate_linked_pads (GstPad * pad, GstObject * parent) ++{ ++ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (parent); ++ GstPad *otherpad = NULL; ++ GstIterator *it = NULL; ++ GValue val = { 0, }; ++ ++ if (pad == dec->srcpad) ++ otherpad = dec->sinkpad; ++ else if (pad == dec->sinkpad) ++ otherpad = dec->srcpad; ++ ++ if (otherpad) { ++ g_value_init (&val, GST_TYPE_PAD); ++ g_value_set_object (&val, otherpad); ++ it = gst_iterator_new_single (GST_TYPE_PAD, &val); ++ g_value_unset (&val); ++ } ++ ++ return it; ++} ++ ++static GstPad * ++gst_rtpst_2022_1_fecdec_request_new_pad (GstElement * element, ++ GstPadTemplate * templ, const gchar * name, const GstCaps * caps) ++{ ++ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (element); ++ GstPad *sinkpad = NULL; ++ ++ GST_DEBUG_OBJECT (element, "requesting pad"); ++ ++ if (g_list_length (dec->fec_sinkpads) > 1) { ++ GST_ERROR_OBJECT (dec, "not accepting more than two fec streams"); ++ goto done; ++ } ++ ++ sinkpad = gst_pad_new_from_template (templ, name); ++ gst_pad_set_chain_function (sinkpad, gst_rtpst_2022_1_fecdec_sink_chain_fec); ++ gst_element_add_pad (GST_ELEMENT (dec), sinkpad); ++ gst_pad_set_iterate_internal_links_function (sinkpad, ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_iterate_linked_pads)); ++ ++ gst_pad_set_active (sinkpad, TRUE); ++ ++ GST_DEBUG_OBJECT (element, "requested pad %s:%s", ++ GST_DEBUG_PAD_NAME (sinkpad)); ++ ++done: ++ return sinkpad; ++} ++ ++static void ++gst_rtpst_2022_1_fecdec_release_pad (GstElement * element, GstPad * pad) ++{ ++ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (element); ++ ++ GST_DEBUG_OBJECT (element, "releasing pad %s:%s", GST_DEBUG_PAD_NAME (pad)); ++ ++ dec->fec_sinkpads = g_list_remove (dec->fec_sinkpads, pad); ++ ++ gst_pad_set_active (pad, FALSE); ++ gst_element_remove_pad (GST_ELEMENT_CAST (dec), pad); ++} ++ ++static void ++gst_rtpst_2022_1_fecdec_class_init (GstRTPST_2022_1_FecDecClass * klass) ++{ ++ GObjectClass *gobject_class = G_OBJECT_CLASS (klass); ++ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass); ++ ++ gobject_class->set_property = ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_set_property); ++ gobject_class->get_property = ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_get_property); ++ gobject_class->finalize = ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_finalize); ++ ++ g_object_class_install_property (gobject_class, PROP_SIZE_TIME, ++ g_param_spec_uint64 ("size-time", "Storage size (in ns)", ++ "The amount of data to store (in ns, 0-disable)", 0, ++ G_MAXUINT64, DEFAULT_SIZE_TIME, ++ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS)); ++ ++ gstelement_class->change_state = ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_change_state); ++ gstelement_class->request_new_pad = ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_request_new_pad); ++ gstelement_class->release_pad = ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_release_pad); ++ ++ gst_element_class_set_static_metadata (gstelement_class, ++ "SMPTE 2022-1 FEC decoder", "SMPTE 2022-1 FEC decoding", ++ "performs FEC as described by SMPTE 2022-1", ++ "Mathieu Duponchelle "); ++ ++ gst_element_class_add_static_pad_template (gstelement_class, &sink_template); ++ gst_element_class_add_static_pad_template (gstelement_class, ++ &fec_sink_template); ++ gst_element_class_add_static_pad_template (gstelement_class, &src_template); ++ ++ GST_DEBUG_CATEGORY_INIT 
(gst_rtpst_2022_1_fecdec_debug, ++ "rtpst2022-1-fecdec", 0, "SMPTE 2022-1 FEC decoder element"); ++} ++ ++static void ++gst_rtpst_2022_1_fecdec_init (GstRTPST_2022_1_FecDec * dec) ++{ ++ dec->srcpad = gst_pad_new_from_static_template (&src_template, "src"); ++ GST_PAD_SET_PROXY_CAPS (dec->srcpad); ++ gst_pad_use_fixed_caps (dec->srcpad); ++ gst_pad_set_event_function (dec->srcpad, ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_src_event)); ++ gst_pad_set_iterate_internal_links_function (dec->srcpad, ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_iterate_linked_pads)); ++ gst_element_add_pad (GST_ELEMENT (dec), dec->srcpad); ++ ++ dec->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink"); ++ GST_PAD_SET_PROXY_CAPS (dec->sinkpad); ++ gst_pad_set_chain_function (dec->sinkpad, gst_rtpst_2022_1_fecdec_sink_chain); ++ gst_pad_set_event_function (dec->sinkpad, ++ GST_DEBUG_FUNCPTR (gst_2d_fec_sink_event)); ++ gst_pad_set_iterate_internal_links_function (dec->sinkpad, ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_iterate_linked_pads)); ++ gst_element_add_pad (GST_ELEMENT (dec), dec->sinkpad); ++ ++ dec->d = G_MAXUINT; ++ dec->l = G_MAXUINT; ++} +diff --git a/gst/rtpmanager/gstrtpst2022-1-fecdec.h b/gst/rtpmanager/gstrtpst2022-1-fecdec.h +new file mode 100644 +index 000000000..7fc463143 +--- /dev/null ++++ b/gst/rtpmanager/gstrtpst2022-1-fecdec.h +@@ -0,0 +1,39 @@ ++/* GStreamer ++ * Copyright (C) <2020> Mathieu Duponchelle ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifndef __GST_RTPST_2022_1_FECDEC_H__ ++#define __GST_RTPST_2022_1_FECDEC_H__ ++ ++#include ++ ++G_BEGIN_DECLS ++ ++typedef struct _GstRTPST_2022_1_FecDecClass GstRTPST_2022_1_FecDecClass; ++typedef struct _GstRTPST_2022_1_FecDec GstRTPST_2022_1_FecDec; ++ ++#define GST_TYPE_RTPST_2022_1_FECDEC (gst_rtpst_2022_1_fecdec_get_type()) ++#define GST_RTPST_2022_1_FECDEC_CAST(obj) ((GstRTPST_2022_1_FecDec *)(obj)) ++ ++GType gst_rtpst_2022_1_fecdec_get_type (void); ++ ++GST_ELEMENT_REGISTER_DECLARE (rtpst2022_1_fecdec); ++ ++G_END_DECLS ++ ++#endif /* __GST_RTPST_2022_1_FECDEC_H__ */ +diff --git a/gst/rtpmanager/gstrtpst2022-1-fecenc.c b/gst/rtpmanager/gstrtpst2022-1-fecenc.c +new file mode 100644 +index 000000000..de2782ab2 +--- /dev/null ++++ b/gst/rtpmanager/gstrtpst2022-1-fecenc.c +@@ -0,0 +1,803 @@ ++/* GStreamer ++ * Copyright (C) <2020> Mathieu Duponchelle ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. 
++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++/** ++ * SECTION:element-rtpst2022-1-fecenc ++ * @see_also: #element-rtpst2022-1-fecdec ++ * ++ * This element takes as input a media stream and up to two FEC ++ * streams as described in SMPTE 2022-1: Forward Error Correction ++ * for Real-Time Video/Audio Transport Over IP Networks, and makes ++ * use of the FEC packets to recover media packets that may have ++ * gotten lost. ++ * ++ * ## sender / receiver example ++ * ++ * ``` shell ++ * gst-launch-1.0 \ ++ * rtpbin name=rtp fec-encoders='fec,0="rtpst2022-1-fecenc\ rows\=5\ columns\=5";' \ ++ * uridecodebin uri=file:///path/to/video/file ! x264enc key-int-max=60 tune=zerolatency ! \ ++ * queue ! mpegtsmux ! rtpmp2tpay ssrc=0 ! rtp.send_rtp_sink_0 \ ++ * rtp.send_rtp_src_0 ! udpsink host=127.0.0.1 port=5000 \ ++ * rtp.send_fec_src_0_0 ! udpsink host=127.0.0.1 port=5002 async=false \ ++ * rtp.send_fec_src_0_1 ! udpsink host=127.0.0.1 port=5004 async=false ++ * ``` ++ * ++ * ``` shell ++ * gst-launch-1.0 \ ++ * rtpbin latency=500 fec-decoders='fec,0="rtpst2022-1-fecdec\ size-time\=1000000000";' name=rtp \ ++ * udpsrc address=127.0.0.1 port=5002 caps="application/x-rtp, payload=96" ! queue ! rtp.recv_fec_sink_0_0 \ ++ * udpsrc address=127.0.0.1 port=5004 caps="application/x-rtp, payload=96" ! queue ! rtp.recv_fec_sink_0_1 \ ++ * udpsrc address=127.0.0.1 port=5000 caps="application/x-rtp, media=video, clock-rate=90000, encoding-name=mp2t, payload=33" ! \ ++ * queue ! netsim drop-probability=0.05 ! rtp.recv_rtp_sink_0 \ ++ * rtp. ! decodebin ! videoconvert ! queue ! autovideosink ++ * ``` ++ * ++ * With the above command line, as the media packet size is constant, ++ * the fec overhead can be approximated to the number of fec packets ++ * per 2-d matrix of media packet, here 10 fec packets for each 25 ++ * media packets. ++ * ++ * Increasing the number of rows and columns will decrease the overhead, ++ * but obviously increase the likelihood of recovery failure for lost ++ * packets on the receiver side. 
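
Complementing the decoder sketch earlier in this series: the encoder below emits one row FEC packet per L media packets and one column FEC packet once a column has accumulated D packets. Row packets are pushed immediately, while column packets are held back and spread out, as shown by the target_media_seq computation in queue_fec_packet further down. The following is a hedged editor's sketch of that scheduling arithmetic only, assuming L=5 columns, D=5 rows and matrix seqnums starting at 0; it is not code from this patch.

``` c
/* Editor's illustration, not part of the upstream patch.
 * Reproduces the "linearize" formula from queue_fec_packet() to show that
 * column FEC packets come out D media packets apart instead of in a burst.
 * The real element uses 16-bit seqnums that wrap; wraparound is ignored in
 * this toy. */
#include <stdio.h>

int main (void)
{
  const unsigned l = 5, d = 5;           /* assumed columns / rows */

  for (unsigned col = 0; col < l; col++) {
    /* a column completes when its last-row media packet arrives */
    unsigned last_media_seq = (d - 1) * l + col;
    /* same expression as in queue_fec_packet() */
    unsigned target = last_media_seq + l - col + d * col;

    printf ("column %u: completes at seq %u, sent alongside seq %u\n",
        col, last_media_seq, target);
  }
  return 0;
}
```

Spacing the column packets D media packets apart (here 25, 30, 35, 40, 45) avoids pushing all L of them right after the matrix completes, which is what the struct comment below means by making them more resilient to loss bursts.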
++ * ++ * Since: 1.20 ++ */ ++ ++#ifdef HAVE_CONFIG_H ++#include "config.h" ++#endif ++ ++#include ++#include ++ ++#include "gstrtpst2022-1-fecenc.h" ++ ++GST_DEBUG_CATEGORY_STATIC (gst_rtpst_2022_1_fecenc_debug); ++#define GST_CAT_DEFAULT gst_rtpst_2022_1_fecenc_debug ++ ++enum ++{ ++ PROP_0, ++ PROP_COLUMNS, ++ PROP_ROWS, ++ PROP_PT, ++ PROP_ENABLE_COLUMN, ++ PROP_ENABLE_ROW, ++}; ++ ++#define DEFAULT_ROWS 0 ++#define DEFAULT_COLUMNS 0 ++#define DEFAULT_PT 96 ++#define DEFAULT_ENABLE_COLUMN TRUE ++#define DEFAULT_ENABLE_ROW TRUE ++ ++typedef struct ++{ ++ guint16 target_media_seq; /* The media seqnum we want to send that packet alongside */ ++ guint16 seq_base; /* Only used for logging purposes */ ++ GstBuffer *buffer; ++} Item; ++ ++typedef struct ++{ ++ guint8 *xored_payload; ++ guint32 xored_timestamp; ++ guint8 xored_pt; ++ guint16 xored_payload_len; ++ gboolean xored_marker; ++ gboolean xored_padding; ++ gboolean xored_extension; ++ ++ guint16 seq_base; ++ ++ guint16 payload_len; ++ guint n_packets; ++} FecPacket; ++ ++struct _GstRTPST_2022_1_FecEncClass ++{ ++ GstElementClass class; ++}; ++ ++struct _GstRTPST_2022_1_FecEnc ++{ ++ GstElement element; ++ ++ GstPad *srcpad; ++ GstPad *sinkpad; ++ ++ /* These pads do not participate in the flow return of the element, ++ * which should continue working even if the sending of FEC packets ++ * fails ++ */ ++ GstPad *row_fec_srcpad; ++ GstPad *column_fec_srcpad; ++ ++ /* The following fields are only accessed on state change or from the ++ * streaming thread, and only settable in state < PAUSED */ ++ ++ /* N columns */ ++ guint l; ++ /* N rows */ ++ guint d; ++ ++ /* Whether we have pushed initial events on the column FEC source pad */ ++ gboolean column_events_pushed; ++ ++ /* The current row FEC packet */ ++ FecPacket *row; ++ /* Tracks the row seqnum */ ++ guint16 row_seq; ++ /* Whether we have pushed initial events on the row FEC source pad */ ++ gboolean row_events_pushed; ++ ++ /* These two fields are used to enforce input seqnum consecutiveness, ++ * and to determine when column FEC packets should be pushed */ ++ gboolean last_media_seqnum_set; ++ guint16 last_media_seqnum; ++ ++ /* This field is used to timestamp our FEC packets, we just piggy back */ ++ guint32 last_media_timestamp; ++ ++ /* The payload type of the FEC packets */ ++ gint pt; ++ ++ /* The following fields can be changed while PLAYING, and are ++ * protected with the OBJECT_LOCK ++ */ ++ /* Tracks the property, can be changed while PLAYING */ ++ gboolean enable_row; ++ /* Tracks the property, can be changed while PLAYING */ ++ gboolean enable_column; ++ ++ /* Array of FecPackets, with size enc->l */ ++ GPtrArray *columns; ++ /* Index of the current column in the array above */ ++ guint current_column; ++ /* Tracks the column seqnum */ ++ guint16 column_seq; ++ /* Column FEC packets must be delayed to make them more resilient ++ * to loss bursts, we store them here */ ++ GQueue queued_column_packets; ++}; ++ ++#define RTP_CAPS "application/x-rtp" ++ ++static GstStaticPadTemplate fec_src_template = ++GST_STATIC_PAD_TEMPLATE ("fec_%u", ++ GST_PAD_SRC, ++ GST_PAD_SOMETIMES, ++ GST_STATIC_CAPS (RTP_CAPS)); ++ ++static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink", ++ GST_PAD_SINK, ++ GST_PAD_ALWAYS, ++ GST_STATIC_CAPS (RTP_CAPS)); ++ ++static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src", ++ GST_PAD_SRC, ++ GST_PAD_ALWAYS, ++ GST_STATIC_CAPS (RTP_CAPS)); ++ ++#define gst_rtpst_2022_1_fecenc_parent_class parent_class 
++G_DEFINE_TYPE (GstRTPST_2022_1_FecEnc, gst_rtpst_2022_1_fecenc, ++ GST_TYPE_ELEMENT); ++GST_ELEMENT_REGISTER_DEFINE (rtpst2022_1_fecenc, "rtpst2022-1-fecenc", ++ GST_RANK_NONE, GST_TYPE_RTPST_2022_1_FECENC); ++ ++static void ++free_item (Item * item) ++{ ++ if (item->buffer) ++ gst_buffer_unref (item->buffer); ++ ++ g_free (item); ++} ++ ++static void ++free_fec_packet (FecPacket * packet) ++{ ++ if (packet->xored_payload) ++ g_free (packet->xored_payload); ++ g_free (packet); ++} ++ ++static void ++_xor_mem (guint8 * restrict dst, const guint8 * restrict src, gsize length) ++{ ++ guint i; ++ ++ for (i = 0; i < (length / sizeof (guint64)); ++i) { ++#if G_BYTE_ORDER == G_LITTLE_ENDIAN ++ GST_WRITE_UINT64_LE (dst, ++ GST_READ_UINT64_LE (dst) ^ GST_READ_UINT64_LE (src)); ++#else ++ GST_WRITE_UINT64_BE (dst, ++ GST_READ_UINT64_BE (dst) ^ GST_READ_UINT64_BE (src)); ++#endif ++ dst += sizeof (guint64); ++ src += sizeof (guint64); ++ } ++ for (i = 0; i < (length % sizeof (guint64)); ++i) ++ dst[i] ^= src[i]; ++} ++ ++static void ++fec_packet_update (FecPacket * fec, GstRTPBuffer * rtp) ++{ ++ if (fec->n_packets == 0) { ++ fec->seq_base = gst_rtp_buffer_get_seq (rtp); ++ fec->payload_len = gst_rtp_buffer_get_payload_len (rtp); ++ fec->xored_payload_len = gst_rtp_buffer_get_payload_len (rtp); ++ fec->xored_pt = gst_rtp_buffer_get_payload_type (rtp); ++ fec->xored_timestamp = gst_rtp_buffer_get_timestamp (rtp); ++ fec->xored_marker = gst_rtp_buffer_get_marker (rtp); ++ fec->xored_padding = gst_rtp_buffer_get_padding (rtp); ++ fec->xored_extension = gst_rtp_buffer_get_extension (rtp); ++ fec->xored_payload = g_malloc (sizeof (guint8) * fec->payload_len); ++ memcpy (fec->xored_payload, gst_rtp_buffer_get_payload (rtp), ++ fec->payload_len); ++ } else { ++ guint plen = gst_rtp_buffer_get_payload_len (rtp); ++ ++ if (fec->payload_len < plen) { ++ fec->xored_payload = ++ g_realloc (fec->xored_payload, sizeof (guint8) * plen); ++ memset (fec->xored_payload + fec->payload_len, 0, ++ plen - fec->payload_len); ++ fec->payload_len = plen; ++ } ++ ++ fec->xored_payload_len ^= plen; ++ fec->xored_pt ^= gst_rtp_buffer_get_payload_type (rtp); ++ fec->xored_timestamp ^= gst_rtp_buffer_get_timestamp (rtp); ++ fec->xored_marker ^= gst_rtp_buffer_get_marker (rtp); ++ fec->xored_padding ^= gst_rtp_buffer_get_padding (rtp); ++ fec->xored_extension ^= gst_rtp_buffer_get_extension (rtp); ++ _xor_mem (fec->xored_payload, gst_rtp_buffer_get_payload (rtp), plen); ++ } ++ ++ fec->n_packets += 1; ++} ++ ++static void ++push_initial_events (GstRTPST_2022_1_FecEnc * enc, GstPad * pad, ++ const gchar * id) ++{ ++ gchar *stream_id; ++ GstCaps *caps; ++ GstSegment segment; ++ ++ stream_id = gst_pad_create_stream_id (pad, GST_ELEMENT (enc), id); ++ gst_pad_push_event (pad, gst_event_new_stream_start (stream_id)); ++ g_free (stream_id); ++ ++ caps = gst_caps_new_simple ("application/x-rtp", ++ "payload", G_TYPE_UINT, enc->pt, "ssrc", G_TYPE_UINT, 0, NULL); ++ gst_pad_push_event (pad, gst_event_new_caps (caps)); ++ gst_caps_unref (caps); ++ ++ gst_segment_init (&segment, GST_FORMAT_TIME); ++ gst_pad_push_event (pad, gst_event_new_segment (&segment)); ++} ++ ++static void ++queue_fec_packet (GstRTPST_2022_1_FecEnc * enc, FecPacket * fec, gboolean row) ++{ ++ GstBuffer *buffer = gst_rtp_buffer_new_allocate (fec->payload_len + 16, 0, 0); ++ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ GstBitWriter bits; ++ guint8 *data; ++ ++ gst_rtp_buffer_map (buffer, GST_MAP_WRITE, &rtp); ++ data = gst_rtp_buffer_get_payload (&rtp); ++ memset (data, 
0x00, 16); ++ ++ gst_bit_writer_init_with_data (&bits, data, 17, FALSE); ++ ++ gst_bit_writer_put_bits_uint16 (&bits, fec->seq_base, 16); /* SNBase low bits */ ++ gst_bit_writer_put_bits_uint16 (&bits, fec->xored_payload_len, 16); /* Length Recovery */ ++ gst_bit_writer_put_bits_uint8 (&bits, 1, 1); /* E */ ++ gst_bit_writer_put_bits_uint8 (&bits, fec->xored_pt, 7); /* PT recovery */ ++ gst_bit_writer_put_bits_uint32 (&bits, 0, 24); /* Mask */ ++ gst_bit_writer_put_bits_uint32 (&bits, fec->xored_timestamp, 32); /* TS recovery */ ++ gst_bit_writer_put_bits_uint8 (&bits, 0, 1); /* N */ ++ gst_bit_writer_put_bits_uint8 (&bits, row ? 1 : 0, 1); /* D */ ++ gst_bit_writer_put_bits_uint8 (&bits, 0, 3); /* type */ ++ gst_bit_writer_put_bits_uint8 (&bits, 0, 3); /* index */ ++ gst_bit_writer_put_bits_uint8 (&bits, row ? 1 : enc->l, 8); /* Offset */ ++ gst_bit_writer_put_bits_uint8 (&bits, fec->n_packets, 8); /* NA */ ++ gst_bit_writer_put_bits_uint8 (&bits, 0, 8); /* SNBase ext bits */ ++ ++ memcpy (data + 16, fec->xored_payload, fec->payload_len); ++ ++ gst_bit_writer_reset (&bits); ++ ++ gst_rtp_buffer_set_payload_type (&rtp, enc->pt); ++ gst_rtp_buffer_set_seq (&rtp, row ? enc->row_seq++ : enc->column_seq++); ++ gst_rtp_buffer_set_marker (&rtp, fec->xored_marker); ++ gst_rtp_buffer_set_padding (&rtp, fec->xored_padding); ++ gst_rtp_buffer_set_extension (&rtp, fec->xored_extension); ++ ++ /* We're sending it out immediately */ ++ if (row) ++ gst_rtp_buffer_set_timestamp (&rtp, enc->last_media_timestamp); ++ ++ gst_rtp_buffer_unmap (&rtp); ++ ++ /* We can send row FEC packets immediately, column packets need ++ * delaying by L <= delay < L * D ++ */ ++ if (row) { ++ GstFlowReturn ret; ++ ++ GST_LOG_OBJECT (enc, ++ "Pushing row FEC packet, seq base: %u, media seqnum: %u", ++ fec->seq_base, enc->last_media_seqnum); ++ ++ /* Safe to unlock here */ ++ GST_OBJECT_UNLOCK (enc); ++ ret = gst_pad_push (enc->row_fec_srcpad, buffer); ++ GST_OBJECT_LOCK (enc); ++ ++ if (ret != GST_FLOW_OK && ret != GST_FLOW_FLUSHING) ++ GST_WARNING_OBJECT (enc->row_fec_srcpad, ++ "Failed to push row FEC packet: %s", gst_flow_get_name (ret)); ++ } else { ++ Item *item = g_malloc0 (sizeof (Item)); ++ ++ item->buffer = buffer; ++ item->seq_base = fec->seq_base; ++ /* Let's get cute and linearize */ ++ item->target_media_seq = ++ enc->last_media_seqnum + enc->l - enc->current_column + ++ enc->d * enc->current_column; ++ ++ g_queue_push_tail (&enc->queued_column_packets, item); ++ } ++} ++ ++static void ++gst_2d_fec_push_item_unlocked (GstRTPST_2022_1_FecEnc * enc) ++{ ++ GstFlowReturn ret; ++ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ Item *item = g_queue_pop_head (&enc->queued_column_packets); ++ ++ GST_LOG_OBJECT (enc, ++ "Pushing column FEC packet, target media seq: %u, seq base: %u, " ++ "media seqnum: %u", item->target_media_seq, item->seq_base, ++ enc->last_media_seqnum); ++ gst_rtp_buffer_map (item->buffer, GST_MAP_WRITE, &rtp); ++ gst_rtp_buffer_set_timestamp (&rtp, enc->last_media_timestamp); ++ gst_rtp_buffer_unmap (&rtp); ++ GST_OBJECT_UNLOCK (enc); ++ ret = gst_pad_push (enc->column_fec_srcpad, gst_buffer_ref (item->buffer)); ++ GST_OBJECT_LOCK (enc); ++ ++ if (ret != GST_FLOW_OK && ret != GST_FLOW_FLUSHING) ++ GST_WARNING_OBJECT (enc->column_fec_srcpad, ++ "Failed to push column FEC packet: %s", gst_flow_get_name (ret)); ++ ++ free_item (item); ++} ++ ++static GstFlowReturn ++gst_rtpst_2022_1_fecenc_sink_chain (GstPad * pad, GstObject * parent, ++ GstBuffer * buffer) ++{ ++ GstRTPST_2022_1_FecEnc *enc = 
GST_RTPST_2022_1_FECENC_CAST (parent); ++ GstFlowReturn ret = GST_FLOW_OK; ++ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ ++ if (!gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp)) { ++ GST_ERROR_OBJECT (enc, "Chained buffer isn't valid RTP"); ++ goto error; ++ } ++ ++ if (gst_rtp_buffer_get_ssrc (&rtp) != 0) { ++ GST_ERROR_OBJECT (enc, "Chained buffer must have SSRC == 0"); ++ goto error; ++ } ++ ++ if (enc->last_media_seqnum_set ++ && (guint16) (enc->last_media_seqnum + 1) != ++ gst_rtp_buffer_get_seq (&rtp)) { ++ GST_ERROR_OBJECT (enc, "consecutive sequence numbers are required"); ++ goto error; ++ } ++ ++ if (!enc->row_events_pushed) { ++ push_initial_events (enc, enc->row_fec_srcpad, "row-fec"); ++ enc->row_events_pushed = TRUE; ++ } ++ ++ if (!enc->column_events_pushed) { ++ push_initial_events (enc, enc->column_fec_srcpad, "column-fec"); ++ enc->column_events_pushed = TRUE; ++ } ++ ++ enc->last_media_timestamp = gst_rtp_buffer_get_timestamp (&rtp); ++ enc->last_media_seqnum = gst_rtp_buffer_get_seq (&rtp); ++ enc->last_media_seqnum_set = TRUE; ++ ++ GST_OBJECT_LOCK (enc); ++ if (enc->enable_row && enc->l) { ++ g_assert (enc->row->n_packets < enc->l); ++ fec_packet_update (enc->row, &rtp); ++ if (enc->row->n_packets == enc->l) { ++ queue_fec_packet (enc, enc->row, TRUE); ++ g_free (enc->row->xored_payload); ++ memset (enc->row, 0x00, sizeof (FecPacket)); ++ } ++ } ++ ++ if (enc->enable_column && enc->l && enc->d) { ++ FecPacket *column = g_ptr_array_index (enc->columns, enc->current_column); ++ ++ fec_packet_update (column, &rtp); ++ if (column->n_packets == enc->d) { ++ queue_fec_packet (enc, column, FALSE); ++ g_free (column->xored_payload); ++ memset (column, 0x00, sizeof (FecPacket)); ++ } ++ ++ enc->current_column++; ++ enc->current_column %= enc->l; ++ } ++ ++ gst_rtp_buffer_unmap (&rtp); ++ ++ { ++ Item *item = g_queue_peek_head (&enc->queued_column_packets); ++ if (item && item->target_media_seq == enc->last_media_seqnum) ++ gst_2d_fec_push_item_unlocked (enc); ++ } ++ ++ GST_OBJECT_UNLOCK (enc); ++ ++ ret = gst_pad_push (enc->srcpad, buffer); ++ ++done: ++ return ret; ++ ++error: ++ if (rtp.buffer) ++ gst_rtp_buffer_unmap (&rtp); ++ gst_buffer_unref (buffer); ++ ret = GST_FLOW_ERROR; ++ goto done; ++} ++ ++static GstIterator * ++gst_rtpst_2022_1_fecenc_iterate_linked_pads (GstPad * pad, GstObject * parent) ++{ ++ GstRTPST_2022_1_FecEnc *enc = GST_RTPST_2022_1_FECENC_CAST (parent); ++ GstPad *otherpad = NULL; ++ GstIterator *it = NULL; ++ GValue val = { 0, }; ++ ++ if (pad == enc->srcpad) ++ otherpad = enc->sinkpad; ++ else if (pad == enc->sinkpad) ++ otherpad = enc->srcpad; ++ ++ if (otherpad) { ++ g_value_init (&val, GST_TYPE_PAD); ++ g_value_set_object (&val, otherpad); ++ it = gst_iterator_new_single (GST_TYPE_PAD, &val); ++ g_value_unset (&val); ++ } ++ ++ return it; ++} ++ ++static void ++gst_rtpst_2022_1_fecenc_reset (GstRTPST_2022_1_FecEnc * enc, gboolean allocate) ++{ ++ if (enc->row) { ++ free_fec_packet (enc->row); ++ enc->row = NULL; ++ } ++ ++ if (enc->columns) { ++ g_ptr_array_unref (enc->columns); ++ enc->columns = NULL; ++ } ++ ++ if (enc->row_fec_srcpad) { ++ gst_element_remove_pad (GST_ELEMENT (enc), enc->row_fec_srcpad); ++ enc->row_fec_srcpad = NULL; ++ } ++ ++ if (enc->column_fec_srcpad) { ++ gst_element_remove_pad (GST_ELEMENT (enc), enc->column_fec_srcpad); ++ enc->column_fec_srcpad = NULL; ++ } ++ ++ g_queue_clear_full (&enc->queued_column_packets, (GDestroyNotify) free_item); ++ ++ if (allocate) { ++ guint i; ++ ++ enc->row = g_malloc0 (sizeof 
(FecPacket)); ++ enc->columns = ++ g_ptr_array_new_full (enc->l, (GDestroyNotify) free_fec_packet); ++ ++ for (i = 0; i < enc->l; i++) { ++ g_ptr_array_add (enc->columns, g_malloc0 (sizeof (FecPacket))); ++ } ++ ++ g_queue_init (&enc->queued_column_packets); ++ ++ enc->column_fec_srcpad = ++ gst_pad_new_from_static_template (&fec_src_template, "fec_0"); ++ gst_pad_set_active (enc->column_fec_srcpad, TRUE); ++ gst_pad_set_iterate_internal_links_function (enc->column_fec_srcpad, ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_iterate_linked_pads)); ++ gst_element_add_pad (GST_ELEMENT (enc), enc->column_fec_srcpad); ++ ++ enc->row_fec_srcpad = ++ gst_pad_new_from_static_template (&fec_src_template, "fec_1"); ++ gst_pad_set_active (enc->row_fec_srcpad, TRUE); ++ gst_pad_set_iterate_internal_links_function (enc->row_fec_srcpad, ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_iterate_linked_pads)); ++ gst_element_add_pad (GST_ELEMENT (enc), enc->row_fec_srcpad); ++ ++ gst_element_no_more_pads (GST_ELEMENT (enc)); ++ } ++ ++ enc->current_column = 0; ++ enc->last_media_seqnum_set = FALSE; ++} ++ ++static GstStateChangeReturn ++gst_rtpst_2022_1_fecenc_change_state (GstElement * element, ++ GstStateChange transition) ++{ ++ GstStateChangeReturn ret; ++ GstRTPST_2022_1_FecEnc *enc = GST_RTPST_2022_1_FECENC_CAST (element); ++ ++ switch (transition) { ++ case GST_STATE_CHANGE_READY_TO_PAUSED: ++ gst_rtpst_2022_1_fecenc_reset (enc, TRUE); ++ break; ++ case GST_STATE_CHANGE_PAUSED_TO_READY: ++ gst_rtpst_2022_1_fecenc_reset (enc, FALSE); ++ break; ++ default: ++ break; ++ } ++ ++ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); ++ ++ return ret; ++} ++ ++static void ++gst_rtpst_2022_1_fecenc_finalize (GObject * object) ++{ ++ GstRTPST_2022_1_FecEnc *enc = GST_RTPST_2022_1_FECENC_CAST (object); ++ ++ gst_rtpst_2022_1_fecenc_reset (enc, FALSE); ++ ++ G_OBJECT_CLASS (parent_class)->finalize (object); ++} ++ ++static void ++gst_rtpst_2022_1_fecenc_set_property (GObject * object, guint prop_id, ++ const GValue * value, GParamSpec * pspec) ++{ ++ GstRTPST_2022_1_FecEnc *enc = GST_RTPST_2022_1_FECENC_CAST (object); ++ ++ if (GST_STATE (enc) > GST_STATE_READY) { ++ GST_ERROR_OBJECT (enc, ++ "rtpst2022-1-fecenc properties can't be changed in PLAYING or PAUSED state"); ++ return; ++ } ++ ++ switch (prop_id) { ++ case PROP_COLUMNS: ++ enc->l = g_value_get_uint (value); ++ break; ++ case PROP_ROWS: ++ enc->d = g_value_get_uint (value); ++ break; ++ case PROP_PT: ++ enc->pt = g_value_get_int (value); ++ break; ++ case PROP_ENABLE_COLUMN: ++ GST_OBJECT_LOCK (enc); ++ enc->enable_column = g_value_get_boolean (value); ++ if (!enc->enable_column) { ++ guint i; ++ ++ if (enc->columns) { ++ for (i = 0; i < enc->l; i++) { ++ FecPacket *column = g_ptr_array_index (enc->columns, i); ++ g_free (column->xored_payload); ++ memset (column, 0x00, sizeof (FecPacket)); ++ } ++ } ++ enc->current_column = 0; ++ enc->column_seq = 0; ++ g_queue_clear_full (&enc->queued_column_packets, ++ (GDestroyNotify) free_item); ++ } ++ GST_OBJECT_UNLOCK (enc); ++ break; ++ case PROP_ENABLE_ROW: ++ GST_OBJECT_LOCK (enc); ++ enc->enable_row = g_value_get_boolean (value); ++ GST_OBJECT_UNLOCK (enc); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static void ++gst_rtpst_2022_1_fecenc_get_property (GObject * object, guint prop_id, ++ GValue * value, GParamSpec * pspec) ++{ ++ GstRTPST_2022_1_FecEnc *enc = GST_RTPST_2022_1_FECENC_CAST (object); ++ ++ switch (prop_id) { ++ 
case PROP_COLUMNS: ++ g_value_set_uint (value, enc->l); ++ break; ++ case PROP_ROWS: ++ g_value_set_uint (value, enc->d); ++ break; ++ case PROP_PT: ++ g_value_set_int (value, enc->pt); ++ break; ++ case PROP_ENABLE_COLUMN: ++ GST_OBJECT_LOCK (enc); ++ g_value_set_boolean (value, enc->enable_column); ++ GST_OBJECT_UNLOCK (enc); ++ break; ++ case PROP_ENABLE_ROW: ++ GST_OBJECT_LOCK (enc); ++ g_value_set_boolean (value, enc->enable_row); ++ GST_OBJECT_UNLOCK (enc); ++ break; ++ default: ++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); ++ break; ++ } ++} ++ ++static gboolean ++gst_2d_fec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event) ++{ ++ GstRTPST_2022_1_FecEnc *enc = GST_RTPST_2022_1_FECENC_CAST (parent); ++ gboolean ret; ++ ++ switch (GST_EVENT_TYPE (event)) { ++ case GST_EVENT_FLUSH_STOP: ++ gst_rtpst_2022_1_fecenc_reset (enc, TRUE); ++ break; ++ case GST_EVENT_EOS: ++ gst_pad_push_event (enc->row_fec_srcpad, gst_event_ref (event)); ++ GST_OBJECT_LOCK (enc); ++ while (g_queue_peek_head (&enc->queued_column_packets)) ++ gst_2d_fec_push_item_unlocked (enc); ++ GST_OBJECT_UNLOCK (enc); ++ gst_pad_push_event (enc->column_fec_srcpad, gst_event_ref (event)); ++ break; ++ default: ++ break; ++ } ++ ++ ret = gst_pad_event_default (pad, parent, event); ++ ++ return ret; ++} ++ ++static void ++gst_rtpst_2022_1_fecenc_class_init (GstRTPST_2022_1_FecEncClass * klass) ++{ ++ GObjectClass *gobject_class = G_OBJECT_CLASS (klass); ++ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass); ++ ++ gobject_class->set_property = ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_set_property); ++ gobject_class->get_property = ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_get_property); ++ gobject_class->finalize = ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_finalize); ++ ++ g_object_class_install_property (gobject_class, PROP_COLUMNS, ++ g_param_spec_uint ("columns", "Columns", ++ "Number of columns to apply row FEC on, 0=disabled", 0, ++ 255, DEFAULT_COLUMNS, ++ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS | ++ GST_PARAM_MUTABLE_READY)); ++ ++ g_object_class_install_property (gobject_class, PROP_ROWS, ++ g_param_spec_uint ("rows", "Rows", ++ "Number of rows to apply column FEC on, 0=disabled", 0, ++ 255, DEFAULT_ROWS, ++ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS | ++ GST_PARAM_MUTABLE_READY)); ++ ++ g_object_class_install_property (gobject_class, PROP_PT, ++ g_param_spec_int ("pt", "Payload Type", ++ "The payload type of FEC packets", 96, ++ 255, DEFAULT_PT, ++ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS | ++ GST_PARAM_MUTABLE_READY)); ++ ++ g_object_class_install_property (gobject_class, PROP_ENABLE_COLUMN, ++ g_param_spec_boolean ("enable-column-fec", "Enable Column FEC", ++ "Whether the encoder should compute and send column FEC", ++ DEFAULT_ENABLE_COLUMN, ++ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS | ++ GST_PARAM_MUTABLE_PLAYING)); ++ ++ g_object_class_install_property (gobject_class, PROP_ENABLE_ROW, ++ g_param_spec_boolean ("enable-row-fec", "Enable Row FEC", ++ "Whether the encoder should compute and send row FEC", ++ DEFAULT_ENABLE_ROW, ++ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS | ++ GST_PARAM_MUTABLE_PLAYING)); ++ ++ gstelement_class->change_state = ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_change_state); ++ ++ gst_element_class_set_static_metadata (gstelement_class, ++ "SMPTE 2022-1 FEC encoder", "SMPTE 2022-1 FEC encoding", ++ "performs FEC as described by 
SMPTE 2022-1", ++ "Mathieu Duponchelle "); ++ ++ gst_element_class_add_static_pad_template (gstelement_class, &sink_template); ++ gst_element_class_add_static_pad_template (gstelement_class, ++ &fec_src_template); ++ gst_element_class_add_static_pad_template (gstelement_class, &src_template); ++ ++ GST_DEBUG_CATEGORY_INIT (gst_rtpst_2022_1_fecenc_debug, ++ "rtpst2022-1-fecenc", 0, "SMPTE 2022-1 FEC encoder element"); ++} ++ ++static void ++gst_rtpst_2022_1_fecenc_init (GstRTPST_2022_1_FecEnc * enc) ++{ ++ enc->srcpad = gst_pad_new_from_static_template (&src_template, "src"); ++ gst_pad_use_fixed_caps (enc->srcpad); ++ GST_PAD_SET_PROXY_CAPS (enc->srcpad); ++ gst_pad_set_iterate_internal_links_function (enc->srcpad, ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_iterate_linked_pads)); ++ gst_element_add_pad (GST_ELEMENT (enc), enc->srcpad); ++ ++ enc->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink"); ++ GST_PAD_SET_PROXY_CAPS (enc->sinkpad); ++ gst_pad_set_chain_function (enc->sinkpad, gst_rtpst_2022_1_fecenc_sink_chain); ++ gst_pad_set_event_function (enc->sinkpad, ++ GST_DEBUG_FUNCPTR (gst_2d_fec_sink_event)); ++ gst_pad_set_iterate_internal_links_function (enc->sinkpad, ++ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_iterate_linked_pads)); ++ gst_element_add_pad (GST_ELEMENT (enc), enc->sinkpad); ++ ++ enc->d = 0; ++ enc->l = 0; ++} +diff --git a/gst/rtpmanager/gstrtpst2022-1-fecenc.h b/gst/rtpmanager/gstrtpst2022-1-fecenc.h +new file mode 100644 +index 000000000..33e5de43a +--- /dev/null ++++ b/gst/rtpmanager/gstrtpst2022-1-fecenc.h +@@ -0,0 +1,39 @@ ++/* GStreamer ++ * Copyright (C) <2020> Mathieu Duponchelle ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#ifndef __GST_RTPST_2022_1_FECENC_H__ ++#define __GST_RTPST_2022_1_FECENC_H__ ++ ++#include ++ ++G_BEGIN_DECLS ++ ++typedef struct _GstRTPST_2022_1_FecEncClass GstRTPST_2022_1_FecEncClass; ++typedef struct _GstRTPST_2022_1_FecEnc GstRTPST_2022_1_FecEnc; ++ ++#define GST_TYPE_RTPST_2022_1_FECENC (gst_rtpst_2022_1_fecenc_get_type()) ++#define GST_RTPST_2022_1_FECENC_CAST(obj) ((GstRTPST_2022_1_FecEnc *)(obj)) ++ ++GType gst_rtpst_2022_1_fecenc_get_type (void); ++ ++GST_ELEMENT_REGISTER_DECLARE (rtpst2022_1_fecenc); ++ ++G_END_DECLS ++ ++#endif /* __GST_RTPST_2022_1_FECENC_H__ */ +diff --git a/gst/rtpmanager/gstrtputils.c b/gst/rtpmanager/gstrtputils.c +new file mode 100644 +index 000000000..eb8113368 +--- /dev/null ++++ b/gst/rtpmanager/gstrtputils.c +@@ -0,0 +1,44 @@ ++/* GStreamer ++ * Copyright (C) 2022 Sebastian Dröge ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. 
++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. ++ */ ++ ++#include "gstrtputils.h" ++ ++guint8 ++gst_rtp_get_extmap_id_for_attribute (const GstStructure * s, ++ const gchar * ext_name) ++{ ++ guint i; ++ guint8 extmap_id = 0; ++ guint n_fields = gst_structure_n_fields (s); ++ ++ for (i = 0; i < n_fields; i++) { ++ const gchar *field_name = gst_structure_nth_field_name (s, i); ++ if (g_str_has_prefix (field_name, "extmap-")) { ++ const gchar *str = gst_structure_get_string (s, field_name); ++ if (str && g_strcmp0 (str, ext_name) == 0) { ++ gint64 id = g_ascii_strtoll (field_name + 7, NULL, 10); ++ if (id > 0 && id < 15) { ++ extmap_id = id; ++ break; ++ } ++ } ++ } ++ } ++ return extmap_id; ++} +diff --git a/gst/rtpmanager/gstrtputils.h b/gst/rtpmanager/gstrtputils.h +new file mode 100644 +index 000000000..668933360 +--- /dev/null ++++ b/gst/rtpmanager/gstrtputils.h +@@ -0,0 +1,34 @@ ++/* GStreamer ++ * Copyright (C) 2022 Sebastian Dröge ++ * ++ * This library is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU Library General Public ++ * License as published by the Free Software Foundation; either ++ * version 2 of the License, or (at your option) any later version. ++ * ++ * This library is distributed in the hope that it will be useful, ++ * but WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * Library General Public License for more details. ++ * ++ * You should have received a copy of the GNU Library General Public ++ * License along with this library; if not, write to the ++ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, ++ * Boston, MA 02110-1301, USA. 
++ */ ++ ++#ifndef __GST_RTP_UTILS_H__ ++#define __GST_RTP_UTILS_H__ ++ ++#include ++ ++G_BEGIN_DECLS ++ ++#define GST_RTP_NTP_UNIX_OFFSET (2208988800LL) ++ ++G_GNUC_INTERNAL guint8 ++gst_rtp_get_extmap_id_for_attribute (const GstStructure * s, const gchar * ext_name); ++ ++G_END_DECLS ++ ++#endif /* __GST_RTP_UTILS_H__ */ +diff --git a/gst/rtpmanager/meson.build b/gst/rtpmanager/meson.build +index 118a1e1ea..11435d902 100644 +--- a/gst/rtpmanager/meson.build ++++ b/gst/rtpmanager/meson.build +@@ -3,6 +3,12 @@ rtpmanager_sources = [ + 'gstrtpbin.c', + 'gstrtpdtmfmux.c', + 'gstrtpjitterbuffer.c', ++ 'gstrtphdrext-twcc.c', ++ 'gstrtphdrext-clientaudiolevel.c', ++ 'gstrtphdrext-mid.c', ++ 'gstrtphdrext-ntp.c', ++ 'gstrtphdrext-repairedstreamid.c', ++ 'gstrtphdrext-streamid.c', + 'gstrtpmux.c', + 'gstrtpptdemux.c', + 'gstrtprtxqueue.c', +@@ -17,15 +23,17 @@ rtpmanager_sources = [ + 'rtptwcc.c', + 'gstrtpsession.c', + 'gstrtpfunnel.c', ++ 'gstrtpst2022-1-fecdec.c', ++ 'gstrtpst2022-1-fecenc.c', ++ 'gstrtputils.c' + ] + + gstrtpmanager = library('gstrtpmanager', + rtpmanager_sources, + c_args : gst_plugins_good_args, + include_directories : [configinc, libsinc], +- dependencies : [gstbase_dep, gstnet_dep, gstrtp_dep, gio_dep], ++ dependencies : [gstbase_dep, gstnet_dep, gstrtp_dep, gstaudio_dep, gio_dep], + install : true, + install_dir : plugins_install_dir, + ) +-pkgconfig.generate(gstrtpmanager, install_dir : plugins_pkgconfig_install_dir) + plugins += [gstrtpmanager] +diff --git a/gst/rtpmanager/rtpjitterbuffer.c b/gst/rtpmanager/rtpjitterbuffer.c +index aef5cbc35..1ab0aaf60 100644 +--- a/gst/rtpmanager/rtpjitterbuffer.c ++++ b/gst/rtpmanager/rtpjitterbuffer.c +@@ -703,7 +703,7 @@ queue_do_insert (RTPJitterBuffer * jbuf, GList * list, GList * item) + GstClockTime + rtp_jitter_buffer_calculate_pts (RTPJitterBuffer * jbuf, GstClockTime dts, + gboolean estimated_dts, guint32 rtptime, GstClockTime base_time, +- gint gap, gboolean is_rtx) ++ gint gap, gboolean is_rtx, GstClockTime * p_ntp_time) + { + guint64 ext_rtptime; + GstClockTime gstrtptime, pts; +@@ -711,6 +711,8 @@ rtp_jitter_buffer_calculate_pts (RTPJitterBuffer * jbuf, GstClockTime dts, + guint64 media_clock_offset; + gboolean rfc7273_mode; + ++ *p_ntp_time = GST_CLOCK_TIME_NONE; ++ + /* rtp time jumps are checked for during skew calculation, but bypassed + * in other mode, so mind those here and reset jb if needed. + * Only reset if valid input time, which is likely for UDP input +@@ -870,48 +872,125 @@ rtp_jitter_buffer_calculate_pts (RTPJitterBuffer * jbuf, GstClockTime dts, + } else if (rfc7273_mode && (jbuf->mode == RTP_JITTER_BUFFER_MODE_SLAVE + || jbuf->mode == RTP_JITTER_BUFFER_MODE_SYNCED) + && media_clock_offset != -1 && jbuf->rfc7273_sync) { +- GstClockTime ntptime, rtptime_tmp; ++ GstClockTime ntptime; + GstClockTime ntprtptime, rtpsystime; + GstClockTime internal, external; + GstClockTime rate_num, rate_denom; ++ GstClockTime ntprtptime_period_start; ++ gboolean negative_ntprtptime_period_start; + + /* Don't do any of the dts related adjustments further down */ + dts = -1; + + /* Calculate the actual clock time on the sender side based on the +- * RFC7273 clock and convert it to our pipeline clock +- */ ++ * RFC7273 clock and convert it to our pipeline clock. 
*/ + + gst_clock_get_calibration (media_clock, &internal, &external, &rate_num, + &rate_denom); + ++ /* Current NTP clock estimation */ + ntptime = gst_clock_get_internal_time (media_clock); + +- ntprtptime = gst_util_uint64_scale (ntptime, jbuf->clock_rate, GST_SECOND); ++ /* Current RTP time based on the estimated NTP clock and the corresponding ++ * RTP time period start */ ++ ntprtptime = ntprtptime_period_start = ++ gst_util_uint64_scale (ntptime, jbuf->clock_rate, GST_SECOND); + ntprtptime += media_clock_offset; + ntprtptime &= 0xffffffff; + +- rtptime_tmp = rtptime; ++ /* If we're in the first period then the start of the period might be ++ * before the clock epoch */ ++ if (ntprtptime_period_start >= ntprtptime) { ++ ntprtptime_period_start = ntprtptime_period_start - ntprtptime; ++ negative_ntprtptime_period_start = FALSE; ++ } else { ++ ntprtptime_period_start = ntprtptime - ntprtptime_period_start; ++ negative_ntprtptime_period_start = TRUE; ++ } ++ ++ GST_TRACE ("Current NTP time %" GST_TIME_FORMAT " (RTP: %" G_GUINT64_FORMAT ++ ")", GST_TIME_ARGS (ntptime), ntprtptime); ++ GST_TRACE ("Current NTP RTP time period start %c%" GST_TIME_FORMAT ++ " (RTP: %c%" G_GUINT64_FORMAT ")", ++ negative_ntprtptime_period_start ? '-' : '+', ++ GST_TIME_ARGS (gst_util_uint64_scale (ntprtptime_period_start, ++ GST_SECOND, jbuf->clock_rate)), ++ negative_ntprtptime_period_start ? '-' : '+', ntprtptime_period_start); ++ GST_TRACE ("Current NTP RTP time related to period start %" GST_TIME_FORMAT ++ " (RTP: %" G_GUINT64_FORMAT ")", ++ GST_TIME_ARGS (gst_util_uint64_scale (ntprtptime, GST_SECOND, ++ jbuf->clock_rate)), ntprtptime); ++ + /* Check for wraparounds, we assume that the diff between current RTP +- * timestamp and current media clock time can't be bigger than +- * 2**31 clock units */ +- if (ntprtptime > rtptime_tmp && ntprtptime - rtptime_tmp >= 0x80000000) +- rtptime_tmp += G_GUINT64_CONSTANT (0x100000000); +- else if (rtptime_tmp > ntprtptime && rtptime_tmp - ntprtptime >= 0x80000000) +- ntprtptime += G_GUINT64_CONSTANT (0x100000000); +- +- if (ntprtptime > rtptime_tmp) +- ntptime -= +- gst_util_uint64_scale (ntprtptime - rtptime_tmp, GST_SECOND, +- jbuf->clock_rate); +- else +- ntptime += +- gst_util_uint64_scale (rtptime_tmp - ntprtptime, GST_SECOND, +- jbuf->clock_rate); ++ * timestamp and current media clock time can't be bigger than 2**31 clock ++ * rate units. If it is bigger then get closer to it by moving one RTP ++ * timestamp period into the future or into the past. ++ * ++ * E.g. 
++ * current NTP: 0x_______5 fffffffe ++ * packet RTP: 0x 00000001 ++ * => packet NTP: 0x_______6 00000001 ++ * ++ * current NTP: 0x_______5 00000001 ++ * packet RTP: 0x fffffffe ++ * => packet NTP: 0x_______4 fffffffe ++ * ++ */ ++ if (ntprtptime > rtptime && ntprtptime - rtptime >= 0x80000000) { ++ if (negative_ntprtptime_period_start) { ++ negative_ntprtptime_period_start = FALSE; ++ g_assert (ntprtptime_period_start <= 0x100000000); ++ ntprtptime_period_start = 0x100000000 - ntprtptime_period_start; ++ } else { ++ ntprtptime_period_start += 0x100000000; ++ } ++ } else if (rtptime > ntprtptime && rtptime - ntprtptime >= 0x80000000) { ++ if (negative_ntprtptime_period_start) { ++ ntprtptime_period_start += 0x100000000; ++ } else if (ntprtptime_period_start < 0x100000000) { ++ negative_ntprtptime_period_start = TRUE; ++ ntprtptime_period_start = 0x100000000 - ntprtptime_period_start; ++ } else { ++ ntprtptime_period_start -= 0x100000000; ++ } ++ } + ++ GST_TRACE ("Wraparound adjusted NTP RTP time period start %c%" ++ GST_TIME_FORMAT " (RTP: %c%" G_GUINT64_FORMAT ")", ++ negative_ntprtptime_period_start ? '-' : '+', ++ GST_TIME_ARGS (gst_util_uint64_scale (ntprtptime_period_start, ++ GST_SECOND, jbuf->clock_rate)), ++ negative_ntprtptime_period_start ? '-' : '+', ntprtptime_period_start); ++ ++ /* Packet timestamp according to the NTP clock in RTP time units. ++ * Note that this does not include any inaccuracy caused by the estimation ++ * of the NTP clock unless it is more than 2**31 RTP time units off. */ ++ if (negative_ntprtptime_period_start) { ++ if (rtptime >= ntprtptime_period_start) { ++ ntprtptime = rtptime - ntprtptime_period_start; ++ } else { ++ /* Packet is timestamped before the NTP clock epoch! */ ++ ntprtptime = 0; ++ } ++ } else { ++ ntprtptime = ntprtptime_period_start + rtptime; ++ } ++ ++ /* Packet timestamp in nanoseconds according to the NTP clock. */ ++ ntptime = gst_util_uint64_scale (ntprtptime, GST_SECOND, jbuf->clock_rate); ++ ++ GST_DEBUG ("RFC7273 packet NTP time %" GST_TIME_FORMAT " (RTP: %" ++ G_GUINT64_FORMAT ")", GST_TIME_ARGS (ntptime), ntprtptime); ++ ++ *p_ntp_time = ntptime; ++ ++ /* Packet timestamp converted to the pipeline clock. ++ * Note that this includes again inaccuracy caused by the estimation of ++ * the NTP vs. pipeline clock. 
*/ + rtpsystime = + gst_clock_adjust_with_calibration (media_clock, ntptime, internal, + external, rate_num, rate_denom); ++ + /* All this assumes that the pipeline has enough additional + * latency to cover for the network delay */ + if (rtpsystime > base_time) +@@ -919,14 +998,8 @@ rtp_jitter_buffer_calculate_pts (RTPJitterBuffer * jbuf, GstClockTime dts, + else + pts = 0; + +- GST_DEBUG ("RFC7273 clock time %" GST_TIME_FORMAT ", ntptime %" +- GST_TIME_FORMAT ", ntprtptime %" G_GUINT64_FORMAT ", rtptime %" +- G_GUINT32_FORMAT ", base_time %" GST_TIME_FORMAT ", internal %" +- GST_TIME_FORMAT ", external %" GST_TIME_FORMAT ", out %" +- GST_TIME_FORMAT, GST_TIME_ARGS (rtpsystime), GST_TIME_ARGS (ntptime), +- ntprtptime, rtptime, GST_TIME_ARGS (base_time), +- GST_TIME_ARGS (internal), GST_TIME_ARGS (external), +- GST_TIME_ARGS (pts)); ++ GST_DEBUG ("Packet pipeline clock time %" GST_TIME_FORMAT ", PTS %" ++ GST_TIME_FORMAT, GST_TIME_ARGS (rtpsystime), GST_TIME_ARGS (pts)); + } else { + /* If we used the RFC7273 clock before and not anymore, + * we need to resync it later again */ +@@ -1265,7 +1338,8 @@ rtp_jitter_buffer_pop (RTPJitterBuffer * jbuf, gint * percent) + + /* let's clear the pointers so we can ensure we don't free items that are + * still in the jitterbuffer */ +- item->next = item->prev = NULL; ++ if (item) ++ item->next = item->prev = NULL; + + return (RTPJitterBufferItem *) item; + } +diff --git a/gst/rtpmanager/rtpjitterbuffer.h b/gst/rtpmanager/rtpjitterbuffer.h +index 8accee4b4..d0e60c275 100644 +--- a/gst/rtpmanager/rtpjitterbuffer.h ++++ b/gst/rtpmanager/rtpjitterbuffer.h +@@ -210,7 +210,7 @@ void rtp_jitter_buffer_get_sync (RTPJitterBuffer *jbuf, + + GstClockTime rtp_jitter_buffer_calculate_pts (RTPJitterBuffer * jbuf, GstClockTime dts, gboolean estimated_dts, + guint32 rtptime, GstClockTime base_time, gint gap, +- gboolean is_rtx); ++ gboolean is_rtx, GstClockTime * p_ntp_time); + + gboolean rtp_jitter_buffer_can_fast_start (RTPJitterBuffer * jbuf, gint num_packet); + +diff --git a/gst/rtpmanager/rtpsession.c b/gst/rtpmanager/rtpsession.c +index dcfb1d796..a73954e4b 100644 +--- a/gst/rtpmanager/rtpsession.c ++++ b/gst/rtpmanager/rtpsession.c +@@ -30,6 +30,7 @@ + #include + + #include "rtpsession.h" ++#include "gstrtputils.h" + + GST_DEBUG_CATEGORY (rtp_session_debug); + #define GST_CAT_DEFAULT rtp_session_debug +@@ -78,6 +79,9 @@ enum + #define DEFAULT_RTP_PROFILE GST_RTP_PROFILE_AVP + #define DEFAULT_RTCP_REDUCED_SIZE FALSE + #define DEFAULT_RTCP_DISABLE_SR_TIMESTAMP FALSE ++#define DEFAULT_FAVOR_NEW FALSE ++#define DEFAULT_TWCC_FEEDBACK_INTERVAL GST_CLOCK_TIME_NONE ++#define DEFAULT_UPDATE_NTP64_HEADER_EXT TRUE + + enum + { +@@ -103,9 +107,14 @@ enum + PROP_STATS, + PROP_RTP_PROFILE, + PROP_RTCP_REDUCED_SIZE, +- PROP_RTCP_DISABLE_SR_TIMESTAMP ++ PROP_RTCP_DISABLE_SR_TIMESTAMP, ++ PROP_TWCC_FEEDBACK_INTERVAL, ++ PROP_UPDATE_NTP64_HEADER_EXT, ++ PROP_LAST, + }; + ++static GParamSpec *properties[PROP_LAST]; ++ + /* update average packet size */ + #define INIT_AVG(avg, val) \ + (avg) = (val); +@@ -115,9 +124,6 @@ enum + else \ + (avg) = ((val) + (15 * (avg))) >> 4; + +- +-#define TWCC_EXTMAP_STR "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01" +- + /* GObject vmethods */ + static void rtp_session_finalize (GObject * object); + static void rtp_session_set_property (GObject * object, guint prop_id, +@@ -449,64 +455,62 @@ rtp_session_class_init (RTPSessionClass * klass) + G_TYPE_UINT, 4, G_TYPE_UINT, G_TYPE_UINT, G_TYPE_ARRAY, + GST_TYPE_BUFFER | 
G_SIGNAL_TYPE_STATIC_SCOPE); + +- g_object_class_install_property (gobject_class, PROP_INTERNAL_SSRC, ++ properties[PROP_INTERNAL_SSRC] = + g_param_spec_uint ("internal-ssrc", "Internal SSRC", +- "The internal SSRC used for the session (deprecated)", +- 0, G_MAXUINT, 0, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | +- GST_PARAM_DOC_SHOW_DEFAULT)); ++ "The internal SSRC used for the session (deprecated)", ++ 0, G_MAXUINT, 0, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_DOC_SHOW_DEFAULT); + +- g_object_class_install_property (gobject_class, PROP_INTERNAL_SOURCE, ++ properties[PROP_INTERNAL_SOURCE] = + g_param_spec_object ("internal-source", "Internal Source", +- "The internal source element of the session (deprecated)", +- RTP_TYPE_SOURCE, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ "The internal source element of the session (deprecated)", ++ RTP_TYPE_SOURCE, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS); + +- g_object_class_install_property (gobject_class, PROP_BANDWIDTH, ++ properties[PROP_BANDWIDTH] = + g_param_spec_double ("bandwidth", "Bandwidth", +- "The bandwidth of the session in bits per second (0 for auto-discover)", +- 0.0, G_MAXDOUBLE, DEFAULT_BANDWIDTH, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ "The bandwidth of the session in bits per second (0 for auto-discover)", ++ 0.0, G_MAXDOUBLE, DEFAULT_BANDWIDTH, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + +- g_object_class_install_property (gobject_class, PROP_RTCP_FRACTION, ++ properties[PROP_RTCP_FRACTION] = + g_param_spec_double ("rtcp-fraction", "RTCP Fraction", +- "The fraction of the bandwidth used for RTCP in bits per second (or as a real fraction of the RTP bandwidth if < 1)", +- 0.0, G_MAXDOUBLE, DEFAULT_RTCP_FRACTION, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ "The fraction of the bandwidth used for RTCP in bits per second (or as a real fraction of the RTP bandwidth if < 1)", ++ 0.0, G_MAXDOUBLE, DEFAULT_RTCP_FRACTION, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + +- g_object_class_install_property (gobject_class, PROP_RTCP_RR_BANDWIDTH, ++ properties[PROP_RTCP_RR_BANDWIDTH] = + g_param_spec_int ("rtcp-rr-bandwidth", "RTCP RR bandwidth", +- "The RTCP bandwidth used for receivers in bits per second (-1 = default)", +- -1, G_MAXINT, DEFAULT_RTCP_RR_BANDWIDTH, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ "The RTCP bandwidth used for receivers in bits per second (-1 = default)", ++ -1, G_MAXINT, DEFAULT_RTCP_RR_BANDWIDTH, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + +- g_object_class_install_property (gobject_class, PROP_RTCP_RS_BANDWIDTH, ++ properties[PROP_RTCP_RS_BANDWIDTH] = + g_param_spec_int ("rtcp-rs-bandwidth", "RTCP RS bandwidth", +- "The RTCP bandwidth used for senders in bits per second (-1 = default)", +- -1, G_MAXINT, DEFAULT_RTCP_RS_BANDWIDTH, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ "The RTCP bandwidth used for senders in bits per second (-1 = default)", ++ -1, G_MAXINT, DEFAULT_RTCP_RS_BANDWIDTH, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + +- g_object_class_install_property (gobject_class, PROP_RTCP_MTU, ++ properties[PROP_RTCP_MTU] = + g_param_spec_uint ("rtcp-mtu", "RTCP MTU", +- "The maximum size of the RTCP packets", +- 16, G_MAXINT16, DEFAULT_RTCP_MTU, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ "The maximum size of the RTCP packets", ++ 16, G_MAXINT16, DEFAULT_RTCP_MTU, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + +- g_object_class_install_property (gobject_class, PROP_SDES, ++ properties[PROP_SDES] = + g_param_spec_boxed 
("sdes", "SDES", +- "The SDES items of this session", +- GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS +- | GST_PARAM_DOC_SHOW_DEFAULT)); ++ "The SDES items of this session", ++ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS ++ | GST_PARAM_DOC_SHOW_DEFAULT); + +- g_object_class_install_property (gobject_class, PROP_NUM_SOURCES, ++ properties[PROP_NUM_SOURCES] = + g_param_spec_uint ("num-sources", "Num Sources", +- "The number of sources in the session", 0, G_MAXUINT, +- DEFAULT_NUM_SOURCES, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ "The number of sources in the session", 0, G_MAXUINT, ++ DEFAULT_NUM_SOURCES, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS); + +- g_object_class_install_property (gobject_class, PROP_NUM_ACTIVE_SOURCES, ++ properties[PROP_NUM_ACTIVE_SOURCES] = + g_param_spec_uint ("num-active-sources", "Num Active Sources", +- "The number of active sources in the session", 0, G_MAXUINT, +- DEFAULT_NUM_ACTIVE_SOURCES, +- G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ "The number of active sources in the session", 0, G_MAXUINT, ++ DEFAULT_NUM_ACTIVE_SOURCES, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS); + /** + * RTPSource:sources + * +@@ -532,56 +536,54 @@ rtp_session_class_init (RTPSessionClass * klass) + * } + * ``` + */ +- g_object_class_install_property (gobject_class, PROP_SOURCES, ++ properties[PROP_SOURCES] = + g_param_spec_boxed ("sources", "Sources", +- "An array of all known sources in the session", +- G_TYPE_VALUE_ARRAY, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ "An array of all known sources in the session", ++ G_TYPE_VALUE_ARRAY, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS); + +- g_object_class_install_property (gobject_class, PROP_FAVOR_NEW, ++ properties[PROP_FAVOR_NEW] = + g_param_spec_boolean ("favor-new", "Favor new sources", +- "Resolve SSRC conflict in favor of new sources", FALSE, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ "Resolve SSRC conflict in favor of new sources", DEFAULT_FAVOR_NEW, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + +- g_object_class_install_property (gobject_class, PROP_RTCP_MIN_INTERVAL, ++ properties[PROP_RTCP_MIN_INTERVAL] = + g_param_spec_uint64 ("rtcp-min-interval", "Minimum RTCP interval", +- "Minimum interval between Regular RTCP packet (in ns)", +- 0, G_MAXUINT64, DEFAULT_RTCP_MIN_INTERVAL, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ "Minimum interval between Regular RTCP packet (in ns)", ++ 0, G_MAXUINT64, DEFAULT_RTCP_MIN_INTERVAL, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + +- g_object_class_install_property (gobject_class, +- PROP_RTCP_FEEDBACK_RETENTION_WINDOW, ++ properties[PROP_RTCP_FEEDBACK_RETENTION_WINDOW] = + g_param_spec_uint64 ("rtcp-feedback-retention-window", +- "RTCP Feedback retention window", +- "Duration during which RTCP Feedback packets are retained (in ns)", +- 0, G_MAXUINT64, DEFAULT_RTCP_FEEDBACK_RETENTION_WINDOW, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ "RTCP Feedback retention window", ++ "Duration during which RTCP Feedback packets are retained (in ns)", ++ 0, G_MAXUINT64, DEFAULT_RTCP_FEEDBACK_RETENTION_WINDOW, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + +- g_object_class_install_property (gobject_class, +- PROP_RTCP_IMMEDIATE_FEEDBACK_THRESHOLD, ++ properties[PROP_RTCP_IMMEDIATE_FEEDBACK_THRESHOLD] = + g_param_spec_uint ("rtcp-immediate-feedback-threshold", +- "RTCP Immediate Feedback threshold", +- "The maximum number of members of a RTP session for which immediate" +- " feedback is used (DEPRECATED: has no effect and is 
not needed)", +- 0, G_MAXUINT, DEFAULT_RTCP_IMMEDIATE_FEEDBACK_THRESHOLD, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED)); ++ "RTCP Immediate Feedback threshold", ++ "The maximum number of members of a RTP session for which immediate" ++ " feedback is used (DEPRECATED: has no effect and is not needed)", ++ 0, G_MAXUINT, DEFAULT_RTCP_IMMEDIATE_FEEDBACK_THRESHOLD, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED); + +- g_object_class_install_property (gobject_class, PROP_PROBATION, ++ properties[PROP_PROBATION] = + g_param_spec_uint ("probation", "Number of probations", +- "Consecutive packet sequence numbers to accept the source", +- 0, G_MAXUINT, DEFAULT_PROBATION, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ "Consecutive packet sequence numbers to accept the source", ++ 0, G_MAXUINT, DEFAULT_PROBATION, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + +- g_object_class_install_property (gobject_class, PROP_MAX_DROPOUT_TIME, ++ properties[PROP_MAX_DROPOUT_TIME] = + g_param_spec_uint ("max-dropout-time", "Max dropout time", +- "The maximum time (milliseconds) of missing packets tolerated.", +- 0, G_MAXUINT, DEFAULT_MAX_DROPOUT_TIME, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ "The maximum time (milliseconds) of missing packets tolerated.", ++ 0, G_MAXUINT, DEFAULT_MAX_DROPOUT_TIME, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + +- g_object_class_install_property (gobject_class, PROP_MAX_MISORDER_TIME, ++ properties[PROP_MAX_MISORDER_TIME] = + g_param_spec_uint ("max-misorder-time", "Max misorder time", +- "The maximum time (milliseconds) of misordered packets tolerated.", +- 0, G_MAXUINT, DEFAULT_MAX_MISORDER_TIME, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ "The maximum time (milliseconds) of misordered packets tolerated.", ++ 0, G_MAXUINT, DEFAULT_MAX_MISORDER_TIME, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + + /** + * RTPSession:stats: +@@ -598,21 +600,20 @@ rtp_session_class_init (RTPSessionClass * klass) + * + * Since: 1.4 + */ +- g_object_class_install_property (gobject_class, PROP_STATS, ++ properties[PROP_STATS] = + g_param_spec_boxed ("stats", "Statistics", +- "Various statistics", GST_TYPE_STRUCTURE, +- G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); ++ "Various statistics", GST_TYPE_STRUCTURE, ++ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS); + +- g_object_class_install_property (gobject_class, PROP_RTP_PROFILE, ++ properties[PROP_RTP_PROFILE] = + g_param_spec_enum ("rtp-profile", "RTP Profile", +- "RTP profile to use for this session", GST_TYPE_RTP_PROFILE, +- DEFAULT_RTP_PROFILE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ "RTP profile to use for this session", GST_TYPE_RTP_PROFILE, ++ DEFAULT_RTP_PROFILE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + +- g_object_class_install_property (gobject_class, PROP_RTCP_REDUCED_SIZE, ++ properties[PROP_RTCP_REDUCED_SIZE] = + g_param_spec_boolean ("rtcp-reduced-size", "RTCP Reduced Size", +- "Use Reduced Size RTCP for feedback packets", +- DEFAULT_RTCP_REDUCED_SIZE, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ "Use Reduced Size RTCP for feedback packets", ++ DEFAULT_RTCP_REDUCED_SIZE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); + + /** + * RTPSession:disable-sr-timestamp: +@@ -621,13 +622,45 @@ rtp_session_class_init (RTPSessionClass * klass) + * + * Since: 1.16 + */ +- g_object_class_install_property (gobject_class, +- PROP_RTCP_DISABLE_SR_TIMESTAMP, ++ properties[PROP_RTCP_DISABLE_SR_TIMESTAMP] = + g_param_spec_boolean ("disable-sr-timestamp", +- "Disable 
Sender Report Timestamp", +- "Whether sender reports should be timestamped", +- DEFAULT_RTCP_DISABLE_SR_TIMESTAMP, +- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); ++ "Disable Sender Report Timestamp", ++ "Whether sender reports should be timestamped", ++ DEFAULT_RTCP_DISABLE_SR_TIMESTAMP, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); ++ ++ /** ++ * RTPSession:twcc-feedback-interval: ++ * ++ * The interval to send TWCC reports on. ++ * This overrides the default behavior of sending reports ++ * based on marker-bits. ++ * ++ * Since: 1.20 ++ */ ++ properties[PROP_TWCC_FEEDBACK_INTERVAL] = ++ g_param_spec_uint64 ("twcc-feedback-interval", ++ "TWCC Feedback Interval", ++ "The interval to send TWCC reports on", ++ 0, G_MAXUINT64, DEFAULT_TWCC_FEEDBACK_INTERVAL, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); ++ ++ /** ++ * RTPSession:update-ntp64-header-ext: ++ * ++ * Whether RTP NTP header extension should be updated with actual ++ * NTP time. If not, use the NTP time from buffer timestamp metadata ++ * ++ * Since: 1.22 ++ */ ++ properties[PROP_UPDATE_NTP64_HEADER_EXT] = ++ g_param_spec_boolean ("update-ntp64-header-ext", ++ "Update NTP-64 RTP Header Extension", ++ "Whether RTP NTP header extension should be updated with actual NTP time", ++ DEFAULT_UPDATE_NTP64_HEADER_EXT, ++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS); ++ ++ g_object_class_install_properties (gobject_class, PROP_LAST, properties); + + klass->get_source_by_ssrc = + GST_DEBUG_FUNCPTR (rtp_session_get_source_by_ssrc); +@@ -672,9 +705,12 @@ rtp_session_init (RTPSession * sess) + sess->header_len = UDP_IP_HEADER_OVERHEAD; + sess->mtu = DEFAULT_RTCP_MTU; + ++ sess->update_ntp64_header_ext = DEFAULT_UPDATE_NTP64_HEADER_EXT; ++ + sess->probation = DEFAULT_PROBATION; + sess->max_dropout_time = DEFAULT_MAX_DROPOUT_TIME; + sess->max_misorder_time = DEFAULT_MAX_MISORDER_TIME; ++ sess->favor_new = DEFAULT_FAVOR_NEW; + + /* some default SDES entries */ + sess->sdes = gst_structure_new_empty ("application/x-rtp-source-sdes"); +@@ -907,6 +943,13 @@ rtp_session_set_property (GObject * object, guint prop_id, + case PROP_RTCP_DISABLE_SR_TIMESTAMP: + sess->timestamp_sender_reports = !g_value_get_boolean (value); + break; ++ case PROP_TWCC_FEEDBACK_INTERVAL: ++ rtp_twcc_manager_set_feedback_interval (sess->twcc, ++ g_value_get_uint64 (value)); ++ break; ++ case PROP_UPDATE_NTP64_HEADER_EXT: ++ sess->update_ntp64_header_ext = g_value_get_boolean (value); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -989,6 +1032,13 @@ rtp_session_get_property (GObject * object, guint prop_id, + case PROP_RTCP_DISABLE_SR_TIMESTAMP: + g_value_set_boolean (value, !sess->timestamp_sender_reports); + break; ++ case PROP_TWCC_FEEDBACK_INTERVAL: ++ g_value_set_uint64 (value, ++ rtp_twcc_manager_get_feedback_interval (sess->twcc)); ++ break; ++ case PROP_UPDATE_NTP64_HEADER_EXT: ++ g_value_set_boolean (value, sess->update_ntp64_header_ext); ++ break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; +@@ -1139,6 +1189,7 @@ rtp_session_reset (RTPSession * sess) + { + g_return_if_fail (RTP_IS_SESSION (sess)); + ++ RTP_SESSION_LOCK (sess); + /* remove all sources */ + g_hash_table_remove_all (sess->ssrcs[sess->mask_idx]); + sess->total_sources = 0; +@@ -1167,6 +1218,7 @@ rtp_session_reset (RTPSession * sess) + g_list_free_full (sess->conflicting_addresses, + (GDestroyNotify) rtp_conflicting_address_free); + sess->conflicting_addresses = NULL; ++ RTP_SESSION_UNLOCK (sess); + } + + /** +@@ 
-1199,9 +1251,9 @@ rtp_session_set_callbacks (RTPSession * sess, RTPSessionCallbacks * callbacks, + sess->callbacks.sync_rtcp = callbacks->sync_rtcp; + sess->sync_rtcp_user_data = user_data; + } +- if (callbacks->clock_rate) { +- sess->callbacks.clock_rate = callbacks->clock_rate; +- sess->clock_rate_user_data = user_data; ++ if (callbacks->caps) { ++ sess->callbacks.caps = callbacks->caps; ++ sess->caps_user_data = user_data; + } + if (callbacks->reconsider) { + sess->callbacks.reconsider = callbacks->reconsider; +@@ -1306,7 +1358,7 @@ rtp_session_set_sync_rtcp_callback (RTPSession * sess, + } + + /** +- * rtp_session_set_clock_rate_callback: ++ * rtp_session_set_caps_callback: + * @sess: an #RTPSession + * @callback: callback to set + * @user_data: user data passed in the callback +@@ -1314,13 +1366,13 @@ rtp_session_set_sync_rtcp_callback (RTPSession * sess, + * Configure only the clock_rate callback to be notified of the clock_rate action. + */ + void +-rtp_session_set_clock_rate_callback (RTPSession * sess, +- RTPSessionClockRate callback, gpointer user_data) ++rtp_session_set_caps_callback (RTPSession * sess, ++ RTPSessionCaps callback, gpointer user_data) + { + g_return_if_fail (RTP_IS_SESSION (sess)); + +- sess->callbacks.clock_rate = callback; +- sess->clock_rate_user_data = user_data; ++ sess->callbacks.caps = callback; ++ sess->caps_user_data = user_data; + } + + /** +@@ -1524,30 +1576,26 @@ source_push_rtp (RTPSource * source, gpointer data, RTPSession * session) + return result; + } + +-static gint +-source_clock_rate (RTPSource * source, guint8 pt, RTPSession * session) ++static GstCaps * ++source_caps (RTPSource * source, guint8 pt, RTPSession * session) + { +- gint result; ++ GstCaps *result = NULL; + + RTP_SESSION_UNLOCK (session); + +- if (session->callbacks.clock_rate) +- result = +- session->callbacks.clock_rate (session, pt, +- session->clock_rate_user_data); +- else +- result = -1; ++ if (session->callbacks.caps) ++ result = session->callbacks.caps (session, pt, session->caps_user_data); + + RTP_SESSION_LOCK (session); + +- GST_DEBUG ("got clock-rate %d for pt %d", result, pt); ++ GST_DEBUG ("got caps %" GST_PTR_FORMAT " for pt %d", result, pt); + + return result; + } + + static RTPSourceCallbacks callbacks = { + (RTPSourcePushRTP) source_push_rtp, +- (RTPSourceClockRate) source_clock_rate, ++ (RTPSourceCaps) source_caps, + }; + + +@@ -1899,7 +1947,8 @@ obtain_internal_source (RTPSession * sess, guint32 ssrc, gboolean * created, + + source->validated = TRUE; + source->internal = TRUE; +- source->probation = FALSE; ++ source->probation = 0; ++ source->curr_probation = 0; + rtp_source_set_sdes_struct (source, gst_structure_copy (sess->sdes)); + rtp_source_set_callbacks (source, &callbacks, sess); + +@@ -2094,6 +2143,25 @@ update_packet (GstBuffer ** buffer, guint idx, RTPPacketInfo * pinfo) + pinfo->header_ext = gst_rtp_buffer_get_extension_bytes (&rtp, + &pinfo->header_ext_bit_pattern); + } ++ ++ if (pinfo->ntp64_ext_id != 0 && pinfo->send && !pinfo->have_ntp64_ext) { ++ guint8 *data; ++ guint size; ++ ++ /* Remember here that there is a 64-bit NTP header extension on this buffer ++ * or any of the other buffers in the buffer list. ++ * Later we update this after making the buffer(list) writable. 
++ */ ++ if ((gst_rtp_buffer_get_extension_onebyte_header (&rtp, ++ pinfo->ntp64_ext_id, 0, (gpointer *) & data, &size) ++ && size == 8) ++ || (gst_rtp_buffer_get_extension_twobytes_header (&rtp, NULL, ++ pinfo->ntp64_ext_id, 0, (gpointer *) & data, &size) ++ && size == 8)) { ++ pinfo->have_ntp64_ext = TRUE; ++ } ++ } ++ + gst_rtp_buffer_unmap (&rtp); + } + +@@ -2142,15 +2210,19 @@ update_packet_info (RTPSession * sess, RTPPacketInfo * pinfo, + pinfo->payload_len = 0; + pinfo->packets = 0; + pinfo->marker = FALSE; ++ pinfo->ntp64_ext_id = send ? sess->send_ntp64_ext_id : 0; ++ pinfo->have_ntp64_ext = FALSE; + + if (is_list) { + GstBufferList *list = GST_BUFFER_LIST_CAST (data); + res = + gst_buffer_list_foreach (list, (GstBufferListFunc) update_packet, + pinfo); ++ pinfo->arrival_time = GST_CLOCK_TIME_NONE; + } else { + GstBuffer *buffer = GST_BUFFER_CAST (data); + res = update_packet (&buffer, 0, pinfo); ++ pinfo->arrival_time = GST_BUFFER_DTS (buffer); + } + + return res; +@@ -2169,22 +2241,6 @@ clean_packet_info (RTPPacketInfo * pinfo) + g_bytes_unref (pinfo->header_ext); + } + +-static gint32 +-packet_info_get_twcc_seqnum (RTPPacketInfo * pinfo, guint8 ext_id) +-{ +- gint32 val = -1; +- gpointer data; +- guint size; +- +- if (pinfo->header_ext && +- gst_rtp_buffer_get_extension_onebyte_header_from_bytes (pinfo->header_ext, +- pinfo->header_ext_bit_pattern, ext_id, 0, &data, &size)) { +- if (size == 2) +- val = GST_READ_UINT16_BE (data); +- } +- return val; +-} +- + static gboolean + source_update_active (RTPSession * sess, RTPSource * source, + gboolean prevactive) +@@ -2210,16 +2266,7 @@ source_update_active (RTPSession * sess, RTPSource * source, + static void + process_twcc_packet (RTPSession * sess, RTPPacketInfo * pinfo) + { +- gint32 twcc_seqnum; +- +- if (sess->twcc_recv_ext_id == 0) +- return; +- +- twcc_seqnum = packet_info_get_twcc_seqnum (pinfo, sess->twcc_recv_ext_id); +- if (twcc_seqnum == -1) +- return; +- +- if (rtp_twcc_manager_recv_packet (sess->twcc, twcc_seqnum, pinfo)) { ++ if (rtp_twcc_manager_recv_packet (sess->twcc, pinfo)) { + RTP_SESSION_UNLOCK (sess); + + /* TODO: find a better rational for this number, and possibly tune it based +@@ -2393,7 +2440,7 @@ rtp_session_process_rb (RTPSession * sess, RTPSource * source, + * the sender of the RTCP message. We could also compare our stats against + * the other sender to see if we are better or worse. 
*/ + /* FIXME, need to keep track who the RB block is from */ +- rtp_source_process_rb (source, pinfo->ntpnstime, fractionlost, ++ rtp_source_process_rb (source, ssrc, pinfo->ntpnstime, fractionlost, + packetslost, exthighestseq, jitter, lsr, dlsr); + } + } +@@ -2722,11 +2769,16 @@ rtp_session_process_app (RTPSession * sess, GstRTCPPacket * packet, + + static gboolean + rtp_session_request_local_key_unit (RTPSession * sess, RTPSource * src, +- guint32 media_ssrc, gboolean fir, GstClockTime current_time) ++ const guint32 * ssrcs, guint num_ssrcs, gboolean fir, ++ GstClockTime current_time) + { + guint32 round_trip = 0; ++ gint i; ++ ++ g_return_val_if_fail (ssrcs != NULL && num_ssrcs > 0, FALSE); + +- rtp_source_get_last_rb (src, NULL, NULL, NULL, NULL, NULL, NULL, &round_trip); ++ rtp_source_get_last_rb (src, NULL, NULL, NULL, NULL, NULL, NULL, NULL, ++ &round_trip); + + if (src->last_keyframe_request != GST_CLOCK_TIME_NONE && round_trip) { + GstClockTime round_trip_in_ns = gst_util_uint64_scale (round_trip, +@@ -2749,14 +2801,17 @@ rtp_session_request_local_key_unit (RTPSession * sess, RTPSource * src, + + src->last_keyframe_request = current_time; + +- GST_LOG ("received %s request from %X about %X %p(%p)", fir ? "FIR" : "PLI", +- rtp_source_get_ssrc (src), media_ssrc, sess->callbacks.process_rtp, +- sess->callbacks.request_key_unit); ++ for (i = 0; i < num_ssrcs; ++i) { ++ GST_LOG ("received %s request from %X about %X %p(%p)", ++ fir ? "FIR" : "PLI", ++ rtp_source_get_ssrc (src), ssrcs[i], sess->callbacks.process_rtp, ++ sess->callbacks.request_key_unit); + +- RTP_SESSION_UNLOCK (sess); +- sess->callbacks.request_key_unit (sess, media_ssrc, fir, +- sess->request_key_unit_user_data); +- RTP_SESSION_LOCK (sess); ++ RTP_SESSION_UNLOCK (sess); ++ sess->callbacks.request_key_unit (sess, ssrcs[i], fir, ++ sess->request_key_unit_user_data); ++ RTP_SESSION_LOCK (sess); ++ } + + return TRUE; + } +@@ -2778,19 +2833,19 @@ rtp_session_process_pli (RTPSession * sess, guint32 sender_ssrc, + return; + } + +- rtp_session_request_local_key_unit (sess, src, media_ssrc, FALSE, ++ rtp_session_request_local_key_unit (sess, src, &media_ssrc, 1, FALSE, + current_time); + } + + static void + rtp_session_process_fir (RTPSession * sess, guint32 sender_ssrc, +- guint32 media_ssrc, guint8 * fci_data, guint fci_length, +- GstClockTime current_time) ++ guint8 * fci_data, guint fci_length, GstClockTime current_time) + { + RTPSource *src; + guint32 ssrc; + guint position = 0; +- gboolean our_request = FALSE; ++ guint32 ssrcs[32]; ++ guint num_ssrcs = 0; + + if (!sess->callbacks.request_key_unit) + return; +@@ -2828,15 +2883,14 @@ rtp_session_process_fir (RTPSession * sess, guint32 sender_ssrc, + if (own == NULL) + continue; + +- if (own->internal) { +- our_request = TRUE; +- break; ++ if (own->internal && num_ssrcs < 32) { ++ ssrcs[num_ssrcs++] = ssrc; + } + } +- if (!our_request) ++ if (num_ssrcs == 0) + return; + +- rtp_session_request_local_key_unit (sess, src, media_ssrc, TRUE, ++ rtp_session_request_local_key_unit (sess, src, ssrcs, num_ssrcs, TRUE, + current_time); + } + +@@ -2868,6 +2922,34 @@ rtp_session_process_nack (RTPSession * sess, guint32 sender_ssrc, + } + } + ++static void ++rtp_session_process_sr_req (RTPSession * sess, guint32 sender_ssrc, ++ guint32 media_ssrc) ++{ ++ RTPSource *src; ++ ++ /* Request a new SR in feedback profiles ASAP */ ++ if (sess->rtp_profile != GST_RTP_PROFILE_AVPF ++ && sess->rtp_profile != GST_RTP_PROFILE_SAVPF) ++ return; ++ ++ src = find_source (sess, sender_ssrc); ++ /* 
Our own RTCP packet */ ++ if (src && src->internal) ++ return; ++ ++ src = find_source (sess, media_ssrc); ++ /* Not an SSRC we're producing */ ++ if (!src || !src->internal) ++ return; ++ ++ GST_DEBUG_OBJECT (sess, "Handling RTCP-SR-REQ"); ++ /* FIXME: 5s max_delay hard-coded here as we have to give some ++ * high enough value */ ++ sess->sr_req_pending = TRUE; ++ rtp_session_send_rtcp (sess, 5 * GST_SECOND); ++} ++ + static void + rtp_session_process_twcc (RTPSession * sess, guint32 sender_ssrc, + guint32 media_ssrc, guint8 * fci_data, guint fci_length) +@@ -2973,8 +3055,8 @@ rtp_session_process_feedback (RTPSession * sess, GstRTCPPacket * packet, + case GST_RTCP_PSFB_TYPE_FIR: + if (src) + src->stats.recv_fir_count++; +- rtp_session_process_fir (sess, sender_ssrc, media_ssrc, fci_data, +- fci_length, current_time); ++ rtp_session_process_fir (sess, sender_ssrc, fci_data, fci_length, ++ current_time); + break; + default: + break; +@@ -2988,6 +3070,9 @@ rtp_session_process_feedback (RTPSession * sess, GstRTCPPacket * packet, + rtp_session_process_nack (sess, sender_ssrc, media_ssrc, + fci_data, fci_length, current_time); + break; ++ case GST_RTCP_RTPFB_TYPE_RTCP_SR_REQ: ++ rtp_session_process_sr_req (sess, sender_ssrc, media_ssrc); ++ break; + case GST_RTCP_RTPFB_TYPE_TWCC: + rtp_session_process_twcc (sess, sender_ssrc, media_ssrc, + fci_data, fci_length); +@@ -3021,7 +3106,7 @@ rtp_session_process_rtcp (RTPSession * sess, GstBuffer * buffer, + GstClockTime current_time, GstClockTime running_time, guint64 ntpnstime) + { + GstRTCPPacket packet; +- gboolean more, is_bye = FALSE, do_sync = FALSE; ++ gboolean more, is_bye = FALSE, do_sync = FALSE, has_report = FALSE; + RTPPacketInfo pinfo = { 0, }; + GstFlowReturn result = GST_FLOW_OK; + GstRTCPBuffer rtcp = { NULL, }; +@@ -3052,9 +3137,11 @@ rtp_session_process_rtcp (RTPSession * sess, GstBuffer * buffer, + + switch (type) { + case GST_RTCP_TYPE_SR: ++ has_report = TRUE; + rtp_session_process_sr (sess, &packet, &pinfo, &do_sync); + break; + case GST_RTCP_TYPE_RR: ++ has_report = TRUE; + rtp_session_process_rr (sess, &packet, &pinfo); + break; + case GST_RTCP_TYPE_SDES: +@@ -3102,6 +3189,10 @@ rtp_session_process_rtcp (RTPSession * sess, GstBuffer * buffer, + sess->stats.avg_rtcp_packet_size, pinfo.bytes); + RTP_SESSION_UNLOCK (sess); + ++ if (has_report) { ++ g_object_notify_by_pspec (G_OBJECT (sess), properties[PROP_STATS]); ++ } ++ + pinfo.data = NULL; + clean_packet_info (&pinfo); + +@@ -3123,29 +3214,6 @@ invalid_packet: + } + } + +-static guint8 +-_get_extmap_id_for_attribute (const GstStructure * s, const gchar * ext_name) +-{ +- guint i; +- guint8 extmap_id = 0; +- guint n_fields = gst_structure_n_fields (s); +- +- for (i = 0; i < n_fields; i++) { +- const gchar *field_name = gst_structure_nth_field_name (s, i); +- if (g_str_has_prefix (field_name, "extmap-")) { +- const gchar *str = gst_structure_get_string (s, field_name); +- if (str && g_strcmp0 (str, ext_name) == 0) { +- gint64 id = g_ascii_strtoll (field_name + 7, NULL, 10); +- if (id > 0 && id < 15) { +- extmap_id = id; +- break; +- } +- } +- } +- } +- return extmap_id; +-} +- + /** + * rtp_session_update_send_caps: + * @sess: an #RTPSession +@@ -3176,7 +3244,7 @@ rtp_session_update_send_caps (RTPSession * sess, GstCaps * caps) + sess->internal_ssrc_set = TRUE; + sess->internal_ssrc_from_caps_or_property = TRUE; + if (source) { +- rtp_source_update_caps (source, caps); ++ rtp_source_update_send_caps (source, caps); + + if (created) + on_new_sender_ssrc (sess, source); +@@ 
-3188,7 +3256,7 @@ rtp_session_update_send_caps (RTPSession * sess, GstCaps * caps) + source = + obtain_internal_source (sess, ssrc, &created, GST_CLOCK_TIME_NONE); + if (source) { +- rtp_source_update_caps (source, caps); ++ rtp_source_update_send_caps (source, caps); + + if (created) + on_new_sender_ssrc (sess, source); +@@ -3201,28 +3269,150 @@ rtp_session_update_send_caps (RTPSession * sess, GstCaps * caps) + sess->internal_ssrc_from_caps_or_property = FALSE; + } + +- sess->twcc_send_ext_id = _get_extmap_id_for_attribute (s, TWCC_EXTMAP_STR); +- if (sess->twcc_send_ext_id > 0) { +- GST_INFO ("TWCC enabled for send using extension id: %u", +- sess->twcc_send_ext_id); +- } ++ sess->send_ntp64_ext_id = ++ gst_rtp_get_extmap_id_for_attribute (s, ++ GST_RTP_HDREXT_BASE GST_RTP_HDREXT_NTP_64); ++ ++ rtp_twcc_manager_parse_send_ext_id (sess->twcc, s); + } + + static void +-send_twcc_packet (RTPSession * sess, RTPPacketInfo * pinfo) ++update_ntp64_header_ext_data (RTPPacketInfo * pinfo, GstBuffer * buffer) + { +- gint32 twcc_seqnum; ++ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; + +- if (sess->twcc_send_ext_id == 0) +- return; ++ if (gst_rtp_buffer_map (buffer, GST_MAP_READWRITE, &rtp)) { ++ guint16 bits; ++ guint8 *data; ++ guint wordlen; + +- twcc_seqnum = packet_info_get_twcc_seqnum (pinfo, sess->twcc_send_ext_id); +- if (twcc_seqnum == -1) +- return; ++ if (gst_rtp_buffer_get_extension_data (&rtp, &bits, (gpointer *) & data, ++ &wordlen)) { ++ gsize len = wordlen * 4; ++ ++ /* One-byte header */ ++ if (bits == 0xBEDE) { ++ /* One-byte header extension */ ++ while (TRUE) { ++ guint8 ext_id, ext_len; ++ ++ if (len < 1) ++ break; ++ ++ ext_id = GST_READ_UINT8 (data) >> 4; ++ ext_len = (GST_READ_UINT8 (data) & 0xF) + 1; ++ data += 1; ++ len -= 1; ++ if (ext_id == 0) { ++ /* Skip padding */ ++ continue; ++ } else if (ext_id == 15) { ++ /* Stop parsing */ ++ break; ++ } ++ ++ /* extension doesn't fit into the header */ ++ if (ext_len > len) ++ break; ++ ++ if (ext_id == pinfo->ntp64_ext_id && ext_len == 8) { ++ if (pinfo->ntpnstime != GST_CLOCK_TIME_NONE) { ++ guint64 ntptime = gst_util_uint64_scale (pinfo->ntpnstime, ++ G_GUINT64_CONSTANT (1) << 32, ++ GST_SECOND); ++ ++ GST_WRITE_UINT64_BE (data, ntptime); ++ } else { ++ /* Replace extension with padding */ ++ memset (data - 1, 0, 1 + ext_len); ++ } ++ } + +- rtp_twcc_manager_send_packet (sess->twcc, twcc_seqnum, pinfo); ++ /* skip to the next extension */ ++ data += ext_len; ++ len -= ext_len; ++ } ++ } else if ((bits >> 4) == 0x100) { ++ /* Two-byte header extension */ ++ ++ while (TRUE) { ++ guint8 ext_id, ext_len; ++ ++ if (len < 1) ++ break; ++ ++ ext_id = GST_READ_UINT8 (data); ++ data += 1; ++ len -= 1; ++ if (ext_id == 0) { ++ /* Skip padding */ ++ continue; ++ } ++ ++ ext_len = GST_READ_UINT8 (data); ++ data += 1; ++ len -= 1; ++ ++ /* extension doesn't fit into the header */ ++ if (ext_len > len) ++ break; ++ ++ if (ext_id == pinfo->ntp64_ext_id && ext_len == 8) { ++ if (pinfo->ntpnstime != GST_CLOCK_TIME_NONE) { ++ guint64 ntptime = gst_util_uint64_scale (pinfo->ntpnstime, ++ G_GUINT64_CONSTANT (1) << 32, ++ GST_SECOND); ++ ++ GST_WRITE_UINT64_BE (data, ntptime); ++ } else { ++ /* Replace extension with padding */ ++ memset (data - 2, 0, 2 + ext_len); ++ } ++ } ++ ++ /* skip to the next extension */ ++ data += ext_len; ++ len -= ext_len; ++ } ++ } ++ } ++ gst_rtp_buffer_unmap (&rtp); ++ } + } + ++static void ++update_ntp64_header_ext (RTPPacketInfo * pinfo) ++{ ++ /* Early return if we don't know the header extension id or the packets 
++ * don't contain the header extension */ ++ if (pinfo->ntp64_ext_id == 0 || !pinfo->have_ntp64_ext) ++ return; ++ ++ /* If no NTP time is known then the header extension will be replaced with ++ * padding, otherwise it will be updated */ ++ GST_TRACE ++ ("Updating NTP-64 header extension for SSRC %08x packet with RTP time %u and running time %" ++ GST_TIME_FORMAT " to %" GST_TIME_FORMAT, pinfo->ssrc, pinfo->rtptime, ++ GST_TIME_ARGS (pinfo->running_time), GST_TIME_ARGS (pinfo->ntpnstime)); ++ ++ if (GST_IS_BUFFER_LIST (pinfo->data)) { ++ GstBufferList *list; ++ guint i = 0; ++ ++ pinfo->data = gst_buffer_list_make_writable (pinfo->data); ++ ++ list = GST_BUFFER_LIST (pinfo->data); ++ ++ for (i = 0; i < gst_buffer_list_length (list); i++) { ++ GstBuffer *buffer = gst_buffer_list_get_writable (list, i); ++ ++ update_ntp64_header_ext_data (pinfo, buffer); ++ } ++ } else { ++ pinfo->data = gst_buffer_make_writable (pinfo->data); ++ update_ntp64_header_ext_data (pinfo, pinfo->data); ++ } ++} + + /** + * rtp_session_send_rtp: +@@ -3239,7 +3429,7 @@ send_twcc_packet (RTPSession * sess, RTPPacketInfo * pinfo) + */ + GstFlowReturn + rtp_session_send_rtp (RTPSession * sess, gpointer data, gboolean is_list, +- GstClockTime current_time, GstClockTime running_time) ++ GstClockTime current_time, GstClockTime running_time, guint64 ntpnstime) + { + GstFlowReturn result; + RTPSource *source; +@@ -3255,10 +3445,14 @@ rtp_session_send_rtp (RTPSession * sess, gpointer data, gboolean is_list, + + RTP_SESSION_LOCK (sess); + if (!update_packet_info (sess, &pinfo, TRUE, TRUE, is_list, data, +- current_time, running_time, -1)) ++ current_time, running_time, ntpnstime)) + goto invalid_packet; + +- send_twcc_packet (sess, &pinfo); ++ /* Update any 64-bit NTP header extensions with the actual NTP time here */ ++ if (sess->update_ntp64_header_ext) ++ update_ntp64_header_ext (&pinfo); ++ ++ rtp_twcc_manager_send_packet (sess->twcc, &pinfo); + + source = obtain_internal_source (sess, pinfo.ssrc, &created, current_time); + if (created) +@@ -3281,13 +3475,15 @@ rtp_session_send_rtp (RTPSession * sess, gpointer data, gboolean is_list, + GST_DEBUG ("Collision for SSRC %x, change our sender ssrc", pinfo.ssrc); + + rtp_session_have_conflict (sess, source, from, current_time); +- +- goto collision; + } + } else { + GST_LOG ("Ignoring collision on sent SSRC %x because remote source" + " doesn't have an address", pinfo.ssrc); + } ++ ++ /* the the sending source is not internal, we have to drop the packet, ++ or else we will end up receving it ourselves! */ ++ goto collision; + } + + prevsender = RTP_SOURCE_IS_SENDER (source); +@@ -3611,14 +3807,14 @@ session_start_rtcp (RTPSession * sess, ReportData * data) + + gst_rtcp_buffer_map (data->rtcp, GST_MAP_READWRITE, rtcp); + +- if (data->is_early && sess->reduced_size_rtcp) +- return; +- +- if (RTP_SOURCE_IS_SENDER (own)) { ++ if (RTP_SOURCE_IS_SENDER (own) && (!data->is_early || !sess->reduced_size_rtcp ++ || sess->sr_req_pending)) { + guint64 ntptime; + guint32 rtptime; + guint32 packet_count, octet_count; + ++ sess->sr_req_pending = FALSE; ++ + /* we are a sender, create SR */ + GST_DEBUG ("create SR for SSRC %08x", own->ssrc); + gst_rtcp_buffer_add_packet (rtcp, GST_RTCP_TYPE_SR, packet); +@@ -3635,7 +3831,7 @@ session_start_rtcp (RTPSession * sess, ReportData * data) + sess->timestamp_sender_reports ? ntptime : 0, + sess->timestamp_sender_reports ? 
rtptime : 0, + packet_count, octet_count); +- } else { ++ } else if (!data->is_early || !sess->reduced_size_rtcp) { + /* we are only receiver, create RR */ + GST_DEBUG ("create RR for SSRC %08x", own->ssrc); + gst_rtcp_buffer_add_packet (rtcp, GST_RTCP_TYPE_RR, packet); +@@ -3694,6 +3890,7 @@ session_report_blocks (const gchar * key, RTPSource * source, ReportData * data) + + /* store last generated RR packet */ + source->last_rr.is_valid = TRUE; ++ source->last_rr.ssrc = data->source->ssrc; + source->last_rr.fractionlost = fractionlost; + source->last_rr.packetslost = packetslost; + source->last_rr.exthighestseq = exthighestseq; +@@ -4002,7 +4199,8 @@ session_cleanup (const gchar * key, RTPSource * source, ReportData * data) + /* this is an internal source that is not using our suggested ssrc. + * since there must be another source using this ssrc, we can remove + * this one instead of making it a receiver forever */ +- if (source->ssrc != sess->suggested_ssrc) { ++ if (source->ssrc != sess->suggested_ssrc ++ && source->media_ssrc != sess->suggested_ssrc) { + rtp_source_mark_bye (source, "timed out"); + /* do not schedule bye here, since we are inside the RTCP timeout + * processing and scheduling bye will interfere with SR/RR sending */ +@@ -4283,6 +4481,8 @@ generate_twcc (const gchar * key, RTPSource * source, ReportData * data) + return; + } + ++ GST_DEBUG ("generating TWCC feedback for source %08x", source->ssrc); ++ + while ((buf = rtp_twcc_manager_get_feedback (sess->twcc, source->ssrc))) { + ReportOutput *output = g_slice_new (ReportOutput); + output->source = g_object_ref (source); +@@ -4300,6 +4500,7 @@ generate_rtcp (const gchar * key, RTPSource * source, ReportData * data) + RTPSession *sess = data->sess; + gboolean is_bye = FALSE; + ReportOutput *output; ++ gboolean sr_req_pending = sess->sr_req_pending; + + /* only generate RTCP for active internal sources */ + if (!source->internal || source->sent_bye) +@@ -4330,7 +4531,8 @@ generate_rtcp (const gchar * key, RTPSource * source, ReportData * data) + g_hash_table_foreach (sess->ssrcs[sess->mask_idx], + (GHFunc) session_report_blocks, data); + } +- if (!data->has_sdes && (!data->is_early || !sess->reduced_size_rtcp)) ++ if (!data->has_sdes && (!data->is_early || !sess->reduced_size_rtcp ++ || sr_req_pending)) + session_sdes (sess, data); + + if (data->have_fir) +@@ -4510,20 +4712,27 @@ rtp_session_on_timeout (RTPSession * sess, GstClockTime current_time, + /* check if all the buffers are empty after generation */ + all_empty = TRUE; + ++ /* Make a local copy of the hashtable. We need to do this because the ++ * generate_rtcp stage below releases the session lock. */ ++ table_copy = g_hash_table_new_full (NULL, NULL, NULL, ++ (GDestroyNotify) g_object_unref); ++ g_hash_table_foreach (sess->ssrcs[sess->mask_idx], ++ (GHFunc) clone_ssrcs_hashtable, table_copy); ++ + GST_DEBUG + ("doing RTCP generation %u for %u sources, early %d, may suppress %d", + sess->generation, data.num_to_report, data.is_early, data.may_suppress); + +- /* generate RTCP for all internal sources */ +- g_hash_table_foreach (sess->ssrcs[sess->mask_idx], +- (GHFunc) generate_rtcp, &data); ++ /* generate RTCP for all internal sources, this might release the ++ * session lock. 
*/ ++ g_hash_table_foreach (table_copy, (GHFunc) generate_rtcp, &data); + +- g_hash_table_foreach (sess->ssrcs[sess->mask_idx], +- (GHFunc) generate_twcc, &data); ++ g_hash_table_foreach (table_copy, (GHFunc) generate_twcc, &data); + + /* update the generation for all the sources that have been reported */ +- g_hash_table_foreach (sess->ssrcs[sess->mask_idx], +- (GHFunc) update_generation, &data); ++ g_hash_table_foreach (table_copy, (GHFunc) update_generation, &data); ++ ++ g_hash_table_destroy (table_copy); + + /* we keep track of the last report time in order to timeout inactive + * receivers or senders */ +@@ -4549,7 +4758,7 @@ done: + RTP_SESSION_UNLOCK (sess); + + /* notify about updated statistics */ +- g_object_notify (G_OBJECT (sess), "stats"); ++ g_object_notify_by_pspec (G_OBJECT (sess), properties[PROP_STATS]); + + /* push out the RTCP packets */ + while ((output = g_queue_pop_head (&data.output))) { +@@ -4916,10 +5125,5 @@ void + rtp_session_update_recv_caps_structure (RTPSession * sess, + const GstStructure * s) + { +- guint8 ext_id = _get_extmap_id_for_attribute (s, TWCC_EXTMAP_STR); +- if (ext_id > 0) { +- sess->twcc_recv_ext_id = ext_id; +- GST_INFO ("TWCC enabled for recv using extension id: %u", +- sess->twcc_recv_ext_id); +- } ++ rtp_twcc_manager_parse_recv_ext_id (sess->twcc, s); + } +diff --git a/gst/rtpmanager/rtpsession.h b/gst/rtpmanager/rtpsession.h +index 949fcc49b..84b2948dc 100644 +--- a/gst/rtpmanager/rtpsession.h ++++ b/gst/rtpmanager/rtpsession.h +@@ -97,16 +97,16 @@ typedef GstFlowReturn (*RTPSessionSendRTCP) (RTPSession *sess, RTPSource *src, G + typedef GstFlowReturn (*RTPSessionSyncRTCP) (RTPSession *sess, GstBuffer *buffer, gpointer user_data); + + /** +- * RTPSessionClockRate: ++ * RTPSessionCaps: + * @sess: an #RTPSession + * @payload: the payload + * @user_data: user data specified when registering + * +- * This callback will be called when @sess needs the clock-rate of @payload. ++ * This callback will be called when @sess needs the caps of @payload. + * +- * Returns: the clock-rate of @pt. ++ * Returns: the caps of @pt. 
+ */ +-typedef gint (*RTPSessionClockRate) (RTPSession *sess, guint8 payload, gpointer user_data); ++typedef GstCaps * (*RTPSessionCaps) (RTPSession *sess, guint8 payload, gpointer user_data); + + /** + * RTPSessionReconsider: +@@ -209,7 +209,7 @@ typedef struct { + RTPSessionSendRTP send_rtp; + RTPSessionSyncRTCP sync_rtcp; + RTPSessionSendRTCP send_rtcp; +- RTPSessionClockRate clock_rate; ++ RTPSessionCaps caps; + RTPSessionReconsider reconsider; + RTPSessionRequestKeyUnit request_key_unit; + RTPSessionRequestTime request_time; +@@ -280,6 +280,7 @@ struct _RTPSession { + + GstClockTime next_early_rtcp_time; + ++ gboolean sr_req_pending; + gboolean scheduled_bye; + + RTPSessionCallbacks callbacks; +@@ -287,7 +288,7 @@ struct _RTPSession { + gpointer send_rtp_user_data; + gpointer send_rtcp_user_data; + gpointer sync_rtcp_user_data; +- gpointer clock_rate_user_data; ++ gpointer caps_user_data; + gpointer reconsider_user_data; + gpointer request_key_unit_user_data; + gpointer request_time_user_data; +@@ -309,11 +310,14 @@ struct _RTPSession { + + gboolean timestamp_sender_reports; + ++ /* RFC6051 64-bit NTP header extension */ ++ guint8 send_ntp64_ext_id; ++ ++ gboolean update_ntp64_header_ext; ++ + /* Transport-wide cc-extension */ + RTPTWCCManager *twcc; + RTPTWCCStats *twcc_stats; +- guint8 twcc_recv_ext_id; +- guint8 twcc_send_ext_id; + }; + + /** +@@ -373,8 +377,8 @@ void rtp_session_set_send_rtcp_callback (RTPSession * sess, + void rtp_session_set_sync_rtcp_callback (RTPSession * sess, + RTPSessionSyncRTCP callback, + gpointer user_data); +-void rtp_session_set_clock_rate_callback (RTPSession * sess, +- RTPSessionClockRate callback, ++void rtp_session_set_caps_callback (RTPSession * sess, ++ RTPSessionCaps callback, + gpointer user_data); + void rtp_session_set_reconsider_callback (RTPSession * sess, + RTPSessionReconsider callback, +@@ -412,7 +416,8 @@ GstFlowReturn rtp_session_process_rtcp (RTPSession *sess, GstBuffer + /* processing packets for sending */ + void rtp_session_update_send_caps (RTPSession *sess, GstCaps *caps); + GstFlowReturn rtp_session_send_rtp (RTPSession *sess, gpointer data, gboolean is_list, +- GstClockTime current_time, GstClockTime running_time); ++ GstClockTime current_time, GstClockTime running_time, ++ guint64 ntpnstime); + + /* scheduling bye */ + void rtp_session_mark_all_bye (RTPSession *sess, const gchar *reason); +diff --git a/gst/rtpmanager/rtpsource.c b/gst/rtpmanager/rtpsource.c +index 12aa12549..c221c1f22 100644 +--- a/gst/rtpmanager/rtpsource.c ++++ b/gst/rtpmanager/rtpsource.c +@@ -373,6 +373,7 @@ rtp_source_create_stats (RTPSource * src) + gboolean internal = src->internal; + gchar *address_str; + gboolean have_rb; ++ guint32 ssrc = 0; + guint8 fractionlost = 0; + gint32 packetslost = 0; + guint32 exthighestseq = 0; +@@ -453,11 +454,12 @@ rtp_source_create_stats (RTPSource * src) + (guint) src->last_rr.dlsr, NULL); + + /* get the last RB */ +- have_rb = rtp_source_get_last_rb (src, &fractionlost, &packetslost, +- &exthighestseq, &jitter, &lsr, &dlsr, &round_trip); ++ have_rb = rtp_source_get_last_rb (src, &ssrc, &fractionlost, ++ &packetslost, &exthighestseq, &jitter, &lsr, &dlsr, &round_trip); + + gst_structure_set (s, + "have-rb", G_TYPE_BOOLEAN, have_rb, ++ "rb-ssrc", G_TYPE_UINT, ssrc, + "rb-fractionlost", G_TYPE_UINT, (guint) fractionlost, + "rb-packetslost", G_TYPE_INT, (gint) packetslost, + "rb-exthighestseq", G_TYPE_UINT, (guint) exthighestseq, +@@ -647,7 +649,7 @@ rtp_source_set_callbacks (RTPSource * src, RTPSourceCallbacks * cb, + 
g_return_if_fail (RTP_IS_SOURCE (src)); + + src->callbacks.push_rtp = cb->push_rtp; +- src->callbacks.clock_rate = cb->clock_rate; ++ src->callbacks.caps = cb->caps; + src->user_data = user_data; + } + +@@ -818,11 +820,12 @@ rtp_source_get_bye_reason (RTPSource * src) + * Parse @caps and store all relevant information in @source. + */ + void +-rtp_source_update_caps (RTPSource * src, GstCaps * caps) ++rtp_source_update_send_caps (RTPSource * src, GstCaps * caps) + { + GstStructure *s; + guint val; + gint ival; ++ guint ssrc, rtx_ssrc = -1; + gboolean rtx; + + /* nothing changed, return */ +@@ -831,7 +834,17 @@ rtp_source_update_caps (RTPSource * src, GstCaps * caps) + + s = gst_caps_get_structure (caps, 0); + +- rtx = (gst_structure_get_uint (s, "rtx-ssrc", &val) && val == src->ssrc); ++ if (!gst_structure_get_uint (s, "ssrc", &ssrc)) ++ return; ++ gst_structure_get_uint (s, "rtx-ssrc", &rtx_ssrc); ++ ++ if (src->ssrc != ssrc && src->ssrc != rtx_ssrc) { ++ GST_WARNING ("got ssrc %u/%u that doesn't match with this source's ssrc %u", ++ ssrc, rtx_ssrc, src->ssrc); ++ return; ++ } ++ ++ rtx = (rtx_ssrc == src->ssrc); + + if (gst_structure_get_int (s, rtx ? "rtx-payload" : "payload", &ival)) + src->payload = ival; +@@ -857,6 +870,12 @@ rtp_source_update_caps (RTPSource * src, GstCaps * caps) + src->seqnum_offset); + + gst_caps_replace (&src->caps, caps); ++ ++ if (rtx) { ++ src->media_ssrc = ssrc; ++ } else { ++ src->media_ssrc = -1; ++ } + } + + /** +@@ -921,7 +940,7 @@ push_packet (RTPSource * src, GstBuffer * buffer) + } + + static void +-fetch_clock_rate_from_payload (RTPSource * src, guint8 payload) ++fetch_caps_for_payload (RTPSource * src, guint8 payload) + { + if (src->payload == -1) { + /* first payload received, nothing was in the caps, lock on to this payload */ +@@ -935,16 +954,40 @@ fetch_clock_rate_from_payload (RTPSource * src, guint8 payload) + src->stats.transit = -1; + } + +- if (src->clock_rate == -1) { +- gint clock_rate = -1; ++ if (src->clock_rate == -1 || !src->caps) { ++ GstCaps *caps = NULL; ++ ++ if (src->callbacks.caps) { ++ caps = src->callbacks.caps (src, payload, src->user_data); ++ } + +- if (src->callbacks.clock_rate) +- clock_rate = src->callbacks.clock_rate (src, payload, src->user_data); ++ GST_DEBUG ("got caps %" GST_PTR_FORMAT, caps); + +- GST_DEBUG ("got clock-rate %d", clock_rate); ++ if (caps) { ++ const GstStructure *s; ++ gint clock_rate = -1; ++ const gchar *encoding_name; + +- src->clock_rate = clock_rate; +- gst_rtp_packet_rate_ctx_reset (&src->packet_rate_ctx, clock_rate); ++ s = gst_caps_get_structure (caps, 0); ++ ++ if (gst_structure_get_int (s, "clock-rate", &clock_rate)) { ++ src->clock_rate = clock_rate; ++ gst_rtp_packet_rate_ctx_reset (&src->packet_rate_ctx, clock_rate); ++ } else { ++ GST_DEBUG ("No clock-rate in caps!"); ++ } ++ ++ encoding_name = gst_structure_get_string (s, "encoding-name"); ++ /* Disable probation for RTX sources as packets will arrive very ++ * irregularly and waiting for a second packet usually exceeds the ++ * deadline of the retransmission */ ++ if (g_strcmp0 (encoding_name, "rtx") == 0) { ++ src->probation = src->curr_probation = 0; ++ } ++ } ++ ++ gst_caps_replace (&src->caps, caps); ++ gst_clear_caps (&caps); + } + } + +@@ -1208,6 +1251,28 @@ update_receiver_stats (RTPSource * src, RTPPacketInfo * pinfo, + GST_INFO ("duplicate or reordered packet (seqnr %u, expected %u)", + seqnr, expected); + } ++ } else { ++ /* Sender stats - update the outbound sequence number */ ++ expected = src->stats.max_seq + 1; ++ delta 
= gst_rtp_buffer_compare_seqnum (expected, seqnr); ++ /* No probation for local senders, just check for lost / dropouts */ ++ if (delta >= 0 && delta < max_dropout) { ++ stats->bad_seq = RTP_SEQ_MOD + 1; /* so seq == bad_seq is false */ ++ /* in order, with permissible gap */ ++ if (seqnr < stats->max_seq) { ++ /* sequence number wrapped - count another 64K cycle. */ ++ stats->cycles += RTP_SEQ_MOD; ++ } ++ stats->max_seq = seqnr; ++ } else if (delta < -max_misorder || delta >= max_dropout) { ++ /* the sequence number made a very large jump */ ++ if (seqnr != stats->bad_seq) { ++ /* unacceptable jump */ ++ stats->bad_seq = (seqnr + 1) & (RTP_SEQ_MOD - 1); ++ } ++ } else { /* delta < 0 && delta >= -max_misorder */ ++ stats->bad_seq = RTP_SEQ_MOD + 1; /* so seq == bad_seq is false */ ++ } + } + + src->stats.octets_received += pinfo->payload_len; +@@ -1261,7 +1326,7 @@ rtp_source_process_rtp (RTPSource * src, RTPPacketInfo * pinfo) + g_return_val_if_fail (RTP_IS_SOURCE (src), GST_FLOW_ERROR); + g_return_val_if_fail (pinfo != NULL, GST_FLOW_ERROR); + +- fetch_clock_rate_from_payload (src, pinfo->pt); ++ fetch_caps_for_payload (src, pinfo->pt); + + if (!update_receiver_stats (src, pinfo, TRUE)) + return GST_FLOW_OK; +@@ -1445,6 +1510,7 @@ rtp_source_process_sr (RTPSource * src, GstClockTime time, guint64 ntptime, + /** + * rtp_source_process_rb: + * @src: an #RTPSource ++ * @ssrc: SSRC of the local source for this this RB was sent + * @ntpnstime: the current time in nanoseconds since 1970 + * @fractionlost: fraction lost since last SR/RR + * @packetslost: the cumulative number of packets lost +@@ -1458,7 +1524,7 @@ rtp_source_process_sr (RTPSource * src, GstClockTime time, guint64 ntptime, + * Update the report block in @src. + */ + void +-rtp_source_process_rb (RTPSource * src, guint64 ntpnstime, ++rtp_source_process_rb (RTPSource * src, guint32 ssrc, guint64 ntpnstime, + guint8 fractionlost, gint32 packetslost, guint32 exthighestseq, + guint32 jitter, guint32 lsr, guint32 dlsr) + { +@@ -1479,6 +1545,7 @@ rtp_source_process_rb (RTPSource * src, guint64 ntpnstime, + + /* update current */ + curr->is_valid = TRUE; ++ curr->ssrc = ssrc; + curr->fractionlost = fractionlost; + curr->packetslost = packetslost; + curr->exthighestseq = exthighestseq; +@@ -1551,7 +1618,7 @@ rtp_source_get_new_sr (RTPSource * src, guint64 ntpnstime, + if (src->clock_rate == -1 && src->pt_set) { + GST_INFO ("no clock-rate, getting for pt %u and SSRC %u", src->pt, + src->ssrc); +- fetch_clock_rate_from_payload (src, src->pt); ++ fetch_caps_for_payload (src, src->pt); + } + + if (src->clock_rate != -1) { +@@ -1732,6 +1799,7 @@ rtp_source_get_last_sr (RTPSource * src, GstClockTime * time, guint64 * ntptime, + /** + * rtp_source_get_last_rb: + * @src: an #RTPSource ++ * @ssrc: SSRC of the local source for this this RB was sent + * @fractionlost: fraction lost since last SR/RR + * @packetslost: the cumulative number of packets lost + * @exthighestseq: the extended last sequence number received +@@ -1748,9 +1816,9 @@ rtp_source_get_last_sr (RTPSource * src, GstClockTime * time, guint64 * ntptime, + * Returns: %TRUE if there was a valid SB report. 
+ */ + gboolean +-rtp_source_get_last_rb (RTPSource * src, guint8 * fractionlost, +- gint32 * packetslost, guint32 * exthighestseq, guint32 * jitter, +- guint32 * lsr, guint32 * dlsr, guint32 * round_trip) ++rtp_source_get_last_rb (RTPSource * src, guint32 * ssrc, ++ guint8 * fractionlost, gint32 * packetslost, guint32 * exthighestseq, ++ guint32 * jitter, guint32 * lsr, guint32 * dlsr, guint32 * round_trip) + { + RTPReceiverReport *curr; + +@@ -1760,6 +1828,8 @@ rtp_source_get_last_rb (RTPSource * src, guint8 * fractionlost, + if (!curr->is_valid) + return FALSE; + ++ if (ssrc) ++ *ssrc = curr->ssrc; + if (fractionlost) + *fractionlost = curr->fractionlost; + if (packetslost) +@@ -1824,7 +1894,7 @@ timeout_conflicting_addresses (GList * conflicting_addresses, + RTPConflictingAddress *known_conflict = item->data; + GList *next_item = g_list_next (item); + +- if (known_conflict->time < current_time - collision_timeout) { ++ if (known_conflict->time + collision_timeout < current_time) { + gchar *buf; + + conflicting_addresses = g_list_delete_link (conflicting_addresses, item); +@@ -2026,7 +2096,7 @@ rtp_source_get_nacks (RTPSource * src, guint * n_nacks) + } + + /** +- * rtp_source_get_nacks: ++ * rtp_source_get_nack_deadlines: + * @src: The #RTPSource + * @n_nacks: result number of nacks + * +diff --git a/gst/rtpmanager/rtpsource.h b/gst/rtpmanager/rtpsource.h +index dff7b313f..f099e4b79 100644 +--- a/gst/rtpmanager/rtpsource.h ++++ b/gst/rtpmanager/rtpsource.h +@@ -86,29 +86,29 @@ typedef GstFlowReturn (*RTPSourcePushRTP) (RTPSource *src, gpointer data, + gpointer user_data); + + /** +- * RTPSourceClockRate: ++ * RTPSourceCaps: + * @src: an #RTPSource + * @payload: a payload type + * @user_data: user data specified when registering + * +- * This callback will be called when @src needs the clock-rate of the ++ * This callback will be called when @src needs the caps of the + * @payload. + * +- * Returns: a clock-rate for @payload. ++ * Returns: a caps for @payload. + */ +-typedef gint (*RTPSourceClockRate) (RTPSource *src, guint8 payload, gpointer user_data); ++typedef GstCaps * (*RTPSourceCaps) (RTPSource *src, guint8 payload, gpointer user_data); + + /** + * RTPSourceCallbacks: + * @push_rtp: a packet becomes available for handling +- * @clock_rate: a clock-rate is requested ++ * @caps: a caps is requested + * @get_time: the current clock time is requested + * + * Callbacks performed by #RTPSource when actions need to be performed. 
+ */ + typedef struct { + RTPSourcePushRTP push_rtp; +- RTPSourceClockRate clock_rate; ++ RTPSourceCaps caps; + } RTPSourceCallbacks; + + /** +@@ -137,6 +137,9 @@ struct _RTPSource { + /*< private >*/ + guint32 ssrc; + ++ /* If not -1 then this is the SSRC of the corresponding media RTPSource */ ++ guint32 media_ssrc; ++ + guint16 generation; + GHashTable *reported_in_sr_of; /* set of SSRCs */ + +@@ -230,7 +233,7 @@ void rtp_source_mark_bye (RTPSource *src, const gchar *rea + gboolean rtp_source_is_marked_bye (RTPSource *src); + gchar * rtp_source_get_bye_reason (RTPSource *src); + +-void rtp_source_update_caps (RTPSource *src, GstCaps *caps); ++void rtp_source_update_send_caps (RTPSource *src, GstCaps *caps); + + /* SDES info */ + const GstStructure * +@@ -249,7 +252,7 @@ GstFlowReturn rtp_source_send_rtp (RTPSource *src, RTPPacketInfo *p + /* RTCP messages */ + void rtp_source_process_sr (RTPSource *src, GstClockTime time, guint64 ntptime, + guint32 rtptime, guint32 packet_count, guint32 octet_count); +-void rtp_source_process_rb (RTPSource *src, guint64 ntpnstime, guint8 fractionlost, ++void rtp_source_process_rb (RTPSource *src, guint32 ssrc, guint64 ntpnstime, guint8 fractionlost, + gint32 packetslost, guint32 exthighestseq, guint32 jitter, + guint32 lsr, guint32 dlsr); + +@@ -263,7 +266,7 @@ gboolean rtp_source_get_new_rb (RTPSource *src, GstClockTime tim + gboolean rtp_source_get_last_sr (RTPSource *src, GstClockTime *time, guint64 *ntptime, + guint32 *rtptime, guint32 *packet_count, + guint32 *octet_count); +-gboolean rtp_source_get_last_rb (RTPSource *src, guint8 *fractionlost, gint32 *packetslost, ++gboolean rtp_source_get_last_rb (RTPSource *src, guint32 * ssrc, guint8 *fractionlost, gint32 *packetslost, + guint32 *exthighestseq, guint32 *jitter, + guint32 *lsr, guint32 *dlsr, guint32 *round_trip); + +diff --git a/gst/rtpmanager/rtpstats.c b/gst/rtpmanager/rtpstats.c +index 45fff3705..0f35046f1 100644 +--- a/gst/rtpmanager/rtpstats.c ++++ b/gst/rtpmanager/rtpstats.c +@@ -484,6 +484,7 @@ rtp_twcc_stats_get_packets_structure (GArray * twcc_packets) + "seqnum", G_TYPE_UINT, pkt->seqnum, + "local-ts", G_TYPE_UINT64, pkt->local_ts, + "remote-ts", G_TYPE_UINT64, pkt->remote_ts, ++ "payload-type", G_TYPE_UCHAR, pkt->pt, + "size", G_TYPE_UINT, pkt->size, + "lost", G_TYPE_BOOLEAN, pkt->status == RTP_TWCC_PACKET_STATUS_NOT_RECV, + NULL); +diff --git a/gst/rtpmanager/rtpstats.h b/gst/rtpmanager/rtpstats.h +index 776651f44..45ad377ee 100644 +--- a/gst/rtpmanager/rtpstats.h ++++ b/gst/rtpmanager/rtpstats.h +@@ -51,7 +51,7 @@ typedef struct { + */ + typedef struct { + gboolean is_valid; +- guint32 ssrc; /* who the report is from */ ++ guint32 ssrc; /* which source is the report about */ + guint8 fractionlost; + guint32 packetslost; + guint32 exthighestseq; +@@ -70,6 +70,7 @@ typedef struct { + * @address: address of the sender of the packet + * @current_time: current time according to the system clock + * @running_time: time of a packet as buffer running_time ++ * @arrival_time: time of arrival of a packet + * @ntpnstime: time of a packet NTP time in nanoseconds + * @header_len: number of overhead bytes per packet + * @bytes: bytes of the packet including lowlevel overhead +@@ -78,9 +79,13 @@ typedef struct { + * @pt: the payload type of the packet + * @rtptime: the RTP time of the packet + * @marker: the marker bit +- * +- * @tw_seqnum_ext_id: the extension-header ID for transport-wide seqnums +- * @tw_seqnum: the transport-wide seqnum of the packet ++ * @csrc_count: Number of CSRCs in 
@csrcs ++ * @csrcs: CSRCs ++ * @header_ext: Header extension data ++ * @header_ext_bit_pattern: Header extension bit pattern ++ * @ntp64_ext_id: Extension header ID for RFC6051 64-bit NTP timestamp. ++ * @have_ntp64_ext: If there is at least one 64-bit NTP timestamp header ++ * extension. + * + * Structure holding information about the packet. + */ +@@ -92,6 +97,7 @@ typedef struct { + GSocketAddress *address; + GstClockTime current_time; + GstClockTime running_time; ++ GstClockTime arrival_time; + guint64 ntpnstime; + guint header_len; + guint bytes; +@@ -106,6 +112,8 @@ typedef struct { + guint32 csrcs[16]; + GBytes *header_ext; + guint16 header_ext_bit_pattern; ++ guint8 ntp64_ext_id; ++ gboolean have_ntp64_ext; + } RTPPacketInfo; + + /** +diff --git a/gst/rtpmanager/rtptimerqueue.c b/gst/rtpmanager/rtptimerqueue.c +index 446a70ea5..6cee0266e 100644 +--- a/gst/rtpmanager/rtptimerqueue.c ++++ b/gst/rtpmanager/rtptimerqueue.c +@@ -304,6 +304,8 @@ rtp_timer_queue_finalize (GObject * object) + rtp_timer_free (timer); + g_hash_table_unref (queue->hashtable); + g_assert (queue->timers.length == 0); ++ ++ G_OBJECT_CLASS (rtp_timer_queue_parent_class)->finalize (object); + } + + static void +@@ -582,8 +584,6 @@ rtp_timer_queue_set_timer (RtpTimerQueue * queue, RtpTimerType type, + if (!timer->queued || timer->seqnum != seqnum) { + if (type == RTP_TIMER_EXPECTED) { + timer->rtx_base = timeout; +- timer->rtx_delay = delay; +- timer->rtx_retry = 0; + } + + timer->rtx_last = GST_CLOCK_TIME_NONE; +@@ -702,11 +702,9 @@ rtp_timer_queue_update_timer (RtpTimerQueue * queue, RtpTimer * timer, + g_return_if_fail (timer != NULL); + + if (reset) { +- GST_DEBUG ("reset rtx delay %" GST_TIME_FORMAT "->%" GST_TIME_FORMAT, +- GST_TIME_ARGS (timer->rtx_delay), GST_TIME_ARGS (delay)); ++ GST_DEBUG ("reset rtx base %" GST_TIME_FORMAT "->%" GST_TIME_FORMAT, ++ GST_TIME_ARGS (timer->rtx_base), GST_TIME_ARGS (timeout)); + timer->rtx_base = timeout; +- timer->rtx_delay = delay; +- timer->rtx_retry = 0; + } + + if (timer->seqnum != seqnum) { +diff --git a/gst/rtpmanager/rtptimerqueue.h b/gst/rtpmanager/rtptimerqueue.h +index 969cbd37f..283f22879 100644 +--- a/gst/rtpmanager/rtptimerqueue.h ++++ b/gst/rtpmanager/rtptimerqueue.h +@@ -65,8 +65,6 @@ typedef struct + GstClockTimeDiff offset; + GstClockTime duration; + GstClockTime rtx_base; +- GstClockTime rtx_delay; +- GstClockTime rtx_retry; + GstClockTime rtx_last; + guint num_rtx_retry; + guint num_rtx_received; +diff --git a/gst/rtpmanager/rtptwcc.c b/gst/rtpmanager/rtptwcc.c +index f5b5351bf..2b642bff6 100644 +--- a/gst/rtpmanager/rtptwcc.c ++++ b/gst/rtpmanager/rtptwcc.c +@@ -22,13 +22,20 @@ + #include + #include + ++#include "gstrtputils.h" ++ + GST_DEBUG_CATEGORY_EXTERN (rtp_session_debug); + #define GST_CAT_DEFAULT rtp_session_debug + ++#define TWCC_EXTMAP_STR "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01" ++ + #define REF_TIME_UNIT (64 * GST_MSECOND) + #define DELTA_UNIT (250 * GST_USECOND) + #define MAX_TS_DELTA (0xff * DELTA_UNIT) + ++#define STATUS_VECTOR_MAX_CAPACITY 14 ++#define STATUS_VECTOR_TWO_BIT_MAX_CAPACITY 7 ++ + typedef enum + { + RTP_TWCC_CHUNK_TYPE_RUN_LENGTH = 0, +@@ -60,6 +67,7 @@ typedef struct + GstClockTime socket_ts; + GstClockTime remote_ts; + guint16 seqnum; ++ guint8 pt; + guint size; + gboolean lost; + } SentPacket; +@@ -68,11 +76,15 @@ struct _RTPTWCCManager + { + GObject object; + ++ guint8 send_ext_id; ++ guint8 recv_ext_id; ++ guint16 send_seqnum; ++ + guint mtu; + guint max_packets_per_rtcp; + GArray 
*recv_packets; + +- guint8 fb_pkt_count; ++ guint64 fb_pkt_count; + gint32 last_seqnum; + + GArray *sent_packets; +@@ -83,10 +95,14 @@ struct _RTPTWCCManager + guint64 recv_media_ssrc; + + guint16 expected_recv_seqnum; ++ guint16 packet_count_no_marker; + + gboolean first_fci_parse; + guint16 expected_parsed_seqnum; + guint8 expected_parsed_fb_pkt_count; ++ ++ GstClockTime next_feedback_send_time; ++ GstClockTime feedback_interval; + }; + + G_DEFINE_TYPE (RTPTWCCManager, rtp_twcc_manager, G_TYPE_OBJECT); +@@ -105,6 +121,9 @@ rtp_twcc_manager_init (RTPTWCCManager * twcc) + twcc->recv_sender_ssrc = -1; + + twcc->first_fci_parse = TRUE; ++ ++ twcc->feedback_interval = GST_CLOCK_TIME_NONE; ++ twcc->next_feedback_send_time = GST_CLOCK_TIME_NONE; + } + + static void +@@ -142,7 +161,35 @@ recv_packet_init (RecvPacket * packet, guint16 seqnum, RTPPacketInfo * pinfo) + { + memset (packet, 0, sizeof (RecvPacket)); + packet->seqnum = seqnum; +- packet->ts = pinfo->running_time; ++ ++ if (GST_CLOCK_TIME_IS_VALID (pinfo->arrival_time)) ++ packet->ts = pinfo->arrival_time; ++ else ++ packet->ts = pinfo->current_time; ++} ++ ++void ++rtp_twcc_manager_parse_recv_ext_id (RTPTWCCManager * twcc, ++ const GstStructure * s) ++{ ++ guint8 recv_ext_id = gst_rtp_get_extmap_id_for_attribute (s, TWCC_EXTMAP_STR); ++ if (recv_ext_id > 0) { ++ twcc->recv_ext_id = recv_ext_id; ++ GST_INFO ("TWCC enabled for recv using extension id: %u", ++ twcc->recv_ext_id); ++ } ++} ++ ++void ++rtp_twcc_manager_parse_send_ext_id (RTPTWCCManager * twcc, ++ const GstStructure * s) ++{ ++ guint8 send_ext_id = gst_rtp_get_extmap_id_for_attribute (s, TWCC_EXTMAP_STR); ++ if (send_ext_id > 0) { ++ twcc->send_ext_id = send_ext_id; ++ GST_INFO ("TWCC enabled for send using extension id: %u", ++ twcc->send_ext_id); ++ } + } + + void +@@ -157,6 +204,114 @@ rtp_twcc_manager_set_mtu (RTPTWCCManager * twcc, guint mtu) + twcc->max_packets_per_rtcp = ((twcc->mtu - 32) * 7) / (2 + 14); + } + ++void ++rtp_twcc_manager_set_feedback_interval (RTPTWCCManager * twcc, ++ GstClockTime feedback_interval) ++{ ++ twcc->feedback_interval = feedback_interval; ++} ++ ++GstClockTime ++rtp_twcc_manager_get_feedback_interval (RTPTWCCManager * twcc) ++{ ++ return twcc->feedback_interval; ++} ++ ++static gboolean ++_get_twcc_seqnum_data (RTPPacketInfo * pinfo, guint8 ext_id, gpointer * data) ++{ ++ gboolean ret = FALSE; ++ guint size; ++ ++ if (pinfo->header_ext && ++ gst_rtp_buffer_get_extension_onebyte_header_from_bytes (pinfo->header_ext, ++ pinfo->header_ext_bit_pattern, ext_id, 0, data, &size)) { ++ if (size == 2) ++ ret = TRUE; ++ } ++ return ret; ++} ++ ++static void ++sent_packet_init (SentPacket * packet, guint16 seqnum, RTPPacketInfo * pinfo, ++ GstRTPBuffer * rtp) ++{ ++ packet->seqnum = seqnum; ++ packet->ts = pinfo->current_time; ++ packet->size = gst_rtp_buffer_get_payload_len (rtp); ++ packet->pt = gst_rtp_buffer_get_payload_type (rtp); ++ packet->remote_ts = GST_CLOCK_TIME_NONE; ++ packet->socket_ts = GST_CLOCK_TIME_NONE; ++ packet->lost = FALSE; ++} ++ ++static void ++_set_twcc_seqnum_data (RTPTWCCManager * twcc, RTPPacketInfo * pinfo, ++ GstBuffer * buf, guint8 ext_id) ++{ ++ SentPacket packet; ++ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT; ++ gpointer data; ++ ++ if (gst_rtp_buffer_map (buf, GST_MAP_READWRITE, &rtp)) { ++ if (gst_rtp_buffer_get_extension_onebyte_header (&rtp, ++ ext_id, 0, &data, NULL)) { ++ guint16 seqnum = twcc->send_seqnum++; ++ ++ GST_WRITE_UINT16_BE (data, seqnum); ++ sent_packet_init (&packet, seqnum, pinfo, &rtp); ++ 
g_array_append_val (twcc->sent_packets, packet); ++ ++ GST_LOG ("Send: twcc-seqnum: %u, pt: %u, marker: %d, len: %u, ts: %" ++ GST_TIME_FORMAT, seqnum, packet.pt, pinfo->marker, packet.size, ++ GST_TIME_ARGS (pinfo->current_time)); ++ } ++ gst_rtp_buffer_unmap (&rtp); ++ } ++} ++ ++static void ++rtp_twcc_manager_set_send_twcc_seqnum (RTPTWCCManager * twcc, ++ RTPPacketInfo * pinfo) ++{ ++ if (GST_IS_BUFFER_LIST (pinfo->data)) { ++ GstBufferList *list; ++ guint i = 0; ++ ++ pinfo->data = gst_buffer_list_make_writable (pinfo->data); ++ ++ list = GST_BUFFER_LIST (pinfo->data); ++ ++ for (i = 0; i < gst_buffer_list_length (list); i++) { ++ GstBuffer *buffer = gst_buffer_list_get_writable (list, i); ++ ++ _set_twcc_seqnum_data (twcc, pinfo, buffer, twcc->send_ext_id); ++ } ++ } else { ++ pinfo->data = gst_buffer_make_writable (pinfo->data); ++ _set_twcc_seqnum_data (twcc, pinfo, pinfo->data, twcc->send_ext_id); ++ } ++} ++ ++static gint32 ++rtp_twcc_manager_get_recv_twcc_seqnum (RTPTWCCManager * twcc, ++ RTPPacketInfo * pinfo) ++{ ++ gint32 val = -1; ++ gpointer data; ++ ++ if (twcc->recv_ext_id == 0) { ++ GST_DEBUG ("Received TWCC packet, but no extension registered; ignoring"); ++ return val; ++ } ++ ++ if (_get_twcc_seqnum_data (pinfo, twcc->recv_ext_id, &data)) { ++ val = GST_READ_UINT16_BE (data); ++ } ++ ++ return val; ++} ++ + static gint + _twcc_seqnum_sort (gconstpointer a, gconstpointer b) + { +@@ -197,7 +352,7 @@ rtp_twcc_write_run_length_chunk (GArray * packet_chunks, + guint16 data = 0; + guint len = MIN (run_length - written, 8191); + +- GST_LOG ("Writing a run-lenght of %u with status %u", len, status); ++ GST_LOG ("Writing a run-length of %u with status %u", len, status); + + gst_bit_writer_init_with_data (&writer, (guint8 *) & data, 2, FALSE); + gst_bit_writer_put_bits_uint8 (&writer, RTP_TWCC_CHUNK_TYPE_RUN_LENGTH, 1); +@@ -257,7 +412,7 @@ chunk_bit_writer_get_available_slots (ChunkBitWriter * writer) + static guint + chunk_bit_writer_get_total_slots (ChunkBitWriter * writer) + { +- return 14 / writer->symbol_size; ++ return STATUS_VECTOR_MAX_CAPACITY / writer->symbol_size; + } + + static void +@@ -340,13 +495,43 @@ run_lenght_helper_update (RunLengthHelper * rlh, RecvPacket * pkt) + } + } + ++static guint ++_get_max_packets_capacity (guint symbol_size) ++{ ++ if (symbol_size == 2) ++ return STATUS_VECTOR_TWO_BIT_MAX_CAPACITY; ++ ++ return STATUS_VECTOR_MAX_CAPACITY; ++} ++ ++static gboolean ++_pkt_fits_run_length_chunk (RecvPacket * pkt, guint packets_per_chunks, ++ guint remaining_packets) ++{ ++ if (pkt->missing_run == 0) { ++ /* we have more or the same equal packets than the ones we can write in to a status chunk */ ++ if (pkt->equal_run >= packets_per_chunks) ++ return TRUE; ++ ++ /* we have more than one equal and not enough space for the remainings */ ++ if (pkt->equal_run > 1 && remaining_packets > STATUS_VECTOR_MAX_CAPACITY) ++ return TRUE; ++ ++ /* we have all equal packets for the remaining to write */ ++ if (pkt->equal_run == remaining_packets) ++ return TRUE; ++ } ++ ++ return FALSE; ++} ++ + static void + rtp_twcc_write_chunks (GArray * packet_chunks, + GArray * twcc_packets, guint symbol_size) + { + ChunkBitWriter writer; + guint i; +- guint bits_per_chunks = 7 * symbol_size; ++ guint packets_per_chunks = _get_max_packets_capacity (symbol_size); + + chunk_bit_writer_init (&writer, packet_chunks, symbol_size); + +@@ -354,21 +539,26 @@ rtp_twcc_write_chunks (GArray * packet_chunks, + RecvPacket *pkt = &g_array_index (twcc_packets, RecvPacket, i); + guint 
remaining_packets = twcc_packets->len - i; + ++ GST_LOG ++ ("About to write pkt: #%u missing_run: %u equal_run: %u status: %u, remaining_packets: %u", ++ pkt->seqnum, pkt->missing_run, pkt->equal_run, pkt->status, ++ remaining_packets); ++ + /* we can only start a run-length chunk if the status-chunk is + completed */ + if (chunk_bit_writer_is_empty (&writer)) { + /* first write in any preceeding gaps, we use run-length + if it would take up more than one chunk (14/7) */ +- if (pkt->missing_run > bits_per_chunks) { ++ if (pkt->missing_run > packets_per_chunks) { + rtp_twcc_write_run_length_chunk (packet_chunks, + RTP_TWCC_PACKET_STATUS_NOT_RECV, pkt->missing_run); + } + + /* we have a run of the same status, write a run-length chunk and skip + to the next point */ +- if (pkt->missing_run == 0 && +- (pkt->equal_run > bits_per_chunks || +- pkt->equal_run == remaining_packets)) { ++ if (_pkt_fits_run_length_chunk (pkt, packets_per_chunks, ++ remaining_packets)) { ++ + rtp_twcc_write_run_length_chunk (packet_chunks, + pkt->status, pkt->equal_run); + i += pkt->equal_run - 1; +@@ -404,9 +594,24 @@ rtp_twcc_manager_add_fci (RTPTWCCManager * twcc, GstRTCPPacket * packet) + guint symbol_size = 1; + GstClockTimeDiff delta_ts; + gint64 delta_ts_rounded; ++ guint8 fb_pkt_count; + + g_array_sort (twcc->recv_packets, _twcc_seqnum_sort); + ++ /* Quick scan to remove duplicates */ ++ prev = &g_array_index (twcc->recv_packets, RecvPacket, 0); ++ for (i = 1; i < twcc->recv_packets->len;) { ++ RecvPacket *cur = &g_array_index (twcc->recv_packets, RecvPacket, i); ++ ++ if (prev->seqnum == cur->seqnum) { ++ GST_DEBUG ("Removing duplicate packet #%u", cur->seqnum); ++ g_array_remove_index (twcc->recv_packets, i); ++ } else { ++ prev = cur; ++ i += 1; ++ } ++ } ++ + /* get first and last packet */ + first = &g_array_index (twcc->recv_packets, RecvPacket, 0); + last = +@@ -415,19 +620,19 @@ rtp_twcc_manager_add_fci (RTPTWCCManager * twcc, GstRTCPPacket * packet) + + packet_count = last->seqnum - first->seqnum + 1; + base_time = first->ts / REF_TIME_UNIT; ++ fb_pkt_count = (guint8) (twcc->fb_pkt_count % G_MAXUINT8); + + GST_WRITE_UINT16_BE (header.base_seqnum, first->seqnum); + GST_WRITE_UINT16_BE (header.packet_count, packet_count); + GST_WRITE_UINT24_BE (header.base_time, base_time); +- GST_WRITE_UINT8 (header.fb_pkt_count, twcc->fb_pkt_count); ++ GST_WRITE_UINT8 (header.fb_pkt_count, fb_pkt_count); + + base_time *= REF_TIME_UNIT; + ts_rounded = base_time; + + GST_DEBUG ("Created TWCC feedback: base_seqnum: #%u, packet_count: %u, " + "base_time %" GST_TIME_FORMAT " fb_pkt_count: %u", +- first->seqnum, packet_count, GST_TIME_ARGS (base_time), +- twcc->fb_pkt_count); ++ first->seqnum, packet_count, GST_TIME_ARGS (base_time), fb_pkt_count); + + twcc->fb_pkt_count++; + twcc->expected_recv_seqnum = first->seqnum + packet_count; +@@ -527,24 +732,7 @@ rtp_twcc_manager_create_feedback (RTPTWCCManager * twcc) + static gboolean + _exceeds_max_packets (RTPTWCCManager * twcc, guint16 seqnum) + { +- RecvPacket *first, *last; +- guint16 packet_count; +- +- if (twcc->recv_packets->len == 0) +- return FALSE; +- +- /* find the delta betwen first stored packet and this seqnum */ +- first = &g_array_index (twcc->recv_packets, RecvPacket, 0); +- packet_count = seqnum - first->seqnum + 1; +- if (packet_count > twcc->max_packets_per_rtcp) +- return TRUE; +- +- /* then find the delta between last stored packet and this seqnum */ +- last = +- &g_array_index (twcc->recv_packets, RecvPacket, +- twcc->recv_packets->len - 1); +- 
packet_count = seqnum - (last->seqnum + 1); +- if (packet_count > twcc->max_packets_per_rtcp) ++ if (twcc->recv_packets->len + 1 > twcc->max_packets_per_rtcp) + return TRUE; + + return FALSE; +@@ -559,25 +747,41 @@ _many_packets_some_lost (RTPTWCCManager * twcc, guint16 seqnum) + RecvPacket *first; + guint16 packet_count; + guint received_packets = twcc->recv_packets->len; ++ guint lost_packets; + if (received_packets == 0) + return FALSE; + + first = &g_array_index (twcc->recv_packets, RecvPacket, 0); + packet_count = seqnum - first->seqnum + 1; +- /* packet-count larger than recevied-packets means we have lost packets */ +- if (packet_count >= 30 && packet_count > received_packets) ++ ++ /* If there are a high number of duplicates, we can't use the following ++ * metrics */ ++ if (received_packets > packet_count) ++ return FALSE; ++ ++ /* check if we lost half of the threshold */ ++ lost_packets = packet_count - received_packets; ++ if (received_packets >= 30 && lost_packets >= 60) ++ return TRUE; ++ ++ /* we have lost the marker bit for some and lost some */ ++ if (twcc->packet_count_no_marker >= 10 && lost_packets >= 60) + return TRUE; + + return FALSE; + } + + gboolean +-rtp_twcc_manager_recv_packet (RTPTWCCManager * twcc, +- guint16 seqnum, RTPPacketInfo * pinfo) ++rtp_twcc_manager_recv_packet (RTPTWCCManager * twcc, RTPPacketInfo * pinfo) + { + gboolean send_feedback = FALSE; + RecvPacket packet; +- gint32 diff; ++ gint32 seqnum; ++ gint diff; ++ ++ seqnum = rtp_twcc_manager_get_recv_twcc_seqnum (twcc, pinfo); ++ if (seqnum == -1) ++ return FALSE; + + /* if this packet would exceed the capacity of our MTU, we create a feedback + with the current packets, and start over with this one */ +@@ -595,8 +799,8 @@ rtp_twcc_manager_recv_packet (RTPTWCCManager * twcc, + /* check if we are reordered, and treat it as lost if we already sent + a feedback msg with a higher seqnum. 
If the diff is huge, treat + it as a restart of a stream */ +- diff = (gint32) seqnum - (gint32) twcc->expected_recv_seqnum; +- if (twcc->fb_pkt_count > 0 && diff < 0 && diff > -1000) { ++ diff = gst_rtp_buffer_compare_seqnum (twcc->expected_recv_seqnum, seqnum); ++ if (twcc->fb_pkt_count > 0 && diff < 0) { + GST_INFO ("Received out of order packet (%u after %u), treating as lost", + seqnum, twcc->expected_recv_seqnum); + return FALSE; +@@ -606,12 +810,36 @@ rtp_twcc_manager_recv_packet (RTPTWCCManager * twcc, + recv_packet_init (&packet, seqnum, pinfo); + g_array_append_val (twcc->recv_packets, packet); + twcc->last_seqnum = seqnum; +- GST_LOG ("Receive: twcc-seqnum: %u, marker: %d, ts: %" GST_TIME_FORMAT, +- seqnum, pinfo->marker, GST_TIME_ARGS (pinfo->running_time)); + +- if (pinfo->marker || _many_packets_some_lost (twcc, seqnum)) { ++ GST_LOG ("Receive: twcc-seqnum: %u, pt: %u, marker: %d, ts: %" ++ GST_TIME_FORMAT, seqnum, pinfo->pt, pinfo->marker, ++ GST_TIME_ARGS (pinfo->arrival_time)); ++ ++ if (!pinfo->marker) ++ twcc->packet_count_no_marker++; ++ ++ /* are we sending on an interval, or based on marker bit */ ++ if (GST_CLOCK_TIME_IS_VALID (twcc->feedback_interval)) { ++ if (!GST_CLOCK_TIME_IS_VALID (twcc->next_feedback_send_time)) ++ twcc->next_feedback_send_time = ++ pinfo->running_time + twcc->feedback_interval; ++ ++ if (pinfo->running_time >= twcc->next_feedback_send_time) { ++ GST_LOG ("Generating feedback : Exceeded feedback interval %" ++ GST_TIME_FORMAT, GST_TIME_ARGS (twcc->feedback_interval)); ++ rtp_twcc_manager_create_feedback (twcc); ++ send_feedback = TRUE; ++ ++ while (pinfo->running_time >= twcc->next_feedback_send_time) ++ twcc->next_feedback_send_time += twcc->feedback_interval; ++ } ++ } else if (pinfo->marker || _many_packets_some_lost (twcc, seqnum)) { ++ GST_LOG ("Generating feedback because of %s", ++ pinfo->marker ? 
"marker packet" : "many packets some lost"); + rtp_twcc_manager_create_feedback (twcc); + send_feedback = TRUE; ++ ++ twcc->packet_count_no_marker = 0; + } + + return send_feedback; +@@ -642,39 +870,13 @@ rtp_twcc_manager_get_feedback (RTPTWCCManager * twcc, guint sender_ssrc) + return buf; + } + +-static void +-sent_packet_init (SentPacket * packet, guint16 seqnum, RTPPacketInfo * pinfo) +-{ +- packet->seqnum = seqnum; +- packet->ts = pinfo->running_time; +- packet->size = pinfo->payload_len; +- packet->remote_ts = GST_CLOCK_TIME_NONE; +- packet->socket_ts = GST_CLOCK_TIME_NONE; +- packet->lost = FALSE; +-} +- + void +-rtp_twcc_manager_send_packet (RTPTWCCManager * twcc, +- guint16 seqnum, RTPPacketInfo * pinfo) ++rtp_twcc_manager_send_packet (RTPTWCCManager * twcc, RTPPacketInfo * pinfo) + { +- SentPacket packet; +- sent_packet_init (&packet, seqnum, pinfo); +- g_array_append_val (twcc->sent_packets, packet); +- +- GST_LOG ("Send: twcc-seqnum: %u, marker: %d, ts: %" GST_TIME_FORMAT, +- seqnum, pinfo->marker, GST_TIME_ARGS (pinfo->running_time)); +-} ++ if (twcc->send_ext_id == 0) ++ return; + +-void +-rtp_twcc_manager_set_send_packet_ts (RTPTWCCManager * twcc, +- guint packet_id, GstClockTime ts) +-{ +- SentPacket *pkt = NULL; +- pkt = &g_array_index (twcc->sent_packets, SentPacket, packet_id); +- if (pkt) { +- pkt->socket_ts = ts; +- GST_DEBUG ("assigning: pkt-id: %u to packet: %u", packet_id, pkt->seqnum); +- } ++ rtp_twcc_manager_set_send_twcc_seqnum (twcc, pinfo); + } + + static void +@@ -696,14 +898,14 @@ static guint + _parse_run_length_chunk (GstBitReader * reader, GArray * twcc_packets, + guint16 seqnum_offset, guint remaining_packets) + { +- guint run_length; ++ guint16 run_length; + guint8 status_code; + guint i; + + gst_bit_reader_get_bits_uint8 (reader, &status_code, 2); ++ gst_bit_reader_get_bits_uint16 (reader, &run_length, 13); + +- run_length = *(guint16 *) reader->data & ~0xE0; /* mask out the 3 last bits */ +- run_length = MIN (remaining_packets, GST_READ_UINT16_BE (&run_length)); ++ run_length = MIN (remaining_packets, run_length); + + for (i = 0; i < run_length; i++) { + _add_twcc_packet (twcc_packets, seqnum_offset + i, status_code); +@@ -759,6 +961,7 @@ _check_for_lost_packets (RTPTWCCManager * twcc, GArray * twcc_packets, + guint16 base_seqnum, guint16 packet_count, guint8 fb_pkt_count) + { + guint packets_lost; ++ gint8 fb_pkt_count_diff; + guint i; + + /* first packet */ +@@ -767,22 +970,31 @@ _check_for_lost_packets (RTPTWCCManager * twcc, GArray * twcc_packets, + goto done; + } + ++ fb_pkt_count_diff = ++ (gint8) (fb_pkt_count - twcc->expected_parsed_fb_pkt_count); ++ + /* we have gone backwards, don't reset the expectations, + but process the packet nonetheless */ +- if (fb_pkt_count < twcc->expected_parsed_fb_pkt_count) { +- GST_WARNING ("feedback packet count going backwards (%u < %u)", ++ if (fb_pkt_count_diff < 0) { ++ GST_DEBUG ("feedback packet count going backwards (%u < %u)", + fb_pkt_count, twcc->expected_parsed_fb_pkt_count); + return; + } + + /* we have jumped forwards, reset expectations, but don't trigger + lost packets in case the missing fb-packet(s) arrive later */ +- if (fb_pkt_count > twcc->expected_parsed_fb_pkt_count) { +- GST_WARNING ("feedback packet count jumped ahead (%u > %u)", ++ if (fb_pkt_count_diff > 0) { ++ GST_DEBUG ("feedback packet count jumped ahead (%u > %u)", + fb_pkt_count, twcc->expected_parsed_fb_pkt_count); + goto done; + } + ++ if (base_seqnum < twcc->expected_parsed_seqnum) { ++ GST_DEBUG ("twcc seqnum is older than 
expected (%u < %u)", base_seqnum, ++ twcc->expected_parsed_seqnum); ++ return; ++ } ++ + packets_lost = base_seqnum - twcc->expected_parsed_seqnum; + for (i = 0; i < packets_lost; i++) { + _add_twcc_packet (twcc_packets, twcc->expected_parsed_seqnum + i, +@@ -896,6 +1108,7 @@ rtp_twcc_manager_parse_fci (RTPTWCCManager * twcc, + pkt->local_ts = found->ts; + } + pkt->size = found->size; ++ pkt->pt = found->pt; + + GST_LOG ("matching pkt: #%u with local_ts: %" GST_TIME_FORMAT + " size: %u", pkt->seqnum, GST_TIME_ARGS (pkt->local_ts), pkt->size); +diff --git a/gst/rtpmanager/rtptwcc.h b/gst/rtpmanager/rtptwcc.h +index 50da70477..a826e9a4c 100644 +--- a/gst/rtpmanager/rtptwcc.h ++++ b/gst/rtpmanager/rtptwcc.h +@@ -49,19 +49,25 @@ struct _RTPTWCCPacket + RTPTWCCPacketStatus status; + guint16 seqnum; + guint size; ++ guint8 pt; + }; + + RTPTWCCManager * rtp_twcc_manager_new (guint mtu); + ++void rtp_twcc_manager_parse_recv_ext_id (RTPTWCCManager * twcc, ++ const GstStructure * s); ++void rtp_twcc_manager_parse_send_ext_id (RTPTWCCManager * twcc, ++ const GstStructure * s); ++ + void rtp_twcc_manager_set_mtu (RTPTWCCManager * twcc, guint mtu); ++void rtp_twcc_manager_set_feedback_interval (RTPTWCCManager * twcc, ++ GstClockTime feedback_interval); ++GstClockTime rtp_twcc_manager_get_feedback_interval (RTPTWCCManager * twcc); + + gboolean rtp_twcc_manager_recv_packet (RTPTWCCManager * twcc, +- guint16 seqnum, RTPPacketInfo * pinfo); +- ++ RTPPacketInfo * pinfo); + void rtp_twcc_manager_send_packet (RTPTWCCManager * twcc, +- guint16 seqnum, RTPPacketInfo * pinfo); +-void rtp_twcc_manager_set_send_packet_ts (RTPTWCCManager * twcc, +- guint packet_id, GstClockTime ts); ++ RTPPacketInfo * pinfo); + + GstBuffer * rtp_twcc_manager_get_feedback (RTPTWCCManager * twcc, + guint32 sender_ssrc); +-- +2.47.1 + diff --git a/package/gstreamer1/gst1-plugins-good/1.18.6-gstwebrtc/0002-rtpsession-Fix-twcc-stats-structure-leaks.patch b/package/gstreamer1/gst1-plugins-good/1.18.6-gstwebrtc/0002-rtpsession-Fix-twcc-stats-structure-leaks.patch new file mode 100644 index 000000000000..6d8e228314e6 --- /dev/null +++ b/package/gstreamer1/gst1-plugins-good/1.18.6-gstwebrtc/0002-rtpsession-Fix-twcc-stats-structure-leaks.patch @@ -0,0 +1,62 @@ +From 987794cf36786f60226093d79826c0cfaec0c987 Mon Sep 17 00:00:00 2001 +From: Philippe Normand +Date: Thu, 19 Dec 2024 15:18:27 +0100 +Subject: [PATCH 2/2] rtpsession: Fix twcc stats structure leaks + +Part-of: +--- + gst/rtpmanager/gstrtpsession.c | 2 ++ + gst/rtpmanager/rtpsession.c | 9 +++++++-- + gst/rtpmanager/rtpsession.h | 3 +++ + 3 files changed, 12 insertions(+), 2 deletions(-) + +diff --git a/gst/rtpmanager/gstrtpsession.c b/gst/rtpmanager/gstrtpsession.c +index a4fa9199a..4bbbd676e 100644 +--- a/gst/rtpmanager/gstrtpsession.c ++++ b/gst/rtpmanager/gstrtpsession.c +@@ -2964,6 +2964,8 @@ gst_rtp_session_notify_twcc (RTPSession * sess, + event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, twcc_packets); + gst_pad_push_event (send_rtp_sink, event); + gst_object_unref (send_rtp_sink); ++ } else { ++ gst_structure_free (twcc_packets); + } + + g_object_notify (G_OBJECT (rtpsession), "twcc-stats"); +diff --git a/gst/rtpmanager/rtpsession.c b/gst/rtpmanager/rtpsession.c +index a73954e4b..1cd880b4b 100644 +--- a/gst/rtpmanager/rtpsession.c ++++ b/gst/rtpmanager/rtpsession.c +@@ -2974,8 +2974,13 @@ rtp_session_process_twcc (RTPSession * sess, guint32 sender_ssrc, + + RTP_SESSION_UNLOCK (sess); + if (sess->callbacks.notify_twcc) +- sess->callbacks.notify_twcc (sess, 
twcc_packets_s, twcc_stats_s, +- sess->notify_twcc_user_data); ++ sess->callbacks.notify_twcc (sess, g_steal_pointer (&twcc_packets_s), ++ g_steal_pointer (&twcc_stats_s), sess->notify_twcc_user_data); ++ else { ++ gst_structure_free (twcc_packets_s); ++ gst_structure_free (twcc_stats_s); ++ } ++ + RTP_SESSION_LOCK (sess); + } + +diff --git a/gst/rtpmanager/rtpsession.h b/gst/rtpmanager/rtpsession.h +index 84b2948dc..a9111fb0a 100644 +--- a/gst/rtpmanager/rtpsession.h ++++ b/gst/rtpmanager/rtpsession.h +@@ -159,6 +159,9 @@ typedef void (*RTPSessionNotifyNACK) (RTPSession *sess, + + /** + * RTPSessionNotifyTWCC: ++ * @sess: an #RTPSession ++ * @twcc_packets: (transfer full): TWCC packets #GstStructure ++ * @twcc_stats: (transfer full): TWCC stats #GstStructure + * @user_data: user data specified when registering + * + * Notifies of Transport-wide congestion control packets and stats. +-- +2.47.1 + diff --git a/package/gstreamer1/gst1-plugins-good/gst1-plugins-good.mk b/package/gstreamer1/gst1-plugins-good/gst1-plugins-good.mk index bdab41a0d91f..3d367a46fa96 100644 --- a/package/gstreamer1/gst1-plugins-good/gst1-plugins-good.mk +++ b/package/gstreamer1/gst1-plugins-good/gst1-plugins-good.mk @@ -505,22 +505,29 @@ endef ifeq ($(BR2_PACKAGE_WPEWEBKIT2_22),y) define GST1_PLUGINS_GOOD_APPLY_WPEWEBKIT_EXTRA_PATCHES_POST_HOOK - cd $(@D) && { for P in ../../../package/gstreamer1/gst1-plugins-good/$(GST1_PLUGINS_GOOD_VERSION)-wpe-2.22/*.patch; do patch -p1 < "$$P" ; done; } + cd $(@D) && { for P in $(TOPDIR)/$(GST1_PLUGINS_GOOD_PKGDIR)/$(GST1_PLUGINS_GOOD_VERSION)-wpe-2.22/*.patch; do patch -p1 < "$$P" ; done; } endef endif ifeq ($(BR2_PACKAGE_WPEWEBKIT2_28),y) define GST1_PLUGINS_GOOD_APPLY_WPEWEBKIT_EXTRA_PATCHES_POST_HOOK - cd $(@D) && { for P in ../../../package/gstreamer1/gst1-plugins-good/$(GST1_PLUGINS_GOOD_VERSION)-wpe-2.28/*.patch; do patch -p1 < "$$P" ; done; } + cd $(@D) && { for P in $(TOPDIR)/$(GST1_PLUGINS_GOOD_PKGDIR)/$(GST1_PLUGINS_GOOD_VERSION)-wpe-2.28/*.patch; do patch -p1 < "$$P" ; done; } endef endif ifeq ($(BR2_PACKAGE_WPEWEBKIT2_38),y) define GST1_PLUGINS_GOOD_APPLY_WPEWEBKIT_EXTRA_PATCHES_POST_HOOK - cd $(@D) && { for P in ../../../package/gstreamer1/gst1-plugins-good/$(GST1_PLUGINS_GOOD_VERSION)-wpe-2.38/*.patch; do patch -p1 < "$$P" ; done; } + cd $(@D) && { for P in $(TOPDIR)/$(GST1_PLUGINS_GOOD_PKGDIR)/$(GST1_PLUGINS_GOOD_VERSION)-wpe-2.38/*.patch; do patch -p1 < "$$P" ; done; } endef endif GST1_PLUGINS_GOOD_POST_PATCH_HOOKS += GST1_PLUGINS_GOOD_APPLY_WPEWEBKIT_EXTRA_PATCHES_POST_HOOK +ifeq ($(BR2_PACKAGE_WPEWEBKIT_USE_GSTREAMER_WEBRTC),y) +define GST1_PLUGINS_GOOD_APPLY_GSTWEBRTC_PATCHES_POST_HOOK + cd $(@D) && { for P in $(TOPDIR)/$(GST1_PLUGINS_GOOD_PKGDIR)/$(GST1_PLUGINS_GOOD_VERSION)-gstwebrtc/*.patch; do patch -p1 < "$$P" ; done; } +endef +GST1_PLUGINS_GOOD_POST_PATCH_HOOKS += GST1_PLUGINS_GOOD_APPLY_GSTWEBRTC_PATCHES_POST_HOOK +endif + $(eval $(meson-package)) diff --git a/package/gstreamer1/gstreamer1/1.18.6-gstwebrtc/0001-Backports-from-GStreamer-1.22.patch b/package/gstreamer1/gstreamer1/1.18.6-gstwebrtc/0001-Backports-from-GStreamer-1.22.patch new file mode 100644 index 000000000000..ae683fadfa7c --- /dev/null +++ b/package/gstreamer1/gstreamer1/1.18.6-gstwebrtc/0001-Backports-from-GStreamer-1.22.patch @@ -0,0 +1,2603 @@ +From 0fa540b5d7447dd17a9ce4a5f5f869210c11acad Mon Sep 17 00:00:00 2001 +From: Philippe Normand +Date: Thu, 19 Jan 2023 14:50:21 +0000 +Subject: [PATCH] Backports from GStreamer 1.22 + +--- + gst/gstbuffer.c | 279 
+++++++++++++++++++++++++++++--------------- + gst/gstbuffer.h | 133 ++++++++++++++++----- + gst/gstelement.c | 84 ++++++++++--- + gst/gstelement.h | 143 ++++++++++++++++++++++- + gst/gstevent.c | 205 +++++++++++++++++++++----------- + gst/gstevent.h | 26 +++++ + gst/gstmeta.c | 220 +++++++++++++++++++++++++++++++++- + gst/gstmeta.h | 61 ++++++++++ + gst/gstminiobject.c | 31 +++-- + gst/gstminiobject.h | 7 ++ + gst/gstquark.c | 3 +- + gst/gstquark.h | 7 +- + 12 files changed, 976 insertions(+), 223 deletions(-) + +diff --git a/gst/gstbuffer.c b/gst/gstbuffer.c +index 8048651120..186ae34c93 100644 +--- a/gst/gstbuffer.c ++++ b/gst/gstbuffer.c +@@ -34,7 +34,8 @@ + * created one will typically allocate memory for it and add it to the buffer. + * The following example creates a buffer that can hold a given video frame + * with a given width, height and bits per plane. +- * |[ ++ * ++ * ``` C + * GstBuffer *buffer; + * GstMemory *memory; + * gint size, width, height, bpp; +@@ -44,7 +45,7 @@ + * memory = gst_allocator_alloc (NULL, size, NULL); + * gst_buffer_insert_memory (buffer, -1, memory); + * ... +- * ]| ++ * ``` + * + * Alternatively, use gst_buffer_new_allocate() to create a buffer with + * preallocated data of a given size. +@@ -84,7 +85,7 @@ + * + * If a plug-in wants to modify the buffer data or metadata in-place, it should + * first obtain a buffer that is safe to modify by using +- * gst_buffer_make_writable(). This function is optimized so that a copy will ++ * gst_buffer_make_writable(). This function is optimized so that a copy will + * only be made when it is necessary. + * + * Several flags of the buffer can be set and unset with the +@@ -96,7 +97,7 @@ + * needed. + * + * Arbitrary extra metadata can be set on a buffer with gst_buffer_add_meta(). +- * Metadata can be retrieved with gst_buffer_get_meta(). See also #GstMeta ++ * Metadata can be retrieved with gst_buffer_get_meta(). See also #GstMeta. + * + * An element should either unref the buffer or push it out on a src pad + * using gst_pad_push() (see #GstPad). +@@ -114,8 +115,8 @@ + * using the #GstMemory of the parent buffer, and wants to prevent the parent + * buffer from being returned to a buffer pool until the #GstMemory is available + * for re-use. (Since: 1.6) +- * + */ ++ + #define GST_DISABLE_MINIOBJECT_INLINE_FUNCTIONS + #include "gst_private.h" + +@@ -129,9 +130,13 @@ + #include "gstbuffer.h" + #include "gstbufferpool.h" + #include "gstinfo.h" ++#include "gstmeta.h" + #include "gstutils.h" + #include "gstversion.h" + ++/* For g_memdup2 */ ++#include "glib-compat-private.h" ++ + GType _gst_buffer_type = 0; + + /* info->size will be sizeof(FooMeta) which contains a GstMeta at the beginning +@@ -355,7 +360,7 @@ _replace_memory (GstBuffer * buffer, guint len, guint idx, guint length, + * gst_buffer_get_flags: + * @buffer: a #GstBuffer + * +- * Get the #GstBufferFlags flags set on this buffer. ++ * Gets the #GstBufferFlags flags set on this buffer. + * + * Returns: the flags set on this buffer. + * +@@ -501,7 +506,7 @@ _priv_gst_buffer_initialize (void) + /** + * gst_buffer_get_max_memory: + * +- * Get the maximum amount of memory blocks that a buffer can hold. This is a ++ * Gets the maximum amount of memory blocks that a buffer can hold. This is a + * compile time constant that can be queried with the function. 
+ * + * When more memory blocks are added, existing memory blocks will be merged +@@ -541,6 +546,7 @@ gst_buffer_copy_into (GstBuffer * dest, GstBuffer * src, + GstMetaItem *walk; + gsize bufsize; + gboolean region = FALSE; ++ gboolean sharing_mem = FALSE; + + g_return_val_if_fail (dest != NULL, FALSE); + g_return_val_if_fail (src != NULL, FALSE); +@@ -644,6 +650,9 @@ gst_buffer_copy_into (GstBuffer * dest, GstBuffer * src, + return FALSE; + } + ++ /* Indicates if dest references any of src memories. */ ++ sharing_mem |= (newmem == mem); ++ + _memory_add (dest, -1, newmem); + left -= tocopy; + } +@@ -657,11 +666,19 @@ gst_buffer_copy_into (GstBuffer * dest, GstBuffer * src, + gst_buffer_remove_memory_range (dest, dest_len, -1); + return FALSE; + } ++ ++ /* If we were sharing memory and the merge is no-op, we are still sharing. */ ++ sharing_mem &= (mem == GST_BUFFER_MEM_PTR (dest, 0)); ++ + _replace_memory (dest, len, 0, len, mem); + } + } + + if (flags & GST_BUFFER_COPY_META) { ++ gboolean deep; ++ ++ deep = (flags & GST_BUFFER_COPY_DEEP) != 0; ++ + /* NOTE: GstGLSyncMeta copying relies on the meta + * being copied now, after the buffer data, + * so this has to happen last */ +@@ -679,6 +696,11 @@ gst_buffer_copy_into (GstBuffer * dest, GstBuffer * src, + GST_CAT_DEBUG (GST_CAT_BUFFER, + "don't copy memory meta %p of API type %s", meta, + g_type_name (info->api)); ++ } else if (deep && gst_meta_api_type_has_tag (info->api, ++ _gst_meta_tag_memory_reference)) { ++ GST_CAT_DEBUG (GST_CAT_BUFFER, ++ "don't copy memory reference meta %p of API type %s", meta, ++ g_type_name (info->api)); + } else if (info->transform_func) { + GstMetaTransformCopy copy_data; + +@@ -696,6 +718,14 @@ gst_buffer_copy_into (GstBuffer * dest, GstBuffer * src, + } + } + ++ if (sharing_mem && src->pool != NULL) { ++ /* The new buffer references some of src's memories. We have to ensure that ++ * src buffer does not return to its buffer pool as long as its memories are ++ * used by other buffers. That would cause the buffer to be discarted by the ++ * pool because its memories are not writable. */ ++ gst_buffer_add_parent_buffer_meta (dest, src); ++ } ++ + return TRUE; + } + +@@ -730,10 +760,10 @@ _gst_buffer_copy (const GstBuffer * buffer) + * gst_buffer_copy_deep: + * @buf: a #GstBuffer. + * +- * Create a copy of the given buffer. This will make a newly allocated ++ * Creates a copy of the given buffer. This will make a newly allocated + * copy of the data the source buffer contains. + * +- * Returns: (transfer full): a new copy of @buf. ++ * Returns: (transfer full) (nullable): a new copy of @buf if the copy succeeded, %NULL otherwise. 
+ * + * Since: 1.6 + */ +@@ -775,6 +805,15 @@ _gst_buffer_free (GstBuffer * buffer) + + GST_CAT_LOG (GST_CAT_BUFFER, "finalize %p", buffer); + ++ /* free our memory */ ++ len = GST_BUFFER_MEM_LEN (buffer); ++ for (i = 0; i < len; i++) { ++ gst_memory_unlock (GST_BUFFER_MEM_PTR (buffer, i), GST_LOCK_FLAG_EXCLUSIVE); ++ gst_mini_object_remove_parent (GST_MINI_OBJECT_CAST (GST_BUFFER_MEM_PTR ++ (buffer, i)), GST_MINI_OBJECT_CAST (buffer)); ++ gst_memory_unref (GST_BUFFER_MEM_PTR (buffer, i)); ++ } ++ + /* free metadata */ + for (walk = GST_BUFFER_META (buffer); walk; walk = next) { + GstMeta *meta = &walk->meta; +@@ -793,15 +832,6 @@ _gst_buffer_free (GstBuffer * buffer) + * itself */ + msize = GST_BUFFER_SLICE_SIZE (buffer); + +- /* free our memory */ +- len = GST_BUFFER_MEM_LEN (buffer); +- for (i = 0; i < len; i++) { +- gst_memory_unlock (GST_BUFFER_MEM_PTR (buffer, i), GST_LOCK_FLAG_EXCLUSIVE); +- gst_mini_object_remove_parent (GST_MINI_OBJECT_CAST (GST_BUFFER_MEM_PTR +- (buffer, i)), GST_MINI_OBJECT_CAST (buffer)); +- gst_memory_unref (GST_BUFFER_MEM_PTR (buffer, i)); +- } +- + /* we set msize to 0 when the buffer is part of the memory block */ + if (msize) { + #ifdef USE_POISONING +@@ -839,8 +869,6 @@ gst_buffer_init (GstBufferImpl * buffer, gsize size) + * + * Creates a newly allocated buffer without any data. + * +- * MT safe. +- * + * Returns: (transfer full): the new #GstBuffer. + */ + GstBuffer * +@@ -871,10 +899,7 @@ gst_buffer_new (void) + * + * Note that when @size == 0, the buffer will not have memory associated with it. + * +- * MT safe. +- * +- * Returns: (transfer full) (nullable): a new #GstBuffer, or %NULL if +- * the memory couldn't be allocated. ++ * Returns: (transfer full) (nullable): a new #GstBuffer + */ + GstBuffer * + gst_buffer_new_allocate (GstAllocator * allocator, gsize size, +@@ -972,7 +997,7 @@ no_memory: + * @user_data: (allow-none): user_data + * @notify: (allow-none) (scope async) (closure user_data): called with @user_data when the memory is freed + * +- * Allocate a new buffer that wraps the given memory. @data must point to ++ * Allocates a new buffer that wraps the given memory. @data must point to + * @maxsize of memory, the wrapped buffer will have the region from @offset and + * @size visible. + * +@@ -1008,9 +1033,7 @@ gst_buffer_new_wrapped_full (GstMemoryFlags flags, gpointer data, + * @size: allocated size of @data + * + * Creates a new buffer that wraps the given @data. The memory will be freed +- * with g_free and will be marked writable. +- * +- * MT safe. ++ * with g_free() and will be marked writable. + * + * Returns: (transfer full): a new #GstBuffer + */ +@@ -1027,8 +1050,6 @@ gst_buffer_new_wrapped (gpointer data, gsize size) + * Creates a new #GstBuffer that wraps the given @bytes. The data inside + * @bytes cannot be %NULL and the resulting buffer will be marked as read only. + * +- * MT safe. +- * + * Returns: (transfer full): a new #GstBuffer wrapping @bytes + * + * Since: 1.16 +@@ -1047,11 +1068,30 @@ gst_buffer_new_wrapped_bytes (GBytes * bytes) + size, 0, size, g_bytes_ref (bytes), (GDestroyNotify) g_bytes_unref); + } + ++/** ++ * gst_buffer_new_memdup: ++ * @data: (array length=size) (element-type guint8) (transfer none): data to copy into new buffer ++ * @size: size of @data in bytes ++ * ++ * Creates a new buffer of size @size and fills it with a copy of @data. 
++ * ++ * Returns: (transfer full): a new #GstBuffer ++ * ++ * Since: 1.20 ++ */ ++GstBuffer * ++gst_buffer_new_memdup (gconstpointer data, gsize size) ++{ ++ gpointer data2 = g_memdup2 (data, size); ++ ++ return gst_buffer_new_wrapped_full (0, data2, size, 0, size, data2, g_free); ++} ++ + /** + * gst_buffer_n_memory: + * @buffer: a #GstBuffer. + * +- * Get the amount of memory blocks that this buffer has. This amount is never ++ * Gets the amount of memory blocks that this buffer has. This amount is never + * larger than what gst_buffer_get_max_memory() returns. + * + * Returns: the number of memory blocks this buffer is made of. +@@ -1069,7 +1109,7 @@ gst_buffer_n_memory (GstBuffer * buffer) + * @buffer: a #GstBuffer. + * @mem: (transfer full): a #GstMemory. + * +- * Prepend the memory block @mem to @buffer. This function takes ++ * Prepends the memory block @mem to @buffer. This function takes + * ownership of @mem and thus doesn't increase its refcount. + * + * This function is identical to gst_buffer_insert_memory() with an index of 0. +@@ -1086,7 +1126,7 @@ gst_buffer_prepend_memory (GstBuffer * buffer, GstMemory * mem) + * @buffer: a #GstBuffer. + * @mem: (transfer full): a #GstMemory. + * +- * Append the memory block @mem to @buffer. This function takes ++ * Appends the memory block @mem to @buffer. This function takes + * ownership of @mem and thus doesn't increase its refcount. + * + * This function is identical to gst_buffer_insert_memory() with an index of -1. +@@ -1104,7 +1144,7 @@ gst_buffer_append_memory (GstBuffer * buffer, GstMemory * mem) + * @idx: the index to add the memory at, or -1 to append it to the end + * @mem: (transfer full): a #GstMemory. + * +- * Insert the memory block @mem to @buffer at @idx. This function takes ownership ++ * Inserts the memory block @mem into @buffer at @idx. This function takes ownership + * of @mem and thus doesn't increase its refcount. + * + * Only gst_buffer_get_max_memory() can be added to a buffer. If more memory is +@@ -1160,7 +1200,7 @@ _get_mapped (GstBuffer * buffer, guint idx, GstMapInfo * info, + * @buffer: a #GstBuffer. + * @idx: an index + * +- * Get the memory block at @idx in @buffer. The memory block stays valid until ++ * Gets the memory block at @idx in @buffer. The memory block stays valid until + * the memory block in @buffer is removed, replaced or merged, typically with + * any call that modifies the memory in @buffer. + * +@@ -1180,10 +1220,10 @@ gst_buffer_peek_memory (GstBuffer * buffer, guint idx) + * @buffer: a #GstBuffer. + * @idx: an index + * +- * Get the memory block at index @idx in @buffer. ++ * Gets the memory block at index @idx in @buffer. + * + * Returns: (transfer full) (nullable): a #GstMemory that contains the data of the +- * memory block at @idx. Use gst_memory_unref () after usage. ++ * memory block at @idx. + */ + GstMemory * + gst_buffer_get_memory (GstBuffer * buffer, guint idx) +@@ -1195,11 +1235,10 @@ gst_buffer_get_memory (GstBuffer * buffer, guint idx) + * gst_buffer_get_all_memory: + * @buffer: a #GstBuffer. + * +- * Get all the memory block in @buffer. The memory blocks will be merged ++ * Gets all the memory blocks in @buffer. The memory blocks will be merged + * into one large #GstMemory. + * + * Returns: (transfer full) (nullable): a #GstMemory that contains the merged memory. +- * Use gst_memory_unref () after usage. 
+ */ + GstMemory * + gst_buffer_get_all_memory (GstBuffer * buffer) +@@ -1213,13 +1252,13 @@ gst_buffer_get_all_memory (GstBuffer * buffer) + * @idx: an index + * @length: a length + * +- * Get @length memory blocks in @buffer starting at @idx. The memory blocks will ++ * Gets @length memory blocks in @buffer starting at @idx. The memory blocks will + * be merged into one large #GstMemory. + * + * If @length is -1, all memory starting from @idx is merged. + * + * Returns: (transfer full) (nullable): a #GstMemory that contains the merged data of @length +- * blocks starting at @idx. Use gst_memory_unref () after usage. ++ * blocks starting at @idx. + */ + GstMemory * + gst_buffer_get_memory_range (GstBuffer * buffer, guint idx, gint length) +@@ -1270,7 +1309,7 @@ gst_buffer_replace_all_memory (GstBuffer * buffer, GstMemory * mem) + * gst_buffer_replace_memory_range: + * @buffer: a #GstBuffer. + * @idx: an index +- * @length: a length should not be 0 ++ * @length: a length, should not be 0 + * @mem: (transfer full): a #GstMemory + * + * Replaces @length memory blocks in @buffer starting at @idx with @mem. +@@ -1306,7 +1345,7 @@ gst_buffer_replace_memory_range (GstBuffer * buffer, guint idx, gint length, + * @buffer: a #GstBuffer. + * @idx: an index + * +- * Remove the memory block in @b at index @i. ++ * Removes the memory block in @b at index @i. + */ + void + gst_buffer_remove_memory (GstBuffer * buffer, guint idx) +@@ -1318,7 +1357,7 @@ gst_buffer_remove_memory (GstBuffer * buffer, guint idx) + * gst_buffer_remove_all_memory: + * @buffer: a #GstBuffer. + * +- * Remove all the memory blocks in @buffer. ++ * Removes all the memory blocks in @buffer. + */ + void + gst_buffer_remove_all_memory (GstBuffer * buffer) +@@ -1333,7 +1372,7 @@ gst_buffer_remove_all_memory (GstBuffer * buffer) + * @idx: an index + * @length: a length + * +- * Remove @length memory blocks in @buffer starting from @idx. ++ * Removes @length memory blocks in @buffer starting from @idx. + * + * @length can be -1, in which case all memory starting from @idx is removed. + */ +@@ -1366,7 +1405,7 @@ gst_buffer_remove_memory_range (GstBuffer * buffer, guint idx, gint length) + * @length: (out): pointer to length + * @skip: (out): pointer to skip + * +- * Find the memory blocks that span @size bytes starting from @offset ++ * Finds the memory blocks that span @size bytes starting from @offset + * in @buffer. + * + * When this function returns %TRUE, @idx will contain the index of the first +@@ -1434,9 +1473,9 @@ gst_buffer_find_memory (GstBuffer * buffer, gsize offset, gsize size, + * gst_buffer_is_memory_range_writable: + * @buffer: a #GstBuffer. + * @idx: an index +- * @length: a length should not be 0 ++ * @length: a length, should not be 0 + * +- * Check if @length memory blocks in @buffer starting from @idx are writable. ++ * Checks if @length memory blocks in @buffer starting from @idx are writable. + * + * @length can be -1 to check all the memory blocks after @idx. + * +@@ -1477,7 +1516,7 @@ gst_buffer_is_memory_range_writable (GstBuffer * buffer, guint idx, gint length) + * gst_buffer_is_all_memory_writable: + * @buffer: a #GstBuffer. + * +- * Check if all memory blocks in @buffer are writable. ++ * Checks if all memory blocks in @buffer are writable. + * + * Note that this function does not check if @buffer is writable, use + * gst_buffer_is_writable() to check that if needed. 
+@@ -1498,7 +1537,7 @@ gst_buffer_is_all_memory_writable (GstBuffer * buffer) + * @offset: (out) (allow-none): a pointer to the offset + * @maxsize: (out) (allow-none): a pointer to the maxsize + * +- * Get the total size of the memory blocks in @b. ++ * Gets the total size of the memory blocks in @buffer. + * + * When not %NULL, @offset will contain the offset of the data in the + * first memory block in @buffer and @maxsize will contain the sum of +@@ -1518,7 +1557,7 @@ gst_buffer_get_sizes (GstBuffer * buffer, gsize * offset, gsize * maxsize) + * gst_buffer_get_size: + * @buffer: a #GstBuffer. + * +- * Get the total size of the memory blocks in @buffer. ++ * Gets the total size of the memory blocks in @buffer. + * + * Returns: total size of the memory blocks in @buffer. + */ +@@ -1545,7 +1584,7 @@ gst_buffer_get_size (GstBuffer * buffer) + * @offset: (out) (allow-none): a pointer to the offset + * @maxsize: (out) (allow-none): a pointer to the maxsize + * +- * Get the total size of @length memory blocks stating from @idx in @buffer. ++ * Gets the total size of @length memory blocks stating from @idx in @buffer. + * + * When not %NULL, @offset will contain the offset of the data in the + * memory block in @buffer at @idx and @maxsize will contain the sum of the size +@@ -1625,7 +1664,7 @@ gst_buffer_get_sizes_range (GstBuffer * buffer, guint idx, gint length, + * @offset: the offset adjustment + * @size: the new size or -1 to just adjust the offset + * +- * Set the offset and total size of the memory blocks in @buffer. ++ * Sets the offset and total size of the memory blocks in @buffer. + */ + void + gst_buffer_resize (GstBuffer * buffer, gssize offset, gssize size) +@@ -1638,7 +1677,7 @@ gst_buffer_resize (GstBuffer * buffer, gssize offset, gssize size) + * @buffer: a #GstBuffer. + * @size: the new size + * +- * Set the total size of the memory blocks in @buffer. ++ * Sets the total size of the memory blocks in @buffer. + */ + void + gst_buffer_set_size (GstBuffer * buffer, gssize size) +@@ -1654,7 +1693,7 @@ gst_buffer_set_size (GstBuffer * buffer, gssize size) + * @offset: the offset adjustment + * @size: the new size or -1 to just adjust the offset + * +- * Set the total size of the @length memory blocks starting at @idx in ++ * Sets the total size of the @length memory blocks starting at @idx in + * @buffer + * + * Returns: %TRUE if resizing succeeded, %FALSE otherwise. +@@ -1760,8 +1799,7 @@ gst_buffer_resize_range (GstBuffer * buffer, guint idx, gint length, + * @info: (out caller-allocates): info about the mapping + * @flags: flags for the mapping + * +- * This function fills @info with the #GstMapInfo of all merged memory +- * blocks in @buffer. ++ * Fills @info with the #GstMapInfo of all merged memory blocks in @buffer. + * + * @flags describe the desired access of the memory. When @flags is + * #GST_MAP_WRITE, @buffer should be writable (as returned from +@@ -1790,7 +1828,7 @@ gst_buffer_map (GstBuffer * buffer, GstMapInfo * info, GstMapFlags flags) + * @info: (out caller-allocates): info about the mapping + * @flags: flags for the mapping + * +- * This function fills @info with the #GstMapInfo of @length merged memory blocks ++ * Fills @info with the #GstMapInfo of @length merged memory blocks + * starting at @idx in @buffer. When @length is -1, all memory blocks starting + * from @idx are merged and mapped. + * +@@ -1887,7 +1925,7 @@ cannot_map: + * @buffer: a #GstBuffer. + * @info: a #GstMapInfo + * +- * Release the memory previously mapped with gst_buffer_map(). 
++ * Releases the memory previously mapped with gst_buffer_map(). + */ + void + gst_buffer_unmap (GstBuffer * buffer, GstMapInfo * info) +@@ -1895,12 +1933,7 @@ gst_buffer_unmap (GstBuffer * buffer, GstMapInfo * info) + g_return_if_fail (GST_IS_BUFFER (buffer)); + g_return_if_fail (info != NULL); + +- /* we need to check for NULL, it is possible that we tried to map a buffer +- * without memory and we should be able to unmap that fine */ +- if (G_LIKELY (info->memory)) { +- gst_memory_unmap (info->memory, info); +- gst_memory_unref (info->memory); +- } ++ _gst_buffer_map_info_clear ((GstBufferMapInfo *) info); + } + + /** +@@ -1910,7 +1943,7 @@ gst_buffer_unmap (GstBuffer * buffer, GstMapInfo * info) + * @src: (array length=size) (element-type guint8): the source address + * @size: the size to fill + * +- * Copy @size bytes from @src to @buffer at @offset. ++ * Copies @size bytes from @src to @buffer at @offset. + * + * Returns: The amount of bytes copied. This value can be lower than @size + * when @buffer did not contain enough data. +@@ -1963,7 +1996,7 @@ gst_buffer_fill (GstBuffer * buffer, gsize offset, gconstpointer src, + * the destination address + * @size: the size to extract + * +- * Copy @size bytes starting from @offset in @buffer to @dest. ++ * Copies @size bytes starting from @offset in @buffer to @dest. + * + * Returns: The amount of bytes extracted. This value can be lower than @size + * when @buffer did not contain enough data. +@@ -2013,7 +2046,7 @@ gst_buffer_extract (GstBuffer * buffer, gsize offset, gpointer dest, gsize size) + * @mem: (array length=size) (element-type guint8): the memory to compare + * @size: the size to compare + * +- * Compare @size bytes starting from @offset in @buffer with the memory in @mem. ++ * Compares @size bytes starting from @offset in @buffer with the memory in @mem. + * + * Returns: 0 if the memory is equal. + */ +@@ -2066,7 +2099,7 @@ gst_buffer_memcmp (GstBuffer * buffer, gsize offset, gconstpointer mem, + * @val: the value to set + * @size: the size to set + * +- * Fill @buf with @size bytes with @val starting from @offset. ++ * Fills @buf with @size bytes with @val starting from @offset. + * + * Returns: The amount of bytes filled. This value can be lower than @size + * when @buffer did not contain enough data. +@@ -2125,10 +2158,8 @@ gst_buffer_memset (GstBuffer * buffer, gsize offset, guint8 val, gsize size) + * duration and offset end fields are also copied. If not they will be set + * to #GST_CLOCK_TIME_NONE and #GST_BUFFER_OFFSET_NONE. + * +- * MT safe. +- * +- * Returns: (transfer full): the new #GstBuffer or %NULL if the arguments were +- * invalid. ++ * Returns: (transfer full) (nullable): the new #GstBuffer or %NULL if copying ++ * failed. + */ + GstBuffer * + gst_buffer_copy_region (GstBuffer * buffer, GstBufferCopyFlags flags, +@@ -2155,7 +2186,7 @@ gst_buffer_copy_region (GstBuffer * buffer, GstBufferCopyFlags flags, + * @buf1: (transfer full): the first source #GstBuffer to append. + * @buf2: (transfer full): the second source #GstBuffer to append. + * +- * Append all the memory from @buf2 to @buf1. The result buffer will contain a ++ * Appends all the memory from @buf2 to @buf1. The result buffer will contain a + * concatenation of the memory of @buf1 and @buf2. 
+ * + * Returns: (transfer full): the new #GstBuffer that contains the memory +@@ -2174,7 +2205,7 @@ gst_buffer_append (GstBuffer * buf1, GstBuffer * buf2) + * @offset: the offset in @buf2 + * @size: the size or -1 of @buf2 + * +- * Append @size bytes at @offset from @buf2 to @buf1. The result buffer will ++ * Appends @size bytes at @offset from @buf2 to @buf1. The result buffer will + * contain a concatenation of the memory of @buf1 and the requested region of + * @buf2. + * +@@ -2218,14 +2249,13 @@ gst_buffer_append_region (GstBuffer * buf1, GstBuffer * buf2, gssize offset, + * @buffer: a #GstBuffer + * @api: the #GType of an API + * +- * Get the metadata for @api on buffer. When there is no such metadata, %NULL is ++ * Gets the metadata for @api on buffer. When there is no such metadata, %NULL is + * returned. If multiple metadata with the given @api are attached to this + * buffer only the first one is returned. To handle multiple metadata with a + * given API use gst_buffer_iterate_meta() or gst_buffer_foreach_meta() instead +- * and check the meta->info.api member for the API type. ++ * and check the `meta->info.api` member for the API type. + * +- * Returns: (transfer none) (nullable): the metadata for @api on +- * @buffer. ++ * Returns: (transfer none) (nullable): the metadata for @api on @buffer. + */ + GstMeta * + gst_buffer_get_meta (GstBuffer * buffer, GType api) +@@ -2275,7 +2305,7 @@ gst_buffer_get_n_meta (GstBuffer * buffer, GType api_type) + * @info: a #GstMetaInfo + * @params: params for @info + * +- * Add metadata for @info to @buffer using the parameters in @params. ++ * Adds metadata for @info to @buffer using the parameters in @params. + * + * Returns: (transfer none) (nullable): the metadata for the api in @info on @buffer. + */ +@@ -2338,7 +2368,7 @@ init_failed: + * @buffer: a #GstBuffer + * @meta: a #GstMeta + * +- * Remove the metadata for @meta on @buffer. ++ * Removes the metadata for @meta on @buffer. + * + * Returns: %TRUE if the metadata existed and was removed, %FALSE if no such + * metadata was on @buffer. +@@ -2392,7 +2422,7 @@ gst_buffer_remove_meta (GstBuffer * buffer, GstMeta * meta) + * @buffer: a #GstBuffer + * @state: (out caller-allocates): an opaque state pointer + * +- * Retrieve the next #GstMeta after @current. If @state points ++ * Retrieves the next #GstMeta after @current. If @state points + * to %NULL, the first metadata is returned. + * + * @state will be updated with an opaque state pointer +@@ -2428,7 +2458,7 @@ gst_buffer_iterate_meta (GstBuffer * buffer, gpointer * state) + * @state: (out caller-allocates): an opaque state pointer + * @meta_api_type: only return #GstMeta of this type + * +- * Retrieve the next #GstMeta of type @meta_api_type after the current one ++ * Retrieves the next #GstMeta of type @meta_api_type after the current one + * according to @state. If @state points to %NULL, the first metadata of + * type @meta_api_type is returned. + * +@@ -2471,10 +2501,10 @@ gst_buffer_iterate_meta_filtered (GstBuffer * buffer, gpointer * state, + * @func: (scope call): a #GstBufferForeachMetaFunc to call + * @user_data: (closure): user data passed to @func + * +- * Call @func with @user_data for each meta in @buffer. ++ * Calls @func with @user_data for each meta in @buffer. + * + * @func can modify the passed meta pointer or its contents. 
The return value +- * of @func define if this function returns or if the remaining metadata items ++ * of @func defines if this function returns or if the remaining metadata items + * in the buffer should be skipped. + * + * Returns: %FALSE when @func returned %FALSE for one of the metadata. +@@ -2577,7 +2607,7 @@ GST_DEBUG_CATEGORY_STATIC (gst_parent_buffer_meta_debug); + * @buffer: (transfer none): a #GstBuffer + * @ref: (transfer none): a #GstBuffer to ref + * +- * Add a #GstParentBufferMeta to @buffer that holds a reference on ++ * Adds a #GstParentBufferMeta to @buffer that holds a reference on + * @ref until the buffer is freed. + * + * Returns: (transfer none) (nullable): The #GstParentBufferMeta that was added to the buffer +@@ -2663,7 +2693,7 @@ GType + gst_parent_buffer_meta_api_get_type (void) + { + static GType type = 0; +- static const gchar *tags[] = { NULL }; ++ static const gchar *tags[] = { GST_META_TAG_MEMORY_REFERENCE_STR, NULL }; + + if (g_once_init_enter (&type)) { + GType _type = gst_meta_api_type_register ("GstParentBufferMetaAPI", tags); +@@ -2676,7 +2706,7 @@ gst_parent_buffer_meta_api_get_type (void) + /** + * gst_parent_buffer_meta_get_info: + * +- * Get the global #GstMetaInfo describing the #GstParentBufferMeta meta. ++ * Gets the global #GstMetaInfo describing the #GstParentBufferMeta meta. + * + * Returns: (transfer none): The #GstMetaInfo + * +@@ -2710,7 +2740,7 @@ GST_DEBUG_CATEGORY_STATIC (gst_reference_timestamp_meta_debug); + * @timestamp: timestamp + * @duration: duration, or %GST_CLOCK_TIME_NONE + * +- * Add a #GstReferenceTimestampMeta to @buffer that holds a @timestamp and ++ * Adds a #GstReferenceTimestampMeta to @buffer that holds a @timestamp and + * optionally @duration based on a specific timestamp @reference. See the + * documentation of #GstReferenceTimestampMeta for details. + * +@@ -2746,7 +2776,7 @@ gst_buffer_add_reference_timestamp_meta (GstBuffer * buffer, + * @buffer: a #GstBuffer + * @reference: (allow-none): a reference #GstCaps + * +- * Find the first #GstReferenceTimestampMeta on @buffer that conforms to ++ * Finds the first #GstReferenceTimestampMeta on @buffer that conforms to + * @reference. Conformance is tested by checking if the meta's reference is a + * subset of @reference. + * +@@ -2848,7 +2878,7 @@ gst_reference_timestamp_meta_api_get_type (void) + /** + * gst_reference_timestamp_meta_get_info: + * +- * Get the global #GstMetaInfo describing the #GstReferenceTimestampMeta meta. ++ * Gets the global #GstMetaInfo describing the #GstReferenceTimestampMeta meta. + * + * Returns: (transfer none): The #GstMetaInfo + * +@@ -2873,6 +2903,67 @@ gst_reference_timestamp_meta_get_info (void) + return meta_info; + } + ++/** ++ * gst_buffer_add_custom_meta: ++ * @buffer: (transfer none): a #GstBuffer ++ * @name: the registered name of the desired custom meta ++ * ++ * Creates and adds a #GstCustomMeta for the desired @name. @name must have ++ * been successfully registered with gst_meta_register_custom(). 
++ * ++ * Returns: (transfer none) (nullable): The #GstCustomMeta that was added to the buffer ++ * ++ * Since: 1.20 ++ */ ++GstCustomMeta * ++gst_buffer_add_custom_meta (GstBuffer * buffer, const gchar * name) ++{ ++ GstCustomMeta *meta; ++ const GstMetaInfo *info; ++ ++ g_return_val_if_fail (name != NULL, NULL); ++ g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL); ++ ++ info = gst_meta_get_info (name); ++ ++ if (info == NULL || !gst_meta_info_is_custom (info)) ++ return NULL; ++ ++ meta = (GstCustomMeta *) gst_buffer_add_meta (buffer, info, NULL); ++ ++ return meta; ++} ++ ++/** ++ * gst_buffer_get_custom_meta: ++ * @buffer: a #GstBuffer ++ * @name: the registered name of the custom meta to retrieve. ++ * ++ * Finds the first #GstCustomMeta on @buffer for the desired @name. ++ * ++ * Returns: (transfer none) (nullable): the #GstCustomMeta ++ * ++ * Since: 1.20 ++ */ ++GstCustomMeta * ++gst_buffer_get_custom_meta (GstBuffer * buffer, const gchar * name) ++{ ++ const GstMetaInfo *info; ++ ++ g_return_val_if_fail (buffer != NULL, NULL); ++ g_return_val_if_fail (name != NULL, NULL); ++ ++ info = gst_meta_get_info (name); ++ ++ if (!info) ++ return NULL; ++ ++ if (!gst_meta_info_is_custom (info)) ++ return NULL; ++ ++ return (GstCustomMeta *) gst_buffer_get_meta (buffer, info->api); ++} ++ + /** + * gst_buffer_ref: (skip) + * @buf: a #GstBuffer. +@@ -2883,7 +2974,7 @@ gst_reference_timestamp_meta_get_info (void) + * of @buf and its metadata, see gst_buffer_is_writable(). + * It is important to note that keeping additional references to + * GstBuffer instances can potentially increase the number +- * of memcpy operations in a pipeline. ++ * of `memcpy` operations in a pipeline. + * + * Returns: (transfer full): @buf + */ +@@ -2929,13 +3020,13 @@ gst_clear_buffer (GstBuffer ** buf_ptr) + * gst_buffer_copy: (skip) + * @buf: a #GstBuffer. + * +- * Create a copy of the given buffer. This will only copy the buffer's ++ * Creates a copy of the given buffer. This will only copy the buffer's + * data to a newly allocated memory if needed (if the type of memory + * requires it), otherwise the underlying data is just referenced. + * Check gst_buffer_copy_deep() if you want to force the data + * to be copied to newly allocated memory. + * +- * Returns: (transfer full): a new copy of @buf. ++ * Returns: (transfer full) (nullable): a new copy of @buf if the copy succeeded, %NULL otherwise. + */ + GstBuffer * + gst_buffer_copy (const GstBuffer * buf) +diff --git a/gst/gstbuffer.h b/gst/gstbuffer.h +index c564905b0f..6a8b84fc69 100644 +--- a/gst/gstbuffer.h ++++ b/gst/gstbuffer.h +@@ -47,7 +47,7 @@ typedef struct _GstBufferPool GstBufferPool; + * GST_BUFFER_FLAGS: + * @buf: a #GstBuffer. + * +- * A flags word containing #GstBufferFlags flags set on this buffer. ++ * Returns a flags word containing #GstBufferFlags flags set on this buffer. + */ + #define GST_BUFFER_FLAGS(buf) GST_MINI_OBJECT_FLAGS(buf) + /** +@@ -80,25 +80,27 @@ typedef struct _GstBufferPool GstBufferPool; + * GST_BUFFER_PTS: + * @buf: a #GstBuffer.: + * +- * The presentation timestamp (pts) in nanoseconds (as a #GstClockTime) ++ * Gets the presentation timestamp (pts) in nanoseconds (as a #GstClockTime) + * of the data in the buffer. This is the timestamp when the media should be + * presented to the user. ++ * + * Value will be %GST_CLOCK_TIME_NONE if the pts is unknown. + */ + #define GST_BUFFER_PTS(buf) (GST_BUFFER_CAST(buf)->pts) + /** + * GST_BUFFER_DTS: +- * @buf: a #GstBuffer.: ++ * @buf: a #GstBuffer. 
+ * +- * The decoding timestamp (dts) in nanoseconds (as a #GstClockTime) ++ * Gets the decoding timestamp (dts) in nanoseconds (as a #GstClockTime) + * of the data in the buffer. This is the timestamp when the media should be + * decoded or processed otherwise. ++ * + * Value will be %GST_CLOCK_TIME_NONE if the dts is unknown. + */ + #define GST_BUFFER_DTS(buf) (GST_BUFFER_CAST(buf)->dts) + /** + * GST_BUFFER_DTS_OR_PTS: +- * @buf: a #GstBuffer.: ++ * @buf: a #GstBuffer. + * + * Returns the buffer decoding timestamp (dts) if valid, else the buffer + * presentation time (pts) +@@ -110,7 +112,8 @@ typedef struct _GstBufferPool GstBufferPool; + * GST_BUFFER_DURATION: + * @buf: a #GstBuffer. + * +- * The duration in nanoseconds (as a #GstClockTime) of the data in the buffer. ++ * Gets the duration in nanoseconds (as a #GstClockTime) of the data in the buffer. ++ * + * Value will be %GST_CLOCK_TIME_NONE if the duration is unknown. + */ + #define GST_BUFFER_DURATION(buf) (GST_BUFFER_CAST(buf)->duration) +@@ -118,14 +121,14 @@ typedef struct _GstBufferPool GstBufferPool; + * GST_BUFFER_OFFSET: + * @buf: a #GstBuffer. + * +- * The offset in the source file of the beginning of this buffer. ++ * Gets the offset in the source file of the beginning of this buffer. + */ + #define GST_BUFFER_OFFSET(buf) (GST_BUFFER_CAST(buf)->offset) + /** + * GST_BUFFER_OFFSET_END: + * @buf: a #GstBuffer. + * +- * The offset in the source file of the end of this buffer. ++ * Gets the offset in the source file of the end of this buffer. + */ + #define GST_BUFFER_OFFSET_END(buf) (GST_BUFFER_CAST(buf)->offset_end) + +@@ -195,7 +198,9 @@ typedef struct _GstBufferPool GstBufferPool; + * @GST_BUFFER_FLAG_CORRUPTED: the buffer data is corrupted. + * @GST_BUFFER_FLAG_MARKER: the buffer contains a media specific marker. for + * video this is the end of a frame boundary, for audio +- * this is the start of a talkspurt. ++ * this is the start of a talkspurt. for RTP ++ * packets this matches the marker flag in the ++ * RTP packet header. + * @GST_BUFFER_FLAG_HEADER: the buffer contains header information that is + * needed to decode the following data. + * @GST_BUFFER_FLAG_GAP: the buffer has been created to fill a gap in the +@@ -207,14 +212,6 @@ typedef struct _GstBufferPool GstBufferPool; + * @GST_BUFFER_FLAG_DELTA_UNIT: this unit cannot be decoded independently. + * @GST_BUFFER_FLAG_TAG_MEMORY: this flag is set when memory of the buffer + * is added/removed +- * @GST_BUFFER_FLAG_SYNC_AFTER: Elements which write to disk or permanent +- * storage should ensure the data is synced after +- * writing the contents of this buffer. (Since: 1.6) +- * @GST_BUFFER_FLAG_NON_DROPPABLE: This buffer is important and should not be dropped. +- * This can be used to mark important buffers, e.g. to flag +- * RTP packets carrying keyframes or codec setup data for RTP +- * Forward Error Correction purposes, or to prevent still video +- * frames from being dropped by elements due to QoS. (Since: 1.14) + * @GST_BUFFER_FLAG_LAST: additional media specific flags can be added starting from + * this flag. + * +@@ -232,7 +229,29 @@ typedef enum { + GST_BUFFER_FLAG_DROPPABLE = (GST_MINI_OBJECT_FLAG_LAST << 8), + GST_BUFFER_FLAG_DELTA_UNIT = (GST_MINI_OBJECT_FLAG_LAST << 9), + GST_BUFFER_FLAG_TAG_MEMORY = (GST_MINI_OBJECT_FLAG_LAST << 10), ++ ++ /** ++ * GST_BUFFER_FLAG_SYNC_AFTER: ++ * ++ * Elements which write to disk or permanent storage should ensure the data ++ * is synced after writing the contents of this buffer. 
++ * ++ * Since: 1.6 ++ */ + GST_BUFFER_FLAG_SYNC_AFTER = (GST_MINI_OBJECT_FLAG_LAST << 11), ++ ++ /** ++ * GST_BUFFER_FLAG_NON_DROPPABLE: ++ * ++ * This buffer is important and should not be dropped. ++ * ++ * This can be used to mark important buffers, e.g. to flag RTP packets ++ * carrying keyframes or codec setup data for RTP Forward Error Correction ++ * purposes, or to prevent still video frames from being dropped by elements ++ * due to QoS. ++ * ++ * Since: 1.14 ++ */ + GST_BUFFER_FLAG_NON_DROPPABLE = (GST_MINI_OBJECT_FLAG_LAST << 12), + + GST_BUFFER_FLAG_LAST = (GST_MINI_OBJECT_FLAG_LAST << 16) +@@ -297,9 +316,13 @@ GstBuffer * gst_buffer_new_wrapped_full (GstMemoryFlags flags, gpointer data, + GDestroyNotify notify); + GST_API + GstBuffer * gst_buffer_new_wrapped (gpointer data, gsize size); ++ + GST_API + GstBuffer * gst_buffer_new_wrapped_bytes (GBytes * bytes); + ++GST_API ++GstBuffer * gst_buffer_new_memdup (gconstpointer data, gsize size); ++ + /* memory blocks */ + + GST_API +@@ -466,8 +489,6 @@ GstBuffer * gst_buffer_copy_deep (const GstBuffer * buf); + * merged + * @GST_BUFFER_COPY_META: flag indicating that buffer meta should be + * copied +- * @GST_BUFFER_COPY_DEEP: flag indicating that memory should always be +- * copied instead of reffed (Since: 1.2) + * + * A set of flags that can be provided to the gst_buffer_copy_into() + * function to specify which items should be copied. +@@ -479,6 +500,14 @@ typedef enum { + GST_BUFFER_COPY_META = (1 << 2), + GST_BUFFER_COPY_MEMORY = (1 << 3), + GST_BUFFER_COPY_MERGE = (1 << 4), ++ ++ /** ++ * GST_BUFFER_COPY_DEEP: ++ * ++ * flag indicating that memory should always be copied instead of reffed ++ * ++ * Since: 1.2 ++ */ + GST_BUFFER_COPY_DEEP = (1 << 5) + } GstBufferCopyFlags; + +@@ -538,8 +567,8 @@ gboolean gst_buffer_copy_into (GstBuffer *dest, GstBuffer *src + * that it returns. Don't access the argument after calling this function unless + * you have an additional reference to it. + * +- * Returns: (transfer full): a writable buffer which may or may not be the +- * same as @buf ++ * Returns: (transfer full) (nullable): a writable buffer (which may or may not be the ++ * same as @buf) or %NULL if copying is required but not possible. + */ + #define gst_buffer_make_writable(buf) GST_BUFFER_CAST (gst_mini_object_make_writable (GST_MINI_OBJECT_CAST (buf))) + +@@ -617,6 +646,14 @@ gboolean gst_buffer_foreach_meta (GstBuffer *buffer, + GstBufferForeachMetaFunc func, + gpointer user_data); + ++GST_API ++GstCustomMeta * gst_buffer_add_custom_meta (GstBuffer *buffer, ++ const gchar *name); ++ ++GST_API ++GstCustomMeta * gst_buffer_get_custom_meta (GstBuffer *buffer, ++ const gchar *name); ++ + /** + * gst_value_set_buffer: + * @v: a #GValue to receive the data +@@ -682,7 +719,7 @@ GType gst_parent_buffer_meta_api_get_type (void); + * gst_buffer_get_parent_buffer_meta: + * @b: a #GstBuffer + * +- * Find and return a #GstParentBufferMeta if one exists on the ++ * Finds and returns a #GstParentBufferMeta if one exists on the + * buffer + */ + #define gst_buffer_get_parent_buffer_meta(b) \ +@@ -713,10 +750,18 @@ typedef struct _GstReferenceTimestampMeta GstReferenceTimestampMeta; + * captured. + * + * The reference is stored as a #GstCaps in @reference. Examples of valid +- * references would be "timestamp/x-drivername-stream" for timestamps that are locally +- * generated by some driver named "drivername" when generating the stream, +- * e.g. 
based on a frame counter, or "timestamp/x-ntp, host=pool.ntp.org, +- * port=123" for timestamps based on a specific NTP server. ++ * references would be ++ * ++ * * `timestamp/x-drivername-stream`: for timestamps that are locally ++ * generated by some driver named `drivername` when generating the stream, ++ * e.g. based on a frame counter ++ * * `timestamp/x-ntp, host=pool.ntp.org, port=123`: for timestamps based on a ++ * specific NTP server. Note that the host/port parameters might not always ++ * be given. ++ * * `timestamp/x-ptp, version=IEEE1588-2008, domain=1`: for timestamps based ++ * on a given PTP clock. ++ * * `timestamp/x-unix`: for timestamps based on the UNIX epoch according to ++ * the local clock. + * + * Since: 1.14 + */ +@@ -753,6 +798,42 @@ G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstBuffer, gst_buffer_unref) + + G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstBufferPool, gst_object_unref) + ++/** ++ * GstBufferMapInfo: (skip): ++ * ++ * Alias for #GstMapInfo to be used with g_auto(): ++ * ```c ++ * void my_func(GstBuffer *buf) ++ * { ++ * g_auto(GstBufferMapInfo) map = GST_MAP_INFO_INIT; ++ * if (!gst_buffer_map(buf, &map, GST_MAP_READWRITE)) ++ * return; ++ * ... ++ * // No need to call gst_buffer_unmap() ++ * } ++ * ``` ++ * ++ * #GstMapInfo cannot be used with g_auto() because it is ambiguous whether it ++ * needs to be unmapped using gst_buffer_unmap() or gst_memory_unmap(). ++ * ++ * See also #GstMemoryMapInfo. ++ * ++ * Since: 1.22 ++ */ ++typedef GstMapInfo GstBufferMapInfo; ++ ++static inline void _gst_buffer_map_info_clear(GstBufferMapInfo *info) ++{ ++ /* we need to check for NULL, it is possible that we tried to map a buffer ++ * without memory and we should be able to unmap that fine */ ++ if (G_LIKELY (info->memory)) { ++ gst_memory_unmap (info->memory, info); ++ gst_memory_unref (info->memory); ++ } ++} ++ ++G_DEFINE_AUTO_CLEANUP_CLEAR_FUNC(GstBufferMapInfo, _gst_buffer_map_info_clear) ++ + G_END_DECLS + + #endif /* __GST_BUFFER_H__ */ +diff --git a/gst/gstelement.c b/gst/gstelement.c +index c2902ee81b..154b612fba 100644 +--- a/gst/gstelement.c ++++ b/gst/gstelement.c +@@ -101,12 +101,10 @@ + #include "gstquark.h" + #include "gsttracerutils.h" + #include "gstvalue.h" +-#include "gst-i18n-lib.h" ++#include + #include "glib-compat-private.h" + +-#ifndef GST_DISABLE_GST_DEBUG + #include "printf/printf.h" +-#endif + + /* Element signals and args */ + enum +@@ -152,7 +150,7 @@ static gboolean gst_element_default_query (GstElement * element, + GstQuery * query); + + static GstPadTemplate +- * gst_element_class_get_request_pad_template (GstElementClass * ++ * gst_element_class_request_pad_simple_template (GstElementClass * + element_class, const gchar * name); + + static void gst_element_call_async_func (gpointer data, gpointer user_data); +@@ -166,6 +164,10 @@ static GThreadPool *gst_element_pool = NULL; + /* this is used in gstelementfactory.c:gst_element_register() */ + GQuark __gst_elementclass_factory = 0; + ++/* used for gst_element_type_set_skip_documentation() and ++ * gst_element_factory_get_skip_documentation() */ ++GQuark __gst_elementclass_skip_doc = 0; ++ + GType + gst_element_get_type (void) + { +@@ -191,6 +193,8 @@ gst_element_get_type (void) + + __gst_elementclass_factory = + g_quark_from_static_string ("GST_ELEMENTCLASS_FACTORY"); ++ __gst_elementclass_skip_doc = ++ g_quark_from_static_string ("GST_ELEMENTCLASS_SKIP_DOCUMENTATION"); + g_once_init_leave (&gst_element_type, _type); + } + return gst_element_type; +@@ -413,7 +417,7 @@ gst_element_set_clock_func (GstElement * 
element, GstClock * clock) + /** + * gst_element_set_clock: + * @element: a #GstElement to set the clock for. +- * @clock: (transfer none) (allow-none): the #GstClock to set for the element. ++ * @clock: (transfer none) (nullable): the #GstClock to set for the element. + * + * Sets the clock for the element. This function increases the + * refcount on the clock. Any previously set clock on the object +@@ -488,6 +492,7 @@ gst_element_set_base_time (GstElement * element, GstClockTime time) + GstClockTime old; + + g_return_if_fail (GST_IS_ELEMENT (element)); ++ g_return_if_fail (GST_CLOCK_TIME_IS_VALID (time)); + + GST_OBJECT_LOCK (element); + old = element->base_time; +@@ -742,6 +747,7 @@ gst_element_add_pad (GstElement * element, GstPad * pad) + { + gchar *pad_name; + gboolean active; ++ gboolean should_activate; + + g_return_val_if_fail (GST_IS_ELEMENT (element), FALSE); + g_return_val_if_fail (GST_IS_PAD (pad), FALSE); +@@ -766,10 +772,8 @@ gst_element_add_pad (GstElement * element, GstPad * pad) + goto had_parent; + + /* check for active pads */ +- if (!active && (GST_STATE (element) > GST_STATE_READY || +- GST_STATE_NEXT (element) == GST_STATE_PAUSED)) { +- gst_pad_set_active (pad, TRUE); +- } ++ should_activate = !active && (GST_STATE (element) > GST_STATE_READY || ++ GST_STATE_NEXT (element) == GST_STATE_PAUSED); + + g_free (pad_name); + +@@ -791,6 +795,9 @@ gst_element_add_pad (GstElement * element, GstPad * pad) + element->pads_cookie++; + GST_OBJECT_UNLOCK (element); + ++ if (should_activate) ++ gst_pad_set_active (pad, TRUE); ++ + /* emit the PAD_ADDED signal */ + g_signal_emit (element, gst_element_signals[PAD_ADDED], 0, pad); + GST_TRACER_ELEMENT_ADD_PAD (element, pad); +@@ -1045,7 +1052,7 @@ gst_element_is_valid_request_template_name (const gchar * templ_name, + /* %s is not allowed for multiple specifiers, just a single specifier can be + * accepted in gst_pad_template_new() and can not be mixed with other + * specifier '%u' and '%d' */ +- if (*(templ_name_ptr + 1) == 's' && g_strcmp0 (templ_name, name) == 0) { ++ if (*(templ_name_ptr + 1) == 's') { + return TRUE; + } + +@@ -1158,6 +1165,16 @@ _gst_element_request_pad (GstElement * element, GstPadTemplate * templ, + } + #endif + ++#ifdef GST_ENABLE_EXTRA_CHECKS ++ { ++ if (!g_list_find (oclass->padtemplates, templ)) { ++ /* FIXME 2.0: Change this to g_return_val_if_fail() */ ++ g_critical ("Element type %s does not have a pad template %s (%p)", ++ g_type_name (G_OBJECT_TYPE (element)), templ->name_template, templ); ++ } ++ } ++#endif ++ + if (oclass->request_new_pad) + newpad = (oclass->request_new_pad) (element, templ, name, caps); + +@@ -1167,11 +1184,34 @@ _gst_element_request_pad (GstElement * element, GstPadTemplate * templ, + return newpad; + } + ++#ifndef GST_REMOVE_DEPRECATED + /** + * gst_element_get_request_pad: + * @element: a #GstElement to find a request pad of. + * @name: the name of the request #GstPad to retrieve. + * ++ * The name of this function is confusing to people learning GStreamer. ++ * gst_element_request_pad_simple() aims at making it more explicit it is ++ * a simplified gst_element_request_pad(). ++ * ++ * Deprecated: 1.20: Prefer using gst_element_request_pad_simple() which ++ * provides the exact same functionality. ++ * ++ * Returns: (transfer full) (nullable): requested #GstPad if found, ++ * otherwise %NULL. Release after usage. 
++ */ ++GstPad * ++gst_element_get_request_pad (GstElement * element, const gchar * name) ++{ ++ return gst_element_request_pad_simple (element, name); ++} ++#endif ++ ++/** ++ * gst_element_request_pad_simple: ++ * @element: a #GstElement to find a request pad of. ++ * @name: the name of the request #GstPad to retrieve. ++ * + * Retrieves a pad from the element by name (e.g. "src_\%d"). This version only + * retrieves request pads. The pad should be released with + * gst_element_release_request_pad(). +@@ -1180,11 +1220,18 @@ _gst_element_request_pad (GstElement * element, GstPadTemplate * templ, + * gst_element_request_pad() if the pads should have a specific name (e.g. + * @name is "src_1" instead of "src_\%u"). + * ++ * Note that this function was introduced in GStreamer 1.20 in order to provide ++ * a better name to gst_element_get_request_pad(). Prior to 1.20, users ++ * should use gst_element_get_request_pad() which provides the same ++ * functionality. ++ * + * Returns: (transfer full) (nullable): requested #GstPad if found, + * otherwise %NULL. Release after usage. ++ * ++ * Since: 1.20 + */ + GstPad * +-gst_element_get_request_pad (GstElement * element, const gchar * name) ++gst_element_request_pad_simple (GstElement * element, const gchar * name) + { + GstPadTemplate *templ = NULL; + GstPad *pad; +@@ -1198,7 +1245,7 @@ gst_element_get_request_pad (GstElement * element, const gchar * name) + + class = GST_ELEMENT_GET_CLASS (element); + +- templ = gst_element_class_get_request_pad_template (class, name); ++ templ = gst_element_class_request_pad_simple_template (class, name); + if (templ) { + req_name = strstr (name, "%") ? NULL : name; + templ_found = TRUE; +@@ -1714,9 +1761,9 @@ gst_element_get_metadata (GstElement * element, const gchar * key) + * + * Retrieves a list of the pad templates associated with @element_class. The + * list must not be modified by the calling code. +- * > If you use this function in the #GInstanceInitFunc of an object class ++ * > If you use this function in the GInstanceInitFunc of an object class + * > that has subclasses, make sure to pass the g_class parameter of the +- * > #GInstanceInitFunc here. ++ * > GInstanceInitFunc here. + * + * Returns: (transfer none) (element-type Gst.PadTemplate): the #GList of + * pad templates. +@@ -1756,9 +1803,9 @@ gst_element_get_pad_template_list (GstElement * element) + * @name: the name of the #GstPadTemplate to get. + * + * Retrieves a padtemplate from @element_class with the given name. +- * > If you use this function in the #GInstanceInitFunc of an object class ++ * > If you use this function in the GInstanceInitFunc of an object class + * > that has subclasses, make sure to pass the g_class parameter of the +- * > #GInstanceInitFunc here. ++ * > GInstanceInitFunc here. + * + * Returns: (transfer none) (nullable): the #GstPadTemplate with the + * given name, or %NULL if none was found. 
No unreferencing is +@@ -1811,7 +1858,7 @@ gst_element_get_pad_template (GstElement * element, const gchar * name) + } + + static GstPadTemplate * +-gst_element_class_get_request_pad_template (GstElementClass * ++gst_element_class_request_pad_simple_template (GstElementClass * + element_class, const gchar * name) + { + GstPadTemplate *tmpl; +@@ -3846,7 +3893,8 @@ _priv_gst_element_cleanup (void) + * @name: Name of the first field to set + * @...: variable arguments in the same form as #GstStructure + * +- * Create a #GstStructure to be used with #gst_element_message_full_with_details ++ * Create a #GstStructure to be used with #gst_element_message_full_with_details. ++ * %NULL terminator required. + * + * Since: 1.10 + */ +diff --git a/gst/gstelement.h b/gst/gstelement.h +index c1bfb45f2a..e690bf0e3f 100644 +--- a/gst/gstelement.h ++++ b/gst/gstelement.h +@@ -28,6 +28,142 @@ + + G_BEGIN_DECLS + ++/** ++ * _GST_ELEMENT_REGISTER_DEFINE_BEGIN: (attributes doc.skip=true) ++ */ ++#define _GST_ELEMENT_REGISTER_DEFINE_BEGIN(element) \ ++G_BEGIN_DECLS \ ++gboolean G_PASTE (gst_element_register_, element) (GstPlugin * plugin) \ ++{ \ ++ { ++ ++/** ++ * _GST_ELEMENT_REGISTER_DEFINE_END: (attributes doc.skip=true) ++ */ ++#define _GST_ELEMENT_REGISTER_DEFINE_END(element_name, rank, type) \ ++ } \ ++ return gst_element_register (plugin, element_name, rank, type); \ ++} \ ++G_END_DECLS ++ ++/** ++ * GST_ELEMENT_REGISTER_DEFINE_CUSTOM: ++ * @element: The element name in lower case, with words separated by '_'. ++ * Used to generate `gst_element_register_*(GstPlugin* plugin)`. ++ * @register_func: pointer to a method with the format: `gboolean register_func (GstPlugin* plugin);` ++ * ++ * A convenience macro to define the entry point of an ++ * element `gst_element_register_*(GstPlugin* plugin)` which uses ++ * register_func as the main registration method for the element. ++ * As an example, you may define the element named "streamer-filter" ++ * with the namespace `my` as following using `element_register_custom`: ++ * ++ * ``` ++ * GST_ELEMENT_REGISTER_DEFINE_CUSTOM (my_element, element_register_custom) ++ * ``` ++ * ++ * Since: 1.20 ++ */ ++#define GST_ELEMENT_REGISTER_DEFINE_CUSTOM(element, register_func) \ ++G_BEGIN_DECLS \ ++gboolean G_PASTE (gst_element_register_, element) (GstPlugin * plugin) \ ++{ \ ++ return register_func (plugin); \ ++} \ ++G_END_DECLS ++ ++/** ++ * GST_ELEMENT_REGISTER_DEFINE: ++ * @e: The element name in lower case, with words separated by '_'. ++ * Used to generate `gst_element_register_*(GstPlugin* plugin)`. ++ * @e_n: The public name of the element ++ * @r: The #GstRank of the element (higher rank means more importance when autoplugging, see #GstRank) ++ * @t: The #GType of the element. ++ * ++ * A convenience macro to define the entry point of an ++ * element `gst_element_register_*(GstPlugin* plugin)`. ++ * As an example, you may define the element named "streamer-filter" ++ * with the namespace `my` as following: ++ * ++ * ``` ++ * GST_ELEMENT_REGISTER_REGISTER_DEFINE (stream_filter, "stream-filter", GST_RANK_PRIMARY, MY_TYPE_STREAM_FILTER) ++ * ``` ++ * ++ * Since: 1.20 ++ */ ++#define GST_ELEMENT_REGISTER_DEFINE(e, e_n, r, t) _GST_ELEMENT_REGISTER_DEFINE_BEGIN(e) _GST_ELEMENT_REGISTER_DEFINE_END(e_n, r, t) ++ ++/** ++ * GST_ELEMENT_REGISTER_DEFINE_WITH_CODE: ++ * @e: The element name in lower case, with words separated by '_'. ++ * Used to generate `gst_element_register_*(GstPlugin* plugin)`. 
++ * @e_n: The public name of the element ++ * @r: The #GstRank of the element (higher rank means more importance when autoplugging, see #GstRank) ++ * @t: The #GType of the element. ++ * @_c_: Custom code that gets inserted in the gst_element_register_*() function. ++ * ++ * A convenience macro to define the entry point of an ++ * element `gst_element_register_*(GstPlugin* plugin)` executing code ++ * before gst_element_register in `gst_element_register_*(GstPlugin* plugin)`. ++ ++ * As an example, you may define the element named "stream-filter" ++ * with the namespace `my` as following: ++ * ++ * ``` ++ * #define _pre_register_init \ ++ * my_stream_filter_pre_register (plugin); ++ * GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (stream_filter, "stream-filter", GST_RANK_PRIMARY, MY_TYPE_STREAM_FILTER, _pre_register_init) ++ * ``` ++ * ++ * Since: 1.20 ++ */ ++#define GST_ELEMENT_REGISTER_DEFINE_WITH_CODE(e, e_n, r, t, _c_) _GST_ELEMENT_REGISTER_DEFINE_BEGIN(e) {_c_;} _GST_ELEMENT_REGISTER_DEFINE_END(e_n, r, t) ++ ++/** ++ * GST_ELEMENT_REGISTER_DECLARE: ++ * @element: The element name in lower case, with words separated by '_'. ++ * ++ * This macro can be used to declare a new element. ++ * It has to be used in combination with #GST_ELEMENT_REGISTER_DEFINE macros ++ * and must be placed outside any block to declare the element registration ++ * function. ++ * As an example, you may declare the element named "stream-filter" ++ * with the namespace `my` as following: ++ * ++ * ``` ++ * GST_ELEMENT_REGISTER_DECLARE (stream_filter) ++ * ``` ++ * ++ * Since: 1.20 ++ */ ++#define GST_ELEMENT_REGISTER_DECLARE(element) \ ++G_BEGIN_DECLS \ ++gboolean G_PASTE(gst_element_register_, element) (GstPlugin * plugin); \ ++G_END_DECLS ++ ++/** ++ * GST_ELEMENT_REGISTER: ++ * @element: The element name in lower case, with words separated by '_'. ++ * @plugin: The #GstPlugin where to register the element. ++ * ++ * This macro can be used to register an element into a #GstPlugin. ++ * This method will be usually called in the plugin init function ++ * but can also be called with a NULL plugin, ++ * for example with a static registration of the element. ++ * It has to be used in combination with #GST_ELEMENT_REGISTER_DECLARE. ++ * ++ * ``` ++ * GstPlugin* plugin; ++ * ++ * ... ++ * ++ * GST_ELEMENT_REGISTER (stream_filter, plugin); ++ * ``` ++ * ++ * Since: 1.20 ++ */ ++#define GST_ELEMENT_REGISTER(element, plugin) G_PASTE(gst_element_register_, element) (plugin) ++ + /* gstelement.h and gstelementfactory.h include each other */ + typedef struct _GstElement GstElement; + typedef struct _GstElementClass GstElementClass; +@@ -348,7 +484,7 @@ typedef enum + #define GST_ELEMENT_START_TIME(elem) (GST_ELEMENT_CAST(elem)->start_time) + + GST_API +-GstStructure *gst_make_element_message_details (const char *name, ...); ++GstStructure *gst_make_element_message_details (const char *name, ...) 
G_GNUC_NULL_TERMINATED; + + #define GST_ELEMENT_MESSAGE_MAKE_DETAILS(args) gst_make_element_message_details args + +@@ -902,9 +1038,12 @@ void gst_element_no_more_pads (GstElement *element); + GST_API + GstPad* gst_element_get_static_pad (GstElement *element, const gchar *name); + +-GST_API ++GST_API G_DEPRECATED_FOR(gst_element_request_pad_simple) + GstPad* gst_element_get_request_pad (GstElement *element, const gchar *name); + ++GST_API ++GstPad* gst_element_request_pad_simple (GstElement *element, const gchar *name); ++ + GST_API + GstPad* gst_element_request_pad (GstElement *element, GstPadTemplate *templ, + const gchar * name, const GstCaps *caps); +diff --git a/gst/gstevent.c b/gst/gstevent.c +index 974ced3431..0bd4ac06e6 100644 +--- a/gst/gstevent.c ++++ b/gst/gstevent.c +@@ -213,6 +213,33 @@ gst_event_type_get_flags (GstEventType type) + return ret; + } + ++/** ++ * gst_event_type_to_sticky_ordering ++ * @type: a #GstEventType ++ * ++ * Converts the #GstEventType to an unsigned integer that ++ * represents the ordering of sticky events when re-sending them. ++ * A lower value represents a higher-priority event. ++ * ++ * Returns: an unsigned integer ++ * Since: 1.22 ++ */ ++/* FIXME 2.0: Remove the sticky event order overrides once ++ * the event type numbers are fixed */ ++guint ++gst_event_type_to_sticky_ordering (GstEventType type) ++{ ++ guint sticky_order = type; ++ ++ /* Fix up the sticky event ordering for events where the ++ * type was chosen poorly */ ++ if (type == GST_EVENT_INSTANT_RATE_CHANGE) { ++ sticky_order = GST_EVENT_SEGMENT + 1; ++ } ++ ++ return sticky_order; ++} ++ + static void + _gst_event_free (GstEvent * event) + { +@@ -298,7 +325,7 @@ gst_event_init (GstEventImpl * event, GstEventType type) + * New custom events can also be created by subclassing the event type if + * needed. + * +- * Returns: (transfer full) (nullable): the new custom event. ++ * Returns: (transfer full): the new custom event. + */ + GstEvent * + gst_event_new_custom (GstEventType type, GstStructure * structure) +@@ -354,15 +381,15 @@ gst_event_get_structure (GstEvent * event) + + /** + * gst_event_writable_structure: +- * @event: The #GstEvent. ++ * @event: A writable #GstEvent. + * + * Get a writable version of the structure. + * + * Returns: (transfer none): The structure of the event. The structure + * is still owned by the event, which means that you should not free + * it and that the pointer becomes invalid when you free the event. +- * This function checks if @event is writable and will never return +- * %NULL. ++ * This function ensures that @event is writable, and if so, will ++ * never return %NULL. + * + * MT safe. + */ +@@ -591,7 +618,7 @@ gst_event_new_flush_stop (gboolean reset_time) + /** + * gst_event_parse_flush_stop: + * @event: The event to parse +- * @reset_time: (out): if time should be reset ++ * @reset_time: (out) (optional): if time should be reset + * + * Parse the FLUSH_STOP event and retrieve the @reset_time member. + */ +@@ -626,8 +653,7 @@ gst_event_parse_flush_stop (GstEvent * event, gboolean * reset_time) + * + * Note: The list of @streams can not be empty. + * +- * Returns: (transfer full): a new select-streams event or %NULL in case of +- * an error (like an empty streams list). ++ * Returns: (transfer full): a new select-streams event. 
+ * + * Since: 1.10 + */ +@@ -661,7 +687,7 @@ gst_event_new_select_streams (GList * streams) + /** + * gst_event_parse_select_streams: + * @event: The event to parse +- * @streams: (out) (element-type utf8) (transfer full): the streams ++ * @streams: (out) (optional) (element-type utf8) (transfer full): the streams + * + * Parse the SELECT_STREAMS event and retrieve the contained streams. + * +@@ -724,7 +750,7 @@ gst_event_new_stream_group_done (guint group_id) + /** + * gst_event_parse_stream_group_done: + * @event: a stream-group-done event. +- * @group_id: (out): address of variable to store the group id into ++ * @group_id: (out) (optional): address of variable to store the group id into + * + * Parse a stream-group-done @event and store the result in the given + * @group_id location. +@@ -804,9 +830,9 @@ gst_event_new_gap (GstClockTime timestamp, GstClockTime duration) + /** + * gst_event_parse_gap: + * @event: a #GstEvent of type #GST_EVENT_GAP +- * @timestamp: (out) (allow-none): location where to store the ++ * @timestamp: (out) (optional): location where to store the + * start time (pts) of the gap, or %NULL +- * @duration: (out) (allow-none): location where to store the duration of ++ * @duration: (out) (optional): location where to store the duration of + * the gap, or %NULL + * + * Extract timestamp and duration from a new GAP event. +@@ -826,6 +852,51 @@ gst_event_parse_gap (GstEvent * event, GstClockTime * timestamp, + GST_QUARK (DURATION), GST_TYPE_CLOCK_TIME, duration, NULL); + } + ++/** ++ * gst_event_set_gap_flags: ++ * @event: a #GstEvent of type #GST_EVENT_GAP ++ * @flags: a #GstGapFlags ++ * ++ * Sets @flags on @event to give additional information about the reason for ++ * the #GST_EVENT_GAP. ++ * ++ * Since: 1.20 ++ */ ++void ++gst_event_set_gap_flags (GstEvent * event, GstGapFlags flags) ++{ ++ g_return_if_fail (event != NULL); ++ g_return_if_fail (GST_EVENT_TYPE (event) == GST_EVENT_GAP); ++ g_return_if_fail (gst_event_is_writable (event)); ++ ++ gst_structure_id_set (GST_EVENT_STRUCTURE (event), ++ GST_QUARK (GAP_FLAGS), GST_TYPE_GAP_FLAGS, flags, NULL); ++} ++ ++/** ++ * gst_event_parse_gap_flags: ++ * @event: a #GstEvent of type #GST_EVENT_GAP ++ * @flags: (out) (optional): a #GstGapFlags or %NULL ++ * ++ * Retrieve the gap flags that may have been set on a gap event with ++ * gst_event_set_gap_flags(). ++ * ++ * Since: 1.20 ++ */ ++void ++gst_event_parse_gap_flags (GstEvent * event, GstGapFlags * flags) ++{ ++ g_return_if_fail (event != NULL); ++ g_return_if_fail (GST_EVENT_TYPE (event) == GST_EVENT_GAP); ++ ++ /* Initialize to 0 for GAP events that don't have the field set */ ++ if (flags) ++ *flags = 0; ++ ++ gst_structure_id_get (GST_EVENT_STRUCTURE (event), ++ GST_QUARK (GAP_FLAGS), GST_TYPE_GAP_FLAGS, flags, NULL); ++} ++ + /** + * gst_event_new_caps: + * @caps: (transfer none): a #GstCaps +@@ -834,7 +905,7 @@ gst_event_parse_gap (GstEvent * event, GstClockTime * timestamp, + * synchronized with the buffer flow and contains the format of the buffers + * that will follow after the event. + * +- * Returns: (transfer full) (nullable): the new CAPS event. ++ * Returns: (transfer full): the new CAPS event. + */ + GstEvent * + gst_event_new_caps (GstCaps * caps) +@@ -856,7 +927,7 @@ gst_event_new_caps (GstCaps * caps) + /** + * gst_event_parse_caps: + * @event: The event to parse +- * @caps: (out) (transfer none): A pointer to the caps ++ * @caps: (out) (optional) (transfer none): A pointer to the caps + * + * Get the caps from @event. 
The caps remains valid as long as @event remains + * valid. +@@ -912,7 +983,7 @@ gst_event_parse_caps (GstEvent * event, GstCaps ** caps) + * + * time + (TIMESTAMP(buf) - start) * ABS (rate * applied_rate) + * +- * Returns: (transfer full) (nullable): the new SEGMENT event. ++ * Returns: (transfer full): the new SEGMENT event. + */ + GstEvent * + gst_event_new_segment (const GstSegment * segment) +@@ -937,7 +1008,7 @@ gst_event_new_segment (const GstSegment * segment) + /** + * gst_event_parse_segment: + * @event: The event to parse +- * @segment: (out) (transfer none): a pointer to a #GstSegment ++ * @segment: (out) (optional) (transfer none): a pointer to a #GstSegment + * + * Parses a segment @event and stores the result in the given @segment location. + * @segment remains valid only until the @event is freed. Don't modify the segment +@@ -1014,7 +1085,7 @@ gst_event_new_tag (GstTagList * taglist) + /** + * gst_event_parse_tag: + * @event: a tag event +- * @taglist: (out) (transfer none): pointer to metadata list ++ * @taglist: (out) (optional) (transfer none): pointer to metadata list + * + * Parses a tag @event and stores the results in the given @taglist location. + * No reference to the taglist will be returned, it remains valid only until +@@ -1076,10 +1147,10 @@ gst_event_new_buffer_size (GstFormat format, gint64 minsize, + /** + * gst_event_parse_buffer_size: + * @event: The event to query +- * @format: (out): A pointer to store the format in +- * @minsize: (out): A pointer to store the minsize in +- * @maxsize: (out): A pointer to store the maxsize in +- * @async: (out): A pointer to store the async-flag in ++ * @format: (out) (optional): A pointer to store the format in ++ * @minsize: (out) (optional): A pointer to store the minsize in ++ * @maxsize: (out) (optional): A pointer to store the maxsize in ++ * @async: (out) (optional): A pointer to store the async-flag in + * + * Get the format, minsize, maxsize and async-flag in the buffersize event. + */ +@@ -1161,7 +1232,7 @@ gst_event_parse_buffer_size (GstEvent * event, GstFormat * format, + * The application can use general event probes to intercept the QoS + * event and implement custom application specific QoS handling. + * +- * Returns: (transfer full) (nullable): a new QOS event. ++ * Returns: (transfer full): a new QOS event. + */ + GstEvent * + gst_event_new_qos (GstQOSType type, gdouble proportion, +@@ -1191,10 +1262,10 @@ gst_event_new_qos (GstQOSType type, gdouble proportion, + /** + * gst_event_parse_qos: + * @event: The event to query +- * @type: (out): A pointer to store the QoS type in +- * @proportion: (out): A pointer to store the proportion in +- * @diff: (out): A pointer to store the diff in +- * @timestamp: (out): A pointer to store the timestamp in ++ * @type: (out) (optional): A pointer to store the QoS type in ++ * @proportion: (out) (optional): A pointer to store the proportion in ++ * @diff: (out) (optional): A pointer to store the diff in ++ * @timestamp: (out) (optional): A pointer to store the timestamp in + * + * Get the type, proportion, diff and timestamp in the qos event. See + * gst_event_new_qos() for more information about the different QoS values. +@@ -1287,7 +1358,7 @@ gst_event_parse_qos (GstEvent * event, GstQOSType * type, + * #GST_QUERY_POSITION and update the playback segment current position with a + * #GST_SEEK_TYPE_SET to the desired position. + * +- * Returns: (transfer full) (nullable): a new seek event. ++ * Returns: (transfer full): a new seek event. 
+ */ + GstEvent * + gst_event_new_seek (gdouble rate, GstFormat format, GstSeekFlags flags, +@@ -1348,13 +1419,13 @@ gst_event_new_seek (gdouble rate, GstFormat format, GstSeekFlags flags, + /** + * gst_event_parse_seek: + * @event: a seek event +- * @rate: (out): result location for the rate +- * @format: (out): result location for the stream format +- * @flags: (out): result location for the #GstSeekFlags +- * @start_type: (out): result location for the #GstSeekType of the start position +- * @start: (out): result location for the start position expressed in @format +- * @stop_type: (out): result location for the #GstSeekType of the stop position +- * @stop: (out): result location for the stop position expressed in @format ++ * @rate: (out) (optional): result location for the rate ++ * @format: (out) (optional): result location for the stream format ++ * @flags: (out) (optional): result location for the #GstSeekFlags ++ * @start_type: (out) (optional): result location for the #GstSeekType of the start position ++ * @start: (out) (optional): result location for the start position expressed in @format ++ * @stop_type: (out) (optional): result location for the #GstSeekType of the stop position ++ * @stop: (out) (optional): result location for the stop position expressed in @format + * + * Parses a seek @event and stores the results in the given result locations. + */ +@@ -1422,7 +1493,7 @@ gst_event_set_seek_trickmode_interval (GstEvent * event, GstClockTime interval) + + /** + * gst_event_parse_seek_trickmode_interval: +- * @interval: (out) ++ * @interval: (out) (optional): interval + * + * Retrieve the trickmode interval that may have been set on a + * seek event with gst_event_set_seek_trickmode_interval(). +@@ -1447,7 +1518,8 @@ gst_event_parse_seek_trickmode_interval (GstEvent * event, + /** + * gst_event_new_navigation: + * @structure: (transfer full): description of the event. The event will take +- * ownership of the structure. ++ * ownership of the structure. See #GstNavigation for more specific ++ * constructors. + * + * Create a new navigation event from the given description. + * +@@ -1480,6 +1552,8 @@ gst_event_new_latency (GstClockTime latency) + GstEvent *event; + GstStructure *structure; + ++ g_return_val_if_fail (GST_CLOCK_TIME_IS_VALID (latency), NULL); ++ + GST_CAT_INFO (GST_CAT_EVENT, + "creating latency event %" GST_TIME_FORMAT, GST_TIME_ARGS (latency)); + +@@ -1493,7 +1567,7 @@ gst_event_new_latency (GstClockTime latency) + /** + * gst_event_parse_latency: + * @event: The event to query +- * @latency: (out): A pointer to store the latency in. ++ * @latency: (out) (optional): A pointer to store the latency in. + * + * Get the latency in the latency event. + */ +@@ -1531,7 +1605,7 @@ gst_event_parse_latency (GstEvent * event, GstClockTime * latency) + * The @intermediate flag instructs the pipeline that this step operation is + * part of a larger step operation. 
+ * +- * Returns: (transfer full) (nullable): a new #GstEvent ++ * Returns: (transfer full): a new #GstEvent + */ + GstEvent * + gst_event_new_step (GstFormat format, guint64 amount, gdouble rate, +@@ -1558,11 +1632,11 @@ gst_event_new_step (GstFormat format, guint64 amount, gdouble rate, + /** + * gst_event_parse_step: + * @event: The event to query +- * @format: (out) (allow-none): a pointer to store the format in +- * @amount: (out) (allow-none): a pointer to store the amount in +- * @rate: (out) (allow-none): a pointer to store the rate in +- * @flush: (out) (allow-none): a pointer to store the flush boolean in +- * @intermediate: (out) (allow-none): a pointer to store the intermediate ++ * @format: (out) (optional): a pointer to store the format in ++ * @amount: (out) (optional): a pointer to store the amount in ++ * @rate: (out) (optional): a pointer to store the rate in ++ * @flush: (out) (optional): a pointer to store the flush boolean in ++ * @intermediate: (out) (optional): a pointer to store the intermediate + * boolean in + * + * Parse the step event. +@@ -1651,7 +1725,7 @@ gst_event_new_sink_message (const gchar * name, GstMessage * msg) + /** + * gst_event_parse_sink_message: + * @event: The event to query +- * @msg: (out) (transfer full): a pointer to store the #GstMessage in. ++ * @msg: (out) (optional) (transfer full): a pointer to store the #GstMessage in. + * + * Parse the sink-message event. Unref @msg after usage. + */ +@@ -1715,7 +1789,7 @@ gst_event_new_stream_start (const gchar * stream_id) + /** + * gst_event_parse_stream_start: + * @event: a stream-start event. +- * @stream_id: (out) (transfer none): pointer to store the stream-id ++ * @stream_id: (out) (optional) (transfer none): pointer to store the stream-id + * + * Parse a stream-id @event and store the result in the given @stream_id + * location. The string stored in @stream_id must not be modified and will +@@ -1761,7 +1835,7 @@ gst_event_set_stream (GstEvent * event, GstStream * stream) + /** + * gst_event_parse_stream: + * @event: a stream-start event +- * @stream: (out) (transfer full): address of variable to store the stream ++ * @stream: (out) (optional) (transfer full): address of variable to store the stream + * + * Parse a stream-start @event and extract the #GstStream from it. + * +@@ -1801,7 +1875,7 @@ gst_event_set_stream_flags (GstEvent * event, GstStreamFlags flags) + /** + * gst_event_parse_stream_flags: + * @event: a stream-start event +- * @flags: (out): address of variable where to store the stream flags ++ * @flags: (out) (optional): address of variable where to store the stream flags + * + * Since: 1.2 + */ +@@ -1848,7 +1922,7 @@ gst_event_set_group_id (GstEvent * event, guint group_id) + /** + * gst_event_parse_group_id: + * @event: a stream-start event +- * @group_id: (out): address of variable where to store the group id ++ * @group_id: (out) (optional): address of variable where to store the group id + * + * Returns: %TRUE if a group id was set on the event and could be parsed, + * %FALSE otherwise. +@@ -1873,7 +1947,7 @@ gst_event_parse_group_id (GstEvent * event, guint * group_id) + + /** + * gst_event_new_stream_collection: +- * @collection: Active collection for this data flow ++ * @collection: (transfer none): Active collection for this data flow + * + * Create a new STREAM_COLLECTION event. The stream collection event can only + * travel downstream synchronized with the buffer flow. 
+@@ -1905,7 +1979,7 @@ gst_event_new_stream_collection (GstStreamCollection * collection) + /** + * gst_event_parse_stream_collection: + * @event: a stream-collection event +- * @collection: (out): pointer to store the collection ++ * @collection: (out) (optional) (transfer full): pointer to store the collection. + * + * Retrieve new #GstStreamCollection from STREAM_COLLECTION event @event. + * +@@ -1965,8 +2039,8 @@ gst_event_new_toc (GstToc * toc, gboolean updated) + /** + * gst_event_parse_toc: + * @event: a TOC event. +- * @toc: (out) (transfer full): pointer to #GstToc structure. +- * @updated: (out): pointer to store TOC updated flag. ++ * @toc: (out) (optional) (transfer full): pointer to #GstToc structure. ++ * @updated: (out) (optional): pointer to store TOC updated flag. + * + * Parse a TOC @event and store the results in the given @toc and @updated locations. + */ +@@ -2014,7 +2088,7 @@ gst_event_new_toc_select (const gchar * uid) + /** + * gst_event_parse_toc_select: + * @event: a TOC select event. +- * @uid: (out) (transfer full) (allow-none): storage for the selection UID. ++ * @uid: (out) (transfer full) (optional): storage for the selection UID. + * + * Parse a TOC select @event and store the results in the given @uid location. + */ +@@ -2075,8 +2149,7 @@ gst_event_parse_toc_select (GstEvent * event, gchar ** uid) + * event of a particular @origin and @system_id will + * be stuck to the output pad of the sending element. + * +- * Returns: a #GST_EVENT_PROTECTION event, if successful; %NULL +- * if unsuccessful. ++ * Returns: (transfer full): a #GST_EVENT_PROTECTION event. + * + * Since: 1.6 + */ +@@ -2110,11 +2183,11 @@ gst_event_new_protection (const gchar * system_id, + /** + * gst_event_parse_protection: + * @event: a #GST_EVENT_PROTECTION event. +- * @system_id: (out) (allow-none) (transfer none): pointer to store the UUID ++ * @system_id: (out) (optional) (transfer none): pointer to store the UUID + * string uniquely identifying a content protection system. +- * @data: (out) (allow-none) (transfer none): pointer to store a #GstBuffer ++ * @data: (out) (optional) (transfer none): pointer to store a #GstBuffer + * holding protection system specific information. +- * @origin: (out) (allow-none) (transfer none): pointer to store a value that ++ * @origin: (out) (optional) (transfer none): pointer to store a value that + * indicates where the protection information carried by @event was extracted + * from. + * +@@ -2178,8 +2251,8 @@ gst_event_new_segment_done (GstFormat format, gint64 position) + /** + * gst_event_parse_segment_done: + * @event: A valid #GstEvent of type GST_EVENT_SEGMENT_DONE. +- * @format: (out) (allow-none): Result location for the format, or %NULL +- * @position: (out) (allow-none): Result location for the position, or %NULL ++ * @format: (out) (optional): Result location for the format, or %NULL ++ * @position: (out) (optional): Result location for the position, or %NULL + * + * Extracts the position and format from the segment done message. 
+ * +@@ -2248,9 +2321,9 @@ gst_event_new_instant_rate_change (gdouble rate_multiplier, + /** + * gst_event_parse_instant_rate_change: + * @event: a #GstEvent of type #GST_EVENT_INSTANT_RATE_CHANGE +- * @rate_multiplier: (out) (allow-none): location in which to store the rate ++ * @rate_multiplier: (out) (optional): location in which to store the rate + * multiplier of the instant-rate-change event, or %NULL +- * @new_flags: (out) (allow-none): location in which to store the new ++ * @new_flags: (out) (optional): location in which to store the new + * segment flags of the instant-rate-change event, or %NULL + * + * Extract rate and flags from an instant-rate-change event. +@@ -2324,11 +2397,11 @@ gst_event_new_instant_rate_sync_time (gdouble rate_multiplier, + /** + * gst_event_parse_instant_rate_sync_time: + * @event: a #GstEvent of type #GST_EVENT_INSTANT_RATE_CHANGE +- * @rate_multiplier: (out) (allow-none): location where to store the rate of ++ * @rate_multiplier: (out) (optional): location where to store the rate of + * the instant-rate-sync-time event, or %NULL +- * @running_time: (out) (allow-none): location in which to store the running time ++ * @running_time: (out) (optional): location in which to store the running time + * of the instant-rate-sync-time event, or %NULL +- * @upstream_running_time: (out) (allow-none): location in which to store the ++ * @upstream_running_time: (out) (optional): location in which to store the + * upstream running time of the instant-rate-sync-time event, or %NULL + * + * Extract the rate multiplier and running times from an instant-rate-sync-time event. +@@ -2356,7 +2429,7 @@ gst_event_parse_instant_rate_sync_time (GstEvent * event, + * gst_event_replace: (skip) + * @old_event: (inout) (transfer full) (nullable): pointer to a + * pointer to a #GstEvent to be replaced. +- * @new_event: (allow-none) (transfer none): pointer to a #GstEvent that will ++ * @new_event: (nullable) (transfer none): pointer to a #GstEvent that will + * replace the event pointed to by @old_event. + * + * Modifies a pointer to a #GstEvent to point to a different #GstEvent. The +@@ -2395,7 +2468,7 @@ gst_event_steal (GstEvent ** old_event) + * gst_event_take: (skip) + * @old_event: (inout) (transfer full) (nullable): pointer to a + * pointer to a #GstEvent to be stolen. +- * @new_event: (allow-none) (transfer full): pointer to a #GstEvent that will ++ * @new_event: (nullable) (transfer full): pointer to a #GstEvent that will + * replace the event pointed to by @old_event. + * + * Modifies a pointer to a #GstEvent to point to a different #GstEvent. This +diff --git a/gst/gstevent.h b/gst/gstevent.h +index f4f1ebeb06..a58eb40aa8 100644 +--- a/gst/gstevent.h ++++ b/gst/gstevent.h +@@ -169,6 +169,7 @@ typedef enum { + GST_EVENT_GAP = GST_EVENT_MAKE_TYPE (160, _FLAG(DOWNSTREAM) | _FLAG(SERIALIZED)), + + /* sticky downstream non-serialized */ ++ /* FIXME 2.0: change to value 72 and move after the GST_EVENT_SEGMENT event */ + GST_EVENT_INSTANT_RATE_CHANGE = GST_EVENT_MAKE_TYPE (180, _FLAG(DOWNSTREAM) | _FLAG(STICKY)), + + /* upstream events */ +@@ -374,6 +375,19 @@ typedef enum { + GST_QOS_TYPE_THROTTLE = 2 + } GstQOSType; + ++/** ++ * GstGapFlags: ++ * @GST_GAP_FLAG_MISSING_DATA: The #GST_EVENT_GAP signals missing data, ++ * for example because of packet loss. ++ * ++ * The different flags that can be set on #GST_EVENT_GAP events. See ++ * gst_event_set_gap_flags() for details. 
++ * ++ * Since: 1.20 ++ */ ++typedef enum { ++ GST_GAP_FLAG_MISSING_DATA = (1<<0), ++} GstGapFlags; + + /** + * GstEvent: +@@ -404,6 +418,10 @@ GST_API + GstEventTypeFlags + gst_event_type_get_flags (GstEventType type); + ++ ++GST_API ++guint gst_event_type_to_sticky_ordering (GstEventType type) G_GNUC_CONST; ++ + #ifndef GST_DISABLE_MINIOBJECT_INLINE_FUNCTIONS + /* refcounting */ + static inline GstEvent * +@@ -557,6 +575,14 @@ void gst_event_parse_gap (GstEvent * event, + GstClockTime * timestamp, + GstClockTime * duration); + ++GST_API ++void gst_event_set_gap_flags (GstEvent * event, ++ GstGapFlags flags); ++ ++GST_API ++void gst_event_parse_gap_flags (GstEvent * event, ++ GstGapFlags * flags); ++ + /* Caps events */ + + GST_API +diff --git a/gst/gstmeta.c b/gst/gstmeta.c +index ab36cefffd..9fc619860f 100644 +--- a/gst/gstmeta.c ++++ b/gst/gstmeta.c +@@ -57,15 +57,62 @@ static GRWLock lock; + + GQuark _gst_meta_transform_copy; + GQuark _gst_meta_tag_memory; ++GQuark _gst_meta_tag_memory_reference; ++ ++typedef struct ++{ ++ GstCustomMeta meta; ++ ++ GstStructure *structure; ++} GstCustomMetaImpl; ++ ++typedef struct ++{ ++ GstMetaInfo info; ++ GstCustomMetaTransformFunction custom_transform_func; ++ gpointer custom_transform_user_data; ++ GDestroyNotify custom_transform_destroy_notify; ++ gboolean is_custom; ++} GstMetaInfoImpl; ++ ++static void ++free_info (gpointer data) ++{ ++ g_slice_free (GstMetaInfoImpl, data); ++} + + void + _priv_gst_meta_initialize (void) + { + g_rw_lock_init (&lock); +- metainfo = g_hash_table_new (g_str_hash, g_str_equal); ++ metainfo = g_hash_table_new_full (g_str_hash, g_str_equal, NULL, free_info); + + _gst_meta_transform_copy = g_quark_from_static_string ("gst-copy"); + _gst_meta_tag_memory = g_quark_from_static_string ("memory"); ++ _gst_meta_tag_memory_reference = ++ g_quark_from_static_string ("memory-reference"); ++} ++ ++static gboolean ++notify_custom (gchar * key, GstMetaInfo * info, gpointer unused) ++{ ++ GstMetaInfoImpl *impl = (GstMetaInfoImpl *) info; ++ ++ if (impl->is_custom) { ++ if (impl->custom_transform_destroy_notify) ++ impl->custom_transform_destroy_notify (impl->custom_transform_user_data); ++ } ++ return TRUE; ++} ++ ++void ++_priv_gst_meta_cleanup (void) ++{ ++ if (metainfo != NULL) { ++ g_hash_table_foreach_remove (metainfo, (GHRFunc) notify_custom, NULL); ++ g_hash_table_unref (metainfo); ++ metainfo = NULL; ++ } + } + + /** +@@ -89,7 +136,7 @@ gst_meta_api_type_register (const gchar * api, const gchar ** tags) + GST_CAT_DEBUG (GST_CAT_META, "register API \"%s\"", api); + type = g_pointer_type_register_static (api); + +- if (type != 0) { ++ if (type != G_TYPE_INVALID) { + gint i; + + for (i = 0; tags[i]; i++) { +@@ -104,6 +151,168 @@ gst_meta_api_type_register (const gchar * api, const gchar ** tags) + return type; + } + ++static gboolean ++custom_init_func (GstMeta * meta, gpointer params, GstBuffer * buffer) ++{ ++ GstCustomMetaImpl *cmeta = (GstCustomMetaImpl *) meta; ++ ++ cmeta->structure = gst_structure_new_empty (g_type_name (meta->info->type)); ++ ++ gst_structure_set_parent_refcount (cmeta->structure, ++ &GST_MINI_OBJECT_REFCOUNT (buffer)); ++ ++ return TRUE; ++} ++ ++static void ++custom_free_func (GstMeta * meta, GstBuffer * buffer) ++{ ++ GstCustomMetaImpl *cmeta = (GstCustomMetaImpl *) meta; ++ ++ gst_structure_set_parent_refcount (cmeta->structure, NULL); ++ gst_structure_free (cmeta->structure); ++} ++ ++static gboolean ++custom_transform_func (GstBuffer * transbuf, GstMeta * meta, ++ GstBuffer * buffer, GQuark 
type, gpointer data) ++{ ++ GstCustomMetaImpl *custom, *cmeta = (GstCustomMetaImpl *) meta; ++ GstMetaInfoImpl *info = (GstMetaInfoImpl *) meta->info; ++ ++ if (info->custom_transform_func) ++ return info->custom_transform_func (transbuf, (GstCustomMeta *) meta, ++ buffer, type, data, info->custom_transform_user_data); ++ ++ if (GST_META_TRANSFORM_IS_COPY (type)) { ++ custom = ++ (GstCustomMetaImpl *) gst_buffer_add_meta (transbuf, meta->info, NULL); ++ gst_structure_set_parent_refcount (custom->structure, NULL); ++ gst_structure_take (&custom->structure, ++ gst_structure_copy (cmeta->structure)); ++ gst_structure_set_parent_refcount (custom->structure, ++ &GST_MINI_OBJECT_REFCOUNT (transbuf)); ++ } else { ++ return FALSE; ++ } ++ ++ return TRUE; ++} ++ ++/** ++ * gst_custom_meta_get_structure: ++ * ++ * Retrieve the #GstStructure backing a custom meta, the structure's mutability ++ * is conditioned to the writability of the #GstBuffer @meta is attached to. ++ * ++ * Returns: (transfer none): the #GstStructure backing @meta ++ * Since: 1.20 ++ */ ++GstStructure * ++gst_custom_meta_get_structure (GstCustomMeta * meta) ++{ ++ g_return_val_if_fail (meta != NULL, NULL); ++ g_return_val_if_fail (gst_meta_info_is_custom (((GstMeta *) meta)->info), ++ NULL); ++ ++ return ((GstCustomMetaImpl *) meta)->structure; ++} ++ ++/** ++ * gst_custom_meta_has_name: ++ * ++ * Checks whether the name of the custom meta is @name ++ * ++ * Returns: Whether @name is the name of the custom meta ++ * Since: 1.20 ++ */ ++gboolean ++gst_custom_meta_has_name (GstCustomMeta * meta, const gchar * name) ++{ ++ g_return_val_if_fail (meta != NULL, FALSE); ++ g_return_val_if_fail (gst_meta_info_is_custom (((GstMeta *) meta)->info), ++ FALSE); ++ ++ return gst_structure_has_name (((GstCustomMetaImpl *) meta)->structure, name); ++} ++ ++/** ++ * gst_meta_register_custom: ++ * @name: the name of the #GstMeta implementation ++ * @tags: (array zero-terminated=1): tags for @api ++ * @transform_func: (scope notified) (nullable): a #GstMetaTransformFunction ++ * @user_data: (closure): user data passed to @transform_func ++ * @destroy_data: #GDestroyNotify for user_data ++ * ++ * Register a new custom #GstMeta implementation, backed by an opaque ++ * structure holding a #GstStructure. ++ * ++ * The registered info can be retrieved later with gst_meta_get_info() by using ++ * @name as the key. ++ * ++ * The backing #GstStructure can be retrieved with ++ * gst_custom_meta_get_structure(), its mutability is conditioned by the ++ * writability of the buffer the meta is attached to. ++ * ++ * When @transform_func is %NULL, the meta and its backing #GstStructure ++ * will always be copied when the transform operation is copy, other operations ++ * are discarded, copy regions are ignored. ++ * ++ * Returns: (transfer none): a #GstMetaInfo that can be used to ++ * access metadata. 
++ * Since: 1.20 ++ */ ++const GstMetaInfo * ++gst_meta_register_custom (const gchar * name, const gchar ** tags, ++ GstCustomMetaTransformFunction transform_func, ++ gpointer user_data, GDestroyNotify destroy_data) ++{ ++ gchar *api_name = g_strdup_printf ("%s-api", name); ++ GType api; ++ GstMetaInfoImpl *info; ++ GstMetaInfo *ret = NULL; ++ ++ g_return_val_if_fail (tags != NULL, NULL); ++ g_return_val_if_fail (name != NULL, NULL); ++ ++ api = gst_meta_api_type_register (api_name, tags); ++ g_free (api_name); ++ if (api == G_TYPE_INVALID) ++ goto done; ++ ++ info = (GstMetaInfoImpl *) gst_meta_register (api, name, ++ sizeof (GstCustomMetaImpl), ++ custom_init_func, custom_free_func, custom_transform_func); ++ ++ if (!info) ++ goto done; ++ ++ info->is_custom = TRUE; ++ info->custom_transform_func = transform_func; ++ info->custom_transform_user_data = user_data; ++ info->custom_transform_destroy_notify = destroy_data; ++ ++ ret = (GstMetaInfo *) info; ++ ++done: ++ return ret; ++} ++ ++/** ++ * gst_meta_info_is_custom: ++ * ++ * Returns: whether @info was registered as a #GstCustomMeta with ++ * gst_meta_register_custom() ++ * Since:1.20 ++ */ ++gboolean ++gst_meta_info_is_custom (const GstMetaInfo * info) ++{ ++ g_return_val_if_fail (info != NULL, FALSE); ++ ++ return ((GstMetaInfoImpl *) info)->is_custom; ++} ++ + /** + * gst_meta_api_type_has_tag: + * @api: an API +@@ -158,7 +367,7 @@ gst_meta_api_type_get_tags (GType api) + * The same @info can be retrieved later with gst_meta_get_info() by using + * @impl as the key. + * +- * Returns: (transfer none) (nullable): a #GstMetaInfo that can be used to ++ * Returns: (transfer none): a #GstMetaInfo that can be used to + * access metadata. + */ + +@@ -182,16 +391,17 @@ gst_meta_register (GType api, const gchar * impl, gsize size, + * that this fails because it was already registered. Don't warn, + * glib did this for us already. */ + type = g_pointer_type_register_static (impl); +- if (type == 0) ++ if (type == G_TYPE_INVALID) + return NULL; + +- info = g_slice_new (GstMetaInfo); ++ info = (GstMetaInfo *) g_slice_new (GstMetaInfoImpl); + info->api = api; + info->type = type; + info->size = size; + info->init_func = init_func; + info->free_func = free_func; + info->transform_func = transform_func; ++ ((GstMetaInfoImpl *) info)->is_custom = FALSE; + + GST_CAT_DEBUG (GST_CAT_META, + "register \"%s\" implementing \"%s\" of size %" G_GSIZE_FORMAT, impl, +diff --git a/gst/gstmeta.h b/gst/gstmeta.h +index d617ef8b79..44edbe0b4a 100644 +--- a/gst/gstmeta.h ++++ b/gst/gstmeta.h +@@ -87,11 +87,21 @@ typedef enum { + * GST_META_TAG_MEMORY_STR: + * + * This metadata stays relevant as long as memory layout is unchanged. ++ * In hindsight, this tag should have been called "memory-layout". + * + * Since: 1.2 + */ + #define GST_META_TAG_MEMORY_STR "memory" + ++/** ++ * GST_META_TAG_MEMORY_REFERENCE_STR: ++ * ++ * This metadata stays relevant until a deep copy is made. 
++ * ++ * Since: 1.20.4 ++ */ ++#define GST_META_TAG_MEMORY_REFERENCE_STR "memory-reference" ++ + /** + * GstMeta: + * @flags: extra flags for the metadata +@@ -105,6 +115,17 @@ struct _GstMeta { + const GstMetaInfo *info; + }; + ++/** ++ * GstCustomMeta: ++ * ++ * Simple typing wrapper around #GstMeta ++ * ++ * Since: 1.20 ++ */ ++typedef struct { ++ GstMeta meta; ++} GstCustomMeta; ++ + #include + + /** +@@ -178,6 +199,30 @@ typedef gboolean (*GstMetaTransformFunction) (GstBuffer *transbuf, + GstMeta *meta, GstBuffer *buffer, + GQuark type, gpointer data); + ++/** ++ * GstCustomMetaTransformFunction: ++ * @transbuf: a #GstBuffer ++ * @meta: a #GstCustomMeta ++ * @buffer: a #GstBuffer ++ * @type: the transform type ++ * @data: transform specific data. ++ * @user_data: user data passed when registering the meta ++ * ++ * Function called for each @meta in @buffer as a result of performing a ++ * transformation that yields @transbuf. Additional @type specific transform ++ * data is passed to the function as @data. ++ * ++ * Implementations should check the @type of the transform and parse ++ * additional type specific fields in @data that should be used to update ++ * the metadata on @transbuf. ++ * ++ * Returns: %TRUE if the transform could be performed ++ * Since: 1.20 ++ */ ++typedef gboolean (*GstCustomMetaTransformFunction) (GstBuffer *transbuf, ++ GstCustomMeta *meta, GstBuffer *buffer, ++ GQuark type, gpointer data, gpointer user_data); ++ + /** + * GstMetaInfo: + * @api: tag identifying the metadata structure and api +@@ -216,6 +261,21 @@ const GstMetaInfo * gst_meta_register (GType api, const gchar *impl, + GstMetaInitFunction init_func, + GstMetaFreeFunction free_func, + GstMetaTransformFunction transform_func); ++ ++GST_API ++const GstMetaInfo * gst_meta_register_custom (const gchar *name, const gchar **tags, ++ GstCustomMetaTransformFunction transform_func, ++ gpointer user_data, GDestroyNotify destroy_data); ++ ++GST_API ++gboolean gst_meta_info_is_custom (const GstMetaInfo *info); ++ ++GST_API ++GstStructure * gst_custom_meta_get_structure (GstCustomMeta *meta); ++ ++GST_API ++gboolean gst_custom_meta_has_name (GstCustomMeta *meta, const gchar * name); ++ + GST_API + const GstMetaInfo * gst_meta_get_info (const gchar * impl); + +@@ -232,6 +292,7 @@ gint gst_meta_compare_seqnum (const GstMeta * meta1, + /* some default tags */ + + GST_API GQuark _gst_meta_tag_memory; ++GST_API GQuark _gst_meta_tag_memory_reference; + + /** + * GST_META_TAG_MEMORY: +diff --git a/gst/gstminiobject.c b/gst/gstminiobject.c +index bc7aef07be..18fdce072e 100644 +--- a/gst/gstminiobject.c ++++ b/gst/gstminiobject.c +@@ -59,6 +59,8 @@ + #include "gst/gstinfo.h" + #include + ++GType _gst_mini_object_type = 0; ++ + /* Mutex used for weak referencing */ + G_LOCK_DEFINE_STATIC (qdata_mutex); + static GQuark weak_ref_quark; +@@ -72,6 +74,14 @@ static GQuark weak_ref_quark; + #define LOCK_MASK ((SHARE_ONE - 1) - FLAG_MASK) + #define LOCK_FLAG_MASK (SHARE_ONE - 1) + ++/** ++ * GST_TYPE_MINI_OBJECT: ++ * ++ * The #GType associated with #GstMiniObject. ++ * ++ * Since: 1.20 ++ */ ++ + /* For backwards compatibility reasons we use the + * guint and gpointer in the GstMiniObject struct in + * a rather complicated way to store the parent(s) and qdata. 
+@@ -126,9 +136,12 @@ typedef struct + #define QDATA_DATA(o,i) (QDATA(o,i).data) + #define QDATA_DESTROY(o,i) (QDATA(o,i).destroy) + ++GST_DEFINE_MINI_OBJECT_TYPE (GstMiniObject, gst_mini_object); ++ + void + _priv_gst_mini_object_initialize (void) + { ++ _gst_mini_object_type = gst_mini_object_get_type (); + weak_ref_quark = g_quark_from_static_string ("GstMiniObjectWeakRefQuark"); + } + +@@ -204,7 +217,7 @@ gst_mini_object_copy (const GstMiniObject * mini_object) + gboolean + gst_mini_object_lock (GstMiniObject * object, GstLockFlags flags) + { +- gint access_mode, state, newstate; ++ guint access_mode, state, newstate; + + g_return_val_if_fail (object != NULL, FALSE); + g_return_val_if_fail (GST_MINI_OBJECT_IS_LOCKABLE (object), FALSE); +@@ -215,9 +228,9 @@ gst_mini_object_lock (GstMiniObject * object, GstLockFlags flags) + + do { + access_mode = flags & FLAG_MASK; +- newstate = state = g_atomic_int_get (&object->lockstate); ++ newstate = state = (guint) g_atomic_int_get (&object->lockstate); + +- GST_CAT_TRACE (GST_CAT_LOCKING, "lock %p: state %08x, access_mode %d", ++ GST_CAT_TRACE (GST_CAT_LOCKING, "lock %p: state %08x, access_mode %u", + object, state, access_mode); + + if (access_mode & GST_LOCK_FLAG_EXCLUSIVE) { +@@ -252,7 +265,7 @@ gst_mini_object_lock (GstMiniObject * object, GstLockFlags flags) + lock_failed: + { + GST_CAT_DEBUG (GST_CAT_LOCKING, +- "lock failed %p: state %08x, access_mode %d", object, state, ++ "lock failed %p: state %08x, access_mode %u", object, state, + access_mode); + return FALSE; + } +@@ -268,16 +281,16 @@ lock_failed: + void + gst_mini_object_unlock (GstMiniObject * object, GstLockFlags flags) + { +- gint access_mode, state, newstate; ++ guint access_mode, state, newstate; + + g_return_if_fail (object != NULL); + g_return_if_fail (GST_MINI_OBJECT_IS_LOCKABLE (object)); + + do { + access_mode = flags & FLAG_MASK; +- newstate = state = g_atomic_int_get (&object->lockstate); ++ newstate = state = (guint) g_atomic_int_get (&object->lockstate); + +- GST_CAT_TRACE (GST_CAT_LOCKING, "unlock %p: state %08x, access_mode %d", ++ GST_CAT_TRACE (GST_CAT_LOCKING, "unlock %p: state %08x, access_mode %u", + object, state, access_mode); + + if (access_mode & GST_LOCK_FLAG_EXCLUSIVE) { +@@ -410,8 +423,8 @@ gst_mini_object_is_writable (const GstMiniObject * mini_object) + * + * MT safe + * +- * Returns: (transfer full): a mini-object (possibly the same pointer) that +- * is writable. ++ * Returns: (transfer full) (nullable): a writable mini-object (which may or may not be ++ * the same as @mini_object) or %NULL if copying is required but not possible. 
+ */ + GstMiniObject * + gst_mini_object_make_writable (GstMiniObject * mini_object) +diff --git a/gst/gstminiobject.h b/gst/gstminiobject.h +index b931e76e7a..f4dfa8d913 100644 +--- a/gst/gstminiobject.h ++++ b/gst/gstminiobject.h +@@ -29,6 +29,10 @@ + + G_BEGIN_DECLS + ++GST_API GType _gst_mini_object_type; ++ ++#define GST_TYPE_MINI_OBJECT (_gst_mini_object_type) ++ + #define GST_IS_MINI_OBJECT_TYPE(obj,type) ((obj) && GST_MINI_OBJECT_TYPE(obj) == (type)) + #define GST_MINI_OBJECT_CAST(obj) ((GstMiniObject*)(obj)) + #define GST_MINI_OBJECT_CONST_CAST(obj) ((const GstMiniObject*)(obj)) +@@ -36,6 +40,9 @@ G_BEGIN_DECLS + + typedef struct _GstMiniObject GstMiniObject; + ++GST_API ++GType gst_mini_object_get_type (void); ++ + /** + * GstMiniObjectCopyFunction: + * @obj: MiniObject to copy +diff --git a/gst/gstquark.c b/gst/gstquark.c +index daf6a51120..a4feec0319 100644 +--- a/gst/gstquark.c ++++ b/gst/gstquark.c +@@ -1,7 +1,7 @@ + /* GStreamer + * Copyright (C) 2006 Jan Schmidt + * +- * gstquark.c: Registered quarks for the _priv_gst_quark_table, private to ++ * gstquark.c: Registered quarks for the _priv_gst_quark_table, private to + * GStreamer + * + * This library is free software; you can redistribute it and/or +@@ -80,6 +80,7 @@ static const gchar *_quark_strings[] = { + "GstEventInstantRateChange", + "GstEventInstantRateSyncTime", "GstMessageInstantRateRequest", + "upstream-running-time", "base", "offset", "plugin-api", "plugin-api-flags", ++ "gap-flags", "GstQuerySelectable", "selectable" + }; + + GQuark _priv_gst_quark_table[GST_QUARK_MAX]; +diff --git a/gst/gstquark.h b/gst/gstquark.h +index cc796bc49b..444344efe0 100644 +--- a/gst/gstquark.h ++++ b/gst/gstquark.h +@@ -1,7 +1,7 @@ + /* GStreamer + * Copyright (C) 2006 Jan Schmidt + * +- * gstquark.h: Private header for storing quark info ++ * gstquark.h: Private header for storing quark info + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public +@@ -230,7 +230,10 @@ typedef enum _GstQuarkId + GST_QUARK_OFFSET = 199, + GST_QUARK_PLUGIN_API = 200, + GST_QUARK_PLUGIN_API_FLAGS = 201, +- GST_QUARK_MAX = 202 ++ GST_QUARK_GAP_FLAGS = 202, ++ GST_QUARK_QUERY_SELECTABLE = 203, ++ GST_QUARK_SELECTABLE = 204, ++ GST_QUARK_MAX = 205 + } GstQuarkId; + + extern GQuark _priv_gst_quark_table[GST_QUARK_MAX]; +-- +2.47.1 + diff --git a/package/gstreamer1/gstreamer1/gstreamer1.mk b/package/gstreamer1/gstreamer1/gstreamer1.mk index 684a39aa6dd1..a5c17b8f2cbf 100644 --- a/package/gstreamer1/gstreamer1/gstreamer1.mk +++ b/package/gstreamer1/gstreamer1/gstreamer1.mk @@ -68,4 +68,11 @@ GSTREAMER1_DEPENDENCIES = \ GSTREAMER1_CFLAGS = $(TARGET_CFLAGS) $(GSTREAMER1_EXTRA_COMPILER_OPTIONS) GSTREAMER1_LDFLAGS = $(TARGET_LDFLAGS) $(TARGET_NLS_LIBS) +ifeq ($(BR2_PACKAGE_WPEWEBKIT_USE_GSTREAMER_WEBRTC),y) +define GSTREAMER1_APPLY_GSTWEBRTC_PATCHES_POST_HOOK + cd $(@D) && { for P in $(TOPDIR)/$(GSTREAMER1_PKGDIR)/$(GSTREAMER1_VERSION)-gstwebrtc/*.patch; do patch -p1 < "$$P" ; done; } +endef +GSTREAMER1_POST_PATCH_HOOKS += GSTREAMER1_APPLY_GSTWEBRTC_PATCHES_POST_HOOK +endif + $(eval $(meson-package)) From 227e3f3c3ad27c2cf5422099c8092f8cbdf93c07 Mon Sep 17 00:00:00 2001 From: Philippe Normand Date: Mon, 22 Apr 2024 15:19:55 +0100 Subject: [PATCH 6/7] libmanette: Bump to version 0.2.7. 
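
Drop 0001-Meson-Un-hardcode-building-a-shared-library.patch, which appears to
have been merged upstream (see the merge request referenced in the removed
patch) and so is no longer needed with this release.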
--- ...n-hardcode-building-a-shared-library.patch | 34 ------------------- package/libmanette/libmanette.hash | 4 +-- package/libmanette/libmanette.mk | 2 +- 3 files changed, 3 insertions(+), 37 deletions(-) delete mode 100644 package/libmanette/0001-Meson-Un-hardcode-building-a-shared-library.patch diff --git a/package/libmanette/0001-Meson-Un-hardcode-building-a-shared-library.patch b/package/libmanette/0001-Meson-Un-hardcode-building-a-shared-library.patch deleted file mode 100644 index a93a1f72193e..000000000000 --- a/package/libmanette/0001-Meson-Un-hardcode-building-a-shared-library.patch +++ /dev/null @@ -1,34 +0,0 @@ -From 003c2e84d95357bbbef72fb395ef85f1650097a3 Mon Sep 17 00:00:00 2001 -From: Adrian Perez de Castro -Date: Fri, 23 Sep 2022 14:35:51 +0300 -Subject: [PATCH] Meson: Un-hardcode building a shared library - -Use library() instead of shared_library() to allow specifying which kind -of library to build with "meson setup --default-library ..." - -This allows more easily incorporating libmanette in an application e.g. -as a Meson subproject which gets built into a program as a static -library. - -Signed-off-by: Adrian Perez de Castro -[Upstream status: https://gitlab.gnome.org/GNOME/libmanette/-/merge_requests/84] ---- - src/meson.build | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/src/meson.build b/src/meson.build -index c140cac..b71d090 100644 ---- a/src/meson.build -+++ b/src/meson.build -@@ -63,7 +63,7 @@ if gudev.found() - libmanette_deps += [ gudev ] - endif - --libmanette_lib = shared_library( -+libmanette_lib = library( - libmanette_module, - libmanette_sources, - c_args: libmanette_c_args, --- -2.37.3 - diff --git a/package/libmanette/libmanette.hash b/package/libmanette/libmanette.hash index 96089f971aaf..b1da3c19f627 100644 --- a/package/libmanette/libmanette.hash +++ b/package/libmanette/libmanette.hash @@ -1,5 +1,5 @@ -# From https://download.gnome.org/sources/libmanette/0.2/libmanette-0.2.6.sha256sum -sha256 63653259a821ec7d90d681e52e757e2219d462828c9d74b056a5f53267636bac libmanette-0.2.6.tar.xz +# From https://download.gnome.org/sources/libmanette/0.2/libmanette-0.2.7.sha256sum +sha256 cddd5c02a131072c19c6cde6f2cb2cd57eae7dacb50d14c337efd980baa51a51 libmanette-0.2.7.tar.xz # Locally calculated sha256 dc626520dcd53a22f727af3ee42c770e56c97a64fe3adb063799d8ab032fe551 COPYING diff --git a/package/libmanette/libmanette.mk b/package/libmanette/libmanette.mk index 10f2b8a46631..42fb47979574 100644 --- a/package/libmanette/libmanette.mk +++ b/package/libmanette/libmanette.mk @@ -5,7 +5,7 @@ ################################################################################ LIBMANETTE_VERSION_MAJOR = 0.2 -LIBMANETTE_VERSION = $(LIBMANETTE_VERSION_MAJOR).6 +LIBMANETTE_VERSION = $(LIBMANETTE_VERSION_MAJOR).7 LIBMANETTE_SOURCE = libmanette-$(LIBMANETTE_VERSION).tar.xz LIBMANETTE_SITE = https://download.gnome.org/sources/libmanette/$(LIBMANETTE_VERSION_MAJOR) LIBMANETTE_LICENSE = LGPL-2.1+ From 4ee2817d809885df706c528a355cf6c85deefec6 Mon Sep 17 00:00:00 2001 From: Philippe Normand Date: Mon, 22 Apr 2024 15:20:18 +0100 Subject: [PATCH 7/7] cog: Fix gamepad support for 0.16 version --- ...r-gamepad-provider-even-if-no-platfo.patch | 62 +++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100644 package/cog/0.16.0/0001-platform-Register-gamepad-provider-even-if-no-platfo.patch diff --git a/package/cog/0.16.0/0001-platform-Register-gamepad-provider-even-if-no-platfo.patch 
b/package/cog/0.16.0/0001-platform-Register-gamepad-provider-even-if-no-platfo.patch new file mode 100644 index 000000000000..2c6d0bad4c15 --- /dev/null +++ b/package/cog/0.16.0/0001-platform-Register-gamepad-provider-even-if-no-platfo.patch @@ -0,0 +1,62 @@ +From 3ed251babe1681cc05583cbf4d2b770807addc37 Mon Sep 17 00:00:00 2001 +From: Philippe Normand +Date: Mon, 22 Apr 2024 15:18:11 +0100 +Subject: [PATCH] platform: Register gamepad provider even if no platform + backend was created + +--- + core/cog-platform.c | 14 +++++++++++++- + 1 file changed, 13 insertions(+), 1 deletion(-) + +diff --git a/core/cog-platform.c b/core/cog-platform.c +index 57b16a5..653de8a 100644 +--- a/core/cog-platform.c ++++ b/core/cog-platform.c +@@ -8,6 +8,7 @@ + + #include "cog-platform.h" + #include "cog-modules.h" ++#include "cog-gamepad.h" + + G_DEFINE_QUARK(COG_PLATFORM_ERROR, cog_platform_error) + G_DEFINE_QUARK(COG_PLATFORM_EGL_ERROR, cog_platform_egl_error) +@@ -70,6 +71,12 @@ cog_platform_get_default(void) + return default_platform; + } + ++static struct wpe_view_backend * ++gamepad_provider_get_view_backend_for_gamepad(void *provider G_GNUC_UNUSED, void *gamepad G_GNUC_UNUSED) ++{ ++ return NULL; ++} ++ + CogPlatform * + cog_platform_new(const char *name, GError **error) + { +@@ -79,6 +86,7 @@ cog_platform_new(const char *name, GError **error) + if (platform_type == G_TYPE_INVALID) { + g_set_error_literal(error, COG_PLATFORM_ERROR, COG_PLATFORM_ERROR_NO_MODULE, + "Could not find an usable platform module"); ++ cog_gamepad_setup(gamepad_provider_get_view_backend_for_gamepad); + return NULL; + } + +@@ -86,10 +94,14 @@ cog_platform_new(const char *name, GError **error) + if (G_IS_INITABLE(self)) { + if (!g_initable_init(G_INITABLE(self), + NULL, /* cancellable */ +- error)) ++ error)) { ++ cog_gamepad_setup(gamepad_provider_get_view_backend_for_gamepad); + return NULL; ++ } + } + ++ /* Each platform backend registers its gamepad provider, so no need to call ++ cog_gamepad_setup() here. */ + return g_steal_pointer(&self); + } + +-- +2.44.0 +
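
As a usage illustration (not part of the series itself), here is a minimal
sketch of how the custom-meta API backported in the GStreamer core patch
earlier in this series (gst_meta_register_custom(),
gst_custom_meta_get_structure(), gst_custom_meta_has_name()) might be
exercised from application code. The meta name "example-analysis-meta" and
the "score" field are purely illustrative assumptions.

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  static const gchar *tags[] = { NULL };
  const GstMetaInfo *info;
  GstCustomMeta *meta;
  GstStructure *s;
  GstBuffer *buf;

  gst_init (&argc, &argv);

  /* Register a custom meta backed by a GstStructure. Passing a NULL
   * transform function means the meta is simply copied on copy
   * transforms, as documented in gst_meta_register_custom() above. */
  info = gst_meta_register_custom ("example-analysis-meta", tags,
      NULL, NULL, NULL);

  buf = gst_buffer_new ();

  /* gst_buffer_add_meta() with a custom GstMetaInfo creates the backing
   * structure through the init function installed by the backport. */
  meta = (GstCustomMeta *) gst_buffer_add_meta (buf, info, NULL);

  /* The structure stays mutable as long as the buffer is writable. */
  s = gst_custom_meta_get_structure (meta);
  gst_structure_set (s, "score", G_TYPE_DOUBLE, 0.5, NULL);

  g_assert (gst_custom_meta_has_name (meta, "example-analysis-meta"));

  gst_buffer_unref (buf);
  return 0;
}

Building the sketch should only require the core library, e.g.
cc example.c $(pkg-config --cflags --libs gstreamer-1.0).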