[packages/qt5-qtwebengine] - patches from Chromium and Arch Linux to get qtwebengine to build with FFmpeg 5+
baggins
baggins@pld-linux.org
Wed Oct 18 17:00:23 CEST 2023
commit 70c5ed0ccc9cd1052aaf782b1f4d8e3b1de65fbf
Author: Jan Rękorajski <baggins@pld-linux.org>
Date: Wed Oct 18 16:59:48 2023 +0200
- patches from Chromium and Arch Linux to get qtwebengine to build with FFmpeg 5+
chromium-107-proprietary-codecs.patch | 16 +
chromium-112-ffmpeg-first_dts.patch | 102 ++
qt5-qtwebengine.spec | 8 +
qt5-webengine-ffmpeg5.patch | 150 +++
qt5-webengine-pipewire-0.3.patch | 1819 +++++++++++++++++++++++++++++++++
5 files changed, 2095 insertions(+)
---
diff --git a/qt5-qtwebengine.spec b/qt5-qtwebengine.spec
index d5dba8f..4443ddf 100644
--- a/qt5-qtwebengine.spec
+++ b/qt5-qtwebengine.spec
@@ -24,6 +24,10 @@ Source0: qtwebengine-%{version}.tar.xz
Patch0: x32.patch
Patch1: %{name}-gn-dynamic.patch
Patch2: 0001-avcodec-x86-mathops-clip-constants-used-with-shift-i.patch
+Patch3: qt5-webengine-ffmpeg5.patch
+Patch4: qt5-webengine-pipewire-0.3.patch
+Patch5: chromium-107-proprietary-codecs.patch
+Patch6: chromium-112-ffmpeg-first_dts.patch
URL: https://www.qt.io/
BuildRequires: Qt5Core-devel >= %{qtbase_ver}
BuildRequires: Qt5Designer-devel >= %{qttools_ver}
@@ -281,6 +285,10 @@ Przykłady do biblioteki Qt5 WebEngine.
%endif
%patch1 -p1
%patch2 -p1
+%patch3 -p1 -d src/3rdparty
+%patch4 -p1 -d src/3rdparty
+%patch5 -p1 -d src/3rdparty/chromium
+%patch6 -p1 -d src/3rdparty/chromium
%{qt5bindir}/syncqt.pl -version %{version}
diff --git a/chromium-107-proprietary-codecs.patch b/chromium-107-proprietary-codecs.patch
new file mode 100644
index 0000000..499be52
--- /dev/null
+++ b/chromium-107-proprietary-codecs.patch
@@ -0,0 +1,16 @@
+diff -up chromium-107.0.5304.121/media/BUILD.gn.me chromium-107.0.5304.121/media/BUILD.gn
+--- chromium-107.0.5304.121/media/BUILD.gn.me 2022-12-17 12:35:44.546779129 +0100
++++ chromium-107.0.5304.121/media/BUILD.gn 2022-12-17 12:35:59.296047028 +0100
+@@ -64,12 +64,6 @@ buildflag_header("media_buildflags") {
+ ]
+ }
+
+-if (proprietary_codecs && media_use_ffmpeg) {
+- assert(
+- ffmpeg_branding != "Chromium",
+- "proprietary codecs and ffmpeg_branding set to Chromium are incompatible")
+-}
+-
+ # Common configuration for targets in the media directory; these must not be
+ # exported since things like USE_NEON and USE_CRAS have different meanings
+ # elsewhere in the code base.
diff --git a/chromium-112-ffmpeg-first_dts.patch b/chromium-112-ffmpeg-first_dts.patch
new file mode 100644
index 0000000..e90b555
--- /dev/null
+++ b/chromium-112-ffmpeg-first_dts.patch
@@ -0,0 +1,102 @@
+From 2aef9000a1c8d76d3072365ffcb471ebffa20d3d Mon Sep 17 00:00:00 2001
+From: Andreas Schneider <asn@cryptomilk.org>
+Date: Tue, 15 Mar 2022 14:26:16 +0100
+Subject: [PATCH] Track first_dts instead of using non-upstream functions
+
+The function av_stream_get_first_dts() is not an upstream ffmpeg function and
+is not available if you build with system ffmpeg. We can easily track the
+first_dts on our own.
+
+See also
+https://ffmpeg.org/pipermail/ffmpeg-devel/2021-September/285401.html
+
+Bug: 1306560
+
+Signed-off-by: Andreas Schneider <asn@cryptomilk.org>
+Change-Id: I90ba3cf2f2e16f56a0b405f26c67f911349fb71d
+---
+ media/filters/ffmpeg_demuxer.cc | 18 ++++++++++++------
+ media/filters/ffmpeg_demuxer.h | 3 +++
+ 3 files changed, 16 insertions(+), 6 deletions(-)
+
+diff --git a/media/filters/ffmpeg_demuxer.cc b/media/filters/ffmpeg_demuxer.cc
+index 111899b661..799fc6e941 100644
+--- a/media/filters/ffmpeg_demuxer.cc
++++ b/media/filters/ffmpeg_demuxer.cc
+@@ -97,7 +97,7 @@ static base::TimeDelta FramesToTimeDelta(int frames, double sample_rate) {
+ frames * base::Time::kMicrosecondsPerSecond / sample_rate);
+ }
+
+-static base::TimeDelta ExtractStartTime(AVStream* stream) {
++static base::TimeDelta ExtractStartTime(AVStream* stream, int64_t first_dts) {
+ // The default start time is zero.
+ base::TimeDelta start_time;
+
+@@ -107,12 +107,12 @@ static base::TimeDelta ExtractStartTime(AVStream* stream) {
+
+ // Next try to use the first DTS value, for codecs where we know PTS == DTS
+ // (excludes all H26x codecs). The start time must be returned in PTS.
+- if (av_stream_get_first_dts(stream) != kNoFFmpegTimestamp &&
++ if (first_dts != AV_NOPTS_VALUE &&
+ stream->codecpar->codec_id != AV_CODEC_ID_HEVC &&
+ stream->codecpar->codec_id != AV_CODEC_ID_H264 &&
+ stream->codecpar->codec_id != AV_CODEC_ID_MPEG4) {
+ const base::TimeDelta first_pts =
+- ConvertFromTimeBase(stream->time_base, av_stream_get_first_dts(stream));
++ ConvertFromTimeBase(stream->time_base, first_dts);
+ if (first_pts < start_time)
+ start_time = first_pts;
+ }
+@@ -283,6 +283,7 @@ FFmpegDemuxerStream::FFmpegDemuxerStream(
+ fixup_negative_timestamps_(false),
+ fixup_chained_ogg_(false),
+ num_discarded_packet_warnings_(0),
++ first_dts_(AV_NOPTS_VALUE),
+ last_packet_pos_(AV_NOPTS_VALUE),
+ last_packet_dts_(AV_NOPTS_VALUE) {
+ DCHECK(demuxer_);
+@@ -349,6 +350,10 @@ void FFmpegDemuxerStream::EnqueuePacket(ScopedAVPacket packet) {
+ int64_t packet_dts =
+ packet->dts == AV_NOPTS_VALUE ? packet->pts : packet->dts;
+
++ if (first_dts_ == AV_NOPTS_VALUE) {
++ first_dts_ = packet_dts;
++ }
++
+ // Chained ogg files have non-monotonically increasing position and time stamp
+ // values, which prevents us from using them to determine if a packet should
+ // be dropped. Since chained ogg is only allowed on single track audio only
+@@ -1442,7 +1447,8 @@ void FFmpegDemuxer::OnFindStreamInfoDone(int result) {
+
+ max_duration = std::max(max_duration, streams_[i]->duration());
+
+- base::TimeDelta start_time = ExtractStartTime(stream);
++ base::TimeDelta start_time =
++ ExtractStartTime(stream, streams_[i]->first_dts());
+
+ // Note: This value is used for seeking, so we must take the true value and
+ // not the one possibly clamped to zero below.
+diff --git a/media/filters/ffmpeg_demuxer.h b/media/filters/ffmpeg_demuxer.h
+index c147309d6f..48a8f6ad8c 100644
+--- a/media/filters/ffmpeg_demuxer.h
++++ b/media/filters/ffmpeg_demuxer.h
+@@ -151,6 +151,8 @@ class MEDIA_EXPORT FFmpegDemuxerStream : public DemuxerStream {
+ base::TimeDelta start_time() const { return start_time_; }
+ void set_start_time(base::TimeDelta time) { start_time_ = time; }
+
++ int64_t first_dts() const { return first_dts_; }
++
+ private:
+ friend class FFmpegDemuxerTest;
+
+@@ -208,6 +210,7 @@ class MEDIA_EXPORT FFmpegDemuxerStream : public DemuxerStream {
+ bool fixup_chained_ogg_;
+
+ int num_discarded_packet_warnings_;
++ int64_t first_dts_;
+ int64_t last_packet_pos_;
+ int64_t last_packet_dts_;
+
+--
+2.35.1
+
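The chromium-112-ffmpeg-first_dts.patch above replaces the non-upstream av_stream_get_first_dts() helper with a first_dts_ member that FFmpegDemuxerStream records from the first packet it enqueues (falling back to PTS when the packet carries no DTS) and then hands to ExtractStartTime(). Reduced to plain FFmpeg 5 calls, the same bookkeeping looks roughly like the sketch below; the command-line argument and the printing are illustrative assumptions, not Chromium code.

// Minimal sketch: track the first DTS per stream while demuxing, mirroring
// what the patch does inside FFmpegDemuxerStream::EnqueuePacket().
extern "C" {
#include <libavformat/avformat.h>
}
#include <cstdio>
#include <vector>

int main(int argc, char** argv) {
  if (argc < 2) return 1;
  AVFormatContext* fmt = nullptr;
  if (avformat_open_input(&fmt, argv[1], nullptr, nullptr) < 0) return 1;
  if (avformat_find_stream_info(fmt, nullptr) < 0) return 1;

  // One slot per stream, initialised to "no timestamp seen yet".
  std::vector<int64_t> first_dts(fmt->nb_streams, AV_NOPTS_VALUE);

  AVPacket* pkt = av_packet_alloc();
  while (av_read_frame(fmt, pkt) >= 0) {
    // Fall back to PTS when the packet has no DTS, as the patch does.
    const int64_t dts = pkt->dts == AV_NOPTS_VALUE ? pkt->pts : pkt->dts;
    if (first_dts[pkt->stream_index] == AV_NOPTS_VALUE)
      first_dts[pkt->stream_index] = dts;
    av_packet_unref(pkt);
  }

  for (unsigned i = 0; i < fmt->nb_streams; ++i)
    std::printf("stream %u: first dts = %lld\n", i, (long long)first_dts[i]);

  av_packet_free(&pkt);
  avformat_close_input(&fmt);
  return 0;
}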
diff --git a/qt5-webengine-ffmpeg5.patch b/qt5-webengine-ffmpeg5.patch
new file mode 100644
index 0000000..58e51d0
--- /dev/null
+++ b/qt5-webengine-ffmpeg5.patch
@@ -0,0 +1,150 @@
+diff --git a/chromium/media/ffmpeg/ffmpeg_common.h b/chromium/media/ffmpeg/ffmpeg_common.h
+index 2734a485cbd..70b1877a43c 100644
+--- a/chromium/media/ffmpeg/ffmpeg_common.h
++++ b/chromium/media/ffmpeg/ffmpeg_common.h
+@@ -29,6 +29,7 @@ extern "C" {
+ #include <libavformat/avformat.h>
+ #include <libavformat/avio.h>
+ #include <libavutil/avutil.h>
++#include <libavutil/channel_layout.h>
+ #include <libavutil/imgutils.h>
+ #include <libavutil/log.h>
+ #include <libavutil/mastering_display_metadata.h>
+diff --git a/chromium/media/filters/audio_file_reader.cc b/chromium/media/filters/audio_file_reader.cc
+index cb81d920def..bd73908d0ca 100644
+--- a/chromium/media/filters/audio_file_reader.cc
++++ b/chromium/media/filters/audio_file_reader.cc
+@@ -85,7 +85,7 @@ bool AudioFileReader::OpenDemuxer() {
+ }
+
+ bool AudioFileReader::OpenDecoder() {
+- AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id);
++ const AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id);
+ if (codec) {
+ // MP3 decodes to S16P which we don't support, tell it to use S16 instead.
+ if (codec_context_->sample_fmt == AV_SAMPLE_FMT_S16P)
+diff --git a/chromium/media/filters/ffmpeg_audio_decoder.cc b/chromium/media/filters/ffmpeg_audio_decoder.cc
+index 0d825ed791b..72fac6167ef 100644
+--- a/chromium/media/filters/ffmpeg_audio_decoder.cc
++++ b/chromium/media/filters/ffmpeg_audio_decoder.cc
+@@ -329,7 +329,7 @@ bool FFmpegAudioDecoder::ConfigureDecoder(const AudioDecoderConfig& config) {
+ }
+ }
+
+- AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id);
++ const AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id);
+ if (!codec ||
+ avcodec_open2(codec_context_.get(), codec, &codec_options) < 0) {
+ DLOG(ERROR) << "Could not initialize audio decoder: "
+diff --git a/chromium/media/filters/ffmpeg_demuxer.cc b/chromium/media/filters/ffmpeg_demuxer.cc
+index d34db63f3ef..427565b00c1 100644
+--- a/chromium/media/filters/ffmpeg_demuxer.cc
++++ b/chromium/media/filters/ffmpeg_demuxer.cc
+@@ -98,12 +98,12 @@ static base::TimeDelta ExtractStartTime(AVStream* stream) {
+
+ // Next try to use the first DTS value, for codecs where we know PTS == DTS
+ // (excludes all H26x codecs). The start time must be returned in PTS.
+- if (stream->first_dts != kNoFFmpegTimestamp &&
++ if (av_stream_get_first_dts(stream) != kNoFFmpegTimestamp &&
+ stream->codecpar->codec_id != AV_CODEC_ID_HEVC &&
+ stream->codecpar->codec_id != AV_CODEC_ID_H264 &&
+ stream->codecpar->codec_id != AV_CODEC_ID_MPEG4) {
+ const base::TimeDelta first_pts =
+- ConvertFromTimeBase(stream->time_base, stream->first_dts);
++ ConvertFromTimeBase(stream->time_base, av_stream_get_first_dts(stream));
+ if (first_pts < start_time)
+ start_time = first_pts;
+ }
+@@ -408,11 +408,11 @@ void FFmpegDemuxerStream::EnqueuePacket(ScopedAVPacket packet) {
+ scoped_refptr<DecoderBuffer> buffer;
+
+ if (type() == DemuxerStream::TEXT) {
+- int id_size = 0;
++ size_t id_size = 0;
+ uint8_t* id_data = av_packet_get_side_data(
+ packet.get(), AV_PKT_DATA_WEBVTT_IDENTIFIER, &id_size);
+
+- int settings_size = 0;
++ size_t settings_size = 0;
+ uint8_t* settings_data = av_packet_get_side_data(
+ packet.get(), AV_PKT_DATA_WEBVTT_SETTINGS, &settings_size);
+
+@@ -424,7 +424,7 @@ void FFmpegDemuxerStream::EnqueuePacket(ScopedAVPacket packet) {
+ buffer = DecoderBuffer::CopyFrom(packet->data, packet->size,
+ side_data.data(), side_data.size());
+ } else {
+- int side_data_size = 0;
++ size_t side_data_size = 0;
+ uint8_t* side_data = av_packet_get_side_data(
+ packet.get(), AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL, &side_data_size);
+
+@@ -485,7 +485,7 @@ void FFmpegDemuxerStream::EnqueuePacket(ScopedAVPacket packet) {
+ packet->size - data_offset);
+ }
+
+- int skip_samples_size = 0;
++ size_t skip_samples_size = 0;
+ const uint32_t* skip_samples_ptr =
+ reinterpret_cast<const uint32_t*>(av_packet_get_side_data(
+ packet.get(), AV_PKT_DATA_SKIP_SAMPLES, &skip_samples_size));
+diff --git a/chromium/media/filters/ffmpeg_glue.cc b/chromium/media/filters/ffmpeg_glue.cc
+index 0ef3521473d..8483ecc348f 100644
+--- a/chromium/media/filters/ffmpeg_glue.cc
++++ b/chromium/media/filters/ffmpeg_glue.cc
+@@ -59,7 +59,6 @@ static int64_t AVIOSeekOperation(void* opaque, int64_t offset, int whence) {
+ }
+
+ void FFmpegGlue::InitializeFFmpeg() {
+- av_register_all();
+ }
+
+ static void LogContainer(bool is_local_file,
+@@ -95,9 +94,6 @@ FFmpegGlue::FFmpegGlue(FFmpegURLProtocol* protocol) {
+ // Enable fast, but inaccurate seeks for MP3.
+ format_context_->flags |= AVFMT_FLAG_FAST_SEEK;
+
+- // Ensures we can read out various metadata bits like vp8 alpha.
+- format_context_->flags |= AVFMT_FLAG_KEEP_SIDE_DATA;
+-
+ // Ensures format parsing errors will bail out. From an audit on 11/2017, all
+ // instances were real failures. Solves bugs like http://crbug.com/710791.
+ format_context_->error_recognition |= AV_EF_EXPLODE;
+diff --git a/chromium/media/filters/ffmpeg_video_decoder.cc b/chromium/media/filters/ffmpeg_video_decoder.cc
+index ef12477ee89..7996606f5f9 100644
+--- a/chromium/media/filters/ffmpeg_video_decoder.cc
++++ b/chromium/media/filters/ffmpeg_video_decoder.cc
+@@ -391,7 +391,7 @@ bool FFmpegVideoDecoder::ConfigureDecoder(const VideoDecoderConfig& config,
+ if (decode_nalus_)
+ codec_context_->flags2 |= AV_CODEC_FLAG2_CHUNKS;
+
+- AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id);
++ const AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id);
+ if (!codec || avcodec_open2(codec_context_.get(), codec, NULL) < 0) {
+ ReleaseFFmpegResources();
+ return false;
+diff --git a/chromium/media/filters/media_file_checker.cc b/chromium/media/filters/media_file_checker.cc
+index 59c2a2fc618..1a9872c7acb 100644
+--- a/chromium/media/filters/media_file_checker.cc
++++ b/chromium/media/filters/media_file_checker.cc
+@@ -68,7 +68,7 @@ bool MediaFileChecker::Start(base::TimeDelta check_time) {
+ auto context = AVStreamToAVCodecContext(format_context->streams[i]);
+ if (!context)
+ continue;
+- AVCodec* codec = avcodec_find_decoder(cp->codec_id);
++ const AVCodec* codec = avcodec_find_decoder(cp->codec_id);
+ if (codec && avcodec_open2(context.get(), codec, nullptr) >= 0) {
+ auto loop = std::make_unique<FFmpegDecodingLoop>(context.get());
+ stream_contexts[i] = {std::move(context), std::move(loop)};
+diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
+index 9002b874611..d12fade8b63 100644
+--- a/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
++++ b/chromium/third_party/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
+@@ -203,7 +203,7 @@ int32_t H264DecoderImpl::InitDecode(const VideoCodec* codec_settings,
+ // a pointer |this|.
+ av_context_->opaque = this;
+
+- AVCodec* codec = avcodec_find_decoder(av_context_->codec_id);
++ const AVCodec* codec = avcodec_find_decoder(av_context_->codec_id);
+ if (!codec) {
+ // This is an indication that FFmpeg has not been initialized or it has not
+ // been compiled/initialized with the correct set of codecs.
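The qt5-webengine-ffmpeg5.patch above is mostly mechanical API work for FFmpeg 5: avcodec_find_decoder() now returns const AVCodec*, av_packet_get_side_data() reports sizes through a size_t*, libavutil/channel_layout.h has to be included explicitly, and av_register_all() plus AVFMT_FLAG_KEEP_SIDE_DATA are gone (registration is automatic and side data is always retained). A minimal sketch of the decoder-opening path as it has to look against FFmpeg 5, using only the libavcodec calls involved and nothing Chromium-specific:

// Opening a decoder with FFmpeg 5: note the const AVCodec*, and that no
// av_register_all() call is needed (or available) any more.
extern "C" {
#include <libavcodec/avcodec.h>
}

static AVCodecContext* open_decoder(enum AVCodecID id) {
  const AVCodec* codec = avcodec_find_decoder(id);  // const in FFmpeg >= 5
  if (!codec)
    return nullptr;
  AVCodecContext* ctx = avcodec_alloc_context3(codec);
  if (!ctx)
    return nullptr;
  if (avcodec_open2(ctx, codec, nullptr) < 0) {
    avcodec_free_context(&ctx);
    return nullptr;
  }
  return ctx;
}

// Side-data sizes are size_t now, e.g.:
//   size_t skip_size = 0;
//   av_packet_get_side_data(pkt, AV_PKT_DATA_SKIP_SAMPLES, &skip_size);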
diff --git a/qt5-webengine-pipewire-0.3.patch b/qt5-webengine-pipewire-0.3.patch
new file mode 100644
index 0000000..f8047e0
--- /dev/null
+++ b/qt5-webengine-pipewire-0.3.patch
@@ -0,0 +1,1819 @@
+diff --git a/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn b/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn
+index 5235512735d..8259442f811 100644
+--- a/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn
++++ b/chromium/third_party/webrtc/modules/desktop_capture/BUILD.gn
+@@ -11,6 +11,11 @@ import("//build/config/ui.gni")
+ import("//tools/generate_stubs/rules.gni")
+ import("../../webrtc.gni")
+
++if (rtc_use_pipewire) {
++ assert(rtc_pipewire_version == "0.2" || rtc_pipewire_version == "0.3",
++ "Unsupported PipeWire version")
++}
++
+ use_desktop_capture_differ_sse2 = current_cpu == "x86" || current_cpu == "x64"
+
+ config("x11_config") {
+@@ -200,22 +205,41 @@ if (is_linux || is_chromeos) {
+ ]
+ }
+
+- if (rtc_link_pipewire) {
++ if (rtc_pipewire_version == "0.3") {
+ pkg_config("pipewire") {
+- packages = [ "libpipewire-0.2" ]
++ packages = [ "libpipewire-0.3" ]
++ if (!rtc_link_pipewire) {
++ ignore_libs = true
++ }
+ }
+ } else {
++ pkg_config("pipewire") {
++ packages = [ "libpipewire-0.2" ]
++ if (!rtc_link_pipewire) {
++ ignore_libs = true
++ }
++ }
++ }
++
++ if (!rtc_link_pipewire) {
+ # When libpipewire is not directly linked, use stubs to allow for dlopening of
+ # the binary.
+ generate_stubs("pipewire_stubs") {
+- configs = [ "../../:common_config" ]
++ configs = [
++ "../../:common_config",
++ ":pipewire",
++ ]
+ deps = [ "../../rtc_base" ]
+ extra_header = "linux/pipewire_stub_header.fragment"
+ logging_function = "RTC_LOG(LS_VERBOSE)"
+ logging_include = "rtc_base/logging.h"
+ output_name = "linux/pipewire_stubs"
+ path_from_source = "modules/desktop_capture/linux"
+- sigs = [ "linux/pipewire.sigs" ]
++ if (rtc_pipewire_version == "0.3") {
++ sigs = [ "linux/pipewire03.sigs" ]
++ } else {
++ sigs = [ "linux/pipewire02.sigs" ]
++ }
+ }
+ }
+
+@@ -506,6 +530,7 @@ rtc_library("desktop_capture_generic") {
+ absl_deps = [
+ "//third_party/abseil-cpp/absl/memory",
+ "//third_party/abseil-cpp/absl/strings",
++ "//third_party/abseil-cpp/absl/types:optional",
+ ]
+
+ if (rtc_use_x11_extensions) {
+@@ -526,20 +551,15 @@ rtc_library("desktop_capture_generic") {
+ sources += [
+ "linux/base_capturer_pipewire.cc",
+ "linux/base_capturer_pipewire.h",
+- "linux/screen_capturer_pipewire.cc",
+- "linux/screen_capturer_pipewire.h",
+- "linux/window_capturer_pipewire.cc",
+- "linux/window_capturer_pipewire.h",
+ ]
+
+ configs += [
+ ":pipewire_config",
+ ":gio",
++ ":pipewire",
+ ]
+
+- if (rtc_link_pipewire) {
+- configs += [ ":pipewire" ]
+- } else {
++ if (!rtc_link_pipewire) {
+ deps += [ ":pipewire_stubs" ]
+ }
+ }
+diff --git a/chromium/third_party/webrtc/modules/desktop_capture/linux/base_capturer_pipewire.cc b/chromium/third_party/webrtc/modules/desktop_capture/linux/base_capturer_pipewire.cc
+index 2640e93aa98..c302a086ead 100644
+--- a/chromium/third_party/webrtc/modules/desktop_capture/linux/base_capturer_pipewire.cc
++++ b/chromium/third_party/webrtc/modules/desktop_capture/linux/base_capturer_pipewire.cc
+@@ -14,8 +14,14 @@
+ #include <glib-object.h>
+ #include <spa/param/format-utils.h>
+ #include <spa/param/props.h>
++#if !PW_CHECK_VERSION(0, 3, 0)
+ #include <spa/param/video/raw-utils.h>
+ #include <spa/support/type-map.h>
++#endif
++
++#include <sys/ioctl.h>
++#include <sys/mman.h>
++#include <sys/syscall.h>
+
+ #include <memory>
+ #include <utility>
+@@ -30,7 +36,11 @@
+ #include "modules/desktop_capture/linux/pipewire_stubs.h"
+
+ using modules_desktop_capture_linux::InitializeStubs;
+-using modules_desktop_capture_linux::kModulePipewire;
++#if PW_CHECK_VERSION(0, 3, 0)
++using modules_desktop_capture_linux::kModulePipewire03;
++#else
++using modules_desktop_capture_linux::kModulePipewire02;
++#endif
+ using modules_desktop_capture_linux::StubPathMap;
+ #endif // defined(WEBRTC_DLOPEN_PIPEWIRE)
+
+@@ -47,9 +57,156 @@ const char kScreenCastInterfaceName[] = "org.freedesktop.portal.ScreenCast";
+ const int kBytesPerPixel = 4;
+
+ #if defined(WEBRTC_DLOPEN_PIPEWIRE)
++#if PW_CHECK_VERSION(0, 3, 0)
++const char kPipeWireLib[] = "libpipewire-0.3.so.0";
++#else
+ const char kPipeWireLib[] = "libpipewire-0.2.so.1";
+ #endif
++#endif
+
++// static
++struct dma_buf_sync {
++ uint64_t flags;
++};
++#define DMA_BUF_SYNC_READ (1 << 0)
++#define DMA_BUF_SYNC_START (0 << 2)
++#define DMA_BUF_SYNC_END (1 << 2)
++#define DMA_BUF_BASE 'b'
++#define DMA_BUF_IOCTL_SYNC _IOW(DMA_BUF_BASE, 0, struct dma_buf_sync)
++
++static void SyncDmaBuf(int fd, uint64_t start_or_end) {
++ struct dma_buf_sync sync = {0};
++
++ sync.flags = start_or_end | DMA_BUF_SYNC_READ;
++
++ while (true) {
++ int ret;
++ ret = ioctl(fd, DMA_BUF_IOCTL_SYNC, &sync);
++ if (ret == -1 && errno == EINTR) {
++ continue;
++ } else if (ret == -1) {
++ RTC_LOG(LS_ERROR) << "Failed to synchronize DMA buffer: "
++ << g_strerror(errno);
++ break;
++ } else {
++ break;
++ }
++ }
++}
++
++class ScopedBuf {
++ public:
++ ScopedBuf() {}
++ ScopedBuf(unsigned char* map, int map_size, bool is_dma_buf, int fd)
++ : map_(map), map_size_(map_size), is_dma_buf_(is_dma_buf), fd_(fd) {}
++ ~ScopedBuf() {
++ if (map_ != MAP_FAILED) {
++ if (is_dma_buf_) {
++ SyncDmaBuf(fd_, DMA_BUF_SYNC_END);
++ }
++ munmap(map_, map_size_);
++ }
++ }
++
++ operator bool() { return map_ != MAP_FAILED; }
++
++ void initialize(unsigned char* map, int map_size, bool is_dma_buf, int fd) {
++ map_ = map;
++ map_size_ = map_size;
++ is_dma_buf_ = is_dma_buf;
++ fd_ = fd;
++ }
++
++ unsigned char* get() { return map_; }
++
++ protected:
++ unsigned char* map_ = nullptr;
++ int map_size_;
++ bool is_dma_buf_;
++ int fd_;
++};
++
++template <class T>
++class Scoped {
++ public:
++ Scoped() {}
++ explicit Scoped(T* val) { ptr_ = val; }
++ ~Scoped() { RTC_NOTREACHED(); }
++
++ T* operator->() { return ptr_; }
++
++ bool operator!() { return ptr_ == nullptr; }
++
++ T* get() { return ptr_; }
++
++ T** receive() {
++ RTC_CHECK(!ptr_);
++ return &ptr_;
++ }
++
++ Scoped& operator=(T* val) {
++ ptr_ = val;
++ return *this;
++ }
++
++ protected:
++ T* ptr_ = nullptr;
++};
++
++template <>
++Scoped<GError>::~Scoped() {
++ if (ptr_) {
++ g_error_free(ptr_);
++ }
++}
++
++template <>
++Scoped<gchar>::~Scoped() {
++ if (ptr_) {
++ g_free(ptr_);
++ }
++}
++
++template <>
++Scoped<GVariant>::~Scoped() {
++ if (ptr_) {
++ g_variant_unref(ptr_);
++ }
++}
++
++template <>
++Scoped<GVariantIter>::~Scoped() {
++ if (ptr_) {
++ g_variant_iter_free(ptr_);
++ }
++}
++
++template <>
++Scoped<GDBusMessage>::~Scoped() {
++ if (ptr_) {
++ g_object_unref(ptr_);
++ }
++}
++
++template <>
++Scoped<GUnixFDList>::~Scoped() {
++ if (ptr_) {
++ g_object_unref(ptr_);
++ }
++}
++
++#if PW_CHECK_VERSION(0, 3, 0)
++void BaseCapturerPipeWire::OnCoreError(void* data,
++ uint32_t id,
++ int seq,
++ int res,
++ const char* message) {
++ BaseCapturerPipeWire* that = static_cast<BaseCapturerPipeWire*>(data);
++ RTC_DCHECK(that);
++
++ RTC_LOG(LS_ERROR) << "PipeWire remote error: " << message;
++}
++#else
+ // static
+ void BaseCapturerPipeWire::OnStateChanged(void* data,
+ pw_remote_state old_state,
+@@ -64,7 +221,7 @@ void BaseCapturerPipeWire::OnStateChanged(void* data,
+ break;
+ case PW_REMOTE_STATE_CONNECTED:
+ RTC_LOG(LS_INFO) << "PipeWire remote state: connected.";
+- that->CreateReceivingStream();
++ that->pw_stream_ = that->CreateReceivingStream();
+ break;
+ case PW_REMOTE_STATE_CONNECTING:
+ RTC_LOG(LS_INFO) << "PipeWire remote state: connecting.";
+@@ -74,6 +231,7 @@ void BaseCapturerPipeWire::OnStateChanged(void* data,
+ break;
+ }
+ }
++#endif
+
+ // static
+ void BaseCapturerPipeWire::OnStreamStateChanged(void* data,
+@@ -83,6 +241,18 @@ void BaseCapturerPipeWire::OnStreamStateChanged(void* data,
+ BaseCapturerPipeWire* that = static_cast<BaseCapturerPipeWire*>(data);
+ RTC_DCHECK(that);
+
++#if PW_CHECK_VERSION(0, 3, 0)
++ switch (state) {
++ case PW_STREAM_STATE_ERROR:
++ RTC_LOG(LS_ERROR) << "PipeWire stream state error: " << error_message;
++ break;
++ case PW_STREAM_STATE_PAUSED:
++ case PW_STREAM_STATE_STREAMING:
++ case PW_STREAM_STATE_UNCONNECTED:
++ case PW_STREAM_STATE_CONNECTING:
++ break;
++ }
++#else
+ switch (state) {
+ case PW_STREAM_STATE_ERROR:
+ RTC_LOG(LS_ERROR) << "PipeWire stream state error: " << error_message;
+@@ -97,36 +267,74 @@ void BaseCapturerPipeWire::OnStreamStateChanged(void* data,
+ case PW_STREAM_STATE_STREAMING:
+ break;
+ }
++#endif
+ }
+
+ // static
++#if PW_CHECK_VERSION(0, 3, 0)
++void BaseCapturerPipeWire::OnStreamParamChanged(void* data,
++ uint32_t id,
++ const struct spa_pod* format) {
++#else
+ void BaseCapturerPipeWire::OnStreamFormatChanged(void* data,
+ const struct spa_pod* format) {
++#endif
+ BaseCapturerPipeWire* that = static_cast<BaseCapturerPipeWire*>(data);
+ RTC_DCHECK(that);
+
+ RTC_LOG(LS_INFO) << "PipeWire stream format changed.";
+
++#if PW_CHECK_VERSION(0, 3, 0)
++ if (!format || id != SPA_PARAM_Format) {
++#else
+ if (!format) {
+ pw_stream_finish_format(that->pw_stream_, /*res=*/0, /*params=*/nullptr,
+ /*n_params=*/0);
++#endif
+ return;
+ }
+
++#if PW_CHECK_VERSION(0, 3, 0)
++ spa_format_video_raw_parse(format, &that->spa_video_format_);
++#else
+ that->spa_video_format_ = new spa_video_info_raw();
+ spa_format_video_raw_parse(format, that->spa_video_format_,
+ &that->pw_type_->format_video);
++#endif
+
++#if PW_CHECK_VERSION(0, 3, 0)
++ auto width = that->spa_video_format_.size.width;
++ auto height = that->spa_video_format_.size.height;
++#else
+ auto width = that->spa_video_format_->size.width;
+ auto height = that->spa_video_format_->size.height;
++#endif
+ auto stride = SPA_ROUND_UP_N(width * kBytesPerPixel, 4);
+ auto size = height * stride;
+
++ that->desktop_size_ = DesktopSize(width, height);
++
+ uint8_t buffer[1024] = {};
+ auto builder = spa_pod_builder{buffer, sizeof(buffer)};
+
+ // Setup buffers and meta header for new format.
+- const struct spa_pod* params[2];
++ const struct spa_pod* params[3];
++#if PW_CHECK_VERSION(0, 3, 0)
++ params[0] = reinterpret_cast<spa_pod*>(spa_pod_builder_add_object(
++ &builder, SPA_TYPE_OBJECT_ParamBuffers, SPA_PARAM_Buffers,
++ SPA_PARAM_BUFFERS_size, SPA_POD_Int(size), SPA_PARAM_BUFFERS_stride,
++ SPA_POD_Int(stride), SPA_PARAM_BUFFERS_buffers,
++ SPA_POD_CHOICE_RANGE_Int(8, 1, 32)));
++ params[1] = reinterpret_cast<spa_pod*>(spa_pod_builder_add_object(
++ &builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type,
++ SPA_POD_Id(SPA_META_Header), SPA_PARAM_META_size,
++ SPA_POD_Int(sizeof(struct spa_meta_header))));
++ params[2] = reinterpret_cast<spa_pod*>(spa_pod_builder_add_object(
++ &builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type,
++ SPA_POD_Id(SPA_META_VideoCrop), SPA_PARAM_META_size,
++ SPA_POD_Int(sizeof(struct spa_meta_region))));
++ pw_stream_update_params(that->pw_stream_, params, 3);
++#else
+ params[0] = reinterpret_cast<spa_pod*>(spa_pod_builder_object(
+ &builder,
+ // id to enumerate buffer requirements
+@@ -155,8 +363,18 @@ void BaseCapturerPipeWire::OnStreamFormatChanged(void* data,
+ // Size: size of the metadata, specified as integer (i)
+ ":", that->pw_core_type_->param_meta.size, "i",
+ sizeof(struct spa_meta_header)));
+-
+- pw_stream_finish_format(that->pw_stream_, /*res=*/0, params, /*n_params=*/2);
++ params[2] = reinterpret_cast<spa_pod*>(spa_pod_builder_object(
++ &builder,
++ // id to enumerate supported metadata
++ that->pw_core_type_->param.idMeta, that->pw_core_type_->param_meta.Meta,
++ // Type: specified as id or enum (I)
++ ":", that->pw_core_type_->param_meta.type, "I",
++ that->pw_core_type_->meta.VideoCrop,
++ // Size: size of the metadata, specified as integer (i)
++ ":", that->pw_core_type_->param_meta.size, "i",
++ sizeof(struct spa_meta_video_crop)));
++ pw_stream_finish_format(that->pw_stream_, /*res=*/0, params, /*n_params=*/3);
++#endif
+ }
+
+ // static
+@@ -164,15 +382,26 @@ void BaseCapturerPipeWire::OnStreamProcess(void* data) {
+ BaseCapturerPipeWire* that = static_cast<BaseCapturerPipeWire*>(data);
+ RTC_DCHECK(that);
+
+- pw_buffer* buf = nullptr;
++ struct pw_buffer* next_buffer;
++ struct pw_buffer* buffer = nullptr;
++
++ next_buffer = pw_stream_dequeue_buffer(that->pw_stream_);
++ while (next_buffer) {
++ buffer = next_buffer;
++ next_buffer = pw_stream_dequeue_buffer(that->pw_stream_);
+
+- if (!(buf = pw_stream_dequeue_buffer(that->pw_stream_))) {
++ if (next_buffer) {
++ pw_stream_queue_buffer(that->pw_stream_, buffer);
++ }
++ }
++
++ if (!buffer) {
+ return;
+ }
+
+- that->HandleBuffer(buf);
++ that->HandleBuffer(buffer);
+
+- pw_stream_queue_buffer(that->pw_stream_, buf);
++ pw_stream_queue_buffer(that->pw_stream_, buffer);
+ }
+
+ BaseCapturerPipeWire::BaseCapturerPipeWire(CaptureSourceType source_type)
+@@ -183,6 +412,7 @@ BaseCapturerPipeWire::~BaseCapturerPipeWire() {
+ pw_thread_loop_stop(pw_main_loop_);
+ }
+
++#if !PW_CHECK_VERSION(0, 3, 0)
+ if (pw_type_) {
+ delete pw_type_;
+ }
+@@ -190,30 +420,41 @@ BaseCapturerPipeWire::~BaseCapturerPipeWire() {
+ if (spa_video_format_) {
+ delete spa_video_format_;
+ }
++#endif
+
+ if (pw_stream_) {
+ pw_stream_destroy(pw_stream_);
+ }
+
++#if !PW_CHECK_VERSION(0, 3, 0)
+ if (pw_remote_) {
+ pw_remote_destroy(pw_remote_);
+ }
++#endif
+
++#if PW_CHECK_VERSION(0, 3, 0)
++ if (pw_core_) {
++ pw_core_disconnect(pw_core_);
++ }
++
++ if (pw_context_) {
++ pw_context_destroy(pw_context_);
++ }
++#else
+ if (pw_core_) {
+ pw_core_destroy(pw_core_);
+ }
++#endif
+
+ if (pw_main_loop_) {
+ pw_thread_loop_destroy(pw_main_loop_);
+ }
+
++#if !PW_CHECK_VERSION(0, 3, 0)
+ if (pw_loop_) {
+ pw_loop_destroy(pw_loop_);
+ }
+-
+- if (current_frame_) {
+- free(current_frame_);
+- }
++#endif
+
+ if (start_request_signal_id_) {
+ g_dbus_connection_signal_unsubscribe(connection_, start_request_signal_id_);
+@@ -228,18 +469,16 @@ BaseCapturerPipeWire::~BaseCapturerPipeWire() {
+ }
+
+ if (session_handle_) {
+- GDBusMessage* message = g_dbus_message_new_method_call(
+- kDesktopBusName, session_handle_, kSessionInterfaceName, "Close");
+- if (message) {
+- GError* error = nullptr;
+- g_dbus_connection_send_message(connection_, message,
++ Scoped<GDBusMessage> message(g_dbus_message_new_method_call(
++ kDesktopBusName, session_handle_, kSessionInterfaceName, "Close"));
++ if (message.get()) {
++ Scoped<GError> error;
++ g_dbus_connection_send_message(connection_, message.get(),
+ G_DBUS_SEND_MESSAGE_FLAGS_NONE,
+- /*out_serial=*/nullptr, &error);
+- if (error) {
++ /*out_serial=*/nullptr, error.receive());
++ if (error.get()) {
+ RTC_LOG(LS_ERROR) << "Failed to close the session: " << error->message;
+- g_error_free(error);
+ }
+- g_object_unref(message);
+ }
+ }
+
+@@ -274,7 +513,11 @@ void BaseCapturerPipeWire::InitPipeWire() {
+ StubPathMap paths;
+
+ // Check if the PipeWire library is available.
+- paths[kModulePipewire].push_back(kPipeWireLib);
++#if PW_CHECK_VERSION(0, 3, 0)
++ paths[kModulePipewire03].push_back(kPipeWireLib);
++#else
++ paths[kModulePipewire02].push_back(kPipeWireLib);
++#endif
+ if (!InitializeStubs(paths)) {
+ RTC_LOG(LS_ERROR) << "Failed to load the PipeWire library and symbols.";
+ portal_init_failed_ = true;
+@@ -284,16 +527,46 @@ void BaseCapturerPipeWire::InitPipeWire() {
+
+ pw_init(/*argc=*/nullptr, /*argc=*/nullptr);
+
++#if PW_CHECK_VERSION(0, 3, 0)
++ pw_main_loop_ = pw_thread_loop_new("pipewire-main-loop", nullptr);
++
++ pw_thread_loop_lock(pw_main_loop_);
++
++ pw_context_ =
++ pw_context_new(pw_thread_loop_get_loop(pw_main_loop_), nullptr, 0);
++ if (!pw_context_) {
++ RTC_LOG(LS_ERROR) << "Failed to create PipeWire context";
++ return;
++ }
++
++ pw_core_ = pw_context_connect(pw_context_, nullptr, 0);
++ if (!pw_core_) {
++ RTC_LOG(LS_ERROR) << "Failed to connect PipeWire context";
++ return;
++ }
++#else
+ pw_loop_ = pw_loop_new(/*properties=*/nullptr);
+ pw_main_loop_ = pw_thread_loop_new(pw_loop_, "pipewire-main-loop");
+
++ pw_thread_loop_lock(pw_main_loop_);
++
+ pw_core_ = pw_core_new(pw_loop_, /*properties=*/nullptr);
+ pw_core_type_ = pw_core_get_type(pw_core_);
+ pw_remote_ = pw_remote_new(pw_core_, nullptr, /*user_data_size=*/0);
+
+ InitPipeWireTypes();
++#endif
+
+ // Initialize event handlers, remote end and stream-related.
++#if PW_CHECK_VERSION(0, 3, 0)
++ pw_core_events_.version = PW_VERSION_CORE_EVENTS;
++ pw_core_events_.error = &OnCoreError;
++
++ pw_stream_events_.version = PW_VERSION_STREAM_EVENTS;
++ pw_stream_events_.state_changed = &OnStreamStateChanged;
++ pw_stream_events_.param_changed = &OnStreamParamChanged;
++ pw_stream_events_.process = &OnStreamProcess;
++#else
+ pw_remote_events_.version = PW_VERSION_REMOTE_EVENTS;
+ pw_remote_events_.state_changed = &OnStateChanged;
+
+@@ -301,19 +574,33 @@ void BaseCapturerPipeWire::InitPipeWire() {
+ pw_stream_events_.state_changed = &OnStreamStateChanged;
+ pw_stream_events_.format_changed = &OnStreamFormatChanged;
+ pw_stream_events_.process = &OnStreamProcess;
++#endif
+
++#if PW_CHECK_VERSION(0, 3, 0)
++ pw_core_add_listener(pw_core_, &spa_core_listener_, &pw_core_events_, this);
++
++ pw_stream_ = CreateReceivingStream();
++ if (!pw_stream_) {
++ RTC_LOG(LS_ERROR) << "Failed to create PipeWire stream";
++ return;
++ }
++#else
+ pw_remote_add_listener(pw_remote_, &spa_remote_listener_, &pw_remote_events_,
+ this);
+ pw_remote_connect_fd(pw_remote_, pw_fd_);
++#endif
+
+ if (pw_thread_loop_start(pw_main_loop_) < 0) {
+ RTC_LOG(LS_ERROR) << "Failed to start main PipeWire loop";
+ portal_init_failed_ = true;
+ }
+
++ pw_thread_loop_unlock(pw_main_loop_);
++
+ RTC_LOG(LS_INFO) << "PipeWire remote opened.";
+ }
+
++#if !PW_CHECK_VERSION(0, 3, 0)
+ void BaseCapturerPipeWire::InitPipeWireTypes() {
+ spa_type_map* map = pw_core_type_->map;
+ pw_type_ = new PipeWireType();
+@@ -323,23 +610,44 @@ void BaseCapturerPipeWire::InitPipeWireTypes() {
+ spa_type_format_video_map(map, &pw_type_->format_video);
+ spa_type_video_format_map(map, &pw_type_->video_format);
+ }
++#endif
+
+-void BaseCapturerPipeWire::CreateReceivingStream() {
++pw_stream* BaseCapturerPipeWire::CreateReceivingStream() {
++#if !PW_CHECK_VERSION(0, 3, 0)
++ if (pw_remote_get_state(pw_remote_, nullptr) != PW_REMOTE_STATE_CONNECTED) {
++ RTC_LOG(LS_ERROR) << "Cannot create pipewire stream";
++ return nullptr;
++ }
++#endif
+ spa_rectangle pwMinScreenBounds = spa_rectangle{1, 1};
+- spa_rectangle pwScreenBounds =
+- spa_rectangle{static_cast<uint32_t>(desktop_size_.width()),
+- static_cast<uint32_t>(desktop_size_.height())};
+-
+- spa_fraction pwFrameRateMin = spa_fraction{0, 1};
+- spa_fraction pwFrameRateMax = spa_fraction{60, 1};
++ spa_rectangle pwMaxScreenBounds = spa_rectangle{UINT32_MAX, UINT32_MAX};
+
+ pw_properties* reuseProps =
+ pw_properties_new_string("pipewire.client.reuse=1");
+- pw_stream_ = pw_stream_new(pw_remote_, "webrtc-consume-stream", reuseProps);
++#if PW_CHECK_VERSION(0, 3, 0)
++ auto stream = pw_stream_new(pw_core_, "webrtc-consume-stream", reuseProps);
++#else
++ auto stream = pw_stream_new(pw_remote_, "webrtc-consume-stream", reuseProps);
++#endif
+
+ uint8_t buffer[1024] = {};
+ const spa_pod* params[1];
+ spa_pod_builder builder = spa_pod_builder{buffer, sizeof(buffer)};
++
++#if PW_CHECK_VERSION(0, 3, 0)
++ params[0] = reinterpret_cast<spa_pod*>(spa_pod_builder_add_object(
++ &builder, SPA_TYPE_OBJECT_Format, SPA_PARAM_EnumFormat,
++ SPA_FORMAT_mediaType, SPA_POD_Id(SPA_MEDIA_TYPE_video),
++ SPA_FORMAT_mediaSubtype, SPA_POD_Id(SPA_MEDIA_SUBTYPE_raw),
++ SPA_FORMAT_VIDEO_format,
++ SPA_POD_CHOICE_ENUM_Id(5, SPA_VIDEO_FORMAT_BGRx, SPA_VIDEO_FORMAT_RGBx,
++ SPA_VIDEO_FORMAT_RGBA, SPA_VIDEO_FORMAT_BGRx,
++ SPA_VIDEO_FORMAT_BGRA),
++ SPA_FORMAT_VIDEO_size,
++ SPA_POD_CHOICE_RANGE_Rectangle(&pwMinScreenBounds, &pwMinScreenBounds,
++ &pwMaxScreenBounds),
++ 0));
++#else
+ params[0] = reinterpret_cast<spa_pod*>(spa_pod_builder_object(
+ &builder,
+ // id to enumerate formats
+@@ -349,69 +657,218 @@ void BaseCapturerPipeWire::CreateReceivingStream() {
+ // then allowed formats are enumerated (e) and the format is undecided (u)
+ // to allow negotiation
+ ":", pw_type_->format_video.format, "Ieu", pw_type_->video_format.BGRx,
+- SPA_POD_PROP_ENUM(2, pw_type_->video_format.RGBx,
+- pw_type_->video_format.BGRx),
++ SPA_POD_PROP_ENUM(
++ 4, pw_type_->video_format.RGBx, pw_type_->video_format.BGRx,
++ pw_type_->video_format.RGBA, pw_type_->video_format.BGRA),
+ // Video size: specified as rectangle (R), preferred size is specified as
+ // first parameter, then allowed size is defined as range (r) from min and
+ // max values and the format is undecided (u) to allow negotiation
+- ":", pw_type_->format_video.size, "Rru", &pwScreenBounds, 2,
+- &pwMinScreenBounds, &pwScreenBounds,
+- // Frame rate: specified as fraction (F) and set to minimum frame rate
+- // value
+- ":", pw_type_->format_video.framerate, "F", &pwFrameRateMin,
+- // Max frame rate: specified as fraction (F), preferred frame rate is set
+- // to maximum value, then allowed frame rate is defined as range (r) from
+- // min and max values and it is undecided (u) to allow negotiation
+- ":", pw_type_->format_video.max_framerate, "Fru", &pwFrameRateMax, 2,
+- &pwFrameRateMin, &pwFrameRateMax));
+-
+- pw_stream_add_listener(pw_stream_, &spa_stream_listener_, &pw_stream_events_,
++ ":", pw_type_->format_video.size, "Rru", &pwMinScreenBounds,
++ SPA_POD_PROP_MIN_MAX(&pwMinScreenBounds, &pwMaxScreenBounds)));
++#endif
++
++ pw_stream_add_listener(stream, &spa_stream_listener_, &pw_stream_events_,
+ this);
++#if PW_CHECK_VERSION(0, 3, 0)
++ if (pw_stream_connect(stream, PW_DIRECTION_INPUT, pw_stream_node_id_,
++ PW_STREAM_FLAG_AUTOCONNECT, params, 1) != 0) {
++#else
+ pw_stream_flags flags = static_cast<pw_stream_flags>(
+- PW_STREAM_FLAG_AUTOCONNECT | PW_STREAM_FLAG_INACTIVE |
+- PW_STREAM_FLAG_MAP_BUFFERS);
+- if (pw_stream_connect(pw_stream_, PW_DIRECTION_INPUT, /*port_path=*/nullptr,
++ PW_STREAM_FLAG_AUTOCONNECT | PW_STREAM_FLAG_INACTIVE);
++ if (pw_stream_connect(stream, PW_DIRECTION_INPUT, /*port_path=*/nullptr,
+ flags, params,
+ /*n_params=*/1) != 0) {
++#endif
+ RTC_LOG(LS_ERROR) << "Could not connect receiving stream.";
+ portal_init_failed_ = true;
+- return;
++ return nullptr;
+ }
++
++ return stream;
+ }
+
+ void BaseCapturerPipeWire::HandleBuffer(pw_buffer* buffer) {
+ spa_buffer* spaBuffer = buffer->buffer;
+- void* src = nullptr;
++ ScopedBuf map;
++ uint8_t* src = nullptr;
++
++ if (spaBuffer->datas[0].chunk->size == 0) {
++ RTC_LOG(LS_ERROR) << "Failed to get video stream: Zero size.";
++ return;
++ }
++
++#if PW_CHECK_VERSION(0, 3, 0)
++ if (spaBuffer->datas[0].type == SPA_DATA_MemFd ||
++ spaBuffer->datas[0].type == SPA_DATA_DmaBuf) {
++#else
++ if (spaBuffer->datas[0].type == pw_core_type_->data.MemFd ||
++ spaBuffer->datas[0].type == pw_core_type_->data.DmaBuf) {
++#endif
++ map.initialize(
++ static_cast<uint8_t*>(
++ mmap(nullptr,
++ spaBuffer->datas[0].maxsize + spaBuffer->datas[0].mapoffset,
++ PROT_READ, MAP_PRIVATE, spaBuffer->datas[0].fd, 0)),
++ spaBuffer->datas[0].maxsize + spaBuffer->datas[0].mapoffset,
++#if PW_CHECK_VERSION(0, 3, 0)
++ spaBuffer->datas[0].type == SPA_DATA_DmaBuf,
++#else
++ spaBuffer->datas[0].type == pw_core_type_->data.DmaBuf,
++#endif
++ spaBuffer->datas[0].fd);
++
++ if (!map) {
++ RTC_LOG(LS_ERROR) << "Failed to mmap the memory: "
++ << std::strerror(errno);
++ return;
++ }
++
++#if PW_CHECK_VERSION(0, 3, 0)
++ if (spaBuffer->datas[0].type == SPA_DATA_DmaBuf) {
++#else
++ if (spaBuffer->datas[0].type == pw_core_type_->data.DmaBuf) {
++#endif
++ SyncDmaBuf(spaBuffer->datas[0].fd, DMA_BUF_SYNC_START);
++ }
++
++ src = SPA_MEMBER(map.get(), spaBuffer->datas[0].mapoffset, uint8_t);
++#if PW_CHECK_VERSION(0, 3, 0)
++ } else if (spaBuffer->datas[0].type == SPA_DATA_MemPtr) {
++#else
++ } else if (spaBuffer->datas[0].type == pw_core_type_->data.MemPtr) {
++#endif
++ src = static_cast<uint8_t*>(spaBuffer->datas[0].data);
++ }
+
+- if (!(src = spaBuffer->datas[0].data)) {
++ if (!src) {
++ return;
++ }
++
++#if PW_CHECK_VERSION(0, 3, 0)
++ struct spa_meta_region* video_metadata =
++ static_cast<struct spa_meta_region*>(spa_buffer_find_meta_data(
++ spaBuffer, SPA_META_VideoCrop, sizeof(*video_metadata)));
++#else
++ struct spa_meta_video_crop* video_metadata =
++ static_cast<struct spa_meta_video_crop*>(
++ spa_buffer_find_meta(spaBuffer, pw_core_type_->meta.VideoCrop));
++#endif
++
++ // Video size from metadata is bigger than an actual video stream size.
++ // The metadata are wrong or we should up-scale the video...in both cases
++ // just quit now.
++#if PW_CHECK_VERSION(0, 3, 0)
++ if (video_metadata && (video_metadata->region.size.width >
++ static_cast<uint32_t>(desktop_size_.width()) ||
++ video_metadata->region.size.height >
++ static_cast<uint32_t>(desktop_size_.height()))) {
++#else
++ if (video_metadata && (video_metadata->width > desktop_size_.width() ||
++ video_metadata->height > desktop_size_.height())) {
++#endif
++ RTC_LOG(LS_ERROR) << "Stream metadata sizes are wrong!";
+ return;
+ }
+
+- uint32_t maxSize = spaBuffer->datas[0].maxsize;
+- int32_t srcStride = spaBuffer->datas[0].chunk->stride;
+- if (srcStride != (desktop_size_.width() * kBytesPerPixel)) {
++ // Use video metadata when video size from metadata is set and smaller than
++ // video stream size, so we need to adjust it.
++ bool video_is_full_width = true;
++ bool video_is_full_height = true;
++#if PW_CHECK_VERSION(0, 3, 0)
++ if (video_metadata && video_metadata->region.size.width != 0 &&
++ video_metadata->region.size.height != 0) {
++ if (video_metadata->region.size.width <
++ static_cast<uint32_t>(desktop_size_.width())) {
++ video_is_full_width = false;
++ } else if (video_metadata->region.size.height <
++ static_cast<uint32_t>(desktop_size_.height())) {
++ video_is_full_height = false;
++ }
++ }
++#else
++ if (video_metadata && video_metadata->width != 0 &&
++ video_metadata->height != 0) {
++ if (video_metadata->width < desktop_size_.width()) {
++ } else if (video_metadata->height < desktop_size_.height()) {
++ video_is_full_height = false;
++ }
++ }
++#endif
++
++ DesktopSize video_size_prev = video_size_;
++ if (!video_is_full_height || !video_is_full_width) {
++#if PW_CHECK_VERSION(0, 3, 0)
++ video_size_ = DesktopSize(video_metadata->region.size.width,
++ video_metadata->region.size.height);
++#else
++ video_size_ = DesktopSize(video_metadata->width, video_metadata->height);
++#endif
++ } else {
++ video_size_ = desktop_size_;
++ }
++
++ webrtc::MutexLock lock(&current_frame_lock_);
++ if (!current_frame_ || !video_size_.equals(video_size_prev)) {
++ current_frame_ = std::make_unique<uint8_t[]>(
++ video_size_.width() * video_size_.height() * kBytesPerPixel);
++ }
++
++ const int32_t dst_stride = video_size_.width() * kBytesPerPixel;
++ const int32_t src_stride = spaBuffer->datas[0].chunk->stride;
++
++ if (src_stride != (desktop_size_.width() * kBytesPerPixel)) {
+ RTC_LOG(LS_ERROR) << "Got buffer with stride different from screen stride: "
+- << srcStride
++ << src_stride
+ << " != " << (desktop_size_.width() * kBytesPerPixel);
+ portal_init_failed_ = true;
++
+ return;
+ }
+
+- if (!current_frame_) {
+- current_frame_ = static_cast<uint8_t*>(malloc(maxSize));
+- }
+- RTC_DCHECK(current_frame_ != nullptr);
+-
+- // If both sides decided to go with the RGBx format we need to convert it to
+- // BGRx to match color format expected by WebRTC.
+- if (spa_video_format_->format == pw_type_->video_format.RGBx) {
+- uint8_t* tempFrame = static_cast<uint8_t*>(malloc(maxSize));
+- std::memcpy(tempFrame, src, maxSize);
+- ConvertRGBxToBGRx(tempFrame, maxSize);
+- std::memcpy(current_frame_, tempFrame, maxSize);
+- free(tempFrame);
+- } else {
+- std::memcpy(current_frame_, src, maxSize);
++ // Adjust source content based on metadata video position
++#if PW_CHECK_VERSION(0, 3, 0)
++ if (!video_is_full_height &&
++ (video_metadata->region.position.y + video_size_.height() <=
++ desktop_size_.height())) {
++ src += src_stride * video_metadata->region.position.y;
++ }
++ const int x_offset =
++ !video_is_full_width &&
++ (video_metadata->region.position.x + video_size_.width() <=
++ desktop_size_.width())
++ ? video_metadata->region.position.x * kBytesPerPixel
++ : 0;
++#else
++ if (!video_is_full_height &&
++ (video_metadata->y + video_size_.height() <= desktop_size_.height())) {
++ src += src_stride * video_metadata->y;
++ }
++
++ const int x_offset =
++ !video_is_full_width &&
++ (video_metadata->x + video_size_.width() <= desktop_size_.width())
++ ? video_metadata->x * kBytesPerPixel
++ : 0;
++#endif
++
++ uint8_t* dst = current_frame_.get();
++ for (int i = 0; i < video_size_.height(); ++i) {
++ // Adjust source content based on crop video position if needed
++ src += x_offset;
++ std::memcpy(dst, src, dst_stride);
++ // If both sides decided to go with the RGBx format we need to convert it to
++ // BGRx to match color format expected by WebRTC.
++#if PW_CHECK_VERSION(0, 3, 0)
++ if (spa_video_format_.format == SPA_VIDEO_FORMAT_RGBx ||
++ spa_video_format_.format == SPA_VIDEO_FORMAT_RGBA) {
++#else
++ if (spa_video_format_->format == pw_type_->video_format.RGBx ||
++ spa_video_format_->format == pw_type_->video_format.RGBA) {
++#endif
++ ConvertRGBxToBGRx(dst, dst_stride);
++ }
++ src += src_stride - x_offset;
++ dst += dst_stride;
+ }
+ }
+
+@@ -441,14 +898,13 @@ void BaseCapturerPipeWire::OnProxyRequested(GObject* /*object*/,
+ BaseCapturerPipeWire* that = static_cast<BaseCapturerPipeWire*>(user_data);
+ RTC_DCHECK(that);
+
+- GError* error = nullptr;
+- GDBusProxy *proxy = g_dbus_proxy_new_finish(result, &error);
++ Scoped<GError> error;
++ GDBusProxy* proxy = g_dbus_proxy_new_finish(result, error.receive());
+ if (!proxy) {
+- if (g_error_matches(error, G_IO_ERROR, G_IO_ERROR_CANCELLED))
++ if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
+ return;
+ RTC_LOG(LS_ERROR) << "Failed to create a proxy for the screen cast portal: "
+ << error->message;
+- g_error_free(error);
+ that->portal_init_failed_ = true;
+ return;
+ }
+@@ -462,38 +918,36 @@ void BaseCapturerPipeWire::OnProxyRequested(GObject* /*object*/,
+ // static
+ gchar* BaseCapturerPipeWire::PrepareSignalHandle(GDBusConnection* connection,
+ const gchar* token) {
+- gchar* sender = g_strdup(g_dbus_connection_get_unique_name(connection) + 1);
+- for (int i = 0; sender[i]; i++) {
+- if (sender[i] == '.') {
+- sender[i] = '_';
++ Scoped<gchar> sender(
++ g_strdup(g_dbus_connection_get_unique_name(connection) + 1));
++ for (int i = 0; sender.get()[i]; i++) {
++ if (sender.get()[i] == '.') {
++ sender.get()[i] = '_';
+ }
+ }
+
+- gchar* handle = g_strconcat(kDesktopRequestObjectPath, "/", sender, "/",
++ gchar* handle = g_strconcat(kDesktopRequestObjectPath, "/", sender.get(), "/",
+ token, /*end of varargs*/ nullptr);
+- g_free(sender);
+
+ return handle;
+ }
+
+ void BaseCapturerPipeWire::SessionRequest() {
+ GVariantBuilder builder;
+- gchar* variant_string;
++ Scoped<gchar> variant_string;
+
+ g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT);
+ variant_string =
+ g_strdup_printf("webrtc_session%d", g_random_int_range(0, G_MAXINT));
+ g_variant_builder_add(&builder, "{sv}", "session_handle_token",
+- g_variant_new_string(variant_string));
+- g_free(variant_string);
++ g_variant_new_string(variant_string.get()));
+ variant_string = g_strdup_printf("webrtc%d", g_random_int_range(0, G_MAXINT));
+ g_variant_builder_add(&builder, "{sv}", "handle_token",
+- g_variant_new_string(variant_string));
++ g_variant_new_string(variant_string.get()));
+
+- portal_handle_ = PrepareSignalHandle(connection_, variant_string);
++ portal_handle_ = PrepareSignalHandle(connection_, variant_string.get());
+ session_request_signal_id_ = SetupRequestResponseSignal(
+ portal_handle_, OnSessionRequestResponseSignal);
+- g_free(variant_string);
+
+ RTC_LOG(LS_INFO) << "Screen cast session requested.";
+ g_dbus_proxy_call(
+@@ -509,22 +963,21 @@ void BaseCapturerPipeWire::OnSessionRequested(GDBusProxy *proxy,
+ BaseCapturerPipeWire* that = static_cast<BaseCapturerPipeWire*>(user_data);
+ RTC_DCHECK(that);
+
+- GError* error = nullptr;
+- GVariant* variant = g_dbus_proxy_call_finish(proxy, result, &error);
++ Scoped<GError> error;
++ Scoped<GVariant> variant(
++ g_dbus_proxy_call_finish(proxy, result, error.receive()));
+ if (!variant) {
+- if (g_error_matches(error, G_IO_ERROR, G_IO_ERROR_CANCELLED))
++ if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
+ return;
+ RTC_LOG(LS_ERROR) << "Failed to create a screen cast session: "
+ << error->message;
+- g_error_free(error);
+ that->portal_init_failed_ = true;
+ return;
+ }
+ RTC_LOG(LS_INFO) << "Initializing the screen cast session.";
+
+- gchar* handle = nullptr;
+- g_variant_get_child(variant, 0, "o", &handle);
+- g_variant_unref(variant);
++ Scoped<gchar> handle;
++ g_variant_get_child(variant.get(), 0, "o", &handle);
+ if (!handle) {
+ RTC_LOG(LS_ERROR) << "Failed to initialize the screen cast session.";
+ if (that->session_request_signal_id_) {
+@@ -536,8 +989,6 @@ void BaseCapturerPipeWire::OnSessionRequested(GDBusProxy *proxy,
+ return;
+ }
+
+- g_free(handle);
+-
+ RTC_LOG(LS_INFO) << "Subscribing to the screen cast session.";
+ }
+
+@@ -557,11 +1008,11 @@ void BaseCapturerPipeWire::OnSessionRequestResponseSignal(
+ << "Received response for the screen cast session subscription.";
+
+ guint32 portal_response;
+- GVariant* response_data;
+- g_variant_get(parameters, "(u@a{sv})", &portal_response, &response_data);
+- g_variant_lookup(response_data, "session_handle", "s",
++ Scoped<GVariant> response_data;
++ g_variant_get(parameters, "(u@a{sv})", &portal_response,
++ response_data.receive());
++ g_variant_lookup(response_data.get(), "session_handle", "s",
+ &that->session_handle_);
+- g_variant_unref(response_data);
+
+ if (!that->session_handle_ || portal_response) {
+ RTC_LOG(LS_ERROR)
+@@ -575,23 +1026,23 @@ void BaseCapturerPipeWire::OnSessionRequestResponseSignal(
+
+ void BaseCapturerPipeWire::SourcesRequest() {
+ GVariantBuilder builder;
+- gchar* variant_string;
++ Scoped<gchar> variant_string;
+
+ g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT);
+ // We want to record monitor content.
+- g_variant_builder_add(&builder, "{sv}", "types",
+- g_variant_new_uint32(capture_source_type_));
++ g_variant_builder_add(
++ &builder, "{sv}", "types",
++ g_variant_new_uint32(static_cast<uint32_t>(capture_source_type_)));
+ // We don't want to allow selection of multiple sources.
+ g_variant_builder_add(&builder, "{sv}", "multiple",
+ g_variant_new_boolean(false));
+ variant_string = g_strdup_printf("webrtc%d", g_random_int_range(0, G_MAXINT));
+ g_variant_builder_add(&builder, "{sv}", "handle_token",
+- g_variant_new_string(variant_string));
++ g_variant_new_string(variant_string.get()));
+
+- sources_handle_ = PrepareSignalHandle(connection_, variant_string);
++ sources_handle_ = PrepareSignalHandle(connection_, variant_string.get());
+ sources_request_signal_id_ = SetupRequestResponseSignal(
+ sources_handle_, OnSourcesRequestResponseSignal);
+- g_free(variant_string);
+
+ RTC_LOG(LS_INFO) << "Requesting sources from the screen cast session.";
+ g_dbus_proxy_call(
+@@ -608,22 +1059,21 @@ void BaseCapturerPipeWire::OnSourcesRequested(GDBusProxy *proxy,
+ BaseCapturerPipeWire* that = static_cast<BaseCapturerPipeWire*>(user_data);
+ RTC_DCHECK(that);
+
+- GError* error = nullptr;
+- GVariant* variant = g_dbus_proxy_call_finish(proxy, result, &error);
++ Scoped<GError> error;
++ Scoped<GVariant> variant(
++ g_dbus_proxy_call_finish(proxy, result, error.receive()));
+ if (!variant) {
+- if (g_error_matches(error, G_IO_ERROR, G_IO_ERROR_CANCELLED))
++ if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
+ return;
+ RTC_LOG(LS_ERROR) << "Failed to request the sources: " << error->message;
+- g_error_free(error);
+ that->portal_init_failed_ = true;
+ return;
+ }
+
+ RTC_LOG(LS_INFO) << "Sources requested from the screen cast session.";
+
+- gchar* handle = nullptr;
+- g_variant_get_child(variant, 0, "o", &handle);
+- g_variant_unref(variant);
++ Scoped<gchar> handle;
++ g_variant_get_child(variant.get(), 0, "o", handle.receive());
+ if (!handle) {
+ RTC_LOG(LS_ERROR) << "Failed to initialize the screen cast session.";
+ if (that->sources_request_signal_id_) {
+@@ -635,8 +1085,6 @@ void BaseCapturerPipeWire::OnSourcesRequested(GDBusProxy *proxy,
+ return;
+ }
+
+- g_free(handle);
+-
+ RTC_LOG(LS_INFO) << "Subscribed to sources signal.";
+ }
+
+@@ -668,17 +1116,16 @@ void BaseCapturerPipeWire::OnSourcesRequestResponseSignal(
+
+ void BaseCapturerPipeWire::StartRequest() {
+ GVariantBuilder builder;
+- gchar* variant_string;
++ Scoped<gchar> variant_string;
+
+ g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT);
+ variant_string = g_strdup_printf("webrtc%d", g_random_int_range(0, G_MAXINT));
+ g_variant_builder_add(&builder, "{sv}", "handle_token",
+- g_variant_new_string(variant_string));
++ g_variant_new_string(variant_string.get()));
+
+- start_handle_ = PrepareSignalHandle(connection_, variant_string);
++ start_handle_ = PrepareSignalHandle(connection_, variant_string.get());
+ start_request_signal_id_ =
+ SetupRequestResponseSignal(start_handle_, OnStartRequestResponseSignal);
+- g_free(variant_string);
+
+ // "Identifier for the application window", this is Wayland, so not "x11:...".
+ const gchar parent_window[] = "";
+@@ -698,23 +1145,22 @@ void BaseCapturerPipeWire::OnStartRequested(GDBusProxy *proxy,
+ BaseCapturerPipeWire* that = static_cast<BaseCapturerPipeWire*>(user_data);
+ RTC_DCHECK(that);
+
+- GError* error = nullptr;
+- GVariant* variant = g_dbus_proxy_call_finish(proxy, result, &error);
++ Scoped<GError> error;
++ Scoped<GVariant> variant(
++ g_dbus_proxy_call_finish(proxy, result, error.receive()));
+ if (!variant) {
+- if (g_error_matches(error, G_IO_ERROR, G_IO_ERROR_CANCELLED))
++ if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
+ return;
+ RTC_LOG(LS_ERROR) << "Failed to start the screen cast session: "
+ << error->message;
+- g_error_free(error);
+ that->portal_init_failed_ = true;
+ return;
+ }
+
+ RTC_LOG(LS_INFO) << "Initializing the start of the screen cast session.";
+
+- gchar* handle = nullptr;
+- g_variant_get_child(variant, 0, "o", &handle);
+- g_variant_unref(variant);
++ Scoped<gchar> handle;
++ g_variant_get_child(variant.get(), 0, "o", handle.receive());
+ if (!handle) {
+ RTC_LOG(LS_ERROR)
+ << "Failed to initialize the start of the screen cast session.";
+@@ -727,8 +1173,6 @@ void BaseCapturerPipeWire::OnStartRequested(GDBusProxy *proxy,
+ return;
+ }
+
+- g_free(handle);
+-
+ RTC_LOG(LS_INFO) << "Subscribed to the start signal.";
+ }
+
+@@ -746,9 +1190,10 @@ void BaseCapturerPipeWire::OnStartRequestResponseSignal(
+
+ RTC_LOG(LS_INFO) << "Start signal received.";
+ guint32 portal_response;
+- GVariant* response_data;
+- GVariantIter* iter = nullptr;
+- g_variant_get(parameters, "(u@a{sv})", &portal_response, &response_data);
++ Scoped<GVariant> response_data;
++ Scoped<GVariantIter> iter;
++ g_variant_get(parameters, "(u@a{sv})", &portal_response,
++ response_data.receive());
+ if (portal_response || !response_data) {
+ RTC_LOG(LS_ERROR) << "Failed to start the screen cast session.";
+ that->portal_init_failed_ = true;
+@@ -758,28 +1203,28 @@ void BaseCapturerPipeWire::OnStartRequestResponseSignal(
+ // Array of PipeWire streams. See
+ // https://github.com/flatpak/xdg-desktop-portal/blob/master/data/org.freedesktop.portal.ScreenCast.xml
+ // documentation for <method name="Start">.
+- if (g_variant_lookup(response_data, "streams", "a(ua{sv})", &iter)) {
+- GVariant* variant;
++ if (g_variant_lookup(response_data.get(), "streams", "a(ua{sv})",
++ iter.receive())) {
++ Scoped<GVariant> variant;
+
+- while (g_variant_iter_next(iter, "@(ua{sv})", &variant)) {
++ while (g_variant_iter_next(iter.get(), "@(ua{sv})", variant.receive())) {
+ guint32 stream_id;
+- gint32 width;
+- gint32 height;
+- GVariant* options;
++ guint32 type;
++ Scoped<GVariant> options;
+
+- g_variant_get(variant, "(u@a{sv})", &stream_id, &options);
+- RTC_DCHECK(options != nullptr);
++ g_variant_get(variant.get(), "(u@a{sv})", &stream_id, options.receive());
++ RTC_DCHECK(options.get());
+
+- g_variant_lookup(options, "size", "(ii)", &width, &height);
++ if (g_variant_lookup(options.get(), "source_type", "u", &type)) {
++ that->capture_source_type_ =
++ static_cast<BaseCapturerPipeWire::CaptureSourceType>(type);
++ }
+
+- that->desktop_size_.set(width, height);
++ that->pw_stream_node_id_ = stream_id;
+
+- g_variant_unref(options);
+- g_variant_unref(variant);
++ break;
+ }
+ }
+- g_variant_iter_free(iter);
+- g_variant_unref(response_data);
+
+ that->OpenPipeWireRemote();
+ }
+@@ -807,35 +1252,30 @@ void BaseCapturerPipeWire::OnOpenPipeWireRemoteRequested(
+ BaseCapturerPipeWire* that = static_cast<BaseCapturerPipeWire*>(user_data);
+ RTC_DCHECK(that);
+
+- GError* error = nullptr;
+- GUnixFDList* outlist = nullptr;
+- GVariant* variant = g_dbus_proxy_call_with_unix_fd_list_finish(
+- proxy, &outlist, result, &error);
++ Scoped<GError> error;
++ Scoped<GUnixFDList> outlist;
++ Scoped<GVariant> variant(g_dbus_proxy_call_with_unix_fd_list_finish(
++ proxy, outlist.receive(), result, error.receive()));
+ if (!variant) {
+- if (g_error_matches(error, G_IO_ERROR, G_IO_ERROR_CANCELLED))
++ if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
+ return;
+ RTC_LOG(LS_ERROR) << "Failed to open the PipeWire remote: "
+ << error->message;
+- g_error_free(error);
+ that->portal_init_failed_ = true;
+ return;
+ }
+
+ gint32 index;
+- g_variant_get(variant, "(h)", &index);
++ g_variant_get(variant.get(), "(h)", &index);
+
+- if ((that->pw_fd_ = g_unix_fd_list_get(outlist, index, &error)) == -1) {
++ if ((that->pw_fd_ =
++ g_unix_fd_list_get(outlist.get(), index, error.receive())) == -1) {
+ RTC_LOG(LS_ERROR) << "Failed to get file descriptor from the list: "
+ << error->message;
+- g_error_free(error);
+- g_variant_unref(variant);
+ that->portal_init_failed_ = true;
+ return;
+ }
+
+- g_variant_unref(variant);
+- g_object_unref(outlist);
+-
+ that->InitPipeWire();
+ }
+
+@@ -854,15 +1294,18 @@ void BaseCapturerPipeWire::CaptureFrame() {
+ return;
+ }
+
++ webrtc::MutexLock lock(&current_frame_lock_);
+ if (!current_frame_) {
+ callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr);
+ return;
+ }
+
+- std::unique_ptr<DesktopFrame> result(new BasicDesktopFrame(desktop_size_));
++ DesktopSize frame_size = video_size_;
++
++ std::unique_ptr<DesktopFrame> result(new BasicDesktopFrame(frame_size));
+ result->CopyPixelsFrom(
+- current_frame_, (desktop_size_.width() * kBytesPerPixel),
+- DesktopRect::MakeWH(desktop_size_.width(), desktop_size_.height()));
++ current_frame_.get(), (frame_size.width() * kBytesPerPixel),
++ DesktopRect::MakeWH(frame_size.width(), frame_size.height()));
+ if (!result) {
+ callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr);
+ return;
+@@ -887,4 +1330,11 @@ bool BaseCapturerPipeWire::SelectSource(SourceId id) {
+ return true;
+ }
+
++// static
++std::unique_ptr<DesktopCapturer> BaseCapturerPipeWire::CreateRawCapturer(
++ const DesktopCaptureOptions& options) {
++ return std::make_unique<BaseCapturerPipeWire>(
++ BaseCapturerPipeWire::CaptureSourceType::kAny);
++}
++
+ } // namespace webrtc
+diff --git a/chromium/third_party/webrtc/modules/desktop_capture/linux/base_capturer_pipewire.h b/chromium/third_party/webrtc/modules/desktop_capture/linux/base_capturer_pipewire.h
+index f28d7a558bc..75d20dbf1db 100644
+--- a/chromium/third_party/webrtc/modules/desktop_capture/linux/base_capturer_pipewire.h
++++ b/chromium/third_party/webrtc/modules/desktop_capture/linux/base_capturer_pipewire.h
+@@ -10,18 +10,23 @@
+
+ #ifndef MODULES_DESKTOP_CAPTURE_LINUX_BASE_CAPTURER_PIPEWIRE_H_
+ #define MODULES_DESKTOP_CAPTURE_LINUX_BASE_CAPTURER_PIPEWIRE_H_
+-
+ #include <gio/gio.h>
+ #define typeof __typeof__
+ #include <pipewire/pipewire.h>
+ #include <spa/param/video/format-utils.h>
++#if PW_CHECK_VERSION(0, 3, 0)
++#include <spa/utils/result.h>
++#endif
+
++#include "absl/types/optional.h"
+ #include "modules/desktop_capture/desktop_capture_options.h"
+ #include "modules/desktop_capture/desktop_capturer.h"
+ #include "rtc_base/constructor_magic.h"
++#include "rtc_base/synchronization/mutex.h"
+
+ namespace webrtc {
+
++#if !PW_CHECK_VERSION(0, 3, 0)
+ class PipeWireType {
+ public:
+ spa_type_media_type media_type;
+@@ -29,14 +34,25 @@ class PipeWireType {
+ spa_type_format_video format_video;
+ spa_type_video_format video_format;
+ };
++#endif
+
+ class BaseCapturerPipeWire : public DesktopCapturer {
+ public:
+- enum CaptureSourceType { Screen = 1, Window };
++ // Values are set based on source type property in
++ // xdg-desktop-portal/screencast
++ // https://github.com/flatpak/xdg-desktop-portal/blob/master/data/org.freedesktop.portal.ScreenCast.xml
++ enum class CaptureSourceType : uint32_t {
++ kScreen = 0b01,
++ kWindow = 0b10,
++ kAny = 0b11
++ };
+
+ explicit BaseCapturerPipeWire(CaptureSourceType source_type);
+ ~BaseCapturerPipeWire() override;
+
++ static std::unique_ptr<DesktopCapturer> CreateRawCapturer(
++ const DesktopCaptureOptions& options);
++
+ // DesktopCapturer interface.
+ void Start(Callback* delegate) override;
+ void CaptureFrame() override;
+@@ -45,6 +61,21 @@ class BaseCapturerPipeWire : public DesktopCapturer {
+
+ private:
+ // PipeWire types -->
++#if PW_CHECK_VERSION(0, 3, 0)
++ struct pw_context* pw_context_ = nullptr;
++ struct pw_core* pw_core_ = nullptr;
++ struct pw_stream* pw_stream_ = nullptr;
++ struct pw_thread_loop* pw_main_loop_ = nullptr;
++
++ spa_hook spa_core_listener_;
++ spa_hook spa_stream_listener_;
++
++ // event handlers
++ pw_core_events pw_core_events_ = {};
++ pw_stream_events pw_stream_events_ = {};
++
++ struct spa_video_info_raw spa_video_format_;
++#else
+ pw_core* pw_core_ = nullptr;
+ pw_type* pw_core_type_ = nullptr;
+ pw_stream* pw_stream_ = nullptr;
+@@ -60,11 +91,13 @@ class BaseCapturerPipeWire : public DesktopCapturer {
+ pw_remote_events pw_remote_events_ = {};
+
+ spa_video_info_raw* spa_video_format_ = nullptr;
++#endif
+
++ guint32 pw_stream_node_id_ = 0;
+ gint32 pw_fd_ = -1;
+
+ CaptureSourceType capture_source_type_ =
+- BaseCapturerPipeWire::CaptureSourceType::Screen;
++ BaseCapturerPipeWire::CaptureSourceType::kScreen;
+
+ // <-- end of PipeWire types
+
+@@ -79,10 +112,12 @@ class BaseCapturerPipeWire : public DesktopCapturer {
+ guint sources_request_signal_id_ = 0;
+ guint start_request_signal_id_ = 0;
+
++ DesktopSize video_size_;
+ DesktopSize desktop_size_ = {};
+ DesktopCaptureOptions options_ = {};
+
+- uint8_t* current_frame_ = nullptr;
++ webrtc::Mutex current_frame_lock_;
++ std::unique_ptr<uint8_t[]> current_frame_;
+ Callback* callback_ = nullptr;
+
+ bool portal_init_failed_ = false;
+@@ -91,21 +126,32 @@ class BaseCapturerPipeWire : public DesktopCapturer {
+ void InitPipeWire();
+ void InitPipeWireTypes();
+
+- void CreateReceivingStream();
++ pw_stream* CreateReceivingStream();
+ void HandleBuffer(pw_buffer* buffer);
+
+ void ConvertRGBxToBGRx(uint8_t* frame, uint32_t size);
+
++#if PW_CHECK_VERSION(0, 3, 0)
++ static void OnCoreError(void* data,
++ uint32_t id,
++ int seq,
++ int res,
++ const char* message);
++ static void OnStreamParamChanged(void* data,
++ uint32_t id,
++ const struct spa_pod* format);
++#else
+ static void OnStateChanged(void* data,
+ pw_remote_state old_state,
+ pw_remote_state state,
+ const char* error);
++ static void OnStreamFormatChanged(void* data, const struct spa_pod* format);
++#endif
+ static void OnStreamStateChanged(void* data,
+ pw_stream_state old_state,
+ pw_stream_state state,
+ const char* error_message);
+
+- static void OnStreamFormatChanged(void* data, const struct spa_pod* format);
+ static void OnStreamProcess(void* data);
+ static void OnNewBuffer(void* data, uint32_t id);
+
+diff --git a/chromium/third_party/webrtc/modules/desktop_capture/linux/pipewire.sigs b/chromium/third_party/webrtc/modules/desktop_capture/linux/pipewire.sigs
+deleted file mode 100644
+index 3e21e9dc07c..00000000000
+--- a/chromium/third_party/webrtc/modules/desktop_capture/linux/pipewire.sigs
++++ /dev/null
+@@ -1,44 +0,0 @@
+-// Copyright 2018 The WebRTC project authors. All rights reserved.
+-// Use of this source code is governed by a BSD-style license that can be
+-// found in the LICENSE file.
+-
+-//------------------------------------------------
+-// Functions from PipeWire used in capturer code.
+-//------------------------------------------------
+-
+-// core.h
+-void pw_core_destroy(pw_core *core);
+-pw_type *pw_core_get_type(pw_core *core);
+-pw_core * pw_core_new(pw_loop *main_loop, pw_properties *props);
+-
+-// loop.h
+-void pw_loop_destroy(pw_loop *loop);
+-pw_loop * pw_loop_new(pw_properties *properties);
+-
+-// pipewire.h
+-void pw_init(int *argc, char **argv[]);
+-
+-// properties.h
+-pw_properties * pw_properties_new_string(const char *args);
+-
+-// remote.h
+-void pw_remote_add_listener(pw_remote *remote, spa_hook *listener, const pw_remote_events *events, void *data);
+-int pw_remote_connect_fd(pw_remote *remote, int fd);
+-void pw_remote_destroy(pw_remote *remote);
+-pw_remote * pw_remote_new(pw_core *core, pw_properties *properties, size_t user_data_size);
+-
+-// stream.h
+-void pw_stream_add_listener(pw_stream *stream, spa_hook *listener, const pw_stream_events *events, void *data);
+-int pw_stream_connect(pw_stream *stream, enum pw_direction direction, const char *port_path, enum pw_stream_flags flags, const spa_pod **params, uint32_t n_params);
+-pw_buffer *pw_stream_dequeue_buffer(pw_stream *stream);
+-void pw_stream_destroy(pw_stream *stream);
+-void pw_stream_finish_format(pw_stream *stream, int res, const spa_pod **params, uint32_t n_params);
+-pw_stream * pw_stream_new(pw_remote *remote, const char *name, pw_properties *props);
+-int pw_stream_queue_buffer(pw_stream *stream, pw_buffer *buffer);
+-int pw_stream_set_active(pw_stream *stream, bool active);
+-
+-// thread-loop.h
+-void pw_thread_loop_destroy(pw_thread_loop *loop);
+-pw_thread_loop * pw_thread_loop_new(pw_loop *loop, const char *name);
+-int pw_thread_loop_start(pw_thread_loop *loop);
+-void pw_thread_loop_stop(pw_thread_loop *loop);
+diff --git a/chromium/third_party/webrtc/modules/desktop_capture/linux/screen_capturer_pipewire.cc b/chromium/third_party/webrtc/modules/desktop_capture/linux/screen_capturer_pipewire.cc
+deleted file mode 100644
+index fe672140cca..00000000000
+--- a/chromium/third_party/webrtc/modules/desktop_capture/linux/screen_capturer_pipewire.cc
++++ /dev/null
+@@ -1,29 +0,0 @@
+-/*
+- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+- *
+- * Use of this source code is governed by a BSD-style license
+- * that can be found in the LICENSE file in the root of the source
+- * tree. An additional intellectual property rights grant can be found
+- * in the file PATENTS. All contributing project authors may
+- * be found in the AUTHORS file in the root of the source tree.
+- */
+-
+-#include "modules/desktop_capture/linux/screen_capturer_pipewire.h"
+-
+-#include <memory>
+-
+-
+-namespace webrtc {
+-
+-ScreenCapturerPipeWire::ScreenCapturerPipeWire()
+- : BaseCapturerPipeWire(BaseCapturerPipeWire::CaptureSourceType::Screen) {}
+-ScreenCapturerPipeWire::~ScreenCapturerPipeWire() {}
+-
+-// static
+-std::unique_ptr<DesktopCapturer>
+-ScreenCapturerPipeWire::CreateRawScreenCapturer(
+- const DesktopCaptureOptions& options) {
+- return std::make_unique<ScreenCapturerPipeWire>();
+-}
+-
+-} // namespace webrtc
+diff --git a/chromium/third_party/webrtc/modules/desktop_capture/linux/screen_capturer_pipewire.h b/chromium/third_party/webrtc/modules/desktop_capture/linux/screen_capturer_pipewire.h
+deleted file mode 100644
+index 66dcd680e06..00000000000
+--- a/chromium/third_party/webrtc/modules/desktop_capture/linux/screen_capturer_pipewire.h
++++ /dev/null
+@@ -1,33 +0,0 @@
+-/*
+- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+- *
+- * Use of this source code is governed by a BSD-style license
+- * that can be found in the LICENSE file in the root of the source
+- * tree. An additional intellectual property rights grant can be found
+- * in the file PATENTS. All contributing project authors may
+- * be found in the AUTHORS file in the root of the source tree.
+- */
+-
+-#ifndef MODULES_DESKTOP_CAPTURE_LINUX_SCREEN_CAPTURER_PIPEWIRE_H_
+-#define MODULES_DESKTOP_CAPTURE_LINUX_SCREEN_CAPTURER_PIPEWIRE_H_
+-
+-#include <memory>
+-
+-#include "modules/desktop_capture/linux/base_capturer_pipewire.h"
+-
+-namespace webrtc {
+-
+-class ScreenCapturerPipeWire : public BaseCapturerPipeWire {
+- public:
+- ScreenCapturerPipeWire();
+- ~ScreenCapturerPipeWire() override;
+-
+- static std::unique_ptr<DesktopCapturer> CreateRawScreenCapturer(
+- const DesktopCaptureOptions& options);
+-
+- RTC_DISALLOW_COPY_AND_ASSIGN(ScreenCapturerPipeWire);
+-};
+-
+-} // namespace webrtc
+-
+-#endif // MODULES_DESKTOP_CAPTURE_LINUX_SCREEN_CAPTURER_PIPEWIRE_H_
+diff --git a/chromium/third_party/webrtc/modules/desktop_capture/linux/window_capturer_pipewire.cc b/chromium/third_party/webrtc/modules/desktop_capture/linux/window_capturer_pipewire.cc
+deleted file mode 100644
+index b4559156dce..00000000000
+--- a/chromium/third_party/webrtc/modules/desktop_capture/linux/window_capturer_pipewire.cc
++++ /dev/null
+@@ -1,29 +0,0 @@
+-/*
+- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+- *
+- * Use of this source code is governed by a BSD-style license
+- * that can be found in the LICENSE file in the root of the source
+- * tree. An additional intellectual property rights grant can be found
+- * in the file PATENTS. All contributing project authors may
+- * be found in the AUTHORS file in the root of the source tree.
+- */
+-
+-#include "modules/desktop_capture/linux/window_capturer_pipewire.h"
+-
+-#include <memory>
+-
+-
+-namespace webrtc {
+-
+-WindowCapturerPipeWire::WindowCapturerPipeWire()
+- : BaseCapturerPipeWire(BaseCapturerPipeWire::CaptureSourceType::Window) {}
+-WindowCapturerPipeWire::~WindowCapturerPipeWire() {}
+-
+-// static
+-std::unique_ptr<DesktopCapturer>
+-WindowCapturerPipeWire::CreateRawWindowCapturer(
+- const DesktopCaptureOptions& options) {
+- return std::make_unique<WindowCapturerPipeWire>();
+-}
+-
+-} // namespace webrtc
+diff --git a/chromium/third_party/webrtc/modules/desktop_capture/linux/window_capturer_pipewire.h b/chromium/third_party/webrtc/modules/desktop_capture/linux/window_capturer_pipewire.h
+deleted file mode 100644
+index 7f184ef2999..00000000000
+--- a/chromium/third_party/webrtc/modules/desktop_capture/linux/window_capturer_pipewire.h
++++ /dev/null
+@@ -1,33 +0,0 @@
+-/*
+- * Copyright 2018 The WebRTC project authors. All Rights Reserved.
+- *
+- * Use of this source code is governed by a BSD-style license
+- * that can be found in the LICENSE file in the root of the source
+- * tree. An additional intellectual property rights grant can be found
+- * in the file PATENTS. All contributing project authors may
+- * be found in the AUTHORS file in the root of the source tree.
+- */
+-
+-#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WINDOW_CAPTURER_PIPEWIRE_H_
+-#define MODULES_DESKTOP_CAPTURE_LINUX_WINDOW_CAPTURER_PIPEWIRE_H_
+-
+-#include <memory>
+-
+-#include "modules/desktop_capture/linux/base_capturer_pipewire.h"
+-
+-namespace webrtc {
+-
+-class WindowCapturerPipeWire : public BaseCapturerPipeWire {
+- public:
+- WindowCapturerPipeWire();
+- ~WindowCapturerPipeWire() override;
+-
+- static std::unique_ptr<DesktopCapturer> CreateRawWindowCapturer(
+- const DesktopCaptureOptions& options);
+-
+- RTC_DISALLOW_COPY_AND_ASSIGN(WindowCapturerPipeWire);
+-};
+-
+-} // namespace webrtc
+-
+-#endif // MODULES_DESKTOP_CAPTURE_LINUX_WINDOW_CAPTURER_PIPEWIRE_H_
+diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_linux.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_linux.cc
+index 82dbae48137..ed48b7d6d59 100644
+--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_linux.cc
++++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_linux.cc
+@@ -14,7 +14,7 @@
+ #include "modules/desktop_capture/desktop_capturer.h"
+
+ #if defined(WEBRTC_USE_PIPEWIRE)
+-#include "modules/desktop_capture/linux/screen_capturer_pipewire.h"
++#include "modules/desktop_capture/linux/base_capturer_pipewire.h"
+ #endif // defined(WEBRTC_USE_PIPEWIRE)
+
+ #if defined(WEBRTC_USE_X11)
+@@ -28,7 +28,7 @@ std::unique_ptr<DesktopCapturer> DesktopCapturer::CreateRawScreenCapturer(
+ const DesktopCaptureOptions& options) {
+ #if defined(WEBRTC_USE_PIPEWIRE)
+ if (options.allow_pipewire() && DesktopCapturer::IsRunningUnderWayland()) {
+- return ScreenCapturerPipeWire::CreateRawScreenCapturer(options);
++ return BaseCapturerPipeWire::CreateRawCapturer(options);
+ }
+ #endif // defined(WEBRTC_USE_PIPEWIRE)
+
+diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_linux.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_linux.cc
+index 41dbf836b03..2b142ae3b92 100644
+--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_linux.cc
++++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_linux.cc
+@@ -14,7 +14,7 @@
+ #include "modules/desktop_capture/desktop_capturer.h"
+
+ #if defined(WEBRTC_USE_PIPEWIRE)
+-#include "modules/desktop_capture/linux/window_capturer_pipewire.h"
++#include "modules/desktop_capture/linux/base_capturer_pipewire.h"
+ #endif // defined(WEBRTC_USE_PIPEWIRE)
+
+ #if defined(WEBRTC_USE_X11)
+@@ -28,7 +28,7 @@ std::unique_ptr<DesktopCapturer> DesktopCapturer::CreateRawWindowCapturer(
+ const DesktopCaptureOptions& options) {
+ #if defined(WEBRTC_USE_PIPEWIRE)
+ if (options.allow_pipewire() && DesktopCapturer::IsRunningUnderWayland()) {
+- return WindowCapturerPipeWire::CreateRawWindowCapturer(options);
++ return BaseCapturerPipeWire::CreateRawCapturer(options);
+ }
+ #endif // defined(WEBRTC_USE_PIPEWIRE)
+
+diff --git a/chromium/third_party/webrtc/webrtc.gni b/chromium/third_party/webrtc/webrtc.gni
+index ca8acdbf259..505c975cece 100644
+--- a/chromium/third_party/webrtc/webrtc.gni
++++ b/chromium/third_party/webrtc/webrtc.gni
+@@ -117,6 +117,10 @@ declare_args() {
+ # Set this to link PipeWire directly instead of using the dlopen.
+ rtc_link_pipewire = false
+
++ # Set this to use certain PipeWire version
++ # Currently we support PipeWire 0.2 (default) and PipeWire 0.3
++ rtc_pipewire_version = "0.3"
++
+ # Enable to use the Mozilla internal settings.
+ build_with_mozilla = false
+
+diff --git a/chromium/third_party/webrtc/modules/desktop_capture/linux/pipewire02.sigs b/chromium/third_party/webrtc/modules/desktop_capture/linux/pipewire02.sigs
+new file mode 100644
+index 00000000000..5ac3d1d22b8
+--- /dev/null
++++ b/chromium/third_party/webrtc/modules/desktop_capture/linux/pipewire02.sigs
+@@ -0,0 +1,47 @@
++// Copyright 2018 The WebRTC project authors. All rights reserved.
++// Use of this source code is governed by a BSD-style license that can be
++// found in the LICENSE file.
++
++//------------------------------------------------
++// Functions from PipeWire used in capturer code.
++//------------------------------------------------
++
++// core.h
++void pw_core_destroy(pw_core *core);
++pw_type *pw_core_get_type(pw_core *core);
++pw_core * pw_core_new(pw_loop *main_loop, pw_properties *props);
++
++// loop.h
++void pw_loop_destroy(pw_loop *loop);
++pw_loop * pw_loop_new(pw_properties *properties);
++
++// pipewire.h
++void pw_init(int *argc, char **argv[]);
++
++// properties.h
++pw_properties * pw_properties_new_string(const char *args);
++
++// remote.h
++void pw_remote_add_listener(pw_remote *remote, spa_hook *listener, const pw_remote_events *events, void *data);
++int pw_remote_connect_fd(pw_remote *remote, int fd);
++void pw_remote_destroy(pw_remote *remote);
++pw_remote * pw_remote_new(pw_core *core, pw_properties *properties, size_t user_data_size);
++enum pw_remote_state pw_remote_get_state(pw_remote *remote, const char **error);
++
++// stream.h
++void pw_stream_add_listener(pw_stream *stream, spa_hook *listener, const pw_stream_events *events, void *data);
++int pw_stream_connect(pw_stream *stream, enum pw_direction direction, const char *port_path, enum pw_stream_flags flags, const spa_pod **params, uint32_t n_params);
++pw_buffer *pw_stream_dequeue_buffer(pw_stream *stream);
++void pw_stream_destroy(pw_stream *stream);
++void pw_stream_finish_format(pw_stream *stream, int res, const spa_pod **params, uint32_t n_params);
++pw_stream * pw_stream_new(pw_remote *remote, const char *name, pw_properties *props);
++int pw_stream_queue_buffer(pw_stream *stream, pw_buffer *buffer);
++int pw_stream_set_active(pw_stream *stream, bool active);
++
++// thread-loop.h
++void pw_thread_loop_destroy(pw_thread_loop *loop);
++pw_thread_loop * pw_thread_loop_new(pw_loop *loop, const char *name);
++int pw_thread_loop_start(pw_thread_loop *loop);
++void pw_thread_loop_stop(pw_thread_loop *loop);
++void pw_thread_loop_lock(struct pw_thread_loop *loop);
++void pw_thread_loop_unlock(struct pw_thread_loop *loop);
+diff --git a/chromium/third_party/webrtc/modules/desktop_capture/linux/pipewire03.sigs b/chromium/third_party/webrtc/modules/desktop_capture/linux/pipewire03.sigs
+new file mode 100644
+index 00000000000..78d241f40c6
+--- /dev/null
++++ b/chromium/third_party/webrtc/modules/desktop_capture/linux/pipewire03.sigs
+@@ -0,0 +1,46 @@
++// Copyright 2018 The WebRTC project authors. All rights reserved.
++// Use of this source code is governed by a BSD-style license that can be
++// found in the LICENSE file.
++
++//------------------------------------------------
++// Functions from PipeWire used in capturer code.
++//------------------------------------------------
++
++// core.h
++int pw_core_disconnect(pw_core *core);
++
++// loop.h
++void pw_loop_destroy(pw_loop *loop);
++pw_loop * pw_loop_new(const spa_dict *props);
++
++
++// pipewire.h
++void pw_init(int *argc, char **argv[]);
++
++// properties.h
++pw_properties * pw_properties_new_string(const char *args);
++
++// stream.h
++void pw_stream_add_listener(pw_stream *stream, spa_hook *listener, const pw_stream_events *events, void *data);
++int pw_stream_connect(pw_stream *stream, enum pw_direction direction, uint32_t target_id, enum pw_stream_flags flags, const spa_pod **params, uint32_t n_params);
++pw_buffer *pw_stream_dequeue_buffer(pw_stream *stream);
++void pw_stream_destroy(pw_stream *stream);
++pw_stream * pw_stream_new(pw_core *core, const char *name, pw_properties *props);
++int pw_stream_queue_buffer(pw_stream *stream, pw_buffer *buffer);
++int pw_stream_set_active(pw_stream *stream, bool active);
++int pw_stream_update_params(pw_stream *stream, const spa_pod **params, uint32_t n_params);
++
++// thread-loop.h
++void pw_thread_loop_destroy(pw_thread_loop *loop);
++pw_thread_loop * pw_thread_loop_new(const char *name, const spa_dict *props);
++int pw_thread_loop_start(pw_thread_loop *loop);
++void pw_thread_loop_stop(pw_thread_loop *loop);
++void pw_thread_loop_lock(pw_thread_loop *loop);
++void pw_thread_loop_unlock(pw_thread_loop *loop);
++pw_loop * pw_thread_loop_get_loop(pw_thread_loop *loop);
++
++
++// context.h
++void pw_context_destroy(pw_context *context);
++pw_context *pw_context_new(pw_loop *main_loop, pw_properties *props, size_t user_data_size);
++pw_core * pw_context_connect(pw_context *context, pw_properties *properties, size_t user_data_size);
================================================================
---- gitweb:
http://git.pld-linux.org/gitweb.cgi/packages/qt5-qtwebengine.git/commitdiff/70c5ed0ccc9cd1052aaf782b1f4d8e3b1de65fbf