1
0
mirror of https://invent.kde.org/network/krfb synced 2024-07-01 07:24:29 +00:00

pipewire: Port to use KPipeWire

Rather than using pipewire directly, we can use this framework which
allows us to share a bunch of code with Plasma.
This commit is contained in:
Aleix Pol i Gonzalez 2022-12-12 15:39:33 +01:00 committed by Aleix Pol Gonzalez
parent 4f924c2306
commit 4db9c65083
5 changed files with 83 additions and 598 deletions

View File

@ -19,3 +19,4 @@ Dependencies:
'frameworks/kxmlgui': '@stable'
'frameworks/kwayland': '@stable'
'libraries/plasma-wayland-protocols': '@latest' # can be switched to @stable when 1.5.0 is released
'plasma/kpipewire': '@latest'

View File

@ -73,6 +73,7 @@ find_package(LibVNCServer REQUIRED)
option(DISABLE_PIPEWIRE "Disable PipeWire support." OFF)
if(NOT DISABLE_PIPEWIRE)
find_package(KPipeWire REQUIRED)
pkg_check_modules(PipeWire IMPORTED_TARGET libpipewire-0.3)
endif()
add_feature_info(PipeWire PipeWire_FOUND "Required for pipewire screencast plugin")

View File

@ -49,6 +49,8 @@ target_link_libraries(krfb_framebuffer_pw
Wayland::Client
krfbprivate
PkgConfig::PipeWire
K::KPipeWire
K::KPipeWireDmaBuf
)
if (HAVE_DMA_BUF)

View File

@ -42,9 +42,10 @@
#include "xdp_dbus_remotedesktop_interface.h"
#include "krfb_fb_pipewire_debug.h"
#include "screencasting.h"
#include <KPipeWire/PipeWireSourceStream>
#include <KPipeWire/DmaBufHandler>
#if HAVE_DMA_BUF
#include <fcntl.h>
#include <unistd.h>
#include <gbm.h>
@ -114,47 +115,21 @@ public:
private:
friend class PWFrameBuffer;
static void onCoreError(void *data, uint32_t id, int seq, int res, const char *message);
static void onStreamParamChanged(void *data, uint32_t id, const struct spa_pod *format);
static void onStreamStateChanged(void *data, pw_stream_state old, pw_stream_state state, const char *error_message);
static void onStreamProcess(void *data);
void initDbus();
void initPw();
// dbus handling
void handleSessionCreated(quint32 &code, QVariantMap &results);
void handleDevicesSelected(quint32 &code, QVariantMap &results);
void handleSourcesSelected(quint32 &code, QVariantMap &results);
void handleRemoteDesktopStarted(quint32 &code, QVariantMap &results);
void handleSessionCreated(quint32 code, const QVariantMap &results);
void handleDevicesSelected(quint32 code, const QVariantMap &results);
void handleSourcesSelected(quint32 code, const QVariantMap &results);
void handleRemoteDesktopStarted(quint32 code, const QVariantMap &results);
void setVideoSize(const QSize &size);
// pw handling
pw_stream *createReceivingStream();
void handleFrame(pw_buffer *pwBuffer);
void handleFrame(const PipeWireFrame &frame);
// link to public interface
PWFrameBuffer *q;
// pipewire stuff
struct pw_context *pwContext = nullptr;
struct pw_core *pwCore = nullptr;
struct pw_stream *pwStream = nullptr;
struct pw_thread_loop *pwMainLoop = nullptr;
// wayland-like listeners
// ...of events that happen in pipewire server
spa_hook coreListener = {};
spa_hook streamListener = {};
// event handlers
pw_core_events pwCoreEvents = {};
pw_stream_events pwStreamEvents = {};
uint pwStreamNodeId = 0;
// negotiated video format
spa_video_info_raw *videoFormat = nullptr;
// requests a session from XDG Desktop Portal
// auto-generated and compiled from xdp_dbus_interface.xml file
QScopedPointer<OrgFreedesktopPortalScreenCastInterface> dbusXdpScreenCastService;
@ -162,113 +137,24 @@ private:
// XDP screencast session handle
QDBusObjectPath sessionPath;
// Pipewire file descriptor
QDBusUnixFileDescriptor pipewireFd;
// screen geometry holder
QSize streamSize;
QSize videoSize;
// Allowed devices
uint devices = 0;
// sanity indicator
bool isValid = true;
QImage cursorTexture;
QPoint cursorPosition;
QPoint cursorHotspot;
#if HAVE_DMA_BUF
struct EGLStruct {
QList<QByteArray> extensions;
EGLDisplay display = EGL_NO_DISPLAY;
EGLContext context = EGL_NO_CONTEXT;
};
bool m_eglInitialized = false;
qint32 m_drmFd = 0; // for GBM buffer mmap
gbm_device *m_gbmDevice = nullptr; // for passed GBM buffer retrieval
EGLStruct m_egl;
#endif /* HAVE_DMA_BUF */
std::unique_ptr<PipeWireSourceStream> stream;
std::optional<PipeWireCursor> cursor;
DmaBufHandler m_dmabufHandler;
};
PWFrameBuffer::Private::Private(PWFrameBuffer *q) : q(q)
PWFrameBuffer::Private::Private(PWFrameBuffer *q)
: q(q)
, stream(new PipeWireSourceStream(q))
{
pwCoreEvents.version = PW_VERSION_CORE_EVENTS;
pwCoreEvents.error = &onCoreError;
pwStreamEvents.version = PW_VERSION_STREAM_EVENTS;
pwStreamEvents.state_changed = &onStreamStateChanged;
pwStreamEvents.param_changed = &onStreamParamChanged;
pwStreamEvents.process = &onStreamProcess;
#if HAVE_DMA_BUF
m_drmFd = open("/dev/dri/renderD128", O_RDWR);
if (m_drmFd < 0) {
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to open drm render node: " << strerror(errno);
return;
}
m_gbmDevice = gbm_create_device(m_drmFd);
if (!m_gbmDevice) {
qCWarning(KRFB_FB_PIPEWIRE) << "Cannot create GBM device: " << strerror(errno);
return;
}
// Get the list of client extensions
const char* clientExtensionsCString = eglQueryString(EGL_NO_DISPLAY, EGL_EXTENSIONS);
const QByteArray clientExtensionsString = QByteArray::fromRawData(clientExtensionsCString, qstrlen(clientExtensionsCString));
if (clientExtensionsString.isEmpty()) {
// If eglQueryString() returned NULL, the implementation doesn't support
// EGL_EXT_client_extensions. Expect an EGL_BAD_DISPLAY error.
qCWarning(KRFB_FB_PIPEWIRE) << "No client extensions defined! " << formatGLError(eglGetError());
return;
}
m_egl.extensions = clientExtensionsString.split(' ');
// Use eglGetPlatformDisplayEXT() to get the display pointer
// if the implementation supports it.
if (!m_egl.extensions.contains(QByteArrayLiteral("EGL_EXT_platform_base")) ||
!m_egl.extensions.contains(QByteArrayLiteral("EGL_MESA_platform_gbm"))) {
qCWarning(KRFB_FB_PIPEWIRE) << "One of required EGL extensions is missing";
return;
}
m_egl.display = eglGetPlatformDisplayEXT(EGL_PLATFORM_GBM_MESA, m_gbmDevice, nullptr);
if (m_egl.display == EGL_NO_DISPLAY) {
qCWarning(KRFB_FB_PIPEWIRE) << "Error during obtaining EGL display: " << formatGLError(eglGetError());
return;
}
EGLint major, minor;
if (eglInitialize(m_egl.display, &major, &minor) == EGL_FALSE) {
qCWarning(KRFB_FB_PIPEWIRE) << "Error during eglInitialize: " << formatGLError(eglGetError());
return;
}
if (eglBindAPI(EGL_OPENGL_API) == EGL_FALSE) {
qCWarning(KRFB_FB_PIPEWIRE) << "bind OpenGL API failed";
return;
}
m_egl.context = eglCreateContext(m_egl.display, nullptr, EGL_NO_CONTEXT, nullptr);
if (m_egl.context == EGL_NO_CONTEXT) {
qCWarning(KRFB_FB_PIPEWIRE) << "Couldn't create EGL context: " << formatGLError(eglGetError());
return;
}
qCDebug(KRFB_FB_PIPEWIRE) << "Egl initialization succeeded";
qCDebug(KRFB_FB_PIPEWIRE) << QStringLiteral("EGL version: %1.%2").arg(major).arg(minor);
m_eglInitialized = true;
#endif /* HAVE_DMA_BUF */
QObject::connect(stream.get(), &PipeWireSourceStream::frameReceived, q, [this] (const PipeWireFrame &frame) {
handleFrame(frame);
});
}
/**
@ -314,7 +200,7 @@ void PWFrameBuffer::Private::initDbus()
SLOT(handleXdpSessionCreated(uint, QVariantMap)));
}
void PWFrameBuffer::handleXdpSessionCreated(quint32 code, QVariantMap results)
void PWFrameBuffer::handleXdpSessionCreated(quint32 code, const QVariantMap &results)
{
d->handleSessionCreated(code, results);
}
@ -326,7 +212,7 @@ void PWFrameBuffer::handleXdpSessionCreated(quint32 code, QVariantMap results)
* @param code return code for dbus call. Zero is success, non-zero means error
* @param results map with results of call.
*/
void PWFrameBuffer::Private::handleSessionCreated(quint32 &code, QVariantMap &results)
void PWFrameBuffer::Private::handleSessionCreated(quint32 code, const QVariantMap &results)
{
if (code != 0) {
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to create session: " << code;
@ -357,7 +243,7 @@ void PWFrameBuffer::Private::handleSessionCreated(quint32 &code, QVariantMap &re
SLOT(handleXdpDevicesSelected(uint, QVariantMap)));
}
void PWFrameBuffer::handleXdpDevicesSelected(quint32 code, QVariantMap results)
void PWFrameBuffer::handleXdpDevicesSelected(quint32 code, const QVariantMap &results)
{
d->handleDevicesSelected(code, results);
}
@ -368,7 +254,7 @@ void PWFrameBuffer::handleXdpDevicesSelected(quint32 code, QVariantMap results)
* @param code return code for dbus call. Zero is success, non-zero means error
* @param results map with results of call.
*/
void PWFrameBuffer::Private::handleDevicesSelected(quint32 &code, QVariantMap &results)
void PWFrameBuffer::Private::handleDevicesSelected(quint32 code, const QVariantMap &results)
{
Q_UNUSED(results)
if (code != 0) {
@ -398,7 +284,7 @@ void PWFrameBuffer::Private::handleDevicesSelected(quint32 &code, QVariantMap &r
SLOT(handleXdpSourcesSelected(uint, QVariantMap)));
}
void PWFrameBuffer::handleXdpSourcesSelected(quint32 code, QVariantMap results)
void PWFrameBuffer::handleXdpSourcesSelected(quint32 code, const QVariantMap &results)
{
d->handleSourcesSelected(code, results);
}
@ -411,7 +297,7 @@ void PWFrameBuffer::handleXdpSourcesSelected(quint32 code, QVariantMap results)
* @param code return code for dbus call. Zero is success, non-zero means error
* @param results map with results of call.
*/
void PWFrameBuffer::Private::handleSourcesSelected(quint32 &code, QVariantMap &)
void PWFrameBuffer::Private::handleSourcesSelected(quint32 code, const QVariantMap &)
{
if (code != 0) {
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to select sources: " << code;
@ -434,7 +320,7 @@ void PWFrameBuffer::Private::handleSourcesSelected(quint32 &code, QVariantMap &)
}
void PWFrameBuffer::handleXdpRemoteDesktopStarted(quint32 code, QVariantMap results)
void PWFrameBuffer::handleXdpRemoteDesktopStarted(quint32 code, const QVariantMap &results)
{
d->handleRemoteDesktopStarted(code, results);
}
@ -446,7 +332,7 @@ void PWFrameBuffer::handleXdpRemoteDesktopStarted(quint32 code, QVariantMap resu
* @param code return code for dbus call. Zero is success, non-zero means error
* @param results map with results of call.
*/
void PWFrameBuffer::Private::handleRemoteDesktopStarted(quint32 &code, QVariantMap &results)
void PWFrameBuffer::Private::handleRemoteDesktopStarted(quint32 code, const QVariantMap &results)
{
if (code != 0) {
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to start screencast: " << code;
@ -454,8 +340,14 @@ void PWFrameBuffer::Private::handleRemoteDesktopStarted(quint32 &code, QVariantM
return;
}
if (results.value(QStringLiteral("devices")).toUInt() == 0) {
qCWarning(KRFB_FB_PIPEWIRE) << "No devices were granted" << results;
isValid = false;
return;
}
// there should be only one stream
Streams streams = qdbus_cast<Streams>(results.value(QStringLiteral("streams")));
const Streams streams = qdbus_cast<Streams>(results.value(QStringLiteral("streams")));
if (streams.isEmpty()) {
// maybe we should check deeper with qdbus_cast but this suffices for now
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to get screencast streams";
@ -471,501 +363,85 @@ void PWFrameBuffer::Private::handleRemoteDesktopStarted(quint32 &code, QVariantM
return;
}
pipewireFd = streamReply.value();
QDBusUnixFileDescriptor pipewireFd = streamReply.value();
if (!pipewireFd.isValid()) {
qCWarning(KRFB_FB_PIPEWIRE) << "Couldn't get pipewire connection file descriptor";
isValid = false;
return;
}
devices = results.value(QStringLiteral("types")).toUInt();
pwStreamNodeId = streams.first().nodeId;
initPw();
}
/**
* @brief PWFrameBuffer::Private::initPw - initialize Pipewire socket connectivity.
* pipewireFd should be pointing to existing file descriptor that was passed by D-Bus at this point.
*/
void PWFrameBuffer::Private::initPw() {
qInfo() << "Initializing Pipewire connectivity";
// init pipewire (required)
pw_init(nullptr, nullptr); // args are not used anyways
pwMainLoop = pw_thread_loop_new("pipewire-main-loop", nullptr);
pw_thread_loop_lock(pwMainLoop);
pwContext = pw_context_new(pw_thread_loop_get_loop(pwMainLoop), nullptr, 0);
if (!pwContext) {
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to create PipeWire context";
return;
}
pwCore = pw_context_connect(pwContext, nullptr, 0);
if (!pwCore) {
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to connect PipeWire context";
return;
}
pw_core_add_listener(pwCore, &coreListener, &pwCoreEvents, this);
pwStream = createReceivingStream();
if (!pwStream) {
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to create PipeWire stream";
return;
}
if (pw_thread_loop_start(pwMainLoop) < 0) {
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to start main PipeWire loop";
if (!stream->createStream(streams.first().nodeId, pipewireFd.takeFileDescriptor())) {
qCWarning(KRFB_FB_PIPEWIRE) << "Couldn't create the pipewire stream";
isValid = false;
}
pw_thread_loop_unlock(pwMainLoop);
}
/**
 * @brief Callback invoked by the PipeWire core when a server-side error occurs.
 *
 * Matches the pw_core_events::error signature. Only the human-readable
 * message is logged; the remaining arguments are accepted solely to
 * satisfy the callback signature.
 */
void PWFrameBuffer::Private::onCoreError(void *data, uint32_t id, int seq, int res, const char *message)
{
    Q_UNUSED(data); Q_UNUSED(id); Q_UNUSED(seq); Q_UNUSED(res);
    qInfo() << "core error: " << message;
}
/**
* @brief PWFrameBuffer::Private::onStreamStateChanged - called whenever stream state changes on pipewire server
* @param data pointer that you have set in pw_stream_add_listener call's last argument
* @param state new state that stream has changed to
* @param error_message optional error message, is set to non-null if state is error
*/
/**
 * @brief Callback for PipeWire stream state transitions
 * (pw_stream_events::state_changed).
 *
 * Every transition is logged; when the stream enters the error state the
 * server-provided message is additionally emitted as a warning. All other
 * states require no action here.
 */
void PWFrameBuffer::Private::onStreamStateChanged(void *data, pw_stream_state /*old*/, pw_stream_state state, const char *error_message)
{
    Q_UNUSED(data);
    qInfo() << "Stream state changed: " << pw_stream_state_as_string(state);
    // ERROR is the only state that needs handling; the remaining states
    // (paused/streaming/unconnected/connecting) are benign transitions.
    if (state == PW_STREAM_STATE_ERROR) {
        qCWarning(KRFB_FB_PIPEWIRE) << "pipewire stream error: " << error_message;
    }
}
#define CURSOR_BPP 4
#define CURSOR_META_SIZE(w,h) (sizeof(struct spa_meta_cursor) + \
sizeof(struct spa_meta_bitmap) + w * h * CURSOR_BPP)
/**
* @brief PWFrameBuffer::Private::onStreamFormatChanged - being executed after stream is set to active
* and after setup has been requested to connect to it. The actual video format is being negotiated here.
* @param data pointer that you have set in pw_stream_add_listener call's last argument
* @param format format that's being proposed
*/
void PWFrameBuffer::Private::onStreamParamChanged(void *data, uint32_t id, const struct spa_pod *format)
{
qInfo() << "Stream format changed";
auto d = static_cast<PWFrameBuffer::Private *>(data);
if (!format || id != SPA_PARAM_Format) {
return;
}
d->videoFormat = new spa_video_info_raw();
spa_format_video_raw_parse(format, d->videoFormat);
auto width = d->videoFormat->size.width;
auto height = d->videoFormat->size.height;
auto stride = SPA_ROUND_UP_N(width * BYTES_PER_PIXEL, 4);
auto size = height * stride;
d->streamSize = QSize(width, height);
uint8_t buffer[1024];
auto builder = SPA_POD_BUILDER_INIT(buffer, sizeof(buffer));
// setup buffers and meta header for new format
#if HAVE_DMA_BUF
const auto bufferTypes = d->m_eglInitialized ? (1 << SPA_DATA_DmaBuf) | (1 << SPA_DATA_MemFd) | (1 << SPA_DATA_MemPtr) :
(1 << SPA_DATA_MemFd) | (1 << SPA_DATA_MemPtr);
#else
const auto bufferTypes = (1 << SPA_DATA_MemFd) | (1 << SPA_DATA_MemPtr);
#endif /* HAVE_DMA_BUF */
QVector<const struct spa_pod *> params = {
reinterpret_cast<spa_pod *>(spa_pod_builder_add_object(&builder,
SPA_TYPE_OBJECT_ParamBuffers, SPA_PARAM_Buffers,
SPA_PARAM_BUFFERS_size, SPA_POD_Int(size),
SPA_PARAM_BUFFERS_stride, SPA_POD_Int(stride),
SPA_PARAM_BUFFERS_buffers, SPA_POD_CHOICE_RANGE_Int(8, 1, 32),
SPA_PARAM_BUFFERS_blocks, SPA_POD_Int(1),
SPA_PARAM_BUFFERS_align, SPA_POD_Int(16),
SPA_PARAM_BUFFERS_dataType, SPA_POD_CHOICE_FLAGS_Int(bufferTypes))),
reinterpret_cast<spa_pod *>(spa_pod_builder_add_object(&builder,
SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
SPA_PARAM_META_type, SPA_POD_Id(SPA_META_Header),
SPA_PARAM_META_size, SPA_POD_Int(sizeof(struct spa_meta_header)))),
reinterpret_cast<spa_pod*>(spa_pod_builder_add_object(&builder,
SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type,
SPA_POD_Id(SPA_META_VideoCrop), SPA_PARAM_META_size,
SPA_POD_Int(sizeof(struct spa_meta_region)))),
reinterpret_cast<spa_pod*>(spa_pod_builder_add_object ( &builder,
SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
SPA_PARAM_META_type, SPA_POD_Id (SPA_META_Cursor),
SPA_PARAM_META_size, SPA_POD_CHOICE_RANGE_Int (CURSOR_META_SIZE (64, 64),
CURSOR_META_SIZE (1, 1),
CURSOR_META_SIZE (1024, 1024)))),
reinterpret_cast<spa_pod*>(spa_pod_builder_add_object ( &builder,
SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
SPA_PARAM_META_type, SPA_POD_Id(SPA_META_VideoDamage),
SPA_PARAM_META_size, SPA_POD_CHOICE_RANGE_Int(
sizeof(struct spa_meta_region) * 16,
sizeof(struct spa_meta_region) * 1,
sizeof(struct spa_meta_region) * 16))),
};
pw_stream_update_params(d->pwStream, params.data(), params.size());
setVideoSize(qdbus_cast<QSize>(streams.first().map[QStringLiteral("size")].value<QDBusArgument>()));
}
/**
* @brief PWFrameBuffer::Private::onNewBuffer - called when new buffer is available in pipewire stream
* @param data pointer that you have set in pw_stream_add_listener call's last argument
* @param id
*/
void PWFrameBuffer::Private::onStreamProcess(void *data)
void PWFrameBuffer::Private::handleFrame(const PipeWireFrame &frame)
{
auto d = static_cast<PWFrameBuffer::Private *>(data);
cursor = frame.cursor;
pw_buffer* next_buffer;
pw_buffer* buffer = nullptr;
next_buffer = pw_stream_dequeue_buffer(d->pwStream);
while (next_buffer) {
buffer = next_buffer;
next_buffer = pw_stream_dequeue_buffer(d->pwStream);
if (next_buffer) {
pw_stream_queue_buffer(d->pwStream, buffer);
}
}
if (!buffer) {
return;
}
d->handleFrame(buffer);
pw_stream_queue_buffer(d->pwStream, buffer);
}
/**
 * @brief Map a SPA video pixel format to the matching QImage format.
 *
 * Only BGR and RGBx are mapped to their exact QImage equivalents; any
 * other format falls back to QImage::Format_RGB32.
 */
static QImage::Format spaToQImageFormat(quint32 format)
{
    switch (format) {
    case SPA_VIDEO_FORMAT_BGR:
        return QImage::Format_BGR888;
    case SPA_VIDEO_FORMAT_RGBx:
        return QImage::Format_RGBX8888;
    default:
        return QImage::Format_RGB32;
    }
}
void PWFrameBuffer::Private::handleFrame(pw_buffer *pwBuffer)
{
auto spaBuffer = pwBuffer->buffer;
uint8_t *src = nullptr;
// process cursor
{
struct spa_meta_cursor *cursor = static_cast<struct spa_meta_cursor*>(spa_buffer_find_meta_data (spaBuffer, SPA_META_Cursor, sizeof (*cursor)));
if (spa_meta_cursor_is_valid (cursor)) {
struct spa_meta_bitmap *bitmap = nullptr;
if (cursor->bitmap_offset)
bitmap = SPA_MEMBER (cursor, cursor->bitmap_offset, struct spa_meta_bitmap);
if (bitmap && bitmap->size.width > 0 && bitmap->size.height > 0) {
const uint8_t *bitmap_data;
bitmap_data = SPA_MEMBER (bitmap, bitmap->offset, uint8_t);
cursorHotspot = { cursor->hotspot.x, cursor->hotspot.y };
cursorTexture = QImage(bitmap_data, bitmap->size.width, bitmap->size.height, bitmap->stride, spaToQImageFormat(bitmap->format));
}
cursorPosition = QPoint{ cursor->position.x, cursor->position.y };
}
}
if (spaBuffer->datas[0].chunk->size == 0) {
if (!frame.dmabuf && !frame.image) {
qCDebug(KRFB_FB_PIPEWIRE) << "Got empty buffer. The buffer possibly carried only "
"information about the mouse cursor.";
return;
}
std::function<void()> cleanup;
const qint64 srcStride = spaBuffer->datas[0].chunk->stride;
if (spaBuffer->datas->type == SPA_DATA_MemFd) {
uint8_t *map = static_cast<uint8_t*>(mmap(
nullptr, spaBuffer->datas->maxsize + spaBuffer->datas->mapoffset,
PROT_READ, MAP_PRIVATE, spaBuffer->datas->fd, 0));
if (map == MAP_FAILED) {
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to mmap the memory: " << strerror(errno);
return;
}
src = SPA_MEMBER(map, spaBuffer->datas[0].mapoffset, uint8_t);
cleanup = [map, spaBuffer] {
munmap(map, spaBuffer->datas->maxsize + spaBuffer->datas->mapoffset);
};
} else if (spaBuffer->datas[0].type == SPA_DATA_MemPtr) {
src = static_cast<uint8_t*>(spaBuffer->datas[0].data);
if (frame.image) {
memcpy(q->fb, frame.image->constBits(), frame.image->sizeInBytes());
setVideoSize(frame.image->size());
}
#if HAVE_DMA_BUF
else if (spaBuffer->datas->type == SPA_DATA_DmaBuf) {
if (!m_eglInitialized) {
// Shouldn't reach this
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to process DMA buffer.";
else if (frame.dmabuf) {
QImage src((uchar*) q->fb, videoSize.width(), videoSize.height(), QImage::Format_RGB32);
if (!m_dmabufHandler.downloadFrame(src, frame)) {
stream->renegotiateModifierFailed(frame.format, frame.dmabuf->modifier);
qCDebug(KRFB_FB_PIPEWIRE) << "Failed to download frame.";
return;
}
gbm_import_fd_data importInfo = {static_cast<int>(spaBuffer->datas->fd), static_cast<uint32_t>(streamSize.width()),
static_cast<uint32_t>(streamSize.height()), static_cast<uint32_t>(spaBuffer->datas[0].chunk->stride), GBM_BO_FORMAT_ARGB8888};
gbm_bo *imported = gbm_bo_import(m_gbmDevice, GBM_BO_IMPORT_FD, &importInfo, GBM_BO_USE_SCANOUT);
if (!imported) {
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to process buffer: Cannot import passed GBM fd - " << strerror(errno);
return;
}
// bind context to render thread
eglMakeCurrent(m_egl.display, EGL_NO_SURFACE, EGL_NO_SURFACE, m_egl.context);
// create EGL image from imported BO
EGLImageKHR image = eglCreateImageKHR(m_egl.display, nullptr, EGL_NATIVE_PIXMAP_KHR, imported, nullptr);
if (image == EGL_NO_IMAGE_KHR) {
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to record frame: Error creating EGLImageKHR - " << formatGLError(glGetError());
gbm_bo_destroy(imported);
return;
}
// create GL 2D texture for framebuffer
GLuint texture;
glGenTextures(1, &texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glBindTexture(GL_TEXTURE_2D, texture);
glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, image);
src = static_cast<uint8_t*>(malloc(srcStride * streamSize.height()));
GLenum glFormat = GL_BGRA;
switch (videoFormat->format) {
case SPA_VIDEO_FORMAT_RGBx:
glFormat = GL_RGBA;
break;
case SPA_VIDEO_FORMAT_RGBA:
glFormat = GL_RGBA;
break;
case SPA_VIDEO_FORMAT_BGRx:
glFormat = GL_BGRA;
break;
case SPA_VIDEO_FORMAT_RGB:
glFormat = GL_RGB;
break;
case SPA_VIDEO_FORMAT_BGR:
glFormat = GL_BGR;
break;
default:
glFormat = GL_BGRA;
break;
}
glGetTexImage(GL_TEXTURE_2D, 0, glFormat, GL_UNSIGNED_BYTE, src);
if (!src) {
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to get image from DMA buffer.";
gbm_bo_destroy(imported);
return;
}
cleanup = [src] {
free(src);
};
glDeleteTextures(1, &texture);
eglDestroyImageKHR(m_egl.display, image);
gbm_bo_destroy(imported);
setVideoSize(src.size());
}
#endif /* HAVE_DMA_BUF */
struct spa_meta_region* videoMetadata =
static_cast<struct spa_meta_region*>(spa_buffer_find_meta_data(
spaBuffer, SPA_META_VideoCrop, sizeof(*videoMetadata)));
if (videoMetadata && (videoMetadata->region.size.width > static_cast<uint32_t>(streamSize.width()) ||
videoMetadata->region.size.height > static_cast<uint32_t>(streamSize.height()))) {
qCWarning(KRFB_FB_PIPEWIRE) << "Stream metadata sizes are wrong!";
return;
else {
qCDebug(KRFB_FB_PIPEWIRE) << "Unknown kind of frame";
}
// Use video metadata when video size from metadata is set and smaller than
// video stream size, so we need to adjust it.
bool videoFullWidth = true;
bool videoFullHeight = true;
if (videoMetadata && videoMetadata->region.size.width != 0 &&
videoMetadata->region.size.height != 0) {
if (videoMetadata->region.size.width < static_cast<uint32_t>(streamSize.width())) {
videoFullWidth = false;
} else if (videoMetadata->region.size.height < static_cast<uint32_t>(streamSize.height())) {
videoFullHeight = false;
}
}
QSize prevVideoSize = videoSize;
if (!videoFullHeight || !videoFullWidth) {
videoSize = QSize(videoMetadata->region.size.width, videoMetadata->region.size.height);
} else {
videoSize = streamSize;
}
if (!q->fb || videoSize != prevVideoSize) {
if (q->fb) {
free(q->fb);
}
q->fb = static_cast<char*>(malloc(videoSize.width() * videoSize.height() * BYTES_PER_PIXEL));
if (!q->fb) {
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to allocate buffer";
isValid = false;
return;
}
Q_EMIT q->frameBufferChanged();
}
const qint32 dstStride = videoSize.width() * BYTES_PER_PIXEL;
Q_ASSERT(dstStride <= srcStride);
if (!videoFullHeight && (videoMetadata->region.position.y + videoSize.height() <= streamSize.height())) {
src += srcStride * videoMetadata->region.position.y;
}
const int xOffset = !videoFullWidth && (videoMetadata->region.position.x + videoSize.width() <= streamSize.width())
? videoMetadata->region.position.x * BYTES_PER_PIXEL : 0;
char *dst = q->fb;
for (int i = 0; i < videoSize.height(); ++i) {
// Adjust source content based on crop video position if needed
src += xOffset;
std::memcpy(dst, src, dstStride);
if (videoFormat->format == SPA_VIDEO_FORMAT_BGRA || videoFormat->format == SPA_VIDEO_FORMAT_BGRx) {
for (int j = 0; j < dstStride; j += 4) {
std::swap(dst[j], dst[j + 2]);
}
}
src += srcStride - xOffset;
dst += dstStride;
}
if (spaBuffer->datas->type == SPA_DATA_MemFd ||
spaBuffer->datas->type == SPA_DATA_DmaBuf) {
cleanup();
}
if (videoFormat->format != SPA_VIDEO_FORMAT_RGB) {
QImage img((uchar*) q->fb, videoSize.width(), videoSize.height(), dstStride, spaToQImageFormat(videoFormat->format));
img.convertTo(QImage::Format_RGB888);
}
if (spa_meta* vdMeta = spa_buffer_find_meta(spaBuffer, SPA_META_VideoDamage)) {
struct spa_meta_region *r;
spa_meta_for_each(r, vdMeta) {
if (!spa_meta_region_is_valid(r))
break;
q->tiles.append(QRect(r->region.position.x, r->region.position.y, r->region.size.width, r->region.size.height));
if (auto damage = frame.damage) {
for (const auto &rect : *damage) {
q->tiles.append(rect);
}
} else {
q->tiles.append(QRect(0, 0, videoSize.width(), videoSize.height()));
}
}
/**
* @brief PWFrameBuffer::Private::createReceivingStream - create a stream that will consume Pipewire buffers
* and copy the framebuffer to the existing image that we track. The state of the stream and configuration
* are later handled by the corresponding listener.
*/
pw_stream *PWFrameBuffer::Private::createReceivingStream()
void PWFrameBuffer::Private::setVideoSize(const QSize &size)
{
spa_rectangle pwMinScreenBounds = SPA_RECTANGLE(1, 1);
spa_rectangle pwMaxScreenBounds = SPA_RECTANGLE(UINT32_MAX, UINT32_MAX);
spa_fraction pwFramerateMin = SPA_FRACTION(0, 1);
spa_fraction pwFramerateMax = SPA_FRACTION(60, 1);
pw_properties* reuseProps = pw_properties_new_string("pipewire.client.reuse=1");
auto stream = pw_stream_new(pwCore, "krfb-fb-consume-stream", reuseProps);
uint8_t buffer[1024] = {};
const spa_pod *params[1];
auto builder = SPA_POD_BUILDER_INIT(buffer, sizeof(buffer));
params[0] = reinterpret_cast<spa_pod *>(spa_pod_builder_add_object(&builder,
SPA_TYPE_OBJECT_Format, SPA_PARAM_EnumFormat,
SPA_FORMAT_mediaType, SPA_POD_Id(SPA_MEDIA_TYPE_video),
SPA_FORMAT_mediaSubtype, SPA_POD_Id(SPA_MEDIA_SUBTYPE_raw),
SPA_FORMAT_VIDEO_format, SPA_POD_CHOICE_ENUM_Id(6,
SPA_VIDEO_FORMAT_RGBx, SPA_VIDEO_FORMAT_RGBA,
SPA_VIDEO_FORMAT_BGRx, SPA_VIDEO_FORMAT_BGRA,
SPA_VIDEO_FORMAT_RGB, SPA_VIDEO_FORMAT_BGR),
SPA_FORMAT_VIDEO_size, SPA_POD_CHOICE_RANGE_Rectangle(&pwMaxScreenBounds, &pwMinScreenBounds, &pwMaxScreenBounds),
SPA_FORMAT_VIDEO_framerate, SPA_POD_Fraction(&pwFramerateMin),
SPA_FORMAT_VIDEO_maxFramerate, SPA_POD_CHOICE_RANGE_Fraction(&pwFramerateMax, &pwFramerateMin, &pwFramerateMax)));
pw_stream_add_listener(stream, &streamListener, &pwStreamEvents, this);
if (pw_stream_connect(stream, PW_DIRECTION_INPUT, pwStreamNodeId, PW_STREAM_FLAG_AUTOCONNECT, params, 1) != 0) {
isValid = false;
if (q->fb && videoSize == size) {
return;
}
return stream;
free(q->fb);
q->fb = static_cast<char*>(malloc(size.width() * size.height() * BYTES_PER_PIXEL));
if (!q->fb) {
qCWarning(KRFB_FB_PIPEWIRE) << "Failed to allocate buffer";
isValid = false;
return;
}
videoSize = size;
Q_EMIT q->frameBufferChanged();
}
// Tear down the PipeWire objects this Private owns.
// Order matters: the loop thread is stopped first so no callback can race
// with destruction, then objects are released from most- to least-derived
// (stream -> core -> context), and the loop itself is destroyed last.
PWFrameBuffer::Private::~Private()
{
// Stop the dispatch thread before freeing anything it might touch.
if (pwMainLoop) {
pw_thread_loop_stop(pwMainLoop);
}
if (pwStream) {
pw_stream_destroy(pwStream);
}
if (pwCore) {
pw_core_disconnect(pwCore);
}
if (pwContext) {
pw_context_destroy(pwContext);
}
// Destroyed last: the context/core/stream were created on this loop.
if (pwMainLoop) {
pw_thread_loop_destroy(pwMainLoop);
}
}
/**
 * @brief Construct the PipeWire-backed framebuffer.
 *
 * The pixel buffer (fb) is deliberately left null here; it is allocated
 * later, once the stream's negotiated video size is known.
 */
PWFrameBuffer::PWFrameBuffer(QObject *parent)
: FrameBuffer (parent),
d(new Private(this))
{
fb = nullptr;
}
PWFrameBuffer::~PWFrameBuffer()
@ -998,8 +474,7 @@ void PWFrameBuffer::startVirtualMonitor(const QString& name, const QSize& resolu
auto screencasting = new Screencasting(registry, wlname, version, this);
auto r = screencasting->createVirtualMonitorStream(name, resolution, dpr, Screencasting::Metadata);
connect(r, &ScreencastingStream::created, this, [this] (quint32 nodeId) {
d->pwStreamNodeId = nodeId;
d->initPw();
d->stream->createStream(nodeId, 0);
});
});
registry->create(connection);
@ -1013,11 +488,17 @@ int PWFrameBuffer::depth()
int PWFrameBuffer::height()
{
if (!d->videoSize.isValid()) {
return 0;
}
return d->videoSize.height();
}
int PWFrameBuffer::width()
{
if (!d->videoSize.isValid()) {
return 0;
}
return d->videoSize.width();
}
@ -1047,7 +528,7 @@ void PWFrameBuffer::stopMonitor()
QVariant PWFrameBuffer::customProperty(const QString &property) const
{
if (property == QLatin1String("stream_node_id")) {
return QVariant::fromValue<uint>(d->pwStreamNodeId);
return QVariant::fromValue<uint>(d->stream->nodeId());
} if (property == QLatin1String("session_handle")) {
return QVariant::fromValue<QDBusObjectPath>(d->sessionPath);
}
@ -1062,5 +543,5 @@ bool PWFrameBuffer::isValid() const
QPoint PWFrameBuffer::cursorPosition()
{
return d->cursorPosition;
return d->cursor->position;
}

View File

@ -50,10 +50,10 @@ public:
bool isValid() const;
private Q_SLOTS:
void handleXdpSessionCreated(quint32 code, QVariantMap results);
void handleXdpDevicesSelected(quint32 code, QVariantMap results);
void handleXdpSourcesSelected(quint32 code, QVariantMap results);
void handleXdpRemoteDesktopStarted(quint32 code, QVariantMap results);
void handleXdpSessionCreated(quint32 code, const QVariantMap &results);
void handleXdpDevicesSelected(quint32 code, const QVariantMap &results);
void handleXdpSourcesSelected(quint32 code, const QVariantMap &results);
void handleXdpRemoteDesktopStarted(quint32 code, const QVariantMap &results);
private:
class Private;