aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authordec05eba <dec05eba@protonmail.com>2024-07-15 18:57:33 +0200
committerdec05eba <dec05eba@protonmail.com>2024-07-15 19:02:34 +0200
commit048b8d21ecbd1168ff8e033b12cbfd66bba0127c (patch)
treef536c35090a6ed78803d064c0784370a2424ca7a /src
parentc447a9a35f51815db307e5d58630040cd63ad4b7 (diff)
Add support for desktop portal capture (-w portal)
Diffstat (limited to 'src')
-rw-r--r--src/capture/kms.c83
-rw-r--r--src/capture/portal.c384
-rw-r--r--src/capture/xcomposite.c26
-rw-r--r--src/cursor.c1
-rw-r--r--src/dbus.c902
-rw-r--r--src/egl.c25
-rw-r--r--src/encoder/video/vaapi.c12
-rw-r--r--src/main.cpp121
-rw-r--r--src/pipewire.c620
-rw-r--r--src/utils.c40
10 files changed, 2083 insertions, 131 deletions
diff --git a/src/capture/kms.c b/src/capture/kms.c
index a9ce73c..e7b0b59 100644
--- a/src/capture/kms.c
+++ b/src/capture/kms.c
@@ -3,6 +3,7 @@
#include "../../include/color_conversion.h"
#include "../../kms/client/kms_client.h"
+#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <unistd.h>
@@ -38,8 +39,8 @@ typedef struct {
gsr_monitor_rotation monitor_rotation;
- unsigned int input_texture;
- unsigned int cursor_texture;
+ unsigned int input_texture_id;
+ unsigned int cursor_texture_id;
} gsr_capture_kms;
static void gsr_capture_kms_cleanup_kms_fds(gsr_capture_kms *self) {
@@ -52,14 +53,14 @@ static void gsr_capture_kms_cleanup_kms_fds(gsr_capture_kms *self) {
}
static void gsr_capture_kms_stop(gsr_capture_kms *self) {
- if(self->input_texture) {
- self->params.egl->glDeleteTextures(1, &self->input_texture);
- self->input_texture = 0;
+ if(self->input_texture_id) {
+ self->params.egl->glDeleteTextures(1, &self->input_texture_id);
+ self->input_texture_id = 0;
}
- if(self->cursor_texture) {
- self->params.egl->glDeleteTextures(1, &self->cursor_texture);
- self->cursor_texture = 0;
+ if(self->cursor_texture_id) {
+ self->params.egl->glDeleteTextures(1, &self->cursor_texture_id);
+ self->cursor_texture_id = 0;
}
gsr_capture_kms_cleanup_kms_fds(self);
@@ -70,25 +71,25 @@ static int max_int(int a, int b) {
return a > b ? a : b;
}
-static void gsr_capture_kms_create_input_textures(gsr_capture_kms *self) {
- self->params.egl->glGenTextures(1, &self->input_texture);
- self->params.egl->glBindTexture(GL_TEXTURE_2D, self->input_texture);
+static void gsr_capture_kms_create_input_texture_ids(gsr_capture_kms *self) {
+ self->params.egl->glGenTextures(1, &self->input_texture_id);
+ self->params.egl->glBindTexture(GL_TEXTURE_2D, self->input_texture_id);
self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
- const bool cursor_texture_is_external = self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA;
- const int cursor_texture_target = cursor_texture_is_external ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D;
+ const bool cursor_texture_id_is_external = self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA;
+ const int cursor_texture_id_target = cursor_texture_id_is_external ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D;
- self->params.egl->glGenTextures(1, &self->cursor_texture);
- self->params.egl->glBindTexture(cursor_texture_target, self->cursor_texture);
- self->params.egl->glTexParameteri(cursor_texture_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- self->params.egl->glTexParameteri(cursor_texture_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- self->params.egl->glTexParameteri(cursor_texture_target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
- self->params.egl->glTexParameteri(cursor_texture_target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- self->params.egl->glBindTexture(cursor_texture_target, 0);
+ self->params.egl->glGenTextures(1, &self->cursor_texture_id);
+ self->params.egl->glBindTexture(cursor_texture_id_target, self->cursor_texture_id);
+ self->params.egl->glTexParameteri(cursor_texture_id_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ self->params.egl->glTexParameteri(cursor_texture_id_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ self->params.egl->glTexParameteri(cursor_texture_id_target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ self->params.egl->glTexParameteri(cursor_texture_id_target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ self->params.egl->glBindTexture(cursor_texture_id_target, 0);
}
/* TODO: On monitor reconfiguration, find monitor x, y, width and height again. Do the same for nvfbc. */
@@ -119,7 +120,7 @@ static void monitor_callback(const gsr_monitor *monitor, void *userdata) {
static int gsr_capture_kms_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
gsr_capture_kms *self = cap->priv;
- gsr_capture_kms_create_input_textures(self);
+ gsr_capture_kms_create_input_texture_ids(self);
gsr_monitor monitor;
self->monitor_id.num_connector_ids = 0;
@@ -268,7 +269,7 @@ static vec2i swap_vec2i(vec2i value) {
static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
gsr_capture_kms *self = cap->priv;
const bool screen_plane_use_modifiers = self->params.egl->gpu_info.vendor != GSR_GPU_VENDOR_AMD;
- const bool cursor_texture_is_external = self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA;
+ const bool cursor_texture_id_is_external = self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA;
//egl->glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
self->params.egl->glClear(0);
@@ -337,12 +338,12 @@ static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_c
// Assertion pic->display_order == pic->encode_order failed at libavcodec/vaapi_encode_h265.c:765
// kms server info: kms client shutdown, shutting down the server
intptr_t img_attr[18] = {
- EGL_LINUX_DRM_FOURCC_EXT, drm_fd->pixel_format,
- EGL_WIDTH, drm_fd->width,
- EGL_HEIGHT, drm_fd->height,
- EGL_DMA_BUF_PLANE0_FD_EXT, drm_fd->fd,
- EGL_DMA_BUF_PLANE0_OFFSET_EXT, drm_fd->offset,
- EGL_DMA_BUF_PLANE0_PITCH_EXT, drm_fd->pitch,
+ EGL_LINUX_DRM_FOURCC_EXT, drm_fd->pixel_format,
+ EGL_WIDTH, drm_fd->width,
+ EGL_HEIGHT, drm_fd->height,
+ EGL_DMA_BUF_PLANE0_FD_EXT, drm_fd->fd,
+ EGL_DMA_BUF_PLANE0_OFFSET_EXT, drm_fd->offset,
+ EGL_DMA_BUF_PLANE0_PITCH_EXT, drm_fd->pitch,
};
if(screen_plane_use_modifiers) {
@@ -360,7 +361,7 @@ static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_c
}
EGLImage image = self->params.egl->eglCreateImage(self->params.egl->egl_display, 0, EGL_LINUX_DMA_BUF_EXT, NULL, img_attr);
- self->params.egl->glBindTexture(GL_TEXTURE_2D, self->input_texture);
+ self->params.egl->glBindTexture(GL_TEXTURE_2D, self->input_texture_id);
self->params.egl->glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, image);
self->params.egl->eglDestroyImage(self->params.egl->egl_display, image);
self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
@@ -374,7 +375,7 @@ static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_c
const int target_x = max_int(0, frame->width / 2 - self->capture_size.x / 2);
const int target_y = max_int(0, frame->height / 2 - self->capture_size.y / 2);
- gsr_color_conversion_draw(color_conversion, self->input_texture,
+ gsr_color_conversion_draw(color_conversion, self->input_texture_id,
(vec2i){target_x, target_y}, self->capture_size,
capture_pos, self->capture_size,
texture_rotation, false);
@@ -410,20 +411,20 @@ static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_c
cursor_pos.y += target_y;
const intptr_t img_attr_cursor[] = {
- EGL_LINUX_DRM_FOURCC_EXT, cursor_drm_fd->pixel_format,
- EGL_WIDTH, cursor_drm_fd->width,
- EGL_HEIGHT, cursor_drm_fd->height,
- EGL_DMA_BUF_PLANE0_FD_EXT, cursor_drm_fd->fd,
- EGL_DMA_BUF_PLANE0_OFFSET_EXT, cursor_drm_fd->offset,
- EGL_DMA_BUF_PLANE0_PITCH_EXT, cursor_drm_fd->pitch,
+ EGL_LINUX_DRM_FOURCC_EXT, cursor_drm_fd->pixel_format,
+ EGL_WIDTH, cursor_drm_fd->width,
+ EGL_HEIGHT, cursor_drm_fd->height,
+ EGL_DMA_BUF_PLANE0_FD_EXT, cursor_drm_fd->fd,
+ EGL_DMA_BUF_PLANE0_OFFSET_EXT, cursor_drm_fd->offset,
+ EGL_DMA_BUF_PLANE0_PITCH_EXT, cursor_drm_fd->pitch,
EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT, cursor_drm_fd->modifier & 0xFFFFFFFFULL,
EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT, cursor_drm_fd->modifier >> 32ULL,
EGL_NONE
};
EGLImage cursor_image = self->params.egl->eglCreateImage(self->params.egl->egl_display, 0, EGL_LINUX_DMA_BUF_EXT, NULL, img_attr_cursor);
- const int target = cursor_texture_is_external ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D;
- self->params.egl->glBindTexture(target, self->cursor_texture);
+ const int target = cursor_texture_id_is_external ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D;
+ self->params.egl->glBindTexture(target, self->cursor_texture_id);
self->params.egl->glEGLImageTargetTexture2DOES(target, cursor_image);
self->params.egl->eglDestroyImage(self->params.egl->egl_display, cursor_image);
self->params.egl->glBindTexture(target, 0);
@@ -431,17 +432,15 @@ static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_c
self->params.egl->glEnable(GL_SCISSOR_TEST);
self->params.egl->glScissor(target_x, target_y, self->capture_size.x, self->capture_size.y);
- gsr_color_conversion_draw(color_conversion, self->cursor_texture,
+ gsr_color_conversion_draw(color_conversion, self->cursor_texture_id,
cursor_pos, cursor_size,
(vec2i){0, 0}, cursor_size,
- texture_rotation, cursor_texture_is_external);
+ texture_rotation, cursor_texture_id_is_external);
self->params.egl->glDisable(GL_SCISSOR_TEST);
}
self->params.egl->eglSwapBuffers(self->params.egl->egl_display, self->params.egl->egl_surface);
-
- // TODO: Do software specific video encoder conversion here
//self->params.egl->glFlush();
//self->params.egl->glFinish();
diff --git a/src/capture/portal.c b/src/capture/portal.c
new file mode 100644
index 0000000..77da206
--- /dev/null
+++ b/src/capture/portal.c
@@ -0,0 +1,384 @@
#include "../../include/capture/portal.h"
#include "../../include/color_conversion.h"
#include "../../include/egl.h"
#include "../../include/utils.h"
#include "../../include/dbus.h"
#include "../../include/pipewire.h"

#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <limits.h>
#include <inttypes.h>
#include <assert.h>

#include <libavcodec/avcodec.h>
+
typedef struct {
    gsr_capture_portal_params params;

    bool should_stop;   // reported to callers through gsr_capture_portal_should_stop
    bool stop_is_error; // whether a pending stop was caused by an error

    unsigned int input_texture_id;  // GL texture receiving the streamed screen content
    unsigned int cursor_texture_id; // GL texture receiving the streamed cursor image

    gsr_dbus dbus;        // desktop portal (ScreenCast) dbus state
    char *session_handle; // portal session handle (owned; freed in gsr_capture_portal_stop)

    uint32_t pipewire_node; // pipewire node id returned by the portal Start request
    int pipewire_fd;        // fd from OpenPipeWireRemote; handed off to gsr_pipewire_init
    gsr_pipewire pipewire;
    vec2i capture_size;     // frame size discovered during pipewire negotiation
} gsr_capture_portal;
+
+static void gsr_capture_portal_stop(gsr_capture_portal *self) {
+ if(self->input_texture_id) {
+ self->params.egl->glDeleteTextures(1, &self->input_texture_id);
+ self->input_texture_id = 0;
+ }
+
+ if(self->cursor_texture_id) {
+ self->params.egl->glDeleteTextures(1, &self->cursor_texture_id);
+ self->cursor_texture_id = 0;
+ }
+
+ if(self->pipewire_fd > 0) {
+ close(self->pipewire_fd);
+ self->pipewire_fd = -1;
+ }
+
+ gsr_pipewire_deinit(&self->pipewire);
+
+ if(self->session_handle) {
+ free(self->session_handle);
+ self->session_handle = NULL;
+ }
+
+ gsr_dbus_deinit(&self->dbus);
+}
+
+static void gsr_capture_portal_create_input_textures(gsr_capture_portal *self) {
+ self->params.egl->glGenTextures(1, &self->input_texture_id);
+ self->params.egl->glBindTexture(GL_TEXTURE_2D, self->input_texture_id);
+ self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
+
+ self->params.egl->glGenTextures(1, &self->cursor_texture_id);
+ self->params.egl->glBindTexture(GL_TEXTURE_2D, self->cursor_texture_id);
+ self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
+}
+
/* Writes the gpu-screen-recorder config directory into |buffer|:
   $XDG_CONFIG_HOME/gpu-screen-recorder if XDG_CONFIG_HOME is set,
   otherwise $HOME/.config/gpu-screen-recorder ("/tmp" when HOME is unset). */
static void get_gpu_screen_recorder_config_directory_path(char *buffer, size_t buffer_size) {
    const char *xdg_config_home = getenv("XDG_CONFIG_HOME");
    if(xdg_config_home) {
        snprintf(buffer, buffer_size, "%s/gpu-screen-recorder", xdg_config_home);
        return;
    }

    const char *home = getenv("HOME");
    snprintf(buffer, buffer_size, "%s/.config/gpu-screen-recorder", home ? home : "/tmp");
}
+
/* Writes the full path of the cached portal restore token file into
   |buffer|. Same base-directory resolution as
   get_gpu_screen_recorder_config_directory_path. */
static void get_gpu_screen_recorder_restore_token_path(char *buffer, size_t buffer_size) {
    const char *xdg_config_home = getenv("XDG_CONFIG_HOME");
    if(xdg_config_home) {
        snprintf(buffer, buffer_size, "%s/gpu-screen-recorder/restore_token", xdg_config_home);
        return;
    }

    const char *home = getenv("HOME");
    if(!home)
        home = "/tmp";
    snprintf(buffer, buffer_size, "%s/.config/gpu-screen-recorder/restore_token", home);
}
+
+static void gsr_capture_portal_save_restore_token(const char *restore_token) {
+ char config_path[PATH_MAX];
+ config_path[0] = '\0';
+ get_gpu_screen_recorder_config_directory_path(config_path, sizeof(config_path));
+
+ if(create_directory_recursive(config_path) != 0) {
+ fprintf(stderr, "gsr warning: gsr_capture_portal_save_restore_token: failed to create directory (%s) for restore token\n", config_path);
+ return;
+ }
+
+ char restore_token_path[PATH_MAX];
+ restore_token_path[0] = '\0';
+ get_gpu_screen_recorder_restore_token_path(restore_token_path, sizeof(restore_token_path));
+
+ FILE *f = fopen(restore_token_path, "wb");
+ if(!f) {
+ fprintf(stderr, "gsr warning: gsr_capture_portal_save_restore_token: failed to create restore token file (%s)\n", restore_token_path);
+ return;
+ }
+
+ const int restore_token_len = strlen(restore_token);
+ if((long)fwrite(restore_token, 1, restore_token_len, f) != restore_token_len) {
+ fprintf(stderr, "gsr warning: gsr_capture_portal_save_restore_token: failed to write restore token to file (%s)\n", restore_token_path);
+ fclose(f);
+ return;
+ }
+
+ fprintf(stderr, "gsr info: gsr_capture_portal_save_restore_token: saved restore token to cache (%s)\n", restore_token);
+ fclose(f);
+}
+
+static void gsr_capture_portal_get_restore_token_from_cache(char *buffer, size_t buffer_size) {
+ assert(buffer_size > 0);
+ buffer[0] = '\0';
+
+ char restore_token_path[PATH_MAX];
+ restore_token_path[0] = '\0';
+ get_gpu_screen_recorder_restore_token_path(restore_token_path, sizeof(restore_token_path));
+
+ FILE *f = fopen(restore_token_path, "rb");
+ if(!f) {
+ fprintf(stderr, "gsr info: gsr_capture_portal_get_restore_token_from_cache: no restore token found in cache or failed to load (%s)\n", restore_token_path);
+ return;
+ }
+
+ fseek(f, 0, SEEK_END);
+ long file_size = ftell(f);
+ fseek(f, 0, SEEK_SET);
+
+ fprintf(stderr, "file size: %ld\n", file_size);
+
+ if(file_size > 0 && file_size < 1024 && file_size < (long)buffer_size && (long)fread(buffer, 1, file_size, f) != file_size) {
+ buffer[0] = '\0';
+ fprintf(stderr, "gsr warning: gsr_capture_portal_get_restore_token_from_cache: failed to read restore token (%s)\n", restore_token_path);
+ fclose(f);
+ return;
+ }
+
+ if(file_size > 0 && file_size < (long)buffer_size)
+ buffer[file_size] = '\0';
+
+ fprintf(stderr, "gsr info: gsr_capture_portal_get_restore_token_from_cache: read cached restore token (%s)\n", buffer);
+ fclose(f);
+}
+
/* Runs the org.freedesktop.portal.ScreenCast handshake in order:
   CreateSession -> SelectSources -> Start -> OpenPipeWireRemote.
   On success self->pipewire_node and self->pipewire_fd identify the
   stream to capture. Returns false at the first failing step; the
   caller cleans up via gsr_capture_portal_stop. */
static bool gsr_capture_portal_setup_dbus(gsr_capture_portal *self) {
    char restore_token[1024];
    restore_token[0] = '\0';
    // A cached restore token lets the portal skip the interactive source picker.
    if(self->params.restore_portal_session)
        gsr_capture_portal_get_restore_token_from_cache(restore_token, sizeof(restore_token));

    if(!gsr_dbus_init(&self->dbus, restore_token))
        return false;

    fprintf(stderr, "gsr info: gsr_capture_portal_setup_dbus: CreateSession\n");
    if(!gsr_dbus_screencast_create_session(&self->dbus, &self->session_handle)) {
        fprintf(stderr, "gsr error: gsr_capture_portal_setup_dbus: CreateSession failed\n");
        return false;
    }

    fprintf(stderr, "gsr info: gsr_capture_portal_setup_dbus: SelectSources\n");
    // Both monitors and windows are offered; the cursor is embedded in the
    // stream when record_cursor is set, otherwise hidden.
    if(!gsr_dbus_screencast_select_sources(&self->dbus, self->session_handle, GSR_PORTAL_CAPTURE_TYPE_MONITOR | GSR_PORTAL_CAPTURE_TYPE_WINDOW, self->params.record_cursor ? GSR_PORTAL_CURSOR_MODE_EMBEDDED : GSR_PORTAL_CURSOR_MODE_HIDDEN)) {
        fprintf(stderr, "gsr error: gsr_capture_portal_setup_dbus: SelectSources failed\n");
        return false;
    }

    fprintf(stderr, "gsr info: gsr_capture_portal_setup_dbus: Start\n");
    if(!gsr_dbus_screencast_start(&self->dbus, self->session_handle, &self->pipewire_node)) {
        fprintf(stderr, "gsr error: gsr_capture_portal_setup_dbus: Start failed\n");
        return false;
    }

    // Cache the (possibly new) restore token for the next run.
    const char *screencast_restore_token = gsr_dbus_screencast_get_restore_token(&self->dbus);
    if(screencast_restore_token)
        gsr_capture_portal_save_restore_token(screencast_restore_token);

    fprintf(stderr, "gsr info: gsr_capture_portal_setup_dbus: OpenPipeWireRemote\n");
    if(!gsr_dbus_screencast_open_pipewire_remote(&self->dbus, self->session_handle, &self->pipewire_fd)) {
        fprintf(stderr, "gsr error: gsr_capture_portal_setup_dbus: OpenPipeWireRemote failed\n");
        return false;
    }

    fprintf(stderr, "gsr info: gsr_capture_portal_setup_dbus: desktop portal setup finished\n");
    return true;
}
+
/* Polls gsr_pipewire_map_texture (every 30 ms, for up to 5 seconds)
   until the first frame has been negotiated, then records its size in
   self->capture_size. Returns false on timeout. */
static bool gsr_capture_portal_get_frame_dimensions(gsr_capture_portal *self) {
    gsr_pipewire_region region = {0, 0, 0, 0};
    gsr_pipewire_region cursor_region = {0, 0, 0, 0};
    fprintf(stderr, "gsr info: gsr_capture_portal_start: waiting for pipewire negotiation\n");

    const double start_time = clock_get_monotonic_seconds();
    while(clock_get_monotonic_seconds() - start_time < 5.0) {
        if(gsr_pipewire_map_texture(&self->pipewire, self->input_texture_id, self->cursor_texture_id, &region, &cursor_region)) {
            self->capture_size.x = region.width;
            self->capture_size.y = region.height;
            fprintf(stderr, "gsr info: gsr_capture_portal_start: pipewire negotiation finished\n");
            return true;
        }
        usleep(30 * 1000); /* 30 milliseconds */
    }

    fprintf(stderr, "gsr info: gsr_capture_portal_start: timed out waiting for pipewire negotiation (5 seconds)\n");
    return false;
}
+
/* gsr_capture start callback: creates the stream textures, performs the
   portal dbus handshake, connects to pipewire and waits for the first
   negotiated frame. Sets the codec context and |frame| dimensions to
   the (2-aligned) capture size. Returns 0 on success, -1 on failure
   (all partial state is released via gsr_capture_portal_stop). */
static int gsr_capture_portal_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
    gsr_capture_portal *self = cap->priv;

    gsr_capture_portal_create_input_textures(self);

    if(!gsr_capture_portal_setup_dbus(self)) {
        gsr_capture_portal_stop(self);
        return -1;
    }

    fprintf(stderr, "gsr info: gsr_capture_portal_start: setting up pipewire\n");
    /* TODO: support hdr when pipewire supports it */
    if(!gsr_pipewire_init(&self->pipewire, self->pipewire_fd, self->pipewire_node, video_codec_context->framerate.num, self->params.record_cursor, self->params.egl)) {
        fprintf(stderr, "gsr error: gsr_capture_portal_start: failed to setup pipewire with fd: %d, node: %" PRIu32 "\n", self->pipewire_fd, self->pipewire_node);
        gsr_capture_portal_stop(self);
        return -1;
    }
    // gsr_pipewire_init now owns the fd; clear it so stop() doesn't close it again.
    self->pipewire_fd = -1;
    fprintf(stderr, "gsr info: gsr_capture_portal_start: pipewire setup finished\n");

    if(!gsr_capture_portal_get_frame_dimensions(self)) {
        gsr_capture_portal_stop(self);
        return -1;
    }

    /* Disable vsync */
    self->params.egl->eglSwapInterval(self->params.egl->egl_display, 0);

    // Encoders generally require even dimensions; round up to a multiple of 2.
    video_codec_context->width = FFALIGN(self->capture_size.x, 2);
    video_codec_context->height = FFALIGN(self->capture_size.y, 2);

    frame->width = video_codec_context->width;
    frame->height = video_codec_context->height;
    return 0;
}
+
/* Returns the larger of |a| and |b|. */
static int max_int(int a, int b) {
    if(a < b)
        return b;
    return a;
}
+
+static int gsr_capture_portal_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
+ (void)frame;
+ (void)color_conversion;
+ gsr_capture_portal *self = cap->priv;
+
+ //egl->glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+ self->params.egl->glClear(0);
+
+ vec2i content_size = self->capture_size;
+
+ /* TODO: Handle formats other than RGB(a) */
+ gsr_pipewire_region region = {0, 0, 0, 0};
+ gsr_pipewire_region cursor_region = {0, 0, 0, 0};
+ if(gsr_pipewire_map_texture(&self->pipewire, self->input_texture_id, self->cursor_texture_id, &region, &cursor_region)) {
+ content_size.x = region.width;
+ content_size.y = region.height;
+ }
+
+ const int target_x = max_int(0, frame->width / 2 - content_size.x / 2);
+ const int target_y = max_int(0, frame->height / 2 - content_size.y / 2);
+
+ gsr_color_conversion_draw(color_conversion, self->input_texture_id,
+ (vec2i){target_x, target_y}, content_size,
+ (vec2i){region.x, region.y}, content_size,
+ 0.0f, false);
+
+ const vec2i cursor_pos = {
+ target_x + cursor_region.x,
+ target_y + cursor_region.y
+ };
+
+ self->params.egl->glEnable(GL_SCISSOR_TEST);
+ self->params.egl->glScissor(target_x, target_y, content_size.x, content_size.y);
+ gsr_color_conversion_draw(color_conversion, self->cursor_texture_id,
+ (vec2i){cursor_pos.x, cursor_pos.y}, (vec2i){cursor_region.width, cursor_region.height},
+ (vec2i){0, 0}, (vec2i){cursor_region.width, cursor_region.height},
+ 0.0f, false);
+ self->params.egl->glDisable(GL_SCISSOR_TEST);
+
+ self->params.egl->eglSwapBuffers(self->params.egl->egl_display, self->params.egl->egl_surface);
+
+ //self->params.egl->glFlush();
+ //self->params.egl->glFinish();
+
+ return 0;
+}
+
+static bool gsr_capture_portal_should_stop(gsr_capture *cap, bool *err) {
+ gsr_capture_portal *cap_portal = cap->priv;
+ if(cap_portal->should_stop) {
+ if(err)
+ *err = cap_portal->stop_is_error;
+ return true;
+ }
+
+ if(err)
+ *err = false;
+ return false;
+}
+
/* Intentionally empty: the portal capture needs no per-frame cleanup.
   Present only to satisfy the gsr_capture callback interface. */
static void gsr_capture_portal_capture_end(gsr_capture *cap, AVFrame *frame) {
    (void)cap;
    (void)frame;
}
+
/* Frames mapped from pipewire are treated as RGB(A); see the TODO in
   gsr_capture_portal_capture about supporting other formats. */
static gsr_source_color gsr_capture_portal_get_source_color(gsr_capture *cap) {
    (void)cap;
    return GSR_SOURCE_COLOR_RGB;
}
+
+// static bool gsr_capture_portal_uses_external_image(gsr_capture *cap) {
+// gsr_capture_portal *cap_portal = cap->priv;
+// return cap_portal->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA;
+// }
+
+static void gsr_capture_portal_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
+ (void)video_codec_context;
+ gsr_capture_portal *cap_portal = cap->priv;
+ if(cap->priv) {
+ gsr_capture_portal_stop(cap_portal);
+ free(cap->priv);
+ cap->priv = NULL;
+ }
+ free(cap);
+}
+
/* Allocates a desktop-portal based capture object. The actual portal
   and pipewire setup is deferred to gsr_capture_portal_start. Returns
   NULL if |params| is NULL or on allocation failure. The returned
   object is released with its destroy callback. */
gsr_capture* gsr_capture_portal_create(const gsr_capture_portal_params *params) {
    if(!params) {
        fprintf(stderr, "gsr error: gsr_capture_portal_create params is NULL\n");
        return NULL;
    }

    gsr_capture *cap = calloc(1, sizeof(gsr_capture));
    if(!cap)
        return NULL;

    gsr_capture_portal *cap_portal = calloc(1, sizeof(gsr_capture_portal));
    if(!cap_portal) {
        free(cap);
        return NULL;
    }

    // NOTE(review): calloc leaves pipewire_fd at 0 and gsr_capture_portal_stop
    // only closes fds > 0, so an fd of exactly 0 from the portal would leak.
    // Consider initializing pipewire_fd to -1 here and checking >= 0 in stop.
    cap_portal->params = *params;

    *cap = (gsr_capture) {
        .start = gsr_capture_portal_start,
        .tick = NULL,
        .should_stop = gsr_capture_portal_should_stop,
        .capture = gsr_capture_portal_capture,
        .capture_end = gsr_capture_portal_capture_end,
        .get_source_color = gsr_capture_portal_get_source_color,
        .uses_external_image = NULL,
        .destroy = gsr_capture_portal_destroy,
        .priv = cap_portal
    };

    return cap;
}
diff --git a/src/capture/xcomposite.c b/src/capture/xcomposite.c
index f5d2b2f..83c4800 100644
--- a/src/capture/xcomposite.c
+++ b/src/capture/xcomposite.c
@@ -351,23 +351,15 @@ static int gsr_capture_xcomposite_capture(gsr_capture *cap, AVFrame *frame, gsr_
if(self->params.record_cursor && self->cursor.visible) {
gsr_cursor_tick(&self->cursor, self->window);
- const bool cursor_inside_window =
- cursor_pos.x + self->cursor.size.x >= target_x &&
- cursor_pos.x <= target_x + self->texture_size.x &&
- cursor_pos.y + self->cursor.size.y >= target_y &&
- cursor_pos.y <= target_y + self->texture_size.y;
-
- if(cursor_inside_window) {
- self->params.egl->glEnable(GL_SCISSOR_TEST);
- self->params.egl->glScissor(target_x, target_y, self->texture_size.x, self->texture_size.y);
-
- gsr_color_conversion_draw(color_conversion, self->cursor.texture_id,
- cursor_pos, self->cursor.size,
- (vec2i){0, 0}, self->cursor.size,
- 0.0f, false);
-
- self->params.egl->glDisable(GL_SCISSOR_TEST);
- }
+ self->params.egl->glEnable(GL_SCISSOR_TEST);
+ self->params.egl->glScissor(target_x, target_y, self->texture_size.x, self->texture_size.y);
+
+ gsr_color_conversion_draw(color_conversion, self->cursor.texture_id,
+ cursor_pos, self->cursor.size,
+ (vec2i){0, 0}, self->cursor.size,
+ 0.0f, false);
+
+ self->params.egl->glDisable(GL_SCISSOR_TEST);
}
self->params.egl->eglSwapBuffers(self->params.egl->egl_display, self->params.egl->egl_surface);
diff --git a/src/cursor.c b/src/cursor.c
index 9825ad2..078b55d 100644
--- a/src/cursor.c
+++ b/src/cursor.c
@@ -52,6 +52,7 @@ static bool gsr_cursor_set_from_x11_cursor_image(gsr_cursor *self, XFixesCursorI
}
}
+ // TODO: glTextureSubImage2D if same size
self->egl->glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, self->size.x, self->size.y, 0, GL_RGBA, GL_UNSIGNED_BYTE, cursor_data);
free(cursor_data);
diff --git a/src/dbus.c b/src/dbus.c
new file mode 100644
index 0000000..9d30a51
--- /dev/null
+++ b/src/dbus.c
@@ -0,0 +1,902 @@
+#include "../include/dbus.h"
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <assert.h>
+#include <unistd.h>
+#include <sys/random.h>
+
+/* TODO: Make non-blocking when GPU Screen Recorder is turned into a library */
+/* TODO: Make sure responses matches the requests */
+
+#define DESKTOP_PORTAL_SIGNAL_RULE "type='signal',interface='org.freedesktop.Portal.Request'"
+
/* Value types supported in a dbus "a{sv}" dict (see dbus_add_dict). */
typedef enum {
    DICT_TYPE_STRING,
    DICT_TYPE_UINT32,
    DICT_TYPE_BOOL,
} dict_value_type;

/* One key/value entry of a dbus "a{sv}" dict; |value_type| selects
   which union member holds the value. */
typedef struct {
    const char *key;
    dict_value_type value_type;
    union {
        char *str;
        dbus_uint32_t u32;
        dbus_bool_t boolean;
    };
} dict_entry;
+
+static const char* dict_value_type_to_string(dict_value_type type) {
+ switch(type) {
+ case DICT_TYPE_STRING: return "string";
+ case DICT_TYPE_UINT32: return "uint32";
+ case DICT_TYPE_BOOL: return "boolean";
+ }
+ return "(unknown)";
+}
+
/* Fills |buffer| with |buffer_size| characters drawn from |alphabet|
   using kernel entropy. Returns false if entropy could not be obtained.
   Fix: getrandom() may return fewer bytes than requested (e.g. when
   interrupted by a signal, or for requests above 256 bytes), so read
   in a loop and retry on EINTR instead of failing on any short read. */
static bool generate_random_characters(char *buffer, int buffer_size, const char *alphabet, size_t alphabet_size) {
    /* TODO: Use other functions on other platforms than linux */
    int filled = 0;
    while(filled < buffer_size) {
        const ssize_t bytes_read = getrandom(buffer + filled, buffer_size - filled, 0);
        if(bytes_read <= 0) {
            if(bytes_read == -1 && errno == EINTR)
                continue; /* interrupted by a signal; retry */
            fprintf(stderr, "gsr error: generate_random_characters: failed to get random bytes, error: %s\n", strerror(errno));
            return false;
        }
        filled += (int)bytes_read;
    }

    /* Map each random byte onto the alphabet (slight modulo bias is
       acceptable for token generation here). */
    for(int i = 0; i < buffer_size; ++i) {
        const unsigned char c = (unsigned char)buffer[i];
        buffer[i] = alphabet[c % alphabet_size];
    }

    return true;
}
+
/* Connects to the dbus session bus and requests the well-known name
   "com.dec05eba.gpu_screen_recorder". |screencast_restore_token| (may
   be NULL) is cloned for later use by the ScreenCast portal calls.
   Returns false on failure; already-acquired resources are released. */
bool gsr_dbus_init(gsr_dbus *self, const char *screencast_restore_token) {
    memset(self, 0, sizeof(*self));
    dbus_error_init(&self->err);

    // random_str is used to build per-process unique portal tokens.
    // Assumes random_str holds DBUS_RANDOM_STR_SIZE + 1 bytes -- TODO confirm in dbus.h.
    self->random_str[DBUS_RANDOM_STR_SIZE] = '\0';
    if(!generate_random_characters(self->random_str, DBUS_RANDOM_STR_SIZE, "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789", 62)) {
        fprintf(stderr, "gsr error: gsr_dbus_init: failed to generate random string\n");
        return false;
    }

    self->con = dbus_bus_get(DBUS_BUS_SESSION, &self->err);
    if(dbus_error_is_set(&self->err)) {
        fprintf(stderr, "gsr error: gsr_dbus_init: dbus_bus_get failed with error: %s\n", self->err.message);
        return false;
    }

    if(!self->con) {
        fprintf(stderr, "gsr error: gsr_dbus_init: failed to get dbus session\n");
        return false;
    }

    /* TODO: Check the name */
    const int ret = dbus_bus_request_name(self->con, "com.dec05eba.gpu_screen_recorder", DBUS_NAME_FLAG_REPLACE_EXISTING, &self->err);
    if(dbus_error_is_set(&self->err)) {
        fprintf(stderr, "gsr error: gsr_dbus_init: dbus_bus_request_name failed with error: %s\n", self->err.message);
        gsr_dbus_deinit(self);
        return false;
    }

    if(screencast_restore_token) {
        self->screencast_restore_token = strdup(screencast_restore_token);
        if(!self->screencast_restore_token) {
            fprintf(stderr, "gsr error: gsr_dbus_init: failed to clone restore token\n");
            gsr_dbus_deinit(self);
            return false;
        }
    }

    // The request-name result is deliberately not enforced; see below.
    (void)ret;
    // if(ret != DBUS_REQUEST_NAME_REPLY_PRIMARY_OWNER) {
    //     fprintf(stderr, "gsr error: gsr_capture_portal_setup_dbus: dbus_bus_request_name failed to get primary owner\n");
    //     return false;
    // }

    return true;
}
+
/* Releases everything gsr_dbus_init (and later match-rule setup)
   acquired: the cloned restore token, the portal signal match rule,
   the requested bus name and the connection reference. Safe to call
   on a partially initialized object. */
void gsr_dbus_deinit(gsr_dbus *self) {
    if(self->screencast_restore_token) {
        free(self->screencast_restore_token);
        self->screencast_restore_token = NULL;
    }

    if(self->desktop_portal_rule_added) {
        dbus_bus_remove_match(self->con, DESKTOP_PORTAL_SIGNAL_RULE, NULL);
        // dbus_connection_flush(self->con);
        self->desktop_portal_rule_added = false;
    }

    if(self->con) {
        dbus_error_free(&self->err);

        dbus_bus_release_name(self->con, "com.dec05eba.gpu_screen_recorder", NULL);

        // Apparently shouldn't be used when a connection is setup by using dbus_bus_get
        //dbus_connection_close(self->con);
        dbus_connection_unref(self->con);
        self->con = NULL;
    }
}
+
+static bool gsr_dbus_desktop_portal_get_property(gsr_dbus *self, const char *interface, const char *property_name, uint32_t *result) {
+ *result = 0;
+
+ DBusMessage *msg = dbus_message_new_method_call(
+ "org.freedesktop.portal.Desktop", // target for the method call
+ "/org/freedesktop/portal/desktop", // object to call on
+ "org.freedesktop.DBus.Properties", // interface to call on
+ "Get"); // method name
+ if(!msg) {
+ fprintf(stderr, "gsr error: gsr_dbus_desktop_portal_get_property: dbus_message_new_method_call failed\n");
+ return false;
+ }
+
+ DBusMessageIter it;
+ dbus_message_iter_init_append(msg, &it);
+
+ if(!dbus_message_iter_append_basic(&it, DBUS_TYPE_STRING, &interface)) {
+ fprintf(stderr, "gsr error: gsr_dbus_desktop_portal_get_property: failed to add interface\n");
+ dbus_message_unref(msg);
+ return false;
+ }
+
+ if(!dbus_message_iter_append_basic(&it, DBUS_TYPE_STRING, &property_name)) {
+ fprintf(stderr, "gsr error: gsr_dbus_desktop_portal_get_property: failed to add property_name\n");
+ dbus_message_unref(msg);
+ return false;
+ }
+
+ DBusPendingCall *pending = NULL;
+ if(!dbus_connection_send_with_reply(self->con, msg, &pending, -1) || !pending) { // -1 is default timeout
+ fprintf(stderr, "gsr error: gsr_dbus_desktop_portal_get_property: dbus_connection_send_with_reply failed\n");
+ dbus_message_unref(msg);
+ return false;
+ }
+ dbus_connection_flush(self->con);
+
+ //fprintf(stderr, "Request Sent\n");
+
+ dbus_message_unref(msg);
+ msg = NULL;
+
+ dbus_pending_call_block(pending);
+
+ msg = dbus_pending_call_steal_reply(pending);
+ if(!msg) {
+ fprintf(stderr, "gsr error: gsr_dbus_desktop_portal_get_property: dbus_pending_call_steal_reply failed\n");
+ dbus_pending_call_unref(pending);
+ dbus_message_unref(msg);
+ return false;
+ }
+
+ dbus_pending_call_unref(pending);
+ pending = NULL;
+
+ DBusMessageIter resp_args;
+ if(!dbus_message_iter_init(msg, &resp_args)) {
+ fprintf(stderr, "gsr error: gsr_dbus_desktop_portal_get_property: response message is missing arguments\n");
+ dbus_message_unref(msg);
+ return false;
+ } else if(DBUS_TYPE_UINT32 == dbus_message_iter_get_arg_type(&resp_args)) {
+ dbus_message_iter_get_basic(&resp_args, result);
+ } else if(DBUS_TYPE_VARIANT == dbus_message_iter_get_arg_type(&resp_args)) {
+ DBusMessageIter variant_iter;
+ dbus_message_iter_recurse(&resp_args, &variant_iter);
+
+ if(dbus_message_iter_get_arg_type(&variant_iter) == DBUS_TYPE_UINT32) {
+ dbus_message_iter_get_basic(&variant_iter, result);
+ } else {
+ fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: response message is not a variant with an uint32, %c\n", dbus_message_iter_get_arg_type(&variant_iter));
+ dbus_message_unref(msg);
+ return false;
+ }
+ } else {
+ fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: response message is not an uint32, %c\n", dbus_message_iter_get_arg_type(&resp_args));
+ dbus_message_unref(msg);
+ return false;
+ // TODO: Check dbus_error_is_set?
+ }
+
+ dbus_message_unref(msg);
+ return true;
+}
+
+/* Returns the org.freedesktop.portal.ScreenCast "version" property, fetching it
+   over dbus on first use and caching it in |self->screencast_version|.
+   Remains 0 if the property lookup fails (callers treat 0 as "too old"). */
+static uint32_t gsr_dbus_get_screencast_version_cached(gsr_dbus *self) {
+    if(self->screencast_version == 0)
+        gsr_dbus_desktop_portal_get_property(self, "org.freedesktop.portal.ScreenCast", "version", &self->screencast_version);
+    return self->screencast_version;
+}
+
+/* Adds the dbus match rule DESKTOP_PORTAL_SIGNAL_RULE to the connection (once per
+   gsr_dbus instance) so desktop portal Request "Response" signals can be received.
+   Returns false if the bus rejects the rule. */
+static bool gsr_dbus_ensure_desktop_portal_rule_added(gsr_dbus *self) {
+    if(self->desktop_portal_rule_added)
+        return true;
+
+    dbus_bus_add_match(self->con, DESKTOP_PORTAL_SIGNAL_RULE, &self->err);
+    dbus_connection_flush(self->con); /* make sure the AddMatch reaches the bus before we start waiting for signals */
+    if(dbus_error_is_set(&self->err)) {
+        fprintf(stderr, "gsr error: gsr_dbus_ensure_desktop_portal_rule_added: failed to add dbus rule %s, error: %s\n", DESKTOP_PORTAL_SIGNAL_RULE, self->err.message);
+        return false;
+    }
+    self->desktop_portal_rule_added = true;
+    return true;
+}
+
+/* Writes a request handle_token into |buffer|. |handle_counter| makes each call
+   unique within the process; |random_str| presumably distinguishes concurrent
+   processes — confirm where random_str is generated. */
+static void gsr_dbus_portal_get_unique_handle_token(gsr_dbus *self, char *buffer, int size) {
+    snprintf(buffer, size, "gpu_screen_recorder_handle_%s_%u", self->random_str, self->handle_counter++);
+}
+
+/* Writes the session_handle_token into |buffer|. Unlike the handle token above this
+   has no counter, so every call within one process run produces the same token. */
+static void gsr_dbus_portal_get_unique_session_token(gsr_dbus *self, char *buffer, int size) {
+    snprintf(buffer, size, "gpu_screen_recorder_session_%s", self->random_str);
+}
+
+/* Appends an a{sv} dict built from |entries| to the message iterator |it|.
+   Each entry becomes {string key -> variant(string|uint32|boolean)}.
+   On failure all partially-opened containers are abandoned and false is returned,
+   leaving |it| usable (the message should still be discarded by the caller). */
+static bool dbus_add_dict(DBusMessageIter *it, const dict_entry *entries, int num_entries) {
+    DBusMessageIter array_it;
+    if(!dbus_message_iter_open_container(it, DBUS_TYPE_ARRAY, "{sv}", &array_it))
+        return false;
+
+    for (int i = 0; i < num_entries; ++i) {
+        DBusMessageIter entry_it = DBUS_MESSAGE_ITER_INIT_CLOSED;
+        DBusMessageIter variant_it = DBUS_MESSAGE_ITER_INIT_CLOSED;
+
+        if(!dbus_message_iter_open_container(&array_it, DBUS_TYPE_DICT_ENTRY, NULL, &entry_it))
+            goto entry_err;
+
+        if(!dbus_message_iter_append_basic(&entry_it, DBUS_TYPE_STRING, &entries[i].key))
+            goto entry_err;
+
+        switch (entries[i].value_type) {
+            case DICT_TYPE_STRING: {
+                if(!dbus_message_iter_open_container(&entry_it, DBUS_TYPE_VARIANT, DBUS_TYPE_STRING_AS_STRING, &variant_it))
+                    goto entry_err;
+                if(!dbus_message_iter_append_basic(&variant_it, DBUS_TYPE_STRING, &entries[i].str))
+                    goto entry_err;
+                break;
+            }
+            case DICT_TYPE_UINT32: {
+                if(!dbus_message_iter_open_container(&entry_it, DBUS_TYPE_VARIANT, DBUS_TYPE_UINT32_AS_STRING, &variant_it))
+                    goto entry_err;
+                if(!dbus_message_iter_append_basic(&variant_it, DBUS_TYPE_UINT32, &entries[i].u32))
+                    goto entry_err;
+                break;
+            }
+            case DICT_TYPE_BOOL: {
+                if(!dbus_message_iter_open_container(&entry_it, DBUS_TYPE_VARIANT, DBUS_TYPE_BOOLEAN_AS_STRING, &variant_it))
+                    goto entry_err;
+                if(!dbus_message_iter_append_basic(&variant_it, DBUS_TYPE_BOOLEAN, &entries[i].boolean))
+                    goto entry_err;
+                break;
+            }
+        }
+
+        dbus_message_iter_close_container(&entry_it, &variant_it);
+        dbus_message_iter_close_container(&array_it, &entry_it);
+        continue;
+
+        entry_err:
+        /* Abandon each container with the iterator it was actually opened under:
+           |variant_it| was opened under |entry_it| (not |array_it|), |entry_it| under
+           |array_it| and |array_it| under |it|. */
+        dbus_message_iter_abandon_container_if_open(&entry_it, &variant_it);
+        dbus_message_iter_abandon_container_if_open(&array_it, &entry_it);
+        dbus_message_iter_abandon_container_if_open(it, &array_it);
+        return false;
+    }
+
+    return dbus_message_iter_close_container(it, &array_it);
+}
+
+/* Calls org.freedesktop.portal.ScreenCast.|method_name| and blocks until the method
+   reply arrives. |session_handle| (object path) and |parent_window| are appended
+   first when non-NULL, followed by an a{sv} options dict built from |entries|.
+   If the reply carries a unix fd it is stored in |resp_fd| (when non-NULL,
+   initialized to -1); an object path reply is accepted but currently unused.
+   Note: this only consumes the method reply — the portal "Response" signal that
+   carries the actual results is read separately by the callers. */
+static bool gsr_dbus_call_screencast_method(gsr_dbus *self, const char *method_name, const char *session_handle, const char *parent_window, const dict_entry *entries, int num_entries, int *resp_fd) {
+    if(resp_fd)
+        *resp_fd = -1;
+
+    if(!gsr_dbus_ensure_desktop_portal_rule_added(self))
+        return false;
+
+    DBusMessage *msg = dbus_message_new_method_call(
+        "org.freedesktop.portal.Desktop",    // target for the method call
+        "/org/freedesktop/portal/desktop",   // object to call on
+        "org.freedesktop.portal.ScreenCast", // interface to call on
+        method_name);                        // method name
+    if(!msg) {
+        fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: dbus_message_new_method_call failed\n");
+        return false;
+    }
+
+    DBusMessageIter it;
+    dbus_message_iter_init_append(msg, &it);
+
+    if(session_handle) {
+        if(!dbus_message_iter_append_basic(&it, DBUS_TYPE_OBJECT_PATH, &session_handle)) {
+            fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: failed to add session_handle\n");
+            dbus_message_unref(msg);
+            return false;
+        }
+    }
+
+    if(parent_window) {
+        if(!dbus_message_iter_append_basic(&it, DBUS_TYPE_STRING, &parent_window)) {
+            fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: failed to add parent_window\n");
+            dbus_message_unref(msg);
+            return false;
+        }
+    }
+
+    if(!dbus_add_dict(&it, entries, num_entries)) {
+        fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: failed to add dict\n");
+        dbus_message_unref(msg);
+        return false;
+    }
+
+    DBusPendingCall *pending = NULL;
+    if(!dbus_connection_send_with_reply(self->con, msg, &pending, -1) || !pending) { // -1 is default timeout
+        fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: dbus_connection_send_with_reply failed\n");
+        dbus_message_unref(msg);
+        return false;
+    }
+    dbus_connection_flush(self->con);
+
+    //fprintf(stderr, "Request Sent\n");
+
+    dbus_message_unref(msg);
+    msg = NULL;
+
+    dbus_pending_call_block(pending);
+
+    msg = dbus_pending_call_steal_reply(pending);
+    if(!msg) {
+        fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: dbus_pending_call_steal_reply failed\n");
+        dbus_pending_call_unref(pending);
+        /* |msg| is NULL here; dbus_message_unref must not be called with NULL
+           (unlike free(), libdbus warns/asserts on a NULL message). */
+        return false;
+    }
+
+    dbus_pending_call_unref(pending);
+    pending = NULL;
+
+    DBusMessageIter resp_args;
+    if(!dbus_message_iter_init(msg, &resp_args)) {
+        fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: response message is missing arguments\n");
+        dbus_message_unref(msg);
+        return false;
+    } else if (DBUS_TYPE_OBJECT_PATH == dbus_message_iter_get_arg_type(&resp_args)) {
+        /* Request object path; the "Response" signal is matched separately by the callers */
+        const char *res = NULL;
+        dbus_message_iter_get_basic(&resp_args, &res);
+    } else if(DBUS_TYPE_UNIX_FD == dbus_message_iter_get_arg_type(&resp_args)) {
+        int fd = -1;
+        dbus_message_iter_get_basic(&resp_args, &fd);
+
+        if(resp_fd)
+            *resp_fd = fd;
+    } else {
+        fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: response message is not an object path or unix fd\n");
+        dbus_message_unref(msg);
+        return false;
+        // TODO: Check dbus_error_is_set?
+    }
+
+    dbus_message_unref(msg);
+    return true;
+}
+
+/* Checks that the first argument of a desktop portal Response signal is the uint32
+   status code 0 (success). On success advances |resp_args| past the status so the
+   caller can read the results vardict that follows. */
+static bool gsr_dbus_response_status_ok(DBusMessageIter *resp_args) {
+    if(dbus_message_iter_get_arg_type(resp_args) != DBUS_TYPE_UINT32) {
+        fprintf(stderr, "gsr error: gsr_dbus_response_status_ok: missing uint32 in response\n");
+        return false;
+    }
+
+    dbus_uint32_t response_status = 0;
+    dbus_message_iter_get_basic(resp_args, &response_status);
+    if(response_status != 0) {
+        /* %u: dbus_uint32_t is unsigned */
+        fprintf(stderr, "gsr error: gsr_dbus_response_status_ok: got status: %u, expected 0\n", response_status);
+        return false;
+    }
+
+    dbus_message_iter_next(resp_args);
+    return true;
+}
+
+/* Linear search: returns the first entry whose key string-equals |key|, or NULL. */
+static dict_entry* find_dict_entry_by_key(dict_entry *entries, int num_entries, const char *key) {
+    dict_entry *const end = entries + num_entries;
+    for(dict_entry *entry = entries; entry != end; ++entry) {
+        if(strcmp(entry->key, key) == 0)
+            return entry;
+    }
+    return NULL;
+}
+
+/* Extracts the variant at |iter| into |entry|, requiring the variant's contained
+   type to match entry->value_type. String values are strdup'd (replacing and
+   freeing any previous entry->str); the caller owns and must free them.
+   Returns false on any type mismatch or allocation failure. */
+static bool gsr_dbus_get_variant_value(DBusMessageIter *iter, dict_entry *entry) {
+    if(dbus_message_iter_get_arg_type(iter) != DBUS_TYPE_VARIANT) {
+        fprintf(stderr, "gsr error: gsr_dbus_get_variant_value: value is not a variant\n");
+        return false;
+    }
+
+    DBusMessageIter variant_iter;
+    dbus_message_iter_recurse(iter, &variant_iter);
+
+    switch(dbus_message_iter_get_arg_type(&variant_iter)) {
+        case DBUS_TYPE_STRING: {
+            if(entry->value_type != DICT_TYPE_STRING) {
+                fprintf(stderr, "gsr error: gsr_dbus_get_variant_value: expected entry value to be a(n) %s was a string\n", dict_value_type_to_string(entry->value_type));
+                return false;
+            }
+
+            const char *value = NULL;
+            dbus_message_iter_get_basic(&variant_iter, &value);
+
+            if(!value) {
+                fprintf(stderr, "gsr error: gsr_dbus_get_variant_value: failed to get entry value as value\n");
+                return false;
+            }
+
+            /* Replace any previously extracted string (e.g. a duplicate key) */
+            if(entry->str) {
+                free(entry->str);
+                entry->str = NULL;
+            }
+
+            /* Copy: |value| points into the dbus message, which the caller unrefs */
+            entry->str = strdup(value);
+            if(!entry->str) {
+                fprintf(stderr, "gsr error: gsr_dbus_get_variant_value: failed to copy value\n");
+                return false;
+            }
+            return true;
+        }
+        case DBUS_TYPE_UINT32: {
+            if(entry->value_type != DICT_TYPE_UINT32) {
+                fprintf(stderr, "gsr error: gsr_dbus_get_variant_value: expected entry value to be a(n) %s was an uint32\n", dict_value_type_to_string(entry->value_type));
+                return false;
+            }
+
+            dbus_message_iter_get_basic(&variant_iter, &entry->u32);
+            return true;
+        }
+        case DBUS_TYPE_BOOLEAN: {
+            if(entry->value_type != DICT_TYPE_BOOL) {
+                fprintf(stderr, "gsr error: gsr_dbus_get_variant_value: expected entry value to be a(n) %s was a boolean\n", dict_value_type_to_string(entry->value_type));
+                return false;
+            }
+
+            dbus_message_iter_get_basic(&variant_iter, &entry->boolean);
+            return true;
+        }
+    }
+
+    fprintf(stderr, "gsr error: gsr_dbus_get_variant_value: got unexpected type, expected string, uint32 or boolean\n");
+    return false;
+}
+
+/*
+    Parses a{sv} into matching key entries in |entries|. Keys not present in
+    |entries| are skipped.
+    If the entry value is a string then it's allocated with malloc and is null-terminated
+    and has to be freed by the caller.
+    The entry values should be 0 before this method is called.
+    Any string values already extracted are free'd if this function fails.
+*/
+static bool gsr_dbus_get_map(DBusMessageIter *resp_args, dict_entry *entries, int num_entries) {
+    if(dbus_message_iter_get_arg_type(resp_args) != DBUS_TYPE_ARRAY) {
+        fprintf(stderr, "gsr error: gsr_dbus_get_map: missing array in response\n");
+        return false;
+    }
+
+    DBusMessageIter subiter;
+    dbus_message_iter_recurse(resp_args, &subiter);
+
+    while(dbus_message_iter_get_arg_type(&subiter) != DBUS_TYPE_INVALID) {
+        DBusMessageIter dictiter = DBUS_MESSAGE_ITER_INIT_CLOSED;
+        const char *key = NULL;
+        dict_entry *entry = NULL;
+
+        // fprintf(stderr, "    array element type: %c, %s\n",
+        //         dbus_message_iter_get_arg_type(&subiter),
+        //         dbus_message_iter_get_signature(&subiter));
+        if(dbus_message_iter_get_arg_type(&subiter) != DBUS_TYPE_DICT_ENTRY) {
+            fprintf(stderr, "gsr error: gsr_dbus_get_map: array value is not an entry\n");
+            /* goto error (not return) so strings extracted by earlier iterations are
+               freed, as the free-on-failure contract above promises. */
+            goto error;
+        }
+
+        dbus_message_iter_recurse(&subiter, &dictiter);
+
+        if(dbus_message_iter_get_arg_type(&dictiter) != DBUS_TYPE_STRING) {
+            fprintf(stderr, "gsr error: gsr_dbus_get_map: entry key is not a string\n");
+            goto error;
+        }
+
+        dbus_message_iter_get_basic(&dictiter, &key);
+        if(!key) {
+            fprintf(stderr, "gsr error: gsr_dbus_get_map: failed to get entry key as value\n");
+            goto error;
+        }
+
+        entry = find_dict_entry_by_key(entries, num_entries, key);
+        if(!entry) {
+            /* Key the caller didn't ask for; ignore it */
+            dbus_message_iter_next(&subiter);
+            continue;
+        }
+
+        if(!dbus_message_iter_next(&dictiter)) {
+            fprintf(stderr, "gsr error: gsr_dbus_get_map: missing entry value\n");
+            goto error;
+        }
+
+        if(!gsr_dbus_get_variant_value(&dictiter, entry))
+            goto error;
+
+        dbus_message_iter_next(&subiter);
+    }
+
+    return true;
+
+    error:
+    for(int i = 0; i < num_entries; ++i) {
+        if(entries[i].value_type == DICT_TYPE_STRING) {
+            free(entries[i].str);
+            entries[i].str = NULL;
+        }
+    }
+    return false;
+}
+
+/* Calls ScreenCast.CreateSession and blocks until the portal Request "Response"
+   signal arrives. On success stores the malloc'd session handle (object path) in
+   |*session_handle|; the caller owns and must free it. */
+bool gsr_dbus_screencast_create_session(gsr_dbus *self, char **session_handle) {
+    assert(session_handle);
+    *session_handle = NULL;
+
+    char handle_token[64];
+    gsr_dbus_portal_get_unique_handle_token(self, handle_token, sizeof(handle_token));
+
+    char session_handle_token[64];
+    gsr_dbus_portal_get_unique_session_token(self, session_handle_token, sizeof(session_handle_token));
+
+    dict_entry args[2];
+    args[0].key = "handle_token";
+    args[0].value_type = DICT_TYPE_STRING;
+    args[0].str = handle_token;
+
+    args[1].key = "session_handle_token";
+    args[1].value_type = DICT_TYPE_STRING;
+    args[1].str = session_handle_token;
+
+    if(!gsr_dbus_call_screencast_method(self, "CreateSession", NULL, NULL, args, 2, NULL))
+        return false;
+
+    DBusMessage *msg = NULL;
+
+    /* Block (poll + sleep) until the portal Response signal arrives; any other
+       message popped off the connection is discarded. */
+    for (;;) {
+        const int timeout_milliseconds = 1;
+        dbus_connection_read_write(self->con, timeout_milliseconds);
+        msg = dbus_connection_pop_message(self->con);
+
+        if(!msg) {
+            usleep(10 * 1000); /* 10 milliseconds */
+            continue;
+        }
+
+        if(!dbus_message_is_signal(msg, "org.freedesktop.portal.Request", "Response")) {
+            dbus_message_unref(msg);
+            continue;
+        }
+
+        break;
+    }
+
+    // TODO: Verify signal path matches |res|, maybe check the below
+    // DBUS_TYPE_ARRAY value?
+    //fprintf(stderr, "signature: %s, sender: %s\n", dbus_message_get_signature(msg), dbus_message_get_sender(msg));
+    DBusMessageIter resp_args;
+    if(!dbus_message_iter_init(msg, &resp_args)) {
+        fprintf(stderr, "gsr error: gsr_dbus_screencast_create_session: missing response\n");
+        dbus_message_unref(msg);
+        return false;
+    }
+
+    if(!gsr_dbus_response_status_ok(&resp_args)) {
+        dbus_message_unref(msg);
+        return false;
+    }
+
+    /* The Response results vardict carries the session handle as an object path string */
+    dict_entry entries[1];
+    entries[0].key = "session_handle";
+    entries[0].str = NULL;
+    entries[0].value_type = DICT_TYPE_STRING;
+    if(!gsr_dbus_get_map(&resp_args, entries, 1)) {
+        dbus_message_unref(msg);
+        return false;
+    }
+
+    if(!entries[0].str) {
+        fprintf(stderr, "gsr error: gsr_dbus_screencast_create_session: missing \"session_handle\" in response\n");
+        dbus_message_unref(msg);
+        return false;
+    }
+
+    /* Ownership of the strdup'd string transfers to the caller */
+    *session_handle = entries[0].str;
+    //fprintf(stderr, "session handle: |%s|\n", entries[0].str);
+    //free(entries[0].str);
+
+    dbus_message_unref(msg);
+    return true;
+}
+
+/* Calls ScreenCast.SelectSources on |session_handle| and blocks until the portal
+   Request "Response" signal arrives. When the compositor implements ScreenCast
+   version >= 4, persist_mode is requested and a previously saved restore token is
+   sent (with a one-shot retry without the token if the call fails). */
+bool gsr_dbus_screencast_select_sources(gsr_dbus *self, const char *session_handle, gsr_portal_capture_type capture_type, gsr_portal_cursor_mode cursor_mode) {
+    assert(session_handle);
+
+    char handle_token[64];
+    gsr_dbus_portal_get_unique_handle_token(self, handle_token, sizeof(handle_token));
+
+    int num_arg_dict = 4;
+    dict_entry args[6];
+    args[0].key = "types";
+    args[0].value_type = DICT_TYPE_UINT32;
+    args[0].u32 = capture_type;
+
+    args[1].key = "multiple";
+    args[1].value_type = DICT_TYPE_BOOL;
+    args[1].boolean = false; /* TODO: Wayland ignores this and still gives the option to select multiple sources. Support that case.. */
+
+    args[2].key = "handle_token";
+    args[2].value_type = DICT_TYPE_STRING;
+    args[2].str = handle_token;
+
+    args[3].key = "cursor_mode";
+    args[3].value_type = DICT_TYPE_UINT32;
+    args[3].u32 = cursor_mode;
+
+    const int screencast_server_version = gsr_dbus_get_screencast_version_cached(self);
+    if(screencast_server_version >= 4) {
+        num_arg_dict = 5;
+        args[4].key = "persist_mode";
+        args[4].value_type = DICT_TYPE_UINT32;
+        args[4].u32 = 2; /* persist until explicitly revoked */
+
+        if(self->screencast_restore_token && self->screencast_restore_token[0]) {
+            num_arg_dict = 6;
+
+            args[5].key = "restore_token";
+            args[5].value_type = DICT_TYPE_STRING;
+            args[5].str = self->screencast_restore_token;
+        }
+    } else if(self->screencast_restore_token && self->screencast_restore_token[0]) {
+        fprintf(stderr, "gsr warning: gsr_dbus_screencast_select_sources: tried to use restore token but this option is only available in screencast version >= 4, your wayland compositors screencast version is %d\n", screencast_server_version);
+    }
+
+    if(!gsr_dbus_call_screencast_method(self, "SelectSources", session_handle, NULL, args, num_arg_dict, NULL)) {
+        if(num_arg_dict == 6) {
+            /* We dont know what the error exactly is but assume it may be because of invalid restore token. In that case try without restore token */
+            fprintf(stderr, "gsr warning: gsr_dbus_screencast_select_sources: SelectSources failed, retrying without restore_token\n");
+            num_arg_dict = 5;
+            if(!gsr_dbus_call_screencast_method(self, "SelectSources", session_handle, NULL, args, num_arg_dict, NULL))
+                return false;
+        } else {
+            return false;
+        }
+    }
+
+    DBusMessage *msg = NULL;
+
+    /* Block until the portal Response signal arrives; unrelated messages are discarded */
+    for (;;) {
+        const int timeout_milliseconds = 1;
+        dbus_connection_read_write(self->con, timeout_milliseconds);
+        msg = dbus_connection_pop_message(self->con);
+
+        if(!msg) {
+            usleep(10 * 1000); /* 10 milliseconds */
+            continue;
+        }
+
+        if(!dbus_message_is_signal(msg, "org.freedesktop.portal.Request", "Response")) {
+            dbus_message_unref(msg);
+            continue;
+        }
+
+        break;
+    }
+
+    // TODO: Verify signal path matches |res|, maybe check the below
+    //fprintf(stderr, "signature: %s, sender: %s\n", dbus_message_get_signature(msg), dbus_message_get_sender(msg));
+    DBusMessageIter resp_args;
+    if(!dbus_message_iter_init(msg, &resp_args)) {
+        /* was mislabeled gsr_dbus_screencast_create_session (copy-paste) */
+        fprintf(stderr, "gsr error: gsr_dbus_screencast_select_sources: missing response\n");
+        dbus_message_unref(msg);
+        return false;
+    }
+
+    if(!gsr_dbus_response_status_ok(&resp_args)) {
+        dbus_message_unref(msg);
+        return false;
+    }
+
+    dbus_message_unref(msg);
+    return true;
+}
+
+/* Returns the pipewire node id (the first uint32 field) of the stream struct at the
+   start of the array that |iter| points at, or 0 on unexpected types.
+   NOTE(review): this function only recurses and never advances any iterator, so
+   repeated calls on the same |iter| appear to read the same first stream each time —
+   the "skip all but the last stream" loop in gsr_dbus_screencast_start looks
+   ineffective. Verify against a compositor that returns multiple streams (e.g. kde). */
+static dbus_uint32_t screencast_stream_get_pipewire_node(DBusMessageIter *iter) {
+    DBusMessageIter subiter;
+    dbus_message_iter_recurse(iter, &subiter);
+
+    if(dbus_message_iter_get_arg_type(&subiter) == DBUS_TYPE_STRUCT) {
+        DBusMessageIter structiter;
+        dbus_message_iter_recurse(&subiter, &structiter);
+
+        if(dbus_message_iter_get_arg_type(&structiter) == DBUS_TYPE_UINT32) {
+            dbus_uint32_t data = 0;
+            dbus_message_iter_get_basic(&structiter, &data);
+            return data;
+        }
+    }
+
+    return 0;
+}
+
+/* Calls ScreenCast.Start on |session_handle|, blocks for the portal Response signal
+   and parses its results vardict: saves any "restore_token" into |self| (replacing
+   the previous one) and stores the pipewire node id of the selected stream in
+   |*pipewire_node|. Returns false if the response is malformed or no node is found. */
+bool gsr_dbus_screencast_start(gsr_dbus *self, const char *session_handle, uint32_t *pipewire_node) {
+    assert(session_handle);
+    *pipewire_node = 0;
+
+    char handle_token[64];
+    gsr_dbus_portal_get_unique_handle_token(self, handle_token, sizeof(handle_token));
+
+    dict_entry args[1];
+    args[0].key = "handle_token";
+    args[0].value_type = DICT_TYPE_STRING;
+    args[0].str = handle_token;
+
+    /* "" parent_window = no parent; the portal shows its own dialog */
+    if(!gsr_dbus_call_screencast_method(self, "Start", session_handle, "", args, 1, NULL))
+        return false;
+
+    DBusMessage *msg = NULL;
+
+    /* Block until the portal Response signal arrives; unrelated messages are discarded */
+    for (;;) {
+        const int timeout_milliseconds = 1;
+        dbus_connection_read_write(self->con, timeout_milliseconds);
+        msg = dbus_connection_pop_message(self->con);
+
+        if(!msg) {
+            usleep(10 * 1000); /* 10 milliseconds */
+            continue;
+        }
+
+        if(!dbus_message_is_signal(msg, "org.freedesktop.portal.Request", "Response")) {
+            dbus_message_unref(msg);
+            continue;
+        }
+
+        break;
+    }
+
+    // TODO: Verify signal path matches |res|, maybe check the below
+    //fprintf(stderr, "signature: %s, sender: %s\n", dbus_message_get_signature(msg), dbus_message_get_sender(msg));
+    DBusMessageIter resp_args;
+    if(!dbus_message_iter_init(msg, &resp_args)) {
+        fprintf(stderr, "gsr error: gsr_dbus_screencast_start: missing response\n");
+        dbus_message_unref(msg);
+        return false;
+    }
+
+    if(!gsr_dbus_response_status_ok(&resp_args)) {
+        dbus_message_unref(msg);
+        return false;
+    }
+
+    if(dbus_message_iter_get_arg_type(&resp_args) != DBUS_TYPE_ARRAY) {
+        fprintf(stderr, "gsr error: gsr_dbus_screencast_start: missing array in response\n");
+        dbus_message_unref(msg);
+        return false;
+    }
+
+    /* Walk the results vardict manually (gsr_dbus_get_map can't handle the nested
+       "streams" array of structs) looking for "restore_token" and "streams". */
+    DBusMessageIter subiter;
+    dbus_message_iter_recurse(&resp_args, &subiter);
+
+    while(dbus_message_iter_get_arg_type(&subiter) != DBUS_TYPE_INVALID) {
+        DBusMessageIter dictiter = DBUS_MESSAGE_ITER_INIT_CLOSED;
+        const char *key = NULL;
+
+        // fprintf(stderr, "    array element type: %c, %s\n",
+        //         dbus_message_iter_get_arg_type(&subiter),
+        //         dbus_message_iter_get_signature(&subiter));
+        if(dbus_message_iter_get_arg_type(&subiter) != DBUS_TYPE_DICT_ENTRY) {
+            fprintf(stderr, "gsr error: gsr_dbus_screencast_start: array value is not an entry\n");
+            goto error;
+        }
+
+        dbus_message_iter_recurse(&subiter, &dictiter);
+
+        if(dbus_message_iter_get_arg_type(&dictiter) != DBUS_TYPE_STRING) {
+            fprintf(stderr, "gsr error: gsr_dbus_screencast_start: entry key is not a string\n");
+            goto error;
+        }
+
+        dbus_message_iter_get_basic(&dictiter, &key);
+        if(!key) {
+            fprintf(stderr, "gsr error: gsr_dbus_screencast_start: failed to get entry key as value\n");
+            goto error;
+        }
+
+        if(strcmp(key, "restore_token") == 0) {
+            if(!dbus_message_iter_next(&dictiter)) {
+                fprintf(stderr, "gsr error: gsr_dbus_screencast_start: missing restore_token value\n");
+                goto error;
+            }
+
+            if(dbus_message_iter_get_arg_type(&dictiter) != DBUS_TYPE_VARIANT) {
+                fprintf(stderr, "gsr error: gsr_dbus_screencast_start: restore_token is not a variant\n");
+                goto error;
+            }
+
+            DBusMessageIter variant_iter;
+            dbus_message_iter_recurse(&dictiter, &variant_iter);
+
+            if(dbus_message_iter_get_arg_type(&variant_iter) != DBUS_TYPE_STRING) {
+                fprintf(stderr, "gsr error: gsr_dbus_screencast_start: restore_token is not a string\n");
+                goto error;
+            }
+
+            char *restore_token_str = NULL;
+            dbus_message_iter_get_basic(&variant_iter, &restore_token_str);
+
+            if(restore_token_str) {
+                /* Copy the token (the message is unref'd below), replacing any old one */
+                if(self->screencast_restore_token) {
+                    free(self->screencast_restore_token);
+                    self->screencast_restore_token = NULL;
+                }
+                self->screencast_restore_token = strdup(restore_token_str);
+                //fprintf(stderr, "got restore token: %s\n", self->screencast_restore_token);
+            }
+        } else if(strcmp(key, "streams") == 0) {
+            if(!dbus_message_iter_next(&dictiter)) {
+                fprintf(stderr, "gsr error: gsr_dbus_screencast_start: missing streams value\n");
+                goto error;
+            }
+
+            if(dbus_message_iter_get_arg_type(&dictiter) != DBUS_TYPE_VARIANT) {
+                fprintf(stderr, "gsr error: gsr_dbus_screencast_start: streams value is not a variant\n");
+                goto error;
+            }
+
+            DBusMessageIter variant_iter;
+            dbus_message_iter_recurse(&dictiter, &variant_iter);
+
+            if(dbus_message_iter_get_arg_type(&variant_iter) != DBUS_TYPE_ARRAY) {
+                fprintf(stderr, "gsr error: gsr_dbus_screencast_start: streams value is not an array\n");
+                goto error;
+            }
+
+            int num_streams = dbus_message_iter_get_element_count(&variant_iter);
+            //fprintf(stderr, "num streams: %d\n", num_streams);
+            /* Skip over all streams except the last one, since kde can return multiple streams even if only 1 is requested. The last one is the valid one */
+            /* NOTE(review): screencast_stream_get_pipewire_node never advances
+               |variant_iter|, so this skip loop appears to re-read the first stream
+               each time — verify with a compositor that returns multiple streams. */
+            for(int i = 0; i < num_streams - 1; ++i) {
+                screencast_stream_get_pipewire_node(&variant_iter);
+            }
+
+            if(num_streams > 0) {
+                *pipewire_node = screencast_stream_get_pipewire_node(&variant_iter);
+                //fprintf(stderr, "pipewire node: %u\n", *pipewire_node);
+            }
+        }
+
+        dbus_message_iter_next(&subiter);
+    }
+
+    if(*pipewire_node == 0) {
+        fprintf(stderr, "gsr error: gsr_dbus_screencast_start: no pipewire node returned\n");
+        dbus_message_unref(msg);
+        return false;
+    }
+
+    dbus_message_unref(msg);
+    return true;
+
+    error:
+    dbus_message_unref(msg);
+    return false;
+}
+
+/* Calls ScreenCast.OpenPipeWireRemote for |session_handle| and returns the pipewire
+   file descriptor from the reply in |*pipewire_fd| (-1 on failure).
+   The caller owns the fd and must close() it. */
+bool gsr_dbus_screencast_open_pipewire_remote(gsr_dbus *self, const char *session_handle, int *pipewire_fd) {
+    assert(session_handle);
+    *pipewire_fd = -1;
+
+    /* OpenPipeWireRemote takes an empty options dict; pass no entries instead of an
+       uninitialized dummy array (dbus_add_dict never reads entries when count is 0). */
+    return gsr_dbus_call_screencast_method(self, "OpenPipeWireRemote", session_handle, NULL, NULL, 0, pipewire_fd);
+}
+
+/* Returns the restore token received from the last ScreenCast Start response, or
+   NULL if none was received. The string is owned by |self|; do not free it. */
+const char* gsr_dbus_screencast_get_restore_token(gsr_dbus *self) {
+    return self->screencast_restore_token;
+}
diff --git a/src/egl.c b/src/egl.c
index ec9ad07..c3464b6 100644
--- a/src/egl.c
+++ b/src/egl.c
@@ -134,21 +134,21 @@ static void reset_cap_nice(void) {
cap_free(caps);
}
-#define GLX_DRAWABLE_TYPE 0x8010
-#define GLX_RENDER_TYPE 0x8011
-#define GLX_RGBA_BIT 0x00000001
-#define GLX_WINDOW_BIT 0x00000001
-#define GLX_PIXMAP_BIT 0x00000002
+#define GLX_DRAWABLE_TYPE 0x8010
+#define GLX_RENDER_TYPE 0x8011
+#define GLX_RGBA_BIT 0x00000001
+#define GLX_WINDOW_BIT 0x00000001
+#define GLX_PIXMAP_BIT 0x00000002
#define GLX_BIND_TO_TEXTURE_RGBA_EXT 0x20D1
#define GLX_BIND_TO_TEXTURE_TARGETS_EXT 0x20D3
#define GLX_TEXTURE_2D_BIT_EXT 0x00000002
-#define GLX_DOUBLEBUFFER 5
-#define GLX_RED_SIZE 8
-#define GLX_GREEN_SIZE 9
-#define GLX_BLUE_SIZE 10
-#define GLX_ALPHA_SIZE 11
-#define GLX_DEPTH_SIZE 12
-#define GLX_RGBA_TYPE 0x8014
+#define GLX_DOUBLEBUFFER 5
+#define GLX_RED_SIZE 8
+#define GLX_GREEN_SIZE 9
+#define GLX_BLUE_SIZE 10
+#define GLX_ALPHA_SIZE 11
+#define GLX_DEPTH_SIZE 12
+#define GLX_RGBA_TYPE 0x8014
#define GLX_CONTEXT_PRIORITY_LEVEL_EXT 0x3100
#define GLX_CONTEXT_PRIORITY_HIGH_EXT 0x3101
@@ -417,6 +417,7 @@ static bool gsr_egl_load_gl(gsr_egl *self, void *library) {
{ (void**)&self->glTexParameteriv, "glTexParameteriv" },
{ (void**)&self->glGetTexLevelParameteriv, "glGetTexLevelParameteriv" },
{ (void**)&self->glTexImage2D, "glTexImage2D" },
+ { (void**)&self->glTexSubImage2D, "glTexSubImage2D" },
{ (void**)&self->glCopyImageSubData, "glCopyImageSubData" },
{ (void**)&self->glGetTexImage, "glGetTexImage" },
{ (void**)&self->glClearTexImage, "glClearTexImage" },
diff --git a/src/encoder/video/vaapi.c b/src/encoder/video/vaapi.c
index 2df140d..579aa93 100644
--- a/src/encoder/video/vaapi.c
+++ b/src/encoder/video/vaapi.c
@@ -100,12 +100,12 @@ static bool gsr_video_encoder_vaapi_setup_textures(gsr_video_encoder_vaapi *self
const uint64_t modifier = self->prime.objects[self->prime.layers[layer].object_index[plane]].drm_format_modifier;
const intptr_t img_attr[] = {
- EGL_LINUX_DRM_FOURCC_EXT, formats[i],
- EGL_WIDTH, self->prime.width / div[i],
- EGL_HEIGHT, self->prime.height / div[i],
- EGL_DMA_BUF_PLANE0_FD_EXT, self->prime.objects[self->prime.layers[layer].object_index[plane]].fd,
- EGL_DMA_BUF_PLANE0_OFFSET_EXT, self->prime.layers[layer].offset[plane],
- EGL_DMA_BUF_PLANE0_PITCH_EXT, self->prime.layers[layer].pitch[plane],
+ EGL_LINUX_DRM_FOURCC_EXT, formats[i],
+ EGL_WIDTH, self->prime.width / div[i],
+ EGL_HEIGHT, self->prime.height / div[i],
+ EGL_DMA_BUF_PLANE0_FD_EXT, self->prime.objects[self->prime.layers[layer].object_index[plane]].fd,
+ EGL_DMA_BUF_PLANE0_OFFSET_EXT, self->prime.layers[layer].offset[plane],
+ EGL_DMA_BUF_PLANE0_PITCH_EXT, self->prime.layers[layer].pitch[plane],
EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT, modifier & 0xFFFFFFFFULL,
EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT, modifier >> 32ULL,
EGL_NONE
diff --git a/src/main.cpp b/src/main.cpp
index 112a6ac..3d577b4 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -2,6 +2,9 @@ extern "C" {
#include "../include/capture/nvfbc.h"
#include "../include/capture/xcomposite.h"
#include "../include/capture/kms.h"
+#ifdef GSR_PORTAL
+#include "../include/capture/portal.h"
+#endif
#include "../include/encoder/video/cuda.h"
#include "../include/encoder/video/vaapi.h"
#include "../include/encoder/video/software.h"
@@ -999,17 +1002,21 @@ static void open_video_hardware(AVCodecContext *codec_context, VideoQuality vide
static void usage_header() {
const bool inside_flatpak = getenv("FLATPAK_ID") != NULL;
const char *program_name = inside_flatpak ? "flatpak run --command=gpu-screen-recorder com.dec05eba.gpu_screen_recorder" : "gpu-screen-recorder";
- fprintf(stderr, "usage: %s -w <window_id|monitor|focused> [-c <container_format>] [-s WxH] -f <fps> [-a <audio_input>] [-q <quality>] [-r <replay_buffer_size_sec>] [-k h264|hevc|hevc_hdr|av1|av1_hdr|vp8|vp9] [-ac aac|opus|flac] [-ab <bitrate>] [-oc yes|no] [-fm cfr|vfr|content] [-cr limited|full] [-mf yes|no] [-sc <script_path>] [-cursor yes|no] [-keyint <value>] [-encoder gpu|cpu] [-o <output_file>] [-v yes|no] [-h|--help]\n", program_name);
+ fprintf(stderr, "usage: %s -w <window_id|monitor|focused|portal> [-c <container_format>] [-s WxH] -f <fps> [-a <audio_input>] [-q <quality>] [-r <replay_buffer_size_sec>] [-k h264|hevc|hevc_hdr|av1|av1_hdr|vp8|vp9] [-ac aac|opus|flac] [-ab <bitrate>] [-oc yes|no] [-fm cfr|vfr|content] [-cr limited|full] [-mf yes|no] [-sc <script_path>] [-cursor yes|no] [-keyint <value>] [-restore-portal-session yes|no] [-encoder gpu|cpu] [-o <output_file>] [-v yes|no] [-h|--help]\n", program_name);
}
+// TODO: Update with portal info
static void usage_full() {
const bool inside_flatpak = getenv("FLATPAK_ID") != NULL;
const char *program_name = inside_flatpak ? "flatpak run --command=gpu-screen-recorder com.dec05eba.gpu_screen_recorder" : "gpu-screen-recorder";
usage_header();
fprintf(stderr, "\n");
fprintf(stderr, "OPTIONS:\n");
- fprintf(stderr, " -w Window id to record, a display (monitor name), \"screen\", \"screen-direct-force\" or \"focused\".\n");
- fprintf(stderr, " If this is \"screen\" or \"screen-direct-force\" then all monitors are recorded.\n");
+ fprintf(stderr, " -w Window id to record, a display (monitor name), \"screen\", \"screen-direct-force\", \"focused\" or \"portal\".\n");
+ fprintf(stderr, " If this is \"portal\" then xdg desktop screencast portal with pipewire will be used. This is in general only available on Wayland.\n");
+ fprintf(stderr, " If you select to save the session (token) in the desktop portal capture popup then the session will be saved for the next time you use \"portal\",\n");
+ fprintf(stderr, " but the session will be ignored unless you run GPU Screen Recorder with the '-restore-portal-session yes' option.\n");
+ fprintf(stderr, " If this is \"screen\" or \"screen-direct-force\" then all monitors are recorded on Nvidia X11. On AMD/Intel or wayland \"screen\" will record the first monitor found.\n");
fprintf(stderr, " \"screen-direct-force\" is not recommended unless you use a VRR (G-SYNC) monitor on Nvidia X11 and you are aware that using this option can cause games to freeze/crash or other issues because of Nvidia driver issues.\n");
fprintf(stderr, " \"screen-direct-force\" option is only available on Nvidia X11. VRR works without this option on other systems.\n");
fprintf(stderr, "\n");
@@ -1037,12 +1044,12 @@ static void usage_full() {
fprintf(stderr, " and the video will only be saved when the gpu-screen-recorder is closed. This feature is similar to Nvidia's instant replay feature.\n");
fprintf(stderr, " This option has be between 5 and 1200. Note that the replay buffer size will not always be precise, because of keyframes. Optional, disabled by default.\n");
fprintf(stderr, "\n");
- fprintf(stderr, " -k Video codec to use. Should be either 'auto', 'h264', 'hevc', 'av1', 'hevc_hdr', 'av1_hdr', 'vp8' or 'vp9'. Optional, defaults to 'auto' which defaults to 'h264'.\n");
+ fprintf(stderr, " -k Video codec to use. Should be either 'auto', 'h264', 'hevc', 'av1', 'hevc_hdr', 'av1_hdr', 'vp8' or 'vp9'. Optional, set to 'auto' by default which defaults to 'h264'.\n");
fprintf(stderr, " Forcefully set to 'h264' if the file container type is 'flv'.\n");
fprintf(stderr, " 'hevc_hdr' and 'av1_hdr' option is not available on X11.\n");
fprintf(stderr, " Note: hdr metadata is not included in the video when recording with 'hevc_hdr'/'av1_hdr' because of bugs in AMD, Intel and NVIDIA drivers (amazin', they are all bugged).\n");
fprintf(stderr, "\n");
- fprintf(stderr, " -ac Audio codec to use. Should be either 'aac', 'opus' or 'flac'. Defaults to 'opus' for .mp4/.mkv files, otherwise defaults to 'aac'.\n");
+ fprintf(stderr, " -ac Audio codec to use. Should be either 'aac', 'opus' or 'flac'. Optional, set to 'opus' for .mp4/.mkv files, otherwise set to 'aac'.\n");
fprintf(stderr, " 'opus' and 'flac' is only supported by .mp4/.mkv files. 'opus' is recommended for best performance and smallest audio size.\n");
fprintf(stderr, " Flac audio codec is option is disable at the moment because of a temporary issue.\n");
fprintf(stderr, "\n");
@@ -1053,11 +1060,11 @@ static void usage_full() {
fprintf(stderr, " is dropped when you record a game. Only needed if you are recording a game that is bottlenecked by GPU. The same issue exists on Wayland but overclocking is not possible on Wayland.\n");
fprintf(stderr, " Works only if your have \"Coolbits\" set to \"12\" in NVIDIA X settings, see README for more information. Note! use at your own risk! Optional, disabled by default.\n");
fprintf(stderr, "\n");
- fprintf(stderr, " -fm Framerate mode. Should be either 'cfr' (constant frame rate), 'vfr' (variable frame rate) or 'content'. Defaults to 'vfr'.\n");
+ fprintf(stderr, " -fm Framerate mode. Should be either 'cfr' (constant frame rate), 'vfr' (variable frame rate) or 'content'. Optional, set to 'vfr' by default.\n");
fprintf(stderr, " 'vfr' is recommended for recording for less issue with very high system load but some applications such as video editors may not support it properly.\n");
fprintf(stderr, " 'content' is currently only supported when recording a single window, on X11. The 'content' option matches the recording frame rate to the captured content.\n");
fprintf(stderr, "\n");
- fprintf(stderr, " -cr Color range. Should be either 'limited' (aka mpeg) or 'full' (aka jpeg). Defaults to 'limited'.\n");
+ fprintf(stderr, " -cr Color range. Should be either 'limited' (aka mpeg) or 'full' (aka jpeg). Optional, set to 'limited' by default.\n");
fprintf(stderr, " Limited color range means that colors are in range 16-235 (4112-60395 for hdr) while full color range means that colors are in range 0-255 (0-65535 for hdr).\n");
fprintf(stderr, " Note that some buggy video players (such as vlc) are unable to correctly display videos in full color range.\n");
fprintf(stderr, "\n");
@@ -1067,12 +1074,18 @@ static void usage_full() {
fprintf(stderr, " Not applicable for live streams.\n");
fprintf(stderr, "\n");
fprintf(stderr, " -cursor\n");
- fprintf(stderr, " Record cursor. Defaults to 'yes'.\n");
+ fprintf(stderr, " Record cursor. Optional, set to 'yes' by default.\n");
+ fprintf(stderr, "\n");
fprintf(stderr, " -keyint\n");
fprintf(stderr, " Specifies the keyframe interval in seconds, the max amount of time to wait to generate a keyframe. Keyframes can be generated more often than this.\n");
fprintf(stderr, " This also affects seeking in the video and may affect how the replay video is cut. If this is set to 10 for example then you can only seek in 10-second chunks in the video.\n");
fprintf(stderr, " Setting this to a higher value reduces the video file size if you are ok with the previously described downside. This option is expected to be a floating point number.\n");
fprintf(stderr, " By default this value is set to 2.0.\n");
+ fprintf(stderr, "\n");
+ fprintf(stderr, " -restore-portal-session\n");
+ fprintf(stderr, " If GPU Screen Recorder should use the same capture option as the last time. Using this option removes the popup asking what you want to record the next time you record with '-w portal' if you selected the option to save session (token) in the desktop portal screencast popup.\n");
+ fprintf(stderr, " This option may not be supported by all Wayland compositors. Optional, set to 'no' by default.\n");
+ fprintf(stderr, "\n");
fprintf(stderr, " -encoder\n");
fprintf(stderr, " Which device should be used for video encoding. Should either be 'gpu' or 'cpu'. Does currently only work with h264 codec option (-k).\n");
fprintf(stderr, " Optional, set to 'gpu' by default.\n");
@@ -1080,7 +1093,7 @@ static void usage_full() {
fprintf(stderr, " --list-supported-video-codecs\n");
fprintf(stderr, " List supported video codecs and exits. Prints h264, hevc, hevc_hdr, av1 and av1_hdr (if supported).\n");
fprintf(stderr, "\n");
- //fprintf(stderr, " -pixfmt The pixel format to use for the output video. yuv420 is the most common format and is best supported, but the color is compressed, so colors can look washed out and certain colors of text can look bad. Use yuv444 for no color compression, but the video may not work everywhere and it may not work with hardware video decoding. Optional, defaults to yuv420\n");
+ //fprintf(stderr, " -pixfmt The pixel format to use for the output video. yuv420 is the most common format and is best supported, but the color is compressed, so colors can look washed out and certain colors of text can look bad. Use yuv444 for no color compression, but the video may not work everywhere and it may not work with hardware video decoding. Optional, set to 'yuv420' by default\n");
fprintf(stderr, " -o The output file path. If omitted then the encoded data is sent to stdout. Required in replay mode (when using -r).\n");
fprintf(stderr, " In replay mode this has to be a directory instead of a file.\n");
fprintf(stderr, " The directory to the file is created (recursively) if it doesn't already exist.\n");
@@ -1099,6 +1112,8 @@ static void usage_full() {
fprintf(stderr, " %s -w screen -f 60 -a \"$(pactl get-default-sink).monitor\" -o \"$HOME/Videos/video.mp4\"\n", program_name);
fprintf(stderr, " %s -w screen -f 60 -a \"$(pactl get-default-sink).monitor|$(pactl get-default-source)\" -o \"$HOME/Videos/video.mp4\"\n", program_name);
fprintf(stderr, " %s -w screen -f 60 -a \"$(pactl get-default-sink).monitor\" -c mkv -r 60 -o \"$HOME/Videos\"\n", program_name);
+ fprintf(stderr, " %s -w screen -f 60 -a \"$(pactl get-default-sink).monitor\" -c mkv -sc script.sh -r 60 -o \"$HOME/Videos\"\n", program_name);
+ fprintf(stderr, " %s -w portal -f 60 -a \"$(pactl get-default-sink).monitor\" -restore-portal-session yes -o \"$HOME/Videos/video.mp4\"\n", program_name);
//fprintf(stderr, " gpu-screen-recorder -w screen -f 60 -q ultra -pixfmt yuv444 -o video.mp4\n");
_exit(1);
}
@@ -1273,38 +1288,6 @@ static std::future<void> save_replay_thread;
static std::vector<std::shared_ptr<PacketData>> save_replay_packets;
static std::string save_replay_output_filepath;
-static int create_directory_recursive(char *path) {
- int path_len = strlen(path);
- char *p = path;
- char *end = path + path_len;
- for(;;) {
- char *slash_p = strchr(p, '/');
-
- // Skips first '/', we don't want to try and create the root directory
- if(slash_p == path) {
- ++p;
- continue;
- }
-
- if(!slash_p)
- slash_p = end;
-
- char prev_char = *slash_p;
- *slash_p = '\0';
- int err = mkdir(path, S_IRWXU);
- *slash_p = prev_char;
-
- if(err == -1 && errno != EEXIST)
- return err;
-
- if(slash_p == end)
- break;
- else
- p = slash_p + 1;
- }
- return 0;
-}
-
static void save_replay_async(AVCodecContext *video_codec_context, int video_stream_index, std::vector<AudioTrack> &audio_tracks, std::deque<std::shared_ptr<PacketData>> &frame_data_queue, bool frames_erased, std::string output_dir, const char *container_format, const std::string &file_extension, std::mutex &write_output_mutex, bool make_folders) {
if(save_replay_thread.valid())
return;
@@ -1629,11 +1612,10 @@ static void list_supported_video_codecs() {
_exit(1);
}
- char card_path[128];
- card_path[0] = '\0';
+ egl.card_path[0] = '\0';
if(wayland || egl.gpu_info.vendor != GSR_GPU_VENDOR_NVIDIA) {
// TODO: Allow specifying another card, and in other places
- if(!gsr_get_valid_card_path(&egl, card_path, false)) {
+ if(!gsr_get_valid_card_path(&egl, egl.card_path, false)) {
fprintf(stderr, "Error: no /dev/dri/cardX device found. If you are running GPU Screen Recorder with prime-run then try running without it. Also make sure that you have at least one connected monitor or record a single window instead on X11\n");
_exit(2);
}
@@ -1642,15 +1624,15 @@ static void list_supported_video_codecs() {
av_log_set_level(AV_LOG_FATAL);
// TODO: Output hdr
- if(find_h264_encoder(egl.gpu_info.vendor, card_path))
+ if(find_h264_encoder(egl.gpu_info.vendor, egl.card_path))
puts("h264");
- if(find_hevc_encoder(egl.gpu_info.vendor, card_path))
+ if(find_hevc_encoder(egl.gpu_info.vendor, egl.card_path))
puts("hevc");
- if(find_av1_encoder(egl.gpu_info.vendor, card_path))
+ if(find_av1_encoder(egl.gpu_info.vendor, egl.card_path))
puts("av1");
- if(find_vp8_encoder(egl.gpu_info.vendor, card_path))
+ if(find_vp8_encoder(egl.gpu_info.vendor, egl.card_path))
puts("vp8");
- if(find_vp9_encoder(egl.gpu_info.vendor, card_path))
+ if(find_vp9_encoder(egl.gpu_info.vendor, egl.card_path))
puts("vp9");
fflush(stdout);
@@ -1660,7 +1642,7 @@ static void list_supported_video_codecs() {
XCloseDisplay(dpy);
}
-static gsr_capture* create_capture_impl(const char *window_str, const char *screen_region, bool wayland, gsr_egl *egl, int fps, bool overclock, VideoCodec video_codec, gsr_color_range color_range, bool record_cursor, bool track_damage, bool use_software_video_encoder) {
+static gsr_capture* create_capture_impl(const char *window_str, const char *screen_region, bool wayland, gsr_egl *egl, int fps, bool overclock, VideoCodec video_codec, gsr_color_range color_range, bool record_cursor, bool track_damage, bool use_software_video_encoder, bool restore_portal_session) {
vec2i region_size = { 0, 0 };
Window src_window_id = None;
bool follow_focused = false;
@@ -1688,6 +1670,25 @@ static gsr_capture* create_capture_impl(const char *window_str, const char *scre
}
follow_focused = true;
+ } else if(strcmp(window_str, "portal") == 0) {
+#ifdef GSR_PORTAL
+ if(video_codec_is_hdr(video_codec)) {
+ fprintf(stderr, "Warning: portal capture option doesn't support hdr yet (pipewire doesn't support hdr)\n");
+ }
+
+ gsr_capture_portal_params portal_params;
+ portal_params.egl = egl;
+ portal_params.hdr = video_codec_is_hdr(video_codec);
+ portal_params.color_range = color_range;
+ portal_params.record_cursor = record_cursor;
+ portal_params.restore_portal_session = restore_portal_session;
+ capture = gsr_capture_portal_create(&portal_params);
+ if(!capture)
+ _exit(1);
+#else
+ fprintf(stderr, "Error: option '-w portal' used but GPU Screen Recorder was compiled without desktop portal support\n");
+ _exit(2);
+#endif
} else if(contains_non_hex_number(window_str)) {
if(wayland || egl->gpu_info.vendor != GSR_GPU_VENDOR_NVIDIA) {
if(strcmp(window_str, "screen") == 0) {
@@ -1698,7 +1699,7 @@ static gsr_capture* create_capture_impl(const char *window_str, const char *scre
if(first_output.output_name) {
window_str = first_output.output_name;
} else {
- fprintf(stderr, "Error: no available output found\n");
+ fprintf(stderr, "Error: no usable output found\n");
_exit(1);
}
}
@@ -1903,8 +1904,8 @@ int main(int argc, char **argv) {
{ "-sc", Arg { {}, true, false } },
{ "-cr", Arg { {}, true, false } },
{ "-cursor", Arg { {}, true, false } },
- { "-gopm", Arg { {}, true, false } }, // deprecated, used keyint instead
{ "-keyint", Arg { {}, true, false } },
+ { "-restore-portal-session", Arg { {}, true, false } },
{ "-encoder", Arg { {}, true, false } },
};
@@ -2073,6 +2074,20 @@ int main(int argc, char **argv) {
usage();
}
+ bool restore_portal_session = false;
+ const char *restore_portal_session_str = args["-restore-portal-session"].value();
+ if(!restore_portal_session_str)
+ restore_portal_session_str = "no";
+
+ if(strcmp(restore_portal_session_str, "yes") == 0) {
+ restore_portal_session = true;
+ } else if(strcmp(restore_portal_session_str, "no") == 0) {
+ restore_portal_session = false;
+ } else {
fprintf(stderr, "Error: -restore-portal-session should be either 'yes' or 'no', got: '%s'\n", restore_portal_session_str);
+ usage();
+ }
+
const char *recording_saved_script = args["-sc"].value();
if(recording_saved_script) {
struct stat buf;
@@ -2544,7 +2559,7 @@ int main(int argc, char **argv) {
_exit(2);
}
- gsr_capture *capture = create_capture_impl(window_str, screen_region, wayland, &egl, fps, overclock, video_codec, color_range, record_cursor, framerate_mode == FramerateMode::CONTENT, use_software_video_encoder);
+ gsr_capture *capture = create_capture_impl(window_str, screen_region, wayland, &egl, fps, overclock, video_codec, color_range, record_cursor, framerate_mode == FramerateMode::CONTENT, use_software_video_encoder, restore_portal_session);
// (Some?) livestreaming services require at least one audio track to work.
// If not audio is provided then create one silent audio track.
diff --git a/src/pipewire.c b/src/pipewire.c
new file mode 100644
index 0000000..09b7280
--- /dev/null
+++ b/src/pipewire.c
@@ -0,0 +1,620 @@
+#include "../include/pipewire.h"
+#include "../include/egl.h"
+
+#include <pipewire/pipewire.h>
+#include <spa/param/video/format-utils.h>
+#include <spa/debug/types.h>
+
+#include <libdrm/drm_fourcc.h>
+
+#include <fcntl.h>
+
+/* TODO: Make gsr_pipewire_init asynchronous */
+/* TODO: Support 10-bit capture (hdr) when pipewire supports it */
+/* TODO: Support video crop and cursor data (that is not embedded) */
+/* TODO: Test all of the image formats */
+
+#ifndef SPA_POD_PROP_FLAG_DONT_FIXATE
+#define SPA_POD_PROP_FLAG_DONT_FIXATE (1 << 4)
+#endif
+
+#define CURSOR_META_SIZE(width, height) \
+ (sizeof(struct spa_meta_cursor) + sizeof(struct spa_meta_bitmap) + \
+ width * height * 4)
+
+static bool parse_pw_version(gsr_pipewire_data_version *dst, const char *version) {
+ const int n_matches = sscanf(version, "%d.%d.%d", &dst->major, &dst->minor, &dst->micro);
+ return n_matches == 3;
+}
+
+static bool check_pw_version(const gsr_pipewire_data_version *pw_version, int major, int minor, int micro) {
+ if (pw_version->major != major)
+ return pw_version->major > major;
+ if (pw_version->minor != minor)
+ return pw_version->minor > minor;
+ return pw_version->micro >= micro;
+}
+
+static void update_pw_versions(gsr_pipewire *self, const char *version) {
+ fprintf(stderr, "gsr info: pipewire: server version: %s\n", version);
+ fprintf(stderr, "gsr info: pipewire: library version: %s\n", pw_get_library_version());
+ fprintf(stderr, "gsr info: pipewire: header version: %s\n", pw_get_headers_version());
+ if(!parse_pw_version(&self->server_version, version))
+ fprintf(stderr, "gsr error: pipewire: failed to parse server version\n");
+}
+
+static void on_core_info_cb(void *user_data, const struct pw_core_info *info) {
+ gsr_pipewire *self = user_data;
+ update_pw_versions(self, info->version);
+}
+
+static void on_core_error_cb(void *user_data, uint32_t id, int seq, int res, const char *message) {
+ gsr_pipewire *self = user_data;
+ fprintf(stderr, "gsr error: pipewire: error id:%u seq:%d res:%d: %s\n", id, seq, res, message);
+ pw_thread_loop_signal(self->thread_loop, false);
+}
+
+static void on_core_done_cb(void *user_data, uint32_t id, int seq) {
+ gsr_pipewire *self = user_data;
+ if (id == PW_ID_CORE && self->server_version_sync == seq)
+ pw_thread_loop_signal(self->thread_loop, false);
+}
+
+static bool is_cursor_format_supported(const enum spa_video_format format) {
+ switch(format) {
+ case SPA_VIDEO_FORMAT_RGBx: return true;
+ case SPA_VIDEO_FORMAT_BGRx: return true;
+ case SPA_VIDEO_FORMAT_xRGB: return true;
+ case SPA_VIDEO_FORMAT_xBGR: return true;
+ case SPA_VIDEO_FORMAT_RGBA: return true;
+ case SPA_VIDEO_FORMAT_BGRA: return true;
+ case SPA_VIDEO_FORMAT_ARGB: return true;
+ case SPA_VIDEO_FORMAT_ABGR: return true;
+ default: break;
+ }
+ return false;
+}
+
+static const struct pw_core_events core_events = {
+ PW_VERSION_CORE_EVENTS,
+ .info = on_core_info_cb,
+ .done = on_core_done_cb,
+ .error = on_core_error_cb,
+};
+
+static void on_process_cb(void *user_data) {
+ gsr_pipewire *self = user_data;
+ struct spa_meta_cursor *cursor = NULL;
+
+ /* Find the most recent buffer */
+ struct pw_buffer *pw_buf = NULL;
+ for(;;) {
+ struct pw_buffer *aux = pw_stream_dequeue_buffer(self->stream);
+ if(!aux)
+ break;
+ if(pw_buf)
+ pw_stream_queue_buffer(self->stream, pw_buf);
+ pw_buf = aux;
+ }
+
+ if(!pw_buf) {
+ fprintf(stderr, "gsr info: pipewire: out of buffers!\n");
+ return;
+ }
+
+ struct spa_buffer *buffer = pw_buf->buffer;
+ const bool has_buffer = buffer->datas[0].chunk->size != 0;
+ if(!has_buffer)
+ goto read_metadata;
+
+ pthread_mutex_lock(&self->mutex);
+
+ if(buffer->datas[0].type == SPA_DATA_DmaBuf) {
+ if(buffer->n_datas > 0) {
+ self->dmabuf_data.fd = buffer->datas[0].fd;
+ self->dmabuf_data.offset = buffer->datas[0].chunk->offset;
+ self->dmabuf_data.stride = buffer->datas[0].chunk->stride;
+ } else {
+ self->dmabuf_data.fd = -1;
+ }
+ } else {
+ // TODO:
+ }
+
+ struct spa_meta_region *region = spa_buffer_find_meta_data(buffer, SPA_META_VideoCrop, sizeof(*region));
+ if(region && spa_meta_region_is_valid(region)) {
+ // fprintf(stderr, "gsr info: pipewire: crop Region available (%dx%d+%d+%d)\n",
+ // region->region.position.x, region->region.position.y,
+ // region->region.size.width, region->region.size.height);
+ self->crop.x = region->region.position.x;
+ self->crop.y = region->region.position.y;
+ self->crop.width = region->region.size.width;
+ self->crop.height = region->region.size.height;
+ self->crop.valid = true;
+ } else {
+ self->crop.valid = false;
+ }
+
+ pthread_mutex_unlock(&self->mutex);
+
+read_metadata:
+
+ cursor = spa_buffer_find_meta_data(buffer, SPA_META_Cursor, sizeof(*cursor));
+ self->cursor.valid = cursor && spa_meta_cursor_is_valid(cursor);
+
+ if (self->cursor.visible && self->cursor.valid) {
+ pthread_mutex_lock(&self->mutex);
+
+ struct spa_meta_bitmap *bitmap = NULL;
+ if (cursor->bitmap_offset)
+ bitmap = SPA_MEMBER(cursor, cursor->bitmap_offset, struct spa_meta_bitmap);
+
+ if (bitmap && bitmap->size.width > 0 && bitmap->size.height && is_cursor_format_supported(bitmap->format)) {
+ const uint8_t *bitmap_data = SPA_MEMBER(bitmap, bitmap->offset, uint8_t);
+ fprintf(stderr, "gsr info: pipewire: cursor bitmap update, size: %dx%d, format: %s\n",
+ (int)bitmap->size.width, (int)bitmap->size.height, spa_debug_type_find_name(spa_type_video_format, bitmap->format));
+
+ const size_t bitmap_size = bitmap->size.width * bitmap->size.height * 4;
+ uint8_t *new_bitmap_data = realloc(self->cursor.data, bitmap_size);
+ if(new_bitmap_data) {
+ self->cursor.data = new_bitmap_data;
+ /* TODO: Convert bgr and other image formats to rgb here */
+ memcpy(self->cursor.data, bitmap_data, bitmap_size);
+ }
+
+ self->cursor.hotspot_x = cursor->hotspot.x;
+ self->cursor.hotspot_y = cursor->hotspot.y;
+ self->cursor.width = bitmap->size.width;
+ self->cursor.height = bitmap->size.height;
+ }
+
+ self->cursor.x = cursor->position.x;
+ self->cursor.y = cursor->position.y;
+ pthread_mutex_unlock(&self->mutex);
+
+ //fprintf(stderr, "gsr info: pipewire: cursor: %d %d %d %d\n", cursor->hotspot.x, cursor->hotspot.y, cursor->position.x, cursor->position.y);
+ }
+
+ pw_stream_queue_buffer(self->stream, pw_buf);
+}
+
+static void on_param_changed_cb(void *user_data, uint32_t id, const struct spa_pod *param) {
+ gsr_pipewire *self = user_data;
+
+ if (!param || id != SPA_PARAM_Format)
+ return;
+
+ int result = spa_format_parse(param, &self->format.media_type, &self->format.media_subtype);
+ if (result < 0)
+ return;
+
+ if (self->format.media_type != SPA_MEDIA_TYPE_video || self->format.media_subtype != SPA_MEDIA_SUBTYPE_raw)
+ return;
+
+ pthread_mutex_lock(&self->mutex);
+ spa_format_video_raw_parse(param, &self->format.info.raw);
+ pthread_mutex_unlock(&self->mutex);
+
+ uint32_t buffer_types = 0;
+ const bool has_modifier = spa_pod_find_prop(param, NULL, SPA_FORMAT_VIDEO_modifier) != NULL;
+ if(has_modifier || check_pw_version(&self->server_version, 0, 3, 24))
+ buffer_types |= 1 << SPA_DATA_DmaBuf;
+
+ fprintf(stderr, "gsr info: pipewire: negotiated format:\n");
+
+ fprintf(stderr, "gsr info: pipewire: Format: %d (%s)\n",
+ self->format.info.raw.format,
+ spa_debug_type_find_name(spa_type_video_format, self->format.info.raw.format));
+
+ if(has_modifier) {
+ fprintf(stderr, "gsr info: pipewire: Modifier: %" PRIu64 "\n", self->format.info.raw.modifier);
+ }
+
+ fprintf(stderr, "gsr info: pipewire: Size: %dx%d\n", self->format.info.raw.size.width, self->format.info.raw.size.height);
+ fprintf(stderr, "gsr info: pipewire: Framerate: %d/%d\n", self->format.info.raw.framerate.num, self->format.info.raw.framerate.denom);
+
+ uint8_t params_buffer[1024];
+ struct spa_pod_builder pod_builder = SPA_POD_BUILDER_INIT(params_buffer, sizeof(params_buffer));
+ const struct spa_pod *params[3];
+
+ params[0] = spa_pod_builder_add_object(
+ &pod_builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
+ SPA_PARAM_META_type, SPA_POD_Id(SPA_META_VideoCrop),
+ SPA_PARAM_META_size,
+ SPA_POD_Int(sizeof(struct spa_meta_region)));
+
+ params[1] = spa_pod_builder_add_object(
+ &pod_builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
+ SPA_PARAM_META_type, SPA_POD_Id(SPA_META_Cursor),
+ SPA_PARAM_META_size,
+ SPA_POD_CHOICE_RANGE_Int(CURSOR_META_SIZE(64, 64),
+ CURSOR_META_SIZE(1, 1),
+ CURSOR_META_SIZE(1024, 1024)));
+
+ params[2] = spa_pod_builder_add_object(
+ &pod_builder, SPA_TYPE_OBJECT_ParamBuffers, SPA_PARAM_Buffers,
+ SPA_PARAM_BUFFERS_dataType, SPA_POD_Int(buffer_types));
+
+ pw_stream_update_params(self->stream, params, 3);
+ self->negotiated = true;
+}
+
+static void on_state_changed_cb(void *user_data, enum pw_stream_state old, enum pw_stream_state state, const char *error) {
+ (void)old;
+ gsr_pipewire *self = user_data;
+ fprintf(stderr, "gsr info: pipewire: stream %p state: \"%s\" (error: %s)\n",
+ (void*)self->stream, pw_stream_state_as_string(state),
+ error ? error : "none");
+}
+
+static const struct pw_stream_events stream_events = {
+ PW_VERSION_STREAM_EVENTS,
+ .state_changed = on_state_changed_cb,
+ .param_changed = on_param_changed_cb,
+ .process = on_process_cb,
+};
+
+static inline struct spa_pod *build_format(struct spa_pod_builder *b,
+ const gsr_pipewire_video_info *ovi,
+ uint32_t format, const uint64_t *modifiers,
+ size_t modifier_count)
+{
+ struct spa_pod_frame format_frame;
+
+ spa_pod_builder_push_object(b, &format_frame, SPA_TYPE_OBJECT_Format, SPA_PARAM_EnumFormat);
+ spa_pod_builder_add(b, SPA_FORMAT_mediaType, SPA_POD_Id(SPA_MEDIA_TYPE_video), 0);
+ spa_pod_builder_add(b, SPA_FORMAT_mediaSubtype, SPA_POD_Id(SPA_MEDIA_SUBTYPE_raw), 0);
+
+ spa_pod_builder_add(b, SPA_FORMAT_VIDEO_format, SPA_POD_Id(format), 0);
+
+ if (modifier_count > 0) {
+ struct spa_pod_frame modifier_frame;
+
+ spa_pod_builder_prop(b, SPA_FORMAT_VIDEO_modifier, SPA_POD_PROP_FLAG_MANDATORY | SPA_POD_PROP_FLAG_DONT_FIXATE);
+ spa_pod_builder_push_choice(b, &modifier_frame, SPA_CHOICE_Enum, 0);
+
+ /* The first element of choice pods is the preferred value. Here
+ * we arbitrarily pick the first modifier as the preferred one.
+ */
+ // TODO:
+ spa_pod_builder_long(b, modifiers[0]);
+
+ for(uint32_t i = 0; i < modifier_count; i++)
+ spa_pod_builder_long(b, modifiers[i]);
+
+ spa_pod_builder_pop(b, &modifier_frame);
+ }
+
+ spa_pod_builder_add(b, SPA_FORMAT_VIDEO_size,
+ SPA_POD_CHOICE_RANGE_Rectangle(
+ &SPA_RECTANGLE(32, 32),
+ &SPA_RECTANGLE(1, 1),
+ &SPA_RECTANGLE(16384, 16384)),
+ SPA_FORMAT_VIDEO_framerate,
+ SPA_POD_CHOICE_RANGE_Fraction(
+ &SPA_FRACTION(ovi->fps_num, ovi->fps_den),
+ &SPA_FRACTION(0, 1), &SPA_FRACTION(500, 1)),
+ 0);
+ return spa_pod_builder_pop(b, &format_frame);
+}
+
+#define NUM_FORMATS 10
+
+/* https://gstreamer.freedesktop.org/documentation/additional/design/mediatype-video-raw.html?gi-language=c#formats */
+/* For some reason gstreamer formats are in opposite order to drm formats */
+static int64_t spa_video_format_to_drm_format(const enum spa_video_format format) {
+ switch(format) {
+ case SPA_VIDEO_FORMAT_RGBx: return DRM_FORMAT_XBGR8888;
+ case SPA_VIDEO_FORMAT_BGRx: return DRM_FORMAT_XRGB8888;
+ case SPA_VIDEO_FORMAT_xRGB: return DRM_FORMAT_BGRX8888;
+ case SPA_VIDEO_FORMAT_xBGR: return DRM_FORMAT_RGBX8888;
+ case SPA_VIDEO_FORMAT_RGBA: return DRM_FORMAT_ABGR8888;
+ case SPA_VIDEO_FORMAT_BGRA: return DRM_FORMAT_ARGB8888;
+ case SPA_VIDEO_FORMAT_ARGB: return DRM_FORMAT_BGRA8888;
+ case SPA_VIDEO_FORMAT_ABGR: return DRM_FORMAT_RGBA8888;
+ case SPA_VIDEO_FORMAT_RGB: return DRM_FORMAT_XBGR8888;
+ case SPA_VIDEO_FORMAT_BGR: return DRM_FORMAT_XRGB8888;
+ default: break;
+ }
+ return DRM_FORMAT_INVALID;
+}
+
+static bool gsr_pipewire_build_format_params(gsr_pipewire *self, struct spa_pod_builder *pod_builder, struct spa_pod **params) {
+ if(!check_pw_version(&self->server_version, 0, 3, 33))
+ return false;
+
+ const enum spa_video_format formats[] = {
+ SPA_VIDEO_FORMAT_RGBx,
+ SPA_VIDEO_FORMAT_BGRx,
+ SPA_VIDEO_FORMAT_xRGB,
+ SPA_VIDEO_FORMAT_xBGR,
+ SPA_VIDEO_FORMAT_RGBA,
+ SPA_VIDEO_FORMAT_BGRA,
+ SPA_VIDEO_FORMAT_ARGB,
+ SPA_VIDEO_FORMAT_ABGR,
+ SPA_VIDEO_FORMAT_RGB,
+ SPA_VIDEO_FORMAT_BGR,
+ };
+
+ const uint64_t modifiers[] = { DRM_FORMAT_MOD_LINEAR, DRM_FORMAT_MOD_INVALID };
+
+ for (size_t i = 0; i < NUM_FORMATS; i++) {
+ enum spa_video_format format = formats[i];
+ params[i] = build_format(pod_builder, &self->video_info, format, modifiers, 2);
+ }
+
+ return true;
+}
+
+static void renegotiate_format(void *data, uint64_t expirations) {
+ (void)expirations;
+ gsr_pipewire *self = (gsr_pipewire*)data;
+
+ pw_thread_loop_lock(self->thread_loop);
+
+ struct spa_pod *params[NUM_FORMATS];
+ uint8_t params_buffer[2048];
+ struct spa_pod_builder pod_builder = SPA_POD_BUILDER_INIT(params_buffer, sizeof(params_buffer));
+ if (!gsr_pipewire_build_format_params(self, &pod_builder, params)) {
+ pw_thread_loop_unlock(self->thread_loop);
+ return;
+ }
+
+ pw_stream_update_params(self->stream, (const struct spa_pod**)params, NUM_FORMATS);
+ pw_thread_loop_unlock(self->thread_loop);
+}
+
+static bool gsr_pipewire_setup_stream(gsr_pipewire *self) {
+ struct spa_pod *params[NUM_FORMATS];
+ uint8_t params_buffer[2048];
+ struct spa_pod_builder pod_builder = SPA_POD_BUILDER_INIT(params_buffer, sizeof(params_buffer));
+
+ self->thread_loop = pw_thread_loop_new("PipeWire thread loop", NULL);
+ if(!self->thread_loop) {
+ fprintf(stderr, "gsr error: gsr_pipewire_setup_stream: failed to create pipewire thread\n");
+ goto error;
+ }
+
+ self->context = pw_context_new(pw_thread_loop_get_loop(self->thread_loop), NULL, 0);
+ if(!self->context) {
+ fprintf(stderr, "gsr error: gsr_pipewire_setup_stream: failed to create pipewire context\n");
+ goto error;
+ }
+
+ if(pw_thread_loop_start(self->thread_loop) < 0) {
+ fprintf(stderr, "gsr error: gsr_pipewire_setup_stream: failed to start thread\n");
+ goto error;
+ }
+
+ pw_thread_loop_lock(self->thread_loop);
+
+ // TODO: Why pass 5 to fcntl?
+ self->core = pw_context_connect_fd(self->context, fcntl(self->fd, F_DUPFD_CLOEXEC, 5), NULL, 0);
+ if(!self->core) {
+ pw_thread_loop_unlock(self->thread_loop);
+ fprintf(stderr, "gsr error: gsr_pipewire_setup_stream: failed to connect to fd %d\n", self->fd);
+ goto error;
+ }
+
+ // TODO: Error check
+ pw_core_add_listener(self->core, &self->core_listener, &core_events, self);
+
+ // TODO: Cleanup?
+ self->reneg = pw_loop_add_event(pw_thread_loop_get_loop(self->thread_loop), renegotiate_format, self);
+ if(!self->reneg) {
+ pw_thread_loop_unlock(self->thread_loop);
+ fprintf(stderr, "gsr error: gsr_pipewire_setup_stream: pw_loop_add_event failed\n");
+ goto error;
+ }
+
+ self->server_version_sync = pw_core_sync(self->core, PW_ID_CORE, 0);
+ pw_thread_loop_wait(self->thread_loop);
+
+ self->stream = pw_stream_new(self->core, "com.dec05eba.gpu_screen_recorder",
+ pw_properties_new(PW_KEY_MEDIA_TYPE, "Video",
+ PW_KEY_MEDIA_CATEGORY, "Capture",
+ PW_KEY_MEDIA_ROLE, "Screen", NULL));
+ if(!self->stream) {
+ pw_thread_loop_unlock(self->thread_loop);
+ fprintf(stderr, "gsr error: gsr_pipewire_setup_stream: failed to create stream\n");
+ goto error;
+ }
+ pw_stream_add_listener(self->stream, &self->stream_listener, &stream_events, self);
+
+ self->video_info.fps_num = 60;
+ self->video_info.fps_den = 1;
+
+ if(!gsr_pipewire_build_format_params(self, &pod_builder, params)) {
+ pw_thread_loop_unlock(self->thread_loop);
+ fprintf(stderr, "gsr error: gsr_pipewire_setup_stream: failed to build format params\n");
+ goto error;
+ }
+
+ if(pw_stream_connect(
+ self->stream, PW_DIRECTION_INPUT, self->node,
+ PW_STREAM_FLAG_AUTOCONNECT | PW_STREAM_FLAG_MAP_BUFFERS, (const struct spa_pod**)params,
+ NUM_FORMATS) < 0)
+ {
+ pw_thread_loop_unlock(self->thread_loop);
+ fprintf(stderr, "gsr error: gsr_pipewire_setup_stream: failed to connect stream\n");
+ goto error;
+ }
+
+ pw_thread_loop_unlock(self->thread_loop);
+ return true;
+
+ error:
+ if(self->thread_loop) {
+ //pw_thread_loop_wait(self->thread_loop);
+ pw_thread_loop_stop(self->thread_loop);
+ }
+
+ if(self->stream) {
+ pw_stream_disconnect(self->stream);
+ pw_stream_destroy(self->stream);
+ self->stream = NULL;
+ }
+
+ if(self->core) {
+ pw_core_disconnect(self->core);
+ self->core = NULL;
+ }
+
+ if(self->context) {
+ pw_context_destroy(self->context);
+ self->context = NULL;
+ }
+
+ if(self->thread_loop) {
+ pw_thread_loop_destroy(self->thread_loop);
+ self->thread_loop = NULL;
+ }
+ return false;
+}
+
+static int pw_init_counter = 0;
+bool gsr_pipewire_init(gsr_pipewire *self, int pipewire_fd, uint32_t pipewire_node, int fps, bool capture_cursor, gsr_egl *egl) {
+ if(pw_init_counter == 0)
+ pw_init(NULL, NULL);
+ ++pw_init_counter;
+
+ memset(self, 0, sizeof(*self));
+ self->egl = egl;
+ self->fd = pipewire_fd;
+ self->node = pipewire_node;
+ if(pthread_mutex_init(&self->mutex, NULL) != 0) {
+ fprintf(stderr, "gsr error: gsr_pipewire_init: failed to initialize mutex\n");
+ gsr_pipewire_deinit(self);
+ return false;
+ }
+ self->mutex_initialized = true;
+ self->video_info.fps_num = fps;
+ self->video_info.fps_den = 1;
+ self->cursor.visible = capture_cursor;
+
+ if(!gsr_pipewire_setup_stream(self)) {
+ gsr_pipewire_deinit(self);
+ return false;
+ }
+
+ return true;
+}
+
+void gsr_pipewire_deinit(gsr_pipewire *self) {
+ if(self->thread_loop) {
+ //pw_thread_loop_wait(self->thread_loop);
+ pw_thread_loop_stop(self->thread_loop);
+ }
+
+ if(self->stream) {
+ pw_stream_disconnect(self->stream);
+ pw_stream_destroy(self->stream);
+ self->stream = NULL;
+ }
+
+ if(self->core) {
+ pw_core_disconnect(self->core);
+ self->core = NULL;
+ }
+
+ if(self->context) {
+ pw_context_destroy(self->context);
+ self->context = NULL;
+ }
+
+ if(self->thread_loop) {
+ pw_thread_loop_destroy(self->thread_loop);
+ self->thread_loop = NULL;
+ }
+
+ if(self->fd > 0) {
+ close(self->fd);
+ self->fd = 0;
+ }
+
+ self->negotiated = false;
+
+ if(self->mutex_initialized) {
+ pthread_mutex_destroy(&self->mutex);
+ self->mutex_initialized = false;
+ }
+
+ if(self->cursor.data) {
+ free(self->cursor.data);
+ self->cursor.data = NULL;
+ }
+
+ --pw_init_counter;
+ if(pw_init_counter == 0) {
+#if PW_CHECK_VERSION(0, 3, 49)
+ pw_deinit();
+#endif
+ }
+}
+
+/* TODO: Do this in the thread instead, otherwise this is not guaranteed to always work and may produce glitched output */
+bool gsr_pipewire_map_texture(gsr_pipewire *self, unsigned int texture_id, unsigned int cursor_texture_id, gsr_pipewire_region *region, gsr_pipewire_region *cursor_region) {
+ pthread_mutex_lock(&self->mutex);
+
+ if(!self->negotiated || self->dmabuf_data.fd <= 0) {
+ pthread_mutex_unlock(&self->mutex);
+ return false;
+ }
+
+ /* TODO: Support multiple planes */
+ const intptr_t img_attr[] = {
+ EGL_LINUX_DRM_FOURCC_EXT, spa_video_format_to_drm_format(self->format.info.raw.format),
+ EGL_WIDTH, self->format.info.raw.size.width,
+ EGL_HEIGHT, self->format.info.raw.size.height,
+ EGL_DMA_BUF_PLANE0_FD_EXT, self->dmabuf_data.fd,
+ EGL_DMA_BUF_PLANE0_OFFSET_EXT, self->dmabuf_data.offset,
+ EGL_DMA_BUF_PLANE0_PITCH_EXT, self->dmabuf_data.stride,
+ EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT, self->format.info.raw.modifier & 0xFFFFFFFFULL,
+ EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT, self->format.info.raw.modifier >> 32ULL,
+ EGL_NONE
+ };
+
+ EGLImage image = self->egl->eglCreateImage(self->egl->egl_display, 0, EGL_LINUX_DMA_BUF_EXT, NULL, img_attr);
+ self->egl->glBindTexture(GL_TEXTURE_2D, texture_id);
+ self->egl->glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, image);
+ self->egl->eglDestroyImage(self->egl->egl_display, image);
+ self->egl->glBindTexture(GL_TEXTURE_2D, 0);
+
+ if(self->cursor.data) {
+ self->egl->glBindTexture(GL_TEXTURE_2D, cursor_texture_id);
+ // TODO: glTextureSubImage2D if same size
+ self->egl->glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, self->cursor.width, self->cursor.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, self->cursor.data);
+ self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ self->egl->glBindTexture(GL_TEXTURE_2D, 0);
+
+ free(self->cursor.data);
+ self->cursor.data = NULL;
+ }
+
+ region->x = 0;
+ region->y = 0;
+
+ region->width = self->format.info.raw.size.width;
+ region->height = self->format.info.raw.size.height;
+
+ if(self->crop.valid) {
+ region->x = self->crop.x;
+ region->y = self->crop.y;
+
+ region->width = self->crop.width;
+ region->height = self->crop.height;
+ }
+
+ /* TODO: Test if cursor hotspot is correct */
+ cursor_region->x = self->cursor.x - self->cursor.hotspot_x;
+ cursor_region->y = self->cursor.y - self->cursor.hotspot_y;
+
+ cursor_region->width = self->cursor.width;
+ cursor_region->height = self->cursor.height;
+
+ pthread_mutex_unlock(&self->mutex);
+ return true;
+}
diff --git a/src/utils.c b/src/utils.c
index e00f3c5..d768f58 100644
--- a/src/utils.c
+++ b/src/utils.c
@@ -1,13 +1,19 @@
#include "../include/utils.h"
+
#include <time.h>
#include <string.h>
#include <stdio.h>
#include <unistd.h>
#include <fcntl.h>
+#include <stdlib.h>
+#include <sys/stat.h>
+#include <errno.h>
+
#include <xf86drmMode.h>
#include <xf86drm.h>
-#include <stdlib.h>
+
#include <X11/Xatom.h>
+#include <X11/extensions/Xrandr.h>
double clock_get_monotonic_seconds(void) {
struct timespec ts;
@@ -480,3 +486,35 @@ bool gsr_card_path_get_render_path(const char *card_path, char *render_path) {
close(fd);
return false;
}
+
+int create_directory_recursive(char *path) {
+ int path_len = strlen(path);
+ char *p = path;
+ char *end = path + path_len;
+ for(;;) {
+ char *slash_p = strchr(p, '/');
+
+ // Skips first '/', we don't want to try and create the root directory
+ if(slash_p == path) {
+ ++p;
+ continue;
+ }
+
+ if(!slash_p)
+ slash_p = end;
+
+ char prev_char = *slash_p;
+ *slash_p = '\0';
+ int err = mkdir(path, S_IRWXU);
+ *slash_p = prev_char;
+
+ if(err == -1 && errno != EEXIST)
+ return err;
+
+ if(slash_p == end)
+ break;
+ else
+ p = slash_p + 1;
+ }
+ return 0;
+}