-rw-r--r--  TODO                        |   4
-rw-r--r--  include/capture/capture.h   |   6
-rw-r--r--  include/pipewire.h          |   4
-rw-r--r--  include/utils.h             |   3
-rw-r--r--  include/window_texture.h    |   1
-rw-r--r--  src/capture/capture.c       |  10
-rw-r--r--  src/capture/kms.c           | 147
-rw-r--r--  src/capture/nvfbc.c         |   1
-rw-r--r--  src/capture/portal.c        |  77
-rw-r--r--  src/capture/xcomposite.c    |  28
-rw-r--r--  src/main.cpp                |   9
-rw-r--r--  src/pipewire.c              |  49
-rw-r--r--  src/utils.c                 | 143
-rw-r--r--  src/window_texture.c        |  12
14 files changed, 351 insertions, 143 deletions
diff --git a/TODO b/TODO
index 2781db4..7c432b4 100644
--- a/TODO
+++ b/TODO
@@ -154,6 +154,4 @@ Enable 2-pass encoding.
Add vbr/cbr option.
-Restart replay/update video resolution if monitor resolution changes.
-
-Support damage tracking on wayland.
+Restart replay/update video resolution if monitor resolution changes.
\ No newline at end of file
diff --git a/include/capture/capture.h b/include/capture/capture.h
index e31b3a5..7c8887d 100644
--- a/include/capture/capture.h
+++ b/include/capture/capture.h
@@ -17,10 +17,9 @@ struct gsr_capture {
/* These methods should not be called manually. Call gsr_capture_* instead */
int (*start)(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame);
void (*on_event)(gsr_capture *cap, gsr_egl *egl); /* can be NULL */
- void (*tick)(gsr_capture *cap, AVCodecContext *video_codec_context); /* can be NULL. If there is an event then |on_event| is called before this */
+ void (*tick)(gsr_capture *cap); /* can be NULL. If there is an event then |on_event| is called before this */
bool (*should_stop)(gsr_capture *cap, bool *err); /* can be NULL. If NULL, return false */
int (*capture)(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion);
- void (*capture_end)(gsr_capture *cap, AVFrame *frame); /* can be NULL */
gsr_source_color (*get_source_color)(gsr_capture *cap);
bool (*uses_external_image)(gsr_capture *cap); /* can be NULL. If NULL, return false */
bool (*set_hdr_metadata)(gsr_capture *cap, AVMasteringDisplayMetadata *mastering_display_metadata, AVContentLightMetadata *light_metadata); /* can be NULL. If NULL, return false */
@@ -35,10 +34,9 @@ struct gsr_capture {
int gsr_capture_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame);
void gsr_capture_on_event(gsr_capture *cap, gsr_egl *egl);
-void gsr_capture_tick(gsr_capture *cap, AVCodecContext *video_codec_context);
+void gsr_capture_tick(gsr_capture *cap);
bool gsr_capture_should_stop(gsr_capture *cap, bool *err);
int gsr_capture_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion);
-void gsr_capture_capture_end(gsr_capture *cap, AVFrame *frame);
gsr_source_color gsr_capture_get_source_color(gsr_capture *cap);
bool gsr_capture_uses_external_image(gsr_capture *cap);
bool gsr_capture_set_hdr_metadata(gsr_capture *cap, AVMasteringDisplayMetadata *mastering_display_metadata, AVContentLightMetadata *light_metadata);
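With this change tick() no longer receives the AVCodecContext and capture_end() is gone entirely: backends cache the codec context in start() if they still need it and do their per-frame cleanup at the end of capture(). A minimal sketch of a backend vtable under the new interface; the my_* names are hypothetical, the include path assumes -Iinclude, and only the gsr_capture fields come from the header above.

#include <stdlib.h>
#include "capture/capture.h"

/* Hypothetical backend state, for illustration only. */
typedef struct {
    AVCodecContext *video_codec_context; /* cached in start(), since tick() no longer gets it */
} my_capture;

static int my_capture_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
    (void)frame;
    my_capture *self = cap->priv;
    self->video_codec_context = video_codec_context;
    return 0;
}

static void my_capture_tick(gsr_capture *cap) {
    (void)cap; /* poll for damage or do other periodic work here */
}

gsr_capture* my_capture_create(void) {
    gsr_capture *cap = calloc(1, sizeof(gsr_capture));
    my_capture *priv = calloc(1, sizeof(my_capture));
    if(!cap || !priv) {
        free(cap);
        free(priv);
        return NULL;
    }
    *cap = (gsr_capture) {
        .start = my_capture_start,
        .tick = my_capture_tick, /* note: no AVCodecContext parameter anymore */
        /* no .capture_end; per-frame cleanup now belongs at the end of .capture */
        /* .capture, .get_source_color, .destroy etc. omitted for brevity */
        .priv = priv
    };
    return cap;
}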
diff --git a/include/pipewire.h b/include/pipewire.h
index 13c4b67..1908e2d 100644
--- a/include/pipewire.h
+++ b/include/pipewire.h
@@ -104,8 +104,8 @@ typedef struct {
bool gsr_pipewire_init(gsr_pipewire *self, int pipewire_fd, uint32_t pipewire_node, int fps, bool capture_cursor, gsr_egl *egl);
void gsr_pipewire_deinit(gsr_pipewire *self);
-/* |plane_fds| should be at least GSR_PIPEWIRE_DMABUF_MAX_PLANES in size */
-bool gsr_pipewire_map_texture(gsr_pipewire *self, gsr_texture_map texture_map, gsr_pipewire_region *region, gsr_pipewire_region *cursor_region, int *plane_fds, int *num_plane_fds, bool *using_external_image);
+/* |dmabuf_data| should be at least GSR_PIPEWIRE_DMABUF_MAX_PLANES in size */
+bool gsr_pipewire_map_texture(gsr_pipewire *self, gsr_texture_map texture_map, gsr_pipewire_region *region, gsr_pipewire_region *cursor_region, gsr_pipewire_dmabuf_data *dmabuf_data, int *num_dmabuf_data, uint32_t *fourcc, uint64_t *modifiers, bool *using_external_image);
bool gsr_pipewire_is_damaged(gsr_pipewire *self);
void gsr_pipewire_clear_damage(gsr_pipewire *self);
diff --git a/include/utils.h b/include/utils.h
index 6d58db7..92eb851 100644
--- a/include/utils.h
+++ b/include/utils.h
@@ -48,6 +48,7 @@ int create_directory_recursive(char *path);
/* |img_attr| needs to be at least 44 in size */
void setup_dma_buf_attrs(intptr_t *img_attr, uint32_t format, uint32_t width, uint32_t height, const int *fds, const uint32_t *offsets, const uint32_t *pitches, const uint64_t *modifiers, int num_planes, bool use_modifier);
bool video_codec_context_is_vaapi(AVCodecContext *video_codec_context);
-bool vaapi_copy_drm_planes_to_video_surface(AVCodecContext *video_codec_context, AVFrame *video_frame, int x, int y, uint32_t format, uint32_t width, uint32_t height, const int *fds, const uint32_t *offsets, const uint32_t *pitches, const uint64_t *modifiers, int num_planes);
+bool vaapi_copy_drm_planes_to_video_surface(AVCodecContext *video_codec_context, AVFrame *video_frame, vec2i source_pos, vec2i source_size, vec2i dest_pos, vec2i dest_size, uint32_t format, vec2i size, const int *fds, const uint32_t *offsets, const uint32_t *pitches, const uint64_t *modifiers, int num_planes);
+bool vaapi_copy_egl_image_to_video_surface(gsr_egl *egl, EGLImage image, vec2i source_pos, vec2i source_size, vec2i dest_pos, vec2i dest_size, AVCodecContext *video_codec_context, AVFrame *video_frame);
#endif /* GSR_UTILS_H */
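The widened helper above now takes an explicit source rectangle (source_pos/source_size, a crop inside the imported DRM buffer), a destination rectangle (dest_pos/dest_size on the encode surface) and the full buffer dimensions in size, instead of a single position with the buffer size doing double duty. A hedged usage sketch for a single-plane XRGB8888 buffer; the wrapper name, the numbers and the include path are illustrative only.

#include <stdbool.h>
#include <libdrm/drm_fourcc.h>
#include "utils.h"

/* Sketch: copy a full 1920x1080 XRGB8888 DMA-BUF onto the top-left of the encode surface.
   A real caller gets the fd, pitch and modifier from its capture source. */
static bool copy_full_frame_example(AVCodecContext *video_codec_context, AVFrame *video_frame,
                                    int dmabuf_fd, uint32_t pitch, uint64_t modifier) {
    const int fds[1] = { dmabuf_fd };
    const uint32_t offsets[1] = { 0 };
    const uint32_t pitches[1] = { pitch };
    const uint64_t modifiers[1] = { modifier };
    return vaapi_copy_drm_planes_to_video_surface(video_codec_context, video_frame,
        (vec2i){0, 0},       /* source_pos: crop origin inside the buffer */
        (vec2i){1920, 1080}, /* source_size: crop extent */
        (vec2i){0, 0},       /* dest_pos: placement on the encode surface */
        (vec2i){1920, 1080}, /* dest_size: output extent (the copy can scale) */
        DRM_FORMAT_XRGB8888, /* DRM fourcc of the imported buffer */
        (vec2i){1920, 1080}, /* size: full dimensions of the buffer itself */
        fds, offsets, pitches, modifiers, 1);
}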
diff --git a/include/window_texture.h b/include/window_texture.h
index 75bb2a7..6ee5df4 100644
--- a/include/window_texture.h
+++ b/include/window_texture.h
@@ -7,6 +7,7 @@ typedef struct {
Display *display;
Window window;
Pixmap pixmap;
+ EGLImage image;
unsigned int texture_id;
int redirected;
gsr_egl *egl;
diff --git a/src/capture/capture.c b/src/capture/capture.c
index 5fc96d0..ec10854 100644
--- a/src/capture/capture.c
+++ b/src/capture/capture.c
@@ -10,10 +10,10 @@ int gsr_capture_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVF
return res;
}
-void gsr_capture_tick(gsr_capture *cap, AVCodecContext *video_codec_context) {
+void gsr_capture_tick(gsr_capture *cap) {
assert(cap->started);
if(cap->tick)
- cap->tick(cap, video_codec_context);
+ cap->tick(cap);
}
void gsr_capture_on_event(gsr_capture *cap, gsr_egl *egl) {
@@ -34,12 +34,6 @@ int gsr_capture_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *
return cap->capture(cap, frame, color_conversion);
}
-void gsr_capture_capture_end(gsr_capture *cap, AVFrame *frame) {
- assert(cap->started);
- if(cap->capture_end)
- cap->capture_end(cap, frame);
-}
-
gsr_source_color gsr_capture_get_source_color(gsr_capture *cap) {
return cap->get_source_color(cap);
}
diff --git a/src/capture/kms.c b/src/capture/kms.c
index c36add4..a444b6d 100644
--- a/src/capture/kms.c
+++ b/src/capture/kms.c
@@ -8,7 +8,9 @@
#include <string.h>
#include <stdio.h>
#include <unistd.h>
+#include <fcntl.h>
+#include <xf86drm.h>
#include <libdrm/drm_fourcc.h>
#include <libavcodec/avcodec.h>
@@ -52,6 +54,13 @@ typedef struct {
AVCodecContext *video_codec_context;
bool performance_error_shown;
+
+ int drm_fd;
+ uint64_t prev_sequence;
+ bool damaged;
+
+ vec2i prev_target_pos;
+ vec2i prev_plane_size;
} gsr_capture_kms;
static void gsr_capture_kms_cleanup_kms_fds(gsr_capture_kms *self) {
@@ -84,6 +93,11 @@ static void gsr_capture_kms_stop(gsr_capture_kms *self) {
self->cursor_texture_id = 0;
}
+ if(self->drm_fd > 0) {
+ close(self->drm_fd);
+ self->drm_fd = -1;
+ }
+
gsr_capture_kms_cleanup_kms_fds(self);
gsr_kms_client_deinit(&self->kms_client);
gsr_cursor_deinit(&self->x11_cursor);
@@ -147,6 +161,15 @@ static void monitor_callback(const gsr_monitor *monitor, void *userdata) {
fprintf(stderr, "gsr warning: reached max connector ids\n");
}
+static vec2i rotate_capture_size_if_rotated(gsr_capture_kms *self, vec2i capture_size) {
+ if(self->monitor_rotation == GSR_MONITOR_ROT_90 || self->monitor_rotation == GSR_MONITOR_ROT_270) {
+ int tmp_x = capture_size.x;
+ capture_size.x = capture_size.y;
+ capture_size.y = tmp_x;
+ }
+ return capture_size;
+}
+
static int gsr_capture_kms_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
gsr_capture_kms *self = cap->priv;
@@ -182,12 +205,10 @@ static int gsr_capture_kms_start(gsr_capture *cap, AVCodecContext *video_codec_c
self->capture_pos = monitor.pos;
/* Monitor size is already rotated on x11 when the monitor is rotated, no need to apply it ourselves */
- if(!self->is_x11 && (self->monitor_rotation == GSR_MONITOR_ROT_90 || self->monitor_rotation == GSR_MONITOR_ROT_270)) {
- self->capture_size.x = monitor.size.y;
- self->capture_size.y = monitor.size.x;
- } else {
+ if(self->is_x11)
self->capture_size = monitor.size;
- }
+ else
+ self->capture_size = rotate_capture_size_if_rotated(self, monitor.size);
/* Disable vsync */
self->params.egl->eglSwapInterval(self->params.egl->egl_display, 0);
@@ -211,6 +232,26 @@ static void gsr_capture_kms_on_event(gsr_capture *cap, gsr_egl *egl) {
gsr_cursor_on_event(&self->x11_cursor, xev);
}
+static void gsr_capture_kms_tick(gsr_capture *cap) {
+ gsr_capture_kms *self = cap->priv;
+
+ if(self->drm_fd <= 0)
+ self->drm_fd = open(self->params.egl->card_path, O_RDONLY);
+
+ if(self->drm_fd <= 0)
+ return;
+
+ uint64_t sequence = 0;
+ uint64_t ns = 0;
+ if(drmCrtcGetSequence(self->drm_fd, 79, &sequence, &ns) != 0)
+ return;
+
+ if(sequence != self->prev_sequence) {
+ self->prev_sequence = sequence;
+ self->damaged = true;
+ }
+}
+
static float monitor_rotation_to_radians(gsr_monitor_rotation rot) {
switch(rot) {
case GSR_MONITOR_ROT_0: return 0.0f;
@@ -386,7 +427,7 @@ static gsr_kms_response_item* find_cursor_drm_if_on_monitor(gsr_capture_kms *sel
return cursor_drm_fd;
}
-static void render_drm_cursor(gsr_capture_kms *self, gsr_color_conversion *color_conversion, const gsr_kms_response_item *cursor_drm_fd, int target_x, int target_y, float texture_rotation) {
+static void render_drm_cursor(gsr_capture_kms *self, gsr_color_conversion *color_conversion, const gsr_kms_response_item *cursor_drm_fd, vec2i target_pos, float texture_rotation) {
const bool cursor_texture_id_is_external = self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA;
const vec2i cursor_size = {cursor_drm_fd->width, cursor_drm_fd->height};
@@ -415,8 +456,8 @@ static void render_drm_cursor(gsr_capture_kms *self, gsr_color_conversion *color
break;
}
- cursor_pos.x += target_x;
- cursor_pos.y += target_y;
+ cursor_pos.x += target_pos.x;
+ cursor_pos.y += target_pos.y;
int fds[GSR_KMS_MAX_DMA_BUFS];
uint32_t offsets[GSR_KMS_MAX_DMA_BUFS];
@@ -444,7 +485,7 @@ static void render_drm_cursor(gsr_capture_kms *self, gsr_color_conversion *color
self->params.egl->eglDestroyImage(self->params.egl->egl_display, cursor_image);
self->params.egl->glEnable(GL_SCISSOR_TEST);
- self->params.egl->glScissor(target_x, target_y, self->capture_size.x, self->capture_size.y);
+ self->params.egl->glScissor(target_pos.x, target_pos.y, self->capture_size.x, self->capture_size.y);
gsr_color_conversion_draw(color_conversion, self->cursor_texture_id,
cursor_pos, cursor_size,
@@ -454,16 +495,16 @@ static void render_drm_cursor(gsr_capture_kms *self, gsr_color_conversion *color
self->params.egl->glDisable(GL_SCISSOR_TEST);
}
-static void render_x11_cursor(gsr_capture_kms *self, gsr_color_conversion *color_conversion, vec2i capture_pos, int target_x, int target_y) {
+static void render_x11_cursor(gsr_capture_kms *self, gsr_color_conversion *color_conversion, vec2i capture_pos, vec2i target_pos) {
gsr_cursor_tick(&self->x11_cursor, DefaultRootWindow(self->params.egl->x11.dpy));
const vec2i cursor_pos = {
- target_x + self->x11_cursor.position.x - self->x11_cursor.hotspot.x - capture_pos.x,
- target_y + self->x11_cursor.position.y - self->x11_cursor.hotspot.y - capture_pos.y
+ target_pos.x + self->x11_cursor.position.x - self->x11_cursor.hotspot.x - capture_pos.x,
+ target_pos.y + self->x11_cursor.position.y - self->x11_cursor.hotspot.y - capture_pos.y
};
self->params.egl->glEnable(GL_SCISSOR_TEST);
- self->params.egl->glScissor(target_x, target_y, self->capture_size.x, self->capture_size.y);
+ self->params.egl->glScissor(target_pos.x, target_pos.y, self->capture_size.x, self->capture_size.y);
gsr_color_conversion_draw(color_conversion, self->x11_cursor.texture_id,
cursor_pos, self->x11_cursor.size,
@@ -473,6 +514,14 @@ static void render_x11_cursor(gsr_capture_kms *self, gsr_color_conversion *color
self->params.egl->glDisable(GL_SCISSOR_TEST);
}
+static void gsr_capture_kms_update_capture_size_change(gsr_capture_kms *self, gsr_color_conversion *color_conversion, vec2i target_pos, const gsr_kms_response_item *drm_fd) {
+ if(target_pos.x != self->prev_target_pos.x || target_pos.y != self->prev_target_pos.y || drm_fd->src_w != self->prev_plane_size.x || drm_fd->src_h != self->prev_plane_size.y) {
+ self->prev_target_pos = target_pos;
+ self->prev_plane_size = self->capture_size;
+ gsr_color_conversion_clear(color_conversion);
+ }
+}
+
static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
gsr_capture_kms *self = cap->priv;
@@ -494,24 +543,35 @@ static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_c
bool capture_is_combined_plane = false;
const gsr_kms_response_item *drm_fd = find_monitor_drm(self, &capture_is_combined_plane);
- if(!drm_fd)
+ if(!drm_fd) {
+ gsr_capture_kms_cleanup_kms_fds(self);
return -1;
+ }
if(drm_fd->has_hdr_metadata && self->params.hdr && hdr_metadata_is_supported_format(&drm_fd->hdr_metadata))
gsr_kms_set_hdr_metadata(self, drm_fd);
+ if(!self->performance_error_shown && self->monitor_rotation != GSR_MONITOR_ROT_0 && video_codec_context_is_vaapi(self->video_codec_context)) {
+ self->performance_error_shown = true;
+ fprintf(stderr,"gsr warning: gsr_capture_kms_capture: the monitor you are recording is rotated, composition will have to be used."
+ " If you are experience performance problems in the video then record a single window on X11 or use portal capture option instead\n");
+ }
+
const float texture_rotation = monitor_rotation_to_radians(self->monitor_rotation);
- const int target_x = max_int(0, frame->width / 2 - self->capture_size.x / 2);
- const int target_y = max_int(0, frame->height / 2 - self->capture_size.y / 2);
+ const vec2i target_pos = { max_int(0, frame->width / 2 - self->capture_size.x / 2), max_int(0, frame->height / 2 - self->capture_size.y / 2) };
+ self->capture_size = rotate_capture_size_if_rotated(self, (vec2i){ drm_fd->src_w, drm_fd->src_h });
+ gsr_capture_kms_update_capture_size_change(self, color_conversion, target_pos, drm_fd);
vec2i capture_pos = self->capture_pos;
if(!capture_is_combined_plane)
capture_pos = (vec2i){drm_fd->x, drm_fd->y};
- if(!self->performance_error_shown && self->monitor_rotation != GSR_MONITOR_ROT_0 && video_codec_context_is_vaapi(self->video_codec_context)) {
- self->performance_error_shown = true;
- fprintf(stderr,"gsr warning: gsr_capture_kms_capture: the monitor you are recording is rotated, composition will have to be used."
- " If you are experience performance problems in the video then record a single window on X11 or use portal capture option instead\n");
+ // TODO: Hack!! cursor flickers without this when using vaapi copy on wayland.
+ // There is probably some sync issue between opengl and vaapi.
+ // Remove this when that has been figured out. Same for the below glFlush && glFinish
+ for(int i = 0; i < 3; ++i) {
+ self->params.egl->glFlush();
+ self->params.egl->glFinish();
}
/* Fast opengl free path */
@@ -526,7 +586,7 @@ static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_c
pitches[i] = drm_fd->dma_buf[i].pitch;
modifiers[i] = drm_fd->modifier;
}
- vaapi_copy_drm_planes_to_video_surface(self->video_codec_context, frame, capture_pos.x, capture_pos.y, drm_fd->pixel_format, drm_fd->width, drm_fd->height, fds, offsets, pitches, modifiers, drm_fd->num_dma_bufs);
+ vaapi_copy_drm_planes_to_video_surface(self->video_codec_context, frame, (vec2i){capture_pos.x, capture_pos.y}, self->capture_size, target_pos, self->capture_size, drm_fd->pixel_format, (vec2i){drm_fd->width, drm_fd->height}, fds, offsets, pitches, modifiers, drm_fd->num_dma_bufs);
} else {
EGLImage image = gsr_capture_kms_create_egl_image_with_fallback(self, drm_fd);
if(image) {
@@ -538,23 +598,34 @@ static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_c
self->params.egl->glFinish();
gsr_color_conversion_draw(color_conversion, self->external_texture_fallback ? self->external_input_texture_id : self->input_texture_id,
- (vec2i){target_x, target_y}, self->capture_size,
+ target_pos, self->capture_size,
capture_pos, self->capture_size,
texture_rotation, self->external_texture_fallback);
}
+ for(int i = 0; i < 3; ++i) {
+ self->params.egl->glFlush();
+ self->params.egl->glFinish();
+ }
+
if(self->params.record_cursor) {
gsr_kms_response_item *cursor_drm_fd = find_cursor_drm_if_on_monitor(self, drm_fd->connector_id, capture_is_combined_plane);
// The cursor is handled by x11 on x11 instead of using the cursor drm plane because on prime systems with a dedicated nvidia gpu
// the cursor plane is not available when the cursor is on the monitor controlled by the nvidia device.
- if(self->is_x11)
- render_x11_cursor(self, color_conversion, capture_pos, target_x, target_y);
- else if(cursor_drm_fd)
- render_drm_cursor(self, color_conversion, cursor_drm_fd, target_x, target_y, texture_rotation);
+ if(self->is_x11) {
+ const vec2i cursor_monitor_offset = self->capture_pos;
+ render_x11_cursor(self, color_conversion, cursor_monitor_offset, target_pos);
+ } else if(cursor_drm_fd) {
+ render_drm_cursor(self, color_conversion, cursor_drm_fd, target_pos, texture_rotation);
+ }
}
- self->params.egl->glFlush();
- self->params.egl->glFinish();
+ for(int i = 0; i < 3; ++i) {
+ self->params.egl->glFlush();
+ self->params.egl->glFinish();
+ }
+
+ gsr_capture_kms_cleanup_kms_fds(self);
return 0;
}
@@ -566,11 +637,6 @@ static bool gsr_capture_kms_should_stop(gsr_capture *cap, bool *err) {
return false;
}
-static void gsr_capture_kms_capture_end(gsr_capture *cap, AVFrame *frame) {
- (void)frame;
- gsr_capture_kms_cleanup_kms_fds(cap->priv);
-}
-
static gsr_source_color gsr_capture_kms_get_source_color(gsr_capture *cap) {
(void)cap;
return GSR_SOURCE_COLOR_RGB;
@@ -607,6 +673,16 @@ static bool gsr_capture_kms_set_hdr_metadata(gsr_capture *cap, AVMasteringDispla
return true;
}
+static bool gsr_capture_kms_is_damaged(gsr_capture *cap) {
+ gsr_capture_kms *self = cap->priv;
+ return self->damaged;
+}
+
+static void gsr_capture_kms_clear_damage(gsr_capture *cap) {
+ gsr_capture_kms *self = cap->priv;
+ self->damaged = false;
+}
+
static void gsr_capture_kms_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
(void)video_codec_context;
gsr_capture_kms *self = cap->priv;
@@ -649,13 +725,14 @@ gsr_capture* gsr_capture_kms_create(const gsr_capture_kms_params *params) {
*cap = (gsr_capture) {
.start = gsr_capture_kms_start,
.on_event = gsr_capture_kms_on_event,
- .tick = NULL,
+ .tick = gsr_capture_kms_tick,
.should_stop = gsr_capture_kms_should_stop,
.capture = gsr_capture_kms_capture,
- .capture_end = gsr_capture_kms_capture_end,
.get_source_color = gsr_capture_kms_get_source_color,
.uses_external_image = gsr_capture_kms_uses_external_image,
.set_hdr_metadata = gsr_capture_kms_set_hdr_metadata,
+ .is_damaged = gsr_capture_kms_is_damaged,
+ .clear_damage = gsr_capture_kms_clear_damage,
.destroy = gsr_capture_kms_destroy,
.priv = cap_kms
};
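The new tick() above polls the CRTC's vblank sequence counter with drmCrtcGetSequence() and marks the frame damaged whenever the counter has advanced since the previous tick; note that the CRTC id 79 is hardcoded in this commit. A standalone sketch of the same polling idea, assuming a fixed CRTC id and card path (both placeholders, not taken from the diff):

#include <stdio.h>
#include <stdint.h>
#include <fcntl.h>
#include <unistd.h>
#include <xf86drm.h>

int main(void) {
    const uint32_t crtc_id = 79;                   /* placeholder; a robust version enumerates CRTCs */
    int drm_fd = open("/dev/dri/card0", O_RDONLY); /* placeholder card path */
    if(drm_fd < 0)
        return 1;

    uint64_t prev_sequence = 0;
    for(int i = 0; i < 5; ++i) {
        uint64_t sequence = 0, ns = 0;
        /* drmCrtcGetSequence returns 0 on success; treat any advance of the
           vblank sequence as damage, as the tick() above does */
        if(drmCrtcGetSequence(drm_fd, crtc_id, &sequence, &ns) == 0 && sequence != prev_sequence) {
            prev_sequence = sequence;
            printf("damaged: vblank sequence advanced to %llu\n", (unsigned long long)sequence);
        }
        usleep(16000); /* roughly one 60 Hz frame */
    }

    close(drm_fd);
    return 0;
}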
diff --git a/src/capture/nvfbc.c b/src/capture/nvfbc.c
index b7e6bb5..a38df63 100644
--- a/src/capture/nvfbc.c
+++ b/src/capture/nvfbc.c
@@ -471,7 +471,6 @@ gsr_capture* gsr_capture_nvfbc_create(const gsr_capture_nvfbc_params *params) {
.tick = NULL,
.should_stop = NULL,
.capture = gsr_capture_nvfbc_capture,
- .capture_end = NULL,
.get_source_color = gsr_capture_nvfbc_get_source_color,
.uses_external_image = NULL,
.destroy = gsr_capture_nvfbc_destroy,
diff --git a/src/capture/portal.c b/src/capture/portal.c
index 95470c1..2836c71 100644
--- a/src/capture/portal.c
+++ b/src/capture/portal.c
@@ -22,18 +22,20 @@ typedef struct {
gsr_pipewire pipewire;
vec2i capture_size;
- int plane_fds[GSR_PIPEWIRE_DMABUF_MAX_PLANES];
- int num_plane_fds;
+ gsr_pipewire_dmabuf_data dmabuf_data[GSR_PIPEWIRE_DMABUF_MAX_PLANES];
+ int num_dmabuf_data;
+
+ AVCodecContext *video_codec_context;
} gsr_capture_portal;
static void gsr_capture_portal_cleanup_plane_fds(gsr_capture_portal *self) {
- for(int i = 0; i < self->num_plane_fds; ++i) {
- if(self->plane_fds[i] > 0) {
- close(self->plane_fds[i]);
- self->plane_fds[i] = 0;
+ for(int i = 0; i < self->num_dmabuf_data; ++i) {
+ if(self->dmabuf_data[i].fd > 0) {
+ close(self->dmabuf_data[i].fd);
+ self->dmabuf_data[i].fd = 0;
}
}
- self->num_plane_fds = 0;
+ self->num_dmabuf_data = 0;
}
static void gsr_capture_portal_stop(gsr_capture_portal *self) {
@@ -237,7 +239,9 @@ static bool gsr_capture_portal_get_frame_dimensions(gsr_capture_portal *self) {
const double start_time = clock_get_monotonic_seconds();
while(clock_get_monotonic_seconds() - start_time < 5.0) {
bool uses_external_image = false;
- if(gsr_pipewire_map_texture(&self->pipewire, self->texture_map, &region, &cursor_region, self->plane_fds, &self->num_plane_fds, &uses_external_image)) {
+ uint32_t fourcc = 0;
+ uint64_t modifiers = 0;
+ if(gsr_pipewire_map_texture(&self->pipewire, self->texture_map, &region, &cursor_region, self->dmabuf_data, &self->num_dmabuf_data, &fourcc, &modifiers, &uses_external_image)) {
gsr_capture_portal_cleanup_plane_fds(self);
self->capture_size.x = region.width;
self->capture_size.y = region.height;
@@ -300,6 +304,8 @@ static int gsr_capture_portal_start(gsr_capture *cap, AVCodecContext *video_code
frame->width = video_codec_context->width;
frame->height = video_codec_context->height;
+
+ self->video_codec_context = video_codec_context;
return 0;
}
@@ -312,39 +318,57 @@ static int gsr_capture_portal_capture(gsr_capture *cap, AVFrame *frame, gsr_colo
(void)color_conversion;
gsr_capture_portal *self = cap->priv;
- gsr_capture_portal_cleanup_plane_fds(self);
-
/* TODO: Handle formats other than RGB(a) */
gsr_pipewire_region region = {0, 0, 0, 0};
gsr_pipewire_region cursor_region = {0, 0, 0, 0};
+ uint32_t pipewire_fourcc = 0;
+ uint64_t pipewire_modifiers = 0;
bool using_external_image = false;
- if(gsr_pipewire_map_texture(&self->pipewire, self->texture_map, &region, &cursor_region, self->plane_fds, &self->num_plane_fds, &using_external_image)) {
+ if(gsr_pipewire_map_texture(&self->pipewire, self->texture_map, &region, &cursor_region, self->dmabuf_data, &self->num_dmabuf_data, &pipewire_fourcc, &pipewire_modifiers, &using_external_image)) {
if(region.width != self->capture_size.x || region.height != self->capture_size.y) {
- gsr_color_conversion_clear(color_conversion);
self->capture_size.x = region.width;
self->capture_size.y = region.height;
+ gsr_color_conversion_clear(color_conversion);
}
+ } else {
+ return 0;
}
self->params.egl->glFlush();
self->params.egl->glFinish();
- const int target_x = max_int(0, frame->width / 2 - self->capture_size.x / 2);
- const int target_y = max_int(0, frame->height / 2 - self->capture_size.y / 2);
-
- gsr_color_conversion_draw(color_conversion, using_external_image ? self->texture_map.external_texture_id : self->texture_map.texture_id,
- (vec2i){target_x, target_y}, self->capture_size,
- (vec2i){region.x, region.y}, self->capture_size,
- 0.0f, using_external_image);
+ const vec2i target_pos = { max_int(0, frame->width / 2 - self->capture_size.x / 2), max_int(0, frame->height / 2 - self->capture_size.y / 2) };
+
+ // TODO: Handle region crop
+
+ /* Fast opengl free path */
+ if(video_codec_context_is_vaapi(self->video_codec_context)) {
+ int fds[4];
+ uint32_t offsets[4];
+ uint32_t pitches[4];
+ uint64_t modifiers[4];
+ for(int i = 0; i < self->num_dmabuf_data; ++i) {
+ fds[i] = self->dmabuf_data[i].fd;
+ offsets[i] = self->dmabuf_data[i].offset;
+ pitches[i] = self->dmabuf_data[i].stride;
+ modifiers[i] = pipewire_modifiers;
+ }
+ vaapi_copy_drm_planes_to_video_surface(self->video_codec_context, frame, (vec2i){region.x, region.y}, self->capture_size, target_pos, self->capture_size, pipewire_fourcc, self->capture_size, fds, offsets, pitches, modifiers, self->num_dmabuf_data);
+ } else {
+ gsr_color_conversion_draw(color_conversion, using_external_image ? self->texture_map.external_texture_id : self->texture_map.texture_id,
+ target_pos, self->capture_size,
+ (vec2i){region.x, region.y}, self->capture_size,
+ 0.0f, using_external_image);
+ }
if(self->params.record_cursor) {
const vec2i cursor_pos = {
- target_x + cursor_region.x,
- target_y + cursor_region.y
+ target_pos.x + cursor_region.x,
+ target_pos.y + cursor_region.y
};
self->params.egl->glEnable(GL_SCISSOR_TEST);
- self->params.egl->glScissor(target_x, target_y, self->capture_size.x, self->capture_size.y);
+ self->params.egl->glScissor(target_pos.x, target_pos.y, self->capture_size.x, self->capture_size.y);
gsr_color_conversion_draw(color_conversion, self->texture_map.cursor_texture_id,
(vec2i){cursor_pos.x, cursor_pos.y}, (vec2i){cursor_region.width, cursor_region.height},
(vec2i){0, 0}, (vec2i){cursor_region.width, cursor_region.height},
@@ -355,13 +379,9 @@ static int gsr_capture_portal_capture(gsr_capture *cap, AVFrame *frame, gsr_colo
self->params.egl->glFlush();
self->params.egl->glFinish();
- return 0;
-}
-
-static void gsr_capture_portal_capture_end(gsr_capture *cap, AVFrame *frame) {
- (void)frame;
- gsr_capture_portal *self = cap->priv;
gsr_capture_portal_cleanup_plane_fds(self);
+
+ return 0;
}
static gsr_source_color gsr_capture_portal_get_source_color(gsr_capture *cap) {
@@ -418,7 +438,6 @@ gsr_capture* gsr_capture_portal_create(const gsr_capture_portal_params *params)
.tick = NULL,
.should_stop = NULL,
.capture = gsr_capture_portal_capture,
- .capture_end = gsr_capture_portal_capture_end,
.get_source_color = gsr_capture_portal_get_source_color,
.uses_external_image = gsr_capture_portal_uses_external_image,
.is_damaged = gsr_capture_portal_is_damaged,
diff --git a/src/capture/xcomposite.c b/src/capture/xcomposite.c
index 1b6021b..87327ca 100644
--- a/src/capture/xcomposite.c
+++ b/src/capture/xcomposite.c
@@ -29,6 +29,7 @@ typedef struct {
double window_resize_timer;
WindowTexture window_texture;
+ AVCodecContext *video_codec_context;
Atom net_active_window_atom;
@@ -122,12 +123,12 @@ static int gsr_capture_xcomposite_start(gsr_capture *cap, AVCodecContext *video_
frame->width = video_codec_context->width;
frame->height = video_codec_context->height;
+ self->video_codec_context = video_codec_context;
self->window_resize_timer = clock_get_monotonic_seconds();
return 0;
}
-static void gsr_capture_xcomposite_tick(gsr_capture *cap, AVCodecContext *video_codec_context) {
- (void)video_codec_context;
+static void gsr_capture_xcomposite_tick(gsr_capture *cap) {
gsr_capture_xcomposite *self = cap->priv;
if(self->params.follow_focused && !self->follow_focused_initialized) {
@@ -255,27 +256,31 @@ static int gsr_capture_xcomposite_capture(gsr_capture *cap, AVFrame *frame, gsr_
gsr_color_conversion_clear(color_conversion);
}
- const int target_x = max_int(0, frame->width / 2 - self->texture_size.x / 2);
- const int target_y = max_int(0, frame->height / 2 - self->texture_size.y / 2);
+ const vec2i target_pos = { max_int(0, frame->width / 2 - self->texture_size.x / 2), max_int(0, frame->height / 2 - self->texture_size.y / 2) };
self->params.egl->glFlush();
self->params.egl->glFinish();
- gsr_color_conversion_draw(color_conversion, window_texture_get_opengl_texture_id(&self->window_texture),
- (vec2i){target_x, target_y}, self->texture_size,
- (vec2i){0, 0}, self->texture_size,
- 0.0f, false);
+ /* Fast opengl free path */
+ if(video_codec_context_is_vaapi(self->video_codec_context)) {
+ vaapi_copy_egl_image_to_video_surface(self->params.egl, self->window_texture.image, (vec2i){0, 0}, self->texture_size, target_pos, self->texture_size, self->video_codec_context, frame);
+ } else {
+ gsr_color_conversion_draw(color_conversion, window_texture_get_opengl_texture_id(&self->window_texture),
+ target_pos, self->texture_size,
+ (vec2i){0, 0}, self->texture_size,
+ 0.0f, false);
+ }
if(self->params.record_cursor && self->cursor.visible) {
gsr_cursor_tick(&self->cursor, self->window);
const vec2i cursor_pos = {
- target_x + self->cursor.position.x - self->cursor.hotspot.x,
- target_y + self->cursor.position.y - self->cursor.hotspot.y
+ target_pos.x + self->cursor.position.x - self->cursor.hotspot.x,
+ target_pos.y + self->cursor.position.y - self->cursor.hotspot.y
};
self->params.egl->glEnable(GL_SCISSOR_TEST);
- self->params.egl->glScissor(target_x, target_y, self->texture_size.x, self->texture_size.y);
+ self->params.egl->glScissor(target_pos.x, target_pos.y, self->texture_size.x, self->texture_size.y);
gsr_color_conversion_draw(color_conversion, self->cursor.texture_id,
cursor_pos, self->cursor.size,
@@ -335,7 +340,6 @@ gsr_capture* gsr_capture_xcomposite_create(const gsr_capture_xcomposite_params *
.tick = gsr_capture_xcomposite_tick,
.should_stop = gsr_capture_xcomposite_should_stop,
.capture = gsr_capture_xcomposite_capture,
- .capture_end = NULL,
.get_source_color = gsr_capture_xcomposite_get_source_color,
.uses_external_image = NULL,
.get_window_id = gsr_capture_xcomposite_get_window_id,
diff --git a/src/main.cpp b/src/main.cpp
index f2526f8..c0fa148 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -3331,7 +3331,7 @@ int main(int argc, char **argv) {
gsr_damage_on_event(&damage, gsr_egl_get_event_data(&egl));
}
gsr_damage_tick(&damage);
- gsr_capture_tick(capture, video_codec_context);
+ gsr_capture_tick(capture);
if(!is_monitor_capture) {
Window damage_target_window = 0;
@@ -3349,10 +3349,12 @@ int main(int argc, char **argv) {
}
bool damaged = false;
- if(capture->is_damaged)
+ if(use_damage_tracking)
+ damaged = gsr_damage_is_damaged(&damage);
+ else if(capture->is_damaged)
damaged = capture->is_damaged(capture);
else
- damaged = !use_damage_tracking || gsr_damage_is_damaged(&damage);
+ damaged = true;
if(damaged)
++damage_fps_counter;
@@ -3410,7 +3412,6 @@ int main(int argc, char **argv) {
}
}
- gsr_capture_capture_end(capture, video_frame);
video_pts_counter += num_frames;
}
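The reordered check above gives X11 damage tracking first priority when it is in use, then falls back to the capture backend's own is_damaged() callback (the KMS vblank poll or the PipeWire stream), and only treats every frame as damaged when neither source exists. A condensed sketch of that priority order; the header paths are assumed and the surrounding record loop is omitted.

#include <stdbool.h>
#include "capture/capture.h"
#include "damage.h" /* assumed header name for gsr_damage */

static bool frame_is_damaged(gsr_damage *damage, gsr_capture *capture, bool use_damage_tracking) {
    if(use_damage_tracking)
        return gsr_damage_is_damaged(damage);  /* X11 damage-based tracking */
    else if(capture->is_damaged)
        return capture->is_damaged(capture);   /* backend-specific damage (KMS vblank poll, PipeWire) */
    else
        return true;                           /* no damage source available: always encode */
}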
diff --git a/src/pipewire.c b/src/pipewire.c
index 30e2c00..3bf54db 100644
--- a/src/pipewire.c
+++ b/src/pipewire.c
@@ -88,6 +88,7 @@ static const struct pw_core_events core_events = {
static void on_process_cb(void *user_data) {
gsr_pipewire *self = user_data;
struct spa_meta_cursor *cursor = NULL;
+ //struct spa_meta *video_damage = NULL;
/* Find the most recent buffer */
struct pw_buffer *pw_buf = NULL;
@@ -135,6 +136,7 @@ static void on_process_cb(void *user_data) {
// TODO:
}
+ // TODO: Move down to read_metadata
struct spa_meta_region *region = spa_buffer_find_meta_data(buffer, SPA_META_VideoCrop, sizeof(*region));
if(region && spa_meta_region_is_valid(region)) {
// fprintf(stderr, "gsr info: pipewire: crop Region available (%dx%d+%d+%d)\n",
@@ -153,6 +155,17 @@ static void on_process_cb(void *user_data) {
read_metadata:
+ // video_damage = spa_buffer_find_meta(buffer, SPA_META_VideoDamage);
+ // if(video_damage) {
+ // struct spa_meta_region *r = spa_meta_first(video_damage);
+ // if(spa_meta_check(r, video_damage)) {
+ // //fprintf(stderr, "damage: %d,%d %ux%u\n", r->region.position.x, r->region.position.y, r->region.size.width, r->region.size.height);
+ // pthread_mutex_lock(&self->mutex);
+ // self->damaged = true;
+ // pthread_mutex_unlock(&self->mutex);
+ // }
+ // }
+
cursor = spa_buffer_find_meta_data(buffer, SPA_META_Cursor, sizeof(*cursor));
self->cursor.valid = cursor && spa_meta_cursor_is_valid(cursor);
@@ -229,27 +242,35 @@ static void on_param_changed_cb(void *user_data, uint32_t id, const struct spa_p
uint8_t params_buffer[1024];
struct spa_pod_builder pod_builder = SPA_POD_BUILDER_INIT(params_buffer, sizeof(params_buffer));
- const struct spa_pod *params[3];
+ const struct spa_pod *params[4];
params[0] = spa_pod_builder_add_object(
- &pod_builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
- SPA_PARAM_META_type, SPA_POD_Id(SPA_META_VideoCrop),
- SPA_PARAM_META_size,
- SPA_POD_Int(sizeof(struct spa_meta_region)));
+ &pod_builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
+ SPA_PARAM_META_type, SPA_POD_Id(SPA_META_VideoCrop),
+ SPA_PARAM_META_size,
+ SPA_POD_Int(sizeof(struct spa_meta_region)));
params[1] = spa_pod_builder_add_object(
&pod_builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
+ SPA_PARAM_META_type, SPA_POD_Id(SPA_META_VideoDamage),
+ SPA_PARAM_META_size, SPA_POD_CHOICE_RANGE_Int(
+ sizeof(struct spa_meta_region) * 16,
+ sizeof(struct spa_meta_region) * 1,
+ sizeof(struct spa_meta_region) * 16));
+
+ params[2] = spa_pod_builder_add_object(
+ &pod_builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta,
SPA_PARAM_META_type, SPA_POD_Id(SPA_META_Cursor),
SPA_PARAM_META_size,
SPA_POD_CHOICE_RANGE_Int(CURSOR_META_SIZE(64, 64),
CURSOR_META_SIZE(1, 1),
CURSOR_META_SIZE(1024, 1024)));
- params[2] = spa_pod_builder_add_object(
+ params[3] = spa_pod_builder_add_object(
&pod_builder, SPA_TYPE_OBJECT_ParamBuffers, SPA_PARAM_Buffers,
SPA_PARAM_BUFFERS_dataType, SPA_POD_Int(buffer_types));
- pw_stream_update_params(self->stream, params, 3);
+ pw_stream_update_params(self->stream, params, 4);
self->negotiated = true;
}
@@ -694,12 +715,14 @@ static void gsr_pipewire_update_cursor_texture(gsr_pipewire *self, gsr_texture_m
self->cursor.data = NULL;
}
-bool gsr_pipewire_map_texture(gsr_pipewire *self, gsr_texture_map texture_map, gsr_pipewire_region *region, gsr_pipewire_region *cursor_region, int *plane_fds, int *num_plane_fds, bool *using_external_image) {
+bool gsr_pipewire_map_texture(gsr_pipewire *self, gsr_texture_map texture_map, gsr_pipewire_region *region, gsr_pipewire_region *cursor_region, gsr_pipewire_dmabuf_data *dmabuf_data, int *num_dmabuf_data, uint32_t *fourcc, uint64_t *modifiers, bool *using_external_image) {
for(int i = 0; i < GSR_PIPEWIRE_DMABUF_MAX_PLANES; ++i) {
- plane_fds[i] = -1;
+ memset(&dmabuf_data[i], 0, sizeof(gsr_pipewire_dmabuf_data));
}
- *num_plane_fds = 0;
+ *num_dmabuf_data = 0;
*using_external_image = self->external_texture_fallback;
+ *fourcc = 0;
+ *modifiers = 0;
pthread_mutex_lock(&self->mutex);
if(!self->negotiated || self->dmabuf_data[0].fd <= 0) {
@@ -738,10 +761,12 @@ bool gsr_pipewire_map_texture(gsr_pipewire *self, gsr_texture_map texture_map, g
cursor_region->height = self->cursor.height;
for(size_t i = 0; i < self->dmabuf_num_planes; ++i) {
- plane_fds[i] = self->dmabuf_data[i].fd;
+ dmabuf_data[i] = self->dmabuf_data[i];
self->dmabuf_data[i].fd = -1;
}
- *num_plane_fds = self->dmabuf_num_planes;
+ *num_dmabuf_data = self->dmabuf_num_planes;
+ *fourcc = spa_video_format_to_drm_format(self->format.info.raw.format);
+ *modifiers = self->format.info.raw.modifier;
self->dmabuf_num_planes = 0;
pthread_mutex_unlock(&self->mutex);
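gsr_pipewire_map_texture() now also hands back the raw DMA-BUF layout (fd, offset and stride per plane) together with the negotiated DRM fourcc and modifier, so a caller can skip the OpenGL path and import the planes directly into VAAPI, which is what portal.c does above. A hedged usage sketch; the function name is hypothetical, error handling is trimmed and the include path is assumed. The fds handed out are owned by the caller and must be closed.

#include <stdbool.h>
#include <stdint.h>
#include <unistd.h>
#include "pipewire.h"

/* Sketch: map the latest PipeWire buffer and inspect its DMA-BUF layout.
   |self| and |texture_map| are assumed to be initialized elsewhere. */
static void inspect_current_buffer(gsr_pipewire *self, gsr_texture_map texture_map) {
    gsr_pipewire_region region = {0, 0, 0, 0};
    gsr_pipewire_region cursor_region = {0, 0, 0, 0};
    gsr_pipewire_dmabuf_data dmabuf_data[GSR_PIPEWIRE_DMABUF_MAX_PLANES];
    int num_dmabuf_data = 0;
    uint32_t fourcc = 0;
    uint64_t modifiers = 0;
    bool using_external_image = false;

    if(!gsr_pipewire_map_texture(self, texture_map, &region, &cursor_region,
        dmabuf_data, &num_dmabuf_data, &fourcc, &modifiers, &using_external_image))
        return;

    for(int i = 0; i < num_dmabuf_data; ++i) {
        /* fd/offset/stride per plane; these can go straight into a VADRMPRIMESurfaceDescriptor */
        close(dmabuf_data[i].fd);
    }
}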
diff --git a/src/utils.c b/src/utils.c
index 2b1b07c..b4e34fd 100644
--- a/src/utils.c
+++ b/src/utils.c
@@ -12,6 +12,7 @@
#include <xf86drmMode.h>
#include <xf86drm.h>
+#include <libdrm/drm_fourcc.h>
#include <X11/Xatom.h>
#include <X11/extensions/Xrandr.h>
#include <va/va_drmcommon.h>
@@ -628,25 +629,47 @@ void setup_dma_buf_attrs(intptr_t *img_attr, uint32_t format, uint32_t width, ui
}
static VADisplay video_codec_context_get_vaapi_display(AVCodecContext *video_codec_context) {
- AVHWFramesContext *hw_frame_context = (AVHWFramesContext*)video_codec_context->hw_frames_ctx->data;
+ AVBufferRef *hw_frames_ctx = video_codec_context->hw_frames_ctx;
+ if(!hw_frames_ctx)
+ return NULL;
+
+ AVHWFramesContext *hw_frame_context = (AVHWFramesContext*)hw_frames_ctx->data;
AVHWDeviceContext *device_context = (AVHWDeviceContext*)hw_frame_context->device_ctx;
- if(device_context->type == AV_HWDEVICE_TYPE_VAAPI) {
- AVVAAPIDeviceContext *vactx = device_context->hwctx;
- return vactx->display;
- }
- return NULL;
+ if(device_context->type != AV_HWDEVICE_TYPE_VAAPI)
+ return NULL;
+
+ AVVAAPIDeviceContext *vactx = device_context->hwctx;
+ return vactx->display;
}
bool video_codec_context_is_vaapi(AVCodecContext *video_codec_context) {
- AVHWFramesContext *hw_frame_context = (AVHWFramesContext*)video_codec_context->hw_frames_ctx->data;
+ AVBufferRef *hw_frames_ctx = video_codec_context->hw_frames_ctx;
+ if(!hw_frames_ctx)
+ return false;
+
+ AVHWFramesContext *hw_frame_context = (AVHWFramesContext*)hw_frames_ctx->data;
AVHWDeviceContext *device_context = (AVHWDeviceContext*)hw_frame_context->device_ctx;
return device_context->type == AV_HWDEVICE_TYPE_VAAPI;
}
-bool vaapi_copy_drm_planes_to_video_surface(AVCodecContext *video_codec_context, AVFrame *video_frame, int x, int y, uint32_t format, uint32_t width, uint32_t height, const int *fds, const uint32_t *offsets, const uint32_t *pitches, const uint64_t *modifiers, int num_planes) {
+static uint32_t drm_fourcc_to_va_fourcc(uint32_t drm_fourcc) {
+ switch(drm_fourcc) {
+ case DRM_FORMAT_XRGB8888: return VA_FOURCC_BGRX;
+ case DRM_FORMAT_XBGR8888: return VA_FOURCC_RGBX;
+ case DRM_FORMAT_RGBX8888: return VA_FOURCC_XBGR;
+ case DRM_FORMAT_BGRX8888: return VA_FOURCC_XRGB;
+ case DRM_FORMAT_ARGB8888: return VA_FOURCC_BGRA;
+ case DRM_FORMAT_ABGR8888: return VA_FOURCC_RGBA;
+ case DRM_FORMAT_RGBA8888: return VA_FOURCC_ABGR;
+ case DRM_FORMAT_BGRA8888: return VA_FOURCC_ARGB;
+ default: return drm_fourcc;
+ }
+}
+
+bool vaapi_copy_drm_planes_to_video_surface(AVCodecContext *video_codec_context, AVFrame *video_frame, vec2i source_pos, vec2i source_size, vec2i dest_pos, vec2i dest_size, uint32_t format, vec2i size, const int *fds, const uint32_t *offsets, const uint32_t *pitches, const uint64_t *modifiers, int num_planes) {
VAConfigID config_id = 0;
VAContextID context_id = 0;
- VASurfaceID input_surface = 0;
+ VASurfaceID input_surface_id = 0;
VABufferID buffer_id = 0;
bool success = true;
@@ -658,30 +681,30 @@ bool vaapi_copy_drm_planes_to_video_surface(AVCodecContext *video_codec_context,
VAStatus va_status = vaCreateConfig(va_dpy, VAProfileNone, VAEntrypointVideoProc, NULL, 0, &config_id);
if(va_status != VA_STATUS_SUCCESS) {
- fprintf(stderr, "gsr error: vaapi_copy_drm_planes_to_video_surface: vaCreateConfig failed, error: %d\n", va_status);
+ fprintf(stderr, "gsr error: vaapi_copy_drm_planes_to_video_surface: vaCreateConfig failed, error: %s\n", vaErrorStr(va_status));
success = false;
goto done;
}
- VASurfaceID target_surface_id = (uintptr_t)video_frame->data[3];
- va_status = vaCreateContext(va_dpy, config_id, width, height, VA_PROGRESSIVE, &target_surface_id, 1, &context_id);
+ VASurfaceID output_surface_id = (uintptr_t)video_frame->data[3];
+ va_status = vaCreateContext(va_dpy, config_id, size.x, size.y, VA_PROGRESSIVE, &output_surface_id, 1, &context_id);
if(va_status != VA_STATUS_SUCCESS) {
- fprintf(stderr, "gsr error: vaapi_copy_drm_planes_to_video_surface: vaCreateContext failed, error: %d\n", va_status);
+ fprintf(stderr, "gsr error: vaapi_copy_drm_planes_to_video_surface: vaCreateContext failed, error: %s\n", vaErrorStr(va_status));
success = false;
goto done;
}
VADRMPRIMESurfaceDescriptor buf = {0};
- buf.fourcc = format;//VA_FOURCC_BGRX; // TODO: VA_FOURCC_BGRA, VA_FOURCC_X2R10G10B10
- buf.width = width;
- buf.height = height;
+ buf.fourcc = drm_fourcc_to_va_fourcc(format);//VA_FOURCC_BGRX; // TODO: VA_FOURCC_BGRA, VA_FOURCC_X2R10G10B10
+ buf.width = size.x;
+ buf.height = size.y;
buf.num_objects = num_planes;
buf.num_layers = 1;
buf.layers[0].drm_format = format;
buf.layers[0].num_planes = buf.num_objects;
for(int i = 0; i < num_planes; ++i) {
buf.objects[i].fd = fds[i];
- buf.objects[i].size = height * pitches[i]; // TODO:
+ buf.objects[i].size = size.y * pitches[i]; // TODO:
buf.objects[i].drm_format_modifier = modifiers[i];
buf.layers[0].object_index[i] = i;
@@ -700,25 +723,33 @@ bool vaapi_copy_drm_planes_to_video_surface(AVCodecContext *video_codec_context,
attribs[1].value.value.p = &buf;
// TODO: RT_FORMAT with 10 bit/hdr, VA_RT_FORMAT_RGB32_10
- va_status = vaCreateSurfaces(va_dpy, VA_RT_FORMAT_RGB32, width, height, &input_surface, 1, attribs, 2);
+ // TODO: Max size same as source_size
+ va_status = vaCreateSurfaces(va_dpy, VA_RT_FORMAT_RGB32, size.x, size.y, &input_surface_id, 1, attribs, 2);
if(va_status != VA_STATUS_SUCCESS) {
- fprintf(stderr, "gsr error: vaapi_copy_drm_planes_to_video_surface: vaCreateSurfaces failed, error: %d\n", va_status);
+ fprintf(stderr, "gsr error: vaapi_copy_drm_planes_to_video_surface: vaCreateSurfaces failed, error: %s\n", vaErrorStr(va_status));
success = false;
goto done;
}
- // TODO:
+ const VARectangle source_region = {
+ .x = source_pos.x,
+ .y = source_pos.y,
+ .width = source_size.x,
+ .height = source_size.y
+ };
+
const VARectangle output_region = {
- .x = x,
- .y = y,
- .width = width,
- .height = height
+ .x = dest_pos.x,
+ .y = dest_pos.y,
+ .width = dest_size.x,
+ .height = dest_size.y
};
// Copying a surface to another surface will automatically perform the color conversion. Thanks vaapi!
VAProcPipelineParameterBuffer params = {0};
- params.surface = input_surface;
+ params.surface = input_surface_id;
params.surface_region = NULL;
+ params.surface_region = &source_region;
params.output_region = &output_region;
params.output_background_color = 0;
params.filter_flags = VA_FRAME_PICTURE;
@@ -760,7 +791,7 @@ bool vaapi_copy_drm_planes_to_video_surface(AVCodecContext *video_codec_context,
goto done;
}
- va_status = vaBeginPicture(va_dpy, context_id, target_surface_id);
+ va_status = vaBeginPicture(va_dpy, context_id, output_surface_id);
if(va_status != VA_STATUS_SUCCESS) {
fprintf(stderr, "gsr error: vaapi_copy_drm_planes_to_video_surface: vaBeginPicture failed, error: %d\n", va_status);
success = false;
@@ -782,14 +813,16 @@ bool vaapi_copy_drm_planes_to_video_surface(AVCodecContext *video_codec_context,
goto done;
}
- //vaSyncBuffer(self->va_dpy, self->buffer_id, 1000 * 1000);
+ // vaSyncBuffer(va_dpy, buffer_id, 1000 * 1000 * 1000);
+ // vaSyncSurface(va_dpy, input_surface_id);
+ // vaSyncSurface(va_dpy, output_surface_id);
done:
if(buffer_id)
vaDestroyBuffer(va_dpy, buffer_id);
- if(input_surface)
- vaDestroySurfaces(va_dpy, &input_surface, 1);
+ if(input_surface_id)
+ vaDestroySurfaces(va_dpy, &input_surface_id, 1);
if(context_id)
vaDestroyContext(va_dpy, context_id);
@@ -799,3 +832,55 @@ bool vaapi_copy_drm_planes_to_video_surface(AVCodecContext *video_codec_context,
return success;
}
+
+bool vaapi_copy_egl_image_to_video_surface(gsr_egl *egl, EGLImage image, vec2i source_pos, vec2i source_size, vec2i dest_pos, vec2i dest_size, AVCodecContext *video_codec_context, AVFrame *video_frame) {
+ if(!image)
+ return false;
+
+ int texture_fourcc = 0;
+ int texture_num_planes = 0;
+ uint64_t texture_modifiers = 0;
+ if(!egl->eglExportDMABUFImageQueryMESA(egl->egl_display, image, &texture_fourcc, &texture_num_planes, &texture_modifiers)) {
+ fprintf(stderr, "gsr error: gsr_capture_xcomposite_vaapi_tick: eglExportDMABUFImageQueryMESA failed\n");
+ return false;
+ }
+
+ if(texture_num_planes <= 0 || texture_num_planes > 8) {
+ fprintf(stderr, "gsr error: gsr_capture_xcomposite_vaapi_tick: expected planes size to be 0<planes<8 for drm buf, got %d planes\n", texture_num_planes);
+ return false;
+ }
+
+ int texture_fds[8];
+ int32_t texture_strides[8];
+ int32_t texture_offsets[8];
+
+ while(egl->eglGetError() != EGL_SUCCESS){}
+ if(!egl->eglExportDMABUFImageMESA(egl->egl_display, image, texture_fds, texture_strides, texture_offsets)) {
+ fprintf(stderr, "gsr error: gsr_capture_xcomposite_vaapi_tick: eglExportDMABUFImageMESA failed, error: %d\n", egl->eglGetError());
+ return false;
+ }
+
+ int fds[8];
+ uint32_t offsets[8];
+ uint32_t pitches[8];
+ uint64_t modifiers[8];
+ for(int i = 0; i < texture_num_planes; ++i) {
+ fds[i] = texture_fds[i];
+ offsets[i] = texture_offsets[i];
+ pitches[i] = texture_strides[i];
+ modifiers[i] = texture_modifiers;
+
+ if(fds[i] == -1)
+ texture_num_planes = i;
+ }
+ const bool success = texture_num_planes > 0 && vaapi_copy_drm_planes_to_video_surface(video_codec_context, video_frame, source_pos, source_size, dest_pos, dest_size, texture_fourcc, source_size, fds, offsets, pitches, modifiers, texture_num_planes);
+
+ for(int i = 0; i < texture_num_planes; ++i) {
+ if(texture_fds[i] > 0) {
+ close(texture_fds[i]);
+ texture_fds[i] = -1;
+ }
+ }
+
+ return success;
+}
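The new vaapi_copy_egl_image_to_video_surface() above exports an existing EGLImage to DMA-BUF planes via EGL_MESA_image_dma_buf_export and feeds them through vaapi_copy_drm_planes_to_video_surface(), which is how xcomposite.c bypasses the OpenGL color-conversion path for VAAPI encoders. The drm_fourcc_to_va_fourcc() table looks byte-reversed because DRM fourccs describe packed little-endian values while the matching VA fourccs name the bytes in memory order. A hedged usage sketch for a redirected window; the wrapper name is hypothetical, the include paths are assumed and the window texture and codec context are expected to be set up already.

#include <stdbool.h>
#include "utils.h"
#include "window_texture.h"

/* Sketch: copy a window's pixmap-backed EGLImage straight onto the VAAPI encode surface,
   with no crop and placed at the top-left corner. */
static bool copy_window_to_encode_surface(gsr_egl *egl, WindowTexture *window_texture,
                                          AVCodecContext *video_codec_context, AVFrame *frame,
                                          vec2i window_size) {
    if(!video_codec_context_is_vaapi(video_codec_context))
        return false; /* this fast path only applies to VAAPI encoders */

    return vaapi_copy_egl_image_to_video_surface(egl, window_texture->image,
        (vec2i){0, 0}, window_size, /* source rectangle inside the EGLImage */
        (vec2i){0, 0}, window_size, /* destination rectangle on the encode surface */
        video_codec_context, frame);
}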
diff --git a/src/window_texture.c b/src/window_texture.c
index 0f4aa2c..8eef4c9 100644
--- a/src/window_texture.c
+++ b/src/window_texture.c
@@ -16,6 +16,7 @@ int window_texture_init(WindowTexture *window_texture, Display *display, Window
window_texture->display = display;
window_texture->window = window;
window_texture->pixmap = None;
+ window_texture->image = NULL;
window_texture->texture_id = 0;
window_texture->redirected = 0;
window_texture->egl = egl;
@@ -34,6 +35,11 @@ static void window_texture_cleanup(WindowTexture *self, int delete_texture) {
self->texture_id = 0;
}
+ if(self->image) {
+ self->egl->eglDestroyImage(self->egl->egl_display, self->image);
+ self->image = NULL;
+ }
+
if(self->pixmap) {
XFreePixmap(self->display, self->pixmap);
self->pixmap = None;
@@ -101,14 +107,14 @@ int window_texture_on_resize(WindowTexture *self) {
self->pixmap = pixmap;
self->texture_id = texture_id;
+ self->image = image;
cleanup:
self->egl->glBindTexture(GL_TEXTURE_2D, 0);
- if(image)
- self->egl->eglDestroyImage(self->egl->egl_display, image);
-
if(result != 0) {
+ if(image)
+ self->egl->eglDestroyImage(self->egl->egl_display, image);
if(texture_id != 0)
self->egl->glDeleteTextures(1, &texture_id);
if(pixmap)