path: root/src
author    dec05eba <dec05eba@protonmail.com>  2025-02-22 01:05:29 +0100
committer dec05eba <dec05eba@protonmail.com>  2025-02-22 01:05:29 +0100
commit    000da7d64044c4ea2a1679c2864252fee9895d48 (patch)
tree      dadeb51a2c1aeab844cbe5eb074a926905eff6d4 /src
parent    fe4cd2bb0e244c568b24ed1c39a19497c41cb2f9 (diff)
Make image output lossy (use stb image writer); this also significantly improves performance for jpeg
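For orientation (not part of the commit): the lossy image path now hands the captured RGB pixels to stb_image_write instead of going through an FFmpeg image encoder. A minimal standalone sketch of that stb usage, assuming stb_image_write.h is vendored as in this tree (the file name, buffer size and quality value below are made up for illustration):

#define STB_IMAGE_WRITE_IMPLEMENTATION
#include "stb_image_write.h"

#include <stdlib.h>

int main(void) {
    const int width = 64, height = 64;
    unsigned char *rgb = malloc(width * height * 3); /* tightly packed RGB8, no row padding */
    if(!rgb)
        return 1;
    for(int i = 0; i < width * height * 3; ++i)
        rgb[i] = (unsigned char)(i & 0xff); /* dummy gradient instead of a captured frame */
    /* quality is 1..100; the commit maps -q medium/high/very_high/ultra to 60/70/80/95 */
    if(!stbi_write_jpg("out.jpg", width, height, 3, rgb, 80))
        return 1;
    /* png stays lossless; the last argument is the row stride in bytes (0 = tightly packed) */
    if(!stbi_write_png("out.png", width, height, 3, rgb, 0))
        return 1;
    free(rgb);
    return 0;
}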
Diffstat (limited to 'src')
-rw-r--r--  src/capture/capture.c       12
-rw-r--r--  src/capture/kms.c           31
-rw-r--r--  src/capture/nvfbc.c         56
-rw-r--r--  src/capture/portal.c        32
-rw-r--r--  src/capture/xcomposite.c    29
-rw-r--r--  src/egl.c                   39
-rw-r--r--  src/encoder/video/image.c  130
-rw-r--r--  src/image_writer.c          85
-rw-r--r--  src/main.cpp               255
-rw-r--r--  src/utils.c                  5
10 files changed, 324 insertions(+), 350 deletions(-)
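The capture hunks below replace the AVCodecContext*/AVFrame* pair that was threaded through gsr_capture_start/gsr_capture_capture/gsr_capture_destroy with a single gsr_capture_metadata argument. The struct itself is declared in include/capture/capture.h, which is outside this src-limited diff; judging from the fields accessed in the hunks, its shape is roughly the following (an inference, not the actual header):

typedef struct {
    int width;                           /* output size, filled in by the backend in start() */
    int height;
    int fps;
    AVCodecContext *video_codec_context; /* NULL in the new screenshot path */
    AVFrame *frame;                      /* NULL in the new screenshot path */
} gsr_capture_metadata;

Passing NULL for the codec context and frame is what lets the new capture_image_to_file() path in main.cpp start a capture without creating any FFmpeg state; utils.c is updated below so video_codec_context_is_vaapi() tolerates the NULL context.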
diff --git a/src/capture/capture.c b/src/capture/capture.c
index 2a4a689..bc95300 100644
--- a/src/capture/capture.c
+++ b/src/capture/capture.c
@@ -1,9 +1,9 @@
#include "../../include/capture/capture.h"
#include <assert.h>
-int gsr_capture_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
+int gsr_capture_start(gsr_capture *cap, gsr_capture_metadata *capture_metadata) {
assert(!cap->started);
- int res = cap->start(cap, video_codec_context, frame);
+ int res = cap->start(cap, capture_metadata);
if(res == 0)
cap->started = true;
@@ -29,9 +29,9 @@ bool gsr_capture_should_stop(gsr_capture *cap, bool *err) {
return false;
}
-int gsr_capture_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
+int gsr_capture_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
assert(cap->started);
- return cap->capture(cap, frame, color_conversion);
+ return cap->capture(cap, capture_metadata, color_conversion);
}
bool gsr_capture_uses_external_image(gsr_capture *cap) {
@@ -48,6 +48,6 @@ bool gsr_capture_set_hdr_metadata(gsr_capture *cap, AVMasteringDisplayMetadata *
return false;
}
-void gsr_capture_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
- cap->destroy(cap, video_codec_context);
+void gsr_capture_destroy(gsr_capture *cap) {
+ cap->destroy(cap);
}
diff --git a/src/capture/kms.c b/src/capture/kms.c
index ae0c36f..266d4e6 100644
--- a/src/capture/kms.c
+++ b/src/capture/kms.c
@@ -55,7 +55,6 @@ typedef struct {
bool is_x11;
gsr_cursor x11_cursor;
- AVCodecContext *video_codec_context;
bool performance_error_shown;
bool fast_path_failed;
bool mesa_supports_compute_only_vaapi_copy;
@@ -177,7 +176,7 @@ static vec2i rotate_capture_size_if_rotated(gsr_capture_kms *self, vec2i capture
return capture_size;
}
-static int gsr_capture_kms_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
+static int gsr_capture_kms_start(gsr_capture *cap, gsr_capture_metadata *capture_metadata) {
gsr_capture_kms *self = cap->priv;
gsr_capture_kms_create_input_texture_ids(self);
@@ -219,17 +218,14 @@ static int gsr_capture_kms_start(gsr_capture *cap, AVCodecContext *video_codec_c
else
self->capture_size = rotate_capture_size_if_rotated(self, monitor.size);
- /* Disable vsync */
- self->params.egl->eglSwapInterval(self->params.egl->egl_display, 0);
-
if(self->params.output_resolution.x == 0 && self->params.output_resolution.y == 0) {
self->params.output_resolution = self->capture_size;
- video_codec_context->width = FFALIGN(self->capture_size.x, 2);
- video_codec_context->height = FFALIGN(self->capture_size.y, 2);
+ capture_metadata->width = FFALIGN(self->capture_size.x, 2);
+ capture_metadata->height = FFALIGN(self->capture_size.y, 2);
} else {
self->params.output_resolution = scale_keep_aspect_ratio(self->capture_size, self->params.output_resolution);
- video_codec_context->width = FFALIGN(self->params.output_resolution.x, 2);
- video_codec_context->height = FFALIGN(self->params.output_resolution.y, 2);
+ capture_metadata->width = FFALIGN(self->params.output_resolution.x, 2);
+ capture_metadata->height = FFALIGN(self->params.output_resolution.y, 2);
}
self->fast_path_failed = self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD && !gl_driver_version_greater_than(&self->params.egl->gpu_info, 24, 0, 9);
@@ -243,10 +239,6 @@ static int gsr_capture_kms_start(gsr_capture *cap, AVCodecContext *video_codec_c
self->mesa_supports_compute_only_vaapi_copy = self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD && gl_driver_version_greater_than(&self->params.egl->gpu_info, 24, 3, 6);
- frame->width = video_codec_context->width;
- frame->height = video_codec_context->height;
-
- self->video_codec_context = video_codec_context;
self->last_time_monitor_check = clock_get_monotonic_seconds();
return 0;
}
@@ -617,7 +609,7 @@ static void gsr_capture_kms_fail_fast_path_if_not_fast(gsr_capture_kms *self, ui
}
}
-static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
+static int gsr_capture_kms_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
gsr_capture_kms *self = cap->priv;
gsr_capture_kms_cleanup_kms_fds(self);
@@ -648,7 +640,7 @@ static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_c
if(drm_fd->has_hdr_metadata && self->params.hdr && hdr_metadata_is_supported_format(&drm_fd->hdr_metadata))
gsr_kms_set_hdr_metadata(self, drm_fd);
- if(!self->performance_error_shown && self->monitor_rotation != GSR_MONITOR_ROT_0 && video_codec_context_is_vaapi(self->video_codec_context) && self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD) {
+ if(!self->performance_error_shown && self->monitor_rotation != GSR_MONITOR_ROT_0 && video_codec_context_is_vaapi(capture_metadata->video_codec_context) && self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD) {
self->performance_error_shown = true;
self->fast_path_failed = true;
fprintf(stderr, "gsr warning: gsr_capture_kms_capture: the monitor you are recording is rotated, composition will have to be used."
@@ -664,7 +656,7 @@ static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_c
output_size = scale_keep_aspect_ratio(self->capture_size, output_size);
const float texture_rotation = monitor_rotation_to_radians(self->monitor_rotation);
- const vec2i target_pos = { max_int(0, frame->width / 2 - output_size.x / 2), max_int(0, frame->height / 2 - output_size.y / 2) };
+ const vec2i target_pos = { max_int(0, capture_metadata->width / 2 - output_size.x / 2), max_int(0, capture_metadata->height / 2 - output_size.y / 2) };
gsr_capture_kms_update_capture_size_change(self, color_conversion, target_pos, drm_fd);
vec2i capture_pos = self->capture_pos;
@@ -675,7 +667,7 @@ static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_c
self->params.egl->glFinish();
/* Fast opengl free path */
- if(!self->fast_path_failed && self->monitor_rotation == GSR_MONITOR_ROT_0 && video_codec_context_is_vaapi(self->video_codec_context) && self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD) {
+ if(!self->fast_path_failed && self->monitor_rotation == GSR_MONITOR_ROT_0 && video_codec_context_is_vaapi(capture_metadata->video_codec_context) && self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD) {
int fds[4];
uint32_t offsets[4];
uint32_t pitches[4];
@@ -686,7 +678,7 @@ static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_c
pitches[i] = drm_fd->dma_buf[i].pitch;
modifiers[i] = drm_fd->modifier;
}
- if(!vaapi_copy_drm_planes_to_video_surface(self->video_codec_context, frame, (vec2i){capture_pos.x, capture_pos.y}, self->capture_size, target_pos, output_size, drm_fd->pixel_format, (vec2i){drm_fd->width, drm_fd->height}, fds, offsets, pitches, modifiers, drm_fd->num_dma_bufs)) {
+ if(!vaapi_copy_drm_planes_to_video_surface(capture_metadata->video_codec_context, capture_metadata->frame, (vec2i){capture_pos.x, capture_pos.y}, self->capture_size, target_pos, output_size, drm_fd->pixel_format, (vec2i){drm_fd->width, drm_fd->height}, fds, offsets, pitches, modifiers, drm_fd->num_dma_bufs)) {
fprintf(stderr, "gsr error: gsr_capture_kms_capture: vaapi_copy_drm_planes_to_video_surface failed, falling back to opengl copy. Please report this as an issue at https://github.com/dec05eba/gpu-screen-recorder-issues\n");
self->fast_path_failed = true;
}
@@ -777,8 +769,7 @@ static bool gsr_capture_kms_set_hdr_metadata(gsr_capture *cap, AVMasteringDispla
// self->damaged = false;
// }
-static void gsr_capture_kms_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
- (void)video_codec_context;
+static void gsr_capture_kms_destroy(gsr_capture *cap) {
gsr_capture_kms *self = cap->priv;
if(cap->priv) {
gsr_capture_kms_stop(self);
diff --git a/src/capture/nvfbc.c b/src/capture/nvfbc.c
index 676d269..af79e0d 100644
--- a/src/capture/nvfbc.c
+++ b/src/capture/nvfbc.c
@@ -133,31 +133,6 @@ static bool gsr_capture_nvfbc_load_library(gsr_capture *cap) {
return true;
}
-/* TODO: check for glx swap control extension string (GLX_EXT_swap_control, etc) */
-static void set_vertical_sync_enabled(gsr_egl *egl, int enabled) {
- int result = 0;
-
- if(egl->glXSwapIntervalEXT) {
- assert(gsr_window_get_display_server(egl->window) == GSR_DISPLAY_SERVER_X11);
- Display *display = gsr_window_get_display(egl->window);
- const Window window = (Window)gsr_window_get_window(egl->window);
- egl->glXSwapIntervalEXT(display, window, enabled ? 1 : 0);
- } else if(egl->glXSwapIntervalMESA) {
- result = egl->glXSwapIntervalMESA(enabled ? 1 : 0);
- } else if(egl->glXSwapIntervalSGI) {
- result = egl->glXSwapIntervalSGI(enabled ? 1 : 0);
- } else {
- static int warned = 0;
- if (!warned) {
- warned = 1;
- fprintf(stderr, "gsr warning: setting vertical sync not supported\n");
- }
- }
-
- if(result != 0)
- fprintf(stderr, "gsr warning: setting vertical sync failed\n");
-}
-
static void gsr_capture_nvfbc_destroy_session(gsr_capture_nvfbc *self) {
if(self->fbc_handle_created && self->capture_session_created) {
NVFBC_DESTROY_CAPTURE_SESSION_PARAMS destroy_capture_params;
@@ -311,7 +286,7 @@ static void gsr_capture_nvfbc_stop(gsr_capture_nvfbc *self) {
}
}
-static int gsr_capture_nvfbc_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
+static int gsr_capture_nvfbc_start(gsr_capture *cap, gsr_capture_metadata *capture_metadata) {
gsr_capture_nvfbc *self = cap->priv;
if(!gsr_capture_nvfbc_load_library(cap))
@@ -357,27 +332,21 @@ static int gsr_capture_nvfbc_start(gsr_capture *cap, AVCodecContext *video_codec
}
if(self->capture_region) {
- video_codec_context->width = FFALIGN(self->width, 2);
- video_codec_context->height = FFALIGN(self->height, 2);
+ capture_metadata->width = FFALIGN(self->width, 2);
+ capture_metadata->height = FFALIGN(self->height, 2);
} else {
- video_codec_context->width = FFALIGN(self->tracking_width, 2);
- video_codec_context->height = FFALIGN(self->tracking_height, 2);
+ capture_metadata->width = FFALIGN(self->tracking_width, 2);
+ capture_metadata->height = FFALIGN(self->tracking_height, 2);
}
if(self->params.output_resolution.x == 0 && self->params.output_resolution.y == 0) {
- self->params.output_resolution = (vec2i){video_codec_context->width, video_codec_context->height};
+ self->params.output_resolution = (vec2i){capture_metadata->width, capture_metadata->height};
} else {
- self->params.output_resolution = scale_keep_aspect_ratio((vec2i){video_codec_context->width, video_codec_context->height}, self->params.output_resolution);
- video_codec_context->width = FFALIGN(self->params.output_resolution.x, 2);
- video_codec_context->height = FFALIGN(self->params.output_resolution.y, 2);
+ self->params.output_resolution = scale_keep_aspect_ratio((vec2i){capture_metadata->width, capture_metadata->height}, self->params.output_resolution);
+ capture_metadata->width = FFALIGN(self->params.output_resolution.x, 2);
+ capture_metadata->height = FFALIGN(self->params.output_resolution.y, 2);
}
- frame->width = video_codec_context->width;
- frame->height = video_codec_context->height;
-
- /* Disable vsync */
- set_vertical_sync_enabled(self->params.egl, 0);
-
return 0;
error_cleanup:
@@ -385,7 +354,7 @@ static int gsr_capture_nvfbc_start(gsr_capture *cap, AVCodecContext *video_codec
return -1;
}
-static int gsr_capture_nvfbc_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
+static int gsr_capture_nvfbc_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
gsr_capture_nvfbc *self = cap->priv;
const double nvfbc_recreate_retry_time_seconds = 1.0;
@@ -416,7 +385,7 @@ static int gsr_capture_nvfbc_capture(gsr_capture *cap, AVFrame *frame, gsr_color
vec2i output_size = is_scaled ? self->params.output_resolution : frame_size;
output_size = scale_keep_aspect_ratio(frame_size, output_size);
- const vec2i target_pos = { max_int(0, frame->width / 2 - output_size.x / 2), max_int(0, frame->height / 2 - output_size.y / 2) };
+ const vec2i target_pos = { max_int(0, capture_metadata->width / 2 - output_size.x / 2), max_int(0, capture_metadata->height / 2 - output_size.y / 2) };
NVFBC_FRAME_GRAB_INFO frame_info;
memset(&frame_info, 0, sizeof(frame_info));
@@ -450,8 +419,7 @@ static int gsr_capture_nvfbc_capture(gsr_capture *cap, AVFrame *frame, gsr_color
return 0;
}
-static void gsr_capture_nvfbc_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
- (void)video_codec_context;
+static void gsr_capture_nvfbc_destroy(gsr_capture *cap) {
gsr_capture_nvfbc *self = cap->priv;
gsr_capture_nvfbc_stop(self);
free(cap->priv);
diff --git a/src/capture/portal.c b/src/capture/portal.c
index 27486fd..cfbfbcd 100644
--- a/src/capture/portal.c
+++ b/src/capture/portal.c
@@ -25,7 +25,6 @@ typedef struct {
gsr_pipewire_video_dmabuf_data dmabuf_data[GSR_PIPEWIRE_VIDEO_DMABUF_MAX_PLANES];
int num_dmabuf_data;
- AVCodecContext *video_codec_context;
bool fast_path_failed;
bool mesa_supports_compute_only_vaapi_copy;
} gsr_capture_portal;
@@ -257,7 +256,7 @@ static bool gsr_capture_portal_get_frame_dimensions(gsr_capture_portal *self) {
return false;
}
-static int gsr_capture_portal_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
+static int gsr_capture_portal_start(gsr_capture *cap, gsr_capture_metadata *capture_metadata) {
gsr_capture_portal *self = cap->priv;
gsr_capture_portal_create_input_textures(self);
@@ -286,7 +285,7 @@ static int gsr_capture_portal_start(gsr_capture *cap, AVCodecContext *video_code
fprintf(stderr, "gsr info: gsr_capture_portal_start: setting up pipewire\n");
/* TODO: support hdr when pipewire supports it */
/* gsr_pipewire closes the pipewire fd, even on failure */
- if(!gsr_pipewire_video_init(&self->pipewire, pipewire_fd, pipewire_node, video_codec_context->framerate.num, self->params.record_cursor, self->params.egl)) {
+ if(!gsr_pipewire_video_init(&self->pipewire, pipewire_fd, pipewire_node, capture_metadata->fps, self->params.record_cursor, self->params.egl)) {
fprintf(stderr, "gsr error: gsr_capture_portal_start: failed to setup pipewire with fd: %d, node: %" PRIu32 "\n", pipewire_fd, pipewire_node);
gsr_capture_portal_stop(self);
return -1;
@@ -298,17 +297,14 @@ static int gsr_capture_portal_start(gsr_capture *cap, AVCodecContext *video_code
return -1;
}
- /* Disable vsync */
- self->params.egl->eglSwapInterval(self->params.egl->egl_display, 0);
-
if(self->params.output_resolution.x == 0 && self->params.output_resolution.y == 0) {
self->params.output_resolution = self->capture_size;
- video_codec_context->width = FFALIGN(self->capture_size.x, 2);
- video_codec_context->height = FFALIGN(self->capture_size.y, 2);
+ capture_metadata->width = FFALIGN(self->capture_size.x, 2);
+ capture_metadata->height = FFALIGN(self->capture_size.y, 2);
} else {
self->params.output_resolution = scale_keep_aspect_ratio(self->capture_size, self->params.output_resolution);
- video_codec_context->width = FFALIGN(self->params.output_resolution.x, 2);
- video_codec_context->height = FFALIGN(self->params.output_resolution.y, 2);
+ capture_metadata->width = FFALIGN(self->params.output_resolution.x, 2);
+ capture_metadata->height = FFALIGN(self->params.output_resolution.y, 2);
}
self->fast_path_failed = self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD && !gl_driver_version_greater_than(&self->params.egl->gpu_info, 24, 0, 9);
@@ -317,10 +313,6 @@ static int gsr_capture_portal_start(gsr_capture *cap, AVCodecContext *video_code
self->mesa_supports_compute_only_vaapi_copy = self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD && gl_driver_version_greater_than(&self->params.egl->gpu_info, 24, 3, 6);
- frame->width = video_codec_context->width;
- frame->height = video_codec_context->height;
-
- self->video_codec_context = video_codec_context;
return 0;
}
@@ -338,8 +330,7 @@ static void gsr_capture_portal_fail_fast_path_if_not_fast(gsr_capture_portal *se
}
}
-static int gsr_capture_portal_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
- (void)frame;
+static int gsr_capture_portal_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
(void)color_conversion;
gsr_capture_portal *self = cap->priv;
@@ -365,7 +356,7 @@ static int gsr_capture_portal_capture(gsr_capture *cap, AVFrame *frame, gsr_colo
vec2i output_size = is_scaled ? self->params.output_resolution : self->capture_size;
output_size = scale_keep_aspect_ratio(self->capture_size, output_size);
- const vec2i target_pos = { max_int(0, frame->width / 2 - output_size.x / 2), max_int(0, frame->height / 2 - output_size.y / 2) };
+ const vec2i target_pos = { max_int(0, capture_metadata->width / 2 - output_size.x / 2), max_int(0, capture_metadata->height / 2 - output_size.y / 2) };
self->params.egl->glFlush();
self->params.egl->glFinish();
@@ -373,7 +364,7 @@ static int gsr_capture_portal_capture(gsr_capture *cap, AVFrame *frame, gsr_colo
// TODO: Handle region crop
/* Fast opengl free path */
- if(!self->fast_path_failed && video_codec_context_is_vaapi(self->video_codec_context) && self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD) {
+ if(!self->fast_path_failed && video_codec_context_is_vaapi(capture_metadata->video_codec_context) && self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD) {
int fds[4];
uint32_t offsets[4];
uint32_t pitches[4];
@@ -384,7 +375,7 @@ static int gsr_capture_portal_capture(gsr_capture *cap, AVFrame *frame, gsr_colo
pitches[i] = self->dmabuf_data[i].stride;
modifiers[i] = pipewire_modifiers;
}
- if(!vaapi_copy_drm_planes_to_video_surface(self->video_codec_context, frame, (vec2i){region.x, region.y}, self->capture_size, target_pos, output_size, pipewire_fourcc, self->capture_size, fds, offsets, pitches, modifiers, self->num_dmabuf_data)) {
+ if(!vaapi_copy_drm_planes_to_video_surface(capture_metadata->video_codec_context, capture_metadata->frame, (vec2i){region.x, region.y}, self->capture_size, target_pos, output_size, pipewire_fourcc, self->capture_size, fds, offsets, pitches, modifiers, self->num_dmabuf_data)) {
fprintf(stderr, "gsr error: gsr_capture_portal_capture: vaapi_copy_drm_planes_to_video_surface failed, falling back to opengl copy. Please report this as an issue at https://github.com/dec05eba/gpu-screen-recorder-issues\n");
self->fast_path_failed = true;
}
@@ -442,8 +433,7 @@ static void gsr_capture_portal_clear_damage(gsr_capture *cap) {
gsr_pipewire_video_clear_damage(&self->pipewire);
}
-static void gsr_capture_portal_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
- (void)video_codec_context;
+static void gsr_capture_portal_destroy(gsr_capture *cap) {
gsr_capture_portal *self = cap->priv;
if(cap->priv) {
gsr_capture_portal_stop(self);
diff --git a/src/capture/xcomposite.c b/src/capture/xcomposite.c
index 5cef71d..94e691b 100644
--- a/src/capture/xcomposite.c
+++ b/src/capture/xcomposite.c
@@ -31,7 +31,6 @@ typedef struct {
double window_resize_timer;
WindowTexture window_texture;
- AVCodecContext *video_codec_context;
Atom net_active_window_atom;
@@ -64,7 +63,7 @@ static Window get_focused_window(Display *display, Atom net_active_window_atom)
return None;
}
-static int gsr_capture_xcomposite_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
+static int gsr_capture_xcomposite_start(gsr_capture *cap, gsr_capture_metadata *capture_metadata) {
gsr_capture_xcomposite *self = cap->priv;
if(self->params.follow_focused) {
@@ -95,8 +94,6 @@ static int gsr_capture_xcomposite_start(gsr_capture *cap, AVCodecContext *video_
// TODO: Get select and add these on top of it and then restore at the end. Also do the same in other xcomposite
XSelectInput(self->display, self->window, StructureNotifyMask | ExposureMask);
- /* Disable vsync */
- self->params.egl->eglSwapInterval(self->params.egl->egl_display, 0);
if(window_texture_init(&self->window_texture, self->display, self->window, self->params.egl) != 0 && !self->params.follow_focused) {
fprintf(stderr, "gsr error: gsr_capture_xcomposite_start: failed to get window texture for window %ld\n", (long)self->window);
return -1;
@@ -117,21 +114,17 @@ static int gsr_capture_xcomposite_start(gsr_capture *cap, AVCodecContext *video_
if(self->params.output_resolution.x == 0 && self->params.output_resolution.y == 0) {
self->params.output_resolution = self->texture_size;
- video_codec_context->width = FFALIGN(self->texture_size.x, 2);
- video_codec_context->height = FFALIGN(self->texture_size.y, 2);
+ capture_metadata->width = FFALIGN(self->texture_size.x, 2);
+ capture_metadata->height = FFALIGN(self->texture_size.y, 2);
} else {
- video_codec_context->width = FFALIGN(self->params.output_resolution.x, 2);
- video_codec_context->height = FFALIGN(self->params.output_resolution.y, 2);
+ capture_metadata->width = FFALIGN(self->params.output_resolution.x, 2);
+ capture_metadata->height = FFALIGN(self->params.output_resolution.y, 2);
}
self->fast_path_failed = self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD && !gl_driver_version_greater_than(&self->params.egl->gpu_info, 24, 0, 9);
if(self->fast_path_failed)
fprintf(stderr, "gsr warning: gsr_capture_kms_start: your amd driver (mesa) version is known to be buggy (<= version 24.0.9), falling back to opengl copy\n");
- frame->width = video_codec_context->width;
- frame->height = video_codec_context->height;
-
- self->video_codec_context = video_codec_context;
self->window_resize_timer = clock_get_monotonic_seconds();
return 0;
}
@@ -255,9 +248,8 @@ static bool gsr_capture_xcomposite_should_stop(gsr_capture *cap, bool *err) {
return false;
}
-static int gsr_capture_xcomposite_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
+static int gsr_capture_xcomposite_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) {
gsr_capture_xcomposite *self = cap->priv;
- (void)frame;
if(self->clear_background) {
self->clear_background = false;
@@ -268,14 +260,14 @@ static int gsr_capture_xcomposite_capture(gsr_capture *cap, AVFrame *frame, gsr_
vec2i output_size = is_scaled ? self->params.output_resolution : self->texture_size;
output_size = scale_keep_aspect_ratio(self->texture_size, output_size);
- const vec2i target_pos = { max_int(0, frame->width / 2 - output_size.x / 2), max_int(0, frame->height / 2 - output_size.y / 2) };
+ const vec2i target_pos = { max_int(0, capture_metadata->width / 2 - output_size.x / 2), max_int(0, capture_metadata->height / 2 - output_size.y / 2) };
self->params.egl->glFlush();
self->params.egl->glFinish();
/* Fast opengl free path */
- if(!self->fast_path_failed && video_codec_context_is_vaapi(self->video_codec_context) && self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD) {
- if(!vaapi_copy_egl_image_to_video_surface(self->params.egl, self->window_texture.image, (vec2i){0, 0}, self->texture_size, target_pos, output_size, self->video_codec_context, frame)) {
+ if(!self->fast_path_failed && video_codec_context_is_vaapi(capture_metadata->video_codec_context) && self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_AMD) {
+ if(!vaapi_copy_egl_image_to_video_surface(self->params.egl, self->window_texture.image, (vec2i){0, 0}, self->texture_size, target_pos, output_size, capture_metadata->video_codec_context, capture_metadata->frame)) {
fprintf(stderr, "gsr error: gsr_capture_xcomposite_capture: vaapi_copy_egl_image_to_video_surface failed, falling back to opengl copy. Please report this as an issue at https://github.com/dec05eba/gpu-screen-recorder-issues\n");
self->fast_path_failed = true;
}
@@ -325,8 +317,7 @@ static uint64_t gsr_capture_xcomposite_get_window_id(gsr_capture *cap) {
return self->window;
}
-static void gsr_capture_xcomposite_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
- (void)video_codec_context;
+static void gsr_capture_xcomposite_destroy(gsr_capture *cap) {
if(cap->priv) {
gsr_capture_xcomposite_stop(cap->priv);
free(cap->priv);
diff --git a/src/egl.c b/src/egl.c
index 682810d..be5a44d 100644
--- a/src/egl.c
+++ b/src/egl.c
@@ -355,6 +355,44 @@ static void debug_callback(unsigned int source, unsigned int type, unsigned int
fprintf(stderr, "gsr info: gl callback: %s type = 0x%x, severity = 0x%x, message = %s\n", type == GL_DEBUG_TYPE_ERROR ? "** GL ERROR **" : "", type, severity, message);
}
+/* TODO: check for glx swap control extension string (GLX_EXT_swap_control, etc) */
+static void set_vertical_sync_enabled(gsr_egl *egl, int enabled) {
+ int result = 0;
+
+ if(egl->glXSwapIntervalEXT) {
+ assert(gsr_window_get_display_server(egl->window) == GSR_DISPLAY_SERVER_X11);
+ Display *display = gsr_window_get_display(egl->window);
+ const Window window = (Window)gsr_window_get_window(egl->window);
+ egl->glXSwapIntervalEXT(display, window, enabled ? 1 : 0);
+ } else if(egl->glXSwapIntervalMESA) {
+ result = egl->glXSwapIntervalMESA(enabled ? 1 : 0);
+ } else if(egl->glXSwapIntervalSGI) {
+ result = egl->glXSwapIntervalSGI(enabled ? 1 : 0);
+ } else {
+ static int warned = 0;
+ if (!warned) {
+ warned = 1;
+ fprintf(stderr, "gsr warning: setting vertical sync not supported\n");
+ }
+ }
+
+ if(result != 0)
+ fprintf(stderr, "gsr warning: setting vertical sync failed\n");
+}
+
+static void gsr_egl_disable_vsync(gsr_egl *self) {
+ switch(self->context_type) {
+ case GSR_GL_CONTEXT_TYPE_EGL: {
+ self->eglSwapInterval(self->egl_display, 0);
+ break;
+ }
+ case GSR_GL_CONTEXT_TYPE_GLX: {
+ set_vertical_sync_enabled(self, 0);
+ break;
+ }
+ }
+}
+
bool gsr_egl_load(gsr_egl *self, gsr_window *window, bool is_monitor_capture, bool enable_debug) {
memset(self, 0, sizeof(gsr_egl));
self->context_type = GSR_GL_CONTEXT_TYPE_EGL;
@@ -416,6 +454,7 @@ bool gsr_egl_load(gsr_egl *self, gsr_window *window, bool is_monitor_capture, bo
self->glDebugMessageCallback(debug_callback, NULL);
}
+ gsr_egl_disable_vsync(self);
return true;
fail:
diff --git a/src/encoder/video/image.c b/src/encoder/video/image.c
deleted file mode 100644
index 5f48b7b..0000000
--- a/src/encoder/video/image.c
+++ /dev/null
@@ -1,130 +0,0 @@
-#include "../../../include/encoder/video/image.h"
-#include "../../../include/egl.h"
-
-#include <libavcodec/avcodec.h>
-#include <libavutil/frame.h>
-
-#include <stdlib.h>
-
-#define LINESIZE_ALIGNMENT 4
-
-typedef struct {
- gsr_video_encoder_image_params params;
-
- unsigned int target_texture;
-} gsr_video_encoder_image;
-
-static unsigned int gl_create_texture(gsr_egl *egl, int width, int height, int internal_format, unsigned int format) {
- unsigned int texture_id = 0;
- egl->glGenTextures(1, &texture_id);
- egl->glBindTexture(GL_TEXTURE_2D, texture_id);
- egl->glTexImage2D(GL_TEXTURE_2D, 0, internal_format, width, height, 0, format, GL_UNSIGNED_BYTE, NULL);
-
- egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
- egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
-
- egl->glBindTexture(GL_TEXTURE_2D, 0);
- return texture_id;
-}
-
-static bool gsr_video_encoder_image_setup_textures(gsr_video_encoder_image *self, AVCodecContext *video_codec_context, AVFrame *frame) {
- int res = av_frame_get_buffer(frame, LINESIZE_ALIGNMENT);
- if(res < 0) {
- fprintf(stderr, "gsr error: gsr_video_encoder_image_setup_textures: av_frame_get_buffer failed: %d\n", res);
- return false;
- }
-
- res = av_frame_make_writable(frame);
- if(res < 0) {
- fprintf(stderr, "gsr error: gsr_video_encoder_image_setup_textures: av_frame_make_writable failed: %d\n", res);
- return false;
- }
-
- self->target_texture = gl_create_texture(self->params.egl, video_codec_context->width, video_codec_context->height, self->params.color_depth == GSR_COLOR_DEPTH_8_BITS ? GL_RGB8 : GL_RGB16, GL_RGB);
- if(self->target_texture == 0) {
- fprintf(stderr, "gsr error: gsr_capture_kms_setup_cuda_textures: failed to create opengl texture\n");
- return false;
- }
-
- return true;
-}
-
-static void gsr_video_encoder_image_stop(gsr_video_encoder_image *self, AVCodecContext *video_codec_context);
-
-static bool gsr_video_encoder_image_start(gsr_video_encoder *encoder, AVCodecContext *video_codec_context, AVFrame *frame) {
- gsr_video_encoder_image *self = encoder->priv;
-
- video_codec_context->width = FFALIGN(video_codec_context->width, LINESIZE_ALIGNMENT);
- video_codec_context->height = FFALIGN(video_codec_context->height, 2);
-
- frame->width = video_codec_context->width;
- frame->height = video_codec_context->height;
-
- if(!gsr_video_encoder_image_setup_textures(self, video_codec_context, frame)) {
- gsr_video_encoder_image_stop(self, video_codec_context);
- return false;
- }
-
- return true;
-}
-
-void gsr_video_encoder_image_stop(gsr_video_encoder_image *self, AVCodecContext *video_codec_context) {
- (void)video_codec_context;
- self->params.egl->glDeleteTextures(1, &self->target_texture);
- self->target_texture = 0;
-}
-
-static void gsr_video_encoder_image_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame, gsr_color_conversion *color_conversion) {
- gsr_video_encoder_image *self = encoder->priv;
- // TODO: hdr support
- self->params.egl->glBindTexture(GL_TEXTURE_2D, self->target_texture);
- // We could use glGetTexSubImage and then we wouldn't have to use a specific linesize (LINESIZE_ALIGNMENT) that adds padding,
- // but glGetTexSubImage is only available starting from opengl 4.5.
- self->params.egl->glGetTexImage(GL_TEXTURE_2D, 0, GL_RGB, GL_UNSIGNED_BYTE, frame->data[0]);
- self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
- // cap_kms->kms.base.egl->eglSwapBuffers(cap_kms->kms.base.egl->egl_display, cap_kms->kms.base.egl->egl_surface);
-
- self->params.egl->glFlush();
- self->params.egl->glFinish();
-}
-
-static void gsr_video_encoder_image_get_textures(gsr_video_encoder *encoder, unsigned int *textures, int *num_textures, gsr_destination_color *destination_color) {
- gsr_video_encoder_image *self = encoder->priv;
- textures[0] = self->target_texture;
- *num_textures = 1;
- // TODO: 10-bit support
- //*destination_color = self->params.color_depth == GSR_COLOR_DEPTH_10_BITS ? GSR_DESTINATION_COLOR_P010 : GSR_DESTINATION_COLOR_NV12;
- *destination_color = GSR_DESTINATION_COLOR_RGB8;
-}
-
-static void gsr_video_encoder_image_destroy(gsr_video_encoder *encoder, AVCodecContext *video_codec_context) {
- gsr_video_encoder_image_stop(encoder->priv, video_codec_context);
- free(encoder->priv);
- free(encoder);
-}
-
-gsr_video_encoder* gsr_video_encoder_image_create(const gsr_video_encoder_image_params *params) {
- gsr_video_encoder *encoder = calloc(1, sizeof(gsr_video_encoder));
- if(!encoder)
- return NULL;
-
- gsr_video_encoder_image *encoder_image = calloc(1, sizeof(gsr_video_encoder_image));
- if(!encoder_image) {
- free(encoder);
- return NULL;
- }
-
- encoder_image->params = *params;
-
- *encoder = (gsr_video_encoder) {
- .start = gsr_video_encoder_image_start,
- .copy_textures_to_frame = gsr_video_encoder_image_copy_textures_to_frame,
- .get_textures = gsr_video_encoder_image_get_textures,
- .destroy = gsr_video_encoder_image_destroy,
- .priv = encoder_image
- };
-
- return encoder;
-}
diff --git a/src/image_writer.c b/src/image_writer.c
new file mode 100644
index 0000000..d01a66c
--- /dev/null
+++ b/src/image_writer.c
@@ -0,0 +1,85 @@
+#include "../include/image_writer.h"
+#include "../include/egl.h"
+
+#define STB_IMAGE_WRITE_IMPLEMENTATION
+#include "../external/stb_image_write.h"
+
+#include <stdlib.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <assert.h>
+
+static unsigned int gl_create_texture(gsr_egl *egl, int width, int height, int internal_format, unsigned int format) {
+ unsigned int texture_id = 0;
+ egl->glGenTextures(1, &texture_id);
+ egl->glBindTexture(GL_TEXTURE_2D, texture_id);
+ egl->glTexImage2D(GL_TEXTURE_2D, 0, internal_format, width, height, 0, format, GL_UNSIGNED_BYTE, NULL);
+
+ egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
+ egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+
+ egl->glBindTexture(GL_TEXTURE_2D, 0);
+ return texture_id;
+}
+
+/* TODO: Support hdr/10-bit */
+bool gsr_image_writer_init(gsr_image_writer *self, gsr_image_writer_source source, gsr_egl *egl, int width, int height) {
+ assert(source == GSR_IMAGE_WRITER_SOURCE_OPENGL);
+ self->source = source;
+ self->egl = egl;
+ self->width = width;
+ self->height = height;
+ self->texture = gl_create_texture(self->egl, self->width, self->height, GL_RGB8, GL_RGB); /* TODO: use GL_RGB16 instead of GL_RGB8 for hdr/10-bit */
+ if(self->texture == 0) {
+ fprintf(stderr, "gsr error: gsr_image_writer_init: failed to create texture\n");
+ return false;
+ }
+ return true;
+}
+
+void gsr_image_writer_deinit(gsr_image_writer *self) {
+ if(self->texture) {
+ self->egl->glDeleteTextures(1, &self->texture);
+ self->texture = 0;
+ }
+}
+
+bool gsr_image_writer_write_to_file(gsr_image_writer *self, const char *filepath, gsr_image_format image_format, int quality) {
+ if(quality < 1)
+ quality = 1;
+ else if(quality > 100)
+ quality = 100;
+
+ uint8_t *frame_data = malloc(self->width * self->height * 3);
+ if(!frame_data) {
+ fprintf(stderr, "gsr error: gsr_image_writer_write_to_file: failed to allocate memory for image frame\n");
+ return false;
+ }
+
+ // TODO: hdr support
+ self->egl->glBindTexture(GL_TEXTURE_2D, self->texture);
+ // We could use glGetTexSubImage, but it's only available starting from opengl 4.5
+ self->egl->glGetTexImage(GL_TEXTURE_2D, 0, GL_RGB, GL_UNSIGNED_BYTE, frame_data);
+ self->egl->glBindTexture(GL_TEXTURE_2D, 0);
+
+ self->egl->glFlush();
+ self->egl->glFinish();
+
+ bool success = false;
+ switch(image_format) {
+ case GSR_IMAGE_FORMAT_JPEG:
+ success = stbi_write_jpg(filepath, self->width, self->height, 3, frame_data, quality);
+ break;
+ case GSR_IMAGE_FORMAT_PNG:
+ success = stbi_write_png(filepath, self->width, self->height, 3, frame_data, 0);
+ break;
+ }
+
+ if(!success)
+ fprintf(stderr, "gsr error: gsr_image_writer_write_to_file: failed to write image data to output file %s\n", filepath);
+
+ free(frame_data);
+ return success;
+}
diff --git a/src/main.cpp b/src/main.cpp
index 2eb348d..10dc5a6 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -13,7 +13,6 @@ extern "C" {
#include "../include/encoder/video/vaapi.h"
#include "../include/encoder/video/vulkan.h"
#include "../include/encoder/video/software.h"
-#include "../include/encoder/video/image.h"
#include "../include/codec_query/nvenc.h"
#include "../include/codec_query/vaapi.h"
#include "../include/codec_query/vulkan.h"
@@ -23,6 +22,7 @@ extern "C" {
#include "../include/utils.h"
#include "../include/damage.h"
#include "../include/color_conversion.h"
+#include "../include/image_writer.h"
}
#include <assert.h>
@@ -113,9 +113,7 @@ enum class VideoCodec {
VP8,
VP9,
H264_VULKAN,
- HEVC_VULKAN,
- JPEG,
- PNG
+ HEVC_VULKAN
};
enum class AudioCodec {
@@ -219,16 +217,6 @@ static bool video_codec_is_vulkan(VideoCodec video_codec) {
}
}
-static bool video_codec_is_image(VideoCodec video_codec) {
- switch(video_codec) {
- case VideoCodec::JPEG:
- case VideoCodec::PNG:
- return true;
- default:
- return false;
- }
-}
-
struct PacketData {
PacketData() {}
PacketData(const PacketData&) = delete;
@@ -593,22 +581,7 @@ static AVCodecContext *create_video_codec_context(AVPixelFormat pix_fmt,
if (codec_context->codec_id == AV_CODEC_ID_MPEG1VIDEO)
codec_context->mb_decision = 2;
- if(video_codec_is_image(video_codec)) {
- switch(video_quality) {
- case VideoQuality::MEDIUM:
- codec_context->compression_level = 8;
- break;
- case VideoQuality::HIGH:
- codec_context->compression_level = 6;
- break;
- case VideoQuality::VERY_HIGH:
- codec_context->compression_level = 4;
- break;
- case VideoQuality::ULTRA:
- codec_context->compression_level = 2;
- break;
- }
- } else if(!use_software_video_encoder && vendor != GSR_GPU_VENDOR_NVIDIA && bitrate_mode != BitrateMode::CBR) {
+ if(!use_software_video_encoder && vendor != GSR_GPU_VENDOR_NVIDIA && bitrate_mode != BitrateMode::CBR) {
// 8 bit / 10 bit = 80%, and increase it even more
const float quality_multiply = hdr ? (8.0f/10.0f * 0.7f) : 1.0f;
if(codec_context->codec_id == AV_CODEC_ID_AV1 || codec_context->codec_id == AV_CODEC_ID_H264 || codec_context->codec_id == AV_CODEC_ID_HEVC) {
@@ -743,15 +716,6 @@ static AVFrame* create_audio_frame(AVCodecContext *audio_codec_context) {
return frame;
}
-static void open_video_image(AVCodecContext *codec_context) {
- AVDictionary *options = nullptr;
- int ret = avcodec_open2(codec_context, codec_context->codec, &options);
- if (ret < 0) {
- fprintf(stderr, "Error: Could not open video codec: %s\n", av_error_to_string(ret));
- _exit(1);
- }
-}
-
static void dict_set_profile(AVCodecContext *codec_context, gsr_gpu_vendor vendor, gsr_color_depth color_depth, AVDictionary **options) {
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(61, 17, 100)
if(codec_context->codec_id == AV_CODEC_ID_H264) {
@@ -1295,7 +1259,8 @@ static void usage_full() {
printf(" %s -w screen -f 60 -a \"app:firefox|app:csgo\" -o \"$HOME/Videos/video.mp4\"\n", program_name);
printf(" %s -w screen -f 60 -a \"app-inverse:firefox|app-inverse:csgo\" -o \"$HOME/Videos/video.mp4\"\n", program_name);
printf(" %s -w screen -f 60 -a \"default-input|app-inverse:Brave\" -o \"$HOME/Videos/video.mp4\"\n", program_name);
- printf(" %s -w screen -f 60 -o \"$HOME/Pictures/image.jpg\"\n", program_name);
+ printf(" %s -w screen -o \"$HOME/Pictures/image.jpg\"\n", program_name);
+ printf(" %s -w screen -q medium -o \"$HOME/Pictures/image.jpg\"\n", program_name);
//fprintf(stderr, " gpu-screen-recorder -w screen -f 60 -q ultra -pixfmt yuv444 -o video.mp4\n");
fflush(stdout);
_exit(1);
@@ -1895,14 +1860,6 @@ fail:
static gsr_video_encoder* create_video_encoder(gsr_egl *egl, bool overclock, gsr_color_depth color_depth, bool use_software_video_encoder, VideoCodec video_codec) {
gsr_video_encoder *video_encoder = nullptr;
- if(video_codec_is_image(video_codec)) {
- gsr_video_encoder_image_params params;
- params.egl = egl;
- params.color_depth = color_depth;
- video_encoder = gsr_video_encoder_image_create(&params);
- return video_encoder;
- }
-
if(use_software_video_encoder) {
gsr_video_encoder_software_params params;
params.egl = egl;
@@ -2052,10 +2009,6 @@ static const AVCodec* get_ffmpeg_video_codec(VideoCodec video_codec, gsr_gpu_ven
return avcodec_find_encoder_by_name("h264_vulkan");
case VideoCodec::HEVC_VULKAN:
return avcodec_find_encoder_by_name("hevc_vulkan");
- case VideoCodec::JPEG:
- return avcodec_find_encoder_by_name("libopenjpeg");
- case VideoCodec::PNG:
- return avcodec_find_encoder_by_name("png");
}
return nullptr;
}
@@ -2126,10 +2079,6 @@ static void list_supported_video_codecs(gsr_egl *egl, bool wayland) {
puts("vp8");
if(supported_video_codecs.vp9.supported)
puts("vp9");
- if(avcodec_find_encoder_by_name("libopenjpeg"))
- puts("jpeg");
- if(avcodec_find_encoder_by_name("png"))
- puts("png");
//if(supported_video_codecs_vulkan.h264.supported)
// puts("h264_vulkan");
//if(supported_video_codecs_vulkan.hevc.supported)
@@ -2247,6 +2196,9 @@ static void info_command() {
list_gpu_info(&egl);
puts("section=video_codecs");
list_supported_video_codecs(&egl, wayland);
+ puts("section=image_formats");
+ puts("jpeg");
+ puts("png");
puts("section=capture_options");
list_supported_capture_options(window, egl.card_path, list_monitors);
@@ -2404,7 +2356,7 @@ static void validate_monitor_get_valid(const gsr_egl *egl, std::string &window_s
}
}
-static gsr_capture* create_capture_impl(std::string &window_str, vec2i output_resolution, bool wayland, gsr_egl *egl, int fps, VideoCodec video_codec, gsr_color_range color_range,
+static gsr_capture* create_capture_impl(std::string &window_str, vec2i output_resolution, bool wayland, gsr_egl *egl, int fps, bool hdr, gsr_color_range color_range,
bool record_cursor, bool restore_portal_session, const char *portal_session_token_filepath,
gsr_color_depth color_depth)
{
@@ -2478,7 +2430,7 @@ static gsr_capture* create_capture_impl(std::string &window_str, vec2i output_re
kms_params.color_depth = color_depth;
kms_params.color_range = color_range;
kms_params.record_cursor = record_cursor;
- kms_params.hdr = video_codec_is_hdr(video_codec);
+ kms_params.hdr = hdr;
kms_params.fps = fps;
kms_params.output_resolution = output_resolution;
capture = gsr_capture_kms_create(&kms_params);
@@ -2516,11 +2468,107 @@ static gsr_capture* create_capture_impl(std::string &window_str, vec2i output_re
return capture;
}
+static gsr_color_range image_format_to_color_range(gsr_image_format image_format) {
+ switch(image_format) {
+ case GSR_IMAGE_FORMAT_JPEG: return GSR_COLOR_RANGE_LIMITED;
+ case GSR_IMAGE_FORMAT_PNG: return GSR_COLOR_RANGE_FULL;
+ }
+ assert(false);
+ return GSR_COLOR_RANGE_FULL;
+}
+
+static int video_quality_to_image_quality_value(VideoQuality video_quality) {
+ switch(video_quality) {
+ case VideoQuality::MEDIUM:
+ return 60;
+ case VideoQuality::HIGH:
+ return 70;
+ case VideoQuality::VERY_HIGH:
+ return 80;
+ case VideoQuality::ULTRA:
+ return 95;
+ }
+ assert(false);
+ return 80;
+}
+
+// TODO: 10-bit and hdr.
+static void capture_image_to_file(const char *filepath, std::string &window_str, vec2i output_resolution, bool wayland, gsr_egl *egl, gsr_image_format image_format,
+ bool record_cursor, bool restore_portal_session, const char *portal_session_token_filepath, VideoQuality video_quality) {
+ const gsr_color_range color_range = image_format_to_color_range(image_format);
+ const int fps = 60;
+ gsr_capture *capture = create_capture_impl(window_str, output_resolution, wayland, egl, fps, false, color_range, record_cursor, restore_portal_session, portal_session_token_filepath, GSR_COLOR_DEPTH_8_BITS);
+
+ gsr_capture_metadata capture_metadata;
+ capture_metadata.width = 0;
+ capture_metadata.height = 0;
+ capture_metadata.fps = fps;
+ capture_metadata.video_codec_context = nullptr;
+ capture_metadata.frame = nullptr;
+
+ int capture_result = gsr_capture_start(capture, &capture_metadata);
+ if(capture_result != 0) {
+ fprintf(stderr, "gsr error: gsr_capture_start failed\n");
+ _exit(capture_result);
+ }
+
+ gsr_image_writer image_writer;
+ if(!gsr_image_writer_init(&image_writer, GSR_IMAGE_WRITER_SOURCE_OPENGL, egl, capture_metadata.width, capture_metadata.height)) {
+ fprintf(stderr, "gsr error: gsr_image_write_gl_init failed\n");
+ _exit(1);
+ }
+
+ gsr_color_conversion_params color_conversion_params;
+ memset(&color_conversion_params, 0, sizeof(color_conversion_params));
+ color_conversion_params.color_range = color_range;
+ color_conversion_params.egl = egl;
+ color_conversion_params.load_external_image_shader = gsr_capture_uses_external_image(capture);
+
+ color_conversion_params.destination_textures[0] = image_writer.texture;
+ color_conversion_params.num_destination_textures = 1;
+ color_conversion_params.destination_color = GSR_DESTINATION_COLOR_RGB8;
+
+ gsr_color_conversion color_conversion;
+ if(gsr_color_conversion_init(&color_conversion, &color_conversion_params) != 0) {
+ fprintf(stderr, "gsr error: gsr_capture_kms_setup_vaapi_textures: failed to create color conversion\n");
+ _exit(1);
+ }
+
+ gsr_color_conversion_clear(&color_conversion);
+
+ bool should_stop_error = false;
+ egl->glClear(0);
+
+ while(true) {
+ should_stop_error = false;
+ if(gsr_capture_should_stop(capture, &should_stop_error))
+ break;
+
+ // It can fail, for example when capturing via the desktop portal and the target is a monitor that hasn't been updated.
+ // The desktop portal won't refresh the image until there is an update.
+ // TODO: Find out if there is a way to force update desktop portal image.
+ // This can also happen for example if the system suspends and the monitor to capture's framebuffer is gone, or if the target window disappeared.
+ if(gsr_capture_capture(capture, &capture_metadata, &color_conversion) == 0)
+ break;
+
+ usleep(30 * 1000); // 30 ms
+ }
+
+ gsr_egl_swap_buffers(egl);
+
+ const int image_quality = video_quality_to_image_quality_value(video_quality);
+ if(!gsr_image_writer_write_to_file(&image_writer, filepath, image_format, image_quality)) {
+ fprintf(stderr, "gsr error: failed to write opengl texture to image output file %s\n", filepath);
+ _exit(1);
+ }
+
+ gsr_image_writer_deinit(&image_writer);
+ gsr_capture_destroy(capture);
+ _exit(should_stop_error ? 3 : 0);
+}
+
static AVPixelFormat get_pixel_format(VideoCodec video_codec, gsr_gpu_vendor vendor, bool use_software_video_encoder) {
- if(video_codec_is_image(video_codec)) {
- // TODO: hdr
- return AV_PIX_FMT_RGB24;
- } else if(use_software_video_encoder) {
+ if(use_software_video_encoder) {
return AV_PIX_FMT_NV12;
} else {
if(video_codec_is_vulkan(video_codec))
@@ -2744,8 +2792,6 @@ static const char* video_codec_to_string(VideoCodec video_codec) {
case VideoCodec::VP9: return "vp9";
case VideoCodec::H264_VULKAN: return "h264_vulkan";
case VideoCodec::HEVC_VULKAN: return "hevc_vulkan";
- case VideoCodec::JPEG: return "jpeg";
- case VideoCodec::PNG: return "png";
}
return "";
}
@@ -2763,8 +2809,6 @@ static bool video_codec_only_supports_low_power_mode(const gsr_supported_video_c
case VideoCodec::VP9: return supported_video_codecs.vp9.low_power;
case VideoCodec::H264_VULKAN: return supported_video_codecs.h264.low_power;
case VideoCodec::HEVC_VULKAN: return supported_video_codecs.hevc.low_power; // TODO: hdr, 10 bit
- case VideoCodec::JPEG: return false;
- case VideoCodec::PNG: return false;
}
return false;
}
@@ -2840,11 +2884,6 @@ static const AVCodec* pick_video_codec(VideoCodec *video_codec, gsr_egl *egl, bo
video_codec_f = get_ffmpeg_video_codec(*video_codec, egl->gpu_info.vendor);
break;
}
- case VideoCodec::JPEG:
- case VideoCodec::PNG: {
- video_codec_f = get_ffmpeg_video_codec(*video_codec, egl->gpu_info.vendor);
- break;
- }
}
if(!video_codec_auto && !video_codec_f && !is_flv) {
@@ -2906,12 +2945,6 @@ static const AVCodec* pick_video_codec(VideoCodec *video_codec, gsr_egl *egl, bo
video_codec_f = get_ffmpeg_video_codec(*video_codec, egl->gpu_info.vendor);
break;
}
- case VideoCodec::JPEG:
- case VideoCodec::PNG: {
- // TODO:
- //assert(false);
- break;
- }
}
}
@@ -3085,18 +3118,15 @@ static AudioDeviceData create_application_audio_audio_input(const MergedAudioInp
}
#endif
-static void set_video_codec_for_image_output(const char *filename, VideoCodec *video_codec, const char **video_codec_to_use) {
- const bool video_codec_auto = strcmp(*video_codec_to_use, "auto") == 0;
+static bool get_image_format_from_filename(const char *filename, gsr_image_format *image_format) {
if(string_ends_with(filename, ".jpg") || string_ends_with(filename, ".jpeg")) {
- if(!video_codec_auto)
- fprintf(stderr, "Warning: expected -k option to be set to 'auto' (or not specified) for jpeg output\n");
- *video_codec = VideoCodec::JPEG;
- *video_codec_to_use = "jpeg";
+ *image_format = GSR_IMAGE_FORMAT_JPEG;
+ return true;
} else if(string_ends_with(filename, ".png")) {
- if(!video_codec_auto)
- fprintf(stderr, "Warning: expected -k option to be set to 'auto' (or not specified) for png output\n");
- *video_codec = VideoCodec::PNG;
- *video_codec_to_use = "png";
+ *image_format = GSR_IMAGE_FORMAT_PNG;
+ return true;
+ } else {
+ return false;
}
}
@@ -3739,10 +3769,15 @@ int main(int argc, char **argv) {
const bool is_output_piped = strcmp(filename, "/dev/stdout") == 0;
- set_video_codec_for_image_output(filename, &video_codec, &video_codec_to_use);
- if(video_codec_is_image(video_codec) && !audio_input_arg.values.empty()) {
- fprintf(stderr, "Error: can't record audio (-a) when taking a screenshot\n");
- _exit(1);
+ gsr_image_format image_format;
+ if(get_image_format_from_filename(filename, &image_format)) {
+ if(!audio_input_arg.values.empty()) {
+ fprintf(stderr, "Error: can't record audio (-a) when taking a screenshot\n");
+ _exit(1);
+ }
+
+ capture_image_to_file(filename, window_str, output_resolution, wayland, &egl, image_format, record_cursor, restore_portal_session, portal_session_token_filepath, quality);
+ _exit(0);
}
AVFormatContext *av_format_context;
@@ -3771,13 +3806,12 @@ int main(int argc, char **argv) {
const double target_fps = 1.0 / (double)fps;
const bool uses_amix = merged_audio_inputs_should_use_amix(requested_audio_inputs);
- if(!video_codec_is_image(video_codec))
- audio_codec = select_audio_codec_with_fallback(audio_codec, file_extension, uses_amix);
+ audio_codec = select_audio_codec_with_fallback(audio_codec, file_extension, uses_amix);
bool low_power = false;
const AVCodec *video_codec_f = select_video_codec_with_fallback(&video_codec, video_codec_to_use, file_extension.c_str(), use_software_video_encoder, &egl, &low_power);
const gsr_color_depth color_depth = video_codec_to_bit_depth(video_codec);
- gsr_capture *capture = create_capture_impl(window_str, output_resolution, wayland, &egl, fps, video_codec, color_range, record_cursor, restore_portal_session, portal_session_token_filepath, color_depth);
+ gsr_capture *capture = create_capture_impl(window_str, output_resolution, wayland, &egl, fps, video_codec_is_hdr(video_codec), color_range, record_cursor, restore_portal_session, portal_session_token_filepath, color_depth);
// (Some?) livestreaming services require at least one audio track to work.
// If no audio is provided then create one silent audio track.
@@ -3809,20 +3843,32 @@ int main(int argc, char **argv) {
_exit(1);
}
video_frame->format = video_codec_context->pix_fmt;
- video_frame->width = video_codec_context->width;
- video_frame->height = video_codec_context->height;
+ video_frame->width = 0;
+ video_frame->height = 0;
video_frame->color_range = video_codec_context->color_range;
video_frame->color_primaries = video_codec_context->color_primaries;
video_frame->color_trc = video_codec_context->color_trc;
video_frame->colorspace = video_codec_context->colorspace;
video_frame->chroma_location = video_codec_context->chroma_sample_location;
- int capture_result = gsr_capture_start(capture, video_codec_context, video_frame);
+ gsr_capture_metadata capture_metadata;
+ capture_metadata.width = 0;
+ capture_metadata.height = 0;
+ capture_metadata.fps = fps;
+ capture_metadata.video_codec_context = video_codec_context;
+ capture_metadata.frame = video_frame;
+
+ int capture_result = gsr_capture_start(capture, &capture_metadata);
if(capture_result != 0) {
fprintf(stderr, "gsr error: gsr_capture_start failed\n");
_exit(capture_result);
}
+ video_codec_context->width = capture_metadata.width;
+ video_codec_context->height = capture_metadata.height;
+ video_frame->width = capture_metadata.width;
+ video_frame->height = capture_metadata.height;
+
gsr_video_encoder *video_encoder = create_video_encoder(&egl, overclock, color_depth, use_software_video_encoder, video_codec);
if(!video_encoder) {
fprintf(stderr, "Error: failed to create video encoder\n");
@@ -3849,9 +3895,7 @@ int main(int argc, char **argv) {
gsr_color_conversion_clear(&color_conversion);
- if(video_codec_is_image(video_codec)) {
- open_video_image(video_codec_context);
- } else if(use_software_video_encoder) {
+ if(use_software_video_encoder) {
open_video_software(video_codec_context, quality, pixel_format, hdr, color_depth, bitrate_mode);
} else {
open_video_hardware(video_codec_context, quality, very_old_gpu, egl.gpu_info.vendor, pixel_format, hdr, color_depth, bitrate_mode, video_codec, low_power);
@@ -3941,8 +3985,6 @@ int main(int argc, char **argv) {
if(replay_buffer_size_secs == -1) {
AVDictionary *options = nullptr;
av_dict_set(&options, "strict", "experimental", 0);
- if(video_codec_is_image(video_codec))
- av_dict_set(&options, "update", "true", 0);
//av_dict_set_int(&av_format_context->metadata, "video_full_range_flag", 1, 0);
int ret = avformat_write_header(av_format_context, &options);
@@ -4192,7 +4234,6 @@ int main(int argc, char **argv) {
double last_capture_seconds = record_start_time;
bool wait_until_frame_time_elapsed = false;
- const bool is_image_output = video_codec_is_image(video_codec);
while(running) {
const double frame_start = clock_get_monotonic_seconds();
@@ -4272,7 +4313,7 @@ int main(int argc, char **argv) {
// TODO: Dont do this if no damage?
egl.glClear(0);
- gsr_capture_capture(capture, video_frame, &color_conversion);
+ gsr_capture_capture(capture, &capture_metadata, &color_conversion);
gsr_egl_swap_buffers(&egl);
gsr_video_encoder_copy_textures_to_frame(video_encoder, video_frame, &color_conversion);
@@ -4300,10 +4341,6 @@ int main(int argc, char **argv) {
// TODO: Move to separate thread because this could write to network (for example when livestreaming)
receive_frames(video_codec_context, VIDEO_STREAM_INDEX, video_stream, video_frame->pts, av_format_context,
replay_start_time, frame_data_queue, replay_buffer_size_secs, frames_erased, write_output_mutex, paused_time_offset);
- if(is_image_output) {
- running = 0;
- break;
- }
} else {
fprintf(stderr, "Error: avcodec_send_frame failed, error: %s\n", av_error_to_string(ret));
}
@@ -4409,7 +4446,7 @@ int main(int argc, char **argv) {
gsr_damage_deinit(&damage);
gsr_color_conversion_deinit(&color_conversion);
gsr_video_encoder_destroy(video_encoder, video_codec_context);
- gsr_capture_destroy(capture, video_codec_context);
+ gsr_capture_destroy(capture);
#ifdef GSR_APP_AUDIO
gsr_pipewire_audio_deinit(&pipewire_audio);
#endif
diff --git a/src/utils.c b/src/utils.c
index 61ca856..82c51b5 100644
--- a/src/utils.c
+++ b/src/utils.c
@@ -635,9 +635,12 @@ static VADisplay video_codec_context_get_vaapi_display(AVCodecContext *video_cod
}
bool video_codec_context_is_vaapi(AVCodecContext *video_codec_context) {
+ if(!video_codec_context)
+ return false;
+
AVBufferRef *hw_frames_ctx = video_codec_context->hw_frames_ctx;
if(!hw_frames_ctx)
- return NULL;
+ return false;
AVHWFramesContext *hw_frame_context = (AVHWFramesContext*)hw_frames_ctx->data;
AVHWDeviceContext *device_context = (AVHWDeviceContext*)hw_frame_context->device_ctx;