diff options
Diffstat (limited to 'src')
-rw-r--r-- | src/args_parser.c | 92 | ||||
-rw-r--r-- | src/capture/kms.c | 13 | ||||
-rw-r--r-- | src/capture/nvfbc.c | 6 | ||||
-rw-r--r-- | src/capture/portal.c | 163 | ||||
-rw-r--r-- | src/color_conversion.c | 65 | ||||
-rw-r--r-- | src/cursor.c | 4 | ||||
-rw-r--r-- | src/dbus.c | 913 | ||||
-rw-r--r-- | src/egl.c | 46 | ||||
-rw-r--r-- | src/encoder/video/software.c | 11 | ||||
-rw-r--r-- | src/encoder/video/vaapi.c | 7 | ||||
-rw-r--r-- | src/image_writer.c | 14 | ||||
-rw-r--r-- | src/main.cpp | 249 | ||||
-rw-r--r-- | src/pipewire_audio.c | 264 | ||||
-rw-r--r-- | src/pipewire_video.c | 116 | ||||
-rw-r--r-- | src/sound.cpp | 6 | ||||
-rw-r--r-- | src/utils.c | 208 | ||||
-rw-r--r-- | src/window/wayland.c | 15 | ||||
-rw-r--r-- | src/window_texture.c | 4 |
18 files changed, 1568 insertions, 628 deletions
diff --git a/src/args_parser.c b/src/args_parser.c index a678691..0e05557 100644 --- a/src/args_parser.c +++ b/src/args_parser.c @@ -194,7 +194,6 @@ static void usage_header() { fflush(stdout); } -// TODO: Update with portal info static void usage_full() { const bool inside_flatpak = getenv("FLATPAK_ID") != NULL; const char *program_name = inside_flatpak ? "flatpak run --command=gpu-screen-recorder com.dec05eba.gpu_screen_recorder" : "gpu-screen-recorder"; @@ -397,6 +396,7 @@ static void usage_full() { printf("EXAMPLES:\n"); printf(" %s -w screen -f 60 -a default_output -o video.mp4\n", program_name); printf(" %s -w screen -f 60 -a default_output -a default_input -o video.mp4\n", program_name); + printf(" %s -w $(xdotool selectwindow) -f 60 -a default_output -o video.mp4\n", program_name); printf(" %s -w screen -f 60 -a \"default_output|default_input\" -o video.mp4\n", program_name); printf(" %s -w screen -f 60 -a default_output -c mkv -r 60 -o \"$HOME/Videos\"\n", program_name); printf(" %s -w screen -f 60 -a default_output -c mkv -r 1800 -replay-storage disk -bm cbr -q 40000 -o \"$HOME/Videos\"\n", program_name); @@ -465,7 +465,7 @@ static bool args_parser_set_values(args_parser *self) { self->keyint = args_get_double_by_key(self->args, NUM_ARGS, "-keyint", 2.0); if(self->audio_codec == GSR_AUDIO_CODEC_FLAC) { - fprintf(stderr, "Warning: flac audio codec is temporary disabled, using opus audio codec instead\n"); + fprintf(stderr, "gsr warning: flac audio codec is temporary disabled, using opus audio codec instead\n"); self->audio_codec = GSR_AUDIO_CODEC_OPUS; } @@ -473,7 +473,7 @@ static bool args_parser_set_values(args_parser *self) { if(self->portal_session_token_filepath) { int len = strlen(self->portal_session_token_filepath); if(len > 0 && self->portal_session_token_filepath[len - 1] == '/') { - fprintf(stderr, "Error: -portal-session-token-filepath should be a path to a file but it ends with a /: %s\n", self->portal_session_token_filepath); + 
fprintf(stderr, "gsr error: -portal-session-token-filepath should be a path to a file but it ends with a /: %s\n", self->portal_session_token_filepath); return false; } } @@ -482,13 +482,13 @@ static bool args_parser_set_values(args_parser *self) { if(self->recording_saved_script) { struct stat buf; if(stat(self->recording_saved_script, &buf) == -1 || !S_ISREG(buf.st_mode)) { - fprintf(stderr, "Error: Script \"%s\" either doesn't exist or it's not a file\n", self->recording_saved_script); + fprintf(stderr, "gsr error: Script \"%s\" either doesn't exist or it's not a file\n", self->recording_saved_script); usage(); return false; } if(!(buf.st_mode & S_IXUSR)) { - fprintf(stderr, "Error: Script \"%s\" is not executable\n", self->recording_saved_script); + fprintf(stderr, "gsr error: Script \"%s\" is not executable\n", self->recording_saved_script); usage(); return false; } @@ -500,19 +500,19 @@ static bool args_parser_set_values(args_parser *self) { if(self->bitrate_mode == GSR_BITRATE_MODE_CBR) { if(!quality_str) { - fprintf(stderr, "Error: option '-q' is required when using '-bm cbr' option\n"); + fprintf(stderr, "gsr error: option '-q' is required when using '-bm cbr' option\n"); usage(); return false; } if(sscanf(quality_str, "%" PRIi64, &self->video_bitrate) != 1) { - fprintf(stderr, "Error: -q argument \"%s\" is not an integer value. When using '-bm cbr' option '-q' is expected to be an integer value\n", quality_str); + fprintf(stderr, "gsr error: -q argument \"%s\" is not an integer value. 
When using '-bm cbr' option '-q' is expected to be an integer value\n", quality_str); usage(); return false; } if(self->video_bitrate < 0) { - fprintf(stderr, "Error: -q is expected to be 0 or larger, got %" PRIi64 "\n", self->video_bitrate); + fprintf(stderr, "gsr error: -q is expected to be 0 or larger, got %" PRIi64 "\n", self->video_bitrate); usage(); return false; } @@ -531,7 +531,7 @@ static bool args_parser_set_values(args_parser *self) { } else if(strcmp(quality_str, "ultra") == 0) { self->video_quality = GSR_VIDEO_QUALITY_ULTRA; } else { - fprintf(stderr, "Error: -q should either be 'medium', 'high', 'very_high' or 'ultra', got: '%s'\n", quality_str); + fprintf(stderr, "gsr error: -q should either be 'medium', 'high', 'very_high' or 'ultra', got: '%s'\n", quality_str); usage(); return false; } @@ -539,7 +539,7 @@ static bool args_parser_set_values(args_parser *self) { const char *output_resolution_str = args_get_value_by_key(self->args, NUM_ARGS, "-s"); if(!output_resolution_str && strcmp(self->window, "focused") == 0) { - fprintf(stderr, "Error: option -s is required when using '-w focused' option\n"); + fprintf(stderr, "gsr error: option -s is required when using '-w focused' option\n"); usage(); return false; } @@ -547,13 +547,13 @@ static bool args_parser_set_values(args_parser *self) { self->output_resolution = (vec2i){0, 0}; if(output_resolution_str) { if(sscanf(output_resolution_str, "%dx%d", &self->output_resolution.x, &self->output_resolution.y) != 2) { - fprintf(stderr, "Error: invalid value for option -s '%s', expected a value in format WxH\n", output_resolution_str); + fprintf(stderr, "gsr error: invalid value for option -s '%s', expected a value in format WxH\n", output_resolution_str); usage(); return false; } if(self->output_resolution.x < 0 || self->output_resolution.y < 0) { - fprintf(stderr, "Error: invalid value for option -s '%s', expected width and height to be greater or equal to 0\n", output_resolution_str); + fprintf(stderr, "gsr 
error: invalid value for option -s '%s', expected width and height to be greater or equal to 0\n", output_resolution_str); usage(); return false; } @@ -564,25 +564,25 @@ static bool args_parser_set_values(args_parser *self) { const char *region_str = args_get_value_by_key(self->args, NUM_ARGS, "-region"); if(region_str) { if(strcmp(self->window, "region") != 0) { - fprintf(stderr, "Error: option -region can only be used when option '-w region' is used\n"); + fprintf(stderr, "gsr error: option -region can only be used when option '-w region' is used\n"); usage(); return false; } if(sscanf(region_str, "%dx%d+%d+%d", &self->region_size.x, &self->region_size.y, &self->region_position.x, &self->region_position.y) != 4) { - fprintf(stderr, "Error: invalid value for option -region '%s', expected a value in format WxH+X+Y\n", region_str); + fprintf(stderr, "gsr error: invalid value for option -region '%s', expected a value in format WxH+X+Y\n", region_str); usage(); return false; } if(self->region_size.x < 0 || self->region_size.y < 0 || self->region_position.x < 0 || self->region_position.y < 0) { - fprintf(stderr, "Error: invalid value for option -region '%s', expected width, height, x and y to be greater or equal to 0\n", region_str); + fprintf(stderr, "gsr error: invalid value for option -region '%s', expected width, height, x and y to be greater or equal to 0\n", region_str); usage(); return false; } } else { if(strcmp(self->window, "region") == 0) { - fprintf(stderr, "Error: option -region is required when '-w region' is used\n"); + fprintf(stderr, "gsr error: option -region is required when '-w region' is used\n"); usage(); return false; } @@ -604,7 +604,7 @@ static bool args_parser_set_values(args_parser *self) { self->is_livestream = is_livestream_path(self->filename); if(self->is_livestream) { if(is_replaying) { - fprintf(stderr, "Error: replay mode is not applicable to live streaming\n"); + fprintf(stderr, "gsr error: replay mode is not applicable to live 
streaming\n"); return false; } } else { @@ -614,20 +614,20 @@ static bool args_parser_set_values(args_parser *self) { char *directory = dirname(directory_buf); if(strcmp(directory, ".") != 0 && strcmp(directory, "/") != 0) { if(create_directory_recursive(directory) != 0) { - fprintf(stderr, "Error: failed to create directory for output file: %s\n", self->filename); + fprintf(stderr, "gsr error: failed to create directory for output file: %s\n", self->filename); return false; } } } else { if(!self->container_format) { - fprintf(stderr, "Error: option -c is required when using option -r\n"); + fprintf(stderr, "gsr error: option -c is required when using option -r\n"); usage(); return false; } struct stat buf; if(stat(self->filename, &buf) != -1 && !S_ISDIR(buf.st_mode)) { - fprintf(stderr, "Error: File \"%s\" exists but it's not a directory\n", self->filename); + fprintf(stderr, "gsr error: File \"%s\" exists but it's not a directory\n", self->filename); usage(); return false; } @@ -637,13 +637,13 @@ static bool args_parser_set_values(args_parser *self) { if(!is_replaying) { self->filename = "/dev/stdout"; } else { - fprintf(stderr, "Error: Option -o is required when using option -r\n"); + fprintf(stderr, "gsr error: Option -o is required when using option -r\n"); usage(); return false; } if(!self->container_format) { - fprintf(stderr, "Error: option -c is required when not using option -o\n"); + fprintf(stderr, "gsr error: option -c is required when not using option -o\n"); usage(); return false; } @@ -656,10 +656,10 @@ static bool args_parser_set_values(args_parser *self) { const bool is_portal_capture = strcmp(self->window, "portal") == 0; if(!self->restore_portal_session && is_portal_capture) - fprintf(stderr, "Info: option '-w portal' was used without '-restore-portal-session yes'. The previous screencast session will be ignored\n"); + fprintf(stderr, "gsr info: option '-w portal' was used without '-restore-portal-session yes'. 
The previous screencast session will be ignored\n"); if(self->is_livestream && self->recording_saved_script) { - fprintf(stderr, "Warning: live stream detected, -sc script is ignored\n"); + fprintf(stderr, "gsr warning: live stream detected, -sc script is ignored\n"); self->recording_saved_script = NULL; } @@ -704,7 +704,7 @@ bool args_parser_parse(args_parser *self, int argc, char **argv, const args_hand arg_handlers->list_capture_options(card_path, userdata); return true; } else { - fprintf(stderr, "Error: expected --list-capture-options to be called with either no extra arguments or 1 extra argument (card path)\n"); + fprintf(stderr, "gsr error: expected --list-capture-options to be called with either no extra arguments or 1 extra argument (card path)\n"); return false; } } @@ -751,19 +751,19 @@ bool args_parser_parse(args_parser *self, int argc, char **argv, const args_hand const char *arg_name = argv[i]; Arg *arg = args_get_by_key(self->args, NUM_ARGS, arg_name); if(!arg) { - fprintf(stderr, "Error: invalid argument '%s'\n", arg_name); + fprintf(stderr, "gsr error: invalid argument '%s'\n", arg_name); usage(); return false; } if(arg->num_values > 0 && !arg->list) { - fprintf(stderr, "Error: expected argument '%s' to only be specified once\n", arg_name); + fprintf(stderr, "gsr error: expected argument '%s' to only be specified once\n", arg_name); usage(); return false; } if(i + 1 >= argc) { - fprintf(stderr, "Error: missing value for argument '%s'\n", arg_name); + fprintf(stderr, "gsr error: missing value for argument '%s'\n", arg_name); usage(); return false; } @@ -779,7 +779,7 @@ bool args_parser_parse(args_parser *self, int argc, char **argv, const args_hand } else if(strcmp(arg_value, "no") == 0) { arg->typed_value.boolean = false; } else { - fprintf(stderr, "Error: %s should either be 'yes' or 'no', got: '%s'\n", arg_name, arg_value); + fprintf(stderr, "gsr error: %s should either be 'yes' or 'no', got: '%s'\n", arg_name, arg_value); usage(); return false; 
} @@ -787,7 +787,7 @@ bool args_parser_parse(args_parser *self, int argc, char **argv, const args_hand } case ARG_TYPE_ENUM: { if(!arg_get_enum_value_by_name(arg, arg_value, &arg->typed_value.enum_value)) { - fprintf(stderr, "Error: %s should either be ", arg_name); + fprintf(stderr, "gsr error: %s should either be ", arg_name); arg_print_expected_enum_names(arg); fprintf(stderr, ", got: '%s'\n", arg_value); usage(); @@ -797,19 +797,19 @@ bool args_parser_parse(args_parser *self, int argc, char **argv, const args_hand } case ARG_TYPE_I64: { if(sscanf(arg_value, "%" PRIi64, &arg->typed_value.i64_value) != 1) { - fprintf(stderr, "Error: %s argument \"%s\" is not an integer\n", arg_name, arg_value); + fprintf(stderr, "gsr error: %s argument \"%s\" is not an integer\n", arg_name, arg_value); usage(); return false; } if(arg->typed_value.i64_value < arg->integer_value_min) { - fprintf(stderr, "Error: %s argument is expected to be larger than %" PRIi64 ", got %" PRIi64 "\n", arg_name, arg->integer_value_min, arg->typed_value.i64_value); + fprintf(stderr, "gsr error: %s argument is expected to be larger than %" PRIi64 ", got %" PRIi64 "\n", arg_name, arg->integer_value_min, arg->typed_value.i64_value); usage(); return false; } if(arg->typed_value.i64_value > arg->integer_value_max) { - fprintf(stderr, "Error: %s argument is expected to be less than %" PRIi64 ", got %" PRIi64 "\n", arg_name, arg->integer_value_max, arg->typed_value.i64_value); + fprintf(stderr, "gsr error: %s argument is expected to be less than %" PRIi64 ", got %" PRIi64 "\n", arg_name, arg->integer_value_max, arg->typed_value.i64_value); usage(); return false; } @@ -817,19 +817,19 @@ bool args_parser_parse(args_parser *self, int argc, char **argv, const args_hand } case ARG_TYPE_DOUBLE: { if(sscanf(arg_value, "%lf", &arg->typed_value.d_value) != 1) { - fprintf(stderr, "Error: %s argument \"%s\" is not an floating-point number\n", arg_name, arg_value); + fprintf(stderr, "gsr error: %s argument \"%s\" is 
not an floating-point number\n", arg_name, arg_value); usage(); return false; } if(arg->typed_value.d_value < arg->integer_value_min) { - fprintf(stderr, "Error: %s argument is expected to be larger than %" PRIi64 ", got %lf\n", arg_name, arg->integer_value_min, arg->typed_value.d_value); + fprintf(stderr, "gsr error: %s argument is expected to be larger than %" PRIi64 ", got %lf\n", arg_name, arg->integer_value_min, arg->typed_value.d_value); usage(); return false; } if(arg->typed_value.d_value > arg->integer_value_max) { - fprintf(stderr, "Error: %s argument is expected to be less than %" PRIi64 ", got %lf\n", arg_name, arg->integer_value_max, arg->typed_value.d_value); + fprintf(stderr, "gsr error: %s argument is expected to be less than %" PRIi64 ", got %lf\n", arg_name, arg->integer_value_max, arg->typed_value.d_value); usage(); return false; } @@ -838,7 +838,7 @@ bool args_parser_parse(args_parser *self, int argc, char **argv, const args_hand } if(!arg_append_value(arg, arg_value)) { - fprintf(stderr, "Error: failed to append argument, out of memory\n"); + fprintf(stderr, "gsr error: failed to append argument, out of memory\n"); return false; } } @@ -846,7 +846,7 @@ bool args_parser_parse(args_parser *self, int argc, char **argv, const args_hand for(int i = 0; i < NUM_ARGS; ++i) { const Arg *arg = &self->args[i]; if(!arg->optional && arg->num_values == 0) { - fprintf(stderr, "Error: missing argument '%s'\n", arg->key); + fprintf(stderr, "gsr error: missing argument '%s'\n", arg->key); usage(); return false; } @@ -870,45 +870,45 @@ bool args_parser_validate_with_gl_info(args_parser *self, gsr_egl *egl) { } if(egl->gpu_info.is_steam_deck && self->bitrate_mode == GSR_BITRATE_MODE_QP) { - fprintf(stderr, "Warning: qp bitrate mode is not supported on Steam Deck because of Steam Deck driver bugs. Using vbr instead\n"); + fprintf(stderr, "gsr warning: qp bitrate mode is not supported on Steam Deck because of Steam Deck driver bugs. 
Using vbr instead\n"); self->bitrate_mode = GSR_BITRATE_MODE_VBR; } if(self->video_encoder == GSR_VIDEO_ENCODER_HW_CPU && self->bitrate_mode == GSR_BITRATE_MODE_VBR) { - fprintf(stderr, "Warning: bitrate mode has been forcefully set to qp because software encoding option doesn't support vbr option\n"); + fprintf(stderr, "gsr warning: bitrate mode has been forcefully set to qp because software encoding option doesn't support vbr option\n"); self->bitrate_mode = GSR_BITRATE_MODE_QP; } if(egl->gpu_info.vendor != GSR_GPU_VENDOR_NVIDIA && self->overclock) { - fprintf(stderr, "Info: overclock option has no effect on amd/intel, ignoring option\n"); + fprintf(stderr, "gsr info: overclock option has no effect on amd/intel, ignoring option\n"); self->overclock = false; } if(egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA && self->overclock && wayland) { - fprintf(stderr, "Info: overclocking is not possible on nvidia on wayland, ignoring option\n"); + fprintf(stderr, "gsr info: overclocking is not possible on nvidia on wayland, ignoring option\n"); self->overclock = false; } if(egl->gpu_info.is_steam_deck) { - fprintf(stderr, "Warning: steam deck has multiple driver issues. One of them has been reported here: https://github.com/ValveSoftware/SteamOS/issues/1609\n" + fprintf(stderr, "gsr warning: steam deck has multiple driver issues. One of them has been reported here: https://github.com/ValveSoftware/SteamOS/issues/1609\n" "If you have issues with GPU Screen Recorder on steam deck that you don't have on a desktop computer then report the issue to Valve and/or AMD.\n"); } self->very_old_gpu = false; if(egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA && egl->gpu_info.gpu_version != 0 && egl->gpu_info.gpu_version < 900) { - fprintf(stderr, "Info: your gpu appears to be very old (older than maxwell architecture). Switching to lower preset\n"); + fprintf(stderr, "gsr info: your gpu appears to be very old (older than maxwell architecture). 
Switching to lower preset\n"); self->very_old_gpu = true; } if(video_codec_is_hdr(self->video_codec) && !wayland) { - fprintf(stderr, "Error: hdr video codec option %s is not available on X11\n", video_codec_to_string(self->video_codec)); + fprintf(stderr, "gsr error: hdr video codec option %s is not available on X11\n", video_codec_to_string(self->video_codec)); usage(); return false; } const bool is_portal_capture = strcmp(self->window, "portal") == 0; if(video_codec_is_hdr(self->video_codec) && is_portal_capture) { - fprintf(stderr, "Warning: portal capture option doesn't support hdr yet (PipeWire doesn't support hdr), the video will be tonemapped from hdr to sdr\n"); + fprintf(stderr, "gsr warning: portal capture option doesn't support hdr yet (PipeWire doesn't support hdr), the video will be tonemapped from hdr to sdr\n"); self->video_codec = hdr_video_codec_to_sdr_video_codec(self->video_codec); } diff --git a/src/capture/kms.c b/src/capture/kms.c index 18858f2..36a5355 100644 --- a/src/capture/kms.c +++ b/src/capture/kms.c @@ -12,7 +12,7 @@ #include <fcntl.h> #include <xf86drm.h> -#include <libdrm/drm_fourcc.h> +#include <drm_fourcc.h> #include <libavutil/mastering_display_metadata.h> @@ -108,22 +108,14 @@ static int max_int(int a, int b) { } static void gsr_capture_kms_create_input_texture_ids(gsr_capture_kms *self) { - const float border_color[4] = { 0.0f, 0.0f, 0.0f, 0.0f }; - self->params.egl->glGenTextures(1, &self->input_texture_id); self->params.egl->glBindTexture(GL_TEXTURE_2D, self->input_texture_id); - self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER); - self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER); - self->params.egl->glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, border_color); self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 
self->params.egl->glBindTexture(GL_TEXTURE_2D, 0); self->params.egl->glGenTextures(1, &self->external_input_texture_id); self->params.egl->glBindTexture(GL_TEXTURE_EXTERNAL_OES, self->external_input_texture_id); - self->params.egl->glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER); - self->params.egl->glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER); - self->params.egl->glTexParameterfv(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_BORDER_COLOR, border_color); self->params.egl->glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR); self->params.egl->glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR); self->params.egl->glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0); @@ -133,9 +125,6 @@ static void gsr_capture_kms_create_input_texture_ids(gsr_capture_kms *self) { self->params.egl->glGenTextures(1, &self->cursor_texture_id); self->params.egl->glBindTexture(cursor_texture_id_target, self->cursor_texture_id); - self->params.egl->glTexParameteri(cursor_texture_id_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER); - self->params.egl->glTexParameteri(cursor_texture_id_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER); - self->params.egl->glTexParameterfv(cursor_texture_id_target, GL_TEXTURE_BORDER_COLOR, border_color); self->params.egl->glTexParameteri(cursor_texture_id_target, GL_TEXTURE_MIN_FILTER, GL_LINEAR); self->params.egl->glTexParameteri(cursor_texture_id_target, GL_TEXTURE_MAG_FILTER, GL_LINEAR); self->params.egl->glBindTexture(cursor_texture_id_target, 0); diff --git a/src/capture/nvfbc.c b/src/capture/nvfbc.c index b92bd41..13b46c3 100644 --- a/src/capture/nvfbc.c +++ b/src/capture/nvfbc.c @@ -289,12 +289,12 @@ static int gsr_capture_nvfbc_start(gsr_capture *cap, gsr_capture_metadata *captu int driver_major_version = 0; int driver_minor_version = 0; if(self->params.direct_capture && get_driver_version(&driver_major_version, &driver_minor_version)) { - fprintf(stderr, "Info: 
detected nvidia version: %d.%d\n", driver_major_version, driver_minor_version); + fprintf(stderr, "gsr info: detected nvidia version: %d.%d\n", driver_major_version, driver_minor_version); // TODO: if(version_at_least(driver_major_version, driver_minor_version, 515, 57) && version_less_than(driver_major_version, driver_minor_version, 520, 56)) { self->params.direct_capture = false; - fprintf(stderr, "Warning: \"screen-direct\" has temporary been disabled as it causes stuttering with driver versions >= 515.57 and < 520.56. Please update your driver if possible. Capturing \"screen\" instead.\n"); + fprintf(stderr, "gsr warning: \"screen-direct\" has temporary been disabled as it causes stuttering with driver versions >= 515.57 and < 520.56. Please update your driver if possible. Capturing \"screen\" instead.\n"); } // TODO: @@ -304,7 +304,7 @@ static int gsr_capture_nvfbc_start(gsr_capture *cap, gsr_capture_metadata *captu if(version_at_least(driver_major_version, driver_minor_version, 515, 57)) self->supports_direct_cursor = true; else - fprintf(stderr, "Info: capturing \"screen-direct\" but driver version appears to be less than 515.57. Disabling capture of cursor. Please update your driver if you want to capture your cursor or record \"screen\" instead.\n"); + fprintf(stderr, "gsr info: capturing \"screen-direct\" but driver version appears to be less than 515.57. Disabling capture of cursor. 
Please update your driver if you want to capture your cursor or record \"screen\" instead.\n"); } */ } diff --git a/src/capture/portal.c b/src/capture/portal.c index a5e62af..d2217d1 100644 --- a/src/capture/portal.c +++ b/src/capture/portal.c @@ -2,7 +2,7 @@ #include "../../include/color_conversion.h" #include "../../include/egl.h" #include "../../include/utils.h" -#include "../../dbus/client/dbus_client.h" +#include "../../include/dbus.h" #include "../../include/pipewire_video.h" #include <stdlib.h> @@ -11,18 +11,36 @@ #include <limits.h> #include <assert.h> +#define PORTAL_CAPTURE_CANCELED_BY_USER_EXIT_CODE 60 + +typedef enum { + PORTAL_CAPTURE_SETUP_IDLE, + PORTAL_CAPTURE_SETUP_IN_PROGRESS, + PORTAL_CAPTURE_SETUP_FINISHED, + PORTAL_CAPTURE_SETUP_FAILED +} gsr_portal_capture_setup_state; + typedef struct { gsr_capture_portal_params params; gsr_texture_map texture_map; - gsr_dbus_client dbus_client; - char session_handle[128]; + gsr_dbus dbus; + char *session_handle; gsr_pipewire_video pipewire; vec2i capture_size; gsr_pipewire_video_dmabuf_data dmabuf_data[GSR_PIPEWIRE_VIDEO_DMABUF_MAX_PLANES]; int num_dmabuf_data; + + gsr_pipewire_video_region region; + gsr_pipewire_video_region cursor_region; + uint32_t pipewire_fourcc; + uint64_t pipewire_modifiers; + bool using_external_image; + + bool should_stop; + bool stop_is_error; } gsr_capture_portal; static void gsr_capture_portal_cleanup_plane_fds(gsr_capture_portal *self) { @@ -53,35 +71,24 @@ static void gsr_capture_portal_stop(gsr_capture_portal *self) { gsr_capture_portal_cleanup_plane_fds(self); gsr_pipewire_video_deinit(&self->pipewire); - gsr_dbus_client_deinit(&self->dbus_client); + gsr_dbus_deinit(&self->dbus); } static void gsr_capture_portal_create_input_textures(gsr_capture_portal *self) { - const float border_color[4] = { 0.0f, 0.0f, 0.0f, 0.0f }; - self->params.egl->glGenTextures(1, &self->texture_map.texture_id); self->params.egl->glBindTexture(GL_TEXTURE_2D, self->texture_map.texture_id); - 
self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER); - self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER); - self->params.egl->glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, border_color); self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); self->params.egl->glBindTexture(GL_TEXTURE_2D, 0); self->params.egl->glGenTextures(1, &self->texture_map.external_texture_id); self->params.egl->glBindTexture(GL_TEXTURE_EXTERNAL_OES, self->texture_map.external_texture_id); - self->params.egl->glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER); - self->params.egl->glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER); - self->params.egl->glTexParameterfv(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_BORDER_COLOR, border_color); self->params.egl->glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR); self->params.egl->glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR); self->params.egl->glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0); self->params.egl->glGenTextures(1, &self->texture_map.cursor_texture_id); self->params.egl->glBindTexture(GL_TEXTURE_2D, self->texture_map.cursor_texture_id); - self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER); - self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER); - self->params.egl->glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, border_color); self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); self->params.egl->glBindTexture(GL_TEXTURE_2D, 0); @@ -188,36 +195,36 @@ static int gsr_capture_portal_setup_dbus(gsr_capture_portal *self, int *pipewire 
if(self->params.restore_portal_session) gsr_capture_portal_get_restore_token_from_cache(restore_token, sizeof(restore_token), self->params.portal_session_token_filepath); - if(!gsr_dbus_client_init(&self->dbus_client, restore_token)) + if(!gsr_dbus_init(&self->dbus, restore_token)) return -1; fprintf(stderr, "gsr info: gsr_capture_portal_setup_dbus: CreateSession\n"); - response_status = gsr_dbus_client_screencast_create_session(&self->dbus_client, self->session_handle, sizeof(self->session_handle)); + response_status = gsr_dbus_screencast_create_session(&self->dbus, &self->session_handle); if(response_status != 0) { fprintf(stderr, "gsr error: gsr_capture_portal_setup_dbus: CreateSession failed\n"); return response_status; } fprintf(stderr, "gsr info: gsr_capture_portal_setup_dbus: SelectSources\n"); - response_status = gsr_dbus_client_screencast_select_sources(&self->dbus_client, self->session_handle, GSR_PORTAL_CAPTURE_TYPE_ALL, self->params.record_cursor ? GSR_PORTAL_CURSOR_MODE_EMBEDDED : GSR_PORTAL_CURSOR_MODE_HIDDEN); + response_status = gsr_dbus_screencast_select_sources(&self->dbus, self->session_handle, GSR_PORTAL_CAPTURE_TYPE_ALL, self->params.record_cursor ? 
GSR_PORTAL_CURSOR_MODE_EMBEDDED : GSR_PORTAL_CURSOR_MODE_HIDDEN); if(response_status != 0) { fprintf(stderr, "gsr error: gsr_capture_portal_setup_dbus: SelectSources failed\n"); return response_status; } fprintf(stderr, "gsr info: gsr_capture_portal_setup_dbus: Start\n"); - response_status = gsr_dbus_client_screencast_start(&self->dbus_client, self->session_handle, pipewire_node); + response_status = gsr_dbus_screencast_start(&self->dbus, self->session_handle, pipewire_node); if(response_status != 0) { fprintf(stderr, "gsr error: gsr_capture_portal_setup_dbus: Start failed\n"); return response_status; } - const char *screencast_restore_token = gsr_dbus_client_screencast_get_restore_token(&self->dbus_client); + const char *screencast_restore_token = gsr_dbus_screencast_get_restore_token(&self->dbus); if(screencast_restore_token) gsr_capture_portal_save_restore_token(screencast_restore_token, self->params.portal_session_token_filepath); fprintf(stderr, "gsr info: gsr_capture_portal_setup_dbus: OpenPipeWireRemote\n"); - if(!gsr_dbus_client_screencast_open_pipewire_remote(&self->dbus_client, self->session_handle, pipewire_fd)) { + if(!gsr_dbus_screencast_open_pipewire_remote(&self->dbus, self->session_handle, pipewire_fd)) { fprintf(stderr, "gsr error: gsr_capture_portal_setup_dbus: OpenPipeWireRemote failed\n"); return -1; } @@ -227,19 +234,13 @@ static int gsr_capture_portal_setup_dbus(gsr_capture_portal *self, int *pipewire } static bool gsr_capture_portal_get_frame_dimensions(gsr_capture_portal *self) { - gsr_pipewire_video_region region = {0, 0, 0, 0}; - gsr_pipewire_video_region cursor_region = {0, 0, 0, 0}; fprintf(stderr, "gsr info: gsr_capture_portal_start: waiting for pipewire negotiation\n"); const double start_time = clock_get_monotonic_seconds(); while(clock_get_monotonic_seconds() - start_time < 5.0) { - bool uses_external_image = false; - uint32_t fourcc = 0; - uint64_t modifiers = 0; - if(gsr_pipewire_video_map_texture(&self->pipewire, 
self->texture_map, ®ion, &cursor_region, self->dmabuf_data, &self->num_dmabuf_data, &fourcc, &modifiers, &uses_external_image)) { - gsr_capture_portal_cleanup_plane_fds(self); - self->capture_size.x = region.width; - self->capture_size.y = region.height; + if(gsr_pipewire_video_map_texture(&self->pipewire, self->texture_map, &self->region, &self->cursor_region, self->dmabuf_data, &self->num_dmabuf_data, &self->pipewire_fourcc, &self->pipewire_modifiers, &self->using_external_image)) { + self->capture_size.x = self->region.width; + self->capture_size.y = self->region.height; fprintf(stderr, "gsr info: gsr_capture_portal_start: pipewire negotiation finished\n"); return true; } @@ -250,45 +251,51 @@ static bool gsr_capture_portal_get_frame_dimensions(gsr_capture_portal *self) { return false; } -static int gsr_capture_portal_start(gsr_capture *cap, gsr_capture_metadata *capture_metadata) { - gsr_capture_portal *self = cap->priv; - +static int gsr_capture_portal_setup(gsr_capture_portal *self, int fps) { gsr_capture_portal_create_input_textures(self); int pipewire_fd = 0; uint32_t pipewire_node = 0; const int response_status = gsr_capture_portal_setup_dbus(self, &pipewire_fd, &pipewire_node); if(response_status != 0) { - gsr_capture_portal_stop(self); // Response status values: // 0: Success, the request is carried out // 1: The user cancelled the interaction // 2: The user interaction was ended in some other way // Response status value 2 happens usually if there was some kind of error in the desktop portal on the system if(response_status == 2) { - fprintf(stderr, "gsr error: gsr_capture_portal_start: desktop portal capture failed. Either you Wayland compositor doesn't support desktop portal capture or it's incorrectly setup on your system\n"); + fprintf(stderr, "gsr error: gsr_capture_portal_setup: desktop portal capture failed. 
Either your Wayland compositor doesn't support desktop portal capture or it's incorrectly setup on your system\n"); return 50; } else if(response_status == 1) { - fprintf(stderr, "gsr error: gsr_capture_portal_start: desktop portal capture failed. It seems like desktop portal capture was canceled by the user.\n"); - return 60; + fprintf(stderr, "gsr error: gsr_capture_portal_setup: desktop portal capture failed. It seems like desktop portal capture was canceled by the user.\n"); + return PORTAL_CAPTURE_CANCELED_BY_USER_EXIT_CODE; } else { return -1; } } - fprintf(stderr, "gsr info: gsr_capture_portal_start: setting up pipewire\n"); + fprintf(stderr, "gsr info: gsr_capture_portal_setup: setting up pipewire\n"); /* TODO: support hdr when pipewire supports it */ /* gsr_pipewire closes the pipewire fd, even on failure */ - if(!gsr_pipewire_video_init(&self->pipewire, pipewire_fd, pipewire_node, capture_metadata->fps, self->params.record_cursor, self->params.egl)) { - fprintf(stderr, "gsr error: gsr_capture_portal_start: failed to setup pipewire with fd: %d, node: %" PRIu32 "\n", pipewire_fd, pipewire_node); - gsr_capture_portal_stop(self); + if(!gsr_pipewire_video_init(&self->pipewire, pipewire_fd, pipewire_node, fps, self->params.record_cursor, self->params.egl)) { + fprintf(stderr, "gsr error: gsr_capture_portal_setup: failed to setup pipewire with fd: %d, node: %" PRIu32 "\n", pipewire_fd, pipewire_node); + return -1; } - fprintf(stderr, "gsr info: gsr_capture_portal_start: pipewire setup finished\n"); + fprintf(stderr, "gsr info: gsr_capture_portal_setup: pipewire setup finished\n"); - if(!gsr_capture_portal_get_frame_dimensions(self)) { - gsr_capture_portal_stop(self); + if(!gsr_capture_portal_get_frame_dimensions(self)) return -1; + + return 0; +} + +static int gsr_capture_portal_start(gsr_capture *cap, gsr_capture_metadata *capture_metadata) { + gsr_capture_portal *self = cap->priv; + + const int result = gsr_capture_portal_setup(self, capture_metadata->fps); + 
if(result != 0) { + gsr_capture_portal_stop(self); + return result; } if(self->params.output_resolution.x == 0 && self->params.output_resolution.y == 0) { @@ -307,24 +314,40 @@ static int max_int(int a, int b) { return a > b ? a : b; } +static bool gsr_capture_portal_capture_has_synchronous_task(gsr_capture *cap) { + gsr_capture_portal *self = cap->priv; + return gsr_pipewire_video_should_restart(&self->pipewire); +} + static int gsr_capture_portal_capture(gsr_capture *cap, gsr_capture_metadata *capture_metadata, gsr_color_conversion *color_conversion) { (void)color_conversion; gsr_capture_portal *self = cap->priv; - /* TODO: Handle formats other than RGB(a) */ - gsr_pipewire_video_region region = {0, 0, 0, 0}; - gsr_pipewire_video_region cursor_region = {0, 0, 0, 0}; - uint32_t pipewire_fourcc = 0; - uint64_t pipewire_modifiers = 0; - bool using_external_image = false; - if(gsr_pipewire_video_map_texture(&self->pipewire, self->texture_map, ®ion, &cursor_region, self->dmabuf_data, &self->num_dmabuf_data, &pipewire_fourcc, &pipewire_modifiers, &using_external_image)) { - if(region.width != self->capture_size.x || region.height != self->capture_size.y) { - self->capture_size.x = region.width; - self->capture_size.y = region.height; - gsr_color_conversion_clear(color_conversion); + if(self->should_stop) + return -1; + + if(gsr_pipewire_video_should_restart(&self->pipewire)) { + fprintf(stderr, "gsr info: gsr_capture_portal_capture: pipewire capture was paused, trying to start capture again\n"); + gsr_capture_portal_stop(self); + const int result = gsr_capture_portal_setup(self, capture_metadata->fps); + if(result != 0) { + self->stop_is_error = result != PORTAL_CAPTURE_CANCELED_BY_USER_EXIT_CODE; + self->should_stop = true; + } + return -1; + } + + /* TODO: Handle formats other than RGB(A) */ + if(self->num_dmabuf_data == 0) { + if(gsr_pipewire_video_map_texture(&self->pipewire, self->texture_map, &self->region, &self->cursor_region, self->dmabuf_data, 
&self->num_dmabuf_data, &self->pipewire_fourcc, &self->pipewire_modifiers, &self->using_external_image)) { + if(self->region.width != self->capture_size.x || self->region.height != self->capture_size.y) { + self->capture_size.x = self->region.width; + self->capture_size.y = self->region.height; + gsr_color_conversion_clear(color_conversion); + } + } else { + return -1; } - } else { - return 0; } const bool is_scaled = self->params.output_resolution.x > 0 && self->params.output_resolution.y > 0; @@ -338,27 +361,27 @@ static int gsr_capture_portal_capture(gsr_capture *cap, gsr_capture_metadata *ca // TODO: Handle region crop - gsr_color_conversion_draw(color_conversion, using_external_image ? self->texture_map.external_texture_id : self->texture_map.texture_id, + gsr_color_conversion_draw(color_conversion, self->using_external_image ? self->texture_map.external_texture_id : self->texture_map.texture_id, target_pos, output_size, - (vec2i){region.x, region.y}, self->capture_size, self->capture_size, - GSR_ROT_0, GSR_SOURCE_COLOR_RGB, using_external_image, false); + (vec2i){self->region.x, self->region.y}, self->capture_size, self->capture_size, + GSR_ROT_0, GSR_SOURCE_COLOR_RGB, self->using_external_image, false); - if(self->params.record_cursor && self->texture_map.cursor_texture_id > 0 && cursor_region.width > 0) { + if(self->params.record_cursor && self->texture_map.cursor_texture_id > 0 && self->cursor_region.width > 0) { const vec2d scale = { self->capture_size.x == 0 ? 0 : (double)output_size.x / (double)self->capture_size.x, self->capture_size.y == 0 ? 
0 : (double)output_size.y / (double)self->capture_size.y }; const vec2i cursor_pos = { - target_pos.x + (cursor_region.x * scale.x), - target_pos.y + (cursor_region.y * scale.y) + target_pos.x + (self->cursor_region.x * scale.x), + target_pos.y + (self->cursor_region.y * scale.y) }; self->params.egl->glEnable(GL_SCISSOR_TEST); self->params.egl->glScissor(target_pos.x, target_pos.y, output_size.x, output_size.y); gsr_color_conversion_draw(color_conversion, self->texture_map.cursor_texture_id, - (vec2i){cursor_pos.x, cursor_pos.y}, (vec2i){cursor_region.width * scale.x, cursor_region.height * scale.y}, - (vec2i){0, 0}, (vec2i){cursor_region.width, cursor_region.height}, (vec2i){cursor_region.width, cursor_region.height}, + (vec2i){cursor_pos.x, cursor_pos.y}, (vec2i){self->cursor_region.width * scale.x, self->cursor_region.height * scale.y}, + (vec2i){0, 0}, (vec2i){self->cursor_region.width, self->cursor_region.height}, (vec2i){self->cursor_region.width, self->cursor_region.height}, GSR_ROT_0, GSR_SOURCE_COLOR_RGB, false, true); self->params.egl->glDisable(GL_SCISSOR_TEST); } @@ -376,6 +399,13 @@ static bool gsr_capture_portal_uses_external_image(gsr_capture *cap) { return true; } +static bool gsr_capture_portal_should_stop(gsr_capture *cap, bool *err) { + gsr_capture_portal *self = cap->priv; + if(err) + *err = self->stop_is_error; + return self->should_stop; +} + static bool gsr_capture_portal_is_damaged(gsr_capture *cap) { gsr_capture_portal *self = cap->priv; return gsr_pipewire_video_is_damaged(&self->pipewire); @@ -417,7 +447,8 @@ gsr_capture* gsr_capture_portal_create(const gsr_capture_portal_params *params) *cap = (gsr_capture) { .start = gsr_capture_portal_start, .tick = NULL, - .should_stop = NULL, + .should_stop = gsr_capture_portal_should_stop, + .capture_has_synchronous_task = gsr_capture_portal_capture_has_synchronous_task, .capture = gsr_capture_portal_capture, .uses_external_image = gsr_capture_portal_uses_external_image, .is_damaged = 
gsr_capture_portal_is_damaged, diff --git a/src/color_conversion.c b/src/color_conversion.c index 88dc398..23b166e 100644 --- a/src/color_conversion.c +++ b/src/color_conversion.c @@ -89,15 +89,13 @@ static void get_compute_shader_header(char *header, size_t header_size, bool ext if(external_texture) { snprintf(header, header_size, "#version 310 es\n" - "#extension GL_ARB_compute_shader: enable\n" "#extension GL_OES_EGL_image_external : enable\n" "#extension GL_OES_EGL_image_external_essl3 : require\n" "layout(binding = 0) uniform highp samplerExternalOES img_input;\n" "layout(binding = 1) uniform highp sampler2D img_background;\n"); } else { snprintf(header, header_size, - "#version 420\n" - "#extension GL_ARB_compute_shader: enable\n" + "#version 310 es\n" "layout(binding = 0) uniform highp sampler2D img_input;\n" "layout(binding = 1) uniform highp sampler2D img_background;\n"); } @@ -109,7 +107,7 @@ static int load_compute_shader_y(gsr_shader *shader, gsr_egl *egl, gsr_color_com char header[512]; get_compute_shader_header(header, sizeof(header), external_texture); - char compute_shader[2048]; + char compute_shader[4096]; snprintf(compute_shader, sizeof(compute_shader), "%s" "layout (local_size_x = %d, local_size_y = %d, local_size_z = 1) in;\n" @@ -127,12 +125,16 @@ static int load_compute_shader_y(gsr_shader *shader, gsr_egl *egl, gsr_color_com " ivec2 output_size = textureSize(img_background, 0);\n" " vec2 rotated_texel_coord = vec2(texel_coord - source_position - size_shift) * rotation_matrix + vec2(size_shift) + 0.5;\n" " vec2 output_texel_coord = vec2(texel_coord - source_position + target_position) + 0.5;\n" - " vec4 source_color = texture(img_input, rotated_texel_coord/vec2(size));\n" + " vec2 source_color_coords = rotated_texel_coord/vec2(size);\n" + " vec4 source_color = texture(img_input, source_color_coords);\n" + " if(source_color_coords.x > 1.0 || source_color_coords.y > 1.0)\n" + " source_color.rgba = vec4(0.0, 0.0, 0.0, %s);\n" " vec4 
source_color_yuv = RGBtoYUV * vec4(source_color.rgb, 1.0);\n" " vec4 output_color_yuv = %s;\n" " float y_color = mix(output_color_yuv.r, source_color_yuv.r, source_color.a);\n" " imageStore(img_output, texel_coord + target_position, vec4(y_color, 1.0, 1.0, 1.0));\n" "}\n", header, max_local_size_dim, max_local_size_dim, color_transform_matrix, + alpha_blending ? "0.0" : "1.0", alpha_blending ? "texture(img_background, output_texel_coord/vec2(output_size))" : "source_color_yuv"); if(gsr_shader_init(shader, egl, NULL, NULL, compute_shader) != 0) @@ -151,7 +153,7 @@ static int load_compute_shader_uv(gsr_shader *shader, gsr_egl *egl, gsr_color_co char header[512]; get_compute_shader_header(header, sizeof(header), external_texture); - char compute_shader[2048]; + char compute_shader[4096]; snprintf(compute_shader, sizeof(compute_shader), "%s" "layout (local_size_x = %d, local_size_y = %d, local_size_z = 1) in;\n" @@ -169,12 +171,16 @@ static int load_compute_shader_uv(gsr_shader *shader, gsr_egl *egl, gsr_color_co " ivec2 output_size = textureSize(img_background, 0);\n" " vec2 rotated_texel_coord = vec2(texel_coord - source_position - size_shift) * rotation_matrix + vec2(size_shift) + 0.5;\n" " vec2 output_texel_coord = vec2(texel_coord - source_position + target_position) + 0.5;\n" - " vec4 source_color = texture(img_input, rotated_texel_coord/vec2(size>>1));\n" // size/2 + " vec2 source_color_coords = rotated_texel_coord/vec2(size>>1);\n" + " vec4 source_color = texture(img_input, source_color_coords);\n" // size/2 + " if(source_color_coords.x > 1.0 || source_color_coords.y > 1.0)\n" + " source_color.rgba = vec4(0.0, 0.0, 0.0, %s);\n" " vec4 source_color_yuv = RGBtoYUV * vec4(source_color.rgb, 1.0);\n" " vec4 output_color_yuv = %s;\n" " vec2 uv_color = mix(output_color_yuv.rg, source_color_yuv.gb, source_color.a);\n" " imageStore(img_output, texel_coord + target_position, vec4(uv_color, 1.0, 1.0));\n" "}\n", header, max_local_size_dim, max_local_size_dim, 
color_transform_matrix, + alpha_blending ? "0.0" : "1.0", alpha_blending ? "texture(img_background, output_texel_coord/vec2(output_size))" : "source_color_yuv"); if(gsr_shader_init(shader, egl, NULL, NULL, compute_shader) != 0) @@ -191,10 +197,11 @@ static int load_compute_shader_rgb(gsr_shader *shader, gsr_egl *egl, gsr_color_c char header[512]; get_compute_shader_header(header, sizeof(header), external_texture); - char compute_shader[2048]; + char compute_shader[4096]; snprintf(compute_shader, sizeof(compute_shader), "%s" "layout (local_size_x = %d, local_size_y = %d, local_size_z = 1) in;\n" + "precision highp float;\n" "uniform ivec2 source_position;\n" "uniform ivec2 target_position;\n" "uniform vec2 scale;\n" @@ -207,11 +214,15 @@ static int load_compute_shader_rgb(gsr_shader *shader, gsr_egl *egl, gsr_color_c " ivec2 output_size = textureSize(img_background, 0);\n" " vec2 rotated_texel_coord = vec2(texel_coord - source_position - size_shift) * rotation_matrix + vec2(size_shift) + 0.5;\n" " vec2 output_texel_coord = vec2(texel_coord - source_position + target_position) + 0.5;\n" - " vec4 source_color = texture(img_input, rotated_texel_coord/vec2(size));\n" + " vec2 source_color_coords = rotated_texel_coord/vec2(size);\n" + " vec4 source_color = texture(img_input, source_color_coords);\n" + " if(source_color_coords.x > 1.0 || source_color_coords.y > 1.0)\n" + " source_color.rgba = vec4(0.0, 0.0, 0.0, %s);\n" " vec4 output_color = %s;\n" " vec3 color = mix(output_color.rgb, source_color.rgb, source_color.a);\n" " imageStore(img_output, texel_coord + target_position, vec4(color, 1.0));\n" "}\n", header, max_local_size_dim, max_local_size_dim, + alpha_blending ? "0.0" : "1.0", alpha_blending ? 
"texture(img_background, output_texel_coord/vec2(output_size))" : "source_color"); if(gsr_shader_init(shader, egl, NULL, NULL, compute_shader) != 0) @@ -620,20 +631,33 @@ int gsr_color_conversion_init(gsr_color_conversion *self, const gsr_color_conver } } - if(!gsr_color_conversion_load_compute_shaders(self)) { + if(self->params.force_graphics_shader) { self->compute_shaders_failed_to_load = true; - fprintf(stderr, "gsr info: failed to load one or more compute shaders, run gpu-screen-recorder with the '-gl-debug yes' option to see why. Falling back to slower graphics shader instead\n"); + self->external_compute_shaders_failed_to_load = true; + if(!gsr_color_conversion_load_graphics_shaders(self)) goto err; - } - if(self->params.load_external_image_shader) { - if(!gsr_color_conversion_load_external_compute_shaders(self)) { - self->external_compute_shaders_failed_to_load = true; - fprintf(stderr, "gsr info: failed to load one or more external compute shaders, run gpu-screen-recorder with the '-gl-debug yes' option to see why. Falling back to slower graphics shader instead\n"); + if(self->params.load_external_image_shader) { if(!gsr_color_conversion_load_external_graphics_shaders(self)) goto err; } + } else { + if(!gsr_color_conversion_load_compute_shaders(self)) { + self->compute_shaders_failed_to_load = true; + fprintf(stderr, "gsr info: failed to load one or more compute shaders, run gpu-screen-recorder with the '-gl-debug yes' option to see why. Falling back to slower graphics shader instead\n"); + if(!gsr_color_conversion_load_graphics_shaders(self)) + goto err; + } + + if(self->params.load_external_image_shader) { + if(!gsr_color_conversion_load_external_compute_shaders(self)) { + self->external_compute_shaders_failed_to_load = true; + fprintf(stderr, "gsr info: failed to load one or more external compute shaders, run gpu-screen-recorder with the '-gl-debug yes' option to see why. 
Falling back to slower graphics shader instead\n"); + if(!gsr_color_conversion_load_external_graphics_shaders(self)) + goto err; + } + } } if(load_framebuffers(self) != 0) @@ -920,7 +944,6 @@ void gsr_color_conversion_draw(gsr_color_conversion *self, unsigned int texture_ source_position.x += source_pos.x; source_position.y += source_pos.y; gsr_color_conversion_draw_graphics(self, texture_id, external_texture, rotation_matrix, source_position, source_size, destination_pos, texture_size, scale, source_color); - // TODO: Is glFlush and glFinish needed here for graphics garbage? } else { switch(rotation) { case GSR_ROT_0: @@ -955,6 +978,7 @@ void gsr_color_conversion_draw(gsr_color_conversion *self, unsigned int texture_ } } + self->params.egl->glFlush(); // TODO: Use the minimal barrier required self->params.egl->glMemoryBarrier(GL_ALL_BARRIER_BITS); // GL_SHADER_IMAGE_ACCESS_BARRIER_BIT self->params.egl->glUseProgram(0); @@ -998,6 +1022,13 @@ void gsr_color_conversion_clear(gsr_color_conversion *self) { self->params.egl->glBindFramebuffer(GL_FRAMEBUFFER, 0); } +void gsr_color_conversion_read_destination_texture(gsr_color_conversion *self, int destination_texture_index, int x, int y, int width, int height, unsigned int color_format, unsigned int data_format, void *pixels) { + assert(destination_texture_index >= 0 && destination_texture_index < self->params.num_destination_textures); + self->params.egl->glBindFramebuffer(GL_FRAMEBUFFER, self->framebuffers[destination_texture_index]); + self->params.egl->glReadPixels(x, y, width, height, color_format, data_format, pixels); + self->params.egl->glBindFramebuffer(GL_FRAMEBUFFER, 0); +} + gsr_rotation gsr_monitor_rotation_to_rotation(gsr_monitor_rotation monitor_rotation) { return (gsr_rotation)monitor_rotation; } diff --git a/src/cursor.c b/src/cursor.c index 40532f8..e818d72 100644 --- a/src/cursor.c +++ b/src/cursor.c @@ -56,10 +56,6 @@ static bool gsr_cursor_set_from_x11_cursor_image(gsr_cursor *self, XFixesCursorI 
self->egl->glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, self->size.x, self->size.y, 0, GL_RGBA, GL_UNSIGNED_BYTE, cursor_data); free(cursor_data); - const float border_color[4] = { 0.0f, 0.0f, 0.0f, 0.0f }; - self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER); - self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER); - self->egl->glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, border_color); self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); diff --git a/src/dbus.c b/src/dbus.c new file mode 100644 index 0000000..f12891f --- /dev/null +++ b/src/dbus.c @@ -0,0 +1,913 @@ +#include "../include/dbus.h" + +#include <sys/random.h> + +#include <stdio.h> +#include <string.h> +#include <stdlib.h> +#include <errno.h> +#include <assert.h> + +/* TODO: Make non-blocking when GPU Screen Recorder is turned into a library */ +/* TODO: Make sure responses matches the requests */ + +#define DESKTOP_PORTAL_SIGNAL_RULE "type='signal',interface='org.freedesktop.Portal.Request'" + +typedef enum { + DICT_TYPE_STRING, + DICT_TYPE_UINT32, + DICT_TYPE_BOOL, +} dict_value_type; + +typedef struct { + const char *key; + dict_value_type value_type; + union { + char *str; + dbus_uint32_t u32; + dbus_bool_t boolean; + }; +} dict_entry; + +static bool generate_random_characters(char *buffer, int buffer_size, const char *alphabet, size_t alphabet_size) { + /* TODO: Use other functions on other platforms than linux */ + if(getrandom(buffer, buffer_size, 0) < buffer_size) { + fprintf(stderr, "Failed to get random bytes, error: %s\n", strerror(errno)); + return false; + } + + for(int i = 0; i < buffer_size; ++i) { + unsigned char c = *(unsigned char*)&buffer[i]; + buffer[i] = alphabet[c % alphabet_size]; + } + + return true; +} + +static bool generate_random_characters_standard_alphabet(char *buffer, int buffer_size) { + return 
generate_random_characters(buffer, buffer_size, "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789", 62); +} + +static const char* dict_value_type_to_string(dict_value_type type) { + switch(type) { + case DICT_TYPE_STRING: return "string"; + case DICT_TYPE_UINT32: return "uint32"; + case DICT_TYPE_BOOL: return "boolean"; + } + return "(unknown)"; +} + +bool gsr_dbus_init(gsr_dbus *self, const char *screencast_restore_token) { + memset(self, 0, sizeof(*self)); + dbus_error_init(&self->err); + + self->random_str[DBUS_RANDOM_STR_SIZE] = '\0'; + if(!generate_random_characters_standard_alphabet(self->random_str, DBUS_RANDOM_STR_SIZE)) { + fprintf(stderr, "gsr error: gsr_dbus_init: failed to generate random string\n"); + return false; + } + + self->con = dbus_bus_get(DBUS_BUS_SESSION, &self->err); + if(dbus_error_is_set(&self->err)) { + fprintf(stderr, "gsr error: gsr_dbus_init: dbus_bus_get failed with error: %s\n", self->err.message); + return false; + } + + if(!self->con) { + fprintf(stderr, "gsr error: gsr_dbus_init: failed to get dbus session\n"); + return false; + } + + /* TODO: Check the name */ + const int ret = dbus_bus_request_name(self->con, "com.dec05eba.gpu_screen_recorder", DBUS_NAME_FLAG_REPLACE_EXISTING, &self->err); + if(dbus_error_is_set(&self->err)) { + fprintf(stderr, "gsr error: gsr_dbus_init: dbus_bus_request_name failed with error: %s\n", self->err.message); + gsr_dbus_deinit(self); + return false; + } + + if(screencast_restore_token) { + self->screencast_restore_token = strdup(screencast_restore_token); + if(!self->screencast_restore_token) { + fprintf(stderr, "gsr error: gsr_dbus_init: failed to clone restore token\n"); + gsr_dbus_deinit(self); + return false; + } + } + + (void)ret; + // if(ret != DBUS_REQUEST_NAME_REPLY_PRIMARY_OWNER) { + // fprintf(stderr, "gsr error: gsr_capture_portal_setup_dbus: dbus_bus_request_name failed to get primary owner\n"); + // return false; + // } + + return true; +} + +void gsr_dbus_deinit(gsr_dbus 
*self) { + if(self->screencast_restore_token) { + free(self->screencast_restore_token); + self->screencast_restore_token = NULL; + } + + if(self->desktop_portal_rule_added) { + dbus_bus_remove_match(self->con, DESKTOP_PORTAL_SIGNAL_RULE, NULL); + // dbus_connection_flush(self->con); + self->desktop_portal_rule_added = false; + } + + if(self->con) { + dbus_error_free(&self->err); + + dbus_bus_release_name(self->con, "com.dec05eba.gpu_screen_recorder", NULL); + + // Apparently shouldn't be used when a connection is setup by using dbus_bus_get + //dbus_connection_close(self->con); + dbus_connection_unref(self->con); + self->con = NULL; + } +} + +static bool gsr_dbus_desktop_portal_get_property(gsr_dbus *self, const char *interface, const char *property_name, uint32_t *result) { + *result = 0; + + DBusMessage *msg = dbus_message_new_method_call( + "org.freedesktop.portal.Desktop", // target for the method call + "/org/freedesktop/portal/desktop", // object to call on + "org.freedesktop.DBus.Properties", // interface to call on + "Get"); // method name + if(!msg) { + fprintf(stderr, "gsr error: gsr_dbus_desktop_portal_get_property: dbus_message_new_method_call failed\n"); + return false; + } + + DBusMessageIter it; + dbus_message_iter_init_append(msg, &it); + + if(!dbus_message_iter_append_basic(&it, DBUS_TYPE_STRING, &interface)) { + fprintf(stderr, "gsr error: gsr_dbus_desktop_portal_get_property: failed to add interface\n"); + dbus_message_unref(msg); + return false; + } + + if(!dbus_message_iter_append_basic(&it, DBUS_TYPE_STRING, &property_name)) { + fprintf(stderr, "gsr error: gsr_dbus_desktop_portal_get_property: failed to add property_name\n"); + dbus_message_unref(msg); + return false; + } + + DBusPendingCall *pending = NULL; + if(!dbus_connection_send_with_reply(self->con, msg, &pending, -1) || !pending) { // -1 is default timeout + fprintf(stderr, "gsr error: gsr_dbus_desktop_portal_get_property: dbus_connection_send_with_reply failed\n"); + 
dbus_message_unref(msg); + return false; + } + dbus_connection_flush(self->con); + + //fprintf(stderr, "Request Sent\n"); + + dbus_message_unref(msg); + msg = NULL; + + dbus_pending_call_block(pending); + + msg = dbus_pending_call_steal_reply(pending); + if(!msg) { + fprintf(stderr, "gsr error: gsr_dbus_desktop_portal_get_property: dbus_pending_call_steal_reply failed\n"); + dbus_pending_call_unref(pending); + dbus_message_unref(msg); + return false; + } + + dbus_pending_call_unref(pending); + pending = NULL; + + DBusMessageIter resp_args; + if(!dbus_message_iter_init(msg, &resp_args)) { + fprintf(stderr, "gsr error: gsr_dbus_desktop_portal_get_property: response message is missing arguments\n"); + dbus_message_unref(msg); + return false; + } else if(DBUS_TYPE_UINT32 == dbus_message_iter_get_arg_type(&resp_args)) { + dbus_message_iter_get_basic(&resp_args, result); + } else if(DBUS_TYPE_VARIANT == dbus_message_iter_get_arg_type(&resp_args)) { + DBusMessageIter variant_iter; + dbus_message_iter_recurse(&resp_args, &variant_iter); + + if(dbus_message_iter_get_arg_type(&variant_iter) == DBUS_TYPE_UINT32) { + dbus_message_iter_get_basic(&variant_iter, result); + } else { + fprintf(stderr, "gsr error: gsr_dbus_desktop_portal_get_property: response message is not a variant with an uint32, %c\n", dbus_message_iter_get_arg_type(&variant_iter)); + dbus_message_unref(msg); + return false; + } + } else { + fprintf(stderr, "gsr error: gsr_dbus_desktop_portal_get_property: response message is not an uint32, %c\n", dbus_message_iter_get_arg_type(&resp_args)); + dbus_message_unref(msg); + return false; + // TODO: Check dbus_error_is_set? 
+ } + + dbus_message_unref(msg); + return true; +} + +static uint32_t gsr_dbus_get_screencast_version_cached(gsr_dbus *self) { + if(self->screencast_version == 0) + gsr_dbus_desktop_portal_get_property(self, "org.freedesktop.portal.ScreenCast", "version", &self->screencast_version); + return self->screencast_version; +} + +static bool gsr_dbus_ensure_desktop_portal_rule_added(gsr_dbus *self) { + if(self->desktop_portal_rule_added) + return true; + + dbus_bus_add_match(self->con, DESKTOP_PORTAL_SIGNAL_RULE, &self->err); + dbus_connection_flush(self->con); + if(dbus_error_is_set(&self->err)) { + fprintf(stderr, "gsr error: gsr_dbus_ensure_desktop_portal_rule_added: failed to add dbus rule %s, error: %s\n", DESKTOP_PORTAL_SIGNAL_RULE, self->err.message); + return false; + } + self->desktop_portal_rule_added = true; + return true; +} + +static void gsr_dbus_portal_get_unique_handle_token(gsr_dbus *self, char *buffer, int size) { + snprintf(buffer, size, "gpu_screen_recorder_handle_%s_%u", self->random_str, self->handle_counter++); +} + +static void gsr_dbus_portal_get_unique_session_token(gsr_dbus *self, char *buffer, int size) { + snprintf(buffer, size, "gpu_screen_recorder_session_%s", self->random_str); +} + +static bool dbus_add_dict(DBusMessageIter *it, const dict_entry *entries, int num_entries) { + DBusMessageIter array_it; + if(!dbus_message_iter_open_container(it, DBUS_TYPE_ARRAY, "{sv}", &array_it)) + return false; + + for (int i = 0; i < num_entries; ++i) { + DBusMessageIter entry_it = DBUS_MESSAGE_ITER_INIT_CLOSED; + DBusMessageIter variant_it = DBUS_MESSAGE_ITER_INIT_CLOSED; + + if(!dbus_message_iter_open_container(&array_it, DBUS_TYPE_DICT_ENTRY, NULL, &entry_it)) + goto entry_err; + + if(!dbus_message_iter_append_basic(&entry_it, DBUS_TYPE_STRING, &entries[i].key)) + goto entry_err; + + switch (entries[i].value_type) { + case DICT_TYPE_STRING: { + if(!dbus_message_iter_open_container(&entry_it, DBUS_TYPE_VARIANT, DBUS_TYPE_STRING_AS_STRING, &variant_it)) 
+ goto entry_err; + if(!dbus_message_iter_append_basic(&variant_it, DBUS_TYPE_STRING, &entries[i].str)) + goto entry_err; + break; + } + case DICT_TYPE_UINT32: { + if(!dbus_message_iter_open_container(&entry_it, DBUS_TYPE_VARIANT, DBUS_TYPE_UINT32_AS_STRING, &variant_it)) + goto entry_err; + if(!dbus_message_iter_append_basic(&variant_it, DBUS_TYPE_UINT32, &entries[i].u32)) + goto entry_err; + break; + } + case DICT_TYPE_BOOL: { + if(!dbus_message_iter_open_container(&entry_it, DBUS_TYPE_VARIANT, DBUS_TYPE_BOOLEAN_AS_STRING, &variant_it)) + goto entry_err; + if(!dbus_message_iter_append_basic(&variant_it, DBUS_TYPE_BOOLEAN, &entries[i].boolean)) + goto entry_err; + break; + } + } + + dbus_message_iter_close_container(&entry_it, &variant_it); + dbus_message_iter_close_container(&array_it, &entry_it); + continue; + + entry_err: + dbus_message_iter_abandon_container_if_open(&array_it, &variant_it); + dbus_message_iter_abandon_container_if_open(&array_it, &entry_it); + dbus_message_iter_abandon_container_if_open(it, &array_it); + return false; + } + + return dbus_message_iter_close_container(it, &array_it); +} + +/* If |response_msg| is NULL then we dont wait for a response signal */ +static bool gsr_dbus_call_screencast_method(gsr_dbus *self, const char *method_name, const char *session_handle, const char *parent_window, const dict_entry *entries, int num_entries, int *resp_fd, DBusMessage **response_msg) { + if(resp_fd) + *resp_fd = -1; + + if(response_msg) + *response_msg = NULL; + + if(!gsr_dbus_ensure_desktop_portal_rule_added(self)) + return false; + + DBusMessage *msg = dbus_message_new_method_call( + "org.freedesktop.portal.Desktop", // target for the method call + "/org/freedesktop/portal/desktop", // object to call on + "org.freedesktop.portal.ScreenCast", // interface to call on + method_name); // method name + if(!msg) { + fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: dbus_message_new_method_call failed\n"); + return false; + } + + 
DBusMessageIter it; + dbus_message_iter_init_append(msg, &it); + + if(session_handle) { + if(!dbus_message_iter_append_basic(&it, DBUS_TYPE_OBJECT_PATH, &session_handle)) { + fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: failed to add session_handle\n"); + dbus_message_unref(msg); + return false; + } + } + + if(parent_window) { + if(!dbus_message_iter_append_basic(&it, DBUS_TYPE_STRING, &parent_window)) { + fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: failed to add parent_window\n"); + dbus_message_unref(msg); + return false; + } + } + + if(!dbus_add_dict(&it, entries, num_entries)) { + fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: failed to add dict\n"); + dbus_message_unref(msg); + return false; + } + + DBusPendingCall *pending = NULL; + if(!dbus_connection_send_with_reply(self->con, msg, &pending, -1) || !pending) { // -1 is default timeout + fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: dbus_connection_send_with_reply failed\n"); + dbus_message_unref(msg); + return false; + } + dbus_connection_flush(self->con); + + //fprintf(stderr, "Request Sent\n"); + + dbus_message_unref(msg); + msg = NULL; + + dbus_pending_call_block(pending); + + msg = dbus_pending_call_steal_reply(pending); + if(!msg) { + fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: dbus_pending_call_steal_reply failed\n"); + dbus_pending_call_unref(pending); + dbus_message_unref(msg); + return false; + } + + dbus_pending_call_unref(pending); + pending = NULL; + + DBusMessageIter resp_args; + if(!dbus_message_iter_init(msg, &resp_args)) { + fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: response message is missing arguments\n"); + dbus_message_unref(msg); + return false; + } else if (DBUS_TYPE_OBJECT_PATH == dbus_message_iter_get_arg_type(&resp_args)) { + const char *res = NULL; + dbus_message_iter_get_basic(&resp_args, &res); + } else if(DBUS_TYPE_UNIX_FD == dbus_message_iter_get_arg_type(&resp_args)) { + int fd = 
-1; + dbus_message_iter_get_basic(&resp_args, &fd); + + if(resp_fd) + *resp_fd = fd; + } else if(DBUS_TYPE_STRING == dbus_message_iter_get_arg_type(&resp_args)) { + char *err = NULL; + dbus_message_iter_get_basic(&resp_args, &err); + fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: failed with error: %s\n", err); + + dbus_message_unref(msg); + return false; + // TODO: Check dbus_error_is_set? + } else { + fprintf(stderr, "gsr error: gsr_dbus_call_screencast_method: response message is not an object path or unix fd\n"); + dbus_message_unref(msg); + return false; + // TODO: Check dbus_error_is_set? + } + + dbus_message_unref(msg); + if(!response_msg) + return true; + + /* TODO: Add timeout, but take into consideration user interactive signals (such as selecting a monitor to capture for ScreenCast) */ + for (;;) { + const int timeout_milliseconds = 10; + dbus_connection_read_write(self->con, timeout_milliseconds); + *response_msg = dbus_connection_pop_message(self->con); + + if(!*response_msg) + continue; + + if(!dbus_message_is_signal(*response_msg, "org.freedesktop.portal.Request", "Response")) { + dbus_message_unref(*response_msg); + *response_msg = NULL; + continue; + } + + break; + } + + return true; +} + +static int gsr_dbus_get_response_status(DBusMessageIter *resp_args) { + if(dbus_message_iter_get_arg_type(resp_args) != DBUS_TYPE_UINT32) { + fprintf(stderr, "gsr error: gsr_dbus_get_response_status: missing uint32 in response\n"); + return -1; + } + + dbus_uint32_t response_status = 0; + dbus_message_iter_get_basic(resp_args, &response_status); + + dbus_message_iter_next(resp_args); + return (int)response_status; +} + +static dict_entry* find_dict_entry_by_key(dict_entry *entries, int num_entries, const char *key) { + for(int i = 0; i < num_entries; ++i) { + if(strcmp(entries[i].key, key) == 0) + return &entries[i]; + } + return NULL; +} + +static bool gsr_dbus_get_variant_value(DBusMessageIter *iter, dict_entry *entry) { + 
if(dbus_message_iter_get_arg_type(iter) != DBUS_TYPE_VARIANT) { + fprintf(stderr, "gsr error: gsr_dbus_get_variant_value: value is not a variant\n"); + return false; + } + + DBusMessageIter variant_iter; + dbus_message_iter_recurse(iter, &variant_iter); + + switch(dbus_message_iter_get_arg_type(&variant_iter)) { + case DBUS_TYPE_STRING: { + if(entry->value_type != DICT_TYPE_STRING) { + fprintf(stderr, "gsr error: gsr_dbus_get_variant_value: expected entry value to be a(n) %s was a string\n", dict_value_type_to_string(entry->value_type)); + return false; + } + + const char *value = NULL; + dbus_message_iter_get_basic(&variant_iter, &value); + + if(!value) { + fprintf(stderr, "gsr error: gsr_dbus_get_variant_value: failed to get entry value as value\n"); + return false; + } + + if(entry->str) { + free(entry->str); + entry->str = NULL; + } + + entry->str = strdup(value); + if(!entry->str) { + fprintf(stderr, "gsr error: gsr_dbus_get_variant_value: failed to copy value\n"); + return false; + } + return true; + } + case DBUS_TYPE_UINT32: { + if(entry->value_type != DICT_TYPE_UINT32) { + fprintf(stderr, "gsr error: gsr_dbus_get_variant_value: expected entry value to be a(n) %s was an uint32\n", dict_value_type_to_string(entry->value_type)); + return false; + } + + dbus_message_iter_get_basic(&variant_iter, &entry->u32); + return true; + } + case DBUS_TYPE_BOOLEAN: { + if(entry->value_type != DICT_TYPE_BOOL) { + fprintf(stderr, "gsr error: gsr_dbus_get_variant_value: expected entry value to be a(n) %s was a boolean\n", dict_value_type_to_string(entry->value_type)); + return false; + } + + dbus_message_iter_get_basic(&variant_iter, &entry->boolean); + return true; + } + } + + fprintf(stderr, "gsr error: gsr_dbus_get_variant_value: got unexpected type, expected string, uint32 or boolean\n"); + return false; +} + +/* + Parses a{sv} into matching key entries in |entries|. 
+ If the entry value is a string then it's allocated with malloc and is null-terminated + and has to be free by the caller. + The entry values should be 0 before this method is called. + The entries are free'd if this function fails. +*/ +static bool gsr_dbus_get_map(DBusMessageIter *resp_args, dict_entry *entries, int num_entries) { + if(dbus_message_iter_get_arg_type(resp_args) != DBUS_TYPE_ARRAY) { + fprintf(stderr, "gsr error: gsr_dbus_get_map: missing array in response\n"); + return false; + } + + DBusMessageIter subiter; + dbus_message_iter_recurse(resp_args, &subiter); + + while(dbus_message_iter_get_arg_type(&subiter) != DBUS_TYPE_INVALID) { + DBusMessageIter dictiter = DBUS_MESSAGE_ITER_INIT_CLOSED; + const char *key = NULL; + dict_entry *entry = NULL; + + // fprintf(stderr, " array element type: %c, %s\n", + // dbus_message_iter_get_arg_type(&subiter), + // dbus_message_iter_get_signature(&subiter)); + if(dbus_message_iter_get_arg_type(&subiter) != DBUS_TYPE_DICT_ENTRY) { + fprintf(stderr, "gsr error: gsr_dbus_get_map: array value is not an entry\n"); + return false; + } + + dbus_message_iter_recurse(&subiter, &dictiter); + + if(dbus_message_iter_get_arg_type(&dictiter) != DBUS_TYPE_STRING) { + fprintf(stderr, "gsr error: gsr_dbus_get_map: entry key is not a string\n"); + goto error; + } + + dbus_message_iter_get_basic(&dictiter, &key); + if(!key) { + fprintf(stderr, "gsr error: gsr_dbus_get_map: failed to get entry key as value\n"); + goto error; + } + + entry = find_dict_entry_by_key(entries, num_entries, key); + if(!entry) { + dbus_message_iter_next(&subiter); + continue; + } + + if(!dbus_message_iter_next(&dictiter)) { + fprintf(stderr, "gsr error: gsr_dbus_get_map: missing entry value\n"); + goto error; + } + + if(!gsr_dbus_get_variant_value(&dictiter, entry)) + goto error; + + dbus_message_iter_next(&subiter); + } + + return true; + + error: + for(int i = 0; i < num_entries; ++i) { + if(entries[i].value_type == DICT_TYPE_STRING) { + 
free(entries[i].str); + entries[i].str = NULL; + } + } + return false; +} + +int gsr_dbus_screencast_create_session(gsr_dbus *self, char **session_handle) { + assert(session_handle); + *session_handle = NULL; + + char handle_token[64]; + gsr_dbus_portal_get_unique_handle_token(self, handle_token, sizeof(handle_token)); + + char session_handle_token[64]; + gsr_dbus_portal_get_unique_session_token(self, session_handle_token, sizeof(session_handle_token)); + + dict_entry args[2]; + args[0].key = "handle_token"; + args[0].value_type = DICT_TYPE_STRING; + args[0].str = handle_token; + + args[1].key = "session_handle_token"; + args[1].value_type = DICT_TYPE_STRING; + args[1].str = session_handle_token; + + DBusMessage *response_msg = NULL; + if(!gsr_dbus_call_screencast_method(self, "CreateSession", NULL, NULL, args, 2, NULL, &response_msg)) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_create_session: failed to setup ScreenCast session. Make sure you have a desktop portal running with support for the ScreenCast interface and that the desktop portal matches the Wayland compositor you are running.\n"); + return -1; + } + + // TODO: Verify signal path matches |res|, maybe check the below + // DBUS_TYPE_ARRAY value? 
+ //fprintf(stderr, "signature: %s, sender: %s\n", dbus_message_get_signature(msg), dbus_message_get_sender(msg)); + DBusMessageIter resp_args; + if(!dbus_message_iter_init(response_msg, &resp_args)) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_create_session: missing response\n"); + dbus_message_unref(response_msg); + return -1; + } + + const int response_status = gsr_dbus_get_response_status(&resp_args); + if(response_status != 0) { + dbus_message_unref(response_msg); + return response_status; + } + + dict_entry entries[1]; + entries[0].key = "session_handle"; + entries[0].str = NULL; + entries[0].value_type = DICT_TYPE_STRING; + if(!gsr_dbus_get_map(&resp_args, entries, 1)) { + dbus_message_unref(response_msg); + return -1; + } + + if(!entries[0].str) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_create_session: missing \"session_handle\" in response\n"); + dbus_message_unref(response_msg); + return -1; + } + + *session_handle = entries[0].str; + //fprintf(stderr, "session handle: |%s|\n", entries[0].str); + //free(entries[0].str); + + dbus_message_unref(response_msg); + return 0; +} + +static uint32_t unset_unsupported_capture_types(uint32_t requested_capture_types, uint32_t available_capture_types) { + if(!(available_capture_types & GSR_PORTAL_CAPTURE_TYPE_MONITOR)) + requested_capture_types &= ~GSR_PORTAL_CAPTURE_TYPE_MONITOR; + if(!(available_capture_types & GSR_PORTAL_CAPTURE_TYPE_WINDOW)) + requested_capture_types &= ~GSR_PORTAL_CAPTURE_TYPE_WINDOW; + if(!(available_capture_types & GSR_PORTAL_CAPTURE_TYPE_VIRTUAL)) + requested_capture_types &= ~GSR_PORTAL_CAPTURE_TYPE_VIRTUAL; + return requested_capture_types; +} + +static uint32_t unset_unsupported_cursor_modes(uint32_t requested_cursor_modes, uint32_t available_cursor_modes) { + if(!(available_cursor_modes & GSR_PORTAL_CURSOR_MODE_HIDDEN)) + requested_cursor_modes &= ~GSR_PORTAL_CURSOR_MODE_HIDDEN; + if(!(available_cursor_modes & GSR_PORTAL_CURSOR_MODE_EMBEDDED)) + requested_cursor_modes &= 
~GSR_PORTAL_CURSOR_MODE_EMBEDDED; + if(!(available_cursor_modes & GSR_PORTAL_CURSOR_MODE_METADATA)) + requested_cursor_modes &= ~GSR_PORTAL_CURSOR_MODE_METADATA; + return requested_cursor_modes; +} + +int gsr_dbus_screencast_select_sources(gsr_dbus *self, const char *session_handle, uint32_t capture_type, uint32_t cursor_mode) { + assert(session_handle); + + uint32_t available_source_types = 0; + gsr_dbus_desktop_portal_get_property(self, "org.freedesktop.portal.ScreenCast", "AvailableSourceTypes", &available_source_types); + if(available_source_types == 0) + fprintf(stderr, "gsr error: gsr_dbus_screencast_select_sources: no source types are available\n"); + capture_type = unset_unsupported_capture_types(capture_type, available_source_types); + + uint32_t available_cursor_modes = 0; + gsr_dbus_desktop_portal_get_property(self, "org.freedesktop.portal.ScreenCast", "AvailableCursorModes", &available_cursor_modes); + if(available_cursor_modes == 0) + fprintf(stderr, "gsr error: gsr_dbus_screencast_select_sources: no cursors modes are available\n"); + cursor_mode = unset_unsupported_cursor_modes(cursor_mode, available_cursor_modes); + + char handle_token[64]; + gsr_dbus_portal_get_unique_handle_token(self, handle_token, sizeof(handle_token)); + + int num_arg_dict = 4; + dict_entry args[6]; + args[0].key = "types"; + args[0].value_type = DICT_TYPE_UINT32; + args[0].u32 = capture_type; + + args[1].key = "multiple"; + args[1].value_type = DICT_TYPE_BOOL; + args[1].boolean = false; /* TODO: Wayland ignores this and still gives the option to select multiple sources. Support that case.. 
*/ + + args[2].key = "handle_token"; + args[2].value_type = DICT_TYPE_STRING; + args[2].str = handle_token; + + args[3].key = "cursor_mode"; + args[3].value_type = DICT_TYPE_UINT32; + args[3].u32 = cursor_mode; + + const int screencast_server_version = gsr_dbus_get_screencast_version_cached(self); + if(screencast_server_version >= 4) { + num_arg_dict = 5; + args[4].key = "persist_mode"; + args[4].value_type = DICT_TYPE_UINT32; + args[4].u32 = 2; /* persist until explicitly revoked */ + + if(self->screencast_restore_token && self->screencast_restore_token[0]) { + num_arg_dict = 6; + + args[5].key = "restore_token"; + args[5].value_type = DICT_TYPE_STRING; + args[5].str = self->screencast_restore_token; + } + } else if(self->screencast_restore_token && self->screencast_restore_token[0]) { + fprintf(stderr, "gsr warning: gsr_dbus_screencast_select_sources: tried to use restore token but this option is only available in screencast version >= 4, your wayland compositors screencast version is %d\n", screencast_server_version); + } + + DBusMessage *response_msg = NULL; + if(!gsr_dbus_call_screencast_method(self, "SelectSources", session_handle, NULL, args, num_arg_dict, NULL, &response_msg)) { + if(num_arg_dict == 6) { + /* We dont know what the error exactly is but assume it may be because of invalid restore token. 
In that case try without restore token */ + fprintf(stderr, "gsr warning: gsr_dbus_screencast_select_sources: SelectSources failed, retrying without restore_token\n"); + num_arg_dict = 5; + if(!gsr_dbus_call_screencast_method(self, "SelectSources", session_handle, NULL, args, num_arg_dict, NULL, &response_msg)) + return -1; + } else { + return -1; + } + } + + // TODO: Verify signal path matches |res|, maybe check the below + //fprintf(stderr, "signature: %s, sender: %s\n", dbus_message_get_signature(msg), dbus_message_get_sender(msg)); + DBusMessageIter resp_args; + if(!dbus_message_iter_init(response_msg, &resp_args)) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_create_session: missing response\n"); + dbus_message_unref(response_msg); + return -1; + } + + + const int response_status = gsr_dbus_get_response_status(&resp_args); + if(response_status != 0) { + dbus_message_unref(response_msg); + return response_status; + } + + dbus_message_unref(response_msg); + return 0; +} + +static dbus_uint32_t screencast_stream_get_pipewire_node(DBusMessageIter *iter) { + DBusMessageIter subiter; + dbus_message_iter_recurse(iter, &subiter); + + if(dbus_message_iter_get_arg_type(&subiter) == DBUS_TYPE_STRUCT) { + DBusMessageIter structiter; + dbus_message_iter_recurse(&subiter, &structiter); + + if(dbus_message_iter_get_arg_type(&structiter) == DBUS_TYPE_UINT32) { + dbus_uint32_t data = 0; + dbus_message_iter_get_basic(&structiter, &data); + return data; + } + } + + return 0; +} + +int gsr_dbus_screencast_start(gsr_dbus *self, const char *session_handle, uint32_t *pipewire_node) { + assert(session_handle); + *pipewire_node = 0; + + char handle_token[64]; + gsr_dbus_portal_get_unique_handle_token(self, handle_token, sizeof(handle_token)); + + dict_entry args[1]; + args[0].key = "handle_token"; + args[0].value_type = DICT_TYPE_STRING; + args[0].str = handle_token; + + DBusMessage *response_msg = NULL; + if(!gsr_dbus_call_screencast_method(self, "Start", session_handle, "", 
args, 1, NULL, &response_msg)) + return -1; + + // TODO: Verify signal path matches |res|, maybe check the below + //fprintf(stderr, "signature: %s, sender: %s\n", dbus_message_get_signature(msg), dbus_message_get_sender(msg)); + DBusMessageIter resp_args; + if(!dbus_message_iter_init(response_msg, &resp_args)) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_start: missing response\n"); + dbus_message_unref(response_msg); + return -1; + } + + const int response_status = gsr_dbus_get_response_status(&resp_args); + if(response_status != 0) { + dbus_message_unref(response_msg); + return response_status; + } + + if(dbus_message_iter_get_arg_type(&resp_args) != DBUS_TYPE_ARRAY) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_start: missing array in response\n"); + dbus_message_unref(response_msg); + return -1; + } + + DBusMessageIter subiter; + dbus_message_iter_recurse(&resp_args, &subiter); + + while(dbus_message_iter_get_arg_type(&subiter) != DBUS_TYPE_INVALID) { + DBusMessageIter dictiter = DBUS_MESSAGE_ITER_INIT_CLOSED; + const char *key = NULL; + + // fprintf(stderr, " array element type: %c, %s\n", + // dbus_message_iter_get_arg_type(&subiter), + // dbus_message_iter_get_signature(&subiter)); + if(dbus_message_iter_get_arg_type(&subiter) != DBUS_TYPE_DICT_ENTRY) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_start: array value is not an entry\n"); + goto error; + } + + dbus_message_iter_recurse(&subiter, &dictiter); + + if(dbus_message_iter_get_arg_type(&dictiter) != DBUS_TYPE_STRING) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_start: entry key is not a string\n"); + goto error; + } + + dbus_message_iter_get_basic(&dictiter, &key); + if(!key) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_start: failed to get entry key as value\n"); + goto error; + } + + if(strcmp(key, "restore_token") == 0) { + if(!dbus_message_iter_next(&dictiter)) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_start: missing restore_token value\n"); + goto error; 
+ } + + if(dbus_message_iter_get_arg_type(&dictiter) != DBUS_TYPE_VARIANT) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_start: restore_token is not a variant\n"); + goto error; + } + + DBusMessageIter variant_iter; + dbus_message_iter_recurse(&dictiter, &variant_iter); + + if(dbus_message_iter_get_arg_type(&variant_iter) != DBUS_TYPE_STRING) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_start: restore_token is not a string\n"); + goto error; + } + + char *restore_token_str = NULL; + dbus_message_iter_get_basic(&variant_iter, &restore_token_str); + + if(restore_token_str) { + if(self->screencast_restore_token) { + free(self->screencast_restore_token); + self->screencast_restore_token = NULL; + } + self->screencast_restore_token = strdup(restore_token_str); + //fprintf(stderr, "got restore token: %s\n", self->screencast_restore_token); + } + } else if(strcmp(key, "streams") == 0) { + if(!dbus_message_iter_next(&dictiter)) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_start: missing streams value\n"); + goto error; + } + + if(dbus_message_iter_get_arg_type(&dictiter) != DBUS_TYPE_VARIANT) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_start: streams value is not a variant\n"); + goto error; + } + + DBusMessageIter variant_iter; + dbus_message_iter_recurse(&dictiter, &variant_iter); + + if(dbus_message_iter_get_arg_type(&variant_iter) != DBUS_TYPE_ARRAY) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_start: streams value is not an array\n"); + goto error; + } + + int num_streams = dbus_message_iter_get_element_count(&variant_iter); + //fprintf(stderr, "num streams: %d\n", num_streams); + /* Skip over all streams except the last one, since kde can return multiple streams even if only 1 is requested. 
The last one is the valid one */ + for(int i = 0; i < num_streams - 1; ++i) { + screencast_stream_get_pipewire_node(&variant_iter); + } + + if(num_streams > 0) { + *pipewire_node = screencast_stream_get_pipewire_node(&variant_iter); + //fprintf(stderr, "pipewire node: %u\n", *pipewire_node); + } + } + + dbus_message_iter_next(&subiter); + } + + if(*pipewire_node == 0) { + fprintf(stderr, "gsr error: gsr_dbus_screencast_start: no pipewire node returned\n"); + goto error; + } + + dbus_message_unref(response_msg); + return 0; + + error: + dbus_message_unref(response_msg); + return -1; +} + +bool gsr_dbus_screencast_open_pipewire_remote(gsr_dbus *self, const char *session_handle, int *pipewire_fd) { + assert(session_handle); + *pipewire_fd = -1; + return gsr_dbus_call_screencast_method(self, "OpenPipeWireRemote", session_handle, NULL, NULL, 0, pipewire_fd, NULL); +} + +const char* gsr_dbus_screencast_get_restore_token(gsr_dbus *self) { + return self->screencast_restore_token; +} @@ -9,7 +9,6 @@ #include <dlfcn.h> #include <assert.h> #include <unistd.h> -#include <sys/capability.h> // TODO: rename gsr_egl to something else since this includes both egl and glx and in the future maybe vulkan too @@ -29,43 +28,23 @@ #define GLX_DEPTH_SIZE 12 #define GLX_RGBA_TYPE 0x8014 -#define GLX_CONTEXT_PRIORITY_LEVEL_EXT 0x3100 -#define GLX_CONTEXT_PRIORITY_HIGH_EXT 0x3101 -#define GLX_CONTEXT_PRIORITY_MEDIUM_EXT 0x3102 -#define GLX_CONTEXT_PRIORITY_LOW_EXT 0x3103 - -static void reset_cap_nice(void) { - cap_t caps = cap_get_proc(); - if(!caps) - return; - - const cap_value_t cap_to_remove = CAP_SYS_NICE; - cap_set_flag(caps, CAP_EFFECTIVE, 1, &cap_to_remove, CAP_CLEAR); - cap_set_flag(caps, CAP_PERMITTED, 1, &cap_to_remove, CAP_CLEAR); - cap_set_proc(caps); - cap_free(caps); -} - // TODO: Create egl context without surface (in other words, x11/wayland agnostic, doesn't require x11/wayland dependency) static bool gsr_egl_create_window(gsr_egl *self) { EGLConfig ecfg; int32_t num_config 
= 0; - // TODO: Use EGL_OPENGL_ES_BIT as amd requires that for external texture, but that breaks software encoding const int32_t attr[] = { EGL_BUFFER_SIZE, 24, - EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT, + EGL_RENDERABLE_TYPE, EGL_OPENGL_ES3_BIT, EGL_NONE, EGL_NONE }; const int32_t ctxattr[] = { EGL_CONTEXT_CLIENT_VERSION, 2, - EGL_CONTEXT_PRIORITY_LEVEL_IMG, EGL_CONTEXT_PRIORITY_HIGH_IMG, /* requires cap_sys_nice, ignored otherwise */ EGL_NONE, EGL_NONE }; - // TODO: Use EGL_OPENGL_ES_API as amd requires that for external texture, but that breaks software encoding - self->eglBindAPI(EGL_OPENGL_API); + self->eglBindAPI(EGL_OPENGL_ES_API); self->egl_display = self->eglGetDisplay((EGLNativeDisplayType)gsr_window_get_display(self->window)); if(!self->egl_display) { @@ -100,11 +79,9 @@ static bool gsr_egl_create_window(gsr_egl *self) { goto fail; } - reset_cap_nice(); return true; fail: - reset_cap_nice(); gsr_egl_unload(self); return false; } @@ -300,6 +277,7 @@ static bool gsr_egl_load_gl(gsr_egl *self, void *library) { { (void**)&self->glGetTexLevelParameteriv, "glGetTexLevelParameteriv" }, { (void**)&self->glTexImage2D, "glTexImage2D" }, { (void**)&self->glTexSubImage2D, "glTexSubImage2D" }, + { (void**)&self->glTexStorage2D, "glTexStorage2D" }, { (void**)&self->glGetTexImage, "glGetTexImage" }, { (void**)&self->glGenFramebuffers, "glGenFramebuffers" }, { (void**)&self->glBindFramebuffer, "glBindFramebuffer" }, @@ -481,9 +459,9 @@ bool gsr_egl_load(gsr_egl *self, gsr_window *window, bool is_monitor_capture, bo /* This fixes nvenc codecs unable to load on openSUSE tumbleweed because of a cuda error. 
Don't ask me why */ const bool inside_flatpak = getenv("FLATPAK_ID") != NULL; if(inside_flatpak) - system("flatpak-spawn --host -- nvidia-smi -f /dev/null"); + system("flatpak-spawn --host -- sh -c 'grep -q openSUSE /etc/os-release && nvidia-smi -f /dev/null'"); else - system("nvidia-smi -f /dev/null"); + system("sh -c 'grep -q openSUSE /etc/os-release && nvidia-smi -f /dev/null'"); } return true; @@ -538,15 +516,7 @@ void gsr_egl_unload(gsr_egl *self) { } void gsr_egl_swap_buffers(gsr_egl *self) { - /* This uses less cpu than swap buffer on nvidia */ - // TODO: Do these and remove swap - //self->glFlush(); - //self->glFinish(); - if(self->egl_display) { - self->eglSwapBuffers(self->egl_display, self->egl_surface); - } else if(gsr_window_get_display_server(self->window) == GSR_DISPLAY_SERVER_X11) { - Display *display = gsr_window_get_display(self->window); - const Window window = (Window)gsr_window_get_window(self->window); - self->glXSwapBuffers(display, window); - } + self->glFlush(); + // TODO: Use the minimal barrier required + self->glMemoryBarrier(GL_ALL_BARRIER_BITS); // GL_SHADER_IMAGE_ACCESS_BARRIER_BIT } diff --git a/src/encoder/video/software.c b/src/encoder/video/software.c index 627cdea..d8d9828 100644 --- a/src/encoder/video/software.c +++ b/src/encoder/video/software.c @@ -71,16 +71,15 @@ void gsr_video_encoder_software_stop(gsr_video_encoder_software *self, AVCodecCo } static void gsr_video_encoder_software_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame, gsr_color_conversion *color_conversion) { - gsr_video_encoder_software *self = encoder->priv; + (void)encoder; + //gsr_video_encoder_software *self = encoder->priv; // TODO: hdr support const unsigned int formats[2] = { GL_RED, GL_RG }; + const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size for(int i = 0; i < 2; ++i) { - self->params.egl->glBindTexture(GL_TEXTURE_2D, self->target_textures[i]); - // We could use glGetTexSubImage and then we wouldn't 
have to use a specific linesize (LINESIZE_ALIGNMENT) that adds padding, - // but glGetTexSubImage is only available starting from opengl 4.5. - self->params.egl->glGetTexImage(GL_TEXTURE_2D, 0, formats[i], GL_UNSIGNED_BYTE, frame->data[i]); + // TODO: Use glPixelStore? + gsr_color_conversion_read_destination_texture(color_conversion, i, 0, 0, frame->width / div[i], frame->height / div[i], formats[i], GL_UNSIGNED_BYTE, frame->data[i]); } - self->params.egl->glBindTexture(GL_TEXTURE_2D, 0); // cap_kms->kms.base.egl->eglSwapBuffers(cap_kms->kms.base.egl->egl_display, cap_kms->kms.base.egl->egl_surface); //self->params.egl->glFlush(); diff --git a/src/encoder/video/vaapi.c b/src/encoder/video/vaapi.c index c7ccd26..0daf4d8 100644 --- a/src/encoder/video/vaapi.c +++ b/src/encoder/video/vaapi.c @@ -92,10 +92,6 @@ static bool gsr_video_encoder_vaapi_setup_textures(gsr_video_encoder_vaapi *self if(self->prime.fourcc == VA_FOURCC_NV12 || self->prime.fourcc == VA_FOURCC_P010) { const uint32_t *formats = self->prime.fourcc == VA_FOURCC_NV12 ? 
formats_nv12 : formats_p010; const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size - const float border_colors[2][4] = { - {0.0f, 0.0f, 0.0f, 1.0f}, - {0.5f, 0.5f, 0.0f, 1.0f} - }; self->params.egl->glGenTextures(2, self->target_textures); for(int i = 0; i < 2; ++i) { @@ -125,9 +121,6 @@ static bool gsr_video_encoder_vaapi_setup_textures(gsr_video_encoder_vaapi *self } self->params.egl->glBindTexture(GL_TEXTURE_2D, self->target_textures[i]); - self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER); - self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER); - self->params.egl->glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, border_colors[i]); self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); diff --git a/src/image_writer.c b/src/image_writer.c index e153a34..3d731a0 100644 --- a/src/image_writer.c +++ b/src/image_writer.c @@ -71,11 +71,15 @@ static bool gsr_image_writer_write_opengl_texture_to_file(gsr_image_writer *self return false; } - // TODO: hdr support - self->egl->glBindTexture(GL_TEXTURE_2D, self->texture); - // We could use glGetTexSubImage, but it's only available starting from opengl 4.5 - self->egl->glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, frame_data); - self->egl->glBindTexture(GL_TEXTURE_2D, 0); + unsigned int fbo = 0; + self->egl->glGenFramebuffers(1, &fbo); + self->egl->glBindFramebuffer(GL_FRAMEBUFFER, fbo); + self->egl->glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, self->texture, 0); + + self->egl->glReadPixels(0, 0, self->width, self->height, GL_RGBA, GL_UNSIGNED_BYTE, frame_data); + + self->egl->glBindFramebuffer(GL_FRAMEBUFFER, 0); + self->egl->glDeleteFramebuffers(1, &fbo); self->egl->glFlush(); self->egl->glFinish(); diff --git a/src/main.cpp b/src/main.cpp index 
d716fcd..d04b52b 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -5,7 +5,7 @@ extern "C" { #include "../include/capture/kms.h" #ifdef GSR_PORTAL #include "../include/capture/portal.h" -#include "../dbus/client/dbus_client.h" +#include "../include/dbus.h" #endif #ifdef GSR_APP_AUDIO #include "../include/pipewire_audio.h" @@ -47,6 +47,7 @@ extern "C" { extern "C" { #include <libavutil/pixfmt.h> #include <libavcodec/avcodec.h> +#include <libavcodec/defs.h> #include <libavformat/avformat.h> #include <libavutil/opt.h> #include <libswresample/swresample.h> @@ -196,7 +197,7 @@ static AVSampleFormat audio_codec_get_sample_format(AVCodecContext *audio_codec_ supports_s16 = false; if(!supports_s16 && !supports_flt) { - fprintf(stderr, "Warning: opus audio codec is chosen but your ffmpeg version does not support s16/flt sample format and performance might be slightly worse.\n"); + fprintf(stderr, "gsr warning: opus audio codec is chosen but your ffmpeg version does not support s16/flt sample format and performance might be slightly worse.\n"); fprintf(stderr, " You can either rebuild ffmpeg with libopus instead of the built-in opus, use the flatpak version of gpu screen recorder or record with aac audio codec instead (-ac aac).\n"); fprintf(stderr, " Falling back to fltp audio sample format instead.\n"); } @@ -250,7 +251,7 @@ static AVCodecContext* create_audio_codec_context(int fps, gsr_audio_codec audio (void)fps; const AVCodec *codec = avcodec_find_encoder(audio_codec_get_id(audio_codec)); if (!codec) { - fprintf(stderr, "Error: Could not find %s audio encoder\n", audio_codec_get_name(audio_codec)); + fprintf(stderr, "gsr error: Could not find %s audio encoder\n", audio_codec_get_name(audio_codec)); _exit(1); } @@ -261,8 +262,13 @@ static AVCodecContext* create_audio_codec_context(int fps, gsr_audio_codec audio codec_context->sample_fmt = audio_codec_get_sample_format(codec_context, audio_codec, codec, mix_audio); codec_context->bit_rate = audio_bitrate == 0 ? 
audio_codec_get_get_bitrate(audio_codec) : audio_bitrate; codec_context->sample_rate = AUDIO_SAMPLE_RATE; - if(audio_codec == GSR_AUDIO_CODEC_AAC) + if(audio_codec == GSR_AUDIO_CODEC_AAC) { +#if LIBAVCODEC_VERSION_MAJOR < 62 codec_context->profile = FF_PROFILE_AAC_LOW; +#else + codec_context->profile = AV_PROFILE_AAC_LOW; +#endif + } #if LIBAVCODEC_VERSION_MAJOR < 60 codec_context->channel_layout = AV_CH_LAYOUT_STEREO; codec_context->channels = 2; @@ -649,7 +655,6 @@ static void video_software_set_qp(AVCodecContext *codec_context, gsr_video_quali } static void open_video_software(AVCodecContext *codec_context, const args_parser &arg_parser) { - const gsr_color_depth color_depth = video_codec_to_bit_depth(arg_parser.video_codec); const bool hdr = video_codec_is_hdr(arg_parser.video_codec); AVDictionary *options = nullptr; @@ -658,7 +663,6 @@ static void open_video_software(AVCodecContext *codec_context, const args_parser av_dict_set(&options, "preset", "veryfast", 0); av_dict_set(&options, "tune", "film", 0); - av_dict_set(&options, "profile", "high", 0); if(codec_context->codec_id == AV_CODEC_ID_H264) { av_dict_set(&options, "coder", "cabac", 0); // TODO: cavlc is faster than cabac but worse compression. Which to use? 
@@ -668,7 +672,7 @@ static void open_video_software(AVCodecContext *codec_context, const args_parser int ret = avcodec_open2(codec_context, codec_context->codec, &options); if (ret < 0) { - fprintf(stderr, "Error: Could not open video codec: %s\n", av_error_to_string(ret)); + fprintf(stderr, "gsr error: Could not open video codec: %s\n", av_error_to_string(ret)); _exit(1); } } @@ -916,7 +920,7 @@ static void open_video_hardware(AVCodecContext *codec_context, bool low_power, c int ret = avcodec_open2(codec_context, codec_context->codec, &options); if (ret < 0) { - fprintf(stderr, "Error: Could not open video codec: %s\n", av_error_to_string(ret)); + fprintf(stderr, "gsr error: Could not open video codec: %s\n", av_error_to_string(ret)); _exit(1); } } @@ -1022,7 +1026,7 @@ static std::string get_time_only_str() { static AVStream* create_stream(AVFormatContext *av_format_context, AVCodecContext *codec_context) { AVStream *stream = avformat_new_stream(av_format_context, nullptr); if (!stream) { - fprintf(stderr, "Error: Could not allocate stream\n"); + fprintf(stderr, "gsr error: Could not allocate stream\n"); _exit(1); } stream->id = av_format_context->nb_streams - 1; @@ -1035,7 +1039,7 @@ static void run_recording_saved_script_async(const char *script_file, const char char script_file_full[PATH_MAX]; script_file_full[0] = '\0'; if(!realpath(script_file, script_file_full)) { - fprintf(stderr, "Error: script file not found: %s\n", script_file); + fprintf(stderr, "gsr error: script file not found: %s\n", script_file); return; } @@ -1240,11 +1244,11 @@ static std::string create_new_recording_filepath_from_timestamp(std::string dire if(date_folders) { std::string output_folder = directory + '/' + get_date_only_str(); if(create_directory_recursive(&output_folder[0]) != 0) - fprintf(stderr, "Error: failed to create directory: %s\n", output_folder.c_str()); + fprintf(stderr, "gsr error: failed to create directory: %s\n", output_folder.c_str()); output_filepath = 
output_folder + "/" + filename_prefix + "_" + get_time_only_str() + "." + file_extension; } else { if(create_directory_recursive(&directory[0]) != 0) - fprintf(stderr, "Error: failed to create directory: %s\n", directory.c_str()); + fprintf(stderr, "gsr error: failed to create directory: %s\n", directory.c_str()); output_filepath = directory + "/" + filename_prefix + "_" + get_date_str() + "." + file_extension; } return output_filepath; @@ -1349,7 +1353,7 @@ static void save_replay_async(AVCodecContext *video_codec_context, int video_str const int ret = av_write_frame(recording_start_result.av_format_context, &av_packet); if(ret < 0) - fprintf(stderr, "Error: Failed to write frame index %d to muxer, reason: %s (%d)\n", av_packet.stream_index, av_error_to_string(ret), ret); + fprintf(stderr, "gsr error: Failed to write frame index %d to muxer, reason: %s (%d)\n", av_packet.stream_index, av_error_to_string(ret), ret); free(replay_packet_data); @@ -1501,7 +1505,7 @@ static int init_filter_graph(AVCodecContext* audio_codec_context, AVFilterGraph* snprintf(args, sizeof(args), "inputs=%d:normalize=%s", (int)num_sources, normalize ? "true" : "false"); #else snprintf(args, sizeof(args), "inputs=%d", (int)num_sources); - fprintf(stderr, "Warning: your ffmpeg version doesn't support disabling normalizing of mixed audio. Volume might be lower than expected\n"); + fprintf(stderr, "gsr warning: your ffmpeg version doesn't support disabling normalizing of mixed audio. 
Volume might be lower than expected\n"); #endif err = avfilter_graph_create_filter(&mix_ctx, mix_filter, "amix", args, NULL, filter_graph); @@ -1843,15 +1847,15 @@ static void list_supported_capture_options(const gsr_window *window, const char if(!wayland) return; - gsr_dbus_client dbus_client; - if(!gsr_dbus_client_init(&dbus_client, NULL)) + gsr_dbus dbus; + if(!gsr_dbus_init(&dbus, NULL)) return; - char session_handle[128]; - if(gsr_dbus_client_screencast_create_session(&dbus_client, session_handle, sizeof(session_handle)) == 0) + char *session_handle = NULL; + if(gsr_dbus_screencast_create_session(&dbus, &session_handle) == 0) puts("portal"); - gsr_dbus_client_deinit(&dbus_client); + gsr_dbus_deinit(&dbus); #endif } @@ -1868,7 +1872,7 @@ static void info_command(void *userdata) { Display *dpy = XOpenDisplay(nullptr); if (!dpy) { wayland = true; - fprintf(stderr, "Warning: failed to connect to the X server. Assuming wayland is running without Xwayland\n"); + fprintf(stderr, "gsr warning: failed to connect to the X server. Assuming wayland is running without Xwayland\n"); } XSetErrorHandler(x11_error_handler); @@ -1881,13 +1885,13 @@ static void info_command(void *userdata) { // Disable prime-run and similar options as it doesn't work, the monitor to capture has to be run on the same device. // This is fine on wayland since nvidia uses drm interface there and the monitor query checks the monitors connected // to the drm device. - fprintf(stderr, "Warning: use of prime-run on X11 is not supported. Disabling prime-run\n"); + fprintf(stderr, "gsr warning: use of prime-run on X11 is not supported. 
Disabling prime-run\n"); disable_prime_run(); } gsr_window *window = gsr_window_create(dpy, wayland); if(!window) { - fprintf(stderr, "Error: failed to create window\n"); + fprintf(stderr, "gsr error: failed to create window\n"); _exit(1); } @@ -1902,7 +1906,7 @@ static void info_command(void *userdata) { if(monitor_capture_use_drm(window, egl.gpu_info.vendor)) { // TODO: Allow specifying another card, and in other places if(!gsr_get_valid_card_path(&egl, egl.card_path, true)) { - fprintf(stderr, "Error: no /dev/dri/cardX device found. Make sure that you have at least one monitor connected\n"); + fprintf(stderr, "gsr error: no /dev/dri/cardX device found. Make sure that you have at least one monitor connected\n"); list_monitors = false; } } @@ -1983,7 +1987,7 @@ static void list_capture_options_command(const char *card_path, void *userdata) Display *dpy = XOpenDisplay(nullptr); if (!dpy) { wayland = true; - fprintf(stderr, "Warning: failed to connect to the X server. Assuming wayland is running without Xwayland\n"); + fprintf(stderr, "gsr warning: failed to connect to the X server. Assuming wayland is running without Xwayland\n"); } XSetErrorHandler(x11_error_handler); @@ -1996,13 +2000,13 @@ static void list_capture_options_command(const char *card_path, void *userdata) // Disable prime-run and similar options as it doesn't work, the monitor to capture has to be run on the same device. // This is fine on wayland since nvidia uses drm interface there and the monitor query checks the monitors connected // to the drm device. - fprintf(stderr, "Warning: use of prime-run on X11 is not supported. Disabling prime-run\n"); + fprintf(stderr, "gsr warning: use of prime-run on X11 is not supported. 
Disabling prime-run\n"); disable_prime_run(); } gsr_window *window = gsr_window_create(dpy, wayland); if(!window) { - fprintf(stderr, "Error: failed to create window\n"); + fprintf(stderr, "gsr error: failed to create window\n"); _exit(1); } @@ -2020,7 +2024,7 @@ static void list_capture_options_command(const char *card_path, void *userdata) if(monitor_capture_use_drm(window, egl.gpu_info.vendor)) { // TODO: Allow specifying another card, and in other places if(!gsr_get_valid_card_path(&egl, egl.card_path, true)) { - fprintf(stderr, "Error: no /dev/dri/cardX device found. Make sure that you have at least one monitor connected\n"); + fprintf(stderr, "gsr error: no /dev/dri/cardX device found. Make sure that you have at least one monitor connected\n"); list_monitors = false; } } @@ -2053,7 +2057,7 @@ static std::string validate_monitor_get_valid(const gsr_egl *egl, const char* wi window_result = data.output_name; free(data.output_name); } else { - fprintf(stderr, "Error: no usable output found\n"); + fprintf(stderr, "gsr error: no usable output found\n"); _exit(51); } } else if(capture_use_drm || (strcmp(window_result.c_str(), "screen-direct") != 0 && strcmp(window_result.c_str(), "screen-direct-force") != 0)) { @@ -2123,7 +2127,7 @@ static gsr_capture* create_monitor_capture(const args_parser &arg_parser, gsr_eg const bool direct_capture = strcmp(arg_parser.window, "screen-direct") == 0 || strcmp(arg_parser.window, "screen-direct-force") == 0; if(direct_capture) { capture_target = "screen"; - fprintf(stderr, "Warning: %s capture option is not recommended unless you use G-SYNC as Nvidia has driver issues that can cause your system or games to freeze/crash.\n", arg_parser.window); + fprintf(stderr, "gsr warning: %s capture option is not recommended unless you use G-SYNC as Nvidia has driver issues that can cause your system or games to freeze/crash.\n", arg_parser.window); } gsr_capture_nvfbc_params nvfbc_params; @@ -2146,7 +2150,7 @@ static std::string 
region_get_data(gsr_egl *egl, vec2i *region_size, vec2i *regi if(window.empty()) { const bool is_x11 = gsr_window_get_display_server(egl->window) == GSR_DISPLAY_SERVER_X11; const gsr_connection_type connection_type = is_x11 ? GSR_CONNECTION_X11 : GSR_CONNECTION_DRM; - fprintf(stderr, "Error: the region %dx%d+%d+%d doesn't match any monitor. Available monitors and their regions:\n", region_size->x, region_size->y, region_position->x, region_position->y); + fprintf(stderr, "gsr error: the region %dx%d+%d+%d doesn't match any monitor. Available monitors and their regions:\n", region_size->x, region_size->y, region_position->x, region_position->y); MonitorOutputCallbackUserdata userdata; userdata.window = egl->window; @@ -2173,12 +2177,12 @@ static gsr_capture* create_capture_impl(args_parser &arg_parser, gsr_egl *egl, b gsr_capture *capture = nullptr; if(strcmp(arg_parser.window, "focused") == 0) { if(wayland) { - fprintf(stderr, "Error: GPU Screen Recorder window capture only works in a pure X11 session. Xwayland is not supported. You can record a monitor instead on wayland\n"); + fprintf(stderr, "gsr error: GPU Screen Recorder window capture only works in a pure X11 session. Xwayland is not supported. You can record a monitor instead on wayland\n"); _exit(2); } if(arg_parser.output_resolution.x <= 0 || arg_parser.output_resolution.y <= 0) { - fprintf(stderr, "Error: invalid value for option -s '%dx%d' when using -w focused option. expected width and height to be greater than 0\n", arg_parser.output_resolution.x, arg_parser.output_resolution.y); + fprintf(stderr, "gsr error: invalid value for option -s '%dx%d' when using -w focused option. 
expected width and height to be greater than 0\n", arg_parser.output_resolution.x, arg_parser.output_resolution.y); args_parser_print_usage(); _exit(1); } @@ -2188,7 +2192,7 @@ static gsr_capture* create_capture_impl(args_parser &arg_parser, gsr_egl *egl, b #ifdef GSR_PORTAL // Desktop portal capture on x11 doesn't seem to be hardware accelerated if(!wayland) { - fprintf(stderr, "Error: desktop portal capture is not supported on X11\n"); + fprintf(stderr, "gsr error: desktop portal capture is not supported on X11\n"); _exit(1); } @@ -2202,7 +2206,7 @@ static gsr_capture* create_capture_impl(args_parser &arg_parser, gsr_egl *egl, b if(!capture) _exit(1); #else - fprintf(stderr, "Error: option '-w portal' used but GPU Screen Recorder was compiled without desktop portal support. Please recompile GPU Screen recorder with the -Dportal=true option\n"); + fprintf(stderr, "gsr error: option '-w portal' used but GPU Screen Recorder was compiled without desktop portal support. Please recompile GPU Screen recorder with the -Dportal=true option\n"); _exit(2); #endif } else if(strcmp(arg_parser.window, "region") == 0) { @@ -2219,14 +2223,14 @@ static gsr_capture* create_capture_impl(args_parser &arg_parser, gsr_egl *egl, b _exit(1); } else { if(wayland) { - fprintf(stderr, "Error: GPU Screen Recorder window capture only works in a pure X11 session. Xwayland is not supported. You can record a monitor instead on wayland or use -w portal option which supports window capture if your wayland compositor supports window capture\n"); + fprintf(stderr, "gsr error: GPU Screen Recorder window capture only works in a pure X11 session. Xwayland is not supported. 
You can record a monitor instead on wayland or use -w portal option which supports window capture if your wayland compositor supports window capture\n"); _exit(2); } errno = 0; src_window_id = strtol(arg_parser.window, nullptr, 0); if(src_window_id == None || errno == EINVAL) { - fprintf(stderr, "Error: invalid window number %s\n", arg_parser.window); + fprintf(stderr, "gsr error: invalid window number %s\n", arg_parser.window); args_parser_print_usage(); _exit(1); } @@ -2402,13 +2406,13 @@ static std::vector<MergedAudioInputs> parse_audio_inputs(const AudioDevices &aud if(request_audio_input.name == "default_output") { if(audio_devices.default_output.empty()) { - fprintf(stderr, "Error: -a default_output was specified but no default audio output is specified in the audio server\n"); + fprintf(stderr, "gsr error: -a default_output was specified but no default audio output is specified in the audio server\n"); _exit(2); } match = true; } else if(request_audio_input.name == "default_input") { if(audio_devices.default_input.empty()) { - fprintf(stderr, "Error: -a default_input was specified but no default audio input is specified in the audio server\n"); + fprintf(stderr, "gsr error: -a default_input was specified but no default audio input is specified in the audio server\n"); _exit(2); } match = true; @@ -2419,7 +2423,7 @@ static std::vector<MergedAudioInputs> parse_audio_inputs(const AudioDevices &aud } if(!match) { - fprintf(stderr, "Error: Audio device '%s' is not a valid audio device, expected one of:\n", request_audio_input.name.c_str()); + fprintf(stderr, "gsr error: Audio device '%s' is not a valid audio device, expected one of:\n", request_audio_input.name.c_str()); if(!audio_devices.default_output.empty()) fprintf(stderr, " default_output (Default output)\n"); if(!audio_devices.default_input.empty()) @@ -2503,7 +2507,7 @@ static gsr_audio_codec select_audio_codec_with_fallback(gsr_audio_codec audio_co if(file_extension == "webm") { //audio_codec_to_use = 
"opus"; audio_codec = GSR_AUDIO_CODEC_OPUS; - fprintf(stderr, "Warning: .webm files only support opus audio codec, changing audio codec from aac to opus\n"); + fprintf(stderr, "gsr warning: .webm files only support opus audio codec, changing audio codec from aac to opus\n"); } break; } @@ -2512,7 +2516,7 @@ static gsr_audio_codec select_audio_codec_with_fallback(gsr_audio_codec audio_co if(file_extension != "mp4" && file_extension != "mkv" && file_extension != "webm") { //audio_codec_to_use = "aac"; audio_codec = GSR_AUDIO_CODEC_AAC; - fprintf(stderr, "Warning: opus audio codec is only supported by .mp4, .mkv and .webm files, falling back to aac instead\n"); + fprintf(stderr, "gsr warning: opus audio codec is only supported by .mp4, .mkv and .webm files, falling back to aac instead\n"); } break; } @@ -2521,16 +2525,16 @@ static gsr_audio_codec select_audio_codec_with_fallback(gsr_audio_codec audio_co if(file_extension == "webm") { //audio_codec_to_use = "opus"; audio_codec = GSR_AUDIO_CODEC_OPUS; - fprintf(stderr, "Warning: .webm files only support opus audio codec, changing audio codec from flac to opus\n"); + fprintf(stderr, "gsr warning: .webm files only support opus audio codec, changing audio codec from flac to opus\n"); } else if(file_extension != "mp4" && file_extension != "mkv") { //audio_codec_to_use = "aac"; audio_codec = GSR_AUDIO_CODEC_AAC; - fprintf(stderr, "Warning: flac audio codec is only supported by .mp4 and .mkv files, falling back to aac instead\n"); + fprintf(stderr, "gsr warning: flac audio codec is only supported by .mp4 and .mkv files, falling back to aac instead\n"); } else if(uses_amix) { // TODO: remove this? is it true anymore? 
//audio_codec_to_use = "opus"; audio_codec = GSR_AUDIO_CODEC_OPUS; - fprintf(stderr, "Warning: flac audio codec is not supported when mixing audio sources, falling back to opus instead\n"); + fprintf(stderr, "gsr warning: flac audio codec is not supported when mixing audio sources, falling back to opus instead\n"); } break; } @@ -2561,7 +2565,7 @@ static const AVCodec* pick_video_codec(gsr_video_codec *video_codec, gsr_egl *eg gsr_supported_video_codecs supported_video_codecs; if(!get_supported_video_codecs(egl, *video_codec, use_software_video_encoder, true, &supported_video_codecs)) { - fprintf(stderr, "Error: failed to query for supported video codecs\n"); + fprintf(stderr, "gsr error: failed to query for supported video codecs\n"); _exit(11); } @@ -2631,7 +2635,7 @@ static const AVCodec* pick_video_codec(gsr_video_codec *video_codec, gsr_egl *eg if(!video_codec_auto && !video_codec_f && !is_flv) { switch(*video_codec) { case GSR_VIDEO_CODEC_H264: { - fprintf(stderr, "Warning: selected video codec h264 is not supported, trying hevc instead\n"); + fprintf(stderr, "gsr warning: selected video codec h264 is not supported, trying hevc instead\n"); *video_codec = GSR_VIDEO_CODEC_HEVC; if(supported_video_codecs.hevc.supported) video_codec_f = get_ffmpeg_video_codec(*video_codec, egl->gpu_info.vendor); @@ -2640,7 +2644,7 @@ static const AVCodec* pick_video_codec(gsr_video_codec *video_codec, gsr_egl *eg case GSR_VIDEO_CODEC_HEVC: case GSR_VIDEO_CODEC_HEVC_HDR: case GSR_VIDEO_CODEC_HEVC_10BIT: { - fprintf(stderr, "Warning: selected video codec hevc is not supported, trying h264 instead\n"); + fprintf(stderr, "gsr warning: selected video codec hevc is not supported, trying h264 instead\n"); *video_codec = GSR_VIDEO_CODEC_H264; if(supported_video_codecs.h264.supported) video_codec_f = get_ffmpeg_video_codec(*video_codec, egl->gpu_info.vendor); @@ -2649,7 +2653,7 @@ static const AVCodec* pick_video_codec(gsr_video_codec *video_codec, gsr_egl *eg case GSR_VIDEO_CODEC_AV1: 
case GSR_VIDEO_CODEC_AV1_HDR: case GSR_VIDEO_CODEC_AV1_10BIT: { - fprintf(stderr, "Warning: selected video codec av1 is not supported, trying h264 instead\n"); + fprintf(stderr, "gsr warning: selected video codec av1 is not supported, trying h264 instead\n"); *video_codec = GSR_VIDEO_CODEC_H264; if(supported_video_codecs.h264.supported) video_codec_f = get_ffmpeg_video_codec(*video_codec, egl->gpu_info.vendor); @@ -2660,11 +2664,11 @@ static const AVCodec* pick_video_codec(gsr_video_codec *video_codec, gsr_egl *eg // TODO: Cant fallback to other codec because webm only supports vp8/vp9 break; case GSR_VIDEO_CODEC_H264_VULKAN: { - fprintf(stderr, "Warning: selected video codec h264_vulkan is not supported, trying h264 instead\n"); + fprintf(stderr, "gsr warning: selected video codec h264_vulkan is not supported, trying h264 instead\n"); *video_codec = GSR_VIDEO_CODEC_H264; // Need to do a query again because this time it's without vulkan if(!get_supported_video_codecs(egl, *video_codec, use_software_video_encoder, true, &supported_video_codecs)) { - fprintf(stderr, "Error: failed to query for supported video codecs\n"); + fprintf(stderr, "gsr error: failed to query for supported video codecs\n"); _exit(11); } if(supported_video_codecs.h264.supported) @@ -2672,11 +2676,11 @@ static const AVCodec* pick_video_codec(gsr_video_codec *video_codec, gsr_egl *eg break; } case GSR_VIDEO_CODEC_HEVC_VULKAN: { - fprintf(stderr, "Warning: selected video codec hevc_vulkan is not supported, trying hevc instead\n"); + fprintf(stderr, "gsr warning: selected video codec hevc_vulkan is not supported, trying hevc instead\n"); *video_codec = GSR_VIDEO_CODEC_HEVC; // Need to do a query again because this time it's without vulkan if(!get_supported_video_codecs(egl, *video_codec, use_software_video_encoder, true, &supported_video_codecs)) { - fprintf(stderr, "Error: failed to query for supported video codecs\n"); + fprintf(stderr, "gsr error: failed to query for supported video codecs\n"); 
_exit(11); } if(supported_video_codecs.hevc.supported) @@ -2688,7 +2692,7 @@ static const AVCodec* pick_video_codec(gsr_video_codec *video_codec, gsr_egl *eg if(!video_codec_f) { const char *video_codec_name = video_codec_to_string(*video_codec); - fprintf(stderr, "Error: your gpu does not support '%s' video codec. If you are sure that your gpu does support '%s' video encoding and you are using an AMD/Intel GPU,\n" + fprintf(stderr, "gsr error: your gpu does not support '%s' video codec. If you are sure that your gpu does support '%s' video encoding and you are using an AMD/Intel GPU,\n" " then make sure you have installed the GPU specific vaapi packages (intel-media-driver, libva-intel-driver, libva-mesa-driver and linux-firmware).\n" " It's also possible that your distro has disabled hardware accelerated video encoding for '%s' video codec.\n" " This may be the case on corporate distros such as Manjaro, Fedora or OpenSUSE.\n" @@ -2710,10 +2714,10 @@ static const AVCodec* select_video_codec_with_fallback(gsr_video_codec *video_co const bool video_codec_auto = *video_codec == (gsr_video_codec)GSR_VIDEO_CODEC_AUTO; if(video_codec_auto) { if(strcmp(file_extension, "webm") == 0) { - fprintf(stderr, "Info: using vp8 encoder because a codec was not specified and the file extension is .webm\n"); + fprintf(stderr, "gsr info: using vp8 encoder because a codec was not specified and the file extension is .webm\n"); *video_codec = GSR_VIDEO_CODEC_VP8; } else { - fprintf(stderr, "Info: using h264 encoder because a codec was not specified\n"); + fprintf(stderr, "gsr info: using h264 encoder because a codec was not specified\n"); *video_codec = GSR_VIDEO_CODEC_H264; } } @@ -2723,13 +2727,13 @@ static const AVCodec* select_video_codec_with_fallback(gsr_video_codec *video_co if(is_flv) { if(*video_codec != GSR_VIDEO_CODEC_H264) { *video_codec = GSR_VIDEO_CODEC_H264; - fprintf(stderr, "Warning: hevc/av1 is not compatible with flv, falling back to h264 instead.\n"); + 
fprintf(stderr, "gsr warning: hevc/av1 is not compatible with flv, falling back to h264 instead.\n"); } // if(audio_codec != GSR_AUDIO_CODEC_AAC) { // audio_codec_to_use = "aac"; // audio_codec = GSR_AUDIO_CODEC_AAC; - // fprintf(stderr, "Warning: flv only supports aac, falling back to aac instead.\n"); + // fprintf(stderr, "gsr warning: flv only supports aac, falling back to aac instead.\n"); // } } @@ -2737,18 +2741,18 @@ static const AVCodec* select_video_codec_with_fallback(gsr_video_codec *video_co if(is_hls) { if(video_codec_is_av1(*video_codec)) { *video_codec = GSR_VIDEO_CODEC_HEVC; - fprintf(stderr, "Warning: av1 is not compatible with hls (m3u8), falling back to hevc instead.\n"); + fprintf(stderr, "gsr warning: av1 is not compatible with hls (m3u8), falling back to hevc instead.\n"); } // if(audio_codec != GSR_AUDIO_CODEC_AAC) { // audio_codec_to_use = "aac"; // audio_codec = GSR_AUDIO_CODEC_AAC; - // fprintf(stderr, "Warning: hls (m3u8) only supports aac, falling back to aac instead.\n"); + // fprintf(stderr, "gsr warning: hls (m3u8) only supports aac, falling back to aac instead.\n"); // } } if(use_software_video_encoder && *video_codec != GSR_VIDEO_CODEC_H264) { - fprintf(stderr, "Error: \"-encoder cpu\" option is currently only available when using h264 codec option (-k)\n"); + fprintf(stderr, "gsr error: \"-encoder cpu\" option is currently only available when using h264 codec option (-k)\n"); args_parser_print_usage(); _exit(1); } @@ -2774,7 +2778,7 @@ static std::vector<AudioDeviceData> create_device_audio_inputs(const std::vector } else { const std::string description = "gsr-" + audio_input.name; if(sound_device_get_by_name(&audio_device.sound_device, audio_input.name.c_str(), description.c_str(), num_channels, audio_codec_context->frame_size, audio_codec_context_get_audio_format(audio_codec_context)) != 0) { - fprintf(stderr, "Error: failed to get \"%s\" audio device\n", audio_input.name.c_str()); + fprintf(stderr, "gsr error: failed to get 
\"%s\" audio device\n", audio_input.name.c_str()); _exit(1); } } @@ -2809,7 +2813,7 @@ static AudioDeviceData create_application_audio_audio_input(const MergedAudioInp combined_sink_name += ".monitor"; if(sound_device_get_by_name(&audio_device.sound_device, combined_sink_name.c_str(), "gpu-screen-recorder", num_channels, audio_codec_context->frame_size, audio_codec_context_get_audio_format(audio_codec_context)) != 0) { - fprintf(stderr, "Error: failed to setup audio recording to combined sink\n"); + fprintf(stderr, "gsr error: failed to setup audio recording to combined sink\n"); _exit(1); } @@ -2867,7 +2871,7 @@ static bool get_image_format_from_filename(const char *filename, gsr_image_forma static bool av_open_file_write_header(AVFormatContext *av_format_context, const char *filename) { int ret = avio_open(&av_format_context->pb, filename, AVIO_FLAG_WRITE); if(ret < 0) { - fprintf(stderr, "Error: Could not open '%s': %s\n", filename, av_error_to_string(ret)); + fprintf(stderr, "gsr error: Could not open '%s': %s\n", filename, av_error_to_string(ret)); return false; } @@ -2965,7 +2969,7 @@ int main(int argc, char **argv) { unsetenv("vblank_mode"); if(geteuid() == 0) { - fprintf(stderr, "Error: don't run gpu-screen-recorder as the root user\n"); + fprintf(stderr, "gsr error: don't run gpu-screen-recorder as the root user\n"); _exit(1); } @@ -3024,7 +3028,7 @@ int main(int argc, char **argv) { Display *dpy = XOpenDisplay(nullptr); if (!dpy) { wayland = true; - fprintf(stderr, "Warning: failed to connect to the X server. Assuming wayland is running without Xwayland\n"); + fprintf(stderr, "gsr warning: failed to connect to the X server. Assuming wayland is running without Xwayland\n"); } XSetErrorHandler(x11_error_handler); @@ -3037,18 +3041,18 @@ int main(int argc, char **argv) { // Disable prime-run and similar options as it doesn't work, the monitor to capture has to be run on the same device. 
// This is fine on wayland since nvidia uses drm interface there and the monitor query checks the monitors connected // to the drm device. - fprintf(stderr, "Warning: use of prime-run on X11 is not supported. Disabling prime-run\n"); + fprintf(stderr, "gsr warning: use of prime-run on X11 is not supported. Disabling prime-run\n"); disable_prime_run(); } gsr_window *window = gsr_window_create(dpy, wayland); if(!window) { - fprintf(stderr, "Error: failed to create window\n"); + fprintf(stderr, "gsr error: failed to create window\n"); _exit(1); } if(is_portal_capture && is_using_prime_run()) { - fprintf(stderr, "Warning: use of prime-run with -w portal option is currently not supported. Disabling prime-run\n"); + fprintf(stderr, "gsr warning: use of prime-run with -w portal option is currently not supported. Disabling prime-run\n"); disable_prime_run(); } @@ -3071,7 +3075,7 @@ int main(int argc, char **argv) { if(monitor_capture_use_drm(window, egl.gpu_info.vendor)) { // TODO: Allow specifying another card, and in other places if(!gsr_get_valid_card_path(&egl, egl.card_path, is_monitor_capture)) { - fprintf(stderr, "Error: no /dev/dri/cardX device found. Make sure that you have at least one monitor connected or record a single window instead on X11 or record with the -w portal option\n"); + fprintf(stderr, "gsr error: no /dev/dri/cardX device found. 
Make sure that you have at least one monitor connected or record a single window instead on X11 or record with the -w portal option\n"); _exit(2); } } @@ -3084,7 +3088,7 @@ int main(int argc, char **argv) { gsr_image_format image_format; if(get_image_format_from_filename(arg_parser.filename, &image_format)) { if(audio_input_arg->num_values > 0) { - fprintf(stderr, "Error: can't record audio (-a) when taking a screenshot\n"); + fprintf(stderr, "gsr error: can't record audio (-a) when taking a screenshot\n"); _exit(1); } @@ -3097,9 +3101,9 @@ int main(int argc, char **argv) { avformat_alloc_output_context2(&av_format_context, nullptr, arg_parser.container_format, arg_parser.filename); if (!av_format_context) { if(arg_parser.container_format) { - fprintf(stderr, "Error: Container format '%s' (argument -c) is not valid\n", arg_parser.container_format); + fprintf(stderr, "gsr error: Container format '%s' (argument -c) is not valid\n", arg_parser.container_format); } else { - fprintf(stderr, "Error: Failed to deduce container format from file extension. Use the '-c' option to specify container format\n"); + fprintf(stderr, "gsr error: Failed to deduce container format from file extension. Use the '-c' option to specify container format\n"); args_parser_print_usage(); _exit(1); } @@ -3128,7 +3132,7 @@ int main(int argc, char **argv) { // (Some?) livestreaming services require at least one audio track to work. // If not audio is provided then create one silent audio track. if(arg_parser.is_livestream && requested_audio_inputs.empty()) { - fprintf(stderr, "Info: live streaming but no audio track was added. Adding a silent audio track\n"); + fprintf(stderr, "gsr info: live streaming but no audio track was added. 
Adding a silent audio track\n"); MergedAudioInputs mai; mai.audio_inputs.push_back({""}); requested_audio_inputs.push_back(std::move(mai)); @@ -3147,7 +3151,7 @@ int main(int argc, char **argv) { AVFrame *video_frame = av_frame_alloc(); if(!video_frame) { - fprintf(stderr, "Error: Failed to allocate video frame\n"); + fprintf(stderr, "gsr error: Failed to allocate video frame\n"); _exit(1); } video_frame->format = video_codec_context->pix_fmt; @@ -3180,18 +3184,18 @@ int main(int argc, char **argv) { const size_t estimated_replay_buffer_packets = calculate_estimated_replay_buffer_packets(arg_parser.replay_buffer_size_secs, arg_parser.fps, arg_parser.audio_codec, requested_audio_inputs); gsr_encoder encoder; if(!gsr_encoder_init(&encoder, arg_parser.replay_storage, estimated_replay_buffer_packets, arg_parser.replay_buffer_size_secs, arg_parser.filename)) { - fprintf(stderr, "Error: failed to create encoder\n"); + fprintf(stderr, "gsr error: failed to create encoder\n"); _exit(1); } gsr_video_encoder *video_encoder = create_video_encoder(&egl, arg_parser); if(!video_encoder) { - fprintf(stderr, "Error: failed to create video encoder\n"); + fprintf(stderr, "gsr error: failed to create video encoder\n"); _exit(1); } if(!gsr_video_encoder_start(video_encoder, video_codec_context, video_frame)) { - fprintf(stderr, "Error: failed to start video encoder\n"); + fprintf(stderr, "gsr error: failed to start video encoder\n"); _exit(1); } @@ -3258,7 +3262,7 @@ int main(int argc, char **argv) { if(use_amix) { int err = init_filter_graph(audio_codec_context, &graph, &sink, src_filter_ctx, merged_audio_inputs.audio_inputs.size()); if(err < 0) { - fprintf(stderr, "Error: failed to create audio filter\n"); + fprintf(stderr, "gsr error: failed to create audio filter\n"); _exit(1); } } @@ -3308,7 +3312,7 @@ int main(int argc, char **argv) { int damage_fps_counter = 0; bool paused = false; - double paused_time_offset = 0.0; + std::atomic<double> paused_time_offset(0.0); double 
paused_time_start = 0.0; bool replay_recording = false; RecordingStartResult replay_recording_start_result; @@ -3323,7 +3327,7 @@ int main(int argc, char **argv) { const size_t audio_buffer_size = audio_max_frame_size * 4 * 2; // max 4 bytes/sample, 2 channels uint8_t *empty_audio = (uint8_t*)malloc(audio_buffer_size); if(!empty_audio) { - fprintf(stderr, "Error: failed to create empty audio\n"); + fprintf(stderr, "gsr error: failed to create empty audio\n"); _exit(1); } memset(empty_audio, 0, audio_buffer_size); @@ -3396,7 +3400,7 @@ int main(int argc, char **argv) { } // TODO: Is this |received_audio_time| really correct? - const int64_t num_expected_frames = std::round((this_audio_frame_time - record_start_time) / timeout_sec); + const int64_t num_expected_frames = std::floor((this_audio_frame_time - record_start_time) / timeout_sec); int64_t num_missing_frames = std::max((int64_t)0LL, num_expected_frames - num_received_frames); if(got_audio_data) @@ -3429,7 +3433,7 @@ int main(int argc, char **argv) { if(audio_track.graph) { // TODO: av_buffersrc_add_frame if(av_buffersrc_write_frame(audio_device.src_filter_ctx, audio_device.frame) < 0) { - fprintf(stderr, "Error: failed to add audio frame to filter\n"); + fprintf(stderr, "gsr error: failed to add audio frame to filter\n"); } } else { ret = avcodec_send_frame(audio_track.codec_context, audio_device.frame); @@ -3463,7 +3467,7 @@ int main(int argc, char **argv) { if(audio_track.graph) { // TODO: av_buffersrc_add_frame if(av_buffersrc_write_frame(audio_device.src_filter_ctx, audio_device.frame) < 0) { - fprintf(stderr, "Error: failed to add audio frame to filter\n"); + fprintf(stderr, "gsr error: failed to add audio frame to filter\n"); } } else { ret = avcodec_send_frame(audio_track.codec_context, audio_device.frame); @@ -3543,12 +3547,7 @@ int main(int argc, char **argv) { if(is_monitor_capture) gsr_damage_set_target_monitor(&damage, arg_parser.window); - double last_capture_seconds = record_start_time; - bool 
wait_until_frame_time_elapsed = false; - while(running) { - const double frame_start = clock_get_monotonic_seconds(); - while(gsr_window_process_event(window)) { gsr_damage_on_event(&damage, gsr_window_get_event_data(window)); gsr_capture_on_event(capture, &egl); @@ -3600,40 +3599,39 @@ int main(int argc, char **argv) { } const double this_video_frame_time = clock_get_monotonic_seconds() - paused_time_offset; - const double time_since_last_frame_captured_seconds = this_video_frame_time - last_capture_seconds; - double frame_time_overflow = time_since_last_frame_captured_seconds - target_fps; - const bool frame_timeout = frame_time_overflow >= 0.0; - - bool force_frame_capture = wait_until_frame_time_elapsed && frame_timeout; - bool allow_capture = !wait_until_frame_time_elapsed || force_frame_capture; - if(arg_parser.framerate_mode == GSR_FRAMERATE_MODE_CONTENT) { - force_frame_capture = false; - allow_capture = frame_timeout; - } - - bool frame_captured = false; - if((damaged || force_frame_capture) && allow_capture && !paused) { - frame_captured = true; - frame_time_overflow = std::min(std::max(0.0, frame_time_overflow), target_fps); - last_capture_seconds = this_video_frame_time - frame_time_overflow; - wait_until_frame_time_elapsed = false; + const int64_t expected_frames = std::floor((this_video_frame_time - record_start_time) / target_fps); + const int64_t num_missed_frames = expected_frames - video_pts_counter; + if(damaged && num_missed_frames >= 1 && !paused) { gsr_damage_clear(&damage); if(capture->clear_damage) capture->clear_damage(capture); // TODO: Dont do this if no damage? 
egl.glClear(0); + + bool capture_has_synchronous_task = false; + if(capture->capture_has_synchronous_task) { + capture_has_synchronous_task = capture->capture_has_synchronous_task(capture); + if(capture_has_synchronous_task) { + paused_time_start = clock_get_monotonic_seconds(); + paused = true; + } + } + gsr_capture_capture(capture, &capture_metadata, &color_conversion); + + if(capture_has_synchronous_task) { + paused_time_offset = paused_time_offset + (clock_get_monotonic_seconds() - paused_time_start); + paused = false; + } + gsr_egl_swap_buffers(&egl); gsr_video_encoder_copy_textures_to_frame(video_encoder, video_frame, &color_conversion); if(hdr && !hdr_metadata_set && !is_replaying && add_hdr_metadata_to_video_stream(capture, video_stream)) hdr_metadata_set = true; - const int64_t expected_frames = std::round((this_video_frame_time - record_start_time) / target_fps); - const int num_missed_frames = std::max((int64_t)1LL, expected_frames - video_pts_counter); - // TODO: Check if duplicate frame can be saved just by writing it with a different pts instead of sending it again const int num_frames_to_encode = arg_parser.framerate_mode == GSR_FRAMERATE_MODE_CONSTANT ? 
num_missed_frames : 1; for(int i = 0; i < num_frames_to_encode; ++i) { @@ -3656,7 +3654,7 @@ int main(int argc, char **argv) { // TODO: Move to separate thread because this could write to network (for example when livestreaming) gsr_encoder_receive_packets(&encoder, video_codec_context, video_frame->pts, VIDEO_STREAM_INDEX); } else { - fprintf(stderr, "Error: avcodec_send_frame failed, error: %s\n", av_error_to_string(ret)); + fprintf(stderr, "gsr error: avcodec_send_frame failed, error: %s\n", av_error_to_string(ret)); } if(force_iframe_frame) { @@ -3665,7 +3663,7 @@ int main(int argc, char **argv) { } } - video_pts_counter += num_frames_to_encode; + video_pts_counter += num_missed_frames; } if(toggle_pause == 1 && !is_replaying) { @@ -3674,7 +3672,7 @@ int main(int argc, char **argv) { paused_time_start = clock_get_monotonic_seconds(); fprintf(stderr, "Paused\n"); } else { - paused_time_offset += (clock_get_monotonic_seconds() - paused_time_start); + paused_time_offset = paused_time_offset + (clock_get_monotonic_seconds() - paused_time_start); fprintf(stderr, "Unpaused\n"); } @@ -3684,7 +3682,7 @@ int main(int argc, char **argv) { if(toggle_replay_recording && !arg_parser.replay_recording_directory) { toggle_replay_recording = 0; - printf("Error: Unable to start recording since the -ro option was not specified\n"); + printf("gsr error: Unable to start recording since the -ro option was not specified\n"); fflush(stdout); } @@ -3711,7 +3709,7 @@ int main(int argc, char **argv) { force_iframe_frame = true; fprintf(stderr, "Started recording\n"); } else { - printf("Error: Failed to start recording\n"); + printf("gsr error: Failed to start recording\n"); fflush(stdout); } } else if(replay_recording_start_result.av_format_context) { @@ -3727,7 +3725,7 @@ int main(int argc, char **argv) { if(arg_parser.recording_saved_script) run_recording_saved_script_async(arg_parser.recording_saved_script, replay_recording_filepath.c_str(), "regular"); } else { - printf("Error: 
Failed to save recording\n"); + printf("gsr error: Failed to save recording\n"); fflush(stdout); } @@ -3740,7 +3738,7 @@ int main(int argc, char **argv) { if(save_replay_thread.valid() && save_replay_thread.wait_for(std::chrono::seconds(0)) == std::future_status::ready) { save_replay_thread.get(); if(save_replay_output_filepath.empty()) { - printf("Error: Failed to save replay\n"); + printf("gsr error: Failed to save replay\n"); fflush(stdout); } else { puts(save_replay_output_filepath.c_str()); @@ -3763,29 +3761,22 @@ int main(int argc, char **argv) { gsr_replay_buffer_clear(encoder.replay_buffer); } - const double frame_end = clock_get_monotonic_seconds(); - const double time_at_frame_end = frame_end - paused_time_offset; + const double time_at_frame_end = clock_get_monotonic_seconds() - paused_time_offset; const double time_elapsed_total = time_at_frame_end - record_start_time; - const int64_t frames_elapsed = (int64_t)(time_elapsed_total / target_fps); + const int64_t frames_elapsed = std::floor(time_elapsed_total / target_fps); const double time_at_next_frame = (frames_elapsed + 1) * target_fps; double time_to_next_frame = time_at_next_frame - time_elapsed_total; - if(time_to_next_frame > target_fps*1.1) + if(time_to_next_frame > target_fps) time_to_next_frame = target_fps; + const int64_t end_num_missed_frames = frames_elapsed - video_pts_counter; - const double frame_time = frame_end - frame_start; - const bool frame_deadline_missed = frame_time > target_fps; - if(time_to_next_frame >= 0.0 && !frame_deadline_missed && frame_captured) + if(time_to_next_frame > 0.0 && end_num_missed_frames <= 0) av_usleep(time_to_next_frame * 1000.0 * 1000.0); else { if(paused) av_usleep(20.0 * 1000.0); // 20 milliseconds - else if(frame_deadline_missed) - {} - else if(arg_parser.framerate_mode == GSR_FRAMERATE_MODE_CONTENT || !frame_captured) + else if(arg_parser.framerate_mode == GSR_FRAMERATE_MODE_CONTENT) av_usleep(2.8 * 1000.0); // 2.8 milliseconds - else 
if(!frame_captured) - av_usleep(1.0 * 1000.0); // 1 milliseconds - wait_until_frame_time_elapsed = true; } } @@ -3816,7 +3807,7 @@ int main(int argc, char **argv) { if(arg_parser.recording_saved_script) run_recording_saved_script_async(arg_parser.recording_saved_script, replay_recording_filepath.c_str(), "regular"); } else { - printf("Error: Failed to save recording\n"); + printf("gsr error: Failed to save recording\n"); fflush(stdout); } } diff --git a/src/pipewire_audio.c b/src/pipewire_audio.c index 5b920b4..4ce07fb 100644 --- a/src/pipewire_audio.c +++ b/src/pipewire_audio.c @@ -4,6 +4,11 @@ #include <pipewire/extensions/metadata.h> #include <pipewire/impl-module.h> +typedef struct { + const gsr_pipewire_audio_port *output_port; + const gsr_pipewire_audio_port *input_port; +} gsr_pipewire_audio_desired_link; + static void on_core_info_cb(void *user_data, const struct pw_core_info *info) { gsr_pipewire_audio *self = user_data; //fprintf(stderr, "server name: %s\n", info->name); @@ -29,7 +34,7 @@ static const struct pw_core_events core_events = { }; static gsr_pipewire_audio_node* gsr_pipewire_audio_get_node_by_name_case_insensitive(gsr_pipewire_audio *self, const char *node_name, gsr_pipewire_audio_node_type node_type) { - for(int i = 0; i < self->num_stream_nodes; ++i) { + for(size_t i = 0; i < self->num_stream_nodes; ++i) { const gsr_pipewire_audio_node *node = &self->stream_nodes[i]; if(node->type == node_type && strcasecmp(node->name, node_name) == 0) return &self->stream_nodes[i]; @@ -38,7 +43,7 @@ static gsr_pipewire_audio_node* gsr_pipewire_audio_get_node_by_name_case_insensi } static gsr_pipewire_audio_port* gsr_pipewire_audio_get_node_port_by_name(gsr_pipewire_audio *self, uint32_t node_id, const char *port_name) { - for(int i = 0; i < self->num_ports; ++i) { + for(size_t i = 0; i < self->num_ports; ++i) { if(self->ports[i].node_id == node_id && strcmp(self->ports[i].name, port_name) == 0) return &self->ports[i]; } @@ -81,69 +86,68 @@ static void 
gsr_pipewire_get_node_input_port_by_type(gsr_pipewire_audio *self, c } } -static void gsr_pipewire_get_node_output_port_by_type(gsr_pipewire_audio *self, const gsr_pipewire_audio_node *output_node, gsr_pipewire_audio_node_type output_type, - const gsr_pipewire_audio_port **output_fl_port, const gsr_pipewire_audio_port **output_fr_port) +static bool string_starts_with(const char *str, const char *substr) { + const int len = strlen(str); + const int substr_len = strlen(substr); + return len >= substr_len && memcmp(str, substr, substr_len) == 0; +} + +static bool string_ends_with(const char *str, const char *substr) { + const int len = strlen(str); + const int substr_len = strlen(substr); + return len >= substr_len && memcmp(str + len - substr_len, substr, substr_len) == 0; +} + +/* Returns number of desired links */ +static size_t gsr_pipewire_get_node_output_ports(gsr_pipewire_audio *self, const gsr_pipewire_audio_node *output_node, + gsr_pipewire_audio_desired_link *desired_links, size_t desired_links_max_size, + const gsr_pipewire_audio_port *input_fl_port, const gsr_pipewire_audio_port *input_fr_port) { - *output_fl_port = NULL; - *output_fr_port = NULL; + size_t num_desired_links = 0; + for(size_t i = 0; i < self->num_ports && num_desired_links < desired_links_max_size; ++i) { + if(self->ports[i].node_id != output_node->id) + continue; - switch(output_type) { - case GSR_PIPEWIRE_AUDIO_NODE_TYPE_STREAM_OUTPUT: - *output_fl_port = gsr_pipewire_audio_get_node_port_by_name(self, output_node->id, "output_FL"); - *output_fr_port = gsr_pipewire_audio_get_node_port_by_name(self, output_node->id, "output_FR"); - break; - case GSR_PIPEWIRE_AUDIO_NODE_TYPE_STREAM_INPUT: - *output_fl_port = gsr_pipewire_audio_get_node_port_by_name(self, output_node->id, "monitor_FL"); - *output_fr_port = gsr_pipewire_audio_get_node_port_by_name(self, output_node->id, "monitor_FR"); - break; - case GSR_PIPEWIRE_AUDIO_NODE_TYPE_SINK_OR_SOURCE: { - *output_fl_port = 
gsr_pipewire_audio_get_node_port_by_name(self, output_node->id, "monitor_FL"); - *output_fr_port = gsr_pipewire_audio_get_node_port_by_name(self, output_node->id, "monitor_FR"); - if(!*output_fl_port || !*output_fr_port) { - *output_fl_port = gsr_pipewire_audio_get_node_port_by_name(self, output_node->id, "capture_FL"); - *output_fr_port = gsr_pipewire_audio_get_node_port_by_name(self, output_node->id, "capture_FR"); - } - if(!*output_fl_port || !*output_fr_port) { - const gsr_pipewire_audio_port *output_mono_port = gsr_pipewire_audio_get_node_port_by_name(self, output_node->id, "monitor_MONO"); - if(!output_mono_port) - output_mono_port = gsr_pipewire_audio_get_node_port_by_name(self, output_node->id, "capture_MONO"); - - if(output_mono_port) { - *output_fl_port = output_mono_port; - *output_fr_port = output_mono_port; - } - } - break; + if(string_starts_with(self->ports[i].name, "playback_")) + continue; + + if(string_ends_with(self->ports[i].name, "_MONO") || string_ends_with(self->ports[i].name, "_FC") || string_ends_with(self->ports[i].name, "_LFE")) { + if(num_desired_links + 2 >= desired_links_max_size) + break; + + desired_links[num_desired_links + 0] = (gsr_pipewire_audio_desired_link){ .output_port = &self->ports[i], .input_port = input_fl_port }; + desired_links[num_desired_links + 1] = (gsr_pipewire_audio_desired_link){ .output_port = &self->ports[i], .input_port = input_fr_port }; + num_desired_links += 2; + } else if(string_ends_with(self->ports[i].name, "_FL") || string_ends_with(self->ports[i].name, "_RL") || string_ends_with(self->ports[i].name, "_SL")) { + if(num_desired_links + 1 >= desired_links_max_size) + break; + + desired_links[num_desired_links] = (gsr_pipewire_audio_desired_link){ .output_port = &self->ports[i], .input_port = input_fl_port }; + num_desired_links += 1; + } else if(string_ends_with(self->ports[i].name, "_FR") || string_ends_with(self->ports[i].name, "_RR") || string_ends_with(self->ports[i].name, "_SR")) { + 
if(num_desired_links + 1 >= desired_links_max_size) + break; + + desired_links[num_desired_links] = (gsr_pipewire_audio_desired_link){ .output_port = &self->ports[i], .input_port = input_fr_port }; + num_desired_links += 1; } } + return num_desired_links; } -static void gsr_pipewire_audio_establish_link(gsr_pipewire_audio *self, const gsr_pipewire_audio_port *input_fl_port, const gsr_pipewire_audio_port *input_fr_port, - const gsr_pipewire_audio_port *output_fl_port, const gsr_pipewire_audio_port *output_fr_port) -{ - // TODO: Detect if link already exists before so we dont create these proxies when not needed +static void gsr_pipewire_audio_establish_link(gsr_pipewire_audio *self, const gsr_pipewire_audio_port *output_port, const gsr_pipewire_audio_port *input_port) { + // TODO: Detect if link already exists before so we dont create these proxies when not needed. + // We could do that by saving which nodes have been linked with which nodes after linking them. //fprintf(stderr, "linking!\n"); // TODO: error check and cleanup - { - struct pw_properties *props = pw_properties_new(NULL, NULL); - pw_properties_setf(props, PW_KEY_LINK_OUTPUT_PORT, "%u", output_fl_port->id); - pw_properties_setf(props, PW_KEY_LINK_INPUT_PORT, "%u", input_fl_port->id); - // TODO: Clean this up when removing node - struct pw_proxy *proxy = pw_core_create_object(self->core, "link-factory", PW_TYPE_INTERFACE_Link, PW_VERSION_LINK, &props->dict, 0); - //self->server_version_sync = pw_core_sync(self->core, PW_ID_CORE, self->server_version_sync); - pw_properties_free(props); - } - - { - struct pw_properties *props = pw_properties_new(NULL, NULL); - pw_properties_setf(props, PW_KEY_LINK_OUTPUT_PORT, "%u", output_fr_port->id); - pw_properties_setf(props, PW_KEY_LINK_INPUT_PORT, "%u", input_fr_port->id); - // TODO: Clean this up when removing node - struct pw_proxy *proxy = pw_core_create_object(self->core, "link-factory", PW_TYPE_INTERFACE_Link, PW_VERSION_LINK, &props->dict, 0); - 
//self->server_version_sync = pw_core_sync(self->core, PW_ID_CORE, self->server_version_sync); - pw_properties_free(props); - } + struct pw_properties *props = pw_properties_new(NULL, NULL); + pw_properties_setf(props, PW_KEY_LINK_OUTPUT_PORT, "%u", output_port->id); + pw_properties_setf(props, PW_KEY_LINK_INPUT_PORT, "%u", input_port->id); + // TODO: Clean this up when removing node + struct pw_proxy *proxy = pw_core_create_object(self->core, "link-factory", PW_TYPE_INTERFACE_Link, PW_VERSION_LINK, &props->dict, 0); + //self->server_version_sync = pw_core_sync(self->core, PW_ID_CORE, self->server_version_sync); + pw_properties_free(props); } static void gsr_pipewire_audio_create_link(gsr_pipewire_audio *self, const gsr_pipewire_audio_requested_link *requested_link) { @@ -158,7 +162,8 @@ static void gsr_pipewire_audio_create_link(gsr_pipewire_audio *self, const gsr_p if(!input_fl_port || !input_fr_port) return; - for(int i = 0; i < self->num_stream_nodes; ++i) { + gsr_pipewire_audio_desired_link desired_links[64]; + for(size_t i = 0; i < self->num_stream_nodes; ++i) { const gsr_pipewire_audio_node *output_node = &self->stream_nodes[i]; if(output_node->type != requested_link->output_type) continue; @@ -172,18 +177,15 @@ static void gsr_pipewire_audio_create_link(gsr_pipewire_audio *self, const gsr_p continue; } - const gsr_pipewire_audio_port *output_fl_port = NULL; - const gsr_pipewire_audio_port *output_fr_port = NULL; - gsr_pipewire_get_node_output_port_by_type(self, output_node, requested_link->output_type, &output_fl_port, &output_fr_port); - if(!output_fl_port || !output_fr_port) - continue; - - gsr_pipewire_audio_establish_link(self, input_fl_port, input_fr_port, output_fl_port, output_fr_port); + const size_t num_desired_links = gsr_pipewire_get_node_output_ports(self, output_node, desired_links, 64, input_fl_port, input_fr_port); + for(size_t j = 0; j < num_desired_links; ++j) { + gsr_pipewire_audio_establish_link(self, desired_links[j].output_port, 
desired_links[j].input_port); + } } } static void gsr_pipewire_audio_create_links(gsr_pipewire_audio *self) { - for(int i = 0; i < self->num_requested_links; ++i) { + for(size_t i = 0; i < self->num_requested_links; ++i) { gsr_pipewire_audio_create_link(self, &self->requested_links[i]); } } @@ -214,24 +216,21 @@ static void gsr_pipewire_audio_create_link_for_default_devices(gsr_pipewire_audi if(!stream_output_node) return; - const gsr_pipewire_audio_port *output_fl_port = NULL; - const gsr_pipewire_audio_port *output_fr_port = NULL; - gsr_pipewire_get_node_output_port_by_type(self, stream_output_node, requested_link->output_type, &output_fl_port, &output_fr_port); - if(!output_fl_port || !output_fr_port) - return; - - gsr_pipewire_audio_establish_link(self, input_fl_port, input_fr_port, output_fl_port, output_fr_port); - //fprintf(stderr, "establishing a link from %u to %u\n", stream_output_node->id, stream_input_node->id); + gsr_pipewire_audio_desired_link desired_links[64]; + const size_t num_desired_links = gsr_pipewire_get_node_output_ports(self, stream_output_node, desired_links, 64, input_fl_port, input_fr_port); + for(size_t i = 0; i < num_desired_links; ++i) { + gsr_pipewire_audio_establish_link(self, desired_links[i].output_port, desired_links[i].input_port); + } } static void gsr_pipewire_audio_create_links_for_default_devices(gsr_pipewire_audio *self, gsr_pipewire_audio_requested_type default_device_type) { - for(int i = 0; i < self->num_requested_links; ++i) { + for(size_t i = 0; i < self->num_requested_links; ++i) { gsr_pipewire_audio_create_link_for_default_devices(self, &self->requested_links[i], default_device_type); } } static void gsr_pipewire_audio_destroy_links_by_output_to_input(gsr_pipewire_audio *self, uint32_t output_node_id, uint32_t input_node_id) { - for(int i = 0; i < self->num_links; ++i) { + for(size_t i = 0; i < self->num_links; ++i) { if(self->links[i].output_node_id == output_node_id && self->links[i].input_node_id == input_node_id) 
pw_registry_destroy(self->registry, self->links[i].id); } @@ -271,7 +270,7 @@ static void gsr_pipewire_destroy_default_device_link(gsr_pipewire_audio *self, c } static void gsr_pipewire_destroy_default_device_links(gsr_pipewire_audio *self, gsr_pipewire_audio_requested_type default_device_type) { - for(int i = 0; i < self->num_requested_links; ++i) { + for(size_t i = 0; i < self->num_requested_links; ++i) { gsr_pipewire_destroy_default_device_link(self, &self->requested_links[i], default_device_type); } } @@ -370,6 +369,24 @@ static bool gsr_pipewire_audio_listen_on_metadata(gsr_pipewire_audio *self, uint return true; } +static bool array_ensure_capacity(void **array, size_t size, size_t *capacity_items, size_t element_size) { + if(size + 1 >= *capacity_items) { + size_t new_capacity_items = *capacity_items * 2; + if(new_capacity_items == 0) + new_capacity_items = 32; + + void *new_data = realloc(*array, new_capacity_items * element_size); + if(!new_data) { + fprintf(stderr, "gsr error: pipewire_audio: failed to reallocate memory\n"); + return false; + } + + *array = new_data; + *capacity_items = new_capacity_items; + } + return true; +} + static void registry_event_global(void *data, uint32_t id, uint32_t permissions, const char *type, uint32_t version, const struct spa_dict *props) @@ -389,11 +406,14 @@ static void registry_event_global(void *data, uint32_t id, uint32_t permissions, const bool is_stream_input = media_class && strcmp(media_class, "Stream/Input/Audio") == 0; const bool is_sink = media_class && strcmp(media_class, "Audio/Sink") == 0; const bool is_source = media_class && strcmp(media_class, "Audio/Source") == 0; - if(self->num_stream_nodes < GSR_PIPEWIRE_AUDIO_MAX_STREAM_NODES && node_name && (is_stream_output || is_stream_input || is_sink || is_source)) { + if(node_name && (is_stream_output || is_stream_input || is_sink || is_source)) { //const char *application_binary = spa_dict_lookup(props, PW_KEY_APP_PROCESS_BINARY); //const char 
*application_name = spa_dict_lookup(props, PW_KEY_APP_NAME); //fprintf(stderr, " node name: %s, app binary: %s, app name: %s\n", node_name, application_binary, application_name); + if(!array_ensure_capacity((void**)&self->stream_nodes, self->num_stream_nodes, &self->stream_nodes_capacity_items, sizeof(gsr_pipewire_audio_node))) + return; + char *node_name_copy = strdup(node_name); if(node_name_copy) { self->stream_nodes[self->num_stream_nodes].id = id; @@ -408,8 +428,6 @@ static void registry_event_global(void *data, uint32_t id, uint32_t permissions, gsr_pipewire_audio_create_links(self); } - } else if(self->num_stream_nodes >= GSR_PIPEWIRE_AUDIO_MAX_STREAM_NODES) { - fprintf(stderr, "gsr error: reached the maximum amount of audio stream nodes\n"); } } else if(strcmp(type, PW_TYPE_INTERFACE_Port) == 0) { const char *port_name = spa_dict_lookup(props, PW_KEY_PORT_NAME); @@ -424,7 +442,10 @@ static void registry_event_global(void *data, uint32_t id, uint32_t permissions, const char *node_id = spa_dict_lookup(props, PW_KEY_NODE_ID); const int node_id_num = node_id ? 
atoi(node_id) : 0; - if(self->num_ports < GSR_PIPEWIRE_AUDIO_MAX_PORTS && port_name && direction >= 0 && node_id_num > 0) { + if(port_name && direction >= 0 && node_id_num > 0) { + if(!array_ensure_capacity((void**)&self->ports, self->num_ports, &self->ports_capacity_items, sizeof(gsr_pipewire_audio_port))) + return; + //fprintf(stderr, " port name: %s, node id: %d, direction: %s\n", port_name, node_id_num, port_direction); char *port_name_copy = strdup(port_name); if(port_name_copy) { @@ -437,8 +458,6 @@ static void registry_event_global(void *data, uint32_t id, uint32_t permissions, gsr_pipewire_audio_create_links(self); } - } else if(self->num_ports >= GSR_PIPEWIRE_AUDIO_MAX_PORTS) { - fprintf(stderr, "gsr error: reached the maximum amount of audio ports\n"); } } else if(strcmp(type, PW_TYPE_INTERFACE_Link) == 0) { const char *output_node = spa_dict_lookup(props, PW_KEY_LINK_OUTPUT_NODE); @@ -446,14 +465,15 @@ static void registry_event_global(void *data, uint32_t id, uint32_t permissions, const uint32_t output_node_id_num = output_node ? atoi(output_node) : 0; const uint32_t input_node_id_num = input_node ? 
atoi(input_node) : 0; - if(self->num_links < GSR_PIPEWIRE_AUDIO_MAX_LINKS && output_node_id_num > 0 && input_node_id_num > 0) { + if(output_node_id_num > 0 && input_node_id_num > 0) { + if(!array_ensure_capacity((void**)&self->links, self->num_links, &self->links_capacity_items, sizeof(gsr_pipewire_audio_link))) + return; + //fprintf(stderr, " new link (%u): %u -> %u\n", id, output_node_id_num, input_node_id_num); self->links[self->num_links].id = id; self->links[self->num_links].output_node_id = output_node_id_num; self->links[self->num_links].input_node_id = input_node_id_num; ++self->num_links; - } else if(self->num_ports >= GSR_PIPEWIRE_AUDIO_MAX_LINKS) { - fprintf(stderr, "gsr error: reached the maximum amount of audio links\n"); } } else if(strcmp(type, PW_TYPE_INTERFACE_Metadata) == 0) { const char *name = spa_dict_lookup(props, PW_KEY_METADATA_NAME); @@ -463,7 +483,7 @@ static void registry_event_global(void *data, uint32_t id, uint32_t permissions, } static bool gsr_pipewire_audio_remove_node_by_id(gsr_pipewire_audio *self, uint32_t node_id) { - for(int i = 0; i < self->num_stream_nodes; ++i) { + for(size_t i = 0; i < self->num_stream_nodes; ++i) { if(self->stream_nodes[i].id != node_id) continue; @@ -476,7 +496,7 @@ static bool gsr_pipewire_audio_remove_node_by_id(gsr_pipewire_audio *self, uint3 } static bool gsr_pipewire_audio_remove_port_by_id(gsr_pipewire_audio *self, uint32_t port_id) { - for(int i = 0; i < self->num_ports; ++i) { + for(size_t i = 0; i < self->num_ports; ++i) { if(self->ports[i].id != port_id) continue; @@ -489,7 +509,7 @@ static bool gsr_pipewire_audio_remove_port_by_id(gsr_pipewire_audio *self, uint3 } static bool gsr_pipewire_audio_remove_link_by_id(gsr_pipewire_audio *self, uint32_t link_id) { - for(int i = 0; i < self->num_links; ++i) { + for(size_t i = 0; i < self->num_links; ++i) { if(self->links[i].id != link_id) continue; @@ -580,13 +600,19 @@ void gsr_pipewire_audio_deinit(gsr_pipewire_audio *self) { 
pw_thread_loop_stop(self->thread_loop); } - for(int i = 0; i < self->num_virtual_sink_proxies; ++i) { + for(size_t i = 0; i < self->num_virtual_sink_proxies; ++i) { if(self->virtual_sink_proxies[i]) { pw_proxy_destroy(self->virtual_sink_proxies[i]); self->virtual_sink_proxies[i] = NULL; } } self->num_virtual_sink_proxies = 0; + self->virtual_sink_proxies_capacity_items = 0; + + if(self->virtual_sink_proxies) { + free(self->virtual_sink_proxies); + self->virtual_sink_proxies = NULL; + } if(self->metadata_proxy) { spa_hook_remove(&self->metadata_listener); @@ -615,26 +641,50 @@ void gsr_pipewire_audio_deinit(gsr_pipewire_audio *self) { self->thread_loop = NULL; } - for(int i = 0; i < self->num_stream_nodes; ++i) { - free(self->stream_nodes[i].name); + if(self->stream_nodes) { + for(size_t i = 0; i < self->num_stream_nodes; ++i) { + free(self->stream_nodes[i].name); + } + self->num_stream_nodes = 0; + self->stream_nodes_capacity_items = 0; + + free(self->stream_nodes); + self->stream_nodes = NULL; } - self->num_stream_nodes = 0; - for(int i = 0; i < self->num_ports; ++i) { - free(self->ports[i].name); + if(self->ports) { + for(size_t i = 0; i < self->num_ports; ++i) { + free(self->ports[i].name); + } + self->num_ports = 0; + self->ports_capacity_items = 0; + + free(self->ports); + self->ports = NULL; } - self->num_ports = 0; - self->num_links = 0; + if(self->links) { + self->num_links = 0; + self->links_capacity_items = 0; - for(int i = 0; i < self->num_requested_links; ++i) { - for(int j = 0; j < self->requested_links[i].num_outputs; ++j) { - free(self->requested_links[i].outputs[j].name); + free(self->links); + self->links = NULL; + } + + if(self->requested_links) { + for(size_t i = 0; i < self->num_requested_links; ++i) { + for(int j = 0; j < self->requested_links[i].num_outputs; ++j) { + free(self->requested_links[i].outputs[j].name); + } + free(self->requested_links[i].outputs); + free(self->requested_links[i].input_name); } - 
free(self->requested_links[i].outputs); - free(self->requested_links[i].input_name); + self->num_requested_links = 0; + self->requested_links_capacity_items = 0; + + free(self->requested_links); + self->requested_links = NULL; } - self->num_requested_links = 0; #if PW_CHECK_VERSION(0, 3, 49) pw_deinit(); @@ -653,10 +703,8 @@ static struct pw_properties* gsr_pipewire_create_null_audio_sink(const char *nam } bool gsr_pipewire_audio_create_virtual_sink(gsr_pipewire_audio *self, const char *name) { - if(self->num_virtual_sink_proxies == GSR_PIPEWIRE_AUDIO_MAX_VIRTUAL_SINKS) { - fprintf(stderr, "gsr error: gsr_pipewire_audio_create_virtual_sink: reached max number of virtual sinks\n"); + if(!array_ensure_capacity((void**)&self->virtual_sink_proxies, self->num_virtual_sink_proxies, &self->virtual_sink_proxies_capacity_items, sizeof(struct pw_proxy*))) return false; - } pw_thread_loop_lock(self->thread_loop); @@ -701,10 +749,8 @@ static bool string_remove_suffix(char *str, const char *suffix) { } static bool gsr_pipewire_audio_add_links_to_output(gsr_pipewire_audio *self, const char **output_names, int num_output_names, const char *input_name, gsr_pipewire_audio_node_type output_type, gsr_pipewire_audio_link_input_type input_type, bool inverted) { - if(self->num_requested_links >= GSR_PIPEWIRE_AUDIO_MAX_REQUESTED_LINKS) { - fprintf(stderr, "gsr error: reached the maximum amount of audio links\n"); + if(!array_ensure_capacity((void**)&self->requested_links, self->num_requested_links, &self->requested_links_capacity_items, sizeof(gsr_pipewire_audio_requested_link))) return false; - } gsr_pipewire_audio_requested_output *outputs = calloc(num_output_names, sizeof(gsr_pipewire_audio_requested_output)); if(!outputs) @@ -781,7 +827,7 @@ bool gsr_pipewire_audio_add_link_from_sources_to_sink(gsr_pipewire_audio *self, void gsr_pipewire_audio_for_each_app(gsr_pipewire_audio *self, gsr_pipewire_audio_app_query_callback callback, void *userdata) { 
pw_thread_loop_lock(self->thread_loop); - for(int i = 0; i < self->num_stream_nodes; ++i) { + for(int i = 0; i < (int)self->num_stream_nodes; ++i) { const gsr_pipewire_audio_node *node = &self->stream_nodes[i]; if(node->type != GSR_PIPEWIRE_AUDIO_NODE_TYPE_STREAM_OUTPUT) continue; diff --git a/src/pipewire_video.c b/src/pipewire_video.c index bbc3f5d..277004c 100644 --- a/src/pipewire_video.c +++ b/src/pipewire_video.c @@ -6,7 +6,7 @@ #include <spa/param/video/format-utils.h> #include <spa/debug/types.h> -#include <libdrm/drm_fourcc.h> +#include <drm_fourcc.h> #include <fcntl.h> #include <unistd.h> @@ -280,13 +280,21 @@ static void on_param_changed_cb(void *user_data, uint32_t id, const struct spa_p self->negotiated = true; } -static void on_state_changed_cb(void *user_data, enum pw_stream_state old, enum pw_stream_state state, const char *error) { - (void)old; +static void on_state_changed_cb(void *user_data, enum pw_stream_state prev_state, enum pw_stream_state new_state, const char *error) { gsr_pipewire_video *self = user_data; - fprintf(stderr, "gsr info: pipewire: stream %p state: \"%s\" (error: %s)\n", - (void*)self->stream, pw_stream_state_as_string(state), + fprintf(stderr, "gsr info: pipewire: stream %p previous state: \"%s\", new state: \"%s\" (error: %s)\n", + (void*)self->stream, pw_stream_state_as_string(prev_state), pw_stream_state_as_string(new_state), error ? 
error : "none"); + + pthread_mutex_lock(&self->mutex); + if(new_state == PW_STREAM_STATE_PAUSED) { + self->paused_start_secs = clock_get_monotonic_seconds(); + self->paused = true; + } else { + self->paused = false; + } + pthread_mutex_unlock(&self->mutex); } static const struct pw_stream_events stream_events = { @@ -346,19 +354,19 @@ static int64_t spa_video_format_to_drm_format(const enum spa_video_format format switch(format) { case SPA_VIDEO_FORMAT_RGBx: return DRM_FORMAT_XBGR8888; case SPA_VIDEO_FORMAT_BGRx: return DRM_FORMAT_XRGB8888; - case SPA_VIDEO_FORMAT_RGBA: return DRM_FORMAT_ABGR8888; - case SPA_VIDEO_FORMAT_BGRA: return DRM_FORMAT_ARGB8888; + // case SPA_VIDEO_FORMAT_RGBA: return DRM_FORMAT_ABGR8888; + //case SPA_VIDEO_FORMAT_BGRA: return DRM_FORMAT_ARGB8888; case SPA_VIDEO_FORMAT_RGB: return DRM_FORMAT_XBGR8888; case SPA_VIDEO_FORMAT_BGR: return DRM_FORMAT_XRGB8888; - case SPA_VIDEO_FORMAT_ARGB: return DRM_FORMAT_XRGB8888; - case SPA_VIDEO_FORMAT_ABGR: return DRM_FORMAT_XRGB8888; + //case SPA_VIDEO_FORMAT_ARGB: return DRM_FORMAT_XRGB8888; + //case SPA_VIDEO_FORMAT_ABGR: return DRM_FORMAT_XRGB8888; #if PW_CHECK_VERSION(0, 3, 41) case SPA_VIDEO_FORMAT_xRGB_210LE: return DRM_FORMAT_XRGB2101010; case SPA_VIDEO_FORMAT_xBGR_210LE: return DRM_FORMAT_XBGR2101010; - case SPA_VIDEO_FORMAT_ARGB_210LE: return DRM_FORMAT_ARGB2101010; - case SPA_VIDEO_FORMAT_ABGR_210LE: return DRM_FORMAT_ABGR2101010; + // case SPA_VIDEO_FORMAT_ARGB_210LE: return DRM_FORMAT_ARGB2101010; + // case SPA_VIDEO_FORMAT_ABGR_210LE: return DRM_FORMAT_ABGR2101010; #endif - default: break; + default: break; } return DRM_FORMAT_INVALID; } @@ -366,23 +374,23 @@ static int64_t spa_video_format_to_drm_format(const enum spa_video_format format #if PW_CHECK_VERSION(0, 3, 41) #define GSR_PIPEWIRE_VIDEO_NUM_VIDEO_FORMATS GSR_PIPEWIRE_VIDEO_MAX_VIDEO_FORMATS #else -#define GSR_PIPEWIRE_VIDEO_NUM_VIDEO_FORMATS 8 +#define GSR_PIPEWIRE_VIDEO_NUM_VIDEO_FORMATS 4 #endif static const enum spa_video_format 
video_formats[GSR_PIPEWIRE_VIDEO_MAX_VIDEO_FORMATS] = { - SPA_VIDEO_FORMAT_BGRA, + // SPA_VIDEO_FORMAT_BGRA, SPA_VIDEO_FORMAT_BGRx, SPA_VIDEO_FORMAT_BGR, SPA_VIDEO_FORMAT_RGBx, - SPA_VIDEO_FORMAT_RGBA, + // SPA_VIDEO_FORMAT_RGBA, SPA_VIDEO_FORMAT_RGB, - SPA_VIDEO_FORMAT_ARGB, - SPA_VIDEO_FORMAT_ABGR, + // SPA_VIDEO_FORMAT_ARGB, + // SPA_VIDEO_FORMAT_ABGR, #if PW_CHECK_VERSION(0, 3, 41) SPA_VIDEO_FORMAT_xRGB_210LE, SPA_VIDEO_FORMAT_xBGR_210LE, - SPA_VIDEO_FORMAT_ARGB_210LE, - SPA_VIDEO_FORMAT_ABGR_210LE + // SPA_VIDEO_FORMAT_ARGB_210LE, + // SPA_VIDEO_FORMAT_ABGR_210LE #endif }; @@ -471,6 +479,27 @@ static void gsr_pipewire_video_init_modifiers(gsr_pipewire_video *self) { spa_video_format_get_modifiers(self, self->supported_video_formats[i].format, self->modifiers + self->num_modifiers, GSR_PIPEWIRE_VIDEO_MAX_MODIFIERS - self->num_modifiers, &num_modifiers); self->supported_video_formats[i].modifiers_index = self->num_modifiers; self->supported_video_formats[i].modifiers_size = num_modifiers; + self->num_modifiers += num_modifiers; + } +} + +static void gsr_pipewire_video_format_remove_modifier(gsr_pipewire_video *self, gsr_video_format *video_format, uint64_t modifier) { + for(size_t i = 0; i < video_format->modifiers_size; ++i) { + if(self->modifiers[video_format->modifiers_index + i] != modifier) + continue; + + for(size_t j = i + 1; j < video_format->modifiers_size; ++j) { + self->modifiers[j - 1] = self->modifiers[j]; + } + --video_format->modifiers_size; + return; + } +} + +static void gsr_pipewire_video_remove_modifier(gsr_pipewire_video *self, uint64_t modifier) { + for(size_t i = 0; i < GSR_PIPEWIRE_VIDEO_NUM_VIDEO_FORMATS; i++) { + gsr_video_format *video_format = &self->supported_video_formats[i]; + gsr_pipewire_video_format_remove_modifier(self, video_format, modifier); } } @@ -651,6 +680,7 @@ void gsr_pipewire_video_deinit(gsr_pipewire_video *self) { self->dmabuf_num_planes = 0; self->negotiated = false; + self->renegotiated = false; 
if(self->mutex_initialized) { pthread_mutex_destroy(&self->mutex); @@ -702,9 +732,19 @@ static EGLImage gsr_pipewire_video_create_egl_image_with_fallback(gsr_pipewire_v } else { image = gsr_pipewire_video_create_egl_image(self, fds, offsets, pitches, modifiers, true); if(!image) { - fprintf(stderr, "gsr error: gsr_pipewire_video_create_egl_image_with_fallback: failed to create egl image with modifiers, trying without modifiers\n"); - self->no_modifiers_fallback = true; - image = gsr_pipewire_video_create_egl_image(self, fds, offsets, pitches, modifiers, false); + if(self->renegotiated) { + fprintf(stderr, "gsr error: gsr_pipewire_video_create_egl_image_with_fallback: failed to create egl image with modifiers, trying without modifiers\n"); + self->no_modifiers_fallback = true; + image = gsr_pipewire_video_create_egl_image(self, fds, offsets, pitches, modifiers, false); + } else { + fprintf(stderr, "gsr error: gsr_pipewire_video_create_egl_image_with_fallback: failed to create egl image with modifiers, renegotiating with a different modifier\n"); + self->negotiated = false; + self->renegotiated = true; + gsr_pipewire_video_remove_modifier(self, self->format.info.raw.modifier); + pw_thread_loop_lock(self->thread_loop); + pw_loop_signal_event(pw_thread_loop_get_loop(self->thread_loop), self->reneg); + pw_thread_loop_unlock(self->thread_loop); + } } } return image; @@ -736,13 +776,9 @@ static void gsr_pipewire_video_update_cursor_texture(gsr_pipewire_video *self, g if(!self->cursor.data) return; - const float border_color[4] = { 0.0f, 0.0f, 0.0f, 0.0f }; self->egl->glBindTexture(GL_TEXTURE_2D, texture_map.cursor_texture_id); // TODO: glTextureSubImage2D if same size self->egl->glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, self->cursor.width, self->cursor.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, self->cursor.data); - self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER); - self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, 
GL_CLAMP_TO_BORDER); - self->egl->glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, border_color); self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); self->egl->glBindTexture(GL_TEXTURE_2D, 0); @@ -767,12 +803,15 @@ bool gsr_pipewire_video_map_texture(gsr_pipewire_video *self, gsr_texture_map te } EGLImage image = gsr_pipewire_video_create_egl_image_with_fallback(self); - if(image) { - gsr_pipewire_video_bind_image_to_texture_with_fallback(self, texture_map, image); - *using_external_image = self->external_texture_fallback; - self->egl->eglDestroyImage(self->egl->egl_display, image); + if(!image) { + pthread_mutex_unlock(&self->mutex); + return false; } + gsr_pipewire_video_bind_image_to_texture_with_fallback(self, texture_map, image); + *using_external_image = self->external_texture_fallback; + self->egl->eglDestroyImage(self->egl->egl_display, image); + gsr_pipewire_video_update_cursor_texture(self, texture_map); region->x = 0; @@ -810,6 +849,9 @@ bool gsr_pipewire_video_map_texture(gsr_pipewire_video *self, gsr_texture_map te } bool gsr_pipewire_video_is_damaged(gsr_pipewire_video *self) { + if(!self->mutex_initialized) + return false; + bool damaged = false; pthread_mutex_lock(&self->mutex); damaged = self->damaged; @@ -818,7 +860,21 @@ bool gsr_pipewire_video_is_damaged(gsr_pipewire_video *self) { } void gsr_pipewire_video_clear_damage(gsr_pipewire_video *self) { + if(!self->mutex_initialized) + return; + pthread_mutex_lock(&self->mutex); self->damaged = false; pthread_mutex_unlock(&self->mutex); } + +bool gsr_pipewire_video_should_restart(gsr_pipewire_video *self) { + if(!self->mutex_initialized) + return false; + + bool should_restart = false; + pthread_mutex_lock(&self->mutex); + should_restart = self->paused && clock_get_monotonic_seconds() - self->paused_start_secs >= 3.0; + pthread_mutex_unlock(&self->mutex); + return should_restart; +} diff 
--git a/src/sound.cpp b/src/sound.cpp index 5a0ce77..d954609 100644 --- a/src/sound.cpp +++ b/src/sound.cpp @@ -154,7 +154,7 @@ static bool startup_get_default_devices(pa_handle *p, const char *device_name) { } if(p->default_output_device_name[0] == '\0') { - fprintf(stderr, "Error: failed to find default audio output device\n"); + fprintf(stderr, "gsr error: failed to find default audio output device\n"); return false; } @@ -197,7 +197,7 @@ static pa_handle* pa_sound_device_new(const char *server, const int buffer_size = attr->fragsize; void *buffer = malloc(buffer_size); if(!buffer) { - fprintf(stderr, "Error: failed to allocate buffer for audio\n"); + fprintf(stderr, "gsr error: failed to allocate buffer for audio\n"); *rerror = -1; return NULL; } @@ -426,7 +426,7 @@ int sound_device_get_by_name(SoundDevice *device, const char *device_name, const int error = 0; pa_handle *handle = pa_sound_device_new(nullptr, description, device_name, description, &ss, &buffer_attr, &error); if(!handle) { - fprintf(stderr, "Error: pa_sound_device_new() failed: %s. Audio input device %s might not be valid\n", pa_strerror(error), device_name); + fprintf(stderr, "gsr error: pa_sound_device_new() failed: %s. Audio input device %s might not be valid\n", pa_strerror(error), device_name); return -1; } diff --git a/src/utils.c b/src/utils.c index 9b4a4b6..c1d399a 100644 --- a/src/utils.c +++ b/src/utils.c @@ -19,6 +19,8 @@ #include <libavcodec/avcodec.h> #include <libavutil/hwcontext_vaapi.h> +#define DRM_NUM_BUF_ATTRS 4 + double clock_get_monotonic_seconds(void) { struct timespec ts; ts.tv_sec = 0; @@ -108,7 +110,7 @@ void for_each_active_monitor_output_x11_not_cached(Display *display, active_moni // but gpu screen recorder captures the drm framebuffer instead of x11 api. This drm framebuffer which doesn't increase in size when using xrandr scaling. // Maybe a better option would be to get the drm crtc size instead. 
const XRRModeInfo *mode_info = get_mode_info(screen_res, crt_info->mode); - if(mode_info && out_info->nameLen < (int)sizeof(display_name)) { + if(mode_info) { snprintf(display_name, sizeof(display_name), "%.*s", (int)out_info->nameLen, out_info->name); const gsr_monitor_rotation rotation = x11_rotation_to_gsr_rotation(crt_info->rotation); const vec2i monitor_size = get_monitor_size_rotated(mode_info->width, mode_info->height, rotation); @@ -150,21 +152,22 @@ int get_connector_type_by_name(const char *name) { return -1; } -drm_connector_type_count* drm_connector_types_get_index(drm_connector_type_count *type_counts, int *num_type_counts, int connector_type) { - for(int i = 0; i < *num_type_counts; ++i) { - if(type_counts[i].type == connector_type) - return &type_counts[i]; +int get_connector_type_id_by_name(const char *name) { + int len = strlen(name); + int num_start = 0; + for(int i = len - 1; i >= 0; --i) { + const bool is_num = name[i] >= '0' && name[i] <= '9'; + if(!is_num) { + num_start = i + 1; + break; + } } - if(*num_type_counts == CONNECTOR_TYPE_COUNTS) - return NULL; + const int num_len = len - num_start; + if(num_len <= 0) + return -1; - const int index = *num_type_counts; - type_counts[index].type = connector_type; - type_counts[index].count = 0; - type_counts[index].count_active = 0; - ++*num_type_counts; - return &type_counts[index]; + return atoi(name + num_start); } uint32_t monitor_identifier_from_type_and_count(int monitor_type_index, int monitor_type_count) { @@ -195,9 +198,6 @@ static void for_each_active_monitor_output_drm(const char *card_path, active_mon drmSetClientCap(fd, DRM_CLIENT_CAP_ATOMIC, 1); - drm_connector_type_count type_counts[CONNECTOR_TYPE_COUNTS]; - int num_type_counts = 0; - char display_name[256]; drmModeResPtr resources = drmModeGetResources(fd); if(resources) { @@ -206,35 +206,29 @@ static void for_each_active_monitor_output_drm(const char *card_path, active_mon if(!connector) continue; - drm_connector_type_count 
*connector_type = drm_connector_types_get_index(type_counts, &num_type_counts, connector->connector_type); - const char *connection_name = drmModeGetConnectorTypeName(connector->connector_type); - const int connection_name_len = strlen(connection_name); - if(connector_type) - ++connector_type->count; - if(connector->connection != DRM_MODE_CONNECTED) { drmModeFreeConnector(connector); continue; } - if(connector_type) - ++connector_type->count_active; - uint64_t crtc_id = 0; connector_get_property_by_name(fd, connector, "CRTC_ID", &crtc_id); drmModeCrtcPtr crtc = drmModeGetCrtc(fd, crtc_id); - if(connector_type && crtc_id > 0 && crtc && connection_name_len + 5 < (int)sizeof(display_name)) { - const int display_name_len = snprintf(display_name, sizeof(display_name), "%s-%d", connection_name, connector_type->count); + const char *connection_name = drmModeGetConnectorTypeName(connector->connector_type); + + if(connection_name && crtc_id > 0 && crtc) { const int connector_type_index_name = get_connector_type_by_name(display_name); - gsr_monitor monitor = { + const int display_name_len = snprintf(display_name, sizeof(display_name), "%s-%u", connection_name, connector->connector_type_id); + + const gsr_monitor monitor = { .name = display_name, .name_len = display_name_len, .pos = { .x = crtc->x, .y = crtc->y }, .size = { .x = (int)crtc->width, .y = (int)crtc->height }, .connector_id = connector->connector_id, .rotation = GSR_MONITOR_ROT_0, - .monitor_identifier = connector_type_index_name != -1 ? monitor_identifier_from_type_and_count(connector_type_index_name, connector_type->count_active) : 0 + .monitor_identifier = connector_type_index_name != -1 ? 
monitor_identifier_from_type_and_count(connector_type_index_name, connector->connector_type_id) : 0 }; callback(&monitor, userdata); } @@ -516,6 +510,41 @@ int create_directory_recursive(char *path) { } void setup_dma_buf_attrs(intptr_t *img_attr, uint32_t format, uint32_t width, uint32_t height, const int *fds, const uint32_t *offsets, const uint32_t *pitches, const uint64_t *modifiers, int num_planes, bool use_modifier) { + const uint32_t plane_fd_attrs[DRM_NUM_BUF_ATTRS] = { + EGL_DMA_BUF_PLANE0_FD_EXT, + EGL_DMA_BUF_PLANE1_FD_EXT, + EGL_DMA_BUF_PLANE2_FD_EXT, + EGL_DMA_BUF_PLANE3_FD_EXT + }; + + const uint32_t plane_offset_attrs[DRM_NUM_BUF_ATTRS] = { + EGL_DMA_BUF_PLANE0_OFFSET_EXT, + EGL_DMA_BUF_PLANE1_OFFSET_EXT, + EGL_DMA_BUF_PLANE2_OFFSET_EXT, + EGL_DMA_BUF_PLANE3_OFFSET_EXT + }; + + const uint32_t plane_pitch_attrs[DRM_NUM_BUF_ATTRS] = { + EGL_DMA_BUF_PLANE0_PITCH_EXT, + EGL_DMA_BUF_PLANE1_PITCH_EXT, + EGL_DMA_BUF_PLANE2_PITCH_EXT, + EGL_DMA_BUF_PLANE3_PITCH_EXT + }; + + const uint32_t plane_modifier_lo_attrs[DRM_NUM_BUF_ATTRS] = { + EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT, + EGL_DMA_BUF_PLANE1_MODIFIER_LO_EXT, + EGL_DMA_BUF_PLANE2_MODIFIER_LO_EXT, + EGL_DMA_BUF_PLANE3_MODIFIER_LO_EXT + }; + + const uint32_t plane_modifier_hi_attrs[DRM_NUM_BUF_ATTRS] = { + EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT, + EGL_DMA_BUF_PLANE1_MODIFIER_HI_EXT, + EGL_DMA_BUF_PLANE2_MODIFIER_HI_EXT, + EGL_DMA_BUF_PLANE3_MODIFIER_HI_EXT + }; + size_t img_attr_index = 0; img_attr[img_attr_index++] = EGL_LINUX_DRM_FOURCC_EXT; @@ -527,79 +556,23 @@ void setup_dma_buf_attrs(intptr_t *img_attr, uint32_t format, uint32_t width, ui img_attr[img_attr_index++] = EGL_HEIGHT; img_attr[img_attr_index++] = height; - if(num_planes >= 1) { - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE0_FD_EXT; - img_attr[img_attr_index++] = fds[0]; + assert(num_planes <= DRM_NUM_BUF_ATTRS); + for(int i = 0; i < num_planes; ++i) { + img_attr[img_attr_index++] = plane_fd_attrs[i]; + img_attr[img_attr_index++] = fds[i]; - 
img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE0_OFFSET_EXT; - img_attr[img_attr_index++] = offsets[0]; + img_attr[img_attr_index++] = plane_offset_attrs[i]; + img_attr[img_attr_index++] = offsets[i]; - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE0_PITCH_EXT; - img_attr[img_attr_index++] = pitches[0]; + img_attr[img_attr_index++] = plane_pitch_attrs[i]; + img_attr[img_attr_index++] = pitches[i]; if(use_modifier) { - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT; - img_attr[img_attr_index++] = modifiers[0] & 0xFFFFFFFFULL; + img_attr[img_attr_index++] = plane_modifier_lo_attrs[i]; + img_attr[img_attr_index++] = modifiers[i] & 0xFFFFFFFFULL; - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT; - img_attr[img_attr_index++] = modifiers[0] >> 32ULL; - } - } - - if(num_planes >= 2) { - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE1_FD_EXT; - img_attr[img_attr_index++] = fds[1]; - - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE1_OFFSET_EXT; - img_attr[img_attr_index++] = offsets[1]; - - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE1_PITCH_EXT; - img_attr[img_attr_index++] = pitches[1]; - - if(use_modifier) { - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE1_MODIFIER_LO_EXT; - img_attr[img_attr_index++] = modifiers[1] & 0xFFFFFFFFULL; - - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE1_MODIFIER_HI_EXT; - img_attr[img_attr_index++] = modifiers[1] >> 32ULL; - } - } - - if(num_planes >= 3) { - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE2_FD_EXT; - img_attr[img_attr_index++] = fds[2]; - - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE2_OFFSET_EXT; - img_attr[img_attr_index++] = offsets[2]; - - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE2_PITCH_EXT; - img_attr[img_attr_index++] = pitches[2]; - - if(use_modifier) { - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE2_MODIFIER_LO_EXT; - img_attr[img_attr_index++] = modifiers[2] & 0xFFFFFFFFULL; - - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE2_MODIFIER_HI_EXT; - img_attr[img_attr_index++] 
= modifiers[2] >> 32ULL; - } - } - - if(num_planes >= 4) { - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE3_FD_EXT; - img_attr[img_attr_index++] = fds[3]; - - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE3_OFFSET_EXT; - img_attr[img_attr_index++] = offsets[3]; - - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE3_PITCH_EXT; - img_attr[img_attr_index++] = pitches[3]; - - if(use_modifier) { - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE3_MODIFIER_LO_EXT; - img_attr[img_attr_index++] = modifiers[3] & 0xFFFFFFFFULL; - - img_attr[img_attr_index++] = EGL_DMA_BUF_PLANE3_MODIFIER_HI_EXT; - img_attr[img_attr_index++] = modifiers[3] >> 32ULL; + img_attr[img_attr_index++] = plane_modifier_hi_attrs[i]; + img_attr[img_attr_index++] = modifiers[i] >> 32ULL; } } @@ -607,33 +580,6 @@ void setup_dma_buf_attrs(intptr_t *img_attr, uint32_t format, uint32_t width, ui assert(img_attr_index <= 44); } -static VADisplay video_codec_context_get_vaapi_display(AVCodecContext *video_codec_context) { - AVBufferRef *hw_frames_ctx = video_codec_context->hw_frames_ctx; - if(!hw_frames_ctx) - return NULL; - - AVHWFramesContext *hw_frame_context = (AVHWFramesContext*)hw_frames_ctx->data; - AVHWDeviceContext *device_context = (AVHWDeviceContext*)hw_frame_context->device_ctx; - if(device_context->type != AV_HWDEVICE_TYPE_VAAPI) - return NULL; - - AVVAAPIDeviceContext *vactx = device_context->hwctx; - return vactx->display; -} - -bool video_codec_context_is_vaapi(AVCodecContext *video_codec_context) { - if(!video_codec_context) - return false; - - AVBufferRef *hw_frames_ctx = video_codec_context->hw_frames_ctx; - if(!hw_frames_ctx) - return false; - - AVHWFramesContext *hw_frame_context = (AVHWFramesContext*)hw_frames_ctx->data; - AVHWDeviceContext *device_context = (AVHWDeviceContext*)hw_frame_context->device_ctx; - return device_context->type == AV_HWDEVICE_TYPE_VAAPI; -} - vec2i scale_keep_aspect_ratio(vec2i from, vec2i to) { if(from.x == 0 || from.y == 0) return (vec2i){0, 0}; @@ -652,22 +598,12 
@@ vec2i scale_keep_aspect_ratio(vec2i from, vec2i to) { } unsigned int gl_create_texture(gsr_egl *egl, int width, int height, int internal_format, unsigned int format, int filter) { - float border_color[4] = { 0.0f, 0.0f, 0.0f, 1.0f }; - if(format == GL_RG) { // UV - border_color[0] = 0.5f; - border_color[1] = 0.5f; - border_color[2] = 0.0f; - border_color[3] = 1.0f; - } - unsigned int texture_id = 0; egl->glGenTextures(1, &texture_id); egl->glBindTexture(GL_TEXTURE_2D, texture_id); - egl->glTexImage2D(GL_TEXTURE_2D, 0, internal_format, width, height, 0, format, GL_UNSIGNED_BYTE, NULL); + //egl->glTexImage2D(GL_TEXTURE_2D, 0, internal_format, width, height, 0, format, GL_UNSIGNED_BYTE, NULL); + egl->glTexStorage2D(GL_TEXTURE_2D, 1, internal_format, width, height); - egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER); - egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER); - egl->glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, border_color); egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, filter); egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, filter); diff --git a/src/window/wayland.c b/src/window/wayland.c index b7907be..037c85f 100644 --- a/src/window/wayland.c +++ b/src/window/wayland.c @@ -339,24 +339,13 @@ static gsr_monitor_rotation wayland_transform_to_gsr_rotation(int32_t rot) { static void gsr_window_wayland_for_each_active_monitor_output_cached(const gsr_window *window, active_monitor_callback callback, void *userdata) { const gsr_window_wayland *self = window->priv; - drm_connector_type_count type_counts[CONNECTOR_TYPE_COUNTS]; - int num_type_counts = 0; - for(int i = 0; i < self->num_outputs; ++i) { const gsr_wayland_output *output = &self->outputs[i]; if(!output->name) continue; const int connector_type_index = get_connector_type_by_name(output->name); - drm_connector_type_count *connector_type = NULL; - if(connector_type_index != -1) - connector_type = 
drm_connector_types_get_index(type_counts, &num_type_counts, connector_type_index); - - if(connector_type) { - ++connector_type->count; - ++connector_type->count_active; - } - + const int connector_type_id = get_connector_type_id_by_name(output->name); const gsr_monitor monitor = { .name = output->name, .name_len = strlen(output->name), @@ -364,7 +353,7 @@ static void gsr_window_wayland_for_each_active_monitor_output_cached(const gsr_w .size = { .x = output->size.x, .y = output->size.y }, .connector_id = 0, .rotation = wayland_transform_to_gsr_rotation(output->transform), - .monitor_identifier = connector_type ? monitor_identifier_from_type_and_count(connector_type_index, connector_type->count_active) : 0 + .monitor_identifier = (connector_type_index != -1 && connector_type_id != -1) ? monitor_identifier_from_type_and_count(connector_type_index, connector_type_id) : 0 }; callback(&monitor, userdata); } diff --git a/src/window_texture.c b/src/window_texture.c index 4846bdc..ba7212a 100644 --- a/src/window_texture.c +++ b/src/window_texture.c @@ -85,10 +85,6 @@ int window_texture_on_resize(WindowTexture *self) { texture_id = self->texture_id; } - const float border_color[4] = { 0.0f, 0.0f, 0.0f, 0.0f }; - self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER); - self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER); - self->egl->glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, border_color); self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); self->egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); |