Diffstat (limited to 'src/main.cpp')
 src/main.cpp | 256
 1 file changed, 166 insertions(+), 90 deletions(-)
diff --git a/src/main.cpp b/src/main.cpp
index 1c5024b..1d479fe 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -101,7 +101,8 @@ enum class PixelFormat {
enum class FramerateMode {
CONSTANT,
- VARIABLE
+ VARIABLE,
+ CONTENT
};
static int x11_error_handler(Display*, XErrorEvent*) {
@@ -327,8 +328,8 @@ static AVCodecContext* create_audio_codec_context(int fps, AudioCodec audio_code
static AVCodecContext *create_video_codec_context(AVPixelFormat pix_fmt,
VideoQuality video_quality,
- int fps, const AVCodec *codec, bool is_livestream, gsr_gpu_vendor vendor, FramerateMode framerate_mode,
- bool hdr, gsr_color_range color_range) {
+ int fps, const AVCodec *codec, bool low_latency, gsr_gpu_vendor vendor, FramerateMode framerate_mode,
+ bool hdr, gsr_color_range color_range, float keyint) {
AVCodecContext *codec_context = avcodec_alloc_context3(codec);
@@ -346,15 +347,15 @@ static AVCodecContext *create_video_codec_context(AVPixelFormat pix_fmt,
codec_context->framerate.den = 1;
codec_context->sample_aspect_ratio.num = 0;
codec_context->sample_aspect_ratio.den = 0;
- // High values reduce file size but increases time it takes to seek
- if(is_livestream) {
+ if(low_latency) {
codec_context->flags |= (AV_CODEC_FLAG_CLOSED_GOP | AV_CODEC_FLAG_LOW_DELAY);
codec_context->flags2 |= AV_CODEC_FLAG2_FAST;
//codec_context->gop_size = std::numeric_limits<int>::max();
//codec_context->keyint_min = std::numeric_limits<int>::max();
- codec_context->gop_size = fps * 2;
+ codec_context->gop_size = fps * keyint;
} else {
- codec_context->gop_size = fps * 2;
+ // High values reduce file size but increase the time it takes to seek
+ codec_context->gop_size = fps * keyint;
}
codec_context->max_b_frames = 0;
codec_context->pix_fmt = pix_fmt;
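The hunk above replaces the fixed two-second GOP with fps * keyint, so the spacing between forced keyframes (in frames) follows both the target frame rate and the new -keyint value; gop_size is an int, so the product is truncated. A standalone sketch of that relationship, not part of the project's code:

#include <cstdio>

// Frames between forced keyframes for a given frame rate and keyframe
// interval in seconds; mirrors codec_context->gop_size = fps * keyint above.
static int gop_size_for(int fps, float keyint_seconds) {
    return static_cast<int>(fps * keyint_seconds);
}

int main() {
    std::printf("%d\n", gop_size_for(60, 2.0f));  // default -keyint 2.0 at 60 fps: keyframe every 120 frames
    std::printf("%d\n", gop_size_for(144, 0.5f)); // -keyint 0.5 at 144 fps: keyframe every 72 frames
    return 0;
}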
@@ -505,7 +506,7 @@ static bool vaapi_create_codec_context(AVCodecContext *video_codec_context, cons
static bool check_if_codec_valid_for_hardware(const AVCodec *codec, gsr_gpu_vendor vendor, const char *card_path) {
// Do not use AV_PIX_FMT_CUDA because we dont want to do full check with hardware context
- AVCodecContext *codec_context = create_video_codec_context(vendor == GSR_GPU_VENDOR_NVIDIA ? AV_PIX_FMT_YUV420P : AV_PIX_FMT_VAAPI, VideoQuality::VERY_HIGH, 60, codec, false, vendor, FramerateMode::CONSTANT, false, GSR_COLOR_RANGE_LIMITED);
+ AVCodecContext *codec_context = create_video_codec_context(vendor == GSR_GPU_VENDOR_NVIDIA ? AV_PIX_FMT_YUV420P : AV_PIX_FMT_VAAPI, VideoQuality::VERY_HIGH, 60, codec, false, vendor, FramerateMode::CONSTANT, false, GSR_COLOR_RANGE_LIMITED, 2);
if(!codec_context)
return false;
@@ -792,6 +793,7 @@ static void open_video(AVCodecContext *codec_context, VideoQuality video_quality
if(codec_context->codec_id == AV_CODEC_ID_H264) {
av_dict_set(&options, "profile", "high", 0);
+ // Removed because it causes stutter in games for some people
//av_dict_set_int(&options, "quality", 5, 0); // quality preset
} else if(codec_context->codec_id == AV_CODEC_ID_AV1) {
av_dict_set(&options, "profile", "main", 0); // TODO: use professional instead?
@@ -822,7 +824,7 @@ static void open_video(AVCodecContext *codec_context, VideoQuality video_quality
static void usage_header() {
const bool inside_flatpak = getenv("FLATPAK_ID") != NULL;
const char *program_name = inside_flatpak ? "flatpak run --command=gpu-screen-recorder com.dec05eba.gpu_screen_recorder" : "gpu-screen-recorder";
- fprintf(stderr, "usage: %s -w <window_id|monitor|focused> [-c <container_format>] [-s WxH] -f <fps> [-a <audio_input>] [-q <quality>] [-r <replay_buffer_size_sec>] [-k h264|hevc|hevc_hdr|av1|av1_hdr] [-ac aac|opus|flac] [-ab <bitrate>] [-oc yes|no] [-fm cfr|vfr] [-cr limited|full] [-v yes|no] [-h|--help] [-o <output_file>] [-mf yes|no] [-sc <script_path>] [-cursor yes|no]\n", program_name);
+ fprintf(stderr, "usage: %s -w <window_id|monitor|focused> [-c <container_format>] [-s WxH] -f <fps> [-a <audio_input>] [-q <quality>] [-r <replay_buffer_size_sec>] [-k h264|hevc|hevc_hdr|av1|av1_hdr] [-ac aac|opus|flac] [-ab <bitrate>] [-oc yes|no] [-fm cfr|vfr|content] [-cr limited|full] [-mf yes|no] [-sc <script_path>] [-cursor yes|no] [-keyint <value>] [-o <output_file>] [-v yes|no] [-h|--help]\n", program_name);
}
static void usage_full() {
@@ -843,11 +845,15 @@ static void usage_full() {
fprintf(stderr, "\n");
fprintf(stderr, " -s The size (area) to record at in the format WxH, for example 1920x1080. This option is only supported (and required) when -w is \"focused\".\n");
fprintf(stderr, "\n");
- fprintf(stderr, " -f Framerate to record at.\n");
+ fprintf(stderr, " -f Frame rate to record at. Recording will only capture frames at this target frame rate.\n");
+ fprintf(stderr, " For constant frame rate mode this option is the frame rate every frame will be captured at and if the capture frame rate is below this target frame rate then the frames will be duplicated.\n");
+ fprintf(stderr, " For variable frame rate mode this option is the max frame rate and if the capture frame rate is below this target frame rate then frames will not be duplicated.\n");
+ fprintf(stderr, " Content frame rate is similar to variable frame rate mode, except the frame rate will match the frame rate of the captured content when possible, but not capturing above the frame rate set in this -f option.\n");
fprintf(stderr, "\n");
fprintf(stderr, " -a Audio device to record from (pulse audio device). Can be specified multiple times. Each time this is specified a new audio track is added for the specified audio device.\n");
fprintf(stderr, " A name can be given to the audio input device by prefixing the audio input with <name>/, for example \"dummy/alsa_output.pci-0000_00_1b.0.analog-stereo.monitor\".\n");
fprintf(stderr, " Multiple audio devices can be merged into one audio track by using \"|\" as a separator into one -a argument, for example: -a \"alsa_output1|alsa_output2\".\n");
+ fprintf(stderr, " If the audio device is an empty string then the audio device is ignored.\n");
fprintf(stderr, " Optional, no audio track is added by default.\n");
fprintf(stderr, "\n");
fprintf(stderr, " -q Video quality. Should be either 'medium', 'high', 'very_high' or 'ultra'. 'high' is the recommended option when live streaming or when you have a slower harddrive.\n");
@@ -857,9 +863,8 @@ static void usage_full() {
fprintf(stderr, " and the video will only be saved when the gpu-screen-recorder is closed. This feature is similar to Nvidia's instant replay feature.\n");
fprintf(stderr, " This option has be between 5 and 1200. Note that the replay buffer size will not always be precise, because of keyframes. Optional, disabled by default.\n");
fprintf(stderr, "\n");
- fprintf(stderr, " -k Video codec to use. Should be either 'auto', 'h264', 'hevc', 'av1', 'hevc_hdr' or 'av1_hdr'. Defaults to 'auto' which defaults to 'hevc' on AMD/Nvidia and 'h264' on intel.\n");
+ fprintf(stderr, " -k Video codec to use. Should be either 'auto', 'h264', 'hevc', 'av1', 'hevc_hdr' or 'av1_hdr'. Defaults to 'auto' which defaults to 'h264'.\n");
fprintf(stderr, " Forcefully set to 'h264' if the file container type is 'flv'.\n");
- fprintf(stderr, " Forcefully set to 'hevc' on AMD/intel if video codec is 'h264' and if the file container type is 'mkv'.\n");
fprintf(stderr, " 'hevc_hdr' and 'av1_hdr' option is not available on X11.\n");
fprintf(stderr, " Note: hdr metadata is not included in the video when recording with 'hevc_hdr'/'av1_hdr' because of bugs in AMD, Intel and NVIDIA drivers (amazin', they are all bugged).\n");
fprintf(stderr, "\n");
@@ -874,17 +879,14 @@ static void usage_full() {
fprintf(stderr, " is dropped when you record a game. Only needed if you are recording a game that is bottlenecked by GPU. The same issue exists on Wayland but overclocking is not possible on Wayland.\n");
fprintf(stderr, " Works only if your have \"Coolbits\" set to \"12\" in NVIDIA X settings, see README for more information. Note! use at your own risk! Optional, disabled by default.\n");
fprintf(stderr, "\n");
- fprintf(stderr, " -fm Framerate mode. Should be either 'cfr' or 'vfr'. Defaults to 'vfr'.\n");
+ fprintf(stderr, " -fm Framerate mode. Should be either 'cfr' (constant frame rate), 'vfr' (variable frame rate) or 'content'. Defaults to 'vfr'.\n");
+ fprintf(stderr, " 'vfr' is recommended for recording for less issue with very high system load but some applications such as video editors may not support it properly.\n");
+ fprintf(stderr, " 'content' is currently only supported when recording a single window, on X11. The 'content' option matches the recording frame rate to the captured content.\n");
fprintf(stderr, "\n");
fprintf(stderr, " -cr Color range. Should be either 'limited' (aka mpeg) or 'full' (aka jpeg). Defaults to 'limited'.\n");
fprintf(stderr, " Limited color range means that colors are in range 16-235 (4112-60395 for hdr) while full color range means that colors are in range 0-255 (0-65535 for hdr).\n");
fprintf(stderr, " Note that some buggy video players (such as vlc) are unable to correctly display videos in full color range.\n");
fprintf(stderr, "\n");
- fprintf(stderr, " -v Prints per second, fps updates. Optional, set to 'yes' by default.\n");
- fprintf(stderr, "\n");
- fprintf(stderr, " -h, --help\n");
- fprintf(stderr, " Show this help.\n");
- fprintf(stderr, "\n");
fprintf(stderr, " -mf Organise replays in folders based on the current date.\n");
fprintf(stderr, "\n");
fprintf(stderr, " -sc Run a script on the saved video file (non-blocking). The first argument to the script is the filepath to the saved video file and the second argument is the recording type (either \"regular\" or \"replay\").\n");
@@ -892,6 +894,11 @@ static void usage_full() {
fprintf(stderr, "\n");
fprintf(stderr, " -cursor\n");
fprintf(stderr, " Record cursor. Defaults to 'yes'.\n");
+ fprintf(stderr, " -keyint\n");
+ fprintf(stderr, " Specifies the keyframe interval in seconds, the max amount of time to wait to generate a keyframe. Keyframes can be generated more often than this.\n");
+ fprintf(stderr, " This also affects seeking in the video and may affect how the replay video is cut. If this is set to 10 for example then you can only seek in 10-second chunks in the video.\n");
+ fprintf(stderr, " Setting this to a higher value reduces the video file size if you are ok with the previously described downside. This option is expected to be a floating point number.\n");
+ fprintf(stderr, " By default this value is set to 2.0.\n");
fprintf(stderr, "\n");
fprintf(stderr, " --list-supported-video-codecs\n");
fprintf(stderr, " List supported video codecs and exits. Prints h264, hevc, hevc_hdr, av1 and av1_hdr (if supported).\n");
@@ -901,6 +908,11 @@ static void usage_full() {
fprintf(stderr, " In replay mode this has to be a directory instead of a file.\n");
fprintf(stderr, " The directory to the file is created (recursively) if it doesn't already exist.\n");
fprintf(stderr, "\n");
+ fprintf(stderr, " -v Prints per second, fps updates. Optional, set to 'yes' by default.\n");
+ fprintf(stderr, "\n");
+ fprintf(stderr, " -h, --help\n");
+ fprintf(stderr, " Show this help.\n");
+ fprintf(stderr, "\n");
fprintf(stderr, "NOTES:\n");
fprintf(stderr, " Send signal SIGINT to gpu-screen-recorder (Ctrl+C, or killall -SIGINT gpu-screen-recorder) to stop and save the recording. When in replay mode this stops recording without saving.\n");
fprintf(stderr, " Send signal SIGUSR1 to gpu-screen-recorder (killall -SIGUSR1 gpu-screen-recorder) to save a replay (when in replay mode).\n");
@@ -1209,6 +1221,7 @@ static void save_replay_async(AVCodecContext *video_codec_context, int video_str
av_packet.pts = save_replay_packets[i]->data.pts;
av_packet.dts = save_replay_packets[i]->data.pts;
av_packet.flags = save_replay_packets[i]->data.flags;
+ //av_packet.duration = save_replay_packets[i]->data.duration;
AVStream *stream = video_stream;
AVCodecContext *codec_context = video_codec_context;
@@ -1285,6 +1298,14 @@ static bool is_livestream_path(const char *str) {
return true;
else if((len >= 7 && memcmp(str, "rtmp://", 7) == 0) || (len >= 8 && memcmp(str, "rtmps://", 8) == 0))
return true;
+ else if((len >= 7 && memcmp(str, "rtsp://", 7) == 0))
+ return true;
+ else if((len >= 6 && memcmp(str, "srt://", 6) == 0))
+ return true;
+ else if((len >= 6 && memcmp(str, "tcp://", 6) == 0))
+ return true;
+ else if((len >= 6 && memcmp(str, "udp://", 6) == 0))
+ return true;
else
return false;
}
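The additions above extend livestream detection to more URL schemes. A minimal standalone sketch of the same prefix test, covering only the schemes visible in this hunk (the full function also contains earlier checks not shown here):

#include <cstring>
#include <cstddef>

// True if the output path looks like a streaming URL rather than a file path.
static bool looks_like_stream_url(const char *str) {
    static const char *schemes[] = { "rtmp://", "rtmps://", "rtsp://", "srt://", "tcp://", "udp://" };
    const size_t len = strlen(str);
    for(const char *scheme : schemes) {
        const size_t scheme_len = strlen(scheme);
        if(len >= scheme_len && memcmp(str, scheme, scheme_len) == 0)
            return true;
    }
    return false;
}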
@@ -1435,8 +1456,8 @@ static void list_supported_video_codecs() {
card_path[0] = '\0';
if(wayland || egl.gpu_info.vendor != GSR_GPU_VENDOR_NVIDIA) {
// TODO: Allow specifying another card, and in other places
- if(!gsr_get_valid_card_path(&egl, card_path)) {
- fprintf(stderr, "Error: no /dev/dri/cardX device found. If you are running GPU Screen Recorder with prime-run then try running without it\n");
+ if(!gsr_get_valid_card_path(&egl, card_path, false)) {
+ fprintf(stderr, "Error: no /dev/dri/cardX device found. If you are running GPU Screen Recorder with prime-run then try running without it. Also make sure that you have at least one connected monitor or record a single window instead on X11\n");
_exit(2);
}
}
@@ -1458,7 +1479,7 @@ static void list_supported_video_codecs() {
XCloseDisplay(dpy);
}
-static gsr_capture* create_capture_impl(const char *window_str, const char *screen_region, bool wayland, gsr_gpu_info gpu_inf, gsr_egl &egl, int fps, bool overclock, VideoCodec video_codec, gsr_color_range color_range, bool record_cursor) {
+static gsr_capture* create_capture_impl(const char *window_str, const char *screen_region, bool wayland, gsr_egl &egl, int fps, bool overclock, VideoCodec video_codec, gsr_color_range color_range, bool record_cursor, bool track_damage) {
vec2i region_size = { 0, 0 };
Window src_window_id = None;
bool follow_focused = false;
@@ -1497,6 +1518,7 @@ static gsr_capture* create_capture_impl(const char *window_str, const char *scre
window_str = first_output.output_name;
} else {
fprintf(stderr, "Error: no available output found\n");
+ _exit(1);
}
}
@@ -1528,7 +1550,6 @@ static gsr_capture* create_capture_impl(const char *window_str, const char *scre
gsr_capture_kms_cuda_params kms_params;
kms_params.egl = &egl;
kms_params.display_to_capture = window_str;
- kms_params.gpu_inf = gpu_inf;
kms_params.hdr = video_codec_is_hdr(video_codec);
kms_params.color_range = color_range;
kms_params.record_cursor = record_cursor;
@@ -1569,7 +1590,6 @@ static gsr_capture* create_capture_impl(const char *window_str, const char *scre
gsr_capture_kms_vaapi_params kms_params;
kms_params.egl = &egl;
kms_params.display_to_capture = window_str;
- kms_params.gpu_inf = gpu_inf;
kms_params.hdr = video_codec_is_hdr(video_codec);
kms_params.color_range = color_range;
kms_params.record_cursor = record_cursor;
@@ -1602,6 +1622,7 @@ static gsr_capture* create_capture_impl(const char *window_str, const char *scre
xcomposite_params.base.region_size = region_size;
xcomposite_params.base.color_range = color_range;
xcomposite_params.base.record_cursor = record_cursor;
+ xcomposite_params.base.track_damage = track_damage;
capture = gsr_capture_xcomposite_vaapi_create(&xcomposite_params);
if(!capture)
_exit(1);
@@ -1615,6 +1636,7 @@ static gsr_capture* create_capture_impl(const char *window_str, const char *scre
xcomposite_params.base.region_size = region_size;
xcomposite_params.base.color_range = color_range;
xcomposite_params.base.record_cursor = record_cursor;
+ xcomposite_params.base.track_damage = track_damage;
xcomposite_params.overclock = overclock;
capture = gsr_capture_xcomposite_cuda_create(&xcomposite_params);
if(!capture)
@@ -1653,6 +1675,10 @@ int main(int argc, char **argv) {
// nvidia doesn't support vaapi and nvidia-vaapi-driver doesn't support encoding yet.
// Let vaapi find the match vaapi driver instead of forcing a specific one.
unsetenv("LIBVA_DRIVER_NAME");
+ // Some people set this to force all applications to vsync on nvidia, but this makes eglSwapBuffers never return.
+ unsetenv("__GL_SYNC_TO_VBLANK");
+ // Same as above, but for amd/intel
+ unsetenv("vblank_mode");
if(argc <= 1)
usage_full();
@@ -1687,6 +1713,8 @@ int main(int argc, char **argv) {
{ "-sc", Arg { {}, true, false } },
{ "-cr", Arg { {}, true, false } },
{ "-cursor", Arg { {}, true, false } },
+ { "-gopm", Arg { {}, true, false } }, // deprecated, used keyint instead
+ { "-keyint", Arg { {}, true, false } },
};
for(int i = 1; i < argc; i += 2) {
@@ -1716,7 +1744,7 @@ int main(int argc, char **argv) {
}
}
- VideoCodec video_codec = VideoCodec::HEVC;
+ VideoCodec video_codec = VideoCodec::H264;
const char *video_codec_to_use = args["-k"].value();
if(!video_codec_to_use)
video_codec_to_use = "auto";
@@ -1767,6 +1795,33 @@ int main(int argc, char **argv) {
}
}
+ float keyint = 2.0;
+ const char *gopm_str = args["-gopm"].value();
+ const char *keyint_str = args["-keyint"].value();
+ if(keyint_str) {
+ if(sscanf(keyint_str, "%f", &keyint) != 1) {
+ fprintf(stderr, "Error: -keyint argument \"%s\" is not a floating point number\n", keyint_str);
+ usage();
+ }
+
+ if(keyint < 0) {
+ fprintf(stderr, "Error: -keyint is expected to be 0 or larger\n");
+ usage();
+ }
+ } else if(gopm_str) {
+ if(sscanf(gopm_str, "%f", &keyint) != 1) {
+ fprintf(stderr, "Error: -gopm argument \"%s\" is not a floating point number\n", gopm_str);
+ usage();
+ }
+
+ if(keyint < 0) {
+ fprintf(stderr, "Error: -gopm is expected to be 0 or larger\n");
+ usage();
+ }
+
+ fprintf(stderr, "Warning: -gopm argument is deprecated, use -keyint instead\n");
+ }
+
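The block above reads -keyint, falling back to the deprecated -gopm, and rejects values that are not non-negative floating point numbers. A condensed sketch of the same precedence and validation, using a hypothetical parse_keyint helper that does not exist in the source:

#include <cstdio>
#include <cstdlib>

// Hypothetical helper: prefer -keyint over the deprecated -gopm, default to
// 2.0 seconds when neither is given, and reject negative or non-numeric input.
static float parse_keyint(const char *keyint_str, const char *gopm_str, float default_value = 2.0f) {
    const char *str = keyint_str ? keyint_str : gopm_str;
    if(!str)
        return default_value;

    float value = 0.0f;
    if(sscanf(str, "%f", &value) != 1 || value < 0.0f) {
        fprintf(stderr, "Error: keyframe interval \"%s\" must be a non-negative number\n", str);
        exit(1);
    }

    if(!keyint_str)
        fprintf(stderr, "Warning: -gopm is deprecated, use -keyint instead\n");
    return value;
}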
bool overclock = false;
const char *overclock_str = args["-oc"].value();
if(!overclock_str)
@@ -1903,11 +1958,11 @@ int main(int argc, char **argv) {
if(fps < 1)
fps = 1;
+ VideoQuality quality = VideoQuality::VERY_HIGH;
const char *quality_str = args["-q"].value();
if(!quality_str)
quality_str = "very_high";
- VideoQuality quality;
if(strcmp(quality_str, "medium") == 0) {
quality = VideoQuality::MEDIUM;
} else if(strcmp(quality_str, "high") == 0) {
@@ -1929,7 +1984,7 @@ int main(int argc, char **argv) {
fprintf(stderr, "Error: option -r has to be between 5 and 1200, was: %s\n", replay_buffer_size_secs_str);
_exit(1);
}
- replay_buffer_size_secs += 3; // Add a few seconds to account of lost packets because of non-keyframe packets skipped
+ replay_buffer_size_secs += std::ceil(keyint); // Add a few seconds to account for lost packets because non-keyframe packets are skipped
}
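The slack added to the replay buffer now scales with the keyframe interval: the buffer can only be trimmed at keyframes, so without extra headroom the oldest requested seconds could be lost when the non-keyframe packets at the front are discarded. For example, -r 60 with -keyint 2.5 keeps roughly 60 + ceil(2.5) = 63 seconds of packets, so that after trimming back to the previous keyframe at least the requested 60 seconds remain.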
const char *window_str = strdup(args["-w"].value());
@@ -1977,8 +2032,8 @@ int main(int argc, char **argv) {
egl.card_path[0] = '\0';
if(wayland || egl.gpu_info.vendor != GSR_GPU_VENDOR_NVIDIA) {
// TODO: Allow specifying another card, and in other places
- if(!gsr_get_valid_card_path(&egl, egl.card_path)) {
- fprintf(stderr, "Error: no /dev/dri/cardX device found. If you are running GPU Screen Recorder with prime-run then try running without it\n");
+ if(!gsr_get_valid_card_path(&egl, egl.card_path, is_monitor_capture)) {
+ fprintf(stderr, "Error: no /dev/dri/cardX device found. If you are running GPU Screen Recorder with prime-run then try running without it. Also make sure that you have at least one connected monitor or record a single window instead on X11\n");
_exit(2);
}
}
@@ -1986,7 +2041,7 @@ int main(int argc, char **argv) {
// TODO: Fix constant framerate not working properly on amd/intel because capture framerate gets locked to the same framerate as
// game framerate, which doesn't work well when you need to encode multiple duplicate frames (AMD/Intel is slow at encoding!).
// It also appears to skip audio frames on nvidia wayland? why? that should be fine, but it causes video stuttering because of audio/video sync.
- FramerateMode framerate_mode;
+ FramerateMode framerate_mode = FramerateMode::VARIABLE;
const char *framerate_mode_str = args["-fm"].value();
if(!framerate_mode_str)
framerate_mode_str = "vfr";
@@ -1995,12 +2050,19 @@ int main(int argc, char **argv) {
framerate_mode = FramerateMode::CONSTANT;
} else if(strcmp(framerate_mode_str, "vfr") == 0) {
framerate_mode = FramerateMode::VARIABLE;
+ } else if(strcmp(framerate_mode_str, "content") == 0) {
+ framerate_mode = FramerateMode::CONTENT;
} else {
- fprintf(stderr, "Error: -fm should either be either 'cfr' or 'vfr', got: '%s'\n", framerate_mode_str);
+ fprintf(stderr, "Error: -fm should either be either 'cfr', 'vfr' or 'content', got: '%s'\n", framerate_mode_str);
usage();
}
- gsr_color_range color_range;
+ if(framerate_mode == FramerateMode::CONTENT && (wayland || is_monitor_capture)) {
+ fprintf(stderr, "Error: -fm 'content' is currently only supported on X11 and when capturing a single window.\n");
+ usage();
+ }
+
+ gsr_color_range color_range = GSR_COLOR_RANGE_LIMITED;
const char *color_range_str = args["-cr"].value();
if(!color_range_str)
color_range_str = "limited";
@@ -2068,14 +2130,18 @@ int main(int argc, char **argv) {
}
}
+ const bool is_output_piped = strcmp(filename, "/dev/stdout") == 0;
+
AVFormatContext *av_format_context;
// The output format is automatically guessed by the file extension
avformat_alloc_output_context2(&av_format_context, nullptr, container_format, filename);
if (!av_format_context) {
- if(container_format)
+ if(container_format) {
fprintf(stderr, "Error: Container format '%s' (argument -c) is not valid\n", container_format);
- else
- fprintf(stderr, "Error: Failed to deduce container format from file extension\n");
+ } else {
+ fprintf(stderr, "Error: Failed to deduce container format from file extension. Use the '-c' option to specify container format\n");
+ usage();
+ }
_exit(1);
}
@@ -2088,30 +2154,33 @@ int main(int argc, char **argv) {
file_extension = file_extension.substr(0, comma_index);
}
- const bool force_no_audio_offset = file_extension == "ts" || file_extension == "flv";
-
- if(egl.gpu_info.vendor != GSR_GPU_VENDOR_NVIDIA && file_extension == "mkv" && strcmp(video_codec_to_use, "h264") == 0) {
- video_codec_to_use = "hevc";
- video_codec = VideoCodec::HEVC;
- fprintf(stderr, "Warning: video codec was forcefully set to hevc because mkv container is used and mesa (AMD and Intel driver) does not support h264 in mkv files\n");
- }
+ const bool force_no_audio_offset = is_livestream || is_output_piped || (file_extension != "mp4" && file_extension != "mkv" && file_extension != "webm");
switch(audio_codec) {
case AudioCodec::AAC: {
+ if(file_extension == "webm") {
+ audio_codec_to_use = "opus";
+ audio_codec = AudioCodec::OPUS;
+ fprintf(stderr, "Warning: .webm files only support opus audio codec, changing audio codec from aac to opus\n");
+ }
break;
}
case AudioCodec::OPUS: {
// TODO: Also check mpegts?
- if(file_extension != "mp4" && file_extension != "mkv") {
+ if(file_extension != "mp4" && file_extension != "mkv" && file_extension != "webm") {
audio_codec_to_use = "aac";
audio_codec = AudioCodec::AAC;
- fprintf(stderr, "Warning: opus audio codec is only supported by .mp4 and .mkv files, falling back to aac instead\n");
+ fprintf(stderr, "Warning: opus audio codec is only supported by .mp4, .mkv and .webm files, falling back to aac instead\n");
}
break;
}
case AudioCodec::FLAC: {
// TODO: Also check mpegts?
- if(file_extension != "mp4" && file_extension != "mkv") {
+ if(file_extension == "webm") {
+ audio_codec_to_use = "opus";
+ audio_codec = AudioCodec::OPUS;
+ fprintf(stderr, "Warning: .webm files only support opus audio codec, changing audio codec from flac to opus\n");
+ } else if(file_extension != "mp4" && file_extension != "mkv") {
audio_codec_to_use = "aac";
audio_codec = AudioCodec::AAC;
fprintf(stderr, "Warning: flac audio codec is only supported by .mp4 and .mkv files, falling back to aac instead\n");
@@ -2129,47 +2198,47 @@ int main(int argc, char **argv) {
const bool video_codec_auto = strcmp(video_codec_to_use, "auto") == 0;
if(video_codec_auto) {
- if(egl.gpu_info.vendor == GSR_GPU_VENDOR_INTEL) {
- const AVCodec *h264_codec = find_h264_encoder(egl.gpu_info.vendor, egl.card_path);
- if(!h264_codec) {
- fprintf(stderr, "Info: using hevc encoder because a codec was not specified and your gpu does not support h264\n");
- video_codec_to_use = "hevc";
- video_codec = VideoCodec::HEVC;
- } else {
- fprintf(stderr, "Info: using h264 encoder because a codec was not specified\n");
- video_codec_to_use = "h264";
- video_codec = VideoCodec::H264;
- }
+ const AVCodec *h264_codec = find_h264_encoder(egl.gpu_info.vendor, egl.card_path);
+ if(!h264_codec) {
+ fprintf(stderr, "Info: using hevc encoder because a codec was not specified and your gpu does not support h264\n");
+ video_codec_to_use = "hevc";
+ video_codec = VideoCodec::HEVC;
} else {
- const AVCodec *hevc_codec = find_hevc_encoder(egl.gpu_info.vendor, egl.card_path);
-
- if(hevc_codec && fps > 60) {
- fprintf(stderr, "Warning: recording at higher fps than 60 with hevc might result in recording at a very low fps. If this happens, switch to h264 or av1\n");
- }
-
- // TODO: Default to h264 if resolution is around 1366x768 on AMD
-
- // hevc generally allows recording at a higher resolution than h264 on nvidia cards. On a gtx 1080 4k is the max resolution for h264 but for hevc it's 8k.
- // Another important info is that when recording at a higher fps than.. 60? hevc has very bad performance. For example when recording at 144 fps the fps drops to 1
- // while with h264 the fps doesn't drop.
- if(!hevc_codec) {
- fprintf(stderr, "Info: using h264 encoder because a codec was not specified and your gpu does not support hevc\n");
- video_codec_to_use = "h264";
- video_codec = VideoCodec::H264;
- } else {
- fprintf(stderr, "Info: using hevc encoder because a codec was not specified\n");
- video_codec_to_use = "hevc";
- video_codec = VideoCodec::HEVC;
- }
+ fprintf(stderr, "Info: using h264 encoder because a codec was not specified\n");
+ video_codec_to_use = "h264";
+ video_codec = VideoCodec::H264;
}
}
// TODO: Allow hevc, vp9 and av1 in (enhanced) flv (supported since ffmpeg 6.1)
const bool is_flv = strcmp(file_extension.c_str(), "flv") == 0;
- if(video_codec != VideoCodec::H264 && is_flv) {
- video_codec_to_use = "h264";
- video_codec = VideoCodec::H264;
- fprintf(stderr, "Warning: hevc/av1 is not compatible with flv, falling back to h264 instead.\n");
+ if(is_flv) {
+ if(video_codec != VideoCodec::H264) {
+ video_codec_to_use = "h264";
+ video_codec = VideoCodec::H264;
+ fprintf(stderr, "Warning: hevc/av1 is not compatible with flv, falling back to h264 instead.\n");
+ }
+
+ if(audio_codec != AudioCodec::AAC) {
+ audio_codec_to_use = "aac";
+ audio_codec = AudioCodec::AAC;
+ fprintf(stderr, "Warning: flv only supports aac, falling back to aac instead.\n");
+ }
+ }
+
+ const bool is_hls = strcmp(file_extension.c_str(), "m3u8") == 0;
+ if(is_hls) {
+ if(video_codec == VideoCodec::AV1 || video_codec == VideoCodec::AV1_HDR) {
+ video_codec_to_use = "hevc";
+ video_codec = VideoCodec::HEVC;
+ fprintf(stderr, "Warning: av1 is not compatible with hls (m3u8), falling back to hevc instead.\n");
+ }
+
+ if(audio_codec != AudioCodec::AAC) {
+ audio_codec_to_use = "aac";
+ audio_codec = AudioCodec::AAC;
+ fprintf(stderr, "Warning: hls (m3u8) only supports aac, falling back to aac instead.\n");
+ }
}
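The two blocks above pin the codecs to what the flv and hls (m3u8) muxers accept, and the earlier audio-codec switch does the same for webm. A compact standalone sketch of those fallback rules, with simplified enums (the real code also rewrites the *_to_use strings, handles the HDR codec variants, and prints a warning for each replacement):

#include <string>

enum class VideoCodec { H264, HEVC, AV1 };
enum class AudioCodec { AAC, OPUS, FLAC };

// Adjust the chosen codecs to what the output container supports.
static void apply_container_fallbacks(const std::string &ext, VideoCodec &video, AudioCodec &audio) {
    if(ext == "flv") {
        if(video != VideoCodec::H264)
            video = VideoCodec::H264;  // flv only takes h264 here
        if(audio != AudioCodec::AAC)
            audio = AudioCodec::AAC;   // flv only takes aac
    } else if(ext == "m3u8") {
        if(video == VideoCodec::AV1)
            video = VideoCodec::HEVC;  // hls: av1 not accepted, hevc is
        if(audio != AudioCodec::AAC)
            audio = AudioCodec::AAC;   // hls only takes aac
    } else if(ext == "webm") {
        if(audio != AudioCodec::OPUS)
            audio = AudioCodec::OPUS;  // webm only takes opus (of the codecs used here)
    }
}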
const AVCodec *video_codec_f = nullptr;
@@ -2246,7 +2315,7 @@ int main(int argc, char **argv) {
_exit(2);
}
- gsr_capture *capture = create_capture_impl(window_str, screen_region, wayland, egl.gpu_info, egl, fps, overclock, video_codec, color_range, record_cursor);
+ gsr_capture *capture = create_capture_impl(window_str, screen_region, wayland, egl, fps, overclock, video_codec, color_range, record_cursor, framerate_mode == FramerateMode::CONTENT);
// (Some?) livestreaming services require at least one audio track to work.
// If not audio is provided then create one silent audio track.
@@ -2257,12 +2326,6 @@ int main(int argc, char **argv) {
requested_audio_inputs.push_back(std::move(mai));
}
- if(is_livestream && framerate_mode != FramerateMode::CONSTANT) {
- fprintf(stderr, "Info: framerate mode was forcefully set to \"cfr\" because live streaming was detected\n");
- framerate_mode = FramerateMode::CONSTANT;
- framerate_mode_str = "cfr";
- }
-
if(is_livestream && recording_saved_script) {
fprintf(stderr, "Warning: live stream detected, -sc script is ignored\n");
recording_saved_script = nullptr;
@@ -2271,8 +2334,9 @@ int main(int argc, char **argv) {
AVStream *video_stream = nullptr;
std::vector<AudioTrack> audio_tracks;
const bool hdr = video_codec_is_hdr(video_codec);
+ const bool low_latency_recording = is_livestream || is_output_piped;
- AVCodecContext *video_codec_context = create_video_codec_context(egl.gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA ? AV_PIX_FMT_CUDA : AV_PIX_FMT_VAAPI, quality, fps, video_codec_f, is_livestream, egl.gpu_info.vendor, framerate_mode, hdr, color_range);
+ AVCodecContext *video_codec_context = create_video_codec_context(egl.gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA ? AV_PIX_FMT_CUDA : AV_PIX_FMT_VAAPI, quality, fps, video_codec_f, low_latency_recording, egl.gpu_info.vendor, framerate_mode, hdr, color_range, keyint);
if(replay_buffer_size_secs == -1)
video_stream = create_stream(av_format_context, video_codec_context);
@@ -2409,6 +2473,7 @@ int main(int argc, char **argv) {
double fps_start_time = clock_get_monotonic_seconds();
double frame_timer_start = fps_start_time - target_fps; // We want to capture the first frame immediately
int fps_counter = 0;
+ int damage_fps_counter = 0;
bool paused = false;
double paused_time_offset = 0.0;
@@ -2445,6 +2510,9 @@ int main(int argc, char **argv) {
#if LIBAVUTIL_VERSION_MAJOR <= 56
av_opt_set_channel_layout(swr, "in_channel_layout", AV_CH_LAYOUT_STEREO, 0);
av_opt_set_channel_layout(swr, "out_channel_layout", AV_CH_LAYOUT_STEREO, 0);
+ #elif LIBAVUTIL_VERSION_MAJOR >= 59
+ av_opt_set_chlayout(swr, "in_chlayout", &audio_track.codec_context->ch_layout, 0);
+ av_opt_set_chlayout(swr, "out_chlayout", &audio_track.codec_context->ch_layout, 0);
#else
av_opt_set_chlayout(swr, "in_channel_layout", &audio_track.codec_context->ch_layout, 0);
av_opt_set_chlayout(swr, "out_channel_layout", &audio_track.codec_context->ch_layout, 0);
@@ -2600,7 +2668,6 @@ int main(int argc, char **argv) {
running = 0;
break;
}
- ++fps_counter;
// TODO: Move to another thread, since this shouldn't be locked to video encoding fps
{
@@ -2625,19 +2692,28 @@ int main(int argc, char **argv) {
}
}
+ const bool damaged = !capture->is_damaged || capture->is_damaged(capture);
+ if(damaged) {
+ ++damage_fps_counter;
+ }
+
+ ++fps_counter;
double time_now = clock_get_monotonic_seconds();
double frame_timer_elapsed = time_now - frame_timer_start;
double elapsed = time_now - fps_start_time;
if (elapsed >= 1.0) {
if(verbose) {
- fprintf(stderr, "update fps: %d\n", fps_counter);
+ fprintf(stderr, "update fps: %d, damage fps: %d\n", fps_counter, damage_fps_counter);
}
fps_start_time = time_now;
fps_counter = 0;
+ damage_fps_counter = 0;
}
double frame_time_overflow = frame_timer_elapsed - target_fps;
- if (frame_time_overflow >= 0.0) {
+ if (frame_time_overflow >= 0.0 && damaged) {
+ if(capture->clear_damage)
+ capture->clear_damage(capture);
frame_time_overflow = std::min(frame_time_overflow, target_fps);
frame_timer_start = time_now - frame_time_overflow;
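The loop changes above implement damage-aware pacing for the new content frame-rate mode: a frame is encoded only when the per-frame timer has elapsed and the capture backend either reports damage or does not support damage tracking at all, and the damage flag is cleared once the frame is taken. A condensed sketch of that gating (FramePacer is an illustration, not a type in the source; target_frame_time corresponds to the target_fps variable above, which holds seconds per frame):

#include <algorithm>

struct FramePacer {
    double target_frame_time;  // seconds per frame, i.e. 1.0 / fps
    double frame_timer_start;  // monotonic time the last frame was taken

    // Returns true if a frame should be encoded now. `damaged` should be true
    // when the captured content changed since the last frame, or always true
    // when the backend cannot report damage.
    bool should_encode(double now, bool damaged) {
        double overflow = (now - frame_timer_start) - target_frame_time;
        if(overflow < 0.0 || !damaged)
            return false;
        // Clamp the overflow so a long stall doesn't trigger a burst of frames.
        overflow = std::min(overflow, target_frame_time);
        frame_timer_start = now - overflow;
        return true;
    }
};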