Diffstat (limited to 'src/main.cpp')
-rw-r--r--  src/main.cpp  135
1 file changed, 89 insertions(+), 46 deletions(-)
diff --git a/src/main.cpp b/src/main.cpp
index 7687b07..c6b0e8d 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -143,6 +143,17 @@ static bool video_codec_is_hdr(VideoCodec video_codec) {
}
}
+static VideoCodec hdr_video_codec_to_sdr_video_codec(VideoCodec video_codec) {
+ switch(video_codec) {
+ case VideoCodec::HEVC_HDR:
+ return VideoCodec::HEVC;
+ case VideoCodec::AV1_HDR:
+ return VideoCodec::AV1;
+ default:
+ return video_codec;
+ }
+}
+
static gsr_color_depth video_codec_to_bit_depth(VideoCodec video_codec) {
switch(video_codec) {
case VideoCodec::HEVC_HDR:
@@ -276,7 +287,8 @@ static AVCodecID audio_codec_get_id(AudioCodec audio_codec) {
return AV_CODEC_ID_AAC;
}
-static AVSampleFormat audio_codec_get_sample_format(AudioCodec audio_codec, const AVCodec *codec, bool mix_audio) {
+static AVSampleFormat audio_codec_get_sample_format(AVCodecContext *audio_codec_context, AudioCodec audio_codec, const AVCodec *codec, bool mix_audio) {
+ (void)audio_codec_context; // silences -Wunused-parameter when building against libavcodec < 61.15
switch(audio_codec) {
case AudioCodec::AAC: {
return AV_SAMPLE_FMT_FLTP;
@@ -285,13 +297,32 @@ static AVSampleFormat audio_codec_get_sample_format(AudioCodec audio_codec, cons
bool supports_s16 = false;
bool supports_flt = false;
- for(size_t i = 0; codec->sample_fmts && codec->sample_fmts[i] != -1; ++i) {
+ #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(61, 15, 0)
+ for(size_t i = 0; codec->sample_fmts && codec->sample_fmts[i] != AV_SAMPLE_FMT_NONE; ++i) {
if(codec->sample_fmts[i] == AV_SAMPLE_FMT_S16) {
supports_s16 = true;
} else if(codec->sample_fmts[i] == AV_SAMPLE_FMT_FLT) {
supports_flt = true;
}
}
+ #else
+ const enum AVSampleFormat *sample_fmts = NULL;
+ if(avcodec_get_supported_config(audio_codec_context, codec, AV_CODEC_CONFIG_SAMPLE_FORMAT, 0, (const void**)&sample_fmts, NULL) >= 0) {
+ if(sample_fmts) {
+ for(size_t i = 0; sample_fmts[i] != AV_SAMPLE_FMT_NONE; ++i) {
+ if(sample_fmts[i] == AV_SAMPLE_FMT_S16) {
+ supports_s16 = true;
+ } else if(sample_fmts[i] == AV_SAMPLE_FMT_FLT) {
+ supports_flt = true;
+ }
+ }
+ } else {
+ // What a dumb API. It returns NULL if all formats are supported
+ supports_s16 = true;
+ supports_flt = true;
+ }
+ }
+ #endif
// Amix only works with float audio
if(mix_audio)
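
For reference, a self-contained sketch of the lavc >= 61.15 branch above: avcodec_get_supported_config() replaces reading the deprecated codec->sample_fmts array directly, and hands back a NULL list when the encoder places no restriction on sample formats. The helper name below is illustrative, not something from main.cpp.

extern "C" {
#include <libavcodec/avcodec.h>
}

// Returns true if |codec| can encode |wanted|, mirroring the supports_s16/supports_flt
// detection done in audio_codec_get_sample_format().
static bool encoder_supports_sample_format(AVCodecContext *codec_context, const AVCodec *codec, AVSampleFormat wanted) {
    const AVSampleFormat *sample_fmts = nullptr;
    if(avcodec_get_supported_config(codec_context, codec, AV_CODEC_CONFIG_SAMPLE_FORMAT, 0, (const void**)&sample_fmts, nullptr) < 0)
        return false;
    if(!sample_fmts)
        return true; // NULL means the encoder accepts every sample format
    for(size_t i = 0; sample_fmts[i] != AV_SAMPLE_FMT_NONE; ++i) {
        if(sample_fmts[i] == wanted)
            return true;
    }
    return false;
}
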
@@ -360,7 +391,7 @@ static AVCodecContext* create_audio_codec_context(int fps, AudioCodec audio_code
assert(codec->type == AVMEDIA_TYPE_AUDIO);
codec_context->codec_id = codec->id;
- codec_context->sample_fmt = audio_codec_get_sample_format(audio_codec, codec, mix_audio);
+ codec_context->sample_fmt = audio_codec_get_sample_format(codec_context, audio_codec, codec, mix_audio);
codec_context->bit_rate = audio_bitrate == 0 ? audio_codec_get_get_bitrate(audio_codec) : audio_bitrate;
codec_context->sample_rate = AUDIO_SAMPLE_RATE;
if(audio_codec == AudioCodec::AAC)
@@ -519,13 +550,13 @@ static AVCodecContext *create_video_codec_context(AVPixelFormat pix_fmt,
if(codec_context->codec_id == AV_CODEC_ID_AV1 || codec_context->codec_id == AV_CODEC_ID_H264 || codec_context->codec_id == AV_CODEC_ID_HEVC) {
switch(video_quality) {
case VideoQuality::MEDIUM:
- codec_context->global_quality = 160 * quality_multiply;
+ codec_context->global_quality = 150 * quality_multiply;
break;
case VideoQuality::HIGH:
- codec_context->global_quality = 130 * quality_multiply;
+ codec_context->global_quality = 120 * quality_multiply;
break;
case VideoQuality::VERY_HIGH:
- codec_context->global_quality = 110 * quality_multiply;
+ codec_context->global_quality = 100 * quality_multiply;
break;
case VideoQuality::ULTRA:
codec_context->global_quality = 90 * quality_multiply;
@@ -683,9 +714,9 @@ static void open_video_software(AVCodecContext *codec_context, VideoQuality vide
av_dict_set(&options, "preset", "medium", 0);
if(color_depth == GSR_COLOR_DEPTH_10_BITS) {
- av_dict_set(&options, "profile", "high10", 0);
+ av_dict_set_int(&options, "profile", AV_PROFILE_H264_HIGH_10, 0);
} else {
- av_dict_set(&options, "profile", "high", 0);
+ av_dict_set_int(&options, "profile", AV_PROFILE_H264_HIGH, 0);
}
// TODO: If streaming or piping output set this to zerolatency
av_dict_set(&options, "tune", "fastdecode", 0);
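
A small aside on the av_dict_set() -> av_dict_set_int() switch: AVDictionary stores only strings, so av_dict_set_int() formats the integer and stores its decimal text. The sketch below only shows what ends up in the dictionary, not how a particular encoder interprets the option; AV_PROFILE_H264_HIGH_10 (110) comes from libavcodec/defs.h.

extern "C" {
#include <libavcodec/avcodec.h> // AV_PROFILE_* constants via libavcodec/defs.h
#include <libavutil/dict.h>
}
#include <cstdio>

int main() {
    AVDictionary *options = nullptr;
    av_dict_set_int(&options, "profile", AV_PROFILE_H264_HIGH_10, 0);

    const AVDictionaryEntry *entry = av_dict_get(options, "profile", nullptr, 0);
    if(entry)
        printf("profile=%s\n", entry->value); // prints "profile=110"

    av_dict_free(&options);
    return 0;
}
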
@@ -876,23 +907,25 @@ static void open_video_hardware(AVCodecContext *codec_context, VideoQuality vide
} else {
// TODO: More quality options
//av_dict_set_int(&options, "low_power", 1, 0);
+ // Improves performance but increases VRAM usage
+ av_dict_set_int(&options, "async_depth", 8, 0);
if(codec_context->codec_id == AV_CODEC_ID_H264) {
// TODO:
if(color_depth == GSR_COLOR_DEPTH_10_BITS)
- av_dict_set(&options, "profile", "high10", 0);
+ av_dict_set_int(&options, "profile", AV_PROFILE_H264_HIGH_10, 0);
else
- av_dict_set(&options, "profile", "high", 0);
+ av_dict_set_int(&options, "profile", AV_PROFILE_H264_HIGH, 0);
// Removed because it causes stutter in games for some people
//av_dict_set_int(&options, "quality", 5, 0); // quality preset
} else if(codec_context->codec_id == AV_CODEC_ID_AV1) {
- av_dict_set(&options, "profile", "main", 0); // TODO: use professional instead?
+ av_dict_set_int(&options, "profile", AV_PROFILE_AV1_MAIN, 0); // TODO: use professional instead?
av_dict_set(&options, "tier", "main", 0);
} else if(codec_context->codec_id == AV_CODEC_ID_HEVC) {
if(color_depth == GSR_COLOR_DEPTH_10_BITS)
- av_dict_set(&options, "profile", "main10", 0);
+ av_dict_set_int(&options, "profile", AV_PROFILE_HEVC_MAIN_10, 0);
else
- av_dict_set(&options, "profile", "main", 0);
+ av_dict_set_int(&options, "profile", AV_PROFILE_HEVC_MAIN, 0);
if(hdr)
av_dict_set(&options, "sei", "hdr", 0);
@@ -1893,10 +1926,6 @@ static gsr_capture* create_capture_impl(std::string &window_str, const char *scr
_exit(1);
}
- if(video_codec_is_hdr(video_codec)) {
- fprintf(stderr, "Warning: portal capture option doesn't support hdr yet (pipewire doesn't support hdr)\n");
- }
-
gsr_capture_portal_params portal_params;
portal_params.egl = egl;
portal_params.color_depth = color_depth;
@@ -2753,6 +2782,11 @@ int main(int argc, char **argv) {
}
}
+ if(wayland && is_monitor_capture) {
+ fprintf(stderr, "gsr warning: on wayland it's not possible to sync the video exactly to the recorded monitor when capturing a monitor directly."
+ " If you experience stutter in the video then record with the portal capture option (-w portal) or use X11 instead\n");
+ }
+
// TODO: Fix constant framerate not working properly on amd/intel because capture framerate gets locked to the same framerate as
// game framerate, which doesn't work well when you need to encode multiple duplicate frames (AMD/Intel is slow at encoding!).
// It also appears to skip audio frames on nvidia wayland? why? that should be fine, but it causes video stuttering because of audio/video sync.
@@ -2896,6 +2930,11 @@ int main(int argc, char **argv) {
const bool force_no_audio_offset = is_livestream || is_output_piped || (file_extension != "mp4" && file_extension != "mkv" && file_extension != "webm");
const double target_fps = 1.0 / (double)fps;
+ if(video_codec_is_hdr(video_codec) && is_portal_capture) {
+ fprintf(stderr, "Warning: portal capture option doesn't support hdr yet (pipewire doesn't support hdr); the video will be tonemapped from hdr to sdr\n");
+ video_codec = hdr_video_codec_to_sdr_video_codec(video_codec);
+ }
+
audio_codec = select_audio_codec_with_fallback(audio_codec, file_extension, uses_amix);
const AVCodec *video_codec_f = select_video_codec_with_fallback(&video_codec, video_codec_to_use, file_extension.c_str(), use_software_video_encoder, &egl);
@@ -3270,23 +3309,25 @@ int main(int argc, char **argv) {
amix_thread = std::thread([&]() {
AVFrame *aframe = av_frame_alloc();
while(running) {
- std::lock_guard<std::mutex> lock(audio_filter_mutex);
- for(AudioTrack &audio_track : audio_tracks) {
- if(!audio_track.sink)
- continue;
-
- int err = 0;
- while ((err = av_buffersink_get_frame(audio_track.sink, aframe)) >= 0) {
- aframe->pts = audio_track.pts;
- err = avcodec_send_frame(audio_track.codec_context, aframe);
- if(err >= 0){
- // TODO: Move to separate thread because this could write to network (for example when livestreaming)
- receive_frames(audio_track.codec_context, audio_track.stream_index, audio_track.stream, aframe->pts, av_format_context, record_start_time, frame_data_queue, replay_buffer_size_secs, frames_erased, write_output_mutex, paused_time_offset);
- } else {
- fprintf(stderr, "Failed to encode audio!\n");
+ {
+ std::lock_guard<std::mutex> lock(audio_filter_mutex);
+ for(AudioTrack &audio_track : audio_tracks) {
+ if(!audio_track.sink)
+ continue;
+
+ int err = 0;
+ while ((err = av_buffersink_get_frame(audio_track.sink, aframe)) >= 0) {
+ aframe->pts = audio_track.pts;
+ err = avcodec_send_frame(audio_track.codec_context, aframe);
+ if(err >= 0){
+ // TODO: Move to separate thread because this could write to network (for example when livestreaming)
+ receive_frames(audio_track.codec_context, audio_track.stream_index, audio_track.stream, aframe->pts, av_format_context, record_start_time, frame_data_queue, replay_buffer_size_secs, frames_erased, write_output_mutex, paused_time_offset);
+ } else {
+ fprintf(stderr, "Failed to encode audio!\n");
+ }
+ av_frame_unref(aframe);
+ audio_track.pts += audio_track.codec_context->frame_size;
}
- av_frame_unref(aframe);
- audio_track.pts += audio_track.codec_context->frame_size;
}
}
av_usleep(5 * 1000); // 5 milliseconds
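
The restructuring of the amix thread above is mostly about lock scope: the std::lock_guard now lives in an inner block so audio_filter_mutex is released before the 5 ms sleep rather than being held across it. A reduced sketch of the pattern, with placeholder names instead of the real types from main.cpp:

#include <atomic>
#include <chrono>
#include <mutex>
#include <thread>

std::mutex audio_filter_mutex;

static void mix_loop(const std::atomic<bool> &running) {
    while(running) {
        {
            std::lock_guard<std::mutex> lock(audio_filter_mutex);
            // drain the buffersinks and send frames to the encoder while holding the lock
        } // lock released here, so other threads can take the mutex during the sleep
        std::this_thread::sleep_for(std::chrono::milliseconds(5));
    }
}
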
@@ -3326,7 +3367,7 @@ int main(int argc, char **argv) {
gsr_damage_on_event(&damage, gsr_egl_get_event_data(&egl));
}
gsr_damage_tick(&damage);
- gsr_capture_tick(capture, video_codec_context);
+ gsr_capture_tick(capture);
if(!is_monitor_capture) {
Window damage_target_window = 0;
@@ -3344,10 +3385,12 @@ int main(int argc, char **argv) {
}
bool damaged = false;
- if(capture->is_damaged)
+ if(use_damage_tracking)
+ damaged = gsr_damage_is_damaged(&damage);
+ else if(capture->is_damaged)
damaged = capture->is_damaged(capture);
else
- damaged = !use_damage_tracking || gsr_damage_is_damaged(&damage);
+ damaged = true;
if(damaged)
++damage_fps_counter;
@@ -3366,18 +3409,19 @@ int main(int argc, char **argv) {
const double this_video_frame_time = clock_get_monotonic_seconds() - paused_time_offset;
const int64_t expected_frames = std::round((this_video_frame_time - record_start_time) / target_fps);
- int num_frames = std::max((int64_t)0LL, expected_frames - video_pts_counter);
+ const int num_frames = std::max((int64_t)0LL, expected_frames - video_pts_counter);
const double num_frames_seconds = num_frames * target_fps;
- if((damaged || num_frames_seconds >= damage_timeout_seconds) && !paused/* && fps_counter < fps + 100*/) {
+ if((damaged || (framerate_mode == FramerateMode::CONSTANT && num_frames > 0) || (framerate_mode != FramerateMode::CONSTANT && num_frames_seconds >= damage_timeout_seconds)) && !paused) {
gsr_damage_clear(&damage);
if(capture->clear_damage)
capture->clear_damage(capture);
- egl.glClear(0);
- gsr_capture_capture(capture, video_frame, &color_conversion);
- gsr_egl_swap_buffers(&egl);
-
- gsr_video_encoder_copy_textures_to_frame(video_encoder, video_frame);
+ if(damaged || video_pts_counter == 0) {
+ egl.glClear(0);
+ gsr_capture_capture(capture, video_frame, &color_conversion);
+ gsr_egl_swap_buffers(&egl);
+ gsr_video_encoder_copy_textures_to_frame(video_encoder, video_frame);
+ }
if(hdr && !hdr_metadata_set && replay_buffer_size_secs == -1 && add_hdr_metadata_to_video_stream(capture, video_stream))
hdr_metadata_set = true;
@@ -3405,7 +3449,6 @@ int main(int argc, char **argv) {
}
}
- gsr_capture_capture_end(capture, video_frame);
video_pts_counter += num_frames;
}
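
For intuition on the num_frames computation above (note that target_fps is actually seconds per frame, i.e. 1.0/fps): the encoder is owed however many frames the wall clock has pulled ahead of what was already emitted. A toy example with made-up numbers:

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>

int main() {
    const double fps = 60.0;
    const double target_fps = 1.0 / fps;     // seconds per frame, as in main.cpp
    const double record_start_time = 100.0;  // hypothetical monotonic clock values
    const double now = 100.5;                // half a second into the recording
    const int64_t video_pts_counter = 25;    // frames emitted so far

    const int64_t expected_frames = std::round((now - record_start_time) / target_fps); // 30
    const int64_t num_frames = std::max((int64_t)0, expected_frames - video_pts_counter); // 5 frames owed
    printf("frames owed: %lld\n", (long long)num_frames);
    return 0;
}
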
@@ -3442,8 +3485,8 @@ int main(int argc, char **argv) {
const double frame_sleep_fps = 1.0 / update_fps;
const double sleep_time = frame_sleep_fps - (frame_end - frame_start);
if(sleep_time > 0.0) {
- if(damaged)
- av_usleep(sleep_time * 1000.0 * 1000.0);
+ if(damaged)
+ av_usleep(sleep_time * 1000.0 * 1000.0);
else
av_usleep(2 * 1000.0); // 2 milliseconds
}