author | dec05eba <dec05eba@protonmail.com> | 2024-07-23 18:11:38 +0200
---|---|---
committer | dec05eba <dec05eba@protonmail.com> | 2024-07-23 18:11:38 +0200
commit | 7d719a44c6ce76ecfc05287d0cbb2b36e84b4ca6 (patch) |
tree | bcc1d7a857e6b9efad44cfed750ba46d5decc785 |
parent | 91485ba75d5a598eda5a041505c7c65648297e32 (diff) |
Fix hdr capture causing crash when in replay mode
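Before this commit the KMS capture backend attached the HDR mastering-display and content-light side data to the output AVStream from inside its capture() callback. With a replay buffer there is no muxed output stream while frames are being buffered, so that path presumably touched a stream that did not exist yet, which is the crash this commit addresses. The capture API therefore no longer receives the AVStream at all: backends cache the DRM hdr_output_metadata and expose it through a new optional set_hdr_metadata() callback, and main.cpp attaches the side data to whichever stream it actually owns. Condensed from the main.cpp hunks below (not a verbatim excerpt), the two call sites end up looking like this:

```c
// Normal recording: the output stream exists from the start, so main.cpp retries each
// frame until the backend reports HDR metadata, then attaches it to the stream once.
if(hdr && !hdr_metadata_set && replay_buffer_size_secs == -1 && add_hdr_metadata_to_video_stream(capture, video_stream))
    hdr_metadata_set = true;

// Replay mode: the video stream is only created when a replay is saved, so the metadata
// is attached in save_replay_async(), right after avformat_write_header() succeeds.
if(hdr)
    add_hdr_metadata_to_video_stream(capture, video_stream);
```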
-rw-r--r-- | include/capture/capture.h | 9
-rw-r--r-- | kms/client/kms_client.c | 2
-rw-r--r-- | src/capture/capture.c | 11
-rw-r--r-- | src/capture/kms.c | 93
-rw-r--r-- | src/capture/nvfbc.c | 3
-rw-r--r-- | src/capture/portal.c | 3
-rw-r--r-- | src/capture/xcomposite.c | 3
-rw-r--r-- | src/main.cpp | 106
8 files changed, 138 insertions, 92 deletions
diff --git a/include/capture/capture.h b/include/capture/capture.h
index 1e7b25f..278c431 100644
--- a/include/capture/capture.h
+++ b/include/capture/capture.h
@@ -3,11 +3,14 @@
 
 #include "../color_conversion.h"
 #include <stdbool.h>
+#include <stddef.h>
 
 typedef struct AVCodecContext AVCodecContext;
 typedef struct AVStream AVStream;
 typedef struct AVFrame AVFrame;
 typedef struct gsr_capture gsr_capture;
+typedef struct AVMasteringDisplayMetadata AVMasteringDisplayMetadata;
+typedef struct AVContentLightMetadata AVContentLightMetadata;
 
 struct gsr_capture {
     /* These methods should not be called manually. Call gsr_capture_* instead */
@@ -16,10 +19,11 @@ struct gsr_capture {
     bool (*is_damaged)(gsr_capture *cap); /* can be NULL */
     void (*clear_damage)(gsr_capture *cap); /* can be NULL */
     bool (*should_stop)(gsr_capture *cap, bool *err); /* can be NULL. If NULL, return false */
-    int (*capture)(gsr_capture *cap, AVStream *video_stream, AVFrame *frame, gsr_color_conversion *color_conversion);
+    int (*capture)(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion);
     void (*capture_end)(gsr_capture *cap, AVFrame *frame); /* can be NULL */
     gsr_source_color (*get_source_color)(gsr_capture *cap);
     bool (*uses_external_image)(gsr_capture *cap); /* can be NULL. If NULL, return false */
+    bool (*set_hdr_metadata)(gsr_capture *cap, AVMasteringDisplayMetadata *mastering_display_metadata, AVContentLightMetadata *light_metadata); /* can be NULL. If NULL, return false */
 
     void (*destroy)(gsr_capture *cap, AVCodecContext *video_codec_context);
     void *priv; /* can be NULL */
@@ -29,10 +33,11 @@ struct gsr_capture {
 int gsr_capture_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame);
 void gsr_capture_tick(gsr_capture *cap, AVCodecContext *video_codec_context);
 bool gsr_capture_should_stop(gsr_capture *cap, bool *err);
-int gsr_capture_capture(gsr_capture *cap, AVStream *video_stream, AVFrame *frame, gsr_color_conversion *color_conversion);
+int gsr_capture_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion);
 void gsr_capture_capture_end(gsr_capture *cap, AVFrame *frame);
 gsr_source_color gsr_capture_get_source_color(gsr_capture *cap);
 bool gsr_capture_uses_external_image(gsr_capture *cap);
+bool gsr_capture_set_hdr_metadata(gsr_capture *cap, AVMasteringDisplayMetadata *mastering_display_metadata, AVContentLightMetadata *light_metadata);
 void gsr_capture_destroy(gsr_capture *cap, AVCodecContext *video_codec_context);
 
 #endif /* GSR_CAPTURE_CAPTURE_H */
diff --git a/kms/client/kms_client.c b/kms/client/kms_client.c
index d3e68dd..80bb9cd 100644
--- a/kms/client/kms_client.c
+++ b/kms/client/kms_client.c
@@ -103,7 +103,7 @@ static int recv_msg_from_server(int server_pid, int server_fd, gsr_kms_response
         int status = 0;
         int wait_result = waitpid(server_pid, &status, WNOHANG);
         if(wait_result != 0) {
-            res = -1;
+            res = 0;
             break;
         }
         usleep(1000);
diff --git a/src/capture/capture.c b/src/capture/capture.c
index 283c0a1..7c5737d 100644
--- a/src/capture/capture.c
+++ b/src/capture/capture.c
@@ -24,9 +24,9 @@ bool gsr_capture_should_stop(gsr_capture *cap, bool *err) {
         return false;
 }
 
-int gsr_capture_capture(gsr_capture *cap, AVStream *video_stream, AVFrame *frame, gsr_color_conversion *color_conversion) {
+int gsr_capture_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
     assert(cap->started);
-    return cap->capture(cap, video_stream, frame, color_conversion);
+    return cap->capture(cap, frame, color_conversion);
 }
 
 void gsr_capture_capture_end(gsr_capture *cap, AVFrame *frame) {
@@ -46,6 +46,13 @@ bool gsr_capture_uses_external_image(gsr_capture *cap) {
         return false;
 }
 
+bool gsr_capture_set_hdr_metadata(gsr_capture *cap, AVMasteringDisplayMetadata *mastering_display_metadata, AVContentLightMetadata *light_metadata) {
+    if(cap->set_hdr_metadata)
+        return cap->set_hdr_metadata(cap, mastering_display_metadata, light_metadata);
+    else
+        return false;
+}
+
 void gsr_capture_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
     cap->destroy(cap, video_codec_context);
 }
diff --git a/src/capture/kms.c b/src/capture/kms.c
index a148169..25d4956 100644
--- a/src/capture/kms.c
+++ b/src/capture/kms.c
@@ -34,17 +34,15 @@ typedef struct {
     vec2i capture_size;
     MonitorId monitor_id;
 
-    AVMasteringDisplayMetadata *mastering_display_metadata;
-    AVContentLightMetadata *light_metadata;
-    size_t light_metadata_size;
-    bool hdr_metadata_set;
-
     gsr_monitor_rotation monitor_rotation;
 
     unsigned int input_texture_id;
     unsigned int cursor_texture_id;
 
     bool no_modifiers_fallback;
+
+    struct hdr_output_metadata hdr_metadata;
+    bool hdr_metadata_set;
 } gsr_capture_kms;
 
 static void gsr_capture_kms_cleanup_kms_fds(gsr_capture_kms *self) {
@@ -240,50 +238,12 @@ static bool hdr_metadata_is_supported_format(const struct hdr_output_metadata *h
 }
 
 // TODO: Check if this hdr data can be changed after the call to av_packet_side_data_add
-static void gsr_kms_set_hdr_metadata(gsr_capture_kms *self, AVStream *video_stream, gsr_kms_response_item *drm_fd) {
+static void gsr_kms_set_hdr_metadata(gsr_capture_kms *self, gsr_kms_response_item *drm_fd) {
     if(self->hdr_metadata_set)
         return;
 
-    if(!self->light_metadata)
-        self->light_metadata = av_content_light_metadata_alloc(&self->light_metadata_size);
-
-    if(!self->mastering_display_metadata)
-        self->mastering_display_metadata = av_mastering_display_metadata_alloc();
-
-    if(self->light_metadata) {
-        self->light_metadata->MaxCLL = drm_fd->hdr_metadata.hdmi_metadata_type1.max_cll;
-        self->light_metadata->MaxFALL = drm_fd->hdr_metadata.hdmi_metadata_type1.max_fall;
-
-        #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(60, 31, 102)
-        av_stream_add_side_data(video_stream, AV_PKT_DATA_CONTENT_LIGHT_LEVEL, self->light_metadata, self->light_metadata_size);
-        #else
-        av_packet_side_data_add(&video_stream->codecpar->coded_side_data, &video_stream->codecpar->nb_coded_side_data, AV_PKT_DATA_CONTENT_LIGHT_LEVEL, self->light_metadata, self->light_metadata_size, 0);
-        #endif
-    }
-
-    if(self->mastering_display_metadata) {
-        for(int i = 0; i < 3; ++i) {
-            self->mastering_display_metadata->display_primaries[i][0] = av_make_q(drm_fd->hdr_metadata.hdmi_metadata_type1.display_primaries[i].x, 50000);
-            self->mastering_display_metadata->display_primaries[i][1] = av_make_q(drm_fd->hdr_metadata.hdmi_metadata_type1.display_primaries[i].y, 50000);
-        }
-
-        self->mastering_display_metadata->white_point[0] = av_make_q(drm_fd->hdr_metadata.hdmi_metadata_type1.white_point.x, 50000);
-        self->mastering_display_metadata->white_point[1] = av_make_q(drm_fd->hdr_metadata.hdmi_metadata_type1.white_point.y, 50000);
-
-        self->mastering_display_metadata->min_luminance = av_make_q(drm_fd->hdr_metadata.hdmi_metadata_type1.min_display_mastering_luminance, 10000);
-        self->mastering_display_metadata->max_luminance = av_make_q(drm_fd->hdr_metadata.hdmi_metadata_type1.max_display_mastering_luminance, 1);
-
-        self->mastering_display_metadata->has_primaries = self->mastering_display_metadata->display_primaries[0][0].num > 0;
-        self->mastering_display_metadata->has_luminance = self->mastering_display_metadata->max_luminance.num > 0;
-
-        #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(60, 31, 102)
-        av_stream_add_side_data(video_stream, AV_PKT_DATA_MASTERING_DISPLAY_METADATA, self->mastering_display_metadata, sizeof(*self->mastering_display_metadata));
-        #else
-        av_packet_side_data_add(&video_stream->codecpar->coded_side_data, &video_stream->codecpar->nb_coded_side_data, AV_PKT_DATA_MASTERING_DISPLAY_METADATA, self->mastering_display_metadata, sizeof(*self->mastering_display_metadata), 0);
-        #endif
-    }
-
     self->hdr_metadata_set = true;
+    self->hdr_metadata = drm_fd->hdr_metadata;
 }
 
 static vec2i swap_vec2i(vec2i value) {
@@ -311,7 +271,7 @@ static bool is_plane_compressed(uint64_t modifier) {
     return false;
 }
 
-static int gsr_capture_kms_capture(gsr_capture *cap, AVStream *video_stream, AVFrame *frame, gsr_color_conversion *color_conversion) {
+static int gsr_capture_kms_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
     gsr_capture_kms *self = cap->priv;
 
     const bool cursor_texture_id_is_external = self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA;
@@ -361,7 +321,7 @@ static int gsr_capture_kms_capture(gsr_capture *cap, AVStream *video_stream, AVF
         cursor_drm_fd = NULL;
 
     if(drm_fd->has_hdr_metadata && self->params.hdr && hdr_metadata_is_supported_format(&drm_fd->hdr_metadata))
-        gsr_kms_set_hdr_metadata(self, video_stream, drm_fd);
+        gsr_kms_set_hdr_metadata(self, drm_fd);
 
     if(is_plane_compressed(drm_fd->modifier)) {
         static bool compressed_plane_warning_shown = false;
@@ -526,17 +486,43 @@ static gsr_source_color gsr_capture_kms_get_source_color(gsr_capture *cap) {
 }
 
 static bool gsr_capture_kms_uses_external_image(gsr_capture *cap) {
-    gsr_capture_kms *cap_kms = cap->priv;
-    return cap_kms->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA;
+    gsr_capture_kms *self = cap->priv;
+    return self->params.egl->gpu_info.vendor == GSR_GPU_VENDOR_NVIDIA;
+}
+
+static bool gsr_capture_kms_set_hdr_metadata(gsr_capture *cap, AVMasteringDisplayMetadata *mastering_display_metadata, AVContentLightMetadata *light_metadata) {
+    gsr_capture_kms *self = cap->priv;
+
+    if(!self->hdr_metadata_set)
+        return false;
+
+    light_metadata->MaxCLL = self->hdr_metadata.hdmi_metadata_type1.max_cll;
+    light_metadata->MaxFALL = self->hdr_metadata.hdmi_metadata_type1.max_fall;
+
+    for(int i = 0; i < 3; ++i) {
+        mastering_display_metadata->display_primaries[i][0] = av_make_q(self->hdr_metadata.hdmi_metadata_type1.display_primaries[i].x, 50000);
+        mastering_display_metadata->display_primaries[i][1] = av_make_q(self->hdr_metadata.hdmi_metadata_type1.display_primaries[i].y, 50000);
+    }
+
+    mastering_display_metadata->white_point[0] = av_make_q(self->hdr_metadata.hdmi_metadata_type1.white_point.x, 50000);
+    mastering_display_metadata->white_point[1] = av_make_q(self->hdr_metadata.hdmi_metadata_type1.white_point.y, 50000);
+
+    mastering_display_metadata->min_luminance = av_make_q(self->hdr_metadata.hdmi_metadata_type1.min_display_mastering_luminance, 10000);
+    mastering_display_metadata->max_luminance = av_make_q(self->hdr_metadata.hdmi_metadata_type1.max_display_mastering_luminance, 1);
+
+    mastering_display_metadata->has_primaries = mastering_display_metadata->display_primaries[0][0].num > 0;
+    mastering_display_metadata->has_luminance = mastering_display_metadata->max_luminance.num > 0;
+
+    return true;
 }
 
 static void gsr_capture_kms_destroy(gsr_capture *cap, AVCodecContext *video_codec_context) {
     (void)video_codec_context;
-    gsr_capture_kms *cap_kms = cap->priv;
+    gsr_capture_kms *self = cap->priv;
     if(cap->priv) {
-        gsr_capture_kms_stop(cap_kms);
-        free((void*)cap_kms->params.display_to_capture);
-        cap_kms->params.display_to_capture = NULL;
+        gsr_capture_kms_stop(self);
+        free((void*)self->params.display_to_capture);
+        self->params.display_to_capture = NULL;
         free(cap->priv);
        cap->priv = NULL;
     }
@@ -577,6 +563,7 @@ gsr_capture* gsr_capture_kms_create(const gsr_capture_kms_params *params) {
         .capture_end = gsr_capture_kms_capture_end,
         .get_source_color = gsr_capture_kms_get_source_color,
         .uses_external_image = gsr_capture_kms_uses_external_image,
+        .set_hdr_metadata = gsr_capture_kms_set_hdr_metadata,
         .destroy = gsr_capture_kms_destroy,
         .priv = cap_kms
     };
diff --git a/src/capture/nvfbc.c b/src/capture/nvfbc.c
index 4bf6186..97e0283 100644
--- a/src/capture/nvfbc.c
+++ b/src/capture/nvfbc.c
@@ -378,9 +378,8 @@ static int gsr_capture_nvfbc_start(gsr_capture *cap, AVCodecContext *video_codec
     return -1;
 }
 
-static int gsr_capture_nvfbc_capture(gsr_capture *cap, AVStream *video_stream, AVFrame *frame, gsr_color_conversion *color_conversion) {
+static int gsr_capture_nvfbc_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
     gsr_capture_nvfbc *cap_nvfbc = cap->priv;
-    (void)video_stream;
 
     const double nvfbc_recreate_retry_time_seconds = 1.0;
     if(cap_nvfbc->nvfbc_needs_recreate) {
diff --git a/src/capture/portal.c b/src/capture/portal.c
index 42f9800..092fd9f 100644
--- a/src/capture/portal.c
+++ b/src/capture/portal.c
@@ -290,8 +290,7 @@ static int max_int(int a, int b) {
     return a > b ? a : b;
 }
 
-static int gsr_capture_portal_capture(gsr_capture *cap, AVStream *video_stream, AVFrame *frame, gsr_color_conversion *color_conversion) {
-    (void)video_stream;
+static int gsr_capture_portal_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
     (void)frame;
     (void)color_conversion;
     gsr_capture_portal *self = cap->priv;
diff --git a/src/capture/xcomposite.c b/src/capture/xcomposite.c
index 52afc20..7e78fb9 100644
--- a/src/capture/xcomposite.c
+++ b/src/capture/xcomposite.c
@@ -313,9 +313,8 @@ static bool gsr_capture_xcomposite_should_stop(gsr_capture *cap, bool *err) {
     return false;
 }
 
-static int gsr_capture_xcomposite_capture(gsr_capture *cap, AVStream *video_stream, AVFrame *frame, gsr_color_conversion *color_conversion) {
+static int gsr_capture_xcomposite_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion) {
     gsr_capture_xcomposite *self = cap->priv;
-    (void)video_stream;
     (void)frame;
 
     //self->params.egl->glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
diff --git a/src/main.cpp b/src/main.cpp
index 3924f46..2845d4c 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -39,6 +39,7 @@ extern "C" {
 #include <libswresample/swresample.h>
 #include <libavutil/avutil.h>
 #include <libavutil/time.h>
+#include <libavutil/mastering_display_metadata.h>
 #include <libavfilter/avfilter.h>
 #include <libavfilter/buffersink.h>
 #include <libavfilter/buffersrc.h>
@@ -1287,11 +1288,49 @@ struct AudioTrack {
     int64_t pts = 0;
 };
 
+static bool add_hdr_metadata_to_video_stream(gsr_capture *cap, AVStream *video_stream) {
+    size_t light_metadata_size = 0;
+    AVContentLightMetadata *light_metadata = av_content_light_metadata_alloc(&light_metadata_size);
+    AVMasteringDisplayMetadata *mastering_display_metadata = av_mastering_display_metadata_alloc();
+
+    if(!light_metadata || !mastering_display_metadata) {
+        if(light_metadata)
+            av_freep(light_metadata);
+
+        if(mastering_display_metadata)
+            av_freep(mastering_display_metadata);
+
+        return false;
+    }
+
+    if(!gsr_capture_set_hdr_metadata(cap, mastering_display_metadata, light_metadata)) {
+        av_freep(light_metadata);
+        av_freep(mastering_display_metadata);
+        return false;
+    }
+
+    // TODO: More error checking
+
+    #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(60, 31, 102)
+    const bool added_light_metadata = av_stream_add_side_data(video_stream, AV_PKT_DATA_CONTENT_LIGHT_LEVEL, light_metadata, light_metadata_size);
+    #else
+    av_packet_side_data_add(&video_stream->codecpar->coded_side_data, &video_stream->codecpar->nb_coded_side_data, AV_PKT_DATA_CONTENT_LIGHT_LEVEL, light_metadata, light_metadata_size, 0);
+    #endif
+
+    #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(60, 31, 102)
+    const bool added_display_metadata = av_stream_add_side_data(video_stream, AV_PKT_DATA_MASTERING_DISPLAY_METADATA, mastering_display_metadata, sizeof(*mastering_display_metadata));
+    #else
+    av_packet_side_data_add(&video_stream->codecpar->coded_side_data, &video_stream->codecpar->nb_coded_side_data, AV_PKT_DATA_MASTERING_DISPLAY_METADATA, mastering_display_metadata, sizeof(*mastering_display_metadata), 0);
+    #endif
+
+    return true;
+}
+
 static std::future<void> save_replay_thread;
 static std::vector<std::shared_ptr<PacketData>> save_replay_packets;
 static std::string save_replay_output_filepath;
 
-static void save_replay_async(AVCodecContext *video_codec_context, int video_stream_index, std::vector<AudioTrack> &audio_tracks, std::deque<std::shared_ptr<PacketData>> &frame_data_queue, bool frames_erased, std::string output_dir, const char *container_format, const std::string &file_extension, std::mutex &write_output_mutex, bool date_folders) {
+static void save_replay_async(AVCodecContext *video_codec_context, int video_stream_index, std::vector<AudioTrack> &audio_tracks, std::deque<std::shared_ptr<PacketData>> &frame_data_queue, bool frames_erased, std::string output_dir, const char *container_format, const std::string &file_extension, std::mutex &write_output_mutex, bool date_folders, bool hdr, gsr_capture *capture) {
     if(save_replay_thread.valid())
         return;
@@ -1343,36 +1382,42 @@ static void save_replay_async(AVCodecContext *video_codec_context, int video_str
         save_replay_output_filepath = output_dir + "/Replay_" + get_date_str() + "." + file_extension;
     }
 
-    save_replay_thread = std::async(std::launch::async, [video_stream_index, container_format, start_index, video_pts_offset, audio_pts_offset, video_codec_context, &audio_tracks]() mutable {
-        AVFormatContext *av_format_context;
-        avformat_alloc_output_context2(&av_format_context, nullptr, container_format, nullptr);
+    AVFormatContext *av_format_context;
+    avformat_alloc_output_context2(&av_format_context, nullptr, container_format, nullptr);
 
-        AVStream *video_stream = create_stream(av_format_context, video_codec_context);
-        avcodec_parameters_from_context(video_stream->codecpar, video_codec_context);
+    AVStream *video_stream = create_stream(av_format_context, video_codec_context);
+    avcodec_parameters_from_context(video_stream->codecpar, video_codec_context);
 
-        std::unordered_map<int, AudioTrack*> stream_index_to_audio_track_map;
-        for(AudioTrack &audio_track : audio_tracks) {
-            stream_index_to_audio_track_map[audio_track.stream_index] = &audio_track;
-            AVStream *audio_stream = create_stream(av_format_context, audio_track.codec_context);
-            avcodec_parameters_from_context(audio_stream->codecpar, audio_track.codec_context);
-            audio_track.stream = audio_stream;
-        }
+    std::unordered_map<int, AudioTrack*> stream_index_to_audio_track_map;
+    for(AudioTrack &audio_track : audio_tracks) {
+        stream_index_to_audio_track_map[audio_track.stream_index] = &audio_track;
+        AVStream *audio_stream = create_stream(av_format_context, audio_track.codec_context);
+        avcodec_parameters_from_context(audio_stream->codecpar, audio_track.codec_context);
+        audio_track.stream = audio_stream;
+    }
 
-        int ret = avio_open(&av_format_context->pb, save_replay_output_filepath.c_str(), AVIO_FLAG_WRITE);
-        if (ret < 0) {
-            fprintf(stderr, "Error: Could not open '%s': %s. Make sure %s is an existing directory with write access\n", save_replay_output_filepath.c_str(), av_error_to_string(ret), save_replay_output_filepath.c_str());
-            return;
-        }
+    int ret = avio_open(&av_format_context->pb, save_replay_output_filepath.c_str(), AVIO_FLAG_WRITE);
+    if (ret < 0) {
+        fprintf(stderr, "Error: Could not open '%s': %s. Make sure %s is an existing directory with write access\n", save_replay_output_filepath.c_str(), av_error_to_string(ret), save_replay_output_filepath.c_str());
+        return;
+    }
 
-        AVDictionary *options = nullptr;
-        av_dict_set(&options, "strict", "experimental", 0);
+    AVDictionary *options = nullptr;
+    av_dict_set(&options, "strict", "experimental", 0);
 
-        ret = avformat_write_header(av_format_context, &options);
-        if (ret < 0) {
-            fprintf(stderr, "Error occurred when writing header to output file: %s\n", av_error_to_string(ret));
-            return;
-        }
+    ret = avformat_write_header(av_format_context, &options);
+    if (ret < 0) {
+        fprintf(stderr, "Error occurred when writing header to output file: %s\n", av_error_to_string(ret));
+        avio_close(av_format_context->pb);
+        avformat_free_context(av_format_context);
+        av_dict_free(&options);
+        return;
+    }
+
+    if(hdr)
+        add_hdr_metadata_to_video_stream(capture, video_stream);
+    save_replay_thread = std::async(std::launch::async, [video_stream_index, video_stream, start_index, video_pts_offset, audio_pts_offset, video_codec_context, &audio_tracks, stream_index_to_audio_track_map, av_format_context, options]() mutable {
 
         for(size_t i = start_index; i < save_replay_packets.size(); ++i) {
             // TODO: Check if successful
             AVPacket av_packet;
@@ -1404,7 +1449,7 @@ static void save_replay_async(AVCodecContext *video_codec_context, int video_str
             av_packet.stream_index = stream->index;
             av_packet_rescale_ts(&av_packet, codec_context->time_base, stream->time_base);
 
-            ret = av_write_frame(av_format_context, &av_packet);
+            const int ret = av_write_frame(av_format_context, &av_packet);
             if(ret < 0)
                 fprintf(stderr, "Error: Failed to write frame index %d to muxer, reason: %s (%d)\n", stream->index, av_error_to_string(ret), ret);
 
@@ -3044,6 +3089,8 @@ int main(int argc, char **argv) {
     int64_t video_pts_counter = 0;
     int64_t video_prev_pts = 0;
 
+    bool hdr_metadata_set = false;
+
     while(running) {
         double frame_start = clock_get_monotonic_seconds();
 
@@ -3107,9 +3154,12 @@ int main(int argc, char **argv) {
         const int num_frames = framerate_mode == FramerateMode::CONSTANT ? std::max((int64_t)0LL, expected_frames - video_pts_counter) : 1;
 
         if(num_frames > 0 && !paused) {
-            gsr_capture_capture(capture, video_stream, video_frame, &color_conversion);
+            gsr_capture_capture(capture, video_frame, &color_conversion);
            gsr_video_encoder_copy_textures_to_frame(video_encoder, video_frame);
 
+            if(hdr && !hdr_metadata_set && replay_buffer_size_secs == -1 && add_hdr_metadata_to_video_stream(capture, video_stream))
+                hdr_metadata_set = true;
+
             // TODO: Check if duplicate frame can be saved just by writing it with a different pts instead of sending it again
             for(int i = 0; i < num_frames; ++i) {
                 if(framerate_mode == FramerateMode::CONSTANT) {
@@ -3163,7 +3213,7 @@ int main(int argc, char **argv) {
 
         if(save_replay == 1 && !save_replay_thread.valid() && replay_buffer_size_secs != -1) {
             save_replay = 0;
-            save_replay_async(video_codec_context, VIDEO_STREAM_INDEX, audio_tracks, frame_data_queue, frames_erased, filename, container_format, file_extension, write_output_mutex, date_folders);
+            save_replay_async(video_codec_context, VIDEO_STREAM_INDEX, audio_tracks, frame_data_queue, frames_erased, filename, container_format, file_extension, write_output_mutex, date_folders, hdr, capture);
         }
 
         double frame_end = clock_get_monotonic_seconds();
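The new callback is optional for other backends: gsr_capture_set_hdr_metadata() returns false when the function pointer is NULL, and add_hdr_metadata_to_video_stream() then frees its allocations and skips the side data. A backend that wanted to opt in would only need to fill the two FFmpeg structs and register the callback. The sketch below is illustrative only and not part of this commit; the gsr_capture_foo_* names and fields are made up:

```c
// Hypothetical backend implementing the optional set_hdr_metadata contract from capture.h.
// Return false until HDR metadata has been captured; main.cpp keeps retrying each frame.
static bool gsr_capture_foo_set_hdr_metadata(gsr_capture *cap, AVMasteringDisplayMetadata *mastering_display_metadata, AVContentLightMetadata *light_metadata) {
    gsr_capture_foo *self = cap->priv; // made-up backend state

    if(!self->hdr_metadata_set) // nothing captured yet, caller will try again later
        return false;

    light_metadata->MaxCLL = self->max_cll;
    light_metadata->MaxFALL = self->max_fall;

    mastering_display_metadata->max_luminance = av_make_q(self->max_display_mastering_luminance, 1);
    mastering_display_metadata->has_luminance = mastering_display_metadata->max_luminance.num > 0;
    return true;
}

// Registered next to the other callbacks in the backend's create function:
//     .set_hdr_metadata = gsr_capture_foo_set_hdr_metadata,
```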