-rw-r--r--  include/capture/kms.h           |  2 +-
-rw-r--r--  src/capture/kms.c               |  5 ++++-
-rw-r--r--  src/capture/kms_cuda.c          |  2 +-
-rw-r--r--  src/capture/kms_vaapi.c         |  4 ++--
-rw-r--r--  src/capture/nvfbc.c             |  3 +++
-rw-r--r--  src/capture/xcomposite_cuda.c   |  3 +++
-rw-r--r--  src/capture/xcomposite_vaapi.c  |  5 ++++-
-rw-r--r--  src/color_conversion.c          | 22 +++++++++++-----------
-rw-r--r--  src/main.cpp                    | 10 ++++++++--
-rwxr-xr-x  study/create_matrix.py          | 14 ++++++++++----
10 files changed, 47 insertions(+), 23 deletions(-)
diff --git a/include/capture/kms.h b/include/capture/kms.h
index ae53f15..a38b4ff 100644
--- a/include/capture/kms.h
+++ b/include/capture/kms.h
@@ -40,7 +40,7 @@ struct gsr_capture_kms {
};
/* Returns 0 on success */
-int gsr_capture_kms_start(gsr_capture_kms *self, gsr_capture_base *base, const char *display_to_capture, gsr_egl *egl, AVCodecContext *video_codec_context);
+int gsr_capture_kms_start(gsr_capture_kms *self, gsr_capture_base *base, const char *display_to_capture, gsr_egl *egl, AVCodecContext *video_codec_context, AVFrame *frame);
void gsr_capture_kms_stop(gsr_capture_kms *self);
bool gsr_capture_kms_capture(gsr_capture_kms *self, gsr_capture_base *base, AVFrame *frame, gsr_egl *egl, bool hdr, bool screen_plane_use_modifiers, bool cursor_texture_is_external);
void gsr_capture_kms_cleanup_kms_fds(gsr_capture_kms *self);
diff --git a/src/capture/kms.c b/src/capture/kms.c
index 32e5d3e..c3ec64b 100644
--- a/src/capture/kms.c
+++ b/src/capture/kms.c
@@ -39,7 +39,7 @@ static int max_int(int a, int b) {
return a > b ? a : b;
}
-int gsr_capture_kms_start(gsr_capture_kms *self, gsr_capture_base *base, const char *display_to_capture, gsr_egl *egl, AVCodecContext *video_codec_context) {
+int gsr_capture_kms_start(gsr_capture_kms *self, gsr_capture_base *base, const char *display_to_capture, gsr_egl *egl, AVCodecContext *video_codec_context, AVFrame *frame) {
base->video_codec_context = video_codec_context;
gsr_monitor monitor;
@@ -77,6 +77,9 @@ int gsr_capture_kms_start(gsr_capture_kms *self, gsr_capture_base *base, const c
base->video_codec_context->width = max_int(2, even_number_ceil(self->capture_size.x));
base->video_codec_context->height = max_int(2, even_number_ceil(self->capture_size.y));
+
+ frame->width = base->video_codec_context->width;
+ frame->height = base->video_codec_context->height;
return 0;
}
diff --git a/src/capture/kms_cuda.c b/src/capture/kms_cuda.c
index 8583d56..775aab5 100644
--- a/src/capture/kms_cuda.c
+++ b/src/capture/kms_cuda.c
@@ -79,7 +79,7 @@ static bool cuda_create_codec_context(gsr_capture_kms_cuda *cap_kms, AVCodecCont
static int gsr_capture_kms_cuda_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
gsr_capture_kms_cuda *cap_kms = cap->priv;
- const int res = gsr_capture_kms_start(&cap_kms->kms, &cap_kms->base, cap_kms->params.display_to_capture, cap_kms->params.egl, video_codec_context);
+ const int res = gsr_capture_kms_start(&cap_kms->kms, &cap_kms->base, cap_kms->params.display_to_capture, cap_kms->params.egl, video_codec_context, frame);
if(res != 0) {
gsr_capture_kms_cuda_stop(cap, video_codec_context);
return res;
diff --git a/src/capture/kms_vaapi.c b/src/capture/kms_vaapi.c
index 3363d9c..8fddf04 100644
--- a/src/capture/kms_vaapi.c
+++ b/src/capture/kms_vaapi.c
@@ -50,7 +50,7 @@ static bool drm_create_codec_context(gsr_capture_kms_vaapi *cap_kms, AVCodecCont
hw_frame_context->device_ref = device_ctx;
hw_frame_context->device_ctx = (AVHWDeviceContext*)device_ctx->data;
- hw_frame_context->initial_pool_size = 20;
+ //hw_frame_context->initial_pool_size = 20;
AVVAAPIDeviceContext *vactx =((AVHWDeviceContext*)device_ctx->data)->hwctx;
cap_kms->va_dpy = vactx->display;
@@ -71,7 +71,7 @@ static bool drm_create_codec_context(gsr_capture_kms_vaapi *cap_kms, AVCodecCont
static int gsr_capture_kms_vaapi_start(gsr_capture *cap, AVCodecContext *video_codec_context, AVFrame *frame) {
gsr_capture_kms_vaapi *cap_kms = cap->priv;
- int res = gsr_capture_kms_start(&cap_kms->kms, &cap_kms->base, cap_kms->params.display_to_capture, cap_kms->params.egl, video_codec_context);
+ int res = gsr_capture_kms_start(&cap_kms->kms, &cap_kms->base, cap_kms->params.display_to_capture, cap_kms->params.egl, video_codec_context, frame);
if(res != 0) {
gsr_capture_kms_vaapi_stop(cap, video_codec_context);
return res;
diff --git a/src/capture/nvfbc.c b/src/capture/nvfbc.c
index 0b7ce60..13ca160 100644
--- a/src/capture/nvfbc.c
+++ b/src/capture/nvfbc.c
@@ -349,6 +349,9 @@ static int gsr_capture_nvfbc_start(gsr_capture *cap, AVCodecContext *video_codec
video_codec_context->height = tracking_height & ~1;
}
+ frame->width = video_codec_context->width;
+ frame->height = video_codec_context->height;
+
if(!ffmpeg_create_cuda_contexts(cap_nvfbc, video_codec_context))
goto error_cleanup;
diff --git a/src/capture/xcomposite_cuda.c b/src/capture/xcomposite_cuda.c
index 1194edb..181aa70 100644
--- a/src/capture/xcomposite_cuda.c
+++ b/src/capture/xcomposite_cuda.c
@@ -204,6 +204,9 @@ static int gsr_capture_xcomposite_cuda_start(gsr_capture *cap, AVCodecContext *v
video_codec_context->height = max_int(2, cap_xcomp->params.region_size.y & ~1);
}
+ frame->width = video_codec_context->width;
+ frame->height = video_codec_context->height;
+
cap_xcomp->target_texture_id = gl_create_texture(cap_xcomp, video_codec_context->width, video_codec_context->height);
if(cap_xcomp->target_texture_id == 0) {
fprintf(stderr, "gsr error: gsr_capture_xcomposite_cuda_start: failed to create opengl texture\n");
diff --git a/src/capture/xcomposite_vaapi.c b/src/capture/xcomposite_vaapi.c
index 134c8bb..e387586 100644
--- a/src/capture/xcomposite_vaapi.c
+++ b/src/capture/xcomposite_vaapi.c
@@ -89,7 +89,7 @@ static bool drm_create_codec_context(gsr_capture_xcomposite_vaapi *cap_xcomp, AV
hw_frame_context->device_ref = device_ctx;
hw_frame_context->device_ctx = (AVHWDeviceContext*)device_ctx->data;
- hw_frame_context->initial_pool_size = 20;
+ //hw_frame_context->initial_pool_size = 20;
AVVAAPIDeviceContext *vactx =((AVHWDeviceContext*)device_ctx->data)->hwctx;
cap_xcomp->va_dpy = vactx->display;
@@ -178,6 +178,9 @@ static int gsr_capture_xcomposite_vaapi_start(gsr_capture *cap, AVCodecContext *
video_codec_context->height = max_int(2, even_number_ceil(cap_xcomp->params.region_size.y));
}
+ frame->width = video_codec_context->width;
+ frame->height = video_codec_context->height;
+
if(!drm_create_codec_context(cap_xcomp, video_codec_context)) {
gsr_capture_xcomposite_vaapi_stop(cap, video_codec_context);
return -1;
diff --git a/src/color_conversion.c b/src/color_conversion.c
index dad58ff..eb71dd6 100644
--- a/src/color_conversion.c
+++ b/src/color_conversion.c
@@ -36,18 +36,18 @@ static float abs_f(float v) {
" 0.050928, 0.429412, -0.034537, 0.000000,\n" \
" 0.062745, 0.500000, 0.500000, 1.000000);"
-/* ITU-R BT709, full */
+/* ITU-R BT709, full. Custom values (0.2000 0.7152 0.0722) */
/* https://www.itu.int/dms_pubrec/itu-r/rec/bt/R-REC-BT.709-6-201506-I!!PDF-E.pdf */
-#define RGB_TO_NV12_FULL "const mat4 RGBtoYUV = mat4(0.212600, -0.114572, 0.500000, 0.000000,\n" \
- " 0.715200, -0.385428, -0.454153, 0.000000,\n" \
- " 0.072200, 0.500000, -0.045847, 0.000000,\n" \
- " 0.000000, 0.500000, 0.500000, 1.000000);"
-
-/* ITU-R BT709, limited (full multiplied by (235-16)/255, adding 16/255 to luma) */
-#define RGB_TO_NV12_LIMITED "const mat4 RGBtoYUV = mat4(0.182586, -0.098397, 0.429412, 0.000000,\n" \
- " 0.614231, -0.331015, -0.390037, 0.000000,\n" \
- " 0.062007, 0.429412, -0.039375, 0.000000,\n" \
- " 0.062745, 0.500000, 0.500000, 1.000000);"
+#define RGB_TO_NV12_FULL "const mat4 RGBtoYUV = mat4(0.200000, -0.107782, 0.500000, 0.000000,\n" \
+ " 0.715200, -0.385428, -0.447000, 0.000000,\n" \
+ " 0.072200, 0.500000, -0.045125, 0.000000,\n" \
+ " 0.000000, 0.500000, 0.500000, 1.000000);\n"
+
+/* ITU-R BT709, limited. Custom values (0.2000 0.7152 0.0722). (full multiplied by (235-16)/255, adding 16/255 to luma) */
+#define RGB_TO_NV12_LIMITED "const mat4 RGBtoYUV = mat4(0.171765, -0.092566, 0.429412, 0.000000,\n" \
+ " 0.614231, -0.331015, -0.383894, 0.000000,\n" \
+ " 0.062007, 0.429412, -0.038754, 0.000000,\n" \
+ " 0.062745, 0.500000, 0.500000, 1.000000);\n"
static const char* color_format_range_get_transform_matrix(gsr_destination_color color_format, gsr_color_range color_range) {
switch(color_format) {
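
Sanity check for the new coefficients: both defines follow the usual RGB -> YCbCr construction with the custom luma weights Kr=0.2000, Kg=0.7152, Kb=0.0722 noted in the comments, and the limited-range variant is the full-range matrix scaled by (235-16)/255 with 16/255 added to the luma offset, exactly as the comment states. A minimal standalone Python sketch that reproduces the RGB_TO_NV12_LIMITED numbers (illustrative only: rgb_to_yuv_matrix is not part of the project, and it assumes study/create_matrix.py builds the matrix the same way):

def rgb_to_yuv_matrix(kr, kg, kb, limited):
    # Y = Kr*R + Kg*G + Kb*B, Cb = (B - Y) / (2*(1 - Kb)), Cr = (R - Y) / (2*(1 - Kr))
    scale = (235.0 - 16.0) / 255.0 if limited else 1.0
    luma_offset = 16.0 / 255.0 if limited else 0.0
    rows = [
        [kr, kg, kb],                                            # Y coefficients
        [-kr / (2.0*(1.0 - kb)), -kg / (2.0*(1.0 - kb)), 0.5],   # Cb coefficients
        [0.5, -kg / (2.0*(1.0 - kr)), -kb / (2.0*(1.0 - kr))],   # Cr coefficients
    ]
    rows = [[c * scale for c in row] for row in rows]
    offsets = [luma_offset, 0.5, 0.5]
    return rows, offsets

rows, offsets = rgb_to_yuv_matrix(0.2000, 0.7152, 0.0722, limited=True)
# Print column-major, matching the layout of the GLSL mat4 above
for col in range(3):
    print("%f, %f, %f, %f," % (rows[0][col], rows[1][col], rows[2][col], 0.0))
print("%f, %f, %f, %f" % (offsets[0], offsets[1], offsets[2], 1.0))
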
diff --git a/src/main.cpp b/src/main.cpp
index 6f66a7c..6832a9c 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -483,7 +483,7 @@ static bool vaapi_create_codec_context(AVCodecContext *video_codec_context, cons
hw_frame_context->device_ref = device_ctx;
hw_frame_context->device_ctx = (AVHWDeviceContext*)device_ctx->data;
- hw_frame_context->initial_pool_size = 1;
+ //hw_frame_context->initial_pool_size = 1;
if (av_hwframe_ctx_init(frame_context) < 0) {
fprintf(stderr, "Error: Failed to initialize hardware frame context "
@@ -834,9 +834,10 @@ static void usage_full() {
fprintf(stderr, " and the video will only be saved when the gpu-screen-recorder is closed. This feature is similar to Nvidia's instant replay feature.\n");
fprintf(stderr, " This option has be between 5 and 1200. Note that the replay buffer size will not always be precise, because of keyframes. Optional, disabled by default.\n");
fprintf(stderr, "\n");
- fprintf(stderr, " -k Video codec to use. Should be either 'auto', 'h264', 'hevc', 'hevc_hdr', 'av1' or 'av1_hdr'. Defaults to 'auto' which defaults to 'hevc' on AMD/Nvidia and 'h264' on intel.\n");
+ fprintf(stderr, " -k Video codec to use. Should be either 'auto', 'h264', 'hevc', 'av1', 'hevc_hdr' or 'av1_hdr'. Defaults to 'auto' which defaults to 'hevc' on AMD/Nvidia and 'h264' on intel.\n");
fprintf(stderr, " Forcefully set to 'h264' if the file container type is 'flv'.\n");
fprintf(stderr, " Forcefully set to 'hevc' on AMD/intel if video codec is 'h264' and if the file container type is 'mkv'.\n");
+ fprintf(stderr, " 'hevc_hdr' and 'av1_hdr' option is not available on X11.\n");
fprintf(stderr, "\n");
fprintf(stderr, " -ac Audio codec to use. Should be either 'aac', 'opus' or 'flac'. Defaults to 'opus' for .mp4/.mkv files, otherwise defaults to 'aac'.\n");
fprintf(stderr, " 'opus' and 'flac' is only supported by .mp4/.mkv files. 'opus' is recommended for best performance and smallest audio size.\n");
@@ -1845,6 +1846,11 @@ int main(int argc, char **argv) {
if(!wayland)
wayland = is_xwayland(dpy);
+ if(video_codec_is_hdr(video_codec) && !wayland) {
+ fprintf(stderr, "Error: hdr video codec option %s is not available on X11\n", video_codec_to_use);
+ _exit(1);
+ }
+
const bool is_monitor_capture = strcmp(window_str, "focused") != 0 && contains_non_hex_number(window_str);
gsr_egl egl;
if(!gsr_egl_load(&egl, dpy, wayland, is_monitor_capture)) {
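
Behavior note for the added check: on a plain X11 session (neither Wayland nor XWayland, so the wayland flag stays false), requesting an HDR codec now aborts early; for example, passing -k hevc_hdr is expected to print "Error: hdr video codec option hevc_hdr is not available on X11" and terminate via _exit(1), matching the video_codec_is_hdr() guard above.
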
diff --git a/study/create_matrix.py b/study/create_matrix.py
index 96bfcd4..1599a12 100755
--- a/study/create_matrix.py
+++ b/study/create_matrix.py
@@ -9,6 +9,12 @@ def usage():
print(" create_matrix.py 0.2126 0.7152 0.0722 limited")
exit(1)
+def a(v):
+    if v >= 0:
+        return " %f" % v
+    else:
+        return "%f" % v
+
def main(argv):
if len(argv) != 5:
usage()
@@ -34,9 +40,9 @@ def main(argv):
]
# Transform from row major to column major for glsl
- print("%f, %f, %f, %f" % (matrix[0][0] * transform_range, matrix[1][0] * transform_range, matrix[2][0] * transform_range, 0.0))
- print("%f, %f, %f, %f" % (matrix[0][1] * transform_range, matrix[1][1] * transform_range, matrix[2][1] * transform_range, 0.0))
- print("%f, %f, %f, %f" % (matrix[0][2] * transform_range, matrix[1][2] * transform_range, matrix[2][2] * transform_range, 0.0))
- print("%f, %f, %f, %f" % (matrix[3][0] + luma_offset, matrix[3][1], matrix[3][2], 1.0))
+ print("const mat4 RGBtoYUV = mat4(%f, %s, %s, %f," % (matrix[0][0] * transform_range, a(matrix[1][0] * transform_range), a(matrix[2][0] * transform_range), 0.0))
+ print(" %f, %s, %s, %f," % (matrix[0][1] * transform_range, a(matrix[1][1] * transform_range), a(matrix[2][1] * transform_range), 0.0))
+ print(" %f, %s, %s, %f," % (matrix[0][2] * transform_range, a(matrix[1][2] * transform_range), a(matrix[2][2] * transform_range), 0.0))
+ print(" %f, %s, %s, %f);" % (matrix[3][0] + luma_offset, a(matrix[3][1]), a(matrix[3][2]), 1.0))
main(sys.argv)
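
The new a() helper pads non-negative entries with a leading space so they line up with negative entries, and the script now prints a paste-ready GLSL mat4 in column-major order instead of four bare rows. Presumably the updated defines in src/color_conversion.c were generated by runs along these lines (the invocations are an assumption based on the usage() example shown at the top of the script):

    create_matrix.py 0.2000 0.7152 0.0722 full       # expected to match RGB_TO_NV12_FULL
    create_matrix.py 0.2000 0.7152 0.0722 limited    # expected to match RGB_TO_NV12_LIMITED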