author    | dec05eba <dec05eba@protonmail.com> | 2024-09-27 03:03:09 +0200
committer | dec05eba <dec05eba@protonmail.com> | 2024-09-27 03:03:09 +0200
commit    | ebc8c69bacb7c8aa7fd1ccebddc281760a11b88e (patch)
tree      | f1d6b3d0e364a41c405d185a7cd0176453e3bf3c /src
parent    | 412cf0dbae0705ca91a16ec92dadf129eaa9424e (diff)
Add vulkan video encoding option (-k h264_vulkan). WIP, not fully hardware accelerated yet
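The new encoder is selected through the existing -k flag, matching the usage string updated in main.cpp below. A minimal sketch of an invocation (the monitor name DP-1, frame rate and output path are placeholders for illustration, not taken from this commit):

    gpu-screen-recorder -w DP-1 -f 60 -k h264_vulkan -o video.mp4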
Diffstat (limited to 'src')
-rw-r--r-- | src/codec_query/vulkan.c     | 153
-rw-r--r-- | src/egl.c                    |   3
-rw-r--r-- | src/encoder/video/cuda.c     |   2
-rw-r--r-- | src/encoder/video/software.c |   2
-rw-r--r-- | src/encoder/video/vaapi.c    |   1
-rw-r--r-- | src/encoder/video/video.c    |   4
-rw-r--r-- | src/encoder/video/vulkan.c   | 186
-rw-r--r-- | src/main.cpp                 | 103
8 files changed, 418 insertions, 36 deletions
diff --git a/src/codec_query/vulkan.c b/src/codec_query/vulkan.c new file mode 100644 index 0000000..c19c4f7 --- /dev/null +++ b/src/codec_query/vulkan.c @@ -0,0 +1,153 @@ +#include "../../include/codec_query/vulkan.h" + +#include <stdio.h> +#include <string.h> +#include <stdlib.h> +#include <xf86drm.h> +#include <vulkan/vulkan.h> + +#define MAX_PHYSICAL_DEVICES 32 + +static const char *required_device_extensions[] = { + "VK_KHR_external_memory_fd", + "VK_KHR_external_semaphore_fd", + "VK_KHR_video_encode_queue", + "VK_KHR_video_queue", + "VK_KHR_video_maintenance1", + "VK_EXT_external_memory_dma_buf", + "VK_EXT_external_memory_host", + "VK_EXT_image_drm_format_modifier" +}; +static int num_required_device_extensions = 8; + +bool gsr_get_supported_video_codecs_vulkan(gsr_supported_video_codecs *video_codecs, const char *card_path, bool cleanup) { + memset(video_codecs, 0, sizeof(*video_codecs)); +#if 0 + bool success = false; + VkInstance instance = NULL; + VkPhysicalDevice physical_devices[MAX_PHYSICAL_DEVICES]; + VkDevice device = NULL; + VkExtensionProperties *device_extensions = NULL; + + const VkApplicationInfo app_info = { + .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO, + .pApplicationName = "GPU Screen Recorder", + .applicationVersion = VK_MAKE_VERSION(1, 0, 0), + .pEngineName = "GPU Screen Recorder", + .engineVersion = VK_MAKE_VERSION(1, 0, 0), + .apiVersion = VK_API_VERSION_1_3, + }; + + const VkInstanceCreateInfo instance_create_info = { + .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, + .pApplicationInfo = &app_info + }; + + if(vkCreateInstance(&instance_create_info, NULL, &instance) != VK_SUCCESS) { + fprintf(stderr, "gsr error: gsr_get_supported_video_codecs_vulkan: vkCreateInstance failed\n"); + goto done; + } + + uint32_t num_devices = 0; + if(vkEnumeratePhysicalDevices(instance, &num_devices, NULL) != VK_SUCCESS) { + fprintf(stderr, "gsr error: gsr_get_supported_video_codecs_vulkan: vkEnumeratePhysicalDevices (query num devices) failed\n"); + goto done; + } + + if(num_devices == 0) { + fprintf(stderr, "gsr error: gsr_get_supported_video_codecs_vulkan: no vulkan capable device found\n"); + goto done; + } + + if(num_devices > MAX_PHYSICAL_DEVICES) + num_devices = MAX_PHYSICAL_DEVICES; + + if(vkEnumeratePhysicalDevices(instance, &num_devices, physical_devices) != VK_SUCCESS) { + fprintf(stderr, "gsr error: gsr_get_supported_video_codecs_vulkan: vkEnumeratePhysicalDevices (get data) failed\n"); + goto done; + } + + VkPhysicalDevice physical_device = NULL; + char device_card_path[128]; + for(uint32_t i = 0; i < num_devices; ++i) { + VkPhysicalDeviceDrmPropertiesEXT device_drm_properties = { + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT + }; + + VkPhysicalDeviceProperties2 device_properties = { + .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + .pNext = &device_drm_properties + }; + vkGetPhysicalDeviceProperties2(physical_devices[i], &device_properties); + + if(!device_drm_properties.hasPrimary) + continue; + + snprintf(device_card_path, sizeof(device_card_path), DRM_DEV_NAME, DRM_DIR_NAME, (int)device_drm_properties.primaryMinor); + if(strcmp(device_card_path, card_path) == 0) { + physical_device = physical_devices[i]; + break; + } + } + + if(!physical_device) { + fprintf(stderr, "gsr error: gsr_get_supported_video_codecs_vulkan: failed to find a vulkan device that matches opengl device %s\n", card_path); + goto done; + } + + const VkDeviceCreateInfo device_create_info = { + .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, + .enabledExtensionCount = 
num_required_device_extensions, + .ppEnabledExtensionNames = required_device_extensions + }; + + if(vkCreateDevice(physical_device, &device_create_info, NULL, &device) != VK_SUCCESS) { + //fprintf(stderr, "gsr error: gsr_get_supported_video_codecs_vulkan: vkCreateDevice failed. Device %s likely doesn't support vulkan video encoding\n", card_path); + goto done; + } + + uint32_t num_device_extensions = 0; + if(vkEnumerateDeviceExtensionProperties(physical_device, NULL, &num_device_extensions, NULL) != VK_SUCCESS) { + fprintf(stderr, "gsr error: gsr_get_supported_video_codecs_vulkan: vkEnumerateDeviceExtensionProperties (query num device extensions) failed\n"); + goto done; + } + + device_extensions = calloc(num_device_extensions, sizeof(VkExtensionProperties)); + if(!device_extensions) { + fprintf(stderr, "gsr error: gsr_get_supported_video_codecs_vulkan: failed to allocate %d device extensions\n", num_device_extensions); + goto done; + } + + if(vkEnumerateDeviceExtensionProperties(physical_device, NULL, &num_device_extensions, device_extensions) != VK_SUCCESS) { + fprintf(stderr, "gsr error: gsr_get_supported_video_codecs_vulkan: vkEnumerateDeviceExtensionProperties (get data) failed\n"); + goto done; + } + + for(uint32_t i = 0; i < num_device_extensions; ++i) { + if(strcmp(device_extensions[i].extensionName, "VK_KHR_video_encode_h264") == 0) { + video_codecs->h264 = true; + } else if(strcmp(device_extensions[i].extensionName, "VK_KHR_video_encode_h265") == 0) { + // TODO: Verify if 10bit and hdr are actually supported + video_codecs->hevc = true; + video_codecs->hevc_10bit = true; + video_codecs->hevc_hdr = true; + } + } + + success = true; + + done: + if(cleanup) { + if(device) + vkDestroyDevice(device, NULL); + if(instance) + vkDestroyInstance(instance, NULL); + } + if(device_extensions) + free(device_extensions); + return success; +#else + video_codecs->h264 = true; + return true; +#endif +} @@ -499,6 +499,9 @@ static bool gsr_egl_load_gl(gsr_egl *self, void *library) { { (void**)&self->glUniform2f, "glUniform2f" }, { (void**)&self->glDebugMessageCallback, "glDebugMessageCallback" }, { (void**)&self->glScissor, "glScissor" }, + { (void**)&self->glReadPixels, "glReadPixels" }, + { (void**)&self->glMapBuffer, "glMapBuffer" }, + { (void**)&self->glUnmapBuffer, "glUnmapBuffer" }, { NULL, NULL } }; diff --git a/src/encoder/video/cuda.c b/src/encoder/video/cuda.c index f3cc9a4..6d26cdd 100644 --- a/src/encoder/video/cuda.c +++ b/src/encoder/video/cuda.c @@ -171,7 +171,7 @@ void gsr_video_encoder_cuda_stop(gsr_video_encoder_cuda *self, AVCodecContext *v gsr_cuda_unload(&self->cuda); } -static void gsr_video_encoder_cuda_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame) { +static void gsr_video_encoder_cuda_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame, gsr_color_conversion *color_conversion) { gsr_video_encoder_cuda *self = encoder->priv; const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size for(int i = 0; i < 2; ++i) { diff --git a/src/encoder/video/software.c b/src/encoder/video/software.c index cedcc1b..be227f2 100644 --- a/src/encoder/video/software.c +++ b/src/encoder/video/software.c @@ -84,7 +84,7 @@ void gsr_video_encoder_software_stop(gsr_video_encoder_software *self, AVCodecCo self->target_textures[1] = 0; } -static void gsr_video_encoder_software_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame) { +static void gsr_video_encoder_software_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame 
*frame, gsr_color_conversion *color_conversion) { gsr_video_encoder_software *self = encoder->priv; // TODO: hdr support const unsigned int formats[2] = { GL_RED, GL_RG }; diff --git a/src/encoder/video/vaapi.c b/src/encoder/video/vaapi.c index b5ccce9..19bbab8 100644 --- a/src/encoder/video/vaapi.c +++ b/src/encoder/video/vaapi.c @@ -235,7 +235,6 @@ gsr_video_encoder* gsr_video_encoder_vaapi_create(const gsr_video_encoder_vaapi_ *encoder = (gsr_video_encoder) { .start = gsr_video_encoder_vaapi_start, - .copy_textures_to_frame = NULL, .get_textures = gsr_video_encoder_vaapi_get_textures, .destroy = gsr_video_encoder_vaapi_destroy, .priv = encoder_vaapi diff --git a/src/encoder/video/video.c b/src/encoder/video/video.c index 9b0def0..76d53b0 100644 --- a/src/encoder/video/video.c +++ b/src/encoder/video/video.c @@ -9,10 +9,10 @@ bool gsr_video_encoder_start(gsr_video_encoder *encoder, AVCodecContext *video_c return res; } -void gsr_video_encoder_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame) { +void gsr_video_encoder_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame, gsr_color_conversion *color_conversion) { assert(encoder->started); if(encoder->copy_textures_to_frame) - encoder->copy_textures_to_frame(encoder, frame); + encoder->copy_textures_to_frame(encoder, frame, color_conversion); } void gsr_video_encoder_get_textures(gsr_video_encoder *encoder, unsigned int *textures, int *num_textures, gsr_destination_color *destination_color) { diff --git a/src/encoder/video/vulkan.c b/src/encoder/video/vulkan.c index 4b0f1ea..cd6b592 100644 --- a/src/encoder/video/vulkan.c +++ b/src/encoder/video/vulkan.c @@ -5,10 +5,28 @@ #include <libavcodec/avcodec.h> #include <libavutil/hwcontext_vulkan.h> +//#include <vulkan/vulkan_core.h> + +#define GL_TEXTURE_TILING_EXT 0x9580 +#define GL_OPTIMAL_TILING_EXT 0x9584 +#define GL_LINEAR_TILING_EXT 0x9585 + +#define GL_PIXEL_PACK_BUFFER 0x88EB +#define GL_PIXEL_UNPACK_BUFFER 0x88EC +#define GL_STREAM_READ 0x88E1 +#define GL_STREAM_DRAW 0x88E0 +#define GL_READ_ONLY 0x88B8 +#define GL_WRITE_ONLY 0x88B9 +#define GL_READ_FRAMEBUFFER 0x8CA8 + typedef struct { gsr_video_encoder_vulkan_params params; unsigned int target_textures[2]; AVBufferRef *device_ctx; + AVVulkanDeviceContext* vv; + unsigned int pbo_y[2]; + unsigned int pbo_uv[2]; + AVFrame *sw_frame; } gsr_video_encoder_vulkan; static bool gsr_video_encoder_vulkan_setup_context(gsr_video_encoder_vulkan *self, AVCodecContext *video_codec_context) { @@ -32,6 +50,7 @@ static bool gsr_video_encoder_vulkan_setup_context(gsr_video_encoder_vulkan *sel hw_frame_context->format = video_codec_context->pix_fmt; hw_frame_context->device_ctx = (AVHWDeviceContext*)self->device_ctx->data; + //AVVulkanFramesContext *vk_frame_ctx = (AVVulkanFramesContext*)hw_frame_context->hwctx; //hw_frame_context->initial_pool_size = 20; if (av_hwframe_ctx_init(frame_context) < 0) { @@ -47,12 +66,92 @@ static bool gsr_video_encoder_vulkan_setup_context(gsr_video_encoder_vulkan *sel return true; } +static unsigned int gl_create_texture(gsr_egl *egl, int width, int height, int internal_format, unsigned int format) { + unsigned int texture_id = 0; + egl->glGenTextures(1, &texture_id); + egl->glBindTexture(GL_TEXTURE_2D, texture_id); + //egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_TILING_EXT, GL_OPTIMAL_TILING_EXT); + egl->glTexImage2D(GL_TEXTURE_2D, 0, internal_format, width, height, 0, format, GL_UNSIGNED_BYTE, NULL); + + egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + 
egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); + egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + + egl->glBindTexture(GL_TEXTURE_2D, 0); + return texture_id; +} + +static AVVulkanDeviceContext* video_codec_context_get_vulkan_data(AVCodecContext *video_codec_context) { + AVBufferRef *hw_frames_ctx = video_codec_context->hw_frames_ctx; + if(!hw_frames_ctx) + return NULL; + + AVHWFramesContext *hw_frame_context = (AVHWFramesContext*)hw_frames_ctx->data; + AVHWDeviceContext *device_context = (AVHWDeviceContext*)hw_frame_context->device_ctx; + if(device_context->type != AV_HWDEVICE_TYPE_VULKAN) + return NULL; + + return (AVVulkanDeviceContext*)device_context->hwctx; +} + static bool gsr_video_encoder_vulkan_setup_textures(gsr_video_encoder_vulkan *self, AVCodecContext *video_codec_context, AVFrame *frame) { const int res = av_hwframe_get_buffer(video_codec_context->hw_frames_ctx, frame, 0); if(res < 0) { fprintf(stderr, "gsr error: gsr_video_encoder_vulkan_setup_textures: av_hwframe_get_buffer failed: %d\n", res); return false; } + + //AVVkFrame *target_surface_id = (AVVkFrame*)frame->data[0]; + self->vv = video_codec_context_get_vulkan_data(video_codec_context); + + const unsigned int internal_formats_nv12[2] = { GL_RGBA8, GL_RGBA8 }; + const unsigned int internal_formats_p010[2] = { GL_R16, GL_RG16 }; + const unsigned int formats[2] = { GL_RED, GL_RG }; + const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size + + for(int i = 0; i < 2; ++i) { + self->target_textures[i] = gl_create_texture(self->params.egl, video_codec_context->width / div[i], video_codec_context->height / div[i], self->params.color_depth == GSR_COLOR_DEPTH_8_BITS ? 
internal_formats_nv12[i] : internal_formats_p010[i], formats[i]); + if(self->target_textures[i] == 0) { + fprintf(stderr, "gsr error: gsr_video_encoder_cuda_setup_textures: failed to create opengl texture\n"); + return false; + } + } + + self->params.egl->glGenBuffers(2, self->pbo_y); + + self->params.egl->glBindBuffer(GL_PIXEL_PACK_BUFFER, self->pbo_y[0]); + self->params.egl->glBufferData(GL_PIXEL_PACK_BUFFER, 3840 * 2160, 0, GL_STREAM_READ); + + self->params.egl->glBindBuffer(GL_PIXEL_PACK_BUFFER, self->pbo_y[1]); + self->params.egl->glBufferData(GL_PIXEL_PACK_BUFFER, 3840 * 2160, 0, GL_STREAM_READ); + + self->params.egl->glBindBuffer(GL_PIXEL_PACK_BUFFER, 0); + + self->params.egl->glGenBuffers(2, self->pbo_uv); + + self->params.egl->glBindBuffer(GL_PIXEL_PACK_BUFFER, self->pbo_uv[0]); + self->params.egl->glBufferData(GL_PIXEL_PACK_BUFFER, 1920 * 1080 * 2, 0, GL_STREAM_READ); + + self->params.egl->glBindBuffer(GL_PIXEL_PACK_BUFFER, self->pbo_uv[1]); + self->params.egl->glBufferData(GL_PIXEL_PACK_BUFFER, 1920 * 1080 * 2, 0, GL_STREAM_READ); + + self->params.egl->glBindBuffer(GL_PIXEL_PACK_BUFFER, 0); + + self->sw_frame = av_frame_alloc(); + self->sw_frame->format = AV_PIX_FMT_NV12; + self->sw_frame->width = frame->width; + self->sw_frame->height = frame->height; + + // TODO: Remove + if(av_frame_get_buffer(self->sw_frame, 0) < 0) { + fprintf(stderr, "failed to allocate sw frame\n"); + } + + // TODO: Remove + if(av_frame_make_writable(self->sw_frame) < 0) { + fprintf(stderr, "failed to make writable\n"); + } return true; } @@ -85,6 +184,91 @@ void gsr_video_encoder_vulkan_stop(gsr_video_encoder_vulkan *self, AVCodecContex av_buffer_unref(&self->device_ctx); } +static void nop_free(void *opaque, uint8_t *data) { + +} + +static void gsr_video_encoder_vulkan_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame, gsr_color_conversion *color_conversion) { + gsr_video_encoder_vulkan *self = encoder->priv; + + static int counter = 0; + ++counter; + + // AVBufferRef *av_buffer_create(uint8_t *data, size_t size, + // void (*free)(void *opaque, uint8_t *data), + // void *opaque, int flags); + + while(self->params.egl->glGetError()){} + self->params.egl->glBindFramebuffer(GL_READ_FRAMEBUFFER, color_conversion->framebuffers[0]); + //fprintf(stderr, "1 gl err: %d\n", self->params.egl->glGetError()); + self->params.egl->glBindBuffer(GL_PIXEL_PACK_BUFFER, self->pbo_y[counter % 2]); + self->params.egl->glBufferData(GL_PIXEL_PACK_BUFFER, 3840 * 2160, 0, GL_STREAM_READ); + self->params.egl->glReadPixels(0, 0, 3840, 2160, GL_RED, GL_UNSIGNED_BYTE, 0); + //fprintf(stderr, "2 gl err: %d\n", self->params.egl->glGetError()); + + const int next_pbo_y = (counter + 1) % 2; + self->params.egl->glBindBuffer(GL_PIXEL_PACK_BUFFER, self->pbo_y[next_pbo_y]); + self->params.egl->glBufferData(GL_PIXEL_PACK_BUFFER, 3840 * 2160, 0, GL_STREAM_READ); + //fprintf(stderr, "3 gl err: %d\n", self->params.egl->glGetError()); + uint8_t *ptr_y = (uint8_t*)self->params.egl->glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY); + //fprintf(stderr, "4 gl err: %d\n", self->params.egl->glGetError()); + if(!ptr_y) { + fprintf(stderr, "failed to map buffer y!\n"); + } + + while(self->params.egl->glGetError()){} + self->params.egl->glBindFramebuffer(GL_READ_FRAMEBUFFER, color_conversion->framebuffers[1]); + //fprintf(stderr, "5 gl err: %d\n", self->params.egl->glGetError()); + self->params.egl->glBindBuffer(GL_PIXEL_PACK_BUFFER, self->pbo_uv[counter % 2]); + self->params.egl->glBufferData(GL_PIXEL_PACK_BUFFER, 1920 * 1080 * 2, 0, 
GL_STREAM_READ); + //fprintf(stderr, "5.5 gl err: %d\n", self->params.egl->glGetError()); + self->params.egl->glReadPixels(0, 0, 1920, 1080, GL_RG, GL_UNSIGNED_BYTE, 0); + //fprintf(stderr, "6 gl err: %d\n", self->params.egl->glGetError()); + + const int next_pbo_uv = (counter + 1) % 2; + self->params.egl->glBindBuffer(GL_PIXEL_PACK_BUFFER, self->pbo_uv[next_pbo_uv]); + self->params.egl->glBufferData(GL_PIXEL_PACK_BUFFER, 1920 * 1080 * 2, 0, GL_STREAM_READ); + //fprintf(stderr, "7 gl err: %d\n", self->params.egl->glGetError()); + uint8_t *ptr_uv = (uint8_t*)self->params.egl->glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY); + //fprintf(stderr, "8 gl err: %d\n", self->params.egl->glGetError()); + if(!ptr_uv) { + fprintf(stderr, "failed to map buffer uv!\n"); + } + + //self->sw_frame->buf[0] = av_buffer_create(ptr_y, 3840 * 2160, nop_free, NULL, 0); + //self->sw_frame->buf[1] = av_buffer_create(ptr_uv, 1920 * 1080 * 2, nop_free, NULL, 0); + //self->sw_frame->data[0] = self->sw_frame->buf[0]->data; + //self->sw_frame->data[1] = self->sw_frame->buf[1]->data; + //self->sw_frame->extended_data[0] = self->sw_frame->data[0]; + //self->sw_frame->extended_data[1] = self->sw_frame->data[1]; + + self->sw_frame->data[0] = ptr_y; + self->sw_frame->data[1] = ptr_uv; + + self->params.egl->glBindBuffer(GL_PIXEL_PACK_BUFFER, 0); + self->params.egl->glBindFramebuffer(GL_READ_FRAMEBUFFER, 0); + + //self->params.egl->glBindTexture(GL_TEXTURE_2D, self->target_textures[1]); + //self->params.egl->glGetTexImage(GL_TEXTURE_2D, 0, GL_RG, GL_UNSIGNED_BYTE, sw_frame->data[1]); + + //self->params.egl->glBindTexture(GL_TEXTURE_2D, 0); + + int ret = av_hwframe_transfer_data(frame, self->sw_frame, 0); + if(ret < 0) { + fprintf(stderr, "transfer data failed, error: %s\n", av_err2str(ret)); + } + + //av_buffer_unref(&self->sw_frame->buf[0]); + //av_buffer_unref(&self->sw_frame->buf[1]); + + //av_frame_free(&sw_frame); + self->params.egl->glBindBuffer(GL_PIXEL_PACK_BUFFER, self->pbo_y[next_pbo_y]); + self->params.egl->glUnmapBuffer(GL_PIXEL_PACK_BUFFER); + self->params.egl->glBindBuffer(GL_PIXEL_PACK_BUFFER, self->pbo_y[next_pbo_uv]); + self->params.egl->glUnmapBuffer(GL_PIXEL_PACK_BUFFER); + self->params.egl->glBindBuffer(GL_PIXEL_PACK_BUFFER, 0); +} + static void gsr_video_encoder_vulkan_get_textures(gsr_video_encoder *encoder, unsigned int *textures, int *num_textures, gsr_destination_color *destination_color) { gsr_video_encoder_vulkan *self = encoder->priv; textures[0] = self->target_textures[0]; @@ -114,7 +298,7 @@ gsr_video_encoder* gsr_video_encoder_vulkan_create(const gsr_video_encoder_vulka *encoder = (gsr_video_encoder) { .start = gsr_video_encoder_vulkan_start, - .copy_textures_to_frame = NULL, + .copy_textures_to_frame = gsr_video_encoder_vulkan_copy_textures_to_frame, .get_textures = gsr_video_encoder_vulkan_get_textures, .destroy = gsr_video_encoder_vulkan_destroy, .priv = encoder_vulkan diff --git a/src/main.cpp b/src/main.cpp index 79f5be8..4b84f78 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -8,9 +8,11 @@ extern "C" { #endif #include "../include/encoder/video/cuda.h" #include "../include/encoder/video/vaapi.h" +#include "../include/encoder/video/vulkan.h" #include "../include/encoder/video/software.h" #include "../include/codec_query/cuda.h" #include "../include/codec_query/vaapi.h" +#include "../include/codec_query/vulkan.h" #include "../include/egl.h" #include "../include/utils.h" #include "../include/damage.h" @@ -102,7 +104,8 @@ enum class VideoCodec { AV1_HDR, AV1_10BIT, VP8, - VP9 + VP9, + 
H264_VULKAN }; enum class AudioCodec { @@ -977,7 +980,7 @@ static void open_video_hardware(AVCodecContext *codec_context, VideoQuality vide static void usage_header() { const bool inside_flatpak = getenv("FLATPAK_ID") != NULL; const char *program_name = inside_flatpak ? "flatpak run --command=gpu-screen-recorder com.dec05eba.gpu_screen_recorder" : "gpu-screen-recorder"; - fprintf(stderr, "usage: %s -w <window_id|monitor|focused|portal> [-c <container_format>] [-s WxH] -f <fps> [-a <audio_input>] [-q <quality>] [-r <replay_buffer_size_sec>] [-k h264|hevc|av1|vp8|vp9|hevc_hdr|av1_hdr|hevc_10bit|av1_10bit] [-ac aac|opus|flac] [-ab <bitrate>] [-oc yes|no] [-fm cfr|vfr|content] [-bm auto|qp|vbr] [-cr limited|full] [-df yes|no] [-sc <script_path>] [-cursor yes|no] [-keyint <value>] [-restore-portal-session yes|no] [-portal-session-token-filepath filepath] [-encoder gpu|cpu] [-o <output_file>] [-v yes|no] [--version] [-h|--help]\n", program_name); + fprintf(stderr, "usage: %s -w <window_id|monitor|focused|portal> [-c <container_format>] [-s WxH] -f <fps> [-a <audio_input>] [-q <quality>] [-r <replay_buffer_size_sec>] [-k h264|hevc|av1|vp8|vp9|hevc_hdr|av1_hdr|hevc_10bit|av1_10bit|h264_vulkan] [-ac aac|opus|flac] [-ab <bitrate>] [-oc yes|no] [-fm cfr|vfr|content] [-bm auto|qp|vbr] [-cr limited|full] [-df yes|no] [-sc <script_path>] [-cursor yes|no] [-keyint <value>] [-restore-portal-session yes|no] [-portal-session-token-filepath filepath] [-encoder gpu|cpu] [-o <output_file>] [-v yes|no] [--version] [-h|--help]\n", program_name); } // TODO: Update with portal info @@ -1020,8 +1023,8 @@ static void usage_full() { fprintf(stderr, " and the video will only be saved when the gpu-screen-recorder is closed. This feature is similar to Nvidia's instant replay feature.\n"); fprintf(stderr, " This option has be between 5 and 1200. Note that the replay buffer size will not always be precise, because of keyframes. Optional, disabled by default.\n"); fprintf(stderr, "\n"); - fprintf(stderr, " -k Video codec to use. Should be either 'auto', 'h264', 'hevc', 'av1', 'vp8', 'vp9', 'hevc_hdr', 'av1_hdr', 'hevc_10bit' or 'av1_10bit'. Optional, set to 'auto' by default which defaults to 'h264'.\n"); - fprintf(stderr, " Forcefully set to 'h264' if the file container type is 'flv'.\n"); + fprintf(stderr, " -k Video codec to use. Should be either 'auto', 'h264', 'hevc', 'av1', 'vp8', 'vp9', 'hevc_hdr', 'av1_hdr', 'hevc_10bit', 'av1_10bit' or 'h264_vulkan'.\n"); + fprintf(stderr, " Optional, set to 'auto' by default which defaults to 'h264'. Forcefully set to 'h264' if the file container type is 'flv'.\n"); fprintf(stderr, " 'hevc_hdr' and 'av1_hdr' option is not available on X11 nor when using the portal capture option.\n"); fprintf(stderr, " 'hevc_10bit' and 'av1_10bit' options allow you to select 10 bit color depth which can reduce banding and improve quality in darker areas, but not all video players support 10 bit color depth\n"); fprintf(stderr, " and if you upload the video to a website the website might reduce 10 bit to 8 bit.\n"); @@ -1081,7 +1084,7 @@ static void usage_full() { fprintf(stderr, "\n"); fprintf(stderr, " --info\n"); fprintf(stderr, " List info about the system (for use by GPU Screen Recorder UI). 
Lists the following information (prints them to stdout and exits):\n"); - fprintf(stderr, " Supported video codecs (h264, h264_software, hevc, hevc_hdr, hevc_10bit, av1, av1_hdr, av1_10bit, vp8, vp9, (if supported)).\n"); + fprintf(stderr, " Supported video codecs (h264, h264_software, hevc, hevc_hdr, hevc_10bit, av1, av1_hdr, av1_10bit, vp8, vp9, h264_vulkan (if supported)).\n"); fprintf(stderr, " Supported capture options (window, focused, screen, monitors and portal, if supported by the system).\n"); fprintf(stderr, " If opengl initialization fails then the program exits with 22, if no usable drm device is found then it exits with 23. On success it exits with 0.\n"); fprintf(stderr, "\n"); @@ -1625,7 +1628,7 @@ static int init_filter_graph(AVCodecContext *audio_codec_context, AVFilterGraph return 0; } -static gsr_video_encoder* create_video_encoder(gsr_egl *egl, bool overclock, gsr_color_depth color_depth, bool use_software_video_encoder) { +static gsr_video_encoder* create_video_encoder(gsr_egl *egl, bool overclock, gsr_color_depth color_depth, bool use_software_video_encoder, VideoCodec video_codec) { gsr_video_encoder *video_encoder = nullptr; if(use_software_video_encoder) { @@ -1636,6 +1639,14 @@ static gsr_video_encoder* create_video_encoder(gsr_egl *egl, bool overclock, gsr return video_encoder; } + if(video_codec == VideoCodec::H264_VULKAN) { + gsr_video_encoder_vulkan_params params; + params.egl = egl; + params.color_depth = color_depth; + video_encoder = gsr_video_encoder_vulkan_create(¶ms); + return video_encoder; + } + switch(egl->gpu_info.vendor) { case GSR_GPU_VENDOR_AMD: case GSR_GPU_VENDOR_INTEL: { @@ -1658,7 +1669,7 @@ static gsr_video_encoder* create_video_encoder(gsr_egl *egl, bool overclock, gsr return video_encoder; } -static bool get_supported_video_codecs(gsr_egl *egl, bool use_software_video_encoder, bool cleanup, gsr_supported_video_codecs *video_codecs) { +static bool get_supported_video_codecs(gsr_egl *egl, VideoCodec video_codec, bool use_software_video_encoder, bool cleanup, gsr_supported_video_codecs *video_codecs) { memset(video_codecs, 0, sizeof(*video_codecs)); if(use_software_video_encoder) { @@ -1666,6 +1677,9 @@ static bool get_supported_video_codecs(gsr_egl *egl, bool use_software_video_enc return true; } + if(video_codec == VideoCodec::H264_VULKAN) + return gsr_get_supported_video_codecs_vulkan(video_codecs, egl->card_path, cleanup); + switch(egl->gpu_info.vendor) { case GSR_GPU_VENDOR_AMD: case GSR_GPU_VENDOR_INTEL: @@ -1741,6 +1755,8 @@ static const AVCodec* get_ffmpeg_video_codec(VideoCodec video_codec, gsr_gpu_ven return avcodec_find_encoder_by_name(vendor == GSR_GPU_VENDOR_NVIDIA ? "vp8_nvenc" : "vp8_vaapi"); case VideoCodec::VP9: return avcodec_find_encoder_by_name(vendor == GSR_GPU_VENDOR_NVIDIA ? 
"vp9_nvenc" : "vp9_vaapi"); + case VideoCodec::H264_VULKAN: + return avcodec_find_encoder_by_name("h264_vulkan"); } return nullptr; } @@ -1773,14 +1789,15 @@ static void set_supported_video_codecs_ffmpeg(gsr_supported_video_codecs *suppor static void list_supported_video_codecs(gsr_egl *egl, bool wayland) { // Dont clean it up on purpose to increase shutdown speed - gsr_video_encoder *video_encoder = create_video_encoder(egl, false, GSR_COLOR_DEPTH_8_BITS, false); - if(!video_encoder) - return; - gsr_supported_video_codecs supported_video_codecs; - get_supported_video_codecs(egl, false, false, &supported_video_codecs); + get_supported_video_codecs(egl, VideoCodec::H264, false, false, &supported_video_codecs); set_supported_video_codecs_ffmpeg(&supported_video_codecs, egl->gpu_info.vendor); + gsr_supported_video_codecs supported_video_codecs_vulkan; + get_supported_video_codecs(egl, VideoCodec::H264_VULKAN, false, false, &supported_video_codecs_vulkan); + if(!get_ffmpeg_video_codec(VideoCodec::H264_VULKAN, egl->gpu_info.vendor)) + memset(&supported_video_codecs_vulkan, 0, sizeof(supported_video_codecs_vulkan)); + if(supported_video_codecs.h264) puts("h264"); if(avcodec_find_encoder_by_name("libx264")) @@ -1801,6 +1818,8 @@ static void list_supported_video_codecs(gsr_egl *egl, bool wayland) { puts("vp8"); if(supported_video_codecs.vp9) puts("vp9"); + if(supported_video_codecs_vulkan.h264) + puts("h264_vulkan"); } static bool monitor_capture_use_drm(gsr_egl *egl, bool wayland) { @@ -2107,11 +2126,14 @@ static gsr_capture* create_capture_impl(std::string &window_str, const char *scr return capture; } -static AVPixelFormat get_pixel_format(gsr_gpu_vendor vendor, bool use_software_video_encoder) { +static AVPixelFormat get_pixel_format(VideoCodec video_codec, gsr_gpu_vendor vendor, bool use_software_video_encoder) { if(use_software_video_encoder) { return AV_PIX_FMT_NV12; } else { - return vendor == GSR_GPU_VENDOR_NVIDIA ? AV_PIX_FMT_CUDA : AV_PIX_FMT_VAAPI; + if(video_codec == VideoCodec::H264_VULKAN) + return AV_PIX_FMT_VULKAN; + else + return vendor == GSR_GPU_VENDOR_NVIDIA ? 
AV_PIX_FMT_CUDA : AV_PIX_FMT_VAAPI; } } @@ -2228,16 +2250,17 @@ static AudioCodec select_audio_codec_with_fallback(AudioCodec audio_codec, const } static const char* video_codec_to_string(VideoCodec video_codec) { - switch(video_codec) { - case VideoCodec::H264: return "h264"; - case VideoCodec::HEVC: return "hevc"; - case VideoCodec::HEVC_HDR: return "hevc_hdr"; - case VideoCodec::HEVC_10BIT: return "hevc_10bit"; - case VideoCodec::AV1: return "av1"; - case VideoCodec::AV1_HDR: return "av1_hdr"; - case VideoCodec::AV1_10BIT: return "av1_10bit"; - case VideoCodec::VP8: return "vp8"; - case VideoCodec::VP9: return "vp9"; + switch(video_codec) { + case VideoCodec::H264: return "h264"; + case VideoCodec::HEVC: return "hevc"; + case VideoCodec::HEVC_HDR: return "hevc_hdr"; + case VideoCodec::HEVC_10BIT: return "hevc_10bit"; + case VideoCodec::AV1: return "av1"; + case VideoCodec::AV1_HDR: return "av1_hdr"; + case VideoCodec::AV1_10BIT: return "av1_10bit"; + case VideoCodec::VP8: return "vp8"; + case VideoCodec::VP9: return "vp9"; + case VideoCodec::H264_VULKAN: return "h264_vulkan"; } return ""; } @@ -2246,7 +2269,7 @@ static const AVCodec* pick_video_codec(VideoCodec *video_codec, gsr_egl *egl, bo // TODO: software encoder for hevc, av1, vp8 and vp9 gsr_supported_video_codecs supported_video_codecs; - if(!get_supported_video_codecs(egl, use_software_video_encoder, true, &supported_video_codecs)) { + if(!get_supported_video_codecs(egl, *video_codec, use_software_video_encoder, true, &supported_video_codecs)) { fprintf(stderr, "Error: failed to query for supported video codecs\n"); _exit(11); } @@ -2301,6 +2324,11 @@ static const AVCodec* pick_video_codec(VideoCodec *video_codec, gsr_egl *egl, bo video_codec_f = get_ffmpeg_video_codec(*video_codec, egl->gpu_info.vendor); break; } + case VideoCodec::H264_VULKAN: { + if(supported_video_codecs.h264) + video_codec_f = get_ffmpeg_video_codec(*video_codec, egl->gpu_info.vendor); + break; + } } if(!video_codec_auto && !video_codec_f && !is_flv) { @@ -2336,6 +2364,19 @@ static const AVCodec* pick_video_codec(VideoCodec *video_codec, gsr_egl *egl, bo case VideoCodec::VP9: // TODO: Cant fallback to other codec because webm only supports vp8/vp9 break; + case VideoCodec::H264_VULKAN: { + fprintf(stderr, "Warning: selected video codec h264_vulkan is not supported, trying h264 instead\n"); + video_codec_to_use = "h264"; + *video_codec = VideoCodec::H264; + // Need to do a query again because this time it's without vulkan + if(!get_supported_video_codecs(egl, *video_codec, use_software_video_encoder, true, &supported_video_codecs)) { + fprintf(stderr, "Error: failed to query for supported video codecs\n"); + _exit(11); + } + if(supported_video_codecs.h264) + video_codec_f = get_ffmpeg_video_codec(*video_codec, egl->gpu_info.vendor); + break; + } } } @@ -2532,8 +2573,10 @@ int main(int argc, char **argv) { video_codec = VideoCodec::VP8; } else if(strcmp(video_codec_to_use, "vp9") == 0) { video_codec = VideoCodec::VP9; + } else if(strcmp(video_codec_to_use, "h264_vulkan") == 0) { + video_codec = VideoCodec::H264_VULKAN; } else if(strcmp(video_codec_to_use, "auto") != 0) { - fprintf(stderr, "Error: -k should either be either 'auto', 'h264', 'hevc', 'av1', 'vp8', 'vp9', 'hevc_hdr', 'av1_hdr', 'hevc_10bit' or 'av1_10bit', got: '%s'\n", video_codec_to_use); + fprintf(stderr, "Error: -k should either be either 'auto', 'h264', 'hevc', 'av1', 'vp8', 'vp9', 'hevc_hdr', 'av1_hdr', 'hevc_10bit', 'av1_10bit' or 'h264_vulkan', got: '%s'\n", video_codec_to_use); usage(); } 
@@ -3010,7 +3053,8 @@ int main(int argc, char **argv) { const bool hdr = video_codec_is_hdr(video_codec); const bool low_latency_recording = is_livestream || is_output_piped; - AVCodecContext *video_codec_context = create_video_codec_context(get_pixel_format(egl.gpu_info.vendor, use_software_video_encoder), quality, fps, video_codec_f, low_latency_recording, egl.gpu_info.vendor, framerate_mode, hdr, color_range, keyint, use_software_video_encoder, bitrate_mode); + const enum AVPixelFormat video_pix_fmt = get_pixel_format(video_codec, egl.gpu_info.vendor, use_software_video_encoder); + AVCodecContext *video_codec_context = create_video_codec_context(video_pix_fmt, quality, fps, video_codec_f, low_latency_recording, egl.gpu_info.vendor, framerate_mode, hdr, color_range, keyint, use_software_video_encoder, bitrate_mode); if(replay_buffer_size_secs == -1) video_stream = create_stream(av_format_context, video_codec_context); @@ -3034,7 +3078,7 @@ int main(int argc, char **argv) { _exit(capture_result); } - gsr_video_encoder *video_encoder = create_video_encoder(&egl, overclock, color_depth, use_software_video_encoder); + gsr_video_encoder *video_encoder = create_video_encoder(&egl, overclock, color_depth, use_software_video_encoder, video_codec); if(!video_encoder) { fprintf(stderr, "Error: failed to create video encoder\n"); _exit(1); @@ -3479,8 +3523,7 @@ int main(int argc, char **argv) { egl.glClear(0); gsr_capture_capture(capture, video_frame, &color_conversion); gsr_egl_swap_buffers(&egl); - - gsr_video_encoder_copy_textures_to_frame(video_encoder, video_frame); + gsr_video_encoder_copy_textures_to_frame(video_encoder, video_frame, &color_conversion); if(hdr && !hdr_metadata_set && replay_buffer_size_secs == -1 && add_hdr_metadata_to_video_stream(capture, video_stream)) hdr_metadata_set = true; |