author    dec05eba <dec05eba@protonmail.com>    2024-07-05 01:59:04 +0200
committer dec05eba <dec05eba@protonmail.com>    2024-07-05 10:33:47 +0200
commit    b9fa7f2fa25ee37c87077bce468c95e48fce5c18 (patch)
tree      3aff293110d8405e8057edb9c757f6e630c22de2 /src/encoder/video
parent    62d61fda12e3774fee6b671e44fb89bd2ef8da8c (diff)
Separate video encoding method from capture method
With this, instead of kms_cuda/kms_vaapi/kms_software and xcomposite_cuda/xcomposite_vaapi/xcomposite_software there are now only kms and xcomposite.
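
The encoder is now chosen independently of the capture method. Below is a minimal sketch (not part of this commit) of how a caller could create one of the new encoder objects and prepare it before handing it to a capture method. It assumes include paths relative to the repository root and uses the gsr_video_encoder_cuda_params fields that the code in this diff actually reads (egl, overclock, hdr); error cleanup is elided.

#include <stdbool.h>
#include <libavcodec/avcodec.h>
#include "include/encoder/video/cuda.h"   // path assumed relative to the repo root
#include "include/encoder/video/video.h"

// Create a CUDA encoder and prepare the target textures the capture method
// (kms or xcomposite) will render into. The vaapi or software encoder could
// be substituted here without touching the capture code.
static gsr_video_encoder* create_cuda_encoder(gsr_egl *egl, AVCodecContext *video_codec_context, AVFrame *frame, bool hdr) {
    gsr_video_encoder_cuda_params params = {
        .egl = egl,
        .overclock = false,
        .hdr = hdr,
    };

    gsr_video_encoder *encoder = gsr_video_encoder_cuda_create(&params);
    if(!encoder)
        return NULL;

    // Sets up the AVHWFramesContext and the NV12/P010 target textures.
    if(!gsr_video_encoder_start(encoder, video_codec_context, frame))
        return NULL; // cleanup elided for brevity

    return encoder;
}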
Diffstat (limited to 'src/encoder/video')
-rw-r--r--  src/encoder/video/cuda.c      236
-rw-r--r--  src/encoder/video/software.c  127
-rw-r--r--  src/encoder/video/vaapi.c     221
-rw-r--r--  src/encoder/video/video.c      26
4 files changed, 610 insertions, 0 deletions
diff --git a/src/encoder/video/cuda.c b/src/encoder/video/cuda.c
new file mode 100644
index 0000000..2568bc7
--- /dev/null
+++ b/src/encoder/video/cuda.c
@@ -0,0 +1,236 @@
+#include "../../../include/encoder/video/cuda.h"
+#include "../../../include/egl.h"
+#include "../../../include/cuda.h"
+
+#include <libavcodec/avcodec.h>
+#include <libavutil/hwcontext_cuda.h>
+
+#include <stdlib.h>
+
+typedef struct {
+ gsr_video_encoder_cuda_params params;
+
+ unsigned int target_textures[2];
+
+ gsr_cuda cuda;
+ CUgraphicsResource cuda_graphics_resources[2];
+ CUarray mapped_arrays[2];
+ CUstream cuda_stream;
+} gsr_video_encoder_cuda;
+
+static bool gsr_video_encoder_cuda_setup_context(gsr_video_encoder_cuda *self, AVCodecContext *video_codec_context) {
+ AVBufferRef *device_ctx = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_CUDA);
+ if(!device_ctx) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_cuda_setup_context failed: failed to create hardware device context\n");
+ return false;
+ }
+
+ AVHWDeviceContext *hw_device_context = (AVHWDeviceContext*)device_ctx->data;
+ AVCUDADeviceContext *cuda_device_context = (AVCUDADeviceContext*)hw_device_context->hwctx;
+ cuda_device_context->cuda_ctx = self->cuda.cu_ctx;
+ if(av_hwdevice_ctx_init(device_ctx) < 0) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_cuda_setup_context failed: failed to create hardware device context\n");
+ av_buffer_unref(&device_ctx);
+ return false;
+ }
+
+ AVBufferRef *frame_context = av_hwframe_ctx_alloc(device_ctx);
+ if(!frame_context) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_cuda_setup_context failed: failed to create hwframe context\n");
+ av_buffer_unref(&device_ctx);
+ return false;
+ }
+
+ AVHWFramesContext *hw_frame_context = (AVHWFramesContext*)frame_context->data;
+ hw_frame_context->width = video_codec_context->width;
+ hw_frame_context->height = video_codec_context->height;
+ hw_frame_context->sw_format = self->params.hdr ? AV_PIX_FMT_P010LE : AV_PIX_FMT_NV12;
+ hw_frame_context->format = video_codec_context->pix_fmt;
+ hw_frame_context->device_ref = device_ctx;
+ hw_frame_context->device_ctx = (AVHWDeviceContext*)device_ctx->data;
+
+ if (av_hwframe_ctx_init(frame_context) < 0) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_cuda_setup_context failed: failed to initialize hardware frame context "
+ "(note: ffmpeg version needs to be > 4.0)\n");
+ av_buffer_unref(&device_ctx);
+ //av_buffer_unref(&frame_context);
+ return false;
+ }
+
+ self->cuda_stream = cuda_device_context->stream;
+ video_codec_context->hw_device_ctx = av_buffer_ref(device_ctx);
+ video_codec_context->hw_frames_ctx = av_buffer_ref(frame_context);
+ return true;
+}
+
+static unsigned int gl_create_texture(gsr_egl *egl, int width, int height, int internal_format, unsigned int format) {
+ unsigned int texture_id = 0;
+ egl->glGenTextures(1, &texture_id);
+ egl->glBindTexture(GL_TEXTURE_2D, texture_id);
+ egl->glTexImage2D(GL_TEXTURE_2D, 0, internal_format, width, height, 0, format, GL_UNSIGNED_BYTE, NULL);
+
+ egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
+ egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+
+ egl->glBindTexture(GL_TEXTURE_2D, 0);
+ return texture_id;
+}
+
+static bool cuda_register_opengl_texture(gsr_cuda *cuda, CUgraphicsResource *cuda_graphics_resource, CUarray *mapped_array, unsigned int texture_id) {
+ CUresult res;
+ res = cuda->cuGraphicsGLRegisterImage(cuda_graphics_resource, texture_id, GL_TEXTURE_2D, CU_GRAPHICS_REGISTER_FLAGS_NONE);
+ if (res != CUDA_SUCCESS) {
+ const char *err_str = "unknown";
+ cuda->cuGetErrorString(res, &err_str);
+ fprintf(stderr, "gsr error: cuda_register_opengl_texture: cuGraphicsGLRegisterImage failed, error: %s, texture " "id: %u\n", err_str, texture_id);
+ return false;
+ }
+
+ res = cuda->cuGraphicsResourceSetMapFlags(*cuda_graphics_resource, CU_GRAPHICS_MAP_RESOURCE_FLAGS_NONE);
+ res = cuda->cuGraphicsMapResources(1, cuda_graphics_resource, 0);
+
+ res = cuda->cuGraphicsSubResourceGetMappedArray(mapped_array, *cuda_graphics_resource, 0, 0);
+ return true;
+}
+
+static bool gsr_video_encoder_cuda_setup_textures(gsr_video_encoder_cuda *self, AVCodecContext *video_codec_context, AVFrame *frame) {
+ const int res = av_hwframe_get_buffer(video_codec_context->hw_frames_ctx, frame, 0);
+ if(res < 0) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_cuda_setup_textures: av_hwframe_get_buffer failed: %d\n", res);
+ return false;
+ }
+
+ const unsigned int internal_formats_nv12[2] = { GL_R8, GL_RG8 };
+ const unsigned int internal_formats_p010[2] = { GL_R16, GL_RG16 };
+ const unsigned int formats[2] = { GL_RED, GL_RG };
+ const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size
+
+ for(int i = 0; i < 2; ++i) {
+ self->target_textures[i] = gl_create_texture(self->params.egl, video_codec_context->width / div[i], video_codec_context->height / div[i], !self->params.hdr ? internal_formats_nv12[i] : internal_formats_p010[i], formats[i]);
+ if(self->target_textures[i] == 0) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_cuda_setup_textures: failed to create opengl texture\n");
+ return false;
+ }
+
+ if(!cuda_register_opengl_texture(&self->cuda, &self->cuda_graphics_resources[i], &self->mapped_arrays[i], self->target_textures[i])) {
+ return false;
+ }
+ }
+
+ return true;
+}
+
+static void gsr_video_encoder_cuda_stop(gsr_video_encoder_cuda *self, AVCodecContext *video_codec_context);
+
+static bool gsr_video_encoder_cuda_start(gsr_video_encoder *encoder, AVCodecContext *video_codec_context, AVFrame *frame) {
+ gsr_video_encoder_cuda *encoder_cuda = encoder->priv;
+
+ // TODO: Force set overclock to false if wayland
+ if(!gsr_cuda_load(&encoder_cuda->cuda, encoder_cuda->params.egl->x11.dpy, encoder_cuda->params.overclock)) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_cuda_start: failed to load cuda\n");
+ gsr_video_encoder_cuda_stop(encoder_cuda, video_codec_context);
+ return false;
+ }
+
+ if(!gsr_video_encoder_cuda_setup_context(encoder_cuda, video_codec_context)) {
+ gsr_video_encoder_cuda_stop(encoder_cuda, video_codec_context);
+ return false;
+ }
+
+ if(!gsr_video_encoder_cuda_setup_textures(encoder_cuda, video_codec_context, frame)) {
+ gsr_video_encoder_cuda_stop(encoder_cuda, video_codec_context);
+ return false;
+ }
+
+ return true;
+}
+
+void gsr_video_encoder_cuda_stop(gsr_video_encoder_cuda *self, AVCodecContext *video_codec_context) {
+ self->params.egl->glDeleteTextures(2, self->target_textures);
+ self->target_textures[0] = 0;
+ self->target_textures[1] = 0;
+
+ if(video_codec_context->hw_device_ctx)
+ av_buffer_unref(&video_codec_context->hw_device_ctx);
+ if(video_codec_context->hw_frames_ctx)
+ av_buffer_unref(&video_codec_context->hw_frames_ctx);
+
+ if(self->cuda.cu_ctx) {
+ for(int i = 0; i < 2; ++i) {
+ if(self->cuda_graphics_resources[i]) {
+ self->cuda.cuGraphicsUnmapResources(1, &self->cuda_graphics_resources[i], 0);
+ self->cuda.cuGraphicsUnregisterResource(self->cuda_graphics_resources[i]);
+ self->cuda_graphics_resources[i] = 0;
+ }
+ }
+ }
+
+ gsr_cuda_unload(&self->cuda);
+}
+
+static void gsr_video_encoder_cuda_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame) {
+ gsr_video_encoder_cuda *encoder_cuda = encoder->priv;
+ const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size
+ for(int i = 0; i < 2; ++i) {
+ CUDA_MEMCPY2D memcpy_struct;
+ memcpy_struct.srcXInBytes = 0;
+ memcpy_struct.srcY = 0;
+ memcpy_struct.srcMemoryType = CU_MEMORYTYPE_ARRAY;
+
+ memcpy_struct.dstXInBytes = 0;
+ memcpy_struct.dstY = 0;
+ memcpy_struct.dstMemoryType = CU_MEMORYTYPE_DEVICE;
+
+ memcpy_struct.srcArray = encoder_cuda->mapped_arrays[i];
+ memcpy_struct.srcPitch = frame->width / div[i];
+ memcpy_struct.dstDevice = (CUdeviceptr)frame->data[i];
+ memcpy_struct.dstPitch = frame->linesize[i];
+ memcpy_struct.WidthInBytes = frame->width * (encoder_cuda->params.hdr ? 2 : 1);
+ memcpy_struct.Height = frame->height / div[i];
+ // TODO: Remove this copy if possible
+ encoder_cuda->cuda.cuMemcpy2DAsync_v2(&memcpy_struct, encoder_cuda->cuda_stream);
+ }
+
+ // TODO: needed?
+ encoder_cuda->cuda.cuStreamSynchronize(encoder_cuda->cuda_stream);
+}
+
+static void gsr_video_encoder_cuda_get_textures(gsr_video_encoder *encoder, unsigned int *textures, int *num_textures, gsr_destination_color *destination_color) {
+ gsr_video_encoder_cuda *encoder_cuda = encoder->priv;
+ textures[0] = encoder_cuda->target_textures[0];
+ textures[1] = encoder_cuda->target_textures[1];
+ *num_textures = 2;
+ *destination_color = encoder_cuda->params.hdr ? GSR_DESTINATION_COLOR_P010 : GSR_DESTINATION_COLOR_NV12;
+}
+
+static void gsr_video_encoder_cuda_destroy(gsr_video_encoder *encoder, AVCodecContext *video_codec_context) {
+ gsr_video_encoder_cuda_stop(encoder->priv, video_codec_context);
+ free(encoder->priv);
+ free(encoder);
+}
+
+gsr_video_encoder* gsr_video_encoder_cuda_create(const gsr_video_encoder_cuda_params *params) {
+ gsr_video_encoder *encoder = calloc(1, sizeof(gsr_video_encoder));
+ if(!encoder)
+ return NULL;
+
+ gsr_video_encoder_cuda *encoder_cuda = calloc(1, sizeof(gsr_video_encoder_cuda));
+ if(!encoder_cuda) {
+ free(encoder);
+ return NULL;
+ }
+
+ encoder_cuda->params = *params;
+
+ *encoder = (gsr_video_encoder) {
+ .start = gsr_video_encoder_cuda_start,
+ .copy_textures_to_frame = gsr_video_encoder_cuda_copy_textures_to_frame,
+ .get_textures = gsr_video_encoder_cuda_get_textures,
+ .destroy = gsr_video_encoder_cuda_destroy,
+ .priv = encoder_cuda
+ };
+
+ return encoder;
+}
diff --git a/src/encoder/video/software.c b/src/encoder/video/software.c
new file mode 100644
index 0000000..c5fde4d
--- /dev/null
+++ b/src/encoder/video/software.c
@@ -0,0 +1,127 @@
+#include "../../../include/encoder/video/software.h"
+#include "../../../include/egl.h"
+
+#include <libavcodec/avcodec.h>
+#include <libavutil/frame.h>
+
+#include <stdlib.h>
+
+typedef struct {
+ gsr_video_encoder_software_params params;
+
+ unsigned int target_textures[2];
+} gsr_video_encoder_software;
+
+static unsigned int gl_create_texture(gsr_egl *egl, int width, int height, int internal_format, unsigned int format) {
+ unsigned int texture_id = 0;
+ egl->glGenTextures(1, &texture_id);
+ egl->glBindTexture(GL_TEXTURE_2D, texture_id);
+ egl->glTexImage2D(GL_TEXTURE_2D, 0, internal_format, width, height, 0, format, GL_UNSIGNED_BYTE, NULL);
+
+ egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
+ egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+
+ egl->glBindTexture(GL_TEXTURE_2D, 0);
+ return texture_id;
+}
+
+static bool gsr_video_encoder_software_setup_textures(gsr_video_encoder_software *self, AVCodecContext *video_codec_context, AVFrame *frame) {
+ int res = av_frame_get_buffer(frame, 1); // TODO: Align?
+ if(res < 0) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_software_setup_textures: av_frame_get_buffer failed: %d\n", res);
+ return false;
+ }
+
+ res = av_frame_make_writable(frame);
+ if(res < 0) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_software_setup_textures: av_frame_make_writable failed: %d\n", res);
+ return false;
+ }
+
+ const unsigned int internal_formats_nv12[2] = { GL_R8, GL_RG8 };
+ const unsigned int internal_formats_p010[2] = { GL_R16, GL_RG16 };
+ const unsigned int formats[2] = { GL_RED, GL_RG };
+ const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size
+
+ for(int i = 0; i < 2; ++i) {
+ self->target_textures[i] = gl_create_texture(self->params.egl, video_codec_context->width / div[i], video_codec_context->height / div[i], !self->params.hdr ? internal_formats_nv12[i] : internal_formats_p010[i], formats[i]);
+ if(self->target_textures[i] == 0) {
+ fprintf(stderr, "gsr error: gsr_capture_kms_setup_cuda_textures: failed to create opengl texture\n");
+ return false;
+ }
+ }
+
+ return true;
+}
+
+static void gsr_video_encoder_software_stop(gsr_video_encoder_software *self, AVCodecContext *video_codec_context);
+
+static bool gsr_video_encoder_software_start(gsr_video_encoder *encoder, AVCodecContext *video_codec_context, AVFrame *frame) {
+ gsr_video_encoder_software *encoder_software = encoder->priv;
+
+ if(!gsr_video_encoder_software_setup_textures(encoder_software, video_codec_context, frame)) {
+ gsr_video_encoder_software_stop(encoder_software, video_codec_context);
+ return false;
+ }
+
+ return true;
+}
+
+void gsr_video_encoder_software_stop(gsr_video_encoder_software *self, AVCodecContext *video_codec_context) {
+ (void)video_codec_context;
+ self->params.egl->glDeleteTextures(2, self->target_textures);
+ self->target_textures[0] = 0;
+ self->target_textures[1] = 0;
+}
+
+static void gsr_video_encoder_software_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame) {
+ gsr_video_encoder_software *encoder_software = encoder->priv;
+ // TODO: hdr support
+ const unsigned int formats[2] = { GL_RED, GL_RG };
+ for(int i = 0; i < 2; ++i) {
+ encoder_software->params.egl->glBindTexture(GL_TEXTURE_2D, encoder_software->target_textures[i]);
+ encoder_software->params.egl->glGetTexImage(GL_TEXTURE_2D, 0, formats[i], GL_UNSIGNED_BYTE, frame->data[i]);
+ }
+ encoder_software->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
+ // cap_kms->kms.base.egl->eglSwapBuffers(cap_kms->kms.base.egl->egl_display, cap_kms->kms.base.egl->egl_surface);
+}
+
+static void gsr_video_encoder_software_get_textures(gsr_video_encoder *encoder, unsigned int *textures, int *num_textures, gsr_destination_color *destination_color) {
+ gsr_video_encoder_software *encoder_software = encoder->priv;
+ textures[0] = encoder_software->target_textures[0];
+ textures[1] = encoder_software->target_textures[1];
+ *num_textures = 2;
+ *destination_color = encoder_software->params.hdr ? GSR_DESTINATION_COLOR_P010 : GSR_DESTINATION_COLOR_NV12;
+}
+
+static void gsr_video_encoder_software_destroy(gsr_video_encoder *encoder, AVCodecContext *video_codec_context) {
+ gsr_video_encoder_software_stop(encoder->priv, video_codec_context);
+ free(encoder->priv);
+ free(encoder);
+}
+
+gsr_video_encoder* gsr_video_encoder_software_create(const gsr_video_encoder_software_params *params) {
+ gsr_video_encoder *encoder = calloc(1, sizeof(gsr_video_encoder));
+ if(!encoder)
+ return NULL;
+
+ gsr_video_encoder_software *encoder_software = calloc(1, sizeof(gsr_video_encoder_software));
+ if(!encoder_software) {
+ free(encoder);
+ return NULL;
+ }
+
+ encoder_software->params = *params;
+
+ *encoder = (gsr_video_encoder) {
+ .start = gsr_video_encoder_software_start,
+ .copy_textures_to_frame = gsr_video_encoder_software_copy_textures_to_frame,
+ .get_textures = gsr_video_encoder_software_get_textures,
+ .destroy = gsr_video_encoder_software_destroy,
+ .priv = encoder_software
+ };
+
+ return encoder;
+}
diff --git a/src/encoder/video/vaapi.c b/src/encoder/video/vaapi.c
new file mode 100644
index 0000000..318ab1a
--- /dev/null
+++ b/src/encoder/video/vaapi.c
@@ -0,0 +1,221 @@
+#include "../../../include/encoder/video/vaapi.h"
+#include "../../../include/utils.h"
+#include "../../../include/egl.h"
+
+#include <libavcodec/avcodec.h>
+#include <libavutil/hwcontext_vaapi.h>
+
+#include <va/va_drmcommon.h>
+
+#include <stdlib.h>
+#include <unistd.h>
+
+typedef struct {
+ gsr_video_encoder_vaapi_params params;
+
+ unsigned int target_textures[2];
+
+ VADisplay va_dpy;
+ VADRMPRIMESurfaceDescriptor prime;
+} gsr_video_encoder_vaapi;
+
+static bool gsr_video_encoder_vaapi_setup_context(gsr_video_encoder_vaapi *self, AVCodecContext *video_codec_context) {
+ char render_path[128];
+ if(!gsr_card_path_get_render_path(self->params.egl->card_path, render_path)) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_context: failed to get /dev/dri/renderDXXX file from %s\n", self->params.egl->card_path);
+ return false;
+ }
+
+ AVBufferRef *device_ctx;
+ if(av_hwdevice_ctx_create(&device_ctx, AV_HWDEVICE_TYPE_VAAPI, render_path, NULL, 0) < 0) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_context: failed to create hardware device context\n");
+ return false;
+ }
+
+ AVBufferRef *frame_context = av_hwframe_ctx_alloc(device_ctx);
+ if(!frame_context) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_context: failed to create hwframe context\n");
+ av_buffer_unref(&device_ctx);
+ return false;
+ }
+
+ AVHWFramesContext *hw_frame_context =
+ (AVHWFramesContext *)frame_context->data;
+ hw_frame_context->width = video_codec_context->width;
+ hw_frame_context->height = video_codec_context->height;
+ hw_frame_context->sw_format = self->params.hdr ? AV_PIX_FMT_P010LE : AV_PIX_FMT_NV12;
+ hw_frame_context->format = video_codec_context->pix_fmt;
+ hw_frame_context->device_ref = device_ctx;
+ hw_frame_context->device_ctx = (AVHWDeviceContext*)device_ctx->data;
+
+ //hw_frame_context->initial_pool_size = 20;
+
+ AVVAAPIDeviceContext *vactx =((AVHWDeviceContext*)device_ctx->data)->hwctx;
+ self->va_dpy = vactx->display;
+
+ if (av_hwframe_ctx_init(frame_context) < 0) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_context: failed to initialize hardware frame context "
+ "(note: ffmpeg version needs to be > 4.0)\n");
+ av_buffer_unref(&device_ctx);
+ //av_buffer_unref(&frame_context);
+ return false;
+ }
+
+ video_codec_context->hw_device_ctx = av_buffer_ref(device_ctx);
+ video_codec_context->hw_frames_ctx = av_buffer_ref(frame_context);
+ return true;
+}
+
+static uint32_t fourcc(uint32_t a, uint32_t b, uint32_t c, uint32_t d) {
+ return (d << 24) | (c << 16) | (b << 8) | a;
+}
+
+static bool gsr_video_encoder_vaapi_setup_textures(gsr_video_encoder_vaapi *self, AVCodecContext *video_codec_context, AVFrame *frame) {
+ const int res = av_hwframe_get_buffer(video_codec_context->hw_frames_ctx, frame, 0);
+ if(res < 0) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_textures: av_hwframe_get_buffer failed: %d\n", res);
+ return false;
+ }
+
+ VASurfaceID target_surface_id = (uintptr_t)frame->data[3];
+
+ VAStatus va_status = vaExportSurfaceHandle(self->va_dpy, target_surface_id, VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2, VA_EXPORT_SURFACE_WRITE_ONLY | VA_EXPORT_SURFACE_SEPARATE_LAYERS, &self->prime);
+ if(va_status != VA_STATUS_SUCCESS) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_textures: vaExportSurfaceHandle failed, error: %d\n", va_status);
+ return false;
+ }
+ vaSyncSurface(self->va_dpy, target_surface_id);
+
+ const uint32_t formats_nv12[2] = { fourcc('R', '8', ' ', ' '), fourcc('G', 'R', '8', '8') };
+ const uint32_t formats_p010[2] = { fourcc('R', '1', '6', ' '), fourcc('G', 'R', '3', '2') };
+
+ if(self->prime.fourcc == VA_FOURCC_NV12 || self->prime.fourcc == VA_FOURCC_P010) {
+ const uint32_t *formats = self->prime.fourcc == VA_FOURCC_NV12 ? formats_nv12 : formats_p010;
+ const int div[2] = {1, 2}; // divide UV texture size by 2 because chroma is half size
+
+ self->params.egl->glGenTextures(2, self->target_textures);
+ for(int i = 0; i < 2; ++i) {
+ const int layer = i;
+ const int plane = 0;
+
+ const uint64_t modifier = self->prime.objects[self->prime.layers[layer].object_index[plane]].drm_format_modifier;
+ const intptr_t img_attr[] = {
+ EGL_LINUX_DRM_FOURCC_EXT, formats[i],
+ EGL_WIDTH, self->prime.width / div[i],
+ EGL_HEIGHT, self->prime.height / div[i],
+ EGL_DMA_BUF_PLANE0_FD_EXT, self->prime.objects[self->prime.layers[layer].object_index[plane]].fd,
+ EGL_DMA_BUF_PLANE0_OFFSET_EXT, self->prime.layers[layer].offset[plane],
+ EGL_DMA_BUF_PLANE0_PITCH_EXT, self->prime.layers[layer].pitch[plane],
+ EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT, modifier & 0xFFFFFFFFULL,
+ EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT, modifier >> 32ULL,
+ EGL_NONE
+ };
+
+ while(self->params.egl->eglGetError() != EGL_SUCCESS){}
+ EGLImage image = self->params.egl->eglCreateImage(self->params.egl->egl_display, 0, EGL_LINUX_DMA_BUF_EXT, NULL, img_attr);
+ if(!image) {
+ fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_textures: failed to create egl image from drm fd for output drm fd, error: %d\n", self->params.egl->eglGetError());
+ return false;
+ }
+
+ self->params.egl->glBindTexture(GL_TEXTURE_2D, self->target_textures[i]);
+ self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ self->params.egl->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+
+ while(self->params.egl->glGetError()) {}
+ while(self->params.egl->eglGetError() != EGL_SUCCESS){}
+ self->params.egl->glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, image);
+ if(self->params.egl->glGetError() != 0 || self->params.egl->eglGetError() != EGL_SUCCESS) {
+ // TODO: Get the error properly
+ fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_textures: failed to bind egl image to gl texture, error: %d\n", self->params.egl->eglGetError());
+ self->params.egl->eglDestroyImage(self->params.egl->egl_display, image);
+ self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
+ return false;
+ }
+
+ self->params.egl->eglDestroyImage(self->params.egl->egl_display, image);
+ self->params.egl->glBindTexture(GL_TEXTURE_2D, 0);
+ }
+
+ return true;
+ } else {
+ fprintf(stderr, "gsr error: gsr_video_encoder_vaapi_setup_textures: unexpected fourcc %u for output drm fd, expected nv12 or p010\n", self->prime.fourcc);
+ return false;
+ }
+}
+
+static void gsr_video_encoder_vaapi_stop(gsr_video_encoder_vaapi *self, AVCodecContext *video_codec_context);
+
+static bool gsr_video_encoder_vaapi_start(gsr_video_encoder *encoder, AVCodecContext *video_codec_context, AVFrame *frame) {
+ gsr_video_encoder_vaapi *encoder_vaapi = encoder->priv;
+
+ if(!gsr_video_encoder_vaapi_setup_context(encoder_vaapi, video_codec_context)) {
+ gsr_video_encoder_vaapi_stop(encoder_vaapi, video_codec_context);
+ return false;
+ }
+
+ if(!gsr_video_encoder_vaapi_setup_textures(encoder_vaapi, video_codec_context, frame)) {
+ gsr_video_encoder_vaapi_stop(encoder_vaapi, video_codec_context);
+ return false;
+ }
+
+ return true;
+}
+
+void gsr_video_encoder_vaapi_stop(gsr_video_encoder_vaapi *self, AVCodecContext *video_codec_context) {
+ self->params.egl->glDeleteTextures(2, self->target_textures);
+ self->target_textures[0] = 0;
+ self->target_textures[1] = 0;
+
+ if(video_codec_context->hw_device_ctx)
+ av_buffer_unref(&video_codec_context->hw_device_ctx);
+ if(video_codec_context->hw_frames_ctx)
+ av_buffer_unref(&video_codec_context->hw_frames_ctx);
+
+ for(uint32_t i = 0; i < self->prime.num_objects; ++i) {
+ if(self->prime.objects[i].fd > 0) {
+ close(self->prime.objects[i].fd);
+ self->prime.objects[i].fd = 0;
+ }
+ }
+}
+
+static void gsr_video_encoder_vaapi_get_textures(gsr_video_encoder *encoder, unsigned int *textures, int *num_textures, gsr_destination_color *destination_color) {
+ gsr_video_encoder_vaapi *encoder_vaapi = encoder->priv;
+ textures[0] = encoder_vaapi->target_textures[0];
+ textures[1] = encoder_vaapi->target_textures[1];
+ *num_textures = 2;
+ *destination_color = encoder_vaapi->params.hdr ? GSR_DESTINATION_COLOR_P010 : GSR_DESTINATION_COLOR_NV12;
+}
+
+static void gsr_video_encoder_vaapi_destroy(gsr_video_encoder *encoder, AVCodecContext *video_codec_context) {
+ gsr_video_encoder_vaapi_stop(encoder->priv, video_codec_context);
+ free(encoder->priv);
+ free(encoder);
+}
+
+gsr_video_encoder* gsr_video_encoder_vaapi_create(const gsr_video_encoder_vaapi_params *params) {
+ gsr_video_encoder *encoder = calloc(1, sizeof(gsr_video_encoder));
+ if(!encoder)
+ return NULL;
+
+ gsr_video_encoder_vaapi *encoder_vaapi = calloc(1, sizeof(gsr_video_encoder_vaapi));
+ if(!encoder_vaapi) {
+ free(encoder);
+ return NULL;
+ }
+
+ encoder_vaapi->params = *params;
+
+ *encoder = (gsr_video_encoder) {
+ .start = gsr_video_encoder_vaapi_start,
+ .copy_textures_to_frame = NULL, // not needed: the target textures alias the exported VAAPI surface, so the capture method renders directly into the encoded frame
+ .get_textures = gsr_video_encoder_vaapi_get_textures,
+ .destroy = gsr_video_encoder_vaapi_destroy,
+ .priv = encoder_vaapi
+ };
+
+ return encoder;
+}
diff --git a/src/encoder/video/video.c b/src/encoder/video/video.c
new file mode 100644
index 0000000..9b0def0
--- /dev/null
+++ b/src/encoder/video/video.c
@@ -0,0 +1,26 @@
+#include "../../../include/encoder/video/video.h"
+#include <assert.h>
+
+bool gsr_video_encoder_start(gsr_video_encoder *encoder, AVCodecContext *video_codec_context, AVFrame *frame) {
+ assert(!encoder->started);
+ bool res = encoder->start(encoder, video_codec_context, frame);
+ if(res)
+ encoder->started = true;
+ return res;
+}
+
+void gsr_video_encoder_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame) {
+ assert(encoder->started);
+ if(encoder->copy_textures_to_frame)
+ encoder->copy_textures_to_frame(encoder, frame);
+}
+
+void gsr_video_encoder_get_textures(gsr_video_encoder *encoder, unsigned int *textures, int *num_textures, gsr_destination_color *destination_color) {
+ assert(encoder->started);
+ encoder->get_textures(encoder, textures, num_textures, destination_color);
+}
+
+void gsr_video_encoder_destroy(gsr_video_encoder *encoder, AVCodecContext *video_codec_context) {
+ assert(encoder->started);
+ encoder->destroy(encoder, video_codec_context);
+}
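
The wrappers in video.c give the capture method a uniform per-frame flow: query the target textures once, render into them, then let the encoder copy into the AVFrame if it needs to (the vaapi encoder leaves copy_textures_to_frame as NULL because its textures alias the exported VAAPI surface). A rough sketch of that loop, not part of this commit, with capture_render_into() as a hypothetical stand-in for the capture method's draw call:

#include <libavcodec/avcodec.h>
#include "include/encoder/video/video.h"   // path assumed relative to the repo root

// Hypothetical capture-side draw call; not part of this commit.
void capture_render_into(const unsigned int *textures, int num_textures, gsr_destination_color destination_color);

static void encode_one_frame(gsr_video_encoder *encoder, AVCodecContext *video_codec_context, AVFrame *frame) {
    unsigned int textures[2];
    int num_textures = 0;
    gsr_destination_color destination_color;
    gsr_video_encoder_get_textures(encoder, textures, &num_textures, &destination_color);

    capture_render_into(textures, num_textures, destination_color);

    // No-op for vaapi; the cuda encoder copies the CUDA-mapped textures into the
    // hw frame and the software encoder reads the textures back with glGetTexImage.
    gsr_video_encoder_copy_textures_to_frame(encoder, frame);

    if(avcodec_send_frame(video_codec_context, frame) == 0) {
        // drain with avcodec_receive_packet() and write the packets to the muxer
    }
}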