Diffstat (limited to 'include')
-rw-r--r--  include/capture/capture.h                                  |   4
-rw-r--r--  include/capture/kms.h                                      |   1
-rw-r--r--  include/capture/nvfbc.h                                    |   1
-rw-r--r--  include/capture/portal.h                                   |   1
-rw-r--r--  include/capture/xcomposite.h                               |   2
-rw-r--r--  include/codec_query/codec_query.h                          |  23
-rw-r--r--  include/codec_query/nvenc.h                                |   8
-rw-r--r--  include/codec_query/vaapi.h                                |   8
-rw-r--r--  include/codec_query/vulkan.h                               |   8
-rw-r--r--  include/color_conversion.h                                 |   3
-rw-r--r--  include/egl.h                                              |   3
-rw-r--r--  include/encoder/video/cuda.h                               |  16
-rw-r--r--  include/encoder/video/nvenc.h                              |  16
-rw-r--r--  include/encoder/video/video.h                              |  18
-rw-r--r--  include/encoder/video/vulkan.h                             |  15
-rw-r--r--  include/pipewire_audio.h                                   | 112
-rw-r--r--  include/pipewire_video.h (renamed from include/pipewire.h) |  44
-rw-r--r--  include/sound.hpp                                          |  21
-rw-r--r--  include/utils.h                                            |   4
-rw-r--r--  include/vec2.h                                             |   4
20 files changed, 249 insertions, 63 deletions
diff --git a/include/capture/capture.h b/include/capture/capture.h
index 7c8887d..dc5b7ac 100644
--- a/include/capture/capture.h
+++ b/include/capture/capture.h
@@ -9,9 +9,9 @@
typedef struct AVCodecContext AVCodecContext;
typedef struct AVStream AVStream;
typedef struct AVFrame AVFrame;
-typedef struct gsr_capture gsr_capture;
typedef struct AVMasteringDisplayMetadata AVMasteringDisplayMetadata;
typedef struct AVContentLightMetadata AVContentLightMetadata;
+typedef struct gsr_capture gsr_capture;
struct gsr_capture {
/* These methods should not be called manually. Call gsr_capture_* instead */
@@ -20,7 +20,6 @@ struct gsr_capture {
void (*tick)(gsr_capture *cap); /* can be NULL. If there is an event then |on_event| is called before this */
bool (*should_stop)(gsr_capture *cap, bool *err); /* can be NULL. If NULL, return false */
int (*capture)(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion);
- gsr_source_color (*get_source_color)(gsr_capture *cap);
bool (*uses_external_image)(gsr_capture *cap); /* can be NULL. If NULL, return false */
bool (*set_hdr_metadata)(gsr_capture *cap, AVMasteringDisplayMetadata *mastering_display_metadata, AVContentLightMetadata *light_metadata); /* can be NULL. If NULL, return false */
uint64_t (*get_window_id)(gsr_capture *cap); /* can be NULL. Returns 0 if unknown */
@@ -37,7 +36,6 @@ void gsr_capture_on_event(gsr_capture *cap, gsr_egl *egl);
void gsr_capture_tick(gsr_capture *cap);
bool gsr_capture_should_stop(gsr_capture *cap, bool *err);
int gsr_capture_capture(gsr_capture *cap, AVFrame *frame, gsr_color_conversion *color_conversion);
-gsr_source_color gsr_capture_get_source_color(gsr_capture *cap);
bool gsr_capture_uses_external_image(gsr_capture *cap);
bool gsr_capture_set_hdr_metadata(gsr_capture *cap, AVMasteringDisplayMetadata *mastering_display_metadata, AVContentLightMetadata *light_metadata);
void gsr_capture_destroy(gsr_capture *cap, AVCodecContext *video_codec_context);
diff --git a/include/capture/kms.h b/include/capture/kms.h
index 646928e..bf1ba62 100644
--- a/include/capture/kms.h
+++ b/include/capture/kms.h
@@ -11,6 +11,7 @@ typedef struct {
bool hdr;
bool record_cursor;
int fps;
+ vec2i output_resolution;
} gsr_capture_kms_params;
gsr_capture* gsr_capture_kms_create(const gsr_capture_kms_params *params);
diff --git a/include/capture/nvfbc.h b/include/capture/nvfbc.h
index 95ca88d..f291f33 100644
--- a/include/capture/nvfbc.h
+++ b/include/capture/nvfbc.h
@@ -15,6 +15,7 @@ typedef struct {
gsr_color_range color_range;
bool record_cursor;
bool use_software_video_encoder;
+ vec2i output_resolution;
} gsr_capture_nvfbc_params;
gsr_capture* gsr_capture_nvfbc_create(const gsr_capture_nvfbc_params *params);
diff --git a/include/capture/portal.h b/include/capture/portal.h
index 2e2c6f2..3989b98 100644
--- a/include/capture/portal.h
+++ b/include/capture/portal.h
@@ -11,6 +11,7 @@ typedef struct {
bool restore_portal_session;
/* If this is set to NULL then this defaults to $XDG_CONFIG_HOME/gpu-screen-recorder/restore_token ($XDG_CONFIG_HOME defaults to $HOME/.config) */
const char *portal_session_token_filepath;
+ vec2i output_resolution;
} gsr_capture_portal_params;
gsr_capture* gsr_capture_portal_create(const gsr_capture_portal_params *params);
diff --git a/include/capture/xcomposite.h b/include/capture/xcomposite.h
index 8c87404..45eb481 100644
--- a/include/capture/xcomposite.h
+++ b/include/capture/xcomposite.h
@@ -8,10 +8,10 @@ typedef struct {
gsr_egl *egl;
unsigned long window;
bool follow_focused; /* If this is set then |window| is ignored */
- vec2i region_size; /* This is currently only used with |follow_focused| */
gsr_color_range color_range;
bool record_cursor;
gsr_color_depth color_depth;
+ vec2i output_resolution;
} gsr_capture_xcomposite_params;
gsr_capture* gsr_capture_xcomposite_create(const gsr_capture_xcomposite_params *params);
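The capture backends (kms, nvfbc, portal, xcomposite) now take a vec2i output_resolution, replacing xcomposite's old region_size. Below is a minimal sketch (not from the source tree) of filling the updated xcomposite params; GSR_COLOR_RANGE_LIMITED and GSR_COLOR_DEPTH_8_BITS are assumed enum values, and a zero output_resolution is assumed to mean "keep the source size".

    #include "capture/xcomposite.h"
    #include <string.h>

    /* Sketch: create a window capture that scales its output to 1080p.
       The enum values below are assumptions, not taken from this diff. */
    static gsr_capture* create_window_capture(gsr_egl *egl, unsigned long window) {
        gsr_capture_xcomposite_params params;
        memset(&params, 0, sizeof(params));
        params.egl = egl;
        params.window = window;
        params.follow_focused = false;
        params.record_cursor = true;
        params.color_range = GSR_COLOR_RANGE_LIMITED; /* assumed enum value */
        params.color_depth = GSR_COLOR_DEPTH_8_BITS;  /* assumed enum value */
        params.output_resolution = vec2i{1920, 1080}; /* new field: scale the captured image */
        return gsr_capture_xcomposite_create(&params);
    }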
diff --git a/include/codec_query/codec_query.h b/include/codec_query/codec_query.h
new file mode 100644
index 0000000..316217d
--- /dev/null
+++ b/include/codec_query/codec_query.h
@@ -0,0 +1,23 @@
+#ifndef GSR_CODEC_QUERY_H
+#define GSR_CODEC_QUERY_H
+
+#include <stdbool.h>
+
+typedef struct {
+ bool supported;
+ bool low_power;
+} gsr_supported_video_codec;
+
+typedef struct {
+ gsr_supported_video_codec h264;
+ gsr_supported_video_codec hevc;
+ gsr_supported_video_codec hevc_hdr;
+ gsr_supported_video_codec hevc_10bit;
+ gsr_supported_video_codec av1;
+ gsr_supported_video_codec av1_hdr;
+ gsr_supported_video_codec av1_10bit;
+ gsr_supported_video_codec vp8;
+ gsr_supported_video_codec vp9;
+} gsr_supported_video_codecs;
+
+#endif /* GSR_CODEC_QUERY_H */
diff --git a/include/codec_query/nvenc.h b/include/codec_query/nvenc.h
new file mode 100644
index 0000000..c01acf6
--- /dev/null
+++ b/include/codec_query/nvenc.h
@@ -0,0 +1,8 @@
+#ifndef GSR_CODEC_QUERY_NVENC_H
+#define GSR_CODEC_QUERY_NVENC_H
+
+#include "codec_query.h"
+
+bool gsr_get_supported_video_codecs_nvenc(gsr_supported_video_codecs *video_codecs, bool cleanup);
+
+#endif /* GSR_CODEC_QUERY_NVENC_H */
diff --git a/include/codec_query/vaapi.h b/include/codec_query/vaapi.h
new file mode 100644
index 0000000..60bdeca
--- /dev/null
+++ b/include/codec_query/vaapi.h
@@ -0,0 +1,8 @@
+#ifndef GSR_CODEC_QUERY_VAAPI_H
+#define GSR_CODEC_QUERY_VAAPI_H
+
+#include "codec_query.h"
+
+bool gsr_get_supported_video_codecs_vaapi(gsr_supported_video_codecs *video_codecs, const char *card_path, bool cleanup);
+
+#endif /* GSR_CODEC_QUERY_VAAPI_H */
diff --git a/include/codec_query/vulkan.h b/include/codec_query/vulkan.h
new file mode 100644
index 0000000..bb06c6b
--- /dev/null
+++ b/include/codec_query/vulkan.h
@@ -0,0 +1,8 @@
+#ifndef GSR_CODEC_QUERY_VULKAN_H
+#define GSR_CODEC_QUERY_VULKAN_H
+
+#include "codec_query.h"
+
+bool gsr_get_supported_video_codecs_vulkan(gsr_supported_video_codecs *video_codecs, const char *card_path, bool cleanup);
+
+#endif /* GSR_CODEC_QUERY_VULKAN_H */
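The codec support query has been split out of the encoder interface (see the encoder/video/video.h hunk further down) into these per-backend headers. A minimal usage sketch follows, assuming the VAAPI backend; the render node path and the preference order are only illustrative.

    #include "codec_query/vaapi.h"
    #include <stdio.h>
    #include <string.h>

    /* Sketch: query which codecs the driver behind |card_path| can encode
       and report the preferred one. */
    static bool print_supported_codec(const char *card_path) {
        gsr_supported_video_codecs codecs;
        memset(&codecs, 0, sizeof(codecs));
        if(!gsr_get_supported_video_codecs_vaapi(&codecs, card_path, true)) {
            fprintf(stderr, "failed to query video codecs for %s\n", card_path);
            return false;
        }

        if(codecs.av1.supported)
            printf("av1 (low power: %s)\n", codecs.av1.low_power ? "yes" : "no");
        else if(codecs.hevc.supported)
            printf("hevc (low power: %s)\n", codecs.hevc.low_power ? "yes" : "no");
        else if(codecs.h264.supported)
            printf("h264\n");
        else
            printf("no hardware video encoding support found\n");
        return true;
    }

    /* Example call: print_supported_codec("/dev/dri/renderD128"); */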
diff --git a/include/color_conversion.h b/include/color_conversion.h
index 236bfbd..c079edd 100644
--- a/include/color_conversion.h
+++ b/include/color_conversion.h
@@ -33,7 +33,6 @@ typedef struct {
typedef struct {
gsr_egl *egl;
- gsr_source_color source_color;
gsr_destination_color destination_color;
unsigned int destination_textures[2];
@@ -57,7 +56,7 @@ typedef struct {
int gsr_color_conversion_init(gsr_color_conversion *self, const gsr_color_conversion_params *params);
void gsr_color_conversion_deinit(gsr_color_conversion *self);
-void gsr_color_conversion_draw(gsr_color_conversion *self, unsigned int texture_id, vec2i source_pos, vec2i source_size, vec2i texture_pos, vec2i texture_size, float rotation, bool external_texture);
+void gsr_color_conversion_draw(gsr_color_conversion *self, unsigned int texture_id, vec2i source_pos, vec2i source_size, vec2i texture_pos, vec2i texture_size, float rotation, bool external_texture, gsr_source_color source_color);
void gsr_color_conversion_clear(gsr_color_conversion *self);
#endif /* GSR_COLOR_CONVERSION_H */
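gsr_source_color moves from gsr_color_conversion_params to the draw call itself, so one converter instance can handle sources with different color formats. A sketch of the updated call, where GSR_SOURCE_COLOR_RGB is an assumed enum value and the geometry is placeholder data:

    /* Sketch: draw a captured texture through the color converter.
       The source color format is now chosen per draw call rather than at init. */
    static void draw_captured_texture(gsr_color_conversion *conv, unsigned int texture_id, vec2i size) {
        const vec2i pos = {0, 0};
        gsr_color_conversion_draw(conv, texture_id,
            pos, size,             /* source_pos, source_size */
            pos, size,             /* texture_pos, texture_size */
            0.0f,                  /* rotation */
            false,                 /* external_texture */
            GSR_SOURCE_COLOR_RGB); /* assumed enum value */
    }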
diff --git a/include/egl.h b/include/egl.h
index 3fdbf48..82014b9 100644
--- a/include/egl.h
+++ b/include/egl.h
@@ -313,6 +313,9 @@ struct gsr_egl {
void (*glUniform2f)(int location, float v0, float v1);
void (*glDebugMessageCallback)(GLDEBUGPROC callback, const void *userParam);
void (*glScissor)(int x, int y, int width, int height);
+ void (*glReadPixels)(int x, int y, int width, int height, unsigned int format, unsigned int type, void *pixels);
+ void* (*glMapBuffer)(unsigned int target, unsigned int access);
+ unsigned char (*glUnmapBuffer)(unsigned int target);
};
bool gsr_egl_load(gsr_egl *self, Display *dpy, bool wayland, bool is_monitor_capture);
diff --git a/include/encoder/video/cuda.h b/include/encoder/video/cuda.h
deleted file mode 100644
index 6d32e09..0000000
--- a/include/encoder/video/cuda.h
+++ /dev/null
@@ -1,16 +0,0 @@
-#ifndef GSR_ENCODER_VIDEO_CUDA_H
-#define GSR_ENCODER_VIDEO_CUDA_H
-
-#include "video.h"
-
-typedef struct gsr_egl gsr_egl;
-
-typedef struct {
- gsr_egl *egl;
- bool overclock;
- gsr_color_depth color_depth;
-} gsr_video_encoder_cuda_params;
-
-gsr_video_encoder* gsr_video_encoder_cuda_create(const gsr_video_encoder_cuda_params *params);
-
-#endif /* GSR_ENCODER_VIDEO_CUDA_H */
diff --git a/include/encoder/video/nvenc.h b/include/encoder/video/nvenc.h
new file mode 100644
index 0000000..d4a906b
--- /dev/null
+++ b/include/encoder/video/nvenc.h
@@ -0,0 +1,16 @@
+#ifndef GSR_ENCODER_VIDEO_NVENC_H
+#define GSR_ENCODER_VIDEO_NVENC_H
+
+#include "video.h"
+
+typedef struct gsr_egl gsr_egl;
+
+typedef struct {
+ gsr_egl *egl;
+ bool overclock;
+ gsr_color_depth color_depth;
+} gsr_video_encoder_nvenc_params;
+
+gsr_video_encoder* gsr_video_encoder_nvenc_create(const gsr_video_encoder_nvenc_params *params);
+
+#endif /* GSR_ENCODER_VIDEO_NVENC_H */
diff --git a/include/encoder/video/video.h b/include/encoder/video/video.h
index 899357a..49f48bd 100644
--- a/include/encoder/video/video.h
+++ b/include/encoder/video/video.h
@@ -8,22 +8,9 @@ typedef struct gsr_video_encoder gsr_video_encoder;
typedef struct AVCodecContext AVCodecContext;
typedef struct AVFrame AVFrame;
-typedef struct {
- bool h264;
- bool hevc;
- bool hevc_hdr;
- bool hevc_10bit;
- bool av1;
- bool av1_hdr;
- bool av1_10bit;
- bool vp8;
- bool vp9;
-} gsr_supported_video_codecs;
-
struct gsr_video_encoder {
- gsr_supported_video_codecs (*get_supported_codecs)(gsr_video_encoder *encoder, bool cleanup);
bool (*start)(gsr_video_encoder *encoder, AVCodecContext *video_codec_context, AVFrame *frame);
- void (*copy_textures_to_frame)(gsr_video_encoder *encoder, AVFrame *frame); /* Can be NULL */
+ void (*copy_textures_to_frame)(gsr_video_encoder *encoder, AVFrame *frame, gsr_color_conversion *color_conversion); /* Can be NULL */
/* |textures| should be able to fit 2 elements */
void (*get_textures)(gsr_video_encoder *encoder, unsigned int *textures, int *num_textures, gsr_destination_color *destination_color);
void (*destroy)(gsr_video_encoder *encoder, AVCodecContext *video_codec_context);
@@ -32,9 +19,8 @@ struct gsr_video_encoder {
bool started;
};
-gsr_supported_video_codecs gsr_video_encoder_get_supported_codecs(gsr_video_encoder *encoder, bool cleanup);
bool gsr_video_encoder_start(gsr_video_encoder *encoder, AVCodecContext *video_codec_context, AVFrame *frame);
-void gsr_video_encoder_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame);
+void gsr_video_encoder_copy_textures_to_frame(gsr_video_encoder *encoder, AVFrame *frame, gsr_color_conversion *color_conversion);
void gsr_video_encoder_get_textures(gsr_video_encoder *encoder, unsigned int *textures, int *num_textures, gsr_destination_color *destination_color);
void gsr_video_encoder_destroy(gsr_video_encoder *encoder, AVCodecContext *video_codec_context);
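With get_supported_codecs moved to the codec_query headers above, the encoder interface is left with starting, exposing textures and copying them into the AVFrame, and copy_textures_to_frame now also receives the color conversion context. A rough sketch of the per-frame flow under that interface (setup and error handling elided):

    /* Sketch: one iteration of the capture/encode loop with the updated interface. */
    static int capture_and_copy_frame(gsr_capture *cap, gsr_video_encoder *encoder,
                                      gsr_color_conversion *color_conversion, AVFrame *frame) {
        const int res = gsr_capture_capture(cap, frame, color_conversion);
        if(res != 0)
            return res;
        /* The color conversion context is now passed through to the encoder. */
        gsr_video_encoder_copy_textures_to_frame(encoder, frame, color_conversion);
        return 0;
    }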
diff --git a/include/encoder/video/vulkan.h b/include/encoder/video/vulkan.h
new file mode 100644
index 0000000..383fc4f
--- /dev/null
+++ b/include/encoder/video/vulkan.h
@@ -0,0 +1,15 @@
+#ifndef GSR_ENCODER_VIDEO_VULKAN_H
+#define GSR_ENCODER_VIDEO_VULKAN_H
+
+#include "video.h"
+
+typedef struct gsr_egl gsr_egl;
+
+typedef struct {
+ gsr_egl *egl;
+ gsr_color_depth color_depth;
+} gsr_video_encoder_vulkan_params;
+
+gsr_video_encoder* gsr_video_encoder_vulkan_create(const gsr_video_encoder_vulkan_params *params);
+
+#endif /* GSR_ENCODER_VIDEO_VULKAN_H */
diff --git a/include/pipewire_audio.h b/include/pipewire_audio.h
new file mode 100644
index 0000000..8cfb2d2
--- /dev/null
+++ b/include/pipewire_audio.h
@@ -0,0 +1,112 @@
+#ifndef GSR_PIPEWIRE_AUDIO_H
+#define GSR_PIPEWIRE_AUDIO_H
+
+#include <pipewire/thread-loop.h>
+#include <pipewire/context.h>
+#include <pipewire/core.h>
+#include <spa/utils/hook.h>
+
+#include <stdbool.h>
+
+#define GSR_PIPEWIRE_AUDIO_MAX_STREAM_NODES 64
+#define GSR_PIPEWIRE_AUDIO_MAX_PORTS 64
+#define GSR_PIPEWIRE_AUDIO_MAX_REQUESTED_LINKS 32
+
+typedef enum {
+ GSR_PIPEWIRE_AUDIO_NODE_TYPE_STREAM_OUTPUT, /* Application audio */
+ GSR_PIPEWIRE_AUDIO_NODE_TYPE_STREAM_INPUT, /* Audio recording input */
+ GSR_PIPEWIRE_AUDIO_NODE_TYPE_SINK
+} gsr_pipewire_audio_node_type;
+
+typedef struct {
+ uint32_t id;
+ char *name;
+ gsr_pipewire_audio_node_type type;
+} gsr_pipewire_audio_node;
+
+typedef enum {
+ GSR_PIPEWIRE_AUDIO_PORT_DIRECTION_INPUT,
+ GSR_PIPEWIRE_AUDIO_PORT_DIRECTION_OUTPUT
+} gsr_pipewire_audio_port_direction;
+
+typedef struct {
+ uint32_t id;
+ uint32_t node_id;
+ gsr_pipewire_audio_port_direction direction;
+ char *name;
+} gsr_pipewire_audio_port;
+
+typedef enum {
+ GSR_PIPEWIRE_AUDIO_LINK_OUTPUT_TYPE_STREAM,
+ GSR_PIPEWIRE_AUDIO_LINK_OUTPUT_TYPE_SINK
+} gsr_pipewire_audio_link_output_type;
+
+typedef struct {
+ char **app_names;
+ int num_app_names;
+ char *output_name;
+ bool inverted;
+ gsr_pipewire_audio_link_output_type output_type;
+} gsr_pipewire_audio_requested_link;
+
+typedef struct {
+ struct pw_thread_loop *thread_loop;
+ struct pw_context *context;
+ struct pw_core *core;
+ struct spa_hook core_listener;
+ struct pw_registry *registry;
+ struct spa_hook registry_listener;
+ int server_version_sync;
+
+ gsr_pipewire_audio_node stream_nodes[GSR_PIPEWIRE_AUDIO_MAX_STREAM_NODES];
+ int num_stream_nodes;
+
+ gsr_pipewire_audio_port ports[GSR_PIPEWIRE_AUDIO_MAX_PORTS];
+ int num_ports;
+
+ gsr_pipewire_audio_requested_link requested_links[GSR_PIPEWIRE_AUDIO_MAX_REQUESTED_LINKS];
+ int num_requested_links;
+} gsr_pipewire_audio;
+
+bool gsr_pipewire_audio_init(gsr_pipewire_audio *self);
+void gsr_pipewire_audio_deinit(gsr_pipewire_audio *self);
+
+/*
+ This function links audio source outputs from applications that match the name |app_names_output| to the input
+ that matches the name |stream_name_input|.
+ If an application or a new application starts outputting audio after this function is called and the app name matches
+ then it will automatically link the audio sources.
+ |app_names_output| and |stream_name_input| are case-insensitive matches.
+*/
+bool gsr_pipewire_audio_add_link_from_apps_to_stream(gsr_pipewire_audio *self, const char **app_names_output, int num_app_names_output, const char *stream_name_input);
+/*
+ This function links audio source outputs from all applications except the ones that match the name |app_names_output| to the input
+ that matches the name |stream_name_input|.
+ If an application or a new application starts outputting audio after this function is called and the app name doesn't match
+ then it will automatically link the audio sources.
+ |app_names_output| and |stream_name_input| are case-insensitive matches.
+*/
+bool gsr_pipewire_audio_add_link_from_apps_to_stream_inverted(gsr_pipewire_audio *self, const char **app_names_output, int num_app_names_output, const char *stream_name_input);
+
+/*
+ This function links audio source outputs from applications that match the name |app_names_output| to the input
+ that matches the name |sink_name_input|.
+ If an application or a new application starts outputting audio after this function is called and the app name matches
+ then it will automatically link the audio sources.
+ |app_names_output| and |sink_name_input| are case-insensitive matches.
+*/
+bool gsr_pipewire_audio_add_link_from_apps_to_sink(gsr_pipewire_audio *self, const char **app_names_output, int num_app_names_output, const char *sink_name_input);
+/*
+ This function links audio source outputs from all applications except the ones that match the name |app_names_output| to the input
+ that matches the name |sink_name_input|.
+ If an application or a new application starts outputting audio after this function is called and the app name doesn't match
+ then it will automatically link the audio sources.
+ |app_names_output| and |sink_name_input| are case-insensitive matches.
+*/
+bool gsr_pipewire_audio_add_link_from_apps_to_sink_inverted(gsr_pipewire_audio *self, const char **app_names_output, int num_app_names_output, const char *sink_name_input);
+
+/* Return true to continue */
+typedef bool (*gsr_pipewire_audio_app_query_callback)(const char *app_name, void *userdata);
+void gsr_pipewire_audio_for_each_app(gsr_pipewire_audio *self, gsr_pipewire_audio_app_query_callback callback, void *userdata);
+
+#endif /* GSR_PIPEWIRE_AUDIO_H */
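A sketch of how the new application-audio routing API might be used: initialize the loop, then ask for audio from matching applications to be linked to a recording stream. As documented in the header, matching is case-insensitive and also applies to applications that start playing later. The application and stream names here are illustrative placeholders.

    #include "pipewire_audio.h"
    #include <stdio.h>

    /* Sketch: route the audio of specific applications into a recording stream.
       "gsr-audio-capture" stands in for whatever stream name the recording side
       registered with PipeWire. */
    static bool setup_app_audio_capture(gsr_pipewire_audio *audio) {
        if(!gsr_pipewire_audio_init(audio)) {
            fprintf(stderr, "failed to init pipewire audio\n");
            return false;
        }

        const char *app_names[] = { "firefox", "mpv" }; /* matched case-insensitively */
        if(!gsr_pipewire_audio_add_link_from_apps_to_stream(audio, app_names, 2, "gsr-audio-capture")) {
            gsr_pipewire_audio_deinit(audio);
            return false;
        }
        /* Applications that start outputting audio later and match these names
           are linked automatically, as documented above. */
        return true;
    }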
diff --git a/include/pipewire.h b/include/pipewire_video.h
index 1908e2d..00e2835 100644
--- a/include/pipewire.h
+++ b/include/pipewire_video.h
@@ -1,5 +1,5 @@
-#ifndef GSR_PIPEWIRE_H
-#define GSR_PIPEWIRE_H
+#ifndef GSR_PIPEWIRE_VIDEO_H
+#define GSR_PIPEWIRE_VIDEO_H
#include <stdbool.h>
#include <stdint.h>
@@ -8,9 +8,9 @@
#include <spa/utils/hook.h>
#include <spa/param/video/format.h>
-#define GSR_PIPEWIRE_MAX_MODIFIERS 1024
-#define GSR_PIPEWIRE_NUM_VIDEO_FORMATS 6
-#define GSR_PIPEWIRE_DMABUF_MAX_PLANES 4
+#define GSR_PIPEWIRE_VIDEO_MAX_MODIFIERS 1024
+#define GSR_PIPEWIRE_VIDEO_NUM_VIDEO_FORMATS 6
+#define GSR_PIPEWIRE_VIDEO_DMABUF_MAX_PLANES 4
typedef struct gsr_egl gsr_egl;
@@ -18,23 +18,23 @@ typedef struct {
int major;
int minor;
int micro;
-} gsr_pipewire_data_version;
+} gsr_pipewire_video_data_version;
typedef struct {
uint32_t fps_num;
uint32_t fps_den;
-} gsr_pipewire_video_info;
+} gsr_pipewire_video_video_info;
typedef struct {
int fd;
uint32_t offset;
int32_t stride;
-} gsr_pipewire_dmabuf_data;
+} gsr_pipewire_video_dmabuf_data;
typedef struct {
int x, y;
int width, height;
-} gsr_pipewire_region;
+} gsr_pipewire_video_region;
typedef struct {
enum spa_video_format format;
@@ -82,31 +82,31 @@ typedef struct {
uint32_t width, height;
} crop;
- gsr_video_format supported_video_formats[GSR_PIPEWIRE_NUM_VIDEO_FORMATS];
+ gsr_video_format supported_video_formats[GSR_PIPEWIRE_VIDEO_NUM_VIDEO_FORMATS];
- gsr_pipewire_data_version server_version;
- gsr_pipewire_video_info video_info;
- gsr_pipewire_dmabuf_data dmabuf_data[GSR_PIPEWIRE_DMABUF_MAX_PLANES];
+ gsr_pipewire_video_data_version server_version;
+ gsr_pipewire_video_video_info video_info;
+ gsr_pipewire_video_dmabuf_data dmabuf_data[GSR_PIPEWIRE_VIDEO_DMABUF_MAX_PLANES];
size_t dmabuf_num_planes;
bool no_modifiers_fallback;
bool external_texture_fallback;
- uint64_t modifiers[GSR_PIPEWIRE_MAX_MODIFIERS];
+ uint64_t modifiers[GSR_PIPEWIRE_VIDEO_MAX_MODIFIERS];
size_t num_modifiers;
-} gsr_pipewire;
+} gsr_pipewire_video;
/*
|capture_cursor| only applies to when capturing a window or region.
In other cases |pipewire_node|'s setup will determine if the cursor is included.
Note that the cursor is not guaranteed to be shown even if set to true, it depends on the wayland compositor.
*/
-bool gsr_pipewire_init(gsr_pipewire *self, int pipewire_fd, uint32_t pipewire_node, int fps, bool capture_cursor, gsr_egl *egl);
-void gsr_pipewire_deinit(gsr_pipewire *self);
+bool gsr_pipewire_video_init(gsr_pipewire_video *self, int pipewire_fd, uint32_t pipewire_node, int fps, bool capture_cursor, gsr_egl *egl);
+void gsr_pipewire_video_deinit(gsr_pipewire_video *self);
-/* |dmabuf_data| should be at least GSR_PIPEWIRE_DMABUF_MAX_PLANES in size */
-bool gsr_pipewire_map_texture(gsr_pipewire *self, gsr_texture_map texture_map, gsr_pipewire_region *region, gsr_pipewire_region *cursor_region, gsr_pipewire_dmabuf_data *dmabuf_data, int *num_dmabuf_data, uint32_t *fourcc, uint64_t *modifiers, bool *using_external_image);
-bool gsr_pipewire_is_damaged(gsr_pipewire *self);
-void gsr_pipewire_clear_damage(gsr_pipewire *self);
+/* |dmabuf_data| should be at least GSR_PIPEWIRE_VIDEO_DMABUF_MAX_PLANES in size */
+bool gsr_pipewire_video_map_texture(gsr_pipewire_video *self, gsr_texture_map texture_map, gsr_pipewire_video_region *region, gsr_pipewire_video_region *cursor_region, gsr_pipewire_video_dmabuf_data *dmabuf_data, int *num_dmabuf_data, uint32_t *fourcc, uint64_t *modifiers, bool *using_external_image);
+bool gsr_pipewire_video_is_damaged(gsr_pipewire_video *self);
+void gsr_pipewire_video_clear_damage(gsr_pipewire_video *self);
-#endif /* GSR_PIPEWIRE_H */
+#endif /* GSR_PIPEWIRE_VIDEO_H */
diff --git a/include/sound.hpp b/include/sound.hpp
index 7bcc120..018ff4a 100644
--- a/include/sound.hpp
+++ b/include/sound.hpp
@@ -26,6 +26,11 @@ typedef struct {
unsigned int frames;
} SoundDevice;
+enum class AudioInputType {
+ DEVICE,
+ APPLICATION
+};
+
struct AudioInput {
std::string name;
std::string description;
@@ -37,8 +42,14 @@ struct AudioDevices {
std::vector<AudioInput> audio_inputs;
};
+struct ApplicationAudio {
+ std::string name;
+};
+
struct MergedAudioInputs {
std::vector<AudioInput> audio_inputs;
+ AudioInputType type = AudioInputType::DEVICE;
+ bool inverted = false;
};
typedef enum {
@@ -48,12 +59,15 @@ typedef enum {
} AudioFormat;
/*
- Get a sound device by name, returning the device into the @device parameter.
- The device should be closed with @sound_device_close after it has been used
- to clean up internal resources.
+ Get a sound device by name, returning the device into the |device| parameter.
Returns 0 on success, or a negative value on failure.
*/
int sound_device_get_by_name(SoundDevice *device, const char *device_name, const char *description, unsigned int num_channels, unsigned int period_frame_size, AudioFormat audio_format);
+/*
+ Creates a module-combine-sink and connects to it for recording, returning the device into the |device| parameter.
+ Returns 0 on success, or a negative value on failure.
+*/
+int sound_device_create_combined_sink_connect(SoundDevice *device, const char *combined_sink_name, unsigned int num_channels, unsigned int period_frame_size, AudioFormat audio_format);
void sound_device_close(SoundDevice *device);
@@ -64,5 +78,6 @@ void sound_device_close(SoundDevice *device);
int sound_device_read_next_chunk(SoundDevice *device, void **buffer, double timeout_sec, double *latency_seconds);
AudioDevices get_pulseaudio_inputs();
+std::vector<ApplicationAudio> get_pulseaudio_applications();
#endif /* GPU_SCREEN_RECORDER_H */
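A sketch of the new application-audio entry points in sound.hpp used together: list the applications currently playing audio, then record through a combined sink. The sink name, channel count, period size, the S16 format value and the return-value interpretation of sound_device_read_next_chunk are illustrative assumptions.

    #include "sound.hpp"
    #include <cstdio>

    // Sketch: record application audio via the new combined-sink path.
    static void record_application_audio_example() {
        for(const ApplicationAudio &app : get_pulseaudio_applications())
            printf("application playing audio: %s\n", app.name.c_str());

        SoundDevice device;
        // "gsr-combined-sink" is a placeholder name; S16 is an assumed AudioFormat value.
        if(sound_device_create_combined_sink_connect(&device, "gsr-combined-sink", 2, 1024, S16) != 0) {
            fprintf(stderr, "failed to create combined sink\n");
            return;
        }

        void *buffer = nullptr;
        double latency_seconds = 0.0;
        // Assuming a positive return value means a chunk of interleaved samples was read into |buffer|.
        if(sound_device_read_next_chunk(&device, &buffer, 0.1, &latency_seconds) > 0) {
            // Process the chunk here.
        }

        sound_device_close(&device);
    }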
diff --git a/include/utils.h b/include/utils.h
index 92eb851..984b963 100644
--- a/include/utils.h
+++ b/include/utils.h
@@ -28,6 +28,8 @@ typedef struct {
} get_monitor_by_name_userdata;
double clock_get_monotonic_seconds(void);
+bool generate_random_characters(char *buffer, int buffer_size, const char *alphabet, size_t alphabet_size);
+bool generate_random_characters_standard_alphabet(char *buffer, int buffer_size);
typedef void (*active_monitor_callback)(const gsr_monitor *monitor, void *userdata);
void for_each_active_monitor_output_x11_not_cached(Display *display, active_monitor_callback callback, void *userdata);
@@ -51,4 +53,6 @@ bool video_codec_context_is_vaapi(AVCodecContext *video_codec_context);
bool vaapi_copy_drm_planes_to_video_surface(AVCodecContext *video_codec_context, AVFrame *video_frame, vec2i source_pos, vec2i source_size, vec2i dest_pos, vec2i dest_size, uint32_t format, vec2i size, const int *fds, const uint32_t *offsets, const uint32_t *pitches, const uint64_t *modifiers, int num_planes);
bool vaapi_copy_egl_image_to_video_surface(gsr_egl *egl, EGLImage image, vec2i source_pos, vec2i source_size, vec2i dest_pos, vec2i dest_size, AVCodecContext *video_codec_context, AVFrame *video_frame);
+vec2i scale_keep_aspect_ratio(vec2i from, vec2i to);
+
#endif /* GSR_UTILS_H */
diff --git a/include/vec2.h b/include/vec2.h
index 3e33cfb..8fd3858 100644
--- a/include/vec2.h
+++ b/include/vec2.h
@@ -9,4 +9,8 @@ typedef struct {
float x, y;
} vec2f;
+typedef struct {
+ double x, y;
+} vec2d;
+
#endif /* VEC2_H */