From abb8cdf0dad2267b59072b1b83da24e5fcd0b19f Mon Sep 17 00:00:00 2001 From: Alex Luccisano Date: Mon, 29 Jul 2024 14:47:09 -0400 Subject: [PATCH] libobs, libobs-opengl: Add memory and identification APIs Add graphics APIs to obtain the GPU driver version and renderer strings, as well as GDDR memory sizes. The GDDR memory sizes include the dmem (dedicated memory on the GPU) and smem (shared CPU memory used by the GPU but resides in the CPU DDR). The version and renderer strings are needed for identification purposes, for example enhanced broadcasting used by Twitch, to associate the GPU used by OBS with the PCIe-based identification values such as device_id and vendor_id. --- libobs-opengl/gl-subsystem.c | 46 +++++++++++++++++++++++++++++ libobs/graphics/device-exports.h | 4 +++ libobs/graphics/graphics-imports.c | 4 +++ libobs/graphics/graphics-internal.h | 4 +++ libobs/graphics/graphics.c | 20 +++++++++++++ libobs/graphics/graphics.h | 4 +++ 6 files changed, 82 insertions(+) diff --git a/libobs-opengl/gl-subsystem.c b/libobs-opengl/gl-subsystem.c index f7b6ea097..30d961cf3 100644 --- a/libobs-opengl/gl-subsystem.c +++ b/libobs-opengl/gl-subsystem.c @@ -214,6 +214,52 @@ const char *device_preprocessor_name(void) return "_OPENGL"; } +const char *gpu_get_driver_version(void) +{ + return ((const char *)glGetString(GL_VERSION)); +} + +const char *gpu_get_renderer(void) +{ + return ((const char *)glGetString(GL_RENDERER)); +} + +// Get the amount of dedicated GDDR memory, aka VRAM, in units of kilobytes. +uint64_t gpu_get_dmem(void) +{ + GLint dmem = 0; + glGetIntegerv(GL_GPU_MEMORY_INFO_DEDICATED_VIDMEM_NVX, &dmem); + + /* GLint is signed, however it makes little sense to have "negative" amounts of GPU memory. Check on this, clamp to + * 0 if so, and cast to an unsigned value explicitly. + */ + if (dmem < 0) { + dmem = 0; + } + + return (uint64_t)dmem; +} + +// Get the amount of CPU memory shared by the GPU, in units of kilobytes. 
+uint64_t gpu_get_smem(void) +{ + GLint dmem = 0, total_mem = 0; + glGetIntegerv(GL_GPU_MEMORY_INFO_DEDICATED_VIDMEM_NVX, &dmem); + glGetIntegerv(GL_GPU_MEMORY_INFO_TOTAL_AVAILABLE_MEMORY_NVX, &total_mem); + + /* GLint is signed, however it makes little sense to have "negative" amounts of GPU memory. Check on this, clamp to + * 0 if so, and cast to an unsigned value explicitly. Note both values stay 0 if the NVX queries are unsupported, + * since glGetIntegerv leaves the output parameter unmodified on GL_INVALID_ENUM. + */ + if (dmem < 0) { + dmem = 0; + } + if (total_mem < 0) { + total_mem = 0; + } + + return (total_mem > dmem) ? (uint64_t)(total_mem - dmem) : 0; +} + int device_create(gs_device_t **p_device, uint32_t adapter) { struct gs_device *device = bzalloc(sizeof(struct gs_device)); diff --git a/libobs/graphics/device-exports.h b/libobs/graphics/device-exports.h index 9ffbaf3a0..782b62334 100644 --- a/libobs/graphics/device-exports.h +++ b/libobs/graphics/device-exports.h @@ -24,6 +24,10 @@ extern "C" { #endif EXPORT const char *device_get_name(void); +EXPORT const char *gpu_get_driver_version(void); +EXPORT const char *gpu_get_renderer(void); +EXPORT uint64_t gpu_get_dmem(void); +EXPORT uint64_t gpu_get_smem(void); EXPORT int device_get_type(void); EXPORT bool device_enum_adapters(gs_device_t *device, bool (*callback)(void *param, const char *name, uint32_t id), void *param); diff --git a/libobs/graphics/graphics-imports.c b/libobs/graphics/graphics-imports.c index df0c27e3a..6a4a246ef 100644 --- a/libobs/graphics/graphics-imports.c +++ b/libobs/graphics/graphics-imports.c @@ -42,6 +42,10 @@ bool load_graphics_imports(struct gs_exports *exports, void *module, const char bool success = true; GRAPHICS_IMPORT(device_get_name); + GRAPHICS_IMPORT_OPTIONAL(gpu_get_driver_version); + GRAPHICS_IMPORT_OPTIONAL(gpu_get_renderer); + GRAPHICS_IMPORT_OPTIONAL(gpu_get_dmem); + GRAPHICS_IMPORT_OPTIONAL(gpu_get_smem); GRAPHICS_IMPORT(device_get_type); GRAPHICS_IMPORT_OPTIONAL(device_enum_adapters); GRAPHICS_IMPORT(device_preprocessor_name); diff --git a/libobs/graphics/graphics-internal.h b/libobs/graphics/graphics-internal.h index 
b2717c117..b344c2fb6 100644 --- a/libobs/graphics/graphics-internal.h +++ b/libobs/graphics/graphics-internal.h @@ -25,6 +25,10 @@ struct gs_exports { const char *(*device_get_name)(void); + const char *(*gpu_get_driver_version)(void); + const char *(*gpu_get_renderer)(void); + uint64_t (*gpu_get_dmem)(void); + uint64_t (*gpu_get_smem)(void); int (*device_get_type)(void); bool (*device_enum_adapters)(gs_device_t *device, bool (*callback)(void *, const char *, uint32_t), void *); const char *(*device_preprocessor_name)(void); diff --git a/libobs/graphics/graphics.c b/libobs/graphics/graphics.c index 95b6eafaf..f0b55b95e 100644 --- a/libobs/graphics/graphics.c +++ b/libobs/graphics/graphics.c @@ -297,6 +297,26 @@ const char *gs_get_device_name(void) return gs_valid("gs_get_device_name") ? thread_graphics->exports.device_get_name() : NULL; } +const char *gs_get_driver_version(void) +{ + return gs_valid("gs_get_driver_version") && thread_graphics->exports.gpu_get_driver_version ? thread_graphics->exports.gpu_get_driver_version() : NULL; +} + +const char *gs_get_renderer(void) +{ + return gs_valid("gs_get_renderer") && thread_graphics->exports.gpu_get_renderer ? thread_graphics->exports.gpu_get_renderer() : NULL; +} + +uint64_t gs_get_gpu_dmem(void) +{ + return gs_valid("gs_get_gpu_dmem") && thread_graphics->exports.gpu_get_dmem ? thread_graphics->exports.gpu_get_dmem() : 0; +} + +uint64_t gs_get_gpu_smem(void) +{ + return gs_valid("gs_get_gpu_smem") && thread_graphics->exports.gpu_get_smem ? thread_graphics->exports.gpu_get_smem() : 0; +} + int gs_get_device_type(void) { return gs_valid("gs_get_device_type") ? 
thread_graphics->exports.device_get_type() : -1; diff --git a/libobs/graphics/graphics.h b/libobs/graphics/graphics.h index ce680e0c6..1d290fb1b 100644 --- a/libobs/graphics/graphics.h +++ b/libobs/graphics/graphics.h @@ -502,6 +502,10 @@ struct gs_init_data { #define GS_DEVICE_DIRECT3D_11 2 EXPORT const char *gs_get_device_name(void); +EXPORT const char *gs_get_driver_version(void); +EXPORT const char *gs_get_renderer(void); +EXPORT uint64_t gs_get_gpu_dmem(void); +EXPORT uint64_t gs_get_gpu_smem(void); EXPORT int gs_get_device_type(void); EXPORT uint32_t gs_get_adapter_count(void); EXPORT void gs_enum_adapters(bool (*callback)(void *param, const char *name, uint32_t id), void *param);