From fec32ac53d724cfebe67abaf7427ea17d7f1972c Mon Sep 17 00:00:00 2001
From: Marc Di Luzio
Date: Sun, 10 Mar 2019 15:11:24 +0000
Subject: [PATCH] Remove the nv_perf_level config option and figure it out
 programmatically

This also fixes the instances in testing where we don't have the nv overclock in use, but we do have the mode set

Solves issues with explaining what the perf_level actually meant, and future-proofs for any PR that wants to set individual perf levels
---
 daemon/daemon_config.c  |   7 +-
 daemon/daemon_config.h  |   1 -
 daemon/gamemode-gpu.c   |  19 -----
 daemon/gamemode-tests.c |   6 +-
 daemon/gpu-control.h    |   3 +-
 daemon/gpuclockctl.c    | 166 +++++++++++++++++++++++-----------------
 example/gamemode.ini    |   7 +-
 7 files changed, 105 insertions(+), 104 deletions(-)

diff --git a/daemon/daemon_config.c b/daemon/daemon_config.c
index df24016..69eed10 100644
--- a/daemon/daemon_config.c
+++ b/daemon/daemon_config.c
@@ -91,7 +91,6 @@ struct GameModeConfig {
 	long gpu_device;
 	long nv_core_clock_mhz_offset;
 	long nv_mem_clock_mhz_offset;
-	long nv_perf_level;
 	long nv_powermizer_mode;
 	char amd_performance_level[CONFIG_VALUE_MAX];

@@ -257,8 +256,6 @@ static int inih_handler(void *user, const char *section, const char *name, const
 		valid = get_long_value(name, value, &self->values.nv_core_clock_mhz_offset);
 	} else if (strcmp(name, "nv_mem_clock_mhz_offset") == 0) {
 		valid = get_long_value(name, value, &self->values.nv_mem_clock_mhz_offset);
-	} else if (strcmp(name, "nv_perf_level") == 0) {
-		valid = get_long_value(name, value, &self->values.nv_perf_level);
 	} else if (strcmp(name, "nv_powermizer_mode") == 0) {
 		valid = get_long_value(name, value, &self->values.nv_powermizer_mode);
 	} else if (strcmp(name, "amd_performance_level") == 0) {
@@ -332,8 +329,9 @@ static void load_config_files(GameModeConfig *self)
 	self->values.renice = 4; /* default value of 4 */
 	self->values.reaper_frequency = DEFAULT_REAPER_FREQ;
 	self->values.gpu_device = -1; /* 0 is a valid device ID so use -1 to indicate no value */
-	self->values.nv_perf_level = -1;
 	self->values.nv_powermizer_mode = -1;
+	self->values.nv_core_clock_mhz_offset = -1;
+	self->values.nv_mem_clock_mhz_offset = -1;
 	self->values.script_timeout = 10; /* Default to 10 seconds for scripts */

 	/*
@@ -585,7 +583,6 @@ void config_get_apply_gpu_optimisations(GameModeConfig *self, char value[CONFIG_
 DEFINE_CONFIG_GET(gpu_device)
 DEFINE_CONFIG_GET(nv_core_clock_mhz_offset)
 DEFINE_CONFIG_GET(nv_mem_clock_mhz_offset)
-DEFINE_CONFIG_GET(nv_perf_level)
 DEFINE_CONFIG_GET(nv_powermizer_mode)

 void config_get_amd_performance_level(GameModeConfig *self, char value[CONFIG_VALUE_MAX])
diff --git a/daemon/daemon_config.h b/daemon/daemon_config.h
index d6795f5..538ef10 100644
--- a/daemon/daemon_config.h
+++ b/daemon/daemon_config.h
@@ -142,7 +142,6 @@ void config_get_apply_gpu_optimisations(GameModeConfig *self, char value[CONFIG_
 long config_get_gpu_device(GameModeConfig *self);
 long config_get_nv_core_clock_mhz_offset(GameModeConfig *self);
 long config_get_nv_mem_clock_mhz_offset(GameModeConfig *self);
-long config_get_nv_perf_level(GameModeConfig *self);
 long config_get_nv_powermizer_mode(GameModeConfig *self);
 void config_get_amd_performance_level(GameModeConfig *self, char value[CONFIG_VALUE_MAX]);

diff --git a/daemon/gamemode-gpu.c b/daemon/gamemode-gpu.c
index db3509b..c7ae96c 100644
--- a/daemon/gamemode-gpu.c
+++ b/daemon/gamemode-gpu.c
@@ -92,7 +91,6 @@ int game_mode_initialise_gpu(GameModeConfig *config, GameModeGPUInfo **info)
 	case Vendor_NVIDIA:
 		new_info->nv_core = config_get_nv_core_clock_mhz_offset(config);
 		new_info->nv_mem = config_get_nv_mem_clock_mhz_offset(config);
-		new_info->nv_perf_level = config_get_nv_perf_level(config);
 		new_info->nv_powermizer_mode = config_get_nv_powermizer_mode(config);

 		/* Reject values over some guessed values
@@ -113,18 +112,6 @@ int game_mode_initialise_gpu(GameModeConfig *config, GameModeGPUInfo **info)
 			return -1;
 		}

-		/* Sanity check the performance level value as well */
-		/* Allow an invalid perf level if we've got the powermizer mode set */
-		if (!(new_info->nv_perf_level == -1 && new_info->nv_powermizer_mode != -1) &&
-		    (new_info->nv_perf_level < 0 || new_info->nv_perf_level > 16)) {
-			LOG_ERROR(
-			    "NVIDIA Performance level value likely invalid (%ld), will not apply "
-			    "optimisations!\n",
-			    new_info->nv_perf_level);
-			free(new_info);
-			return -1;
-		}
-
 		break;
 	case Vendor_AMD:
 		config_get_amd_performance_level(config, new_info->amd_performance_level);
@@ -177,8 +164,6 @@ int game_mode_apply_gpu(const GameModeGPUInfo *info)
 	snprintf(nv_core, 8, "%ld", info->nv_core);
 	char nv_mem[8];
 	snprintf(nv_mem, 8, "%ld", info->nv_mem);
-	char nv_perf_level[4];
-	snprintf(nv_perf_level, 4, "%ld", info->nv_perf_level);
 	char nv_powermizer_mode[4];
 	snprintf(nv_powermizer_mode, 4, "%ld", info->nv_powermizer_mode);

@@ -190,7 +175,6 @@ int game_mode_apply_gpu(const GameModeGPUInfo *info)
 		"set",
 		info->vendor == Vendor_NVIDIA ? nv_core : info->amd_performance_level,
 		info->vendor == Vendor_NVIDIA ? nv_mem : NULL, /* Only use this if Nvidia */
-		info->vendor == Vendor_NVIDIA ? nv_perf_level : NULL, /* Only use this if Nvidia */
 		info->vendor == Vendor_NVIDIA ? nv_powermizer_mode : NULL, /* Only use this if Nvidia */
 		NULL,
 	};
@@ -211,8 +195,6 @@ int game_mode_get_gpu(GameModeGPUInfo *info)
 	/* Generate the input strings */
 	char device[4];
 	snprintf(device, 4, "%ld", info->device);
-	char nv_perf_level[4];
-	snprintf(nv_perf_level, 4, "%ld", info->nv_perf_level);

 	// Set up our command line to pass to gpuclockctl
 	// This doesn't need pkexec as get does not need elevated perms
@@ -220,7 +202,6 @@ int game_mode_get_gpu(GameModeGPUInfo *info)
 		LIBEXECDIR "/gpuclockctl",
 		device,
 		"get",
-		info->vendor == Vendor_NVIDIA ? nv_perf_level : NULL, /* Only use this if Nvidia */
 		NULL,
 	};

diff --git a/daemon/gamemode-tests.c b/daemon/gamemode-tests.c
index 57b1746..540311d 100644
--- a/daemon/gamemode-tests.c
+++ b/daemon/gamemode-tests.c
@@ -456,9 +456,13 @@ int run_gpu_optimisation_tests(struct GameModeConfig *config)
 	char original_amd_performance_level[CONFIG_VALUE_MAX];
 	strncpy(original_amd_performance_level, gpuinfo->amd_performance_level, CONFIG_VALUE_MAX);

-	/* account for when powermizer is not set */
+	/* account for when settings are not set */
 	if (expected_nv_powermizer_mode == -1)
 		expected_nv_powermizer_mode = original_nv_powermizer_mode;
+	if (expected_core == -1)
+		expected_core = original_nv_core;
+	if (expected_mem == -1)
+		expected_mem = original_nv_mem;

 	/* Start gamemode and check the new values */
 	gamemode_request_start();
diff --git a/daemon/gpu-control.h b/daemon/gpu-control.h
index e58b11b..fd9ec21 100644
--- a/daemon/gpu-control.h
+++ b/daemon/gpu-control.h
@@ -46,8 +46,7 @@ enum GPUVendor {
 /* Storage for GPU info*/
 struct GameModeGPUInfo {
 	long vendor;
-	long device;        /* path to device, ie. /sys/class/drm/card#/ */
-	long nv_perf_level; /* The Nvidia Performance Level to adjust */
+	long device; /* path to device, ie. /sys/class/drm/card#/ */

 	long nv_core; /* Nvidia core clock */
 	long nv_mem;  /* Nvidia mem clock */
diff --git a/daemon/gpuclockctl.c b/daemon/gpuclockctl.c
index f29093b..e50f5db 100644
--- a/daemon/gpuclockctl.c
+++ b/daemon/gpuclockctl.c
@@ -40,6 +40,7 @@ POSSIBILITY OF SUCH DAMAGE.
 #define NV_CORE_OFFSET_ATTRIBUTE "GPUGraphicsClockOffset"
 #define NV_MEM_OFFSET_ATTRIBUTE "GPUMemoryTransferRateOffset"
 #define NV_POWERMIZER_MODE_ATTRIBUTE "GPUPowerMizerMode"
+#define NV_PERFMODES_ATTRIBUTE "GPUPerfModes"
 #define NV_ATTRIBUTE_FORMAT "[gpu:%ld]/%s"
 #define NV_PERF_LEVEL_FORMAT "[%ld]"

@@ -54,14 +55,16 @@ POSSIBILITY OF SUCH DAMAGE.

 /* Helper to quit with usage */
 static const char *usage_text =
-    "usage: gpuclockctl PCI_ID DEVICE [get] [set CORE MEM [PERF_LEVEL]]]";
+    "usage: gpuclockctl DEVICE {arg}\n\t\tget - return current values\n\t\tset [NV_CORE NV_MEM "
+    "NV_POWERMIZER_MODE | AMD_PERFORMANCE_LEVEL] - set current values";
 static void print_usage_and_exit(void)
 {
 	fprintf(stderr, "%s\n", usage_text);
 	exit(EXIT_FAILURE);
 }

-static int get_gpu_state_nv(struct GameModeGPUInfo *info)
+/* Get the max nvidia perf level */
+static long get_max_perf_level_nv(struct GameModeGPUInfo *info)
 {
 	if (info->vendor != Vendor_NVIDIA)
 		return -1;

@@ -69,48 +72,80 @@ POSSIBILITY OF SUCH DAMAGE.
 	if (!getenv("DISPLAY"))
 		LOG_ERROR("Getting Nvidia parameters requires DISPLAY to be set - will likely fail!\n");

+	char arg[128] = { 0 };
+	char buf[EXTERNAL_BUFFER_MAX] = { 0 };
+
+	snprintf(arg, 128, NV_ATTRIBUTE_FORMAT, info->device, NV_PERFMODES_ATTRIBUTE);
+	const char *exec_args[] = { "/usr/bin/nvidia-settings", "-q", arg, "-t", NULL };
+	if (run_external_process(exec_args, buf, -1) != 0) {
+		LOG_ERROR("Failed to get %s!\n", arg);
+		return -1;
+	}
+
+	char *ptr = strrchr(buf, ';');
+	long level = -1;
+	if (!ptr || sscanf(ptr, "; perf=%ld", &level) != 1) {
+		LOG_ERROR(
+		    "Output didn't match expected format, couldn't discern highest perf level from "
+		    "nvidia-settings!\n");
+		LOG_ERROR("Output:%s\n", buf);
+		return -1;
+	}
+
+	return level;
+}
+
+/* Get the nvidia gpu state */
+static int get_gpu_state_nv(struct GameModeGPUInfo *info)
+{
+	if (info->vendor != Vendor_NVIDIA)
+		return -1;
+
+	if (!getenv("DISPLAY"))
+		LOG_ERROR("Getting Nvidia parameters requires DISPLAY to be set - will likely fail!\n");
+
+	long perf_level = get_max_perf_level_nv(info);
+
 	char arg[128] = { 0 };
 	char buf[EXTERNAL_BUFFER_MAX] = { 0 };
 	char *end;

-	if (info->nv_perf_level != -1) {
-		/* Get the GPUGraphicsClockOffset parameter */
-		snprintf(arg,
-		         128,
-		         NV_ATTRIBUTE_FORMAT NV_PERF_LEVEL_FORMAT,
-		         info->device,
-		         NV_CORE_OFFSET_ATTRIBUTE,
-		         info->nv_perf_level);
-		const char *exec_args_core[] = { "/usr/bin/nvidia-settings", "-q", arg, "-t", NULL };
-		if (run_external_process(exec_args_core, buf, -1) != 0) {
-			LOG_ERROR("Failed to get %s!\n", arg);
-			return -1;
-		}
+	/* Get the GPUGraphicsClockOffset parameter */
+	snprintf(arg,
+	         128,
+	         NV_ATTRIBUTE_FORMAT NV_PERF_LEVEL_FORMAT,
+	         info->device,
+	         NV_CORE_OFFSET_ATTRIBUTE,
+	         perf_level);
+	const char *exec_args_core[] = { "/usr/bin/nvidia-settings", "-q", arg, "-t", NULL };
+	if (run_external_process(exec_args_core, buf, -1) != 0) {
+		LOG_ERROR("Failed to get %s!\n", arg);
+		return -1;
+	}

-		info->nv_core = strtol(buf, &end, 10);
-		if (end == buf) {
-			LOG_ERROR("Failed to parse output for \"%s\" output was \"%s\"!\n", arg, buf);
output was \"%s\"!\n", arg, buf); + return -1; + } - /* Get the GPUMemoryTransferRateOffset parameter */ - snprintf(arg, - 128, - NV_ATTRIBUTE_FORMAT NV_PERF_LEVEL_FORMAT, - info->device, - NV_MEM_OFFSET_ATTRIBUTE, - info->nv_perf_level); - const char *exec_args_mem[] = { "/usr/bin/nvidia-settings", "-q", arg, "-t", NULL }; - if (run_external_process(exec_args_mem, buf, -1) != 0) { - LOG_ERROR("Failed to get %s!\n", arg); - return -1; - } + /* Get the GPUMemoryTransferRateOffset parameter */ + snprintf(arg, + 128, + NV_ATTRIBUTE_FORMAT NV_PERF_LEVEL_FORMAT, + info->device, + NV_MEM_OFFSET_ATTRIBUTE, + perf_level); + const char *exec_args_mem[] = { "/usr/bin/nvidia-settings", "-q", arg, "-t", NULL }; + if (run_external_process(exec_args_mem, buf, -1) != 0) { + LOG_ERROR("Failed to get %s!\n", arg); + return -1; + } - info->nv_mem = strtol(buf, &end, 10); - if (end == buf) { - LOG_ERROR("Failed to parse output for \"%s\" output was \"%s\"!\n", arg, buf); - return -1; - } + info->nv_mem = strtol(buf, &end, 10); + if (end == buf) { + LOG_ERROR("Failed to parse output for \"%s\" output was \"%s\"!\n", arg, buf); + return -1; } /* Get the GPUPowerMizerMode parameter */ @@ -135,6 +170,8 @@ static int get_gpu_state_nv(struct GameModeGPUInfo *info) */ static int set_gpu_state_nv(struct GameModeGPUInfo *info) { + int status = 0; + if (info->vendor != Vendor_NVIDIA) return -1; @@ -143,35 +180,39 @@ static int set_gpu_state_nv(struct GameModeGPUInfo *info) "Setting Nvidia parameters requires DISPLAY and XAUTHORITY to be set - will likely " "fail!\n"); + long perf_level = get_max_perf_level_nv(info); + char arg[128] = { 0 }; - if (info->nv_perf_level != -1) { - /* Set the GPUGraphicsClockOffset parameter */ + /* Set the GPUGraphicsClockOffset parameter */ + if (info->nv_core != -1) { snprintf(arg, 128, NV_ATTRIBUTE_FORMAT NV_PERF_LEVEL_FORMAT "=%ld", info->device, NV_CORE_OFFSET_ATTRIBUTE, - info->nv_perf_level, + perf_level, info->nv_core); const char *exec_args_core[] = { "/usr/bin/nvidia-settings", "-a", arg, NULL }; if (run_external_process(exec_args_core, NULL, -1) != 0) { LOG_ERROR("Failed to set %s!\n", arg); - return -1; + status = -1; } + } - /* Set the GPUMemoryTransferRateOffset parameter */ + /* Set the GPUMemoryTransferRateOffset parameter */ + if (info->nv_mem != -1) { snprintf(arg, 128, NV_ATTRIBUTE_FORMAT NV_PERF_LEVEL_FORMAT "=%ld", info->device, NV_MEM_OFFSET_ATTRIBUTE, - info->nv_perf_level, + perf_level, info->nv_mem); const char *exec_args_mem[] = { "/usr/bin/nvidia-settings", "-a", arg, NULL }; if (run_external_process(exec_args_mem, NULL, -1) != 0) { LOG_ERROR("Failed to set %s!\n", arg); - return -1; + status = -1; } } @@ -186,11 +227,11 @@ static int set_gpu_state_nv(struct GameModeGPUInfo *info) const char *exec_args_pm[] = { "/usr/bin/nvidia-settings", "-a", arg, NULL }; if (run_external_process(exec_args_pm, NULL, -1) != 0) { LOG_ERROR("Failed to set %s!\n", arg); - return -1; + status = -1; } } - return 0; + return status; } /** @@ -313,7 +354,7 @@ static long get_generic_value(const char *val) */ int main(int argc, char *argv[]) { - if (argc >= 3 && strncmp(argv[2], "get", 3) == 0) { + if (argc == 3 && strncmp(argv[2], "get", 3) == 0) { /* Get and verify the vendor and device */ struct GameModeGPUInfo info; memset(&info, 0, sizeof(info)); @@ -323,13 +364,6 @@ int main(int argc, char *argv[]) /* Fetch the state and print it out */ switch (info.vendor) { case Vendor_NVIDIA: - if (argc < 3) { - LOG_ERROR("Must pass perf_level for nvidia gpu!\n"); - print_usage_and_exit(); - } 
-			info.nv_perf_level = get_generic_value(argv[3]);
-
-			/* Get nvidia info */
 			if (get_gpu_state_nv(&info) != 0)
 				exit(EXIT_FAILURE);
 			printf("%ld %ld %ld\n", info.nv_core, info.nv_mem, info.nv_powermizer_mode);
@@ -354,18 +388,19 @@

 		switch (info.vendor) {
 		case Vendor_NVIDIA:
-			if (argc < 5) {
-				LOG_ERROR("Must pass at least 5 arguments for nvidia gpu!\n");
+			if (argc < 4) {
+				LOG_ERROR("Must pass at least 4 arguments for nvidia gpu!\n");
 				print_usage_and_exit();
 			}
 			info.nv_core = get_generic_value(argv[3]);
 			info.nv_mem = get_generic_value(argv[4]);
-			info.nv_perf_level = get_generic_value(argv[5]);

 			/* Optional */
 			info.nv_powermizer_mode = -1;
-			if (argc >= 5)
-				info.nv_powermizer_mode = get_generic_value(argv[6]);
+			if (argc >= 6)
+				info.nv_powermizer_mode = get_generic_value(argv[5]);
+
+			return set_gpu_state_nv(&info);
 			break;
 		case Vendor_AMD:
 			if (argc < 3) {
@@ -373,6 +408,7 @@ int main(int argc, char *argv[])
 				print_usage_and_exit();
 			}
 			strncpy(info.amd_performance_level, argv[3], CONFIG_VALUE_MAX);
+			return set_gpu_state_amd(&info);
 			break;
 		default:
 			LOG_ERROR("Currently unsupported GPU vendor 0x%04x, doing nothing!\n",
@@ -381,18 +417,6 @@
 			print_usage_and_exit();
 			break;
 		}
-		printf("gpuclockctl setting values on device:%ld with vendor 0x%04x",
-		       info.device,
-		       (unsigned short)info.vendor);
-
-		switch (info.vendor) {
-		case Vendor_NVIDIA:
-			return set_gpu_state_nv(&info);
-		case Vendor_AMD:
-			return set_gpu_state_amd(&info);
-		default:
-			break;
-		}
 	} else {
 		print_usage_and_exit();
 	}
diff --git a/example/gamemode.ini b/example/gamemode.ini
index 99ee74b..b4a2d6e 100644
--- a/example/gamemode.ini
+++ b/example/gamemode.ini
@@ -53,11 +53,8 @@ inhibit_screensaver=1
 ; See NV_CTRL_GPU_POWER_MIZER_MODE and friends in https://github.com/NVIDIA/nvidia-settings/blob/master/src/libXNVCtrl/NVCtrl.h
 ;nv_powermizer_mode=1

-; This corresponds to the performance level to edit in nvidia-xconfig
-; Set this to the highest level shown in the nvidia settings power mizer settings
-; Or from the command line check the highest perf value listed by "nvidia-settings -q [gpu:0]/GPUPerfModes"
-;nv_perf_level=1
-; (these two are Mhz offsets from the baseline, ie. 0 applies no change)
+; These will modify the core and mem clocks of the highest perf state in the Nvidia PowerMizer
+; They are measured as MHz offsets from the baseline; 0 will reset the values to default, and -1 or unset will not modify them
 ;nv_core_clock_mhz_offset=0
 ;nv_mem_clock_mhz_offset=0
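
Note for reviewers (not part of the patch itself): the new get_max_perf_level_nv() discerns the highest perf level by querying GPUPerfModes through nvidia-settings (for example "nvidia-settings -q [gpu:0]/GPUPerfModes -t", as the old gamemode.ini comment suggested doing by hand) and reading the perf= value of the last ';'-separated entry. The following is a minimal standalone sketch of that strrchr/sscanf parsing; the sample string is made up for illustration, since real driver output varies per GPU.

/* Illustrative sketch only: mimics the parsing in get_max_perf_level_nv */
#include <stdio.h>
#include <string.h>

int main(void)
{
	/* Hypothetical GPUPerfModes output: one "perf=N, ..." entry per level,
	 * separated by ';'. The last entry carries the highest perf level. */
	const char buf[] =
	    "perf=0, nvclock=139, nvclockmax=607 ; "
	    "perf=1, nvclock=139, nvclockmax=1911 ; "
	    "perf=2, nvclock=405, nvclockmax=1911";

	long level = -1;
	const char *ptr = strrchr(buf, ';'); /* jump to the final entry */
	if (!ptr || sscanf(ptr, "; perf=%ld", &level) != 1) {
		fprintf(stderr, "could not discern highest perf level\n");
		return 1;
	}

	printf("highest perf level: %ld\n", level); /* prints 2 for this sample */
	return 0;
}

If the driver output ever stops matching this shape, get_max_perf_level_nv logs the raw buffer and returns -1, the same failure value used elsewhere in gpuclockctl.c.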