Remove the nv_perf_level config option and figure it out programmatically
This also fixes the instances in testing where we don't have the NV overclock in use, but we do have the mode set. Solves issues explaining what the perf_level actually meant, and future-proofs for any PR that wants to set individual perf levels.
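For context, a minimal sketch of how the perf level could be discovered programmatically, assuming the nvidia-settings CLI and its GPUPerfModes attribute are available; this illustrates the idea rather than the commit's actual implementation:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Sketch only: query the driver for the available perf modes and take
 * the highest index, instead of reading it from the config file. The
 * nvidia-settings invocation and output format are assumptions. */
static long discover_nv_perf_level(int device)
{
	char cmd[128];
	snprintf(cmd, sizeof(cmd),
	         "nvidia-settings -q \"[gpu:%d]/GPUPerfModes\" -t", device);

	FILE *out = popen(cmd, "r");
	if (!out)
		return -1;

	long highest = -1;
	char line[1024];
	while (fgets(line, sizeof(line), out)) {
		/* Each mode is reported as "perf=N, nvclock=..., ..." */
		for (char *p = strstr(line, "perf="); p; p = strstr(p + 1, "perf=")) {
			long level = strtol(p + 5, NULL, 10);
			if (level > highest)
				highest = level;
		}
	}
	pclose(out);
	return highest; /* -1 on failure, matching the sentinel used below */
}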
@@ -92,7 +92,6 @@ int game_mode_initialise_gpu(GameModeConfig *config, GameModeGPUInfo **info)
 	case Vendor_NVIDIA:
 		new_info->nv_core = config_get_nv_core_clock_mhz_offset(config);
 		new_info->nv_mem = config_get_nv_mem_clock_mhz_offset(config);
-		new_info->nv_perf_level = config_get_nv_perf_level(config);
 		new_info->nv_powermizer_mode = config_get_nv_powermizer_mode(config);
 
 		/* Reject values over some guessed values
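The truncated comment above introduces bounds checks on the configured offsets; a minimal illustrative sketch of such a check, with limit values that are guesses rather than the project's actual constants:

#include <stdio.h>
#include <stdlib.h>

/* Illustrative only: reject clock offsets beyond conservative limits.
 * The limits here are assumed values, not gamemode's actual constants. */
static int nv_offsets_sane(long nv_core, long nv_mem)
{
	const long core_limit = 1500; /* MHz offset, assumed */
	const long mem_limit = 2000;  /* MHz offset, assumed */

	if (labs(nv_core) > core_limit || labs(nv_mem) > mem_limit) {
		fprintf(stderr, "clock offsets look invalid (%ld/%ld)\n", nv_core, nv_mem);
		return 0;
	}
	return 1;
}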
@@ -113,18 +112,6 @@ int game_mode_initialise_gpu(GameModeConfig *config, GameModeGPUInfo **info)
 			return -1;
 		}
-
-		/* Sanity check the performance level value as well */
-		/* Allow an invalid perf level if we've got the powermizer mode set */
-		if (!(new_info->nv_perf_level == -1 && new_info->nv_powermizer_mode != -1) &&
-		    (new_info->nv_perf_level < 0 || new_info->nv_perf_level > 16)) {
-			LOG_ERROR(
-			    "NVIDIA Performance level value likely invalid (%ld), will not apply "
-			    "optimisations!\n",
-			    new_info->nv_perf_level);
-			free(new_info);
-			return -1;
-		}
 
 		break;
 	case Vendor_AMD:
 		config_get_amd_performance_level(config, new_info->amd_performance_level);
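The check removed above allowed the sentinel perf level of -1 only when a PowerMizer mode was configured; restated as a standalone predicate (the function name is hypothetical):

#include <stdbool.h>

/* Restates the removed sanity check: reject a perf level outside 0..16,
 * except for the sentinel -1 when a PowerMizer mode is set. */
static bool nv_perf_level_rejected(long perf_level, long powermizer_mode)
{
	bool sentinel_ok = (perf_level == -1 && powermizer_mode != -1);
	bool out_of_range = (perf_level < 0 || perf_level > 16);
	return !sentinel_ok && out_of_range;
}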
@@ -177,8 +164,6 @@ int game_mode_apply_gpu(const GameModeGPUInfo *info)
 	snprintf(nv_core, 8, "%ld", info->nv_core);
 	char nv_mem[8];
 	snprintf(nv_mem, 8, "%ld", info->nv_mem);
-	char nv_perf_level[4];
-	snprintf(nv_perf_level, 4, "%ld", info->nv_perf_level);
 	char nv_powermizer_mode[4];
 	snprintf(nv_powermizer_mode, 4, "%ld", info->nv_powermizer_mode);
 
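The fixed-size buffers above lean on snprintf's guarantees: it never overruns the buffer and always NUL-terminates, truncating values that do not fit. A small self-contained illustration:

#include <stdio.h>

int main(void)
{
	char buf[4]; /* room for 3 characters plus the terminator */
	int needed = snprintf(buf, sizeof(buf), "%ld", 12345L);
	/* Prints: buf="123", needed=5 */
	printf("buf=\"%s\", needed=%d\n", buf, needed);
	return 0;
}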
@@ -190,7 +175,6 @@ int game_mode_apply_gpu(const GameModeGPUInfo *info)
 	    "set",
 	    info->vendor == Vendor_NVIDIA ? nv_core : info->amd_performance_level,
 	    info->vendor == Vendor_NVIDIA ? nv_mem : NULL, /* Only use this if Nvidia */
-	    info->vendor == Vendor_NVIDIA ? nv_perf_level : NULL, /* Only use this if Nvidia */
 	    info->vendor == Vendor_NVIDIA ? nv_powermizer_mode : NULL, /* Only use this if Nvidia */
 	    NULL,
 	};
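For orientation, the argv built above is handed to an external helper (the set path runs gpuclockctl under pkexec for elevated permissions). A minimal sketch of the spawning mechanism, as an assumption about how it is run rather than GameMode's actual helper:

#include <sys/wait.h>
#include <unistd.h>

/* Sketch only: fork/execv the helper and report its exit status.
 * GameMode has its own process-running code; this just shows the
 * mechanism. argv[0] must be an absolute path, argv NULL-terminated. */
static int run_helper(char *const argv[])
{
	pid_t pid = fork();
	if (pid < 0)
		return -1;
	if (pid == 0) {
		execv(argv[0], argv); /* returns only on failure */
		_exit(127);
	}

	int status = 0;
	if (waitpid(pid, &status, 0) < 0)
		return -1;
	return WIFEXITED(status) ? WEXITSTATUS(status) : -1;
}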
@@ -211,8 +195,6 @@ int game_mode_get_gpu(GameModeGPUInfo *info)
 	/* Generate the input strings */
 	char device[4];
 	snprintf(device, 4, "%ld", info->device);
-	char nv_perf_level[4];
-	snprintf(nv_perf_level, 4, "%ld", info->nv_perf_level);
 
 	// Set up our command line to pass to gpuclockctl
 	// This doesn't need pkexec as get does not need elevated perms
@@ -220,7 +202,6 @@ int game_mode_get_gpu(GameModeGPUInfo *info)
 	    LIBEXECDIR "/gpuclockctl",
 	    device,
 	    "get",
-	    info->vendor == Vendor_NVIDIA ? nv_perf_level : NULL, /* Only use this if Nvidia */
 	    NULL,
 	};
 
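With the perf-level argument dropped from both call sites, gpuclockctl only receives a device index and a mode, and must discover the perf level itself. A hypothetical sketch of the resulting argument handling (the layout is assumed, not taken from the project):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(int argc, char *argv[])
{
	if (argc < 3) {
		fprintf(stderr, "usage: gpuclockctl DEVICE get|set [...]\n");
		return EXIT_FAILURE;
	}

	long device = strtol(argv[1], NULL, 10);
	if (strcmp(argv[2], "get") == 0) {
		/* Perf level now comes from the driver, not from argv */
		printf("would query GPU state for device %ld\n", device);
		return EXIT_SUCCESS;
	}

	fprintf(stderr, "unknown mode \"%s\"\n", argv[2]);
	return EXIT_FAILURE;
}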