Fix the parsing of gpu and memory clk/voltage table (#3)

* Fix the parsing of gpu and memory clk/voltage table

* Fix error handling

* Remove debug println

Co-authored-by: ilyazzz <ilya.zl@protonmail.com>
commit 45ab06c661
parent d4ef64afae
Author: neon-sunset
Date: 2020-12-17 10:16:15 +02:00
Committed by: GitHub

@@ -1,7 +1,7 @@
 use crate::config::{GpuConfig, GpuIdentifier};
 use crate::hw_mon::{HWMon, HWMonError};
 use serde::{Deserialize, Serialize};
-use std::{path::{Path, PathBuf}};
+use std::{num::ParseIntError, path::{Path, PathBuf}};
 use std::{collections::BTreeMap, fs};
 use vulkano::instance::{Instance, InstanceExtensions, PhysicalDevice};
@@ -23,6 +23,12 @@ impl From<std::io::Error> for GpuControllerError {
     }
 }

+impl From<ParseIntError> for GpuControllerError {
+    fn from(_err: ParseIntError) -> GpuControllerError {
+        GpuControllerError::ParseError
+    }
+}
+
 #[derive(Serialize, Deserialize, Debug, Clone)]
 pub enum PowerProfile {
     Auto,
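The new `From<ParseIntError>` impl is what lets the parsing helper further down use `?` directly on `parse()`. A minimal sketch of the mechanism (the error enum here is trimmed to the one variant relevant to this commit, and `parse_mhz` is a hypothetical helper for illustration, not part of the commit):

    use std::num::ParseIntError;

    #[derive(Debug)]
    enum GpuControllerError {
        ParseError,
    }

    impl From<ParseIntError> for GpuControllerError {
        fn from(_err: ParseIntError) -> GpuControllerError {
            GpuControllerError::ParseError
        }
    }

    // `?` converts the ParseIntError returned by `parse` into a GpuControllerError.
    fn parse_mhz(field: &str) -> Result<i32, GpuControllerError> {
        Ok(field.trim_end_matches("MHz").parse()?)
    }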
@@ -455,10 +461,8 @@ impl GpuController {
             "OD_SCLK:" => {
                 i += 1;
                 while (lines[i].split_at(2).0 != "OD") && i < lines.len() {
-                    let split: Vec<&str> = lines[i].split_whitespace().collect();
-                    let num = split[0].chars().nth(0).unwrap().to_digit(10).unwrap();
-                    let clock = split[1].replace("MHz", "").parse::<i32>().unwrap();
-                    let voltage = split[2].replace("mV", "").parse::<i32>().unwrap();
+                    let (num, clock, voltage) = GpuController::parse_clock_voltage_line(lines[i])?;
                     clocks_table.gpu_power_levels.insert(num, (clock, voltage));
                     log::trace!("Adding gpu power level {}MHz {}mv", clock, voltage);
                     i += 1;
@@ -467,10 +471,8 @@ impl GpuController {
             "OD_MCLK:" => {
                 i += 1;
                 while (lines[i].split_at(2).0 != "OD") && i < lines.len() {
-                    let split: Vec<&str> = lines[i].split_whitespace().collect();
-                    let num = split[0].chars().nth(0).unwrap().to_digit(10).unwrap();
-                    let clock = split[1].replace("MHz", "").parse::<i32>().unwrap();
-                    let voltage = split[2].replace("mV", "").parse::<i32>().unwrap();
+                    let (num, clock, voltage) = GpuController::parse_clock_voltage_line(lines[i])?;
                     clocks_table.mem_power_levels.insert(num, (clock, voltage));
                     log::trace!("Adding vram power level {}MHz {}mv", clock, voltage);
                     i += 1;
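Note that both loops evaluate `lines[i].split_at(2)` before checking `i < lines.len()`, so a table that ends without a trailing `OD` header line would panic on the out-of-bounds index (and `split_at(2)` itself panics on a line shorter than two bytes). A defensive variant, not part of this commit, would check bounds first:

    // Hypothetical reordering: test the index before using it.
    while i < lines.len() && !lines[i].starts_with("OD") {
        // ... parse lines[i] ...
        i += 1;
    }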
@@ -592,6 +594,17 @@ impl GpuController {
         }
     }

+    fn parse_clock_voltage_line(line: &str) -> Result<(u32, i32, i32), GpuControllerError> {
+        let line = line.to_uppercase();
+        let line_parts: Vec<&str> = line.split_whitespace().collect();
+
+        let num: u32 = line_parts[0].chars().nth(0).unwrap().to_digit(10).unwrap();
+        let clock: i32 = line_parts[1].strip_suffix("MHZ").ok_or_else(|| GpuControllerError::ParseError)?.parse()?;
+        let voltage: i32 = line_parts[2].strip_suffix("MV").ok_or_else(|| GpuControllerError::ParseError)?.parse()?;
+
+        Ok((num, clock, voltage))
+    }
 }
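For reference, a power-level line from the `pp_od_clk_voltage` table such as `1: 800MHz 900mV` now parses like this (illustrative values; the upfront `to_uppercase` makes the `MHz`/`mV` suffix match case-insensitive):

    let (num, clock, voltage) = GpuController::parse_clock_voltage_line("1: 800MHz 900mV")?;
    assert_eq!((num, clock, voltage), (1, 800, 900));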
@@ -604,4 +617,4 @@ mod tests {
         let mut c = GpuController::new(PathBuf::from("/sys/class/drm/card0/device"), GpuConfig::new());
         c.set_gpu_power_state(7, 1360, None)
     }
-}
+}
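A regression test for the new helper could look like this sketch (hypothetical, not part of the commit; it assumes the test sits in the same module so the private function is reachable):

    #[test]
    fn parses_clock_voltage_line() {
        let (num, clock, voltage) =
            GpuController::parse_clock_voltage_line("0: 300MHz 750mV").unwrap();
        assert_eq!((num, clock, voltage), (0, 300, 750));
    }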