mirror of https://github.com/tauri-apps/tauri.git
fix(bench): Result interpretation problems (#5798)
Co-authored-by: Lucas Nogueira <lucas@tauri.studio>
.github/workflows/bench.yml (1 addition)
@@ -10,6 +10,7 @@ on:
 env:
   RUST_BACKTRACE: 1
   CARGO_PROFILE_DEV_DEBUG: 0 # This would add unnecessary bloat to the target folder, decreasing cache efficiency.
+  LC_ALL: en_US.UTF-8 # This prevents strace from changing it's number format to use commas.

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
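The new LC_ALL entry matters to the parsers further down: the strace summary numbers are read with plain str::parse, which rejects locale-style digit grouping. A tiny illustration of the failure mode (values made up):

// Under a locale that groups thousands, strace can print "12,345" where the
// bench parser expects "12345"; `str::parse` fails on the grouped form.
fn main() {
  assert!("12345".parse::<u64>().is_ok());
  assert!("12,345".parse::<u64>().is_err());
}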
@@ -22,7 +22,7 @@ fn main() {

   // all data's
   let all_data_buffer =
-    BufReader::new(File::open(&tauri_data).expect("Unable to read all data file"));
+    BufReader::new(File::open(tauri_data).expect("Unable to read all data file"));
   let mut all_data: Vec<utils::BenchResult> =
     serde_json::from_reader(all_data_buffer).expect("Unable to read all data buffer");
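This hunk, together with the later parse_max_mem, rlib_size, and download_file hunks, drops a redundant `&`: the argument already satisfies the callee's bound (or is already a reference), so the extra borrow only adds indirection; clippy reports this pattern as needless_borrow. A minimal sketch under assumed types (open_data and its &Path parameter are hypothetical, not the bench code's real names):

use std::fs::File;
use std::path::Path;

// Hypothetical helper mirroring the pattern: `path` is already a reference.
fn open_data(path: &Path) -> File {
  // `File::open` takes `impl AsRef<Path>`, which `&Path` satisfies directly,
  // so writing `File::open(&path)` would only add a needless extra `&`.
  File::open(path).expect("Unable to read all data file")
}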
@@ -30,12 +30,11 @@ fn main() {
   all_data.push(current_data);

   // use only latest 20 elements from alls data
-  let recent: Vec<utils::BenchResult>;
-  if all_data.len() > 20 {
-    recent = all_data[all_data.len() - 20..].to_vec();
+  let recent: Vec<utils::BenchResult> = if all_data.len() > 20 {
+    all_data[all_data.len() - 20..].to_vec()
   } else {
-    recent = all_data.clone();
-  }
+    all_data.clone()
+  };

   // write json's
   utils::write_json(
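The deferred `let recent; if ... {...} else {...}` assignment becomes a single binding because if/else is an expression in Rust; the clone in the short-data branch is unchanged. A standalone sketch of the same shape, with plain integers standing in for utils::BenchResult:

fn main() {
  let all_data: Vec<u32> = (1..=25).collect();

  // Keep only the latest 20 elements; each branch produces the value directly.
  let recent: Vec<u32> = if all_data.len() > 20 {
    all_data[all_data.len() - 20..].to_vec()
  } else {
    all_data.clone()
  };

  assert_eq!(recent.len(), 20);
}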
@@ -44,7 +43,7 @@ fn main() {
       .expect("Something wrong with tauri_data"),
     &serde_json::to_value(&all_data).expect("Unable to build final json (alls)"),
   )
-  .expect(format!("Unable to write {:?}", tauri_data).as_str());
+  .unwrap_or_else(|_| panic!("Unable to write {:?}", tauri_data));

   utils::write_json(
     tauri_recent
@@ -52,5 +51,5 @@ fn main() {
       .expect("Something wrong with tauri_recent"),
     &serde_json::to_value(&recent).expect("Unable to build final json (recent)"),
   )
-  .expect(format!("Unable to write {:?}", tauri_recent).as_str());
+  .unwrap_or_else(|_| panic!("Unable to write {:?}", tauri_recent));
 }
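Both write_json call sites swap `.expect(format!(...).as_str())` for `.unwrap_or_else(|_| panic!(...))`: with expect, the message String is built even when the write succeeds, which is what clippy's expect_fun_call lint points out. A minimal before/after sketch (the path value and the Ok result are made up):

fn main() {
  let path = std::path::PathBuf::from("tauri-data.json"); // hypothetical
  let result: Result<(), std::io::Error> = Ok(());

  // Eager: the panic message is formatted even though `result` is Ok.
  result
    .as_ref()
    .expect(format!("Unable to write {:?}", path).as_str());

  // Lazy: the closure (and its allocation) only runs on Err.
  result.unwrap_or_else(|_| panic!("Unable to write {:?}", path));
}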
@@ -49,7 +49,7 @@ fn run_strace_benchmarks(new_data: &mut utils::BenchResult) -> Result<()> {
     let mut file = tempfile::NamedTempFile::new()?;

     Command::new("strace")
-      .args(&[
+      .args([
         "-c",
         "-f",
         "-o",
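Here, and again for the mprof and cargo tree invocations below, `.args(&[...])` becomes `.args([...])`. Command::args accepts any IntoIterator of AsRef<OsStr> items, and since Rust 1.53 arrays implement IntoIterator by value, so the borrow adds nothing. A sketch using `echo` as a stand-in command:

use std::process::Command;

fn main() {
  // Passing the array by value; each &str item satisfies AsRef<OsStr>.
  let status = Command::new("echo")
    .args(["-n", "hello"])
    .status()
    .expect("failed to run echo");
  println!("exit: {status}");
}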
@@ -64,7 +64,10 @@ fn run_strace_benchmarks(new_data: &mut utils::BenchResult) -> Result<()> {
     file.as_file_mut().read_to_string(&mut output)?;

     let strace_result = utils::parse_strace_output(&output);
-    let clone = strace_result.get("clone").map(|d| d.calls).unwrap_or(0) + 1;
+    // Note, we always have 1 thread. Use cloneX calls as counter for additional threads created.
+    let clone = 1
+      + strace_result.get("clone").map(|d| d.calls).unwrap_or(0)
+      + strace_result.get("clone3").map(|d| d.calls).unwrap_or(0);
     let total = strace_result.get("total").unwrap().calls;
     thread_count.insert(name.to_string(), clone);
     syscall_count.insert(name.to_string(), total);
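This is the main "result interpretation" fix the commit title refers to: the thread count previously added 1 to the number of `clone` calls only, so processes whose threads are spawned through the newer `clone3` syscall were under-counted. A self-contained sketch with a simplified stand-in for utils::StraceOutput:

use std::collections::HashMap;

// Simplified stand-in for utils::StraceOutput, keeping only the call count.
#[derive(Debug)]
struct StraceOutput {
  calls: u64,
}

// Threads = the main thread plus every clone/clone3 syscall strace counted.
fn thread_count(strace_result: &HashMap<String, StraceOutput>) -> u64 {
  1 + strace_result.get("clone").map(|d| d.calls).unwrap_or(0)
    + strace_result.get("clone3").map(|d| d.calls).unwrap_or(0)
}

fn main() {
  let mut result = HashMap::new();
  result.insert("clone3".to_string(), StraceOutput { calls: 4 });
  // Before this fix only "clone" was read, so a clone3-only run reported 1 thread.
  assert_eq!(thread_count(&result), 5);
}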
@@ -84,7 +87,7 @@ fn run_max_mem_benchmark() -> Result<HashMap<String, u64>> {
     let benchmark_file = benchmark_file.to_str().unwrap();

     let proc = Command::new("mprof")
-      .args(&[
+      .args([
         "run",
         "-C",
         "-o",
@@ -99,7 +102,7 @@ fn run_max_mem_benchmark() -> Result<HashMap<String, u64>> {
     println!("{:?}", proc_result);
     results.insert(
       name.to_string(),
-      utils::parse_max_mem(&benchmark_file).unwrap(),
+      utils::parse_max_mem(benchmark_file).unwrap(),
     );
   }
@@ -132,7 +135,7 @@ fn rlib_size(target_dir: &std::path::Path, prefix: &str) -> u64 {
 fn get_binary_sizes(target_dir: &Path) -> Result<HashMap<String, u64>> {
   let mut sizes = HashMap::<String, u64>::new();

-  let wry_size = rlib_size(&target_dir, "libwry");
+  let wry_size = rlib_size(target_dir, "libwry");
   println!("wry {} bytes", wry_size);
   sizes.insert("wry_rlib".to_string(), wry_size);
@@ -174,9 +177,9 @@ fn cargo_deps() -> HashMap<String, usize> {
   let mut cmd = Command::new("cargo");
   cmd.arg("tree");
   cmd.arg("--no-dedupe");
-  cmd.args(&["--edges", "normal"]);
-  cmd.args(&["--prefix", "none"]);
-  cmd.args(&["--target", target]);
+  cmd.args(["--edges", "normal"]);
+  cmd.args(["--prefix", "none"]);
+  cmd.args(["--target", target]);
   cmd.current_dir(&utils::tauri_root_path());

   let full_deps = cmd.output().expect("failed to run cargo tree").stdout;
@@ -268,7 +271,7 @@ fn main() -> Result<()> {
     time::format_description::parse("[year]-[month]-[day]T[hour]:[minute]:[second]Z").unwrap();
   let now = time::OffsetDateTime::now_utc();
   let mut new_data = utils::BenchResult {
-    created_at: format!("{}", now.format(&format).unwrap()),
+    created_at: now.format(&format).unwrap(),
     sha1: utils::run_collect(&["git", "rev-parse", "HEAD"])
       .0
       .trim()
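OffsetDateTime::format already returns a String, so wrapping it in `format!("{}", ...)` only re-allocates the same value. A sketch, assuming the `time` crate with its formatting and format-description parsing features enabled:

// Assumes the `time` crate (the bench code's date/time dependency).
fn main() {
  let format =
    time::format_description::parse("[year]-[month]-[day]T[hour]:[minute]:[second]Z").unwrap();
  let now = time::OffsetDateTime::now_utc();

  // `format` yields a String directly; no `format!("{}", ...)` wrapper needed.
  let created_at: String = now.format(&format).unwrap();
  println!("{created_at}");
}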
@@ -45,12 +45,11 @@ pub fn get_target() -> &'static str {
 }

 pub fn target_dir() -> PathBuf {
-  let target_dir = bench_root_path()
+  bench_root_path()
     .join("tests")
     .join("target")
     .join(get_target())
-    .join("release");
-  target_dir.into()
+    .join("release")
 }

 pub fn bench_root_path() -> PathBuf {
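PathBuf::join already returns a PathBuf, so the temporary binding and the no-op `.into()` conversion can go and the join chain becomes the function's tail expression. A sketch with hypothetical stand-ins for bench_root_path() and the target component:

use std::path::PathBuf;

// Hypothetical root; the real code derives this from the repository layout.
fn bench_root_path() -> PathBuf {
  PathBuf::from("/tmp/bench")
}

fn target_dir() -> PathBuf {
  // The chain evaluates to a PathBuf, so it can be returned directly.
  bench_root_path().join("tests").join("target").join("release")
}

fn main() {
  println!("{}", target_dir().display());
}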
@@ -105,16 +104,14 @@ pub fn parse_max_mem(file_path: &str) -> Option<u64> {
   let output = BufReader::new(file);
   let mut highest: u64 = 0;
   // MEM 203.437500 1621617192.4123
-  for line in output.lines() {
-    if let Ok(line) = line {
-      // split line by space
-      let split = line.split(" ").collect::<Vec<_>>();
-      if split.len() == 3 {
-        // mprof generate result in MB
-        let current_bytes = str::parse::<f64>(split[1]).unwrap() as u64 * 1024 * 1024;
-        if current_bytes > highest {
-          highest = current_bytes;
-        }
+  for line in output.lines().flatten() {
+    // split line by space
+    let split = line.split(' ').collect::<Vec<_>>();
+    if split.len() == 3 {
+      // mprof generate result in MB
+      let current_bytes = str::parse::<f64>(split[1]).unwrap() as u64 * 1024 * 1024;
+      if current_bytes > highest {
+        highest = current_bytes;
+      }
     }
   }
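BufRead::lines yields io::Result<String>, and calling `.flatten()` on that iterator skips the Err items, which is exactly what the removed `if let Ok(line)` level did, so one level of nesting disappears. A standalone sketch over an in-memory reader standing in for the mprof output file (sample lines made up):

use std::io::{BufRead, Cursor};

fn main() {
  // Cursor over a &str implements BufRead, standing in for the mprof output file.
  let output = Cursor::new("MEM 203.437500 1621617192.4123\nMEM 512.0 1621617193.4123\n");

  let mut highest: u64 = 0;
  // `lines()` yields io::Result<String>; `flatten()` keeps only the Ok lines.
  for line in output.lines().flatten() {
    let split = line.split(' ').collect::<Vec<_>>();
    if split.len() == 3 {
      // mprof reports megabytes; convert to bytes.
      let current_bytes = split[1].parse::<f64>().unwrap() as u64 * 1024 * 1024;
      highest = highest.max(current_bytes);
    }
  }
  assert_eq!(highest, 512 * 1024 * 1024);
}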
@@ -169,14 +166,26 @@ pub fn parse_strace_output(output: &str) -> HashMap<String, StraceOutput> {
   }

   let total_fields = total_line.split_whitespace().collect::<Vec<_>>();

   summary.insert(
     "total".to_string(),
-    StraceOutput {
-      percent_time: str::parse::<f64>(total_fields[0]).unwrap(),
-      seconds: str::parse::<f64>(total_fields[1]).unwrap(),
-      usecs_per_call: None,
-      calls: str::parse::<u64>(total_fields[2]).unwrap(),
-      errors: str::parse::<u64>(total_fields[3]).unwrap(),
+    match total_fields.len() {
+      // Old format, has no usecs/call
+      5 => StraceOutput {
+        percent_time: str::parse::<f64>(total_fields[0]).unwrap(),
+        seconds: str::parse::<f64>(total_fields[1]).unwrap(),
+        usecs_per_call: None,
+        calls: str::parse::<u64>(total_fields[2]).unwrap(),
+        errors: str::parse::<u64>(total_fields[3]).unwrap(),
+      },
+      6 => StraceOutput {
+        percent_time: str::parse::<f64>(total_fields[0]).unwrap(),
+        seconds: str::parse::<f64>(total_fields[1]).unwrap(),
+        usecs_per_call: Some(str::parse::<u64>(total_fields[2]).unwrap()),
+        calls: str::parse::<u64>(total_fields[3]).unwrap(),
+        errors: str::parse::<u64>(total_fields[4]).unwrap(),
+      },
+      _ => panic!("Unexpected total field count: {}", total_fields.len()),
     },
   );
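This is the other interpretation fix: newer strace releases fill in a usecs/call column on the summary's `total` row, so its fields can no longer be read at fixed positions; the code now dispatches on the field count (5 for the old layout, 6 for the new one) and panics on anything else. A self-contained sketch of that dispatch (simplified struct, illustrative input lines):

#[derive(Debug, PartialEq)]
struct Total {
  percent_time: f64,
  seconds: f64,
  usecs_per_call: Option<u64>,
  calls: u64,
  errors: u64,
}

fn parse_total(total_line: &str) -> Total {
  let f = total_line.split_whitespace().collect::<Vec<_>>();
  match f.len() {
    // Old strace layout, no usecs/call on the total row: "100.00 0.016015 2148 45 total"
    5 => Total {
      percent_time: f[0].parse().unwrap(),
      seconds: f[1].parse().unwrap(),
      usecs_per_call: None,
      calls: f[2].parse().unwrap(),
      errors: f[3].parse().unwrap(),
    },
    // Newer strace adds usecs/call: "100.00 0.016015 7 2148 45 total"
    6 => Total {
      percent_time: f[0].parse().unwrap(),
      seconds: f[1].parse().unwrap(),
      usecs_per_call: Some(f[2].parse().unwrap()),
      calls: f[3].parse().unwrap(),
      errors: f[4].parse().unwrap(),
    },
    n => panic!("Unexpected total field count: {n}"),
  }
}

fn main() {
  assert_eq!(parse_total("100.00 0.016015 2148 45 total").usecs_per_call, None);
  assert_eq!(parse_total("100.00 0.016015 7 2148 45 total").usecs_per_call, Some(7));
}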
|
||||
@@ -222,7 +231,7 @@ pub fn download_file(url: &str, filename: PathBuf) {
|
||||
.arg("-s")
|
||||
.arg("-o")
|
||||
.arg(&filename)
|
||||
.arg(&url)
|
||||
.arg(url)
|
||||
.status()
|
||||
.unwrap();
|
||||