[WIP] Add CI benchmarking
bjorn3 committed Dec 17, 2024
1 parent f90e45c commit 906e172
Showing 3 changed files with 292 additions and 0 deletions.
65 changes: 65 additions & 0 deletions .github/workflows/bench.yml
@@ -0,0 +1,65 @@
name: Benchmark

permissions:
  contents: read

on:
  push:
  workflow_dispatch:
    inputs:
      ref:
        description: "The commit or branch to benchmark"
        required: true
        type: string
  merge_group:
    branches:
      - main

jobs:
  bench:
    name: "Benchmark ${{ matrix.name }}"
    runs-on: ${{ matrix.os }}
    timeout-minutes: 30
    strategy:
      matrix:
        include:
          - name: linux-x86
            os: [benchmark, X64]
            target: "x86_64-unknown-linux-gnu"
          - name: macos-arm64
            os: [benchmark, ARM64, macOS]
            target: "aarch64-apple-darwin"
    steps:
      - name: Checkout sources
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
        with:
          persist-credentials: false
          ref: "${{inputs.ref}}"
      - name: cargo build
        run: |
          . "$HOME/.cargo/env"
          cargo build --target ${{matrix.target}} -p test-libz-rs-sys --release --examples
          cd benchmarker && cargo build --release
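      # Benchmark compression at level 9 on silesia-small.tar, presumably comparing the
      # zlib-rs ("rs") and zlib-ng ("ng") backends; the corpus is assumed to already be
      # present on the runner, since the workflow does not download it.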
      - name: Benchmark
        run: |
          cp target/${{matrix.target}}/release/examples/blogpost-compress .
          benchmarker/target/release/benchmarker "blogpost-compress 9 rs silesia-small.tar" "blogpost-compress 9 ng silesia-small.tar" > bench_results.json
      - name: Upload benchmark results to artifacts
        uses: actions/upload-artifact@v4
        with:
          name: "benchmark-results-${{matrix.name}}"
          path: bench_results.json
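      # On push, append the JSON results to the zlib-rs-bench data repository,
      # authenticating with the deploy key stored in the BENCH_DATA_DEPLOY_KEY secret.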
      - name: Upload benchmark results to bench repo
        if: github.event_name == 'push'
        run: |
          mkdir -p ~/.ssh
          echo "${{ secrets.BENCH_DATA_DEPLOY_KEY }}" > ~/.ssh/id_ed25519
          chmod 600 ~/.ssh/id_ed25519
          chmod 700 ~/.ssh
          git clone --depth 1 git@github.com:trifectatechfoundation/zlib-rs-bench.git
          cat bench_results.json >> zlib-rs-bench/metrics-${{matrix.name}}.json
          cd zlib-rs-bench
          git add .
          git -c user.name="Perf bot" -c user.email=[email protected] commit --message 📈
          git push origin main
13 changes: 13 additions & 0 deletions benchmarker/Cargo.toml
@@ -0,0 +1,13 @@
[package]
name = "benchmarker"
version = "0.0.0"
edition = "2021"
license = "Apache-2.0 OR MIT"
publish = false

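# An empty [workspace] table makes this crate its own workspace root,
# keeping the helper out of the surrounding zlib-rs workspace.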
[workspace]

[dependencies]
libc = "0.2.168"
serde = { version = "1.0.216", features = ["derive"] }
serde_json = "1.0.133"
214 changes: 214 additions & 0 deletions benchmarker/src/main.rs
@@ -0,0 +1,214 @@
use std::collections::BTreeMap;
use std::process::Command;
use std::time::SystemTime;
use std::{env, fs};

use serde::{Deserialize, Serialize};

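/// One counter record as printed by `perf stat -j` (one JSON object per line
/// on stderr); the kebab-case rename maps `counter_value` to the `counter-value`
/// key that perf emits.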
#[derive(Debug, Deserialize)]
#[serde(rename_all = "kebab-case")]
struct PerfData {
    event: String,
    counter_value: String,
    unit: String,
}

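/// One benchmark run. `main` serializes this as a single JSON line, which the
/// workflow appends to the per-runner `metrics-*.json` file in zlib-rs-bench.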
#[derive(Debug, Serialize)]
struct BenchData {
    // What and when are we benchmarking
    commit_hash: String,
    timestamp: SystemTime,

    // Where are we benchmarking it on
    arch: String,
    os: String,
    runner: String,
    cpu_model: String,

    // The actual results for individual benchmarks
    results: Vec<SingleBench>,
}

#[derive(Debug, Serialize)]
struct SingleBench {
    cmd: Vec<String>,
    counters: BTreeMap<String, BenchCounter>,
}

#[derive(Debug, Serialize)]
struct BenchCounter {
    value: String,
    unit: String,
}

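// The markdown rendering is only for human consumption: `main` echoes it to
// stderr and writes it to the GitHub Actions step summary.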
impl BenchData {
    fn render_markdown(&self) -> String {
        use std::fmt::Write;

        let mut md = String::new();

        writeln!(
            md,
            "## [`{commit}`](https://github.com/trifectatechfoundation/zlib-rs/commit/{commit}) \
            (on {cpu})",
            commit = self.commit_hash,
            cpu = self.cpu_model
        )
        .unwrap();
        writeln!(md, "").unwrap();

        for bench in &self.results {
            writeln!(md, "### `{}`", bench.cmd.join(" ")).unwrap();
            writeln!(md, "").unwrap();
            writeln!(md, "|metric|value|").unwrap();
            writeln!(md, "|------|-----|").unwrap();
            for (name, data) in bench.counters.iter() {
                writeln!(md, "|{name}|`{}` {}|", data.value, data.unit).unwrap();
            }
            writeln!(md, "").unwrap();
        }

        md
    }
}

fn get_cpu_model() -> String {
    if !cfg!(target_os = "linux") {
        return "<unknown>".to_owned();
    }

    serde_json::from_slice::<serde_json::Value>(
        &Command::new("lscpu").arg("-J").output().unwrap().stdout,
    )
    .unwrap()["lscpu"]
    .as_array()
    .unwrap()
    .iter()
    .find(|entry| entry["field"] == "Model name:")
    .unwrap()["data"]
    .as_str()
    .unwrap()
    .to_owned()
}

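/// Use `perf stat` hardware counters on Linux; fall back to getrusage CPU time
/// on other platforms (e.g. the macOS arm64 runner).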
fn bench_single_cmd(cmd: Vec<String>) -> SingleBench {
    if cfg!(target_os = "linux") {
        bench_single_cmd_perf(cmd)
    } else {
        bench_single_cmd_getrusage(cmd)
    }
}

fn bench_single_cmd_perf(cmd: Vec<String>) -> SingleBench {
    let mut perf_stat_cmd = Command::new("perf");
    perf_stat_cmd
        .arg("stat")
        .arg("-j")
        .arg("-e")
        .arg("task-clock,cycles,instructions")
        .arg("--repeat")
        .arg("1") // FIXME 20
        .arg("--");
    perf_stat_cmd.args(&cmd);

    let output = perf_stat_cmd.output().unwrap();
    assert!(
        output.status.success(),
        "`{:?}` failed with {:?}:=== stdout ===\n{}\n\n=== stderr ===\n{}",
        perf_stat_cmd,
        output.status,
        String::from_utf8_lossy(&output.stdout),
        String::from_utf8_lossy(&output.stderr),
    );

    let counters = String::from_utf8(output.stderr)
        .unwrap()
        .lines()
        .map(|line| serde_json::from_str::<PerfData>(line).unwrap())
        .map(|counter| {
            (
                counter.event,
                BenchCounter {
                    value: counter.counter_value,
                    unit: counter.unit,
                },
            )
        })
        .collect::<BTreeMap<_, _>>();

    SingleBench { cmd, counters }
}

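// RUSAGE_CHILDREN accounts for all waited-for children, so the delta taken
// around `output()` below is the user CPU time of just the benchmarked command
// (assuming no other child processes run in between).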
fn bench_single_cmd_getrusage(cmd: Vec<String>) -> SingleBench {
    use std::mem;
    use std::time::Duration;

    fn get_cpu_times() -> Duration {
        use libc::{getrusage, rusage, RUSAGE_CHILDREN};

        let result: rusage = unsafe {
            let mut buf = mem::zeroed();
            let success = getrusage(RUSAGE_CHILDREN, &mut buf);
            assert_eq!(0, success);
            buf
        };

        Duration::new(
            result.ru_utime.tv_sec as _,
            (result.ru_utime.tv_usec * 1000) as _,
        )
    }

    let mut bench_cmd = Command::new(cmd.get(0).unwrap());
    bench_cmd.args(&cmd[1..]);

    let start_cpu = get_cpu_times();
    let output = bench_cmd.output().unwrap();
    let user_time = get_cpu_times() - start_cpu;
    assert!(
        output.status.success(),
        "`{:?}` failed with {:?}:=== stdout ===\n{}\n\n=== stderr ===\n{}",
        bench_cmd,
        output.status,
        String::from_utf8_lossy(&output.stdout),
        String::from_utf8_lossy(&output.stderr),
    );

    SingleBench {
        cmd,
        counters: BTreeMap::from_iter([(
            "user-time".to_owned(),
            BenchCounter {
                value: format!("{:.06}", user_time.as_secs_f64() * 1000.0),
                unit: "msec".to_owned(),
            },
        )]),
    }
}

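// Usage sketch (assuming the example binaries are already built), mirroring the workflow:
//   benchmarker "blogpost-compress 9 rs silesia-small.tar" "blogpost-compress 9 ng silesia-small.tar" > bench_results.json
// Each positional argument is a whitespace-separated command line to benchmark.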
fn main() {
    let mut bench_data = BenchData {
        commit_hash: env::var("GITHUB_SHA").unwrap_or_default(),
        timestamp: SystemTime::now(),

        arch: env::var("RUNNER_ARCH").unwrap_or_default(),
        os: env::var("RUNNER_OS").unwrap_or_default(),
        runner: env::var("RUNNER_NAME").unwrap_or_else(|_| "<local bench>".to_owned()),
        cpu_model: get_cpu_model(),

        results: vec![],
    };

    for cmd in env::args().skip(1) {
        bench_data.results.push(bench_single_cmd(
            cmd.split(" ").map(|arg| arg.to_owned()).collect(),
        ));
    }

    println!("{}", serde_json::to_string(&bench_data).unwrap());

    eprintln!("{}", bench_data.render_markdown());
    if let Ok(path) = env::var("GITHUB_STEP_SUMMARY") {
        fs::write(path, bench_data.render_markdown()).unwrap();
    }
}
