Skip to content
Prev Previous commit
Next Next commit
Add microbenchmarks
  • Loading branch information
orf committed Dec 13, 2020
commit dde8af14f4912d3a96f941b75e8a48ec9e0de6b5
27 changes: 27 additions & 0 deletions benches/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,33 @@ in two ways:
1. The time to parse the file to AST
2. The time it takes to execute the file

### Adding a microbenchmark

Microbenchmarks are small snippets of code added under the `microbenchmarks/` directory. A microbenchmark file has
two sections:
1. Optional setup code
2. The code to be benchmarked

These two sections are delimited by `# ---`. For example:

```python
a_list = [1,2,3]

# ---

len(a_list)
```

Only `len(a_list)` will be timed. Setup or benchmarked code can optionally reference a variable called `ITERATIONS`. If
present, the benchmark code will be invoked 5 times with `ITERATIONS` set to a value between 100 and 1,000. For
example:

```python
obj = [i for i in range(ITERATIONS)]
```

`ITERATIONS` can appear in both the setup code and the benchmark code.

## macOS setup

On macOS you will need to add the following to a `.cargo/config` file:
Expand Down
177 changes: 177 additions & 0 deletions benches/microbenchmarks.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,177 @@
use cpython::Python;
use criterion::measurement::WallTime;
use criterion::{
criterion_group, criterion_main, BatchSize, BenchmarkGroup, BenchmarkId, Criterion, Throughput,
};
use rustpython_compiler::Mode;
use rustpython_vm::pyobject::ItemProtocol;
use rustpython_vm::pyobject::PyResult;
use rustpython_vm::Interpreter;
use std::path::{Path, PathBuf};
use std::{fs, io};

/// A single microbenchmark loaded from a file under `benches/microbenchmarks/`.
pub struct MicroBenchmark {
    // File name of the benchmark; used as the criterion benchmark id.
    name: String,
    // Source executed before (and outside of) the timed section; may be empty.
    setup: String,
    // Source executed inside the timed section.
    code: String,
    // True when the file mentions ITERATIONS: the benchmark is then run once
    // per iteration count instead of once.
    iterate: bool,
}
/// Benchmark `bench` on the CPython interpreter, registering the results
/// under the "cpython" id inside `group`.
///
/// When `bench.iterate` is set, the benchmark is run once per iteration
/// count (100, 300, 500, 700, 900) with `ITERATIONS` defined; otherwise it
/// is run once without it.
fn bench_cpython_code(group: &mut BenchmarkGroup<WallTime>, bench: &MicroBenchmark) {
    let gil = cpython::Python::acquire_gil();
    let python = gil.python();

    // Executes the prepared benchmark code; any Python error aborts the run.
    let bench_func = |(python, code): (Python, String)| {
        let res: cpython::PyResult<()> = python.run(&code, None, None);
        if let Err(e) = res {
            e.print(python);
            panic!("Error running microbenchmark")
        }
    };

    // Runs the setup code (outside the timed section) and returns the code to
    // benchmark. We can't easily modify the locals when running cPython, so
    // ITERATIONS is injected by prepending an assignment to the source. It is
    // prepended to BOTH sections, because the README allows ITERATIONS to
    // appear in the setup code as well as the benchmarked code.
    let bench_setup = |iterations| {
        let (setup, code) = if let Some(idx) = iterations {
            (
                format!("ITERATIONS = {}\n{}", idx, bench.setup),
                format!("ITERATIONS = {}\n{}", idx, bench.code),
            )
        } else {
            (bench.setup.clone(), bench.code.clone())
        };

        let res: cpython::PyResult<()> = python.run(&setup, None, None);
        if let Err(e) = res {
            e.print(python);
            panic!("Error running microbenchmark setup code")
        }
        (python, code)
    };

    if bench.iterate {
        for idx in (100..=1_000).step_by(200) {
            group.throughput(Throughput::Elements(idx as u64));
            group.bench_with_input(BenchmarkId::new("cpython", &bench.name), &idx, |b, idx| {
                b.iter_batched(
                    || bench_setup(Some(*idx)),
                    bench_func,
                    BatchSize::PerIteration,
                );
            });
        }
    } else {
        group.bench_function(BenchmarkId::new("cpython", &bench.name), move |b| {
            b.iter_batched(|| bench_setup(None), bench_func, BatchSize::PerIteration);
        });
    }
}

/// Benchmark `bench` on the RustPython interpreter, registering the results
/// under the "rustpython" id inside `group`.
fn bench_rustpy_code(group: &mut BenchmarkGroup<WallTime>, bench: &MicroBenchmark) {
    Interpreter::default().enter(|vm| {
        // Compile both sections once up front, so only execution is timed.
        let setup_code = vm
            .compile(&bench.setup, Mode::Exec, bench.name.to_owned())
            .expect("Error compiling setup code");
        let bench_code = vm
            .compile(&bench.code, Mode::Exec, bench.name.to_owned())
            .expect("Error compiling bench code");

        // Executes the benchmark code in a scope prepared by `bench_setup`;
        // an uncaught Python exception aborts the benchmark.
        let bench_func = |(scope, bench_code)| {
            let res: PyResult = vm.run_code_obj(bench_code, scope);
            vm.unwrap_pyresult(res);
        };

        // Builds a fresh scope for each batch (so state can't leak between
        // runs), optionally injects ITERATIONS as a local, then runs the
        // setup code outside of the timed section.
        let bench_setup = |iterations| {
            let scope = vm.new_scope_with_builtins();
            if let Some(idx) = iterations {
                scope
                    .locals
                    .set_item(vm.ctx.new_str("ITERATIONS"), vm.ctx.new_int(idx), vm)
                    .expect("Error adding ITERATIONS local variable");
            }
            vm.run_code_obj(setup_code.clone(), scope.clone())
                .expect("Error running benchmark setup code");
            (scope, bench_code.clone())
        };

        if bench.iterate {
            // 100, 300, 500, 700, 900 elements — matches the README contract.
            for idx in (100..=1_000).step_by(200) {
                group.throughput(Throughput::Elements(idx as u64));
                group.bench_with_input(
                    BenchmarkId::new("rustpython", &bench.name),
                    &idx,
                    |b, idx| {
                        b.iter_batched(
                            || bench_setup(Some(*idx)),
                            bench_func,
                            BatchSize::PerIteration,
                        );
                    },
                );
            }
        } else {
            group.bench_function(BenchmarkId::new("rustpython", &bench.name), move |b| {
                b.iter_batched(|| bench_setup(None), bench_func, BatchSize::PerIteration);
            });
        }
    })
}

/// Run one microbenchmark against both interpreters, grouping the results
/// under the shared "microbenchmarks" criterion group.
pub fn run_micro_benchmark(c: &mut Criterion, benchmark: MicroBenchmark) {
    let mut bench_group = c.benchmark_group("microbenchmarks");
    bench_cpython_code(&mut bench_group, &benchmark);
    bench_rustpy_code(&mut bench_group, &benchmark);
    bench_group.finish();
}

/// Discover every file under `benches/microbenchmarks/` and benchmark each
/// one on both CPython and RustPython.
///
/// # Panics
/// Panics when the directory or one of its files cannot be read, or when a
/// file name is not valid UTF-8.
pub fn criterion_benchmark(c: &mut Criterion) {
    let benchmark_dir = Path::new("./benches/microbenchmarks/");
    // Collect the directory listing once so any I/O error surfaces before
    // benchmarking starts; no further intermediate Vecs are needed.
    let entries: Vec<fs::DirEntry> = benchmark_dir
        .read_dir()
        .expect("unable to read the microbenchmarks directory")
        .collect::<io::Result<_>>()
        .expect("unable to read a microbenchmarks directory entry");

    for entry in entries {
        run_micro_benchmark(c, parse_micro_benchmark(entry.path()));
    }
}

/// Parse one microbenchmark file into its setup and benchmark sections.
///
/// The sections are delimited by the first `# ---` marker; when the marker is
/// absent the whole file is benchmark code with empty setup. A file that
/// mentions `ITERATIONS` anywhere is flagged to run once per iteration count.
fn parse_micro_benchmark(path: PathBuf) -> MicroBenchmark {
    let name = path
        .file_name()
        .expect("benchmark path has no file name")
        .to_str()
        .expect("benchmark file name is not valid UTF-8")
        .to_owned();
    let contents = fs::read_to_string(&path).expect("unable to read benchmark file");
    let iterate = contents.contains("ITERATIONS");

    // Single scan: the presence of a second splitn part tells us whether the
    // delimiter exists, so no separate `contains` check is required.
    let mut parts = contents.splitn(2, "# ---");
    let first = parts.next().unwrap_or_default().to_string();
    let (setup, code) = match parts.next() {
        Some(rest) => (first, rest.to_string()),
        None => (String::new(), first),
    };

    MicroBenchmark {
        name,
        setup,
        code,
        iterate,
    }
}

// Register the discovery function as the criterion entry point and generate
// the benchmark harness `main`.
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
3 changes: 3 additions & 0 deletions benches/microbenchmarks/addition.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Time integer addition: accumulate the loop counter ITERATIONS times.
total = 0
for i in range(ITERATIONS):
    total += i
7 changes: 7 additions & 0 deletions benches/microbenchmarks/call_kwargs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Setup: a trivial two-argument function (the sum is deliberately discarded).
def add(a, b):
    a + b


# ---

# Time a call that passes both arguments by keyword.
add(a=1, b=10)
7 changes: 7 additions & 0 deletions benches/microbenchmarks/call_simple.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Setup: a trivial two-argument function (the sum is deliberately discarded).
def add(a, b):
    a + b


# ---

# Time a plain positional call.
add(1, 2)
12 changes: 12 additions & 0 deletions benches/microbenchmarks/complex_class.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Time defining a class that carries a class attribute, a constructor, an
# instance method and a classmethod.
class Foo:
    ABC = 1

    def __init__(self):
        super().__init__()

    def bar(self):
        pass

    @classmethod
    def bar_2(cls):
        pass
1 change: 1 addition & 0 deletions benches/microbenchmarks/comprehension_dict.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Time building a dict of ITERATIONS identity pairs via a dict comprehension.
obj = {i: i for i in range(ITERATIONS)}
1 change: 1 addition & 0 deletions benches/microbenchmarks/comprehension_list.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Time building a list of ITERATIONS integers via a list comprehension.
obj = [i for i in range(ITERATIONS)]
1 change: 1 addition & 0 deletions benches/microbenchmarks/comprehension_set.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Time building a set of ITERATIONS integers via a set comprehension.
obj = {i for i in range(ITERATIONS)}
7 changes: 7 additions & 0 deletions benches/microbenchmarks/construct_object.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Setup: the simplest possible user-defined class.
class Foo:
    pass


# ---

# Time instantiating it.
Foo()
2 changes: 2 additions & 0 deletions benches/microbenchmarks/define_class.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Time executing an (empty) class statement.
class Foo:
    pass
2 changes: 2 additions & 0 deletions benches/microbenchmarks/define_function.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Time executing an (empty) def statement.
def function():
    pass
13 changes: 13 additions & 0 deletions benches/microbenchmarks/exception_context.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
from contextlib import contextmanager

# Setup: a context manager that swallows any RuntimeError raised in its body.
@contextmanager
def try_catch(*args, **kwargs):
    try:
        yield
    except RuntimeError:
        pass

# ---

# Time raising an exception that the surrounding context manager suppresses.
with try_catch():
    raise RuntimeError()
7 changes: 7 additions & 0 deletions benches/microbenchmarks/exception_nested.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Time raising, chaining ("raise ... from") and catching nested exceptions.
try:
    try:
        raise ValueError()
    except ValueError as e:
        raise RuntimeError() from e
except RuntimeError as e:
    pass
4 changes: 4 additions & 0 deletions benches/microbenchmarks/exception_simple.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Time raising and immediately catching a single exception.
try:
    raise RuntimeError()
except RuntimeError as e:
    pass
6 changes: 6 additions & 0 deletions benches/microbenchmarks/loop_append.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Setup: start from an empty list.
obj = []

# ---

# Time appending one integer per loop iteration.
for i in range(ITERATIONS):
    obj.append(i)
6 changes: 6 additions & 0 deletions benches/microbenchmarks/loop_string.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Setup: build a string of ITERATIONS characters.
string = "a" * ITERATIONS

# ---

# Time iterating over the string character by character.
for char in string:
    pass
pass