benchmark.py
from functools import partial

import torch
from torch.utils import benchmark

from gen_mod_pulses import make_mod_signal
from utils import compressor, freq_simple_compressor

# Shared compressor parameters. The attack/release parameters require
# gradients so the benchmark also exercises the backward pass.
th = torch.tensor(-10, dtype=torch.float32)
ratio = torch.tensor(4, dtype=torch.float32)
at = torch.tensor(0.02, dtype=torch.float32, requires_grad=True)
rt = torch.tensor(0.003, dtype=torch.float32, requires_grad=True)
make_up = torch.tensor(0, dtype=torch.float32)
rms_avg = torch.tensor(0.01, dtype=torch.float32)

# Sample rate and test-signal durations (in seconds).
sr = 44100
duration = [30, 60, 120]
# Simplified compressor variant (freq_simple_compressor): no separate release parameter.
simple_runner = partial(
    freq_simple_compressor,
    avg_coef=rms_avg,
    th=th,
    ratio=ratio,
    at=at,
    make_up=make_up,
    delay=0,
)

# Full compressor with separate attack and release coefficients.
runner = partial(
    compressor,
    avg_coef=rms_avg,
    th=th,
    ratio=ratio,
    at=at,
    rt=rt,
    make_up=make_up,
    delay=0,
)
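# A minimal sanity-check sketch (not part of the benchmark itself): the partials
# can be called directly on a (batch, samples) tensor. The 1-second signal length
# and the assumption that the result has the same shape as the input are
# illustrative only.
#
#   x = make_mod_signal(sr, sr, 10, shape="cos", exp=1.0)[None, :]
#   y = runner(x)            # compressed signal
#   y.sum().backward()       # gradients reach `at` and `rt`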
def main():
    results = []
    for d in duration:
        samples = int(d * sr)
        # Cosine-shaped modulation test signal with a leading batch dimension.
        test_signal = make_mod_signal(samples, sr, 10, shape="cos", exp=1.0)[None, :]
        test_signal.requires_grad_(True)
        sub_label = f"{d}s"

        # Time forward + backward for the original compressor.
        results.append(
            benchmark.Timer(
                stmt="y = runner(test_signal); loss = y.sum(); loss.backward()",
                setup="from __main__ import runner",
                globals={"test_signal": test_signal},
                sub_label=sub_label,
                description="original",
            ).blocked_autorange(min_run_time=1)
        )

        # Time forward + backward for the simplified compressor.
        results.append(
            benchmark.Timer(
                stmt="y = simple_runner(test_signal); loss = y.sum(); loss.backward()",
                setup="from __main__ import simple_runner",
                globals={"test_signal": test_signal},
                sub_label=sub_label,
                description="simple",
            ).blocked_autorange(min_run_time=1)
        )

    compare = benchmark.Compare(results)
    compare.print()


if __name__ == "__main__":
    main()
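# Optional: torch.utils.benchmark.Compare also provides trim_significant_figures()
# and colorize() to tidy the printed table. A sketch of how main() could end
# instead (not part of the original script):
#
#   compare = benchmark.Compare(results)
#   compare.trim_significant_figures()
#   compare.colorize()
#   compare.print()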