
Commit

Add benchmark for Adam
asi1024 committed Oct 29, 2018
1 parent f90afa2 commit 10d246d
Showing 2 changed files with 77 additions and 0 deletions.
54 changes: 54 additions & 0 deletions benchmarks/optimizers/__init__.py
@@ -0,0 +1,54 @@
import six

import chainer

from benchmarks import BenchmarkBase


class Link(chainer.Link):
    def __init__(self, param):
        super(Link, self).__init__()
        with self.init_scope():
            self.p = chainer.Parameter(param)

    def __call__(self, x):
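        # Elementwise scaling by the learned parameter: a deliberately minimal
        # model, so the measured time is dominated by the optimizer update.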
        return x * self.p


class OptimizerBenchmark(BenchmarkBase):

"""The base class for benchmark of optimizers."""

    # Call `time_*` methods only once, as `backward()` has a side effect.
    number = 1

    # Repeat the test 10 times instead of 3 (`timeit.default_repeat`).
    repeat = 10

    def setup_benchmark(self, optimizer, batch_size, unit_num, dtype):
        """Performs setup for an optimizer benchmark.

        Call this from the `setup` method of your benchmark class.
        Note that this function performs a forward computation.
        """

        xp = self.xp
        self.optimizer = optimizer

        x = xp.random.uniform(-1, 1, (batch_size, unit_num)).astype(dtype)
        param = xp.random.uniform(-1, 1, unit_num).astype(dtype)
        model = Link(param)

        x = chainer.Variable(x)
        y = model(x)
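        # `y` is not a scalar, so its gradient must be set before `backward()`
        # can run; `zerograd()` fills it with zeros, which is enough to create
        # the parameter gradient that `optimizer.update()` consumes.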
        y.zerograd()
        y.backward()
        optimizer.setup(model)

    def update(self, n_times):
        """Runs `optimizer.update()` n_times times."""

        optimizer = self.optimizer

        for _ in six.moves.range(n_times):
            optimizer.update()
23 changes: 23 additions & 0 deletions benchmarks/optimizers/adam.py
@@ -0,0 +1,23 @@
import numpy

from chainer import optimizers

from benchmarks.optimizers import OptimizerBenchmark
from benchmarks.utils import backends
from benchmarks.utils import parameterize


@backends('gpu', 'cpu')
@parameterize(
    [('dtype', [numpy.float32, numpy.float64]),
     ('amsgrad', [True, False])])
class Adam(OptimizerBenchmark):
    def setup(self, dtype, amsgrad):
        unit_num = 100000
        batch_size = 32
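        # The positional 0.05 below is `alpha`, the learning-rate parameter
        # of chainer.optimizers.Adam.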
        optimizer = optimizers.Adam(0.05, amsgrad=amsgrad)

        self.setup_benchmark(optimizer, batch_size, unit_num, dtype)

    def time_update(self, dtype, amsgrad):
        self.update(1000)
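
For reference, a minimal NumPy sketch of the update rule this benchmark times (illustrative only, not Chainer's implementation; `adam_step` and its defaults are assumptions for exposition, with `alpha=0.05` mirroring the benchmark):

import numpy as np

def adam_step(p, g, state, t, alpha=0.05, beta1=0.9, beta2=0.999,
              eps=1e-8, amsgrad=False):
    """One Adam/AMSGrad step on parameter array `p` with gradient `g`."""
    m, v, vhat = state
    m = beta1 * m + (1 - beta1) * g        # first-moment estimate
    v = beta2 * v + (1 - beta2) * g * g    # second-moment estimate
    m_hat = m / (1 - beta1 ** t)           # bias correction
    v_hat = v / (1 - beta2 ** t)
    if amsgrad:
        vhat = np.maximum(vhat, v_hat)     # AMSGrad: monotone running maximum
        denom = np.sqrt(vhat) + eps
    else:
        denom = np.sqrt(v_hat) + eps
    return p - alpha * m_hat / denom, (m, v, vhat)

The `number`/`repeat` attributes and the `time_*` method prefix follow airspeed velocity (asv) conventions; assuming this repository is an asv suite, the new benchmark could be selected with something like `asv run --bench optimizers.adam`.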
