
Add benchmark for Adam
asi1024 committed Aug 28, 2018
1 parent f90afa2 commit 1395d69
Showing 2 changed files with 90 additions and 0 deletions.
67 changes: 67 additions & 0 deletions benchmarks/optimizers/__init__.py
@@ -0,0 +1,67 @@
import numpy
import six

import chainer
import chainer.functions as F
from chainer import initializers
import chainer.links as L

from benchmarks import BenchmarkBase


class LinearModel(object):

    def __init__(self, xp, optimizer, unit_num, dtype):
        self.dtype = dtype
        weight = initializers.HeNormal(1 / numpy.sqrt(2), dtype)
        bias = initializers.Constant(0, dtype)
        self.model = L.Linear(unit_num, 2, initialW=weight, initial_bias=bias)
        self.optimizer = optimizer
        optimizer.setup(self.model)

        if xp != numpy:
            self.model.to_gpu()


class OptimizerBenchmark(BenchmarkBase):

"""The base class for benchmark of optimizers."""

# Call `test_*` methods only once as `backward()` has a side-effect.
number = 1

# Repeat the test for 10 times instead of 3 (`timeit.default_repeat`).
repeat = 10

    def _make_dataset(self, batch_size, unit_num, dtype):
        xp = self.xp
        x = xp.random.uniform(-1, 1, (batch_size, unit_num)).astype(dtype)
        w = xp.random.uniform(-1, 1, (unit_num, 1)).astype(dtype)
        b = xp.random.uniform(-1, 1, (1, )).astype(dtype)
        a = (xp.dot(x, w) + b).reshape((batch_size, ))
        t = (a < 0).astype(numpy.int32)
        return chainer.Variable(x), chainer.Variable(t)

    def setup_benchmark(self, optimizer, batch_size, unit_num, dtype):
        """Performs setup of the benchmark for optimizers.

        Call this in the `setup` method of your benchmark class.
        Note that this function performs forward computation.
        """

        self.optimizer = optimizer

        model = LinearModel(self.xp, optimizer, unit_num, dtype)
        model.model.cleargrads()
        x, t = self._make_dataset(batch_size, unit_num, dtype)
        y = model.model(x)
        loss = F.softmax_cross_entropy(y, t)
        loss.backward()

    def update(self, n_times):
        """Runs optimizer.update()."""

        optimizer = self.optimizer

        for i in six.moves.range(n_times):
            optimizer.update()
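
For orientation, the workflow that `setup_benchmark` and `update` exercise corresponds roughly to the standalone Chainer snippet below. This is a minimal sketch and not part of the commit; the layer sizes, learning rate, and label generation are illustrative assumptions rather than values taken from the benchmark.

import numpy

import chainer.functions as F
import chainer.links as L
from chainer import optimizers

# Build a small linear classifier and attach the optimizer under test.
model = L.Linear(100, 2)
optimizer = optimizers.Adam(alpha=0.05)
optimizer.setup(model)

# One forward/backward pass populates the gradients
# (setup_benchmark does this exactly once).
x = numpy.random.uniform(-1, 1, (32, 100)).astype(numpy.float32)
t = numpy.random.randint(0, 2, size=32).astype(numpy.int32)
model.cleargrads()
loss = F.softmax_cross_entropy(model(x), t)
loss.backward()

# The timed region then applies the already-computed gradients repeatedly,
# which is what OptimizerBenchmark.update(n_times) measures.
for _ in range(1000):
    optimizer.update()
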
23 changes: 23 additions & 0 deletions benchmarks/optimizers/adam.py
@@ -0,0 +1,23 @@
import numpy

from chainer import optimizers

from benchmarks.optimizers import OptimizerBenchmark
from benchmarks.utils import backends
from benchmarks.utils import parameterize


@backends('gpu', 'cpu')
@parameterize(
    [('dtype', [numpy.float32, numpy.float64]),
     ('amsgrad', [True, False])])
class Adam(OptimizerBenchmark):
    def setup(self, dtype, amsgrad):
        unit_num = 100000
        batch_size = 32
        optimizer = optimizers.Adam(0.05, amsgrad=amsgrad)

        self.setup_benchmark(optimizer, batch_size, unit_num, dtype)

    def time_update(self, dtype, amsgrad):
        self.update(1000)
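
The same pattern should extend to other optimizers with only the constructor swapped out. The class below is a hypothetical sketch along those lines, not part of this commit, assuming `chainer.optimizers.MomentumSGD` with its default momentum.

import numpy

from chainer import optimizers

from benchmarks.optimizers import OptimizerBenchmark
from benchmarks.utils import backends
from benchmarks.utils import parameterize


@backends('gpu', 'cpu')
@parameterize(
    [('dtype', [numpy.float32, numpy.float64])])
class MomentumSGD(OptimizerBenchmark):
    def setup(self, dtype):
        unit_num = 100000
        batch_size = 32
        # The learning rate here is an arbitrary illustrative value.
        optimizer = optimizers.MomentumSGD(lr=0.01)

        self.setup_benchmark(optimizer, batch_size, unit_num, dtype)

    def time_update(self, dtype):
        self.update(1000)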
