class Chainer::Optimizers::Adam
Public Class Methods
new(alpha: nil, beta1: nil, beta2: nil, eps: nil)
Calls superclass method Chainer::GradientMethod::new
# File lib/chainer/optimizers/adam.rb, line 44
def initialize(alpha: nil, beta1: nil, beta2: nil, eps: nil)
  super()
  @hyperparam.instance_variable_set('@alpha', alpha || 0.001)
  @hyperparam.instance_variable_set('@beta1', beta1 || 0.9)
  @hyperparam.instance_variable_set('@beta2', beta2 || 0.999)
  @hyperparam.instance_variable_set('@eps', eps || 1e-8)
end
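Any keyword argument left as nil falls back to the standard Adam defaults (alpha: 0.001, beta1: 0.9, beta2: 0.999, eps: 1e-8). A minimal usage sketch, assuming the usual Chainer::Optimizer#setup workflow and a hypothetical MyChain link defined elsewhere:

  require 'chainer'

  # MyChain is a hypothetical Chainer::Chain with trainable parameters.
  model = MyChain.new

  # nil (or omitted) hyperparameters use the defaults shown above.
  optimizer = Chainer::Optimizers::Adam.new(alpha: 0.001, beta1: 0.9)
  optimizer.setup(model)   # attach the optimizer to the model's parameters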
Public Instance Methods
create_update_rule()
# File lib/chainer/optimizers/adam.rb, line 52
def create_update_rule
  AdamRule.new(parent_hyperparam: @hyperparam)
end
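create_update_rule is the factory that GradientMethod is expected to call when it attaches an update rule to each parameter; every AdamRule it returns is bound to this optimizer's hyperparameters. A small sketch of that contract (calling the factory directly and the AdamRule namespace are illustrative assumptions, not the normal calling path):

  optimizer = Chainer::Optimizers::Adam.new

  # Each call builds a fresh rule sharing the same hyperparameter object,
  # so per-parameter state stays separate while the settings stay shared.
  rule = optimizer.create_update_rule
  rule.class   # => Chainer::Optimizers::AdamRule (assumed namespace)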
lr()
# File lib/chainer/optimizers/adam.rb, line 56
def lr
  fix1 = 1.0 - (@hyperparam.beta1 ** @t)
  fix2 = 1.0 - (@hyperparam.beta2 ** @t)
  @hyperparam.alpha * Math.sqrt(fix2) / fix1
end
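lr returns Adam's bias-corrected step size at update step @t: alpha * Math.sqrt(1 - beta2 ** t) / (1 - beta1 ** t). A worked check with the default hyperparameters at t = 1 (the values below are plugged in by hand for illustration):

  alpha, beta1, beta2 = 0.001, 0.9, 0.999
  t = 1

  fix1 = 1.0 - beta1 ** t            # ~0.1
  fix2 = 1.0 - beta2 ** t            # ~0.001
  alpha * Math.sqrt(fix2) / fix1     # => ~3.16e-4, a smaller effective step early on

As @t grows, both correction terms approach 1 and the effective step size converges to alpha.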