class TensorStream::Train::AdadeltaOptimizer

High-level implementation of the Adadelta algorithm (Zeiler, 2012). Adadelta maintains decaying averages of squared gradients and squared parameter updates for each variable, so per-parameter step sizes adapt automatically during training.

Attributes

learning_rate[RW]

The learning rate used to scale the Adadelta update.

Public Class Methods

new(learning_rate = 0.001, rho = 0.95, epsilon = 1e-8, use_locking: false, name: "Adadelta")

Constructs a new Adadelta optimizer. learning_rate is the base step size, rho the decay rate for the running averages of squared gradients and squared updates, epsilon a small constant added for numerical stability, use_locking toggles locked update operations, and name prefixes the operations the optimizer creates.

Calls superclass method
# File lib/tensor_stream/train/adadelta_optimizer.rb, line 9
def initialize(learning_rate = 0.001, rho = 0.95, epsilon = 1e-8,
  use_locking: false, name: "Adadelta")
  @learning_rate = learning_rate
  @rho = rho
  @epsilon = epsilon

  # Tensor versions of the constructor arguments, created in prepare().
  @learning_rate_tensor = nil
  @rho_t = nil
  @epsilon_t = nil
  super(name: name, use_locking: use_locking)
end
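
A minimal usage sketch (this assumes TensorStream's TensorFlow-style session API and that minimize is inherited from the base optimizer; the variable and loss below are purely illustrative):

require "tensor_stream"

ts = TensorStream

x = ts.variable(1.0, name: "x")
loss = (x - 5.0) * (x - 5.0) # toy quadratic, minimized at x = 5

# Adadelta's default learning_rate of 0.001 converges very slowly on
# toy problems, so a larger value is used here for illustration.
optimizer = TensorStream::Train::AdadeltaOptimizer.new(1.0, 0.95)
train_op = optimizer.minimize(loss)

sess = ts.session
sess.run(ts.global_variables_initializer)
200.times { sess.run(train_op) }
puts sess.run(x) # approaches 5.0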

Protected Instance Methods

apply_dense(grad, var)

Builds the op that applies a single Adadelta update to the dense variable var from its gradient grad, using the per-variable slots created in create_slots and the tensors built in prepare.
# File lib/tensor_stream/train/adadelta_optimizer.rb, line 37
def apply_dense(grad, var)
  accum = get_slot(var, "accum")               # running average of squared gradients
  accum_update = get_slot(var, "accum_update") # running average of squared updates

  # Emit the :apply_adadelta op, casting the scalar hyperparameter
  # tensors (built in prepare) to the variable's data type.
  _op(:apply_adadelta,
    var,
    accum,
    accum_update,
    TensorStream.cast(@learning_rate_tensor, var.data_type),
    TensorStream.cast(@rho_t, var.data_type),
    TensorStream.cast(@epsilon_t, var.data_type),
    grad,
    use_locking: @use_locking)
end
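
For reference, the update an :apply_adadelta op is expected to compute, in the standard Adadelta formulation (the accum slot holds E[g^2], accum_update holds E[Δx^2], η is learning_rate, ρ is rho, ε is epsilon; this is the textbook rule, not a quote of the op's kernel):

\begin{aligned}
E[g^2]_t &= \rho\, E[g^2]_{t-1} + (1 - \rho)\, g_t^2 \\
\Delta x_t &= -\frac{\sqrt{E[\Delta x^2]_{t-1} + \epsilon}}{\sqrt{E[g^2]_t + \epsilon}}\, g_t \\
E[\Delta x^2]_t &= \rho\, E[\Delta x^2]_{t-1} + (1 - \rho)\, (\Delta x_t)^2 \\
x_{t+1} &= x_t + \eta\, \Delta x_t
\end{aligned}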
create_slots(var_list)

Creates the zero-initialized "accum" and "accum_update" slot variables for every variable in var_list.
# File lib/tensor_stream/train/adadelta_optimizer.rb, line 24
def create_slots(var_list)
  var_list.each do |v|
    # One pair of zero-initialized accumulators per trainable variable:
    # "accum" tracks the squared-gradient average, "accum_update" the
    # squared-update average.
    zeros_slot(v, "accum", @name)
    zeros_slot(v, "accum_update", @name)
  end
end
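
Illustrative only: the slots created here are the same per-variable accumulators that apply_dense fetches back by name. A hypothetical round trip (create_slots and get_slot are protected, hence send; the variable w is made up for the example):

w = TensorStream.variable(TensorStream.zeros([3]), name: "w")
opt = TensorStream::Train::AdadeltaOptimizer.new
opt.send(:create_slots, [w])
accum = opt.send(:get_slot, w, "accum") # zero-initialized, same shape as w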
prepare()

Converts the scalar constructor arguments into graph tensors before updates are applied.
# File lib/tensor_stream/train/adadelta_optimizer.rb, line 31
def prepare
  # Lift the Ruby scalars into tensors so apply_dense can cast and
  # feed them to the :apply_adadelta op.
  @learning_rate_tensor = convert_to_tensor(@learning_rate, name: "lr")
  @rho_t = convert_to_tensor(@rho, name: "rho")
  @epsilon_t = convert_to_tensor(@epsilon, name: "epsilon")
end
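
How the three hooks fit together, as a sketch of the assumed call order in the base optimizer's gradient-application path (modeled on the TensorFlow optimizer pattern; not the gem's actual source):

# Assumed control flow, for orientation only:
def apply_gradients(grads_and_vars)
  var_list = grads_and_vars.map { |_grad, var| var }
  create_slots(var_list)    # 1. allocate "accum" / "accum_update" per variable
  prepare                   # 2. lift scalar hyperparameters into tensors
  grads_and_vars.map do |grad, var|
    apply_dense(grad, var)  # 3. emit one :apply_adadelta op per variable
  end
end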