class DNN::Layers::LSTMDense
Attributes
trainable[RW]

When false, backward skips accumulating gradients into weight, recurrent_weight, and bias.
Public Class Methods
new(weight, recurrent_weight, bias)
# File lib/dnn/core/layers/rnn_layers.rb, line 234
def initialize(weight, recurrent_weight, bias)
  @weight = weight
  @recurrent_weight = recurrent_weight
  @bias = bias
  # One activation object per gate, so each can cache its own
  # forward output for use in backward.
  @tanh = Layers::Tanh.new
  @g_tanh = Layers::Tanh.new
  @forget_sigmoid = Layers::Sigmoid.new
  @in_sigmoid = Layers::Sigmoid.new
  @out_sigmoid = Layers::Sigmoid.new
  @trainable = true
end
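The constructor takes pre-built parameter objects rather than creating them itself; forward and backward read their data attributes and accumulate into their grad attributes. A minimal construction sketch, with hypothetical sizes, assuming DNN::Param is the parameter class behind those .data / .grad calls and that its accessors are writable:

  in_dim = 8      # hypothetical width of the input vectors
  num_units = 16  # hypothetical number of LSTM units

  make_param = lambda do |data|
    p = DNN::Param.new
    p.data = data
    p.grad = 0    # start at 0 so backward's += accumulation works
    p
  end

  # The four gate matrices sit side by side, so the trailing axis is
  # num_units * 4 (gate order: forget, g, in, out; see forward below).
  weight = make_param.call(Xumo::SFloat.new(in_dim, num_units * 4).rand_norm)
  recurrent_weight = make_param.call(Xumo::SFloat.new(num_units, num_units * 4).rand_norm)
  bias = make_param.call(Xumo::SFloat.zeros(num_units * 4))

  dense = DNN::Layers::LSTMDense.new(weight, recurrent_weight, bias)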
Public Instance Methods
backward(dh2, dc2)
# File lib/dnn/core/layers/rnn_layers.rb, line 265
def backward(dh2, dc2)
  # Gradient reaching the output gate: dh2 scaled by the cached tanh(c2).
  dh2_tmp = @tanh_c2 * dh2
  # Total cell-state gradient: the h2 path back through tanh, plus the
  # gradient arriving directly on c2.
  dc2_tmp = @tanh.backward_node(@out * dh2) + dc2
  # Pre-activation gradients for each gate.
  dout = @out_sigmoid.backward_node(dh2_tmp)
  din = @in_sigmoid.backward_node(dc2_tmp * @g)
  dg = @g_tanh.backward_node(dc2_tmp * @in)
  dforget = @forget_sigmoid.backward_node(dc2_tmp * @c)
  # Stack in the same forget/g/in/out order that forward sliced from.
  da = Xumo::SFloat.hstack([dforget, dg, din, dout])
  if @trainable
    @weight.grad += @x.transpose.dot(da)
    @recurrent_weight.grad += @h.transpose.dot(da)
    @bias.grad += da.sum(0) if @bias
  end
  dx = da.dot(@weight.data.transpose)
  dh = da.dot(@recurrent_weight.data.transpose)
  # Cell gradient flowing to the previous time step, gated by forget.
  dc = dc2_tmp * @forget
  [dx, dh, dc]
end
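backward undoes one time step: it routes the incoming hidden-state gradient dh2 through the output gate and the cached tanh(c2), combines it with the incoming cell gradient dc2, then pushes the result through each gate's activation and stacks the four pre-activation gradients in the same order forward used to slice a. A round-trip sketch, continuing the hypothetical construction above and the x/h/c setup shown under forward below (forward must run first so the cached activations exist):

  h2, c2 = dense.forward(x, h, c)        # cache @x, @h, @c and gate outputs
  dh2 = Xumo::SFloat.ones(*h2.shape)     # pretend upstream gradient w.r.t. h2
  dc2 = Xumo::SFloat.zeros(*c2.shape)    # no direct gradient w.r.t. c2
  dx, dh, dc = dense.backward(dh2, dc2)  # dh and dc propagate to time step t - 1
  # weight.grad, recurrent_weight.grad, and bias.grad now hold accumulated gradients.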
forward(x, h, c)
# File lib/dnn/core/layers/rnn_layers.rb, line 246
def forward(x, h, c)
  # Cache the inputs; backward needs them for the weight gradients.
  @x = x
  @h = h
  @c = c
  num_units = h.shape[1]
  # All four gates are computed in one matrix product each, then sliced
  # out of the concatenated pre-activation a.
  a = x.dot(@weight.data) + h.dot(@recurrent_weight.data)
  a += @bias.data if @bias
  @forget = @forget_sigmoid.forward_node(a[true, 0...num_units])
  @g = @g_tanh.forward_node(a[true, num_units...(num_units * 2)])
  @in = @in_sigmoid.forward_node(a[true, (num_units * 2)...(num_units * 3)])
  @out = @out_sigmoid.forward_node(a[true, (num_units * 3)..-1])
  # Standard LSTM state update: c2 = forget * c + g * in, h2 = out * tanh(c2).
  c2 = @forget * c + @g * @in
  @tanh_c2 = @tanh.forward_node(c2)
  h2 = @out * @tanh_c2
  [h2, c2]
end
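A single time-step usage sketch, with hypothetical shapes and continuing the construction example above; callers typically start from zero state and loop over the sequence, feeding h2/c2 back in as the next step's h/c:

  batch = 2
  x = Xumo::SFloat.new(batch, in_dim).rand_norm  # input at time t
  h = Xumo::SFloat.zeros(batch, num_units)       # previous hidden state
  c = Xumo::SFloat.zeros(batch, num_units)       # previous cell state

  h2, c2 = dense.forward(x, h, c)                # each is [batch, num_units]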