class DNN::Layers::Dropout
Attributes
dropout_ratio[RW]
use_scale[R]
Public Class Methods
new(dropout_ratio = 0.5, seed: rand(1 << 31), use_scale: true)
@param [Float] dropout_ratio Ratio of nodes to drop out during training.
@param [Integer] seed Seed of the random number generator used for masking.
@param [Boolean] use_scale Set to true to scale the output by (1 - dropout_ratio) at inference time.
Calls superclass method
DNN::Layers::Layer::new
# File lib/dnn/core/layers/basic_layers.rb, line 427
def initialize(dropout_ratio = 0.5, seed: rand(1 << 31), use_scale: true)
  super()
  @dropout_ratio = dropout_ratio
  @seed = seed
  @use_scale = use_scale
  @mask = nil
  @rnd = Random.new(@seed)
end
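A minimal construction sketch, assuming the gem is loaded with require "dnn"; the variable names are illustrative, and only the constructor signature and attributes documented above are taken from the source:

require "dnn"

# Drop roughly 30% of nodes during training; a fixed seed makes the masks reproducible.
dropout = DNN::Layers::Dropout.new(0.3, seed: 42, use_scale: true)
dropout.dropout_ratio  # => 0.3 (readable and writable, per the RW attribute)
dropout.use_scale      # => true (read-only attribute)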
Public Instance Methods
backward_node(dy)
# File lib/dnn/core/layers/basic_layers.rb, line 447
def backward_node(dy)
  dy * @mask
end
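During the backward pass, the mask saved by forward_node multiplies the incoming gradient dy, so nodes that were dropped in the forward pass receive a zero gradient.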
forward_node(x)
# File lib/dnn/core/layers/basic_layers.rb, line 436
def forward_node(x)
  if DNN.learning_phase
    Xumo::SFloat.srand(@rnd.rand(1 << 31))
    @mask = Xumo::SFloat.cast(Xumo::SFloat.new(*x.shape).rand >= @dropout_ratio)
    x = x * @mask
  elsif @use_scale
    x *= (1 - @dropout_ratio)
  end
  x
end
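The same masking and inference-time scaling, sketched in plain Ruby on an Array rather than the library's Xumo arrays (an illustration of the technique only, not the library's implementation):

# Illustrative-only restatement of the dropout forward pass above.
def dropout_forward(x, dropout_ratio, learning_phase:, use_scale: true, rng: Random.new)
  if learning_phase
    # Keep each element with probability (1 - dropout_ratio), zero the rest.
    mask = x.map { rng.rand >= dropout_ratio ? 1.0 : 0.0 }
    x.zip(mask).map { |v, m| v * m }
  elsif use_scale
    # At inference, scale down so the expected activation matches training.
    x.map { |v| v * (1 - dropout_ratio) }
  else
    x
  end
end

dropout_forward([1.0, 2.0, 3.0], 0.5, learning_phase: true, rng: Random.new(42))

Like the source above, this variant applies the (1 - dropout_ratio) scaling at inference time rather than rescaling the kept activations during training.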
load_hash(hash)
# File lib/dnn/core/layers/basic_layers.rb, line 455
def load_hash(hash)
  initialize(hash[:dropout_ratio], seed: hash[:seed], use_scale: hash[:use_scale])
end
to_hash()
Calls superclass method
DNN::Layers::Layer#to_hash
# File lib/dnn/core/layers/basic_layers.rb, line 451
def to_hash
  super(dropout_ratio: @dropout_ratio, seed: @seed, use_scale: @use_scale)
end
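A hedged round-trip sketch for the two serialization methods above, assuming to_hash returns a plain Hash containing at least the keys passed to super, and that load_hash may be driven directly on an allocated instance (it simply re-runs initialize with values taken from the hash):

layer = DNN::Layers::Dropout.new(0.25, seed: 7)
config = layer.to_hash            # includes :dropout_ratio, :seed, :use_scale

restored = DNN::Layers::Dropout.allocate
restored.load_hash(config)        # re-initializes the layer from the stored values
restored.dropout_ratio            # => 0.25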