class Chainer::Functions::Normalization::FixedBatchNormalization
Attributes
inv_var[R]

Reciprocal of the eps-adjusted variance, 1 / (var + eps), cached by forward for reuse in backward.
Public Class Methods
fixed_batch_normalization(x, gamma, beta, mean, var, eps: 2e-5)
  # File lib/chainer/functions/normalization/batch_normalization.rb, line 172
  def self.fixed_batch_normalization(x, gamma, beta, mean, var, eps: 2e-5)
    FixedBatchNormalization.new(eps: eps).apply([x, gamma, beta, mean, var]).first
  end
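A minimal usage sketch, assuming Numo-backed inputs are accepted directly (the shapes and values below are illustrative, not part of the API):

  require 'chainer'

  x     = Numo::SFloat.new(2, 3).rand   # batch of 2 samples, 3 channels (assumed shapes)
  gamma = Numo::SFloat.ones(3)          # per-channel scale
  beta  = Numo::SFloat.zeros(3)         # per-channel shift
  mean  = Numo::SFloat.zeros(3)         # fixed (e.g. population) mean
  var   = Numo::SFloat.ones(3)          # fixed (e.g. population) variance

  y = Chainer::Functions::Normalization::FixedBatchNormalization
        .fixed_batch_normalization(x, gamma, beta, mean, var)
  # y wraps gamma * (x - mean) / sqrt(var + eps) + beta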
new(eps: 2e-5)
  # File lib/chainer/functions/normalization/batch_normalization.rb, line 176
  def initialize(eps: 2e-5)
    @inv_std = nil
    @inv_var = nil
    @eps = eps
  end
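As the class method above shows, constructing the node directly and calling apply is equivalent; a sketch, reusing the inputs from the earlier example:

  bn = Chainer::Functions::Normalization::FixedBatchNormalization.new(eps: 1e-5)
  y  = bn.apply([x, gamma, beta, mean, var]).first   # same result as fixed_batch_normalization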
Public Instance Methods
backward(indexes, grad_outputs)
  # File lib/chainer/functions/normalization/batch_normalization.rb, line 209
  def backward(indexes, grad_outputs)
    x, gamma, mean, var = get_retained_inputs
    gy, = grad_outputs

    f = FixedBatchNormalizationGrad.new(@eps, @expander, @axis, @inv_std, @inv_var)
    f.(x, gamma, mean, var, gy)
  end
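backward is not usually called directly; it runs when gradients are propagated through a Chainer::Variable. A hedged sketch, assuming the gamma, beta, mean, and var arrays from the earlier example and the usual Variable API:

  x_v = Chainer::Variable.new(Numo::SFloat.new(2, 3).rand)
  y = Chainer::Functions::Normalization::FixedBatchNormalization
        .fixed_batch_normalization(x_v, gamma, beta, mean, var)
  y.grad = Numo::SFloat.ones(*y.shape)   # seed the output gradient
  y.backward
  x_v.grad                               # gradient w.r.t. x, computed via FixedBatchNormalizationGrad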
forward(inputs)
  # File lib/chainer/functions/normalization/batch_normalization.rb, line 182
  def forward(inputs)
    retain_inputs([0, 1, 3, 4])
    x, gamma, beta, mean, var = inputs
    xp = Chainer.get_array_module(x)

    # expander inserts singleton dimensions to gamma and beta so that they
    # can be broadcasted with x.
    head_ndim = gamma.ndim + 1
    # TODO: expander = (None, Ellipsis) + (None,) * (x.ndim - head_ndim)
    suffix = [1] * (x.ndim - head_ndim)
    expander = -> (arr) do
      shape = [1] + arr.shape + suffix
      arr.reshape(*shape)
    end
    @expander = expander
    @axis = [0] + (head_ndim...(x.ndim)).to_a

    gamma = expander.(gamma)
    beta = expander.(beta)
    var += @eps
    @inv_var = var.reciprocal
    @inv_std = xp::NMath.sqrt(@inv_var)

    y = apply_bn_fwd(xp, x, expander.(mean), expander.(@inv_std), gamma, beta)
    [y]
  end
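A quick illustration of what the expander computes (plain Ruby; the shapes are assumed for the example): for a 4-d input of shape [N, C, H, W] and a 1-d gamma of shape [C], the parameter is reshaped to [1, C, 1, 1] so it broadcasts against x, and @axis becomes the reduction axes [0, 2, 3].

  x_ndim      = 4                                  # e.g. x with shape [N, C, H, W]
  gamma_shape = [16]                               # per-channel parameter, shape [C]
  head_ndim   = gamma_shape.size + 1               # => 2
  suffix      = [1] * (x_ndim - head_ndim)         # => [1, 1]
  expanded    = [1] + gamma_shape + suffix         # => [1, 16, 1, 1]
  axis        = [0] + (head_ndim...x_ndim).to_a    # => [0, 2, 3]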