class DNN::Param
Attributes
data[RW]
  The parameter's value, typically a Xumo::SFloat array.
grad[RW]
  The gradient accumulated for this parameter by backward.
trainable[RW]
  When false, backward discards incoming gradients and resets grad to a zero array. Defaults to true.
Public Class Methods
new(data = nil, grad = nil)
# File lib/dnn/core/param.rb, line 7
def initialize(data = nil, grad = nil)
  @data = data
  @grad = grad
  @trainable = true
end
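A minimal usage sketch (assuming Xumo::SFloat, the numeric array type used throughout ruby-dnn):

  weight = DNN::Param.new(Xumo::SFloat.new(2, 3).rand)
  weight.trainable   # => true
  weight.grad        # => nil until backward accumulates a gradient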
Public Instance Methods
*(other)
# File lib/dnn/core/param.rb, line 50
def *(other)
  other = Tensor.convert(other) unless other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
  Layers::Mul.(self, other)
end
**(index)
# File lib/dnn/core/param.rb, line 60
def **(index)
  Layers::Pow.new(index).(self)
end
+(other)
# File lib/dnn/core/param.rb, line 40
def +(other)
  other = Tensor.convert(other) unless other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
  Layers::Add.(self, other)
end
+@()
# File lib/dnn/core/param.rb, line 32
def +@
  self
end
-(other)
# File lib/dnn/core/param.rb, line 45
def -(other)
  other = Tensor.convert(other) unless other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
  Layers::Sub.(self, other)
end
-@()
# File lib/dnn/core/param.rb, line 36
def -@
  Neg.(self)
end
/(other)
# File lib/dnn/core/param.rb, line 55
def /(other)
  other = Tensor.convert(other) unless other.is_a?(DNN::Tensor) || other.is_a?(DNN::Param)
  Layers::Div.(self, other)
end
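Taken together, these operator overloads let a Param participate directly in tensor arithmetic: each one builds the corresponding layer (Add, Sub, Mul, Div, Pow, Neg) so the operation is recorded for backpropagation, and plain numerics are first wrapped via Tensor.convert. A hedged sketch (assuming DNN::Tensor supports the same operators, as the conversions above suggest):

  p = DNN::Param.new(Xumo::SFloat[1.0, 2.0, 3.0])
  y = (p + 2) * p / 2   # 2 is wrapped by Tensor.convert; result is a tensor in the graph
  z = -p + p**2         # unary minus goes through Neg, exponentiation through Pow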
backward(grad)
# File lib/dnn/core/param.rb, line 13
def backward(grad)
  if @trainable
    @grad ||= Xumo::SFloat[0]
    if @data.shape == grad.shape
      @grad += grad
    elsif @data.shape == grad.shape[1..-1]
      # Incoming gradient has an extra leading batch dimension; sum over it.
      @grad += grad.sum(0)
    else
      raise DNNError, "Shape mismatch."
    end
  else
    @grad = Xumo::SFloat[0]
  end
end
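Note that backward accumulates into grad rather than overwriting it, so gradients arriving from multiple uses of the same parameter sum up; a gradient with an extra leading batch dimension is reduced with sum(0) before accumulation. A sketch of the accumulation behavior:

  p = DNN::Param.new(Xumo::SFloat.new(3).fill(1))
  p.backward(Xumo::SFloat[0.5, 0.5, 0.5])       # same shape: added directly
  p.backward(Xumo::SFloat.new(4, 3).fill(0.1))  # batch of 4: summed over axis 0 first
  p.grad                                        # => Xumo::SFloat[0.9, 0.9, 0.9]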
shape()
# File lib/dnn/core/param.rb, line 28
def shape
  @data.shape
end
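shape simply delegates to the underlying data array, e.g.:

  DNN::Param.new(Xumo::SFloat.new(2, 3).rand).shape   # => [2, 3]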