class DNN::Layers::Conv2D

Attributes

filter_size[R]
num_filters[R]
padding[R]
strides[R]

Public Class Methods

new(num_filters, filter_size, weight_initializer: Initializers::RandomNormal.new, bias_initializer: Initializers::Zeros.new, weight_regularizer: nil, bias_regularizer: nil, use_bias: true, strides: 1, padding: false)

@param [Integer] num_filters Number of filters.
@param [Array | Integer] filter_size Filter size. Filter size is of the form [height, width].
@param [Array | Integer] strides Stride length. Stride length is of the form [height, width].
@param [Array | Integer | Boolean] padding Padding size, or whether to apply padding. Padding size is of the form [height, width].

Calls superclass method DNN::Layers::Connection::new
# File lib/dnn/core/layers/cnn_layers.rb, line 127
def initialize(num_filters, filter_size,
               weight_initializer: Initializers::RandomNormal.new,
               bias_initializer: Initializers::Zeros.new,
               weight_regularizer: nil,
               bias_regularizer: nil,
               use_bias: true,
               strides: 1,
               padding: false)
  super(weight_initializer: weight_initializer, bias_initializer: bias_initializer,
        weight_regularizer: weight_regularizer, bias_regularizer: bias_regularizer, use_bias: use_bias)
  @num_filters = num_filters
  @filter_size = filter_size.is_a?(Integer) ? [filter_size, filter_size] : filter_size
  @strides = strides.is_a?(Integer) ? [strides, strides] : strides
  @padding = padding.is_a?(Integer) ? [padding, padding] : padding
end
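
A minimal construction sketch (the argument values below are illustrative, not taken from the library's examples):

  conv = DNN::Layers::Conv2D.new(16, 5)                   # 16 filters of size 5x5, stride 1, no padding
  conv = DNN::Layers::Conv2D.new(16, [3, 5], strides: 2)  # rectangular 3x5 filters, stride [2, 2]
  conv = DNN::Layers::Conv2D.new(32, 3, padding: true)    # let build compute a suitable padding size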

Public Instance Methods

backward_node(dy)
# File lib/dnn/core/layers/cnn_layers.rb, line 171
def backward_node(dy)
  # Flatten the incoming gradient to the im2col matrix layout: (batch * out_h * out_w, num_filters).
  dy = dy.reshape(dy.shape[0..2].reduce(:*), dy.shape[3])
  if @trainable
    # Accumulate gradients for the weight matrix and, if present, the bias.
    @weight.grad += @col.transpose.dot(dy)
    @bias.grad += dy.sum(0) if @bias
  end
  dcol = dy.dot(@weight.data.transpose)
  # Fold the column gradient back into image form, then strip the zero padding if any was applied.
  dx = col2im(dcol, @x_shape, *@out_size, *@filter_size, @strides)
  @padding ? zero_padding_bwd(dx, @pad_size) : dx
end

build(input_shape)
Calls superclass method DNN::Layers::Layer#build
# File lib/dnn/core/layers/cnn_layers.rb, line 143
def build(input_shape)
  unless input_shape.length == 3
    raise DNNShapeError, "Input shape is #{input_shape}. But input shape must be 3 dimensional."
  end
  prev_h, prev_w, num_prev_filters = *input_shape
  # padding == true lets the layer compute a padding size itself; an Array is used as given;
  # anything else means no padding.
  @pad_size = if @padding == true
    calc_conv2d_padding_size(prev_h, prev_w, *@filter_size, @strides)
  elsif @padding.is_a?(Array)
    @padding
  else
    [0, 0]
  end
  @out_size = calc_conv2d_out_size(prev_h, prev_w, *@filter_size, *@pad_size, @strides)
  super
  # The weight is stored as a 2-D matrix of shape (fil_h * fil_w * prev_filters, num_filters).
  @weight.data = Xumo::SFloat.new(@filter_size.reduce(:*) * num_prev_filters, @num_filters)
  @bias.data = Xumo::SFloat.new(@num_filters) if @bias
  init_weight_and_bias
end
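
When padding is true, build delegates the padding size to calc_conv2d_padding_size. A sketch of the conventional "same"-padding arithmetic this kind of helper typically implements (the library's exact computation may differ):

  in_h, fil_h, stride_h = 7, 3, 2
  out_h = (in_h.to_f / stride_h).ceil                     # aim for out ~ in / stride
  pad_h = [(out_h - 1) * stride_h + fil_h - in_h, 0].max  # total zero padding along the height axis
  p [out_h, pad_h]  # => [4, 2]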

compute_output_shape()
# File lib/dnn/core/layers/cnn_layers.rb, line 182
def compute_output_shape
  [*@out_size, @num_filters]
end
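
The spatial part of this shape comes from @out_size, which follows the usual convolution arithmetic. A sketch of that formula (mirroring what calc_conv2d_out_size is expected to compute, with the padding taken as a total per axis; the sizes are illustrative):

  in_h, in_w, num_filters = 28, 28, 16
  fil_h, fil_w            = 5, 5
  pad_h, pad_w            = 0, 0   # total zero padding per axis
  str_h, str_w            = 1, 1
  out_h = (in_h + pad_h - fil_h) / str_h + 1
  out_w = (in_w + pad_w - fil_w) / str_w + 1
  p [out_h, out_w, num_filters]    # => [24, 24, 16]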

filters()

@return [Numo::SFloat] The weight converted to filter form and returned.

# File lib/dnn/core/layers/cnn_layers.rb, line 187
def filters
  num_prev_filters = @input_shape[2]
  @weight.data.reshape(*@filter_size, num_prev_filters, @num_filters)
end

filters=(filters)

@param [Numo::SFloat] filters Filters to set; converted back to the internal weight layout.

# File lib/dnn/core/layers/cnn_layers.rb, line 193
def filters=(filters)
  num_prev_filters = @input_shape[2]
  @weight.data = filters.reshape(@filter_size.reduce(:*) * num_prev_filters, @num_filters)
end
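
A small sketch of reading and writing filters through these accessors (conv is assumed to be a Conv2D layer that has already been built, so @input_shape and @weight exist):

  f = conv.filters         # Numo::SFloat of shape [fil_h, fil_w, prev_filters, num_filters]
  conv.filters = f * 0.5   # written back and stored internally as the 2-D weight matrix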

forward_node(x)
# File lib/dnn/core/layers/cnn_layers.rb, line 162
def forward_node(x)
  x = zero_padding(x, @pad_size) if @padding
  @x_shape = x.shape
  # im2col expands every receptive field into a row, turning the convolution into one matrix product.
  @col = im2col(x, *@out_size, *@filter_size, @strides)
  y = @col.dot(@weight.data)
  y += @bias.data if @bias
  # Restore the (batch, out_h, out_w, num_filters) layout.
  y.reshape(x.shape[0], *@out_size, y.shape[3])
end
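
forward_node lowers the convolution to a single matrix product via im2col. A self-contained check of that shape bookkeeping, using zeros and hypothetical sizes only:

  require "numo/narray"

  n, in_h, in_w, in_c = 2, 5, 5, 3                     # batch of two 5x5 RGB inputs
  fil_h, fil_w, num_filters = 3, 3, 4
  out_h, out_w = in_h - fil_h + 1, in_w - fil_w + 1    # stride 1, no padding

  col    = Numo::SFloat.zeros(n * out_h * out_w, fil_h * fil_w * in_c)  # what im2col produces
  weight = Numo::SFloat.zeros(fil_h * fil_w * in_c, num_filters)        # the @weight.data layout

  y = col.dot(weight)                                  # (n * out_h * out_w, num_filters)
  p y.reshape(n, out_h, out_w, num_filters).shape      # => [2, 3, 3, 4]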

load_hash(hash)
# File lib/dnn/core/layers/cnn_layers.rb, line 205
def load_hash(hash)
  initialize(hash[:num_filters], hash[:filter_size],
             weight_initializer: Initializers::Initializer.from_hash(hash[:weight_initializer]),
             bias_initializer: Initializers::Initializer.from_hash(hash[:bias_initializer]),
             weight_regularizer: Regularizers::Regularizer.from_hash(hash[:weight_regularizer]),
             bias_regularizer: Regularizers::Regularizer.from_hash(hash[:bias_regularizer]),
             use_bias: hash[:use_bias],
             strides: hash[:strides],
             padding: hash[:padding])
end

to_hash()
Calls superclass method DNN::Layers::Connection#to_hash
# File lib/dnn/core/layers/cnn_layers.rb, line 198
def to_hash
  super(num_filters: @num_filters,
        filter_size: @filter_size,
        strides: @strides,
        padding: @padding)
end