class DNN::Layers::Conv2DTranspose
Attributes
filter_size[R]
num_filters[R]
padding[R]
strides[R]
Public Class Methods
new(num_filters, filter_size, weight_initializer: Initializers::RandomNormal.new, bias_initializer: Initializers::Zeros.new, weight_regularizer: nil, bias_regularizer: nil, use_bias: true, strides: 1, padding: false)
@param [Integer] num_filters Number of filters.
@param [Array | Integer] filter_size Filter size. Filter size is of the form [height, width].
@param [Array | Integer] strides Stride length. Stride length is of the form [height, width].
@param [Array | Boolean] padding Padding size. Padding size is of the form [height, width].
Calls superclass method
DNN::Layers::Connection::new
# File lib/dnn/core/layers/cnn_layers.rb, line 230
def initialize(num_filters, filter_size,
               weight_initializer: Initializers::RandomNormal.new,
               bias_initializer: Initializers::Zeros.new,
               weight_regularizer: nil,
               bias_regularizer: nil,
               use_bias: true,
               strides: 1,
               padding: false)
  super(weight_initializer: weight_initializer, bias_initializer: bias_initializer,
        weight_regularizer: weight_regularizer, bias_regularizer: bias_regularizer,
        use_bias: use_bias)
  @num_filters = num_filters
  @filter_size = filter_size.is_a?(Integer) ? [filter_size, filter_size] : filter_size
  @strides = strides.is_a?(Integer) ? [strides, strides] : strides
  @padding = padding.is_a?(Integer) ? [padding, padding] : padding
end
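A minimal construction sketch, assuming the gem is loaded with require "dnn"; integer arguments are normalized to [height, width] pairs by the constructor:

  require "dnn"

  # 32 filters, 4x4 kernel, stride 2 in both directions, no padding.
  layer = DNN::Layers::Conv2DTranspose.new(32, 4, strides: 2)
  layer.filter_size   # => [4, 4]
  layer.strides       # => [2, 2]

  # Same layer with explicit [height, width] arguments.
  layer = DNN::Layers::Conv2DTranspose.new(32, [4, 4], strides: [2, 2])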
Public Instance Methods
backward_node(dy)
# File lib/dnn/core/layers/cnn_layers.rb, line 276
def backward_node(dy)
  dy = zero_padding(dy, @pad_size) if @padding
  col = im2col(dy, *@input_shape[0..1], *@filter_size, @strides)
  if @trainable
    @weight.grad += col.transpose.dot(@x)
    @bias.grad += col.reshape(col.shape[0] * @filter_size.reduce(:*), @num_filters).sum(0) if @bias
  end
  dx = col.dot(@weight.data)
  dx.reshape(dy.shape[0], *@input_shape)
end
build(input_shape)
Calls superclass method
DNN::Layers::Layer#build
# File lib/dnn/core/layers/cnn_layers.rb, line 246
def build(input_shape)
  unless input_shape.length == 3
    raise DNNShapeError, "Input shape is #{input_shape}. But input shape must be 3 dimensional."
  end
  prev_h, prev_w, num_prev_filters = *input_shape
  @pad_size = if @padding == true
                calc_conv2d_transpose_padding_size(prev_h, prev_w, *@filter_size, @strides)
              elsif @padding.is_a?(Array)
                @padding
              else
                [0, 0]
              end
  @out_size = calc_conv2d_transpose_out_size(prev_h, prev_w, *@filter_size, *@pad_size, @strides)
  super
  @weight.data = Xumo::SFloat.new(@filter_size.reduce(:*) * @num_filters, num_prev_filters)
  @bias.data = Xumo::SFloat.new(@num_filters) if @bias
  init_weight_and_bias
end
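A hedged sketch of the padding handling shown above; building the layer directly is only for illustration (normally the model builds its layers), and DNNShapeError is raised for any input shape that is not 3-dimensional:

  # padding: false  -> @pad_size is [0, 0]
  # padding: [1, 1] -> @pad_size is taken as given
  # padding: true   -> @pad_size comes from calc_conv2d_transpose_padding_size
  layer = DNN::Layers::Conv2DTranspose.new(16, 2, strides: 2, padding: [1, 1])
  layer.build([8, 8, 3])

  begin
    layer.build([8, 8])          # not 3-dimensional
  rescue DNN::DNNShapeError => e
    puts e.message               # => "Input shape is [8, 8]. But input shape must be 3 dimensional."
  end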
compute_output_shape()
# File lib/dnn/core/layers/cnn_layers.rb, line 287
def compute_output_shape
  [*@out_size, @num_filters]
end
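For instance, with no padding; the concrete numbers assume calc_conv2d_transpose_out_size follows the usual transposed-convolution relation out = (in - 1) * stride + filter_size - pad:

  layer = DNN::Layers::Conv2DTranspose.new(16, 2, strides: 2)
  layer.build([8, 8, 3])
  layer.compute_output_shape   # => [16, 16, 16], i.e. [*out_size, num_filters]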
filters()
@return [Numo::SFloat] The weight converted to filter form, of shape [height, width, num_filters, num_prev_filters].
# File lib/dnn/core/layers/cnn_layers.rb, line 292
def filters
  num_prev_filters = @input_shape[2]
  @weight.data.reshape(*@filter_size, @num_filters, num_prev_filters)
end
filters=(filters)
@param [Numo::SFloat] filters Filters to convert back into the weight and set.
# File lib/dnn/core/layers/cnn_layers.rb, line 298
def filters=(filters)
  num_prev_filters = @input_shape[2]
  @weight.data = filters.reshape(@filter_size.reduce(:*) * @num_filters, num_prev_filters)
end
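A small sketch of reading and writing the filter view, assuming the layer is already built so @input_shape and @weight.data exist; the shapes follow directly from the reshape calls above:

  layer = DNN::Layers::Conv2DTranspose.new(8, 3)
  layer.build([8, 8, 3])

  f = layer.filters            # shape [3, 3, 8, 3]  ([height, width, num_filters, num_prev_filters])
  layer.filters = f * 0.5      # stored back into the flat weight of shape [3 * 3 * 8, 3]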
forward_node(x)
# File lib/dnn/core/layers/cnn_layers.rb, line 265
def forward_node(x)
  bsize = x.shape[0]
  x = x.reshape(x.shape[0..2].reduce(:*), x.shape[3])
  @x = x
  col = x.dot(@weight.data.transpose)
  img_shape = [bsize, @out_size[0] + @pad_size[0], @out_size[1] + @pad_size[1], @num_filters]
  y = col2im(col, img_shape, *@input_shape[0..1], *@filter_size, @strides)
  y += @bias.data if @bias
  @padding ? zero_padding_bwd(y, @pad_size) : y
end
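forward_node is normally invoked by the framework rather than by user code; the sketch below only illustrates the shapes involved, and it assumes the CPU (Numo) backend and the usual transposed-convolution size relation for out_size:

  layer = DNN::Layers::Conv2DTranspose.new(4, 2, strides: 2)
  layer.build([8, 8, 3])                     # initializes the weight and bias
  x = Numo::SFloat.new(1, 8, 8, 3).rand      # one input image of shape [8, 8, 3]
  y = layer.forward_node(x)
  y.shape                                    # => [1, 16, 16, 4]  (batch, out_h, out_w, num_filters)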
load_hash(hash)
# File lib/dnn/core/layers/cnn_layers.rb, line 310
def load_hash(hash)
  initialize(hash[:num_filters], hash[:filter_size],
             weight_initializer: Initializers::Initializer.from_hash(hash[:weight_initializer]),
             bias_initializer: Initializers::Initializer.from_hash(hash[:bias_initializer]),
             weight_regularizer: Regularizers::Regularizer.from_hash(hash[:weight_regularizer]),
             bias_regularizer: Regularizers::Regularizer.from_hash(hash[:bias_regularizer]),
             use_bias: hash[:use_bias], strides: hash[:strides], padding: hash[:padding])
end
to_hash()
Calls superclass method
DNN::Layers::Connection#to_hash
# File lib/dnn/core/layers/cnn_layers.rb, line 303
def to_hash
  super(num_filters: @num_filters, filter_size: @filter_size, strides: @strides, padding: @padding)
end
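A hedged round-trip sketch using only the two methods above; it assumes Connection#to_hash also serializes the initializers and regularizers, as the keys read by load_hash suggest:

  layer = DNN::Layers::Conv2DTranspose.new(16, 4, strides: 2, padding: true)
  hash = layer.to_hash
  hash[:num_filters]          # => 16
  hash[:filter_size]          # => [4, 4]

  restored = DNN::Layers::Conv2DTranspose.allocate
  restored.load_hash(hash)    # re-runs initialize with the saved settings
  restored.strides            # => [2, 2]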