class TensorFlow::Keras::Layers::Dense

Public Class Methods

new(units, activation: nil, use_bias: true, kernel_initializer: "glorot_uniform", bias_initializer: "zeros", dtype: :float)
# File lib/tensorflow/keras/layers/dense.rb, line 5
def initialize(units, activation: nil, use_bias: true, kernel_initializer: "glorot_uniform", bias_initializer: "zeros", dtype: :float)
  @units = units
  @activation = activation
  @use_bias = use_bias
  @kernel_initializer = kernel_initializer
  @bias_initializer = bias_initializer
  @dtype = dtype
  @built = false
end

Public Instance Methods

build(input_shape)
# File lib/tensorflow/keras/layers/dense.rb, line 15
def build(input_shape)
  last_dim = input_shape.last
  @kernel = Utils.add_weight(name: "kernel", shape: [last_dim, @units], initializer: @kernel_initializer, dtype: @dtype)

  if @use_bias
    @bias = Utils.add_weight(name: "bias", shape: [@units], initializer: @bias_initializer, dtype: @dtype)
  else
    @bias = nil
  end

  @output_shape = [last_dim, @units]

  @built = true
end
call(inputs)
# File lib/tensorflow/keras/layers/dense.rb, line 38
def call(inputs)
  build(inputs.shape) unless @built

  rank = inputs.shape.size

  if rank > 2
    raise Error, "Rank > 2 not supported yet"
  else
    inputs = TensorFlow.cast(inputs, @dtype)
    outputs = TensorFlow.matmul(inputs, @kernel)
  end

  if @use_bias
    outputs = NN.bias_add(outputs, @bias)
  end

  case @activation
  when "relu"
    NN.relu(outputs)
  when "softmax"
    NN.softmax(outputs)
  when nil
    outputs
  else
    raise "Unknown activation: #{@activation}"
  end
end
count_params()
# File lib/tensorflow/keras/layers/dense.rb, line 34
def count_params
  @units + @kernel.shape.inject(&:*)
end
output_shape()
# File lib/tensorflow/keras/layers/dense.rb, line 30
def output_shape
  @output_shape
end