class TNN::FeedForwardNeuralNetwork

Attributes

err_list[RW]
layer_list[RW]
layer_size[RW]
learning_rate[RW]
node_id[RW]
threshold[RW]

Public Class Methods

new(learning_rate=0.1, threshold=0.0) click to toggle source
# File lib/t_nn/feedforward_neural_network.rb, line 11
# Builds an empty network.
#
# @param learning_rate [Float] step size used by back-propagation
# @param threshold [Float] activation threshold handed to every Node
def initialize(learning_rate=0.1, threshold=0.0)
  @layer_size = 0          # number of layers added so far
  @layer_list = []         # @layer_list[i] => Array of nodes in layer i
  @link_list = {}          # "#{from_id}_#{to_id}" => link weight
  @node_id = 0             # next node id to hand out
  @learning_rate = learning_rate
  @err_list = []           # per-epoch accumulated error (filled by #fit)
  @threshold = threshold
end

Public Instance Methods

add_layer(node_num) click to toggle source
# File lib/t_nn/feedforward_neural_network.rb, line 21
# Appends a new layer of +node_num+ sigmoid nodes and, unless this is the
# first layer, creates randomly weighted links from the previous layer.
#
# @param node_num [Integer] number of nodes in the new layer
def add_layer(node_num)
  fresh_nodes = Array.new(node_num) do
    node = Node.new(0.0, "sig", @threshold)
    node.set_id(@node_id)
    @node_id += 1
    node
  end

  @layer_list.push(fresh_nodes)
  # wire the new layer to the previous one (no-op for the input layer)
  connect_nodes unless @layer_size.zero?
  @layer_size += 1
end
back_propagation(y) click to toggle source

@param y [Array] teacher (target) data, one value per output-layer node

# File lib/t_nn/feedforward_neural_network.rb, line 108
# Backward pass: walks from the output layer down to the input layer,
# computing an error signal (delta) per node and adjusting every link
# weight in @link_list in place. The derivative term w * (1 - w) is the
# sigmoid derivative expressed through the node's current output.
#
# @param y [Array] teacher (target) values, one per output node
# @raise [RuntimeError] if y's size differs from the output layer size
def back_propagation(y)

  raise "output size different from node num of output layer"  if get_output_layer.size != y.size
  # raise "o"  if get_output_layer.size != y.size
  # delta["<node_id>"] holds the error signal for that node
  delta = {}
  ( @layer_size - 1).downto(0) do |layer_num|
    if ( @layer_size - 1) == layer_num   # if output layer
      # output layer: delta = -(target - output) * sigmoid'(output)
      @layer_list[layer_num].each_with_index do |output_node, i|
        delta["#{output_node.id}"] = -1.0 * calc_err(y[i], output_node.w) * output_node.w * (1.0 -output_node.w)
      end
    else 
      @layer_list[layer_num].each do |from_node|
        # update this node's outgoing links (gradient descent step)
        # NOTE(review): links are updated BEFORE calc_delta below reads them,
        # so hidden-layer deltas see the already-updated weights; textbook
        # backprop uses the pre-update weights — confirm this is intended.
        @layer_list[layer_num + 1].each do |to_node|
          update_weight = -1.0 * @learning_rate * delta["#{to_node.id}"] * from_node.w
          @link_list["#{from_node.id}_#{to_node.id}"] = @link_list["#{from_node.id}_#{to_node.id}"] + update_weight 
        end
        # update this layer's delta using the sigmoid derivative w * (1 - w)
        delta["#{from_node.id}"] = calc_delta(delta,layer_num, from_node) * from_node.w * (1.0 - from_node.w)
      end
    end
  end
end
calc_ave_err(y) click to toggle source
# File lib/t_nn/feedforward_neural_network.rb, line 93
# Mean absolute error between the output layer and the teacher data.
#
# @param y [Array] teacher values, one per output node
# @return [Float] average of |error| over the output nodes
def calc_ave_err(y)
  output_nodes = @layer_list[@layer_size - 1]
  total = output_nodes.each_with_index.sum(0.0) do |node, idx|
    # arguments are swapped vs. calc_err's signature, but .abs makes it equivalent
    calc_err(node.w, y[idx]).abs
  end
  total / y.size
end
calc_delta(delta,layer_i, from_node) click to toggle source
# File lib/t_nn/feedforward_neural_network.rb, line 136
# Sums the error signals of the next layer, weighted by the links leaving
# +from_node+ — the back-propagated error contribution for that node.
#
# @param delta [Hash] node-id (String) => error signal
# @param layer_i [Integer] index of from_node's layer
# @param from_node [Node] node whose incoming error is being computed
# @return [Float] weighted sum of next-layer deltas
def calc_delta(delta, layer_i, from_node)
  @layer_list[layer_i + 1].sum(0.0) do |to_node|
    delta["#{to_node.id}"] * @link_list["#{from_node.id}_#{to_node.id}"]
  end
end
calc_err(teacher_data, w) click to toggle source
# File lib/t_nn/feedforward_neural_network.rb, line 132
# Signed error between the teacher value and a node output.
#
# @param teacher_data [Numeric] target value
# @param w [Numeric] actual node output
# @return [Numeric] teacher_data - w
def calc_err(teacher_data, w)
  teacher_data - w
end
connect_nodes() click to toggle source

connect_nodes

# File lib/t_nn/feedforward_neural_network.rb, line 42
# Creates a fully-connected set of links between the most recently added
# layer (@layer_list[@layer_size - 1]) and the layer just pushed
# (@layer_list[@layer_size]), each weight initialized uniformly in [-1, 1).
def connect_nodes
  previous_layer = @layer_list[@layer_size - 1]
  newest_layer = @layer_list[@layer_size]
  previous_layer.product(newest_layer).each do |src, dst|
    @link_list["#{src.id}_#{dst.id}"] = rand(-1.0...1.0)
  end
end
evaluate(x_test, y_test) click to toggle source
# File lib/t_nn/feedforward_neural_network.rb, line 144
# Runs each test sample through the network and returns the mean absolute
# error as a percentage.
#
# @param x_test [Array] test inputs, one Array per sample
# @param y_test [Array] expected outputs, one Array per sample
# @return [Float] average error over all samples, scaled to 0..100
def evaluate(x_test, y_test)
  # compare teacher_datas and output of nn
  sum_err = 0.0
  x_test.zip(y_test).each do |x, y|
    propagation(x)
    err = 0.0
    @layer_list[@layer_size - 1].zip(y).each do |node, expected|
      err += (expected - node.w).abs
    end
    # BUG FIX: normalize by this sample's own output size rather than
    # y_test[0].size, so samples of differing lengths average correctly.
    sum_err += (err / y.size)
  end
  (sum_err / y_test.size) * 100.0
end
fit(x_train, y_train, epoch) click to toggle source

@param x_train [Array] training input samples; @param y_train [Array] corresponding teacher data

# File lib/t_nn/feedforward_neural_network.rb, line 56
# Trains the network: for each epoch, draws x_train.size random
# (input, teacher) pairs with replacement, runs a forward pass, accumulates
# the average error, and back-propagates. The per-epoch error total is
# appended to @err_list.
#
# @param x_train [Array] training inputs, one Array per sample
# @param y_train [Array] teacher data, one Array per sample
# @param epoch [Integer] number of training epochs
def fit(x_train, y_train, epoch)
  # FIX: build the (input, teacher) pairs once instead of re-zipping inside
  # the loop (was O(n^2) per epoch), and drop the shadowed |x, y| block
  # params that were immediately overwritten by .sample.
  pairs = x_train.zip(y_train)
  epoch.times do
    epoch_err = 0.0
    pairs.size.times do
      x, y = pairs.sample   # random sampling with replacement, as before
      propagation(x)
      epoch_err += calc_ave_err(y)
      back_propagation(y)
    end
    @err_list.push(epoch_err)
  end
end
get_output_layer() click to toggle source
# File lib/t_nn/feedforward_neural_network.rb, line 162
# Collects the current output value of every node in the last layer.
#
# @return [Array<Float>] node outputs, in layer order
def get_output_layer
  @layer_list[@layer_size - 1].map(&:w)
end
propagation(x) click to toggle source
# File lib/t_nn/feedforward_neural_network.rb, line 71
# Forward pass: feeds +x+ into the input layer, then propagates each
# layer's weighted sums (plus a constant 1.0 bias term) through the
# remaining layers via Node#update_w.
#
# @param x [Array] one input sample, one value per input node
# @return [Array<Float>] the resulting output-layer values
# @raise [RuntimeError] if x's size differs from the input layer size
def propagation(x)
  input_layer = @layer_list[0]
  raise "input size is different from  node num of input layer " if input_layer.size != x.size

  # feed the sample into the input layer
  input_layer.zip(x) { |node, value| node.input(value) }

  # forward each layer into the next one
  (0...(@layer_size - 1)).each do |layer_idx|
    @layer_list[layer_idx + 1].each do |dst|
      weighted_sum = @layer_list[layer_idx].reduce(0.0) do |acc, src|
        acc + @link_list["#{src.id}_#{dst.id}"] * src.w
      end
      dst.update_w(weighted_sum + 1.0)
    end
  end

  get_output_layer
end