class Tensorflow::Train::GradientDescentOptimizer

Attributes

learning_rate[RW]

Public Class Methods

new(learning_rate, use_locking: false, name: "GradientDescent") click to toggle source
Calls superclass method
# File lib/tensorflow/train/gradient_descent_optimizer.rb, line 8
# Builds a gradient-descent optimizer.
#
# learning_rate - a Numeric, Tensor, or callable producing the rate; it is
#                 materialized as a tensor later (presumably in #prepare —
#                 NOTE(review): confirm against the superclass's call order).
# use_locking:  - forwarded to the superclass.
# name:         - operation name scope, defaults to "GradientDescent".
def initialize(learning_rate, use_locking: false, name: "GradientDescent")
  # Tensor form is created lazily; only remember the raw value here.
  @learning_rate_tensor = nil
  @learning_rate = learning_rate
  super(name: name, use_locking: use_locking)
end

Protected Instance Methods

apply_dense(grad, var) click to toggle source
# File lib/tensorflow/train/gradient_descent_optimizer.rb, line 21
# Applies one dense gradient-descent update to +var+ using +grad+.
#
# The cached learning-rate tensor is cast to the gradient's dtype when the
# two differ, then handed to the raw resource_apply_gradient_descent op.
# Assumes #prepare has already populated @learning_rate_tensor.
def apply_dense(grad, var)
  dtype = grad.output_types.first
  lr = @learning_rate_tensor
  # Only insert a cast node when the dtypes actually disagree.
  lr = Tensorflow.cast(lr, dtype) unless lr.output_types.first == dtype
  RawOps.resource_apply_gradient_descent(var, lr, grad)
end
prepare() click to toggle source
# File lib/tensorflow/train/gradient_descent_optimizer.rb, line 16
# Resolves the learning rate (invoking it if it is callable) and caches it
# as a constant tensor named "learning_rate" for use by #apply_dense.
def prepare
  resolved_rate = call_if_callable(@learning_rate)
  @learning_rate_tensor = Tensorflow.constant(resolved_rate, name: "learning_rate")
end