@thomasjslone
Created April 29, 2023 13:59
Base concept: a minimal feed-forward neural network in Ruby, with one hidden layer and linear (unactivated) neurons.
# A minimal feed-forward network with one hidden layer. Neurons are linear
# (no activation function), so this is a base concept for experimenting with
# the training loop rather than a complete backpropagation implementation.
class NeuralNetwork
  def initialize(num_inputs, num_hidden, num_outputs)
    # The input layer is kept for symmetry; inputs are passed straight to the
    # hidden layer, so these neurons hold no weights.
    @input_layer  = Array.new(num_inputs)  { Neuron.new(0) }
    @hidden_layer = Array.new(num_hidden)  { Neuron.new(num_inputs) }
    @output_layer = Array.new(num_outputs) { Neuron.new(num_hidden) }
  end

  # Propagate an array of input values through the hidden layer and return
  # the array of output-layer values.
  def feed_forward(inputs)
    hidden_outputs = @hidden_layer.map { |neuron| neuron.output(inputs) }
    @output_layer.map { |neuron| neuron.output(hidden_outputs) }
  end

  # One training step: feed the inputs forward, compute each output neuron's
  # error and adjust its weights, then propagate the errors back to the
  # hidden layer. Returns the summed output error.
  def train(inputs, expected_outputs, learning_rate)
    feed_forward(inputs)

    output_errors = @output_layer.map.with_index do |neuron, i|
      error = expected_outputs[i] - neuron.last_output
      neuron.adjust_weights(@hidden_layer.map(&:last_output), error, learning_rate)
      error
    end

    @hidden_layer.each_with_index do |neuron, i|
      # A hidden neuron's error is each output neuron's error weighted by the
      # connection from this hidden neuron to that output neuron.
      error = @output_layer.map.with_index do |output_neuron, j|
        output_neuron.weights[i] * output_errors[j]
      end.sum
      neuron.adjust_weights(inputs, error, learning_rate)
    end

    output_errors.sum
  end
end

class Neuron
  # Readers so other layers can use a neuron's weights and last output
  # without reaching into its instance variables.
  attr_reader :weights, :last_output

  def initialize(num_inputs)
    @bias        = rand(-1.0..1.0)
    @weights     = Array.new(num_inputs) { rand(-1.0..1.0) }
    @last_output = nil
  end

  # Weighted sum of the inputs plus the bias (no activation function).
  def output(inputs)
    sum = @bias
    inputs.each_with_index { |input, i| sum += input * @weights[i] }
    @last_output = sum
  end

  # Nudge each weight (and the bias) in the direction that reduces the error.
  def adjust_weights(inputs, error, learning_rate)
    inputs.each_with_index do |input, i|
      @weights[i] += learning_rate * error * input
    end
    @bias += learning_rate * error
  end
end
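
A usage sketch (not part of the original gist): it assumes the classes above and repeatedly trains a 2-input, 3-hidden, 1-output network on a single hypothetical input/target pair; since the neurons are linear, the summed error on that pair should shrink toward zero.

# Illustrative usage only; network shape, inputs, target, and learning rate
# are arbitrary example values.
net = NeuralNetwork.new(2, 3, 1)

inputs   = [0.5, -0.2]
expected = [0.3]

1000.times do |epoch|
  error = net.train(inputs, expected, 0.01)
  puts "epoch #{epoch}: summed output error = #{error.round(6)}" if (epoch % 100).zero?
end

puts "final output: #{net.feed_forward(inputs).inspect}"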