From 583ddeae93809b2c074e5d2320def49b19424dc9 Mon Sep 17 00:00:00 2001
From: Brendan Hansen
Date: Fri, 1 Mar 2019 00:20:55 -0600
Subject: [PATCH] changed model of neural network

---
 nn/data.lua | 14 +++++++++++---
 nn/nn.lua   | 17 +++++++++--------
 2 files changed, 20 insertions(+), 11 deletions(-)

diff --git a/nn/data.lua b/nn/data.lua
index b05a2af..061bdaa 100644
--- a/nn/data.lua
+++ b/nn/data.lua
@@ -92,7 +92,12 @@ function saveNeuralNetwork(network, file_path)
 
 	for l = 1, numLayers do
 		local layer = network.network[l]
-		file:write(tostring(layer.bias) .. "\n")
+
+		for n = 1, #layer.neurons do
+			file:write(tostring(layer.neurons[n].bias) .. ' ')
+		end
+
+		file:write("\n")
 
 		for n = 1, #layer.neurons do
 			local neuron = layer.neurons[n]
@@ -127,12 +132,15 @@ function loadNeuralNetwork(file_path)
 
 	for l = 1, numLayers do
 		log.log(log.LOG_INFO, "Reading layer " .. l)
-		local bias = file:read("*number")
-		network.network[l].bias = bias
 
 		local upper = layerCount[l - 1]
 		if upper == nil then upper = layerCount[l] end
 
+		for n = 1, layerCount[l] do
+			local bias = file:read("*number")
+			network.network[l].neurons[n].bias = bias
+		end
+
 		for n = 1, layerCount[l] do
 			for w = 1, upper do
 				local weight = file:read("*number")
diff --git a/nn/nn.lua b/nn/nn.lua
index 102b98c..c98608e 100644
--- a/nn/nn.lua
+++ b/nn/nn.lua
@@ -5,18 +5,21 @@ function Neuron:init(input_count)
 	self.value = 0
 	self.delta = 0
 	self.weights = {}
+	self.bias = math.random() * 1 - .5
 
 	for i in range(1, input_count) do
 		self.weights[i] = math.random() * 1 - .5 -- Initialize to random weights
 	end
 end
 
-function Neuron:activate(inputs, bias)
+function Neuron:activate(inputs)
 	local weights = self.weights
+
+	local activation = self.bias
 	for i = 1, #weights do
-		bias = bias + (weights[i] * inputs[i])
+		activation = activation + (weights[i] * inputs[i])
 	end
-	self.value = bias / (2 + 2 * math.abs(bias)) + 0.5
+	self.value = activation / (2 + 2 * math.abs(activation)) + 0.5
 end
 
 Layer = class()
@@ -28,8 +31,6 @@ function Layer:init(neuron_count, input_count)
 	for i = 1, neuron_count do
 		self.neurons[i] = Neuron(input_count)
 	end
-
-	self.bias = math.random()
 end
 
 NeuralNetwork = class()
@@ -59,9 +60,8 @@ function NeuralNetwork:activate(inputs)
 			inputs[j] = prevCells[j].value
 		end
 
-		local bias = self.network[i].bias
 		for j = 1, #cells do
-			cells[j]:activate(inputs, bias)
+			cells[j]:activate(inputs)
 		end
 	end
 end
@@ -96,10 +96,11 @@ function NeuralNetwork:back_propagate(inputs, outputs)
 	end
 
 	for i = 2, numLayers do
-		self.network[i].bias = self.network[i].neurons[#self.network[i].neurons].delta * learningRate
 		for j = 1, #self.network[i].neurons do
+			self.network[i].neurons[j].bias = self.network[i].neurons[j].bias + self.network[i].neurons[j].delta * learningRate
 			for k = 1, #self.network[i].neurons[j].weights do
 				local weights = self.network[i].neurons[j].weights
+
 				weights[k] = weights[k] + self.network[i].neurons[j].delta * learningRate * self.network[i - 1].neurons[k].value
 			end
 		end
-- 
2.25.1
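
Note: for reference, a minimal standalone Lua sketch of the model after this change. The helper names newNeuron, activate, and updateBias below are illustrative only and do not exist in the repository; the sketch just restates the three pieces the patch touches: each neuron now owns a randomly initialised bias, that bias seeds the weighted sum before the fast-sigmoid squash x / (2 + 2|x|) + 0.5, and back-propagation nudges each bias by delta * learningRate.

    -- Create a neuron with its own bias and one weight per input,
    -- all initialised uniformly in [-0.5, 0.5), as in Neuron:init above.
    local function newNeuron(input_count)
        local n = { value = 0, delta = 0, bias = math.random() - 0.5, weights = {} }
        for i = 1, input_count do
            n.weights[i] = math.random() - 0.5
        end
        return n
    end

    -- Start the accumulator at the neuron's own bias, add the weighted
    -- inputs, then squash into (0, 1) with x / (2 + 2|x|) + 0.5,
    -- mirroring the new Neuron:activate.
    local function activate(neuron, inputs)
        local a = neuron.bias
        for i = 1, #neuron.weights do
            a = a + neuron.weights[i] * inputs[i]
        end
        neuron.value = a / (2 + 2 * math.abs(a)) + 0.5
        return neuron.value
    end

    -- Per-neuron bias update, as NeuralNetwork:back_propagate now does
    -- for every neuron instead of once per layer.
    local function updateBias(neuron, learningRate)
        neuron.bias = neuron.bias + neuron.delta * learningRate
    end

    -- Example usage:
    local n = newNeuron(3)
    print(activate(n, {0.2, -0.4, 0.7}))

The squash keeps every neuron value inside (0, 1) no matter how large the accumulated sum grows, which is why the bias can simply be added into the same accumulator as the weighted inputs.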