-- Feed-forward neural network with sigmoid activation and MySQL-backed
-- persistence (schema documented in the comment block below).
-- Prototype table shared by every network instance (via __index).
NeuralNetwork = {
    -- Sigmoid activation: squashes any real input into the open interval (0, 1).
    Transfer = function(x)
        return 1 / (1 + math.exp(-x));
    end;
};
--- Builds a new randomly-initialised feed-forward network.
-- The returned object is an array of layers; each layer is an array of
-- neurons, and each neuron is an array of incoming weights plus a `bias`
-- field.  Input-layer neurons are empty placeholder tables (no weights/bias).
-- @param Inputs           number of input neurons (must be > 0)
-- @param Outputs          number of output neurons (must be > 0)
-- @param HiddenLayers     optional, defaults to ceil(Inputs / 2)
-- @param NeuronsPerLayer  optional, defaults to ceil(Inputs * 0.66666 + Outputs)
-- @param LearningRate     optional, defaults to 0.5
-- @return the network table (inherits methods from NeuralNetwork via __index)
function NeuralNetwork:Create(Inputs, Outputs, HiddenLayers, NeuronsPerLayer, LearningRate)
    assert(Inputs > 0, "Inputs must be greater than 0");
    assert(Outputs > 0, "Outputs must be greater than 0");
    HiddenLayers = HiddenLayers or math.ceil(Inputs / 2);
    NeuronsPerLayer = NeuronsPerLayer or math.ceil(Inputs * 0.66666 + Outputs);
    LearningRate = LearningRate or 0.5;
    local network = setmetatable({
        learningRate = LearningRate,
        numBackPropagates = 0,
    }, { __index = NeuralNetwork });
    -- Layer 1: input neurons carry no weights or bias, only a .result set
    -- later by ForwardPropagate.
    table.insert(network, {});
    for i = 1, Inputs do
        table.insert(network[1], {});
    end
    -- Hidden layers plus the output layer (hence HiddenLayers + 2; layer 1
    -- is the input layer, so the loop starts at 2).
    for i = 2, HiddenLayers + 2 do
        table.insert(network, {});
        -- The final layer is sized by Outputs rather than NeuronsPerLayer.
        local neuronsInLayer = (i == HiddenLayers + 2) and Outputs or NeuronsPerLayer;
        for j = 1, neuronsInLayer do
            -- Bias and weights initialised uniformly in [-1, 1).
            table.insert(network[i], { bias = math.random() * 2 - 1 });
            -- One incoming weight per neuron in the previous layer.
            for k = 1, #network[i - 1] do
                table.insert(network[i][j], math.random() * 2 - 1);
            end
        end
    end
    return network;
end
--[[
MySQL schema expected by NeuralNetwork:Save / NeuralNetwork:Load:

CREATE TABLE `networks` (
  `ID` varchar(64) NOT NULL,
  `LearningRate` double DEFAULT NULL,
  `NumBackPropagates` int(11) DEFAULT NULL,
  PRIMARY KEY (`ID`)
) ENGINE=InnoDB;

CREATE TABLE `neurons` (
  `ID` varchar(64) NOT NULL,
  `LayerID` int(11) NOT NULL,
  `NeuronID` int(11) NOT NULL,
  `Bias` double DEFAULT NULL,
  `Weights` longtext,
  PRIMARY KEY (`ID`,`LayerID`,`NeuronID`),
  KEY `layer_id` (`LayerID`),
  KEY `neuron_id` (`NeuronID`)
) ENGINE=InnoDB;
]]
--- Loads a previously saved network from the database.
-- @param db    database handle exposing Query/Fetch/GetRow/EscapeString
-- @param name  network identifier (the `ID` column in both tables)
-- @return the reconstructed network table
-- Raises (via assert) if no network with that name exists.
function NeuralNetwork:Load(db, name)
    local escapedName = db:EscapeString(name);
    assert(db:Query("SELECT * FROM networks WHERE ID='"..escapedName.."';"));
    assert(db:Fetch(), "No neural network found with name "..tostring(name));
    local network = setmetatable({
        -- Coerce to numbers: some DB drivers return every column as a string,
        -- which would break the arithmetic done on these fields later.
        learningRate = tonumber(db:GetRow().LearningRate),
        numBackPropagates = tonumber(db:GetRow().NumBackPropagates) or 0,
    }, { __index = NeuralNetwork });
    assert(db:Query("SELECT MAX(LayerID) FROM neurons WHERE ID='"..escapedName.."';"));
    assert(db:Fetch(), "No neural network found with name "..tostring(name));
    local layers = tonumber(db:GetRow(1));
    -- Guard against NULL (layers == nil): `nil >= 2` would raise a confusing
    -- comparison error instead of this assert's message.
    assert(layers and layers >= 2, "No neural network found with name "..tostring(name));
    for layerIndex = 1, layers do
        local layer = {};
        table.insert(network, layer);
        assert(db:Query("SELECT * FROM neurons WHERE ID='"..escapedName.."' AND LayerID="..layerIndex.." ORDER BY NeuronID;"));
        while db:Fetch() do
            local row = db:GetRow();
            local neuron = {
                bias = tonumber(row.Bias)
            };
            -- Weights are stored as one "|w1|w2|...|" delimited string
            -- (written by NeuralNetwork:Save).
            for value in row.Weights:gmatch("|?(.-)|") do
                table.insert(neuron, tonumber(value));
            end
            table.insert(layer, neuron);
        end
    end
    return network;
end
--- Persists this network to the database under the given name.
-- Any existing network with the same name is deleted first, then one row
-- per network plus one row per neuron is inserted.
-- @param db    database handle exposing Query/EscapeString
-- @param name  network identifier (the `ID` column in both tables)
function NeuralNetwork:Save(db, name)
    local escapedName = db:EscapeString(name);
    assert(db:Query("DELETE FROM networks WHERE ID='"..escapedName.."';"));
    assert(db:Query("DELETE FROM neurons WHERE ID='"..escapedName.."';"));
    assert(db:Query("INSERT INTO networks (`ID`, `LearningRate`, `NumBackPropagates`) VALUES ('"..escapedName.."', "..tostring(tonumber(self.learningRate))..", "..tostring(tonumber(self.numBackPropagates))..");"));
    for l = 1, #self do
        for n = 1, #self[l] do
            local neuron = self[l][n];
            -- Serialize weights as "|w1|w2|...|" (parsed back by Load).
            -- table.concat avoids the O(n^2) cost of repeated `..` in a loop;
            -- the empty sentinels produce the leading and trailing "|".
            local parts = { "" };
            for w = 1, #neuron do
                parts[w + 1] = neuron[w];
            end
            parts[#parts + 1] = "";
            local weights = table.concat(parts, "|");
            -- `(tonumber(...) or 0)` so a missing/non-numeric bias falls back
            -- to 0 instead of raising a concat-on-nil error.
            assert(db:Query("INSERT INTO `neurons` (`ID`, `LayerID`, `NeuronID`, `Bias`, `Weights`) VALUES ('"..escapedName.."', "..l..", "..n..", "..(tonumber(neuron.bias) or 0)..", '"..db:EscapeString(weights).."');"));
        end
    end
end
--- Runs one forward pass and returns the output layer's activations.
-- Every neuron's activation is cached in its `.result` field (later read
-- by BackwardPropagate).
-- @param inputs  array of input values, one per input-layer neuron
-- @return array of output-layer activations
function NeuralNetwork:ForwardPropagate(inputs)
    assert(#inputs == #self[1], "Inputs does not match neural network inputs");
    -- NOTE(review): this counter is named numBackPropagates but is bumped on
    -- every forward pass as well -- confirm whether that is intended.
    self.numBackPropagates = self.numBackPropagates + 1;
    local outputs = {};
    -- Input layer: neurons simply echo the supplied input values.
    for j = 1, #self[1] do
        self[1][j].result = inputs[j];
    end
    -- Hidden and output layers: bias + weighted sum of the previous layer's
    -- activations, squashed through the transfer function.
    for layer = 2, #self do
        local previous = self[layer - 1];
        for j = 1, #self[layer] do
            local neuron = self[layer][j];
            local activation = neuron.bias;
            for k = 1, #neuron do
                activation = activation + neuron[k] * previous[k].result;
            end
            neuron.result = NeuralNetwork.Transfer(activation);
            -- Collect the final layer's activations as the return value.
            if layer == #self then
                table.insert(outputs, neuron.result);
            end
        end
    end
    return outputs;
end
--- Performs one step of backpropagation training.
-- Runs a forward pass, computes per-neuron error deltas from the output
-- layer backwards, then applies gradient-descent updates to all biases
-- and weights, scaled by self.learningRate.
-- @param inputs   array of input values, one per input-layer neuron
-- @param outputs  array of target values, one per output-layer neuron
function NeuralNetwork:BackwardPropagate(inputs, outputs)
    assert(#inputs == #self[1], "Inputs does not match neural network input");
    assert(#outputs == #self[#self], "Outputs does not match neural network output");
    -- Forward pass first so every neuron has a fresh .result to differentiate.
    self:ForwardPropagate(inputs);
    -- Pass 1: compute deltas from the output layer back to the first hidden layer.
    for i = #self, 2, -1 do
        for j = 1, #self[i] do
            local neuron = self[i][j];
            if i == #self then
                -- Output layer: delta = (target - actual) * sigmoid'(activation),
                -- where sigmoid'(a) = a * (1 - a) in terms of the activation.
                neuron.delta = (outputs[j] - neuron.result) * neuron.result * (1 - neuron.result);
            else
                -- Hidden layer: propagate deltas back through outgoing weights.
                local weightDelta = 0;
                for k = 1, #self[i + 1] do
                    weightDelta = weightDelta + self[i + 1][k][j] * self[i + 1][k].delta;
                end
                neuron.delta = neuron.result * (1 - neuron.result) * weightDelta;
            end
        end
    end
    -- Pass 2: apply gradient-descent updates to biases and weights.
    for i = 2, #self do
        for j = 1, #self[i] do
            local neuron = self[i][j];
            -- BUG FIX: accumulate the bias update. The old code overwrote the
            -- bias with just the step (delta * learningRate), discarding the
            -- current bias, while the weight update below correctly accumulates.
            neuron.bias = neuron.bias + neuron.delta * self.learningRate;
            for k = 1, #neuron do
                neuron[k] = neuron[k] + neuron.delta * self.learningRate * self[i - 1][k].result;
            end
        end
    end
end
-- (end of file)