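// A minimal fully connected neural network in C#, with no external dependencies.
// Architecture {3, 4, 3}: three inputs, a hidden level of four neurons, three outputs,
// all linear (no activation functions). The network is trained by per-sample gradient
// descent to reproduce its input vector, then evaluated on one held-out test sample.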
using System;
using System.Collections.Generic;

public class main{
    public static void Main(string[] args){
        int[] networkArchitecture = new int[] {3, 4, 3};
        NeuralNetwork brain = new NeuralNetwork(networkArchitecture);
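        // Training data layout: trainData[sample, 0, k] is the k-th input value and
        // trainData[sample, 1, k] the k-th target value. Every target equals its input,
        // so the network is trained to reproduce the input vector.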
        double[, ,] trainData = new double[4, 2, 3] {
            {
                {0.1, 0.2, 0.5},
                {0.1, 0.2, 0.5}
            },
            {
                {0.2, 1, 0.1},
                {0.2, 1, 0.1}
            },
            {
                {1.6, 1.4, 0.2},
                {1.6, 1.4, 0.2}
            },
            {
                {0.4, 1.2, 1.4},
                {0.4, 1.2, 1.4}
            }
        };
        double[, ,] testData = new double[1, 2, 3] {
            {
                {1.2, 0.2, 0.7},
                {1.2, 0.2, 0.7}
            }
        };
        int iterations = 10000000;   // total training steps; the 4 samples are cycled round-robin
        double avgLoss = 0;
        for (int i = 0; i < iterations; i++){
            // pick the current sample's input and target vectors
            double[] currTrainInput = new double[3];
            currTrainInput[0] = trainData[i%4, 0, 0];
            currTrainInput[1] = trainData[i%4, 0, 1];
            currTrainInput[2] = trainData[i%4, 0, 2];
            double[] currTrainOutput = new double[3];
            currTrainOutput[0] = trainData[i%4, 1, 0];
            currTrainOutput[1] = trainData[i%4, 1, 1];
            currTrainOutput[2] = trainData[i%4, 1, 2];
            // forward pass, loss for logging, then backpropagate and update the weights
            double[] output = NeuralNetwork.FeedForward(brain, currTrainInput);
            double loss = calLoss(output, currTrainOutput);
            avgLoss += loss;
            NeuralNetwork.Backward(brain, output, currTrainOutput);
            if (i != 0 && i % 500000 == 0){
                Console.WriteLine("average loss over the last 500000 iterations (at iteration " + i + "): " + (avgLoss / 500000));
                avgLoss = 0;
            }
        }
        // evaluate on the single test sample
        double[] testInput = new double[3];
        testInput[0] = testData[0, 0, 0];
        testInput[1] = testData[0, 0, 1];
        testInput[2] = testData[0, 0, 2];
        double[] testOutput = new double[3];
        testOutput[0] = testData[0, 1, 0];
        testOutput[1] = testData[0, 1, 1];
        testOutput[2] = testData[0, 1, 2];
        double[] NNOutput = NeuralNetwork.FeedForward(brain, testInput);
        double testLoss = calLoss(NNOutput, testOutput);
        Console.WriteLine("\n\nfinal test loss is " + testLoss);

        // debug.printArray(brain.levels[0].weights[0]);
        // debug.printArray(brain.levels[0].weights[1]);
        // debug.printArray(brain.levels[0].weights[2]);
        // Console.WriteLine("\n\n----------------------\n\n");
        // debug.printArray(brain.levels[1].weights[0]);
        // debug.printArray(brain.levels[1].weights[1]);
        // debug.printArray(brain.levels[1].weights[2]);
        // debug.printArray(brain.levels[1].weights[3]);
        // NeuralNetwork.DebugBackward(brain, NNOutput, testOutput);
    }
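    // Reporting metric: weights the three components by 4, 2, 1 (Math.Pow(2, 2 - i))
    // and returns the absolute difference of the two weighted sums.
    // Example: calLoss({1.0, 0.0, 1.0}, {0.0, 1.0, 1.0}) = |5 - 3| = 2.
    // This value is only used for logging; Backward works directly with the
    // per-component error (output - target).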
    public static double calLoss(double[] calculatedOutputs, double[] estimatedOutputs){
        double calculatedNum = 0;
        double estimatedNum = 0;
        for (int i = 0; i < 3; i++){
            calculatedNum += calculatedOutputs[i] * Math.Pow(2, 2 - i);
            estimatedNum += estimatedOutputs[i] * Math.Pow(2, 2 - i);
        }
        return Math.Abs(calculatedNum - estimatedNum);
    }
}
public class NeuralNetwork
{
    public Level[] levels;

    public NeuralNetwork(int[] neuronCounts)
    {
        levels = new Level[neuronCounts.Length - 1];
        for (int i = 0; i < neuronCounts.Length - 1; i++)
        {
            levels[i] = new Level(neuronCounts[i], neuronCounts[i + 1]);
        }
    }

    // public NeuralNetwork(int[] neuronCounts, levelData[] levelInfo)
    // {
    //     levels = new Level[levelInfo.Length];
    //     for (int i = 0; i < levelInfo.Length; i++)
    //     {
    //         levels[i] = new Level(levelInfo[i]);
    //     }
    // }
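    // Runs the given inputs through every level in order and returns the final outputs.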
    public static double[] FeedForward(NeuralNetwork network, double[] givenInputs)
    {
        double[] outputs = Level.FeedForward(givenInputs, network.levels[0]);
        for (int i = 1; i < network.levels.Length; i++)
        {
            outputs = Level.FeedForward(outputs, network.levels[i]);
        }
        return outputs;
    }
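    // Backpropagates the output error through the levels from last to first,
    // updating each level's weights along the way.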
    public static void Backward(NeuralNetwork network, double[] calculatedOutputs, double[] estimatedOutputs)
    {
        double[] delta = new double[calculatedOutputs.Length];
        for (int i = 0; i < calculatedOutputs.Length; i++)
        {
            // gradient of the squared error 0.5 * (output - target)^2 with respect to the output
            // Console.WriteLine(calculatedOutputs[i] + " " + estimatedOutputs[i]);
            delta[i] = calculatedOutputs[i] - estimatedOutputs[i];
        }
        for (int i = network.levels.Length - 1; i >= 0; i--)
        {
            delta = Level.Backward(network.levels[i], delta);
        }
    }
    public static void DebugBackward(NeuralNetwork network, double[] calculatedOutputs, double[] estimatedOutputs)
    {
        double[] delta = new double[calculatedOutputs.Length];
        debug.printArray(calculatedOutputs);
        debug.printArray(estimatedOutputs);
        for (int i = 0; i < calculatedOutputs.Length; i++)
        {
            delta[i] = calculatedOutputs[i] - estimatedOutputs[i];
        }
        Console.WriteLine("\ndelta at the output layer:");
        debug.printArray(delta);
        for (int i = network.levels.Length - 1; i >= 0; i--)
        {
            delta = Level.DebugBackward(network.levels[i], delta);
            Console.WriteLine("\ndelta propagated past level " + i + ":");
            debug.printArray(delta);
        }
        Console.WriteLine("\n\n----------------------------------------\n\n");
    }
}
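// One fully connected layer. weights[i][j] connects input i to output j.
// Note: biases are allocated and randomized in the constructor but are never added
// in FeedForward and never updated in Backward, so only the weights take part in training.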
public class Level
{
    public double[] inputs;
    public double[] outputs;
    public double[] biases;
    public double[][] weights;

    public Level(int inputCount, int outputCount)
    {
        inputs = new double[inputCount];
        outputs = new double[outputCount];
        biases = new double[outputCount];
        weights = new double[inputCount][];
        for (int i = 0; i < inputCount; i++)
        {
            weights[i] = new double[outputCount];
        }
        Randomize();
    }
    // public Level(levelData data)
    // {
    //     inputs = new double[data.inputs];
    //     outputs = new double[data.outputs];
    //     biases = new double[data.outputs];
    //     weights = new double[data.inputs][];
    //     for (int i = 0; i < inputs.Length; i++)
    //     {
    //         weights[i] = new double[data.outputs];
    //     }
    //     int temp = 0;
    //     for (int i = 0; i < inputs.Length; i++)
    //     {
    //         for (int j = 0; j < outputs.Length; j++)
    //         {
    //             weights[i][j] = data.weights[temp];
    //             temp++;
    //         }
    //     }
    //     for (int i = 0; i < biases.Length; i++)
    //     {
    //         biases[i] = data.biases[i];
    //     }
    // }

    // Initializes weights and biases with uniform random values in [0, 1).
    private void Randomize()
    {
        System.Random random = new System.Random();
        for (int i = 0; i < inputs.Length; i++)
        {
            for (int j = 0; j < outputs.Length; j++)
            {
                weights[i][j] = random.NextDouble();
            }
        }
        for (int i = 0; i < biases.Length; i++)
        {
            biases[i] = random.NextDouble();
        }
    }
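    // Forward pass through one level: outputs[i] = sum over j of inputs[j] * weights[j][i].
    // There is no activation function and the biases are not added, so each level is linear.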
    public static double[] FeedForward(double[] givenInputs, Level level)
    {
        for (int i = 0; i < level.inputs.Length; i++)
        {
            level.inputs[i] = givenInputs[i];
        }
        for (int i = 0; i < level.outputs.Length; i++)
        {
            double sum = 0;
            for (int j = 0; j < level.inputs.Length; j++)
            {
                sum += level.inputs[j] * level.weights[j][i];
            }
            level.outputs[i] = sum;
        }
        return level.outputs;
    }
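    // Backpropagation through one level:
    //   dLoss/dweights[i][j] = inputs[i] * errorsFromAboveLayer[j]
    //   error returned for the level below: errors[i] = sum over j of weights[i][j] * errorsFromAboveLayer[j]
    // UpdateWeights runs before the error is propagated, so the returned error
    // is computed with the already-updated weights.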
    public static double[] Backward(Level level, double[] errorsFromAboveLayer)
    {
        double[][] differentiationWrtWeight = new double[level.inputs.Length][];
        for (int i = 0; i < level.inputs.Length; i++)
        {
            differentiationWrtWeight[i] = new double[level.outputs.Length];
        }
        for (int i = 0; i < level.inputs.Length; i++)
        {
            for (int j = 0; j < level.outputs.Length; j++)
            {
                differentiationWrtWeight[i][j] = level.inputs[i] * errorsFromAboveLayer[j];
            }
        }
        UpdateWeights(level, differentiationWrtWeight);

        List<double> errorInCurrentLayer = new List<double>();
        for (int i = 0; i < level.inputs.Length; i++)
        {
            double sum = 0;
            for (int j = 0; j < level.outputs.Length; j++)
            {
                sum += level.weights[i][j] * errorsFromAboveLayer[j];
            }
            errorInCurrentLayer.Add(sum);
        }
        return errorInCurrentLayer.ToArray();
    }
    public static double[] DebugBackward(Level level, double[] errorsFromAboveLayer)
    {
        double[][] differentiationWrtWeight = new double[level.inputs.Length][];
        for (int i = 0; i < level.inputs.Length; i++)
        {
            differentiationWrtWeight[i] = new double[level.outputs.Length];
        }
        for (int i = 0; i < level.inputs.Length; i++)
        {
            for (int j = 0; j < level.outputs.Length; j++)
            {
                differentiationWrtWeight[i][j] = level.inputs[i] * errorsFromAboveLayer[j];
            }
        }
        Console.WriteLine("\n\nInputs:");
        debug.printArray(level.inputs);
        Console.WriteLine("\n\nOld Weights:");
        for (int i = 0; i < level.inputs.Length; i++){
            debug.printArray(level.weights[i]);
        }
        UpdateWeights(level, differentiationWrtWeight);
        Console.WriteLine("\n\nNew Weights:");
        for (int i = 0; i < level.inputs.Length; i++){
            debug.printArray(level.weights[i]);
        }
        List<double> errorInCurrentLayer = new List<double>();
        for (int i = 0; i < level.inputs.Length; i++)
        {
            double sum = 0;
            for (int j = 0; j < level.outputs.Length; j++)
            {
                sum += level.weights[i][j] * errorsFromAboveLayer[j];
            }
            errorInCurrentLayer.Add(sum);
        }
        return errorInCurrentLayer.ToArray();
    }
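    // Plain gradient descent step with a fixed learning rate:
    //   weights[i][j] -= learningRate * dLoss/dweights[i][j]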
    private static void UpdateWeights(Level level, double[][] differentiationWrtWeight)
    {
        const double learningRate = 0.000005;
        for (int i = 0; i < level.inputs.Length; i++)
        {
            for (int j = 0; j < level.outputs.Length; j++)
            {
                level.weights[i][j] = level.weights[i][j] - (learningRate * differentiationWrtWeight[i][j]);
            }
        }
    }
}
public class debug{
    public static void printArray(double[] x){
        string s = "";
        for (int i = 0; i < x.Length; i++){
            s = s + x[i].ToString() + " ";
        }
        Console.WriteLine(s);
    }
}