import com.neuralnetwork.Backpropagation;
import com.neuralnetwork.Layer;

import java.util.Random;

/************************
 * @author Max Rafiandy *
 ************************/
public class Main {
    public static void main(String[] args) {
        final Random random = new Random();
        // Training data: the AND truth table, one target per input pair.
        double[][] learning_x = { {0, 0}, {0, 1}, {1, 0}, {1, 1} };
        double[][] target_out = { {0}, {0}, {0}, {1} };
        Layer input_layer  = new Layer(learning_x[0].length, Layer.INPUT);
        Layer hidden_layer = new Layer(learning_x[0].length
                + target_out[0].length, Layer.HIDDEN);
        Layer output_layer = new Layer(target_out[0].length, Layer.OUTPUT);

        // Randomize the input->hidden weights and the hidden biases.
        // The weight matrix is allocated once, before the loop, so that
        // columns initialized on earlier iterations are not wiped out.
        hidden_layer.weight =
                new double[input_layer.neuron.length][hidden_layer.neuron.length];
        for (int j = 0; j < hidden_layer.neuron.length; j++) {
            hidden_layer.bias[j] = random.nextDouble();
            for (int i = 0; i < input_layer.neuron.length; i++) {
                hidden_layer.weight[i][j] = random.nextDouble();
            }
        }

        // Randomize the hidden->output weights and the output biases,
        // again allocating the matrix once up front.
        output_layer.weight =
                new double[hidden_layer.neuron.length][output_layer.neuron.length];
        for (int k = 0; k < output_layer.neuron.length; k++) {
            output_layer.bias[k] = random.nextDouble();
            for (int j = 0; j < hidden_layer.neuron.length; j++) {
                output_layer.weight[j][k] = random.nextDouble();
            }
        }
        // Train until the error falls below 1e-5. Each pass feeds every
        // sample through Backpropagation.learning with a learning rate
        // of 0.5 and accumulates the returned per-sample error.
        while (true) {
            double mse = 0;
            for (int i = 0; i < learning_x.length; i++) {
                input_layer.neuron = learning_x[i];
                mse += Backpropagation.learning(input_layer, hidden_layer,
                        output_layer, target_out[i], 0.5);
            }
            System.out.println("Training error: " + 0.5 * mse);
            if (0.5 * mse < 1e-5) break;
        }
    }
}
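
The com.neuralnetwork.Layer and com.neuralnetwork.Backpropagation classes are imported above but not included in the paste. As a rough guide, a minimal Layer consistent with how Main uses it (public neuron, bias, and weight fields, a (size, type) constructor, and the INPUT/HIDDEN/OUTPUT constants) might look like the sketch below; every detail here is inferred from the calling code, not taken from the actual library.

// Hypothetical sketch of the Layer class this paste depends on.
// Field names, types, and constructor behavior are inferred from Main;
// the real com.neuralnetwork.Layer may differ.
public class Layer {
    public static final int INPUT = 0;
    public static final int HIDDEN = 1;
    public static final int OUTPUT = 2;

    public double[] neuron;   // activations of this layer's neurons
    public double[] bias;     // one bias per neuron
    public double[][] weight; // incoming weights, indexed [from][to]
    public final int type;    // INPUT, HIDDEN, or OUTPUT

    public Layer(int size, int type) {
        this.neuron = new double[size];
        this.bias = new double[size];
        this.type = type;     // weight is assigned by the caller in Main
    }
}

Backpropagation.learning(...) would then be expected to run one forward and backward pass for a single sample, update the weights and biases in place, and return that sample's squared error; its actual implementation is not part of this paste.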