Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import numpy as np
- from sklearn import datasets
- from sklearn.model_selection import train_test_split
- from sklearn.preprocessing import OneHotEncoder
- from sklearn.metrics import accuracy_score
# Seed NumPy's global RNG so the random weight initialization below is
# reproducible across runs.
np.random.seed(1)
# Define the activation functions and their derivatives
def sigmoid(x):
    """Logistic (sigmoid) activation, applied elementwise: 1 / (1 + e^(-x))."""
    denom = 1 + np.exp(-x)
    return 1 / denom
def sigmoid_derivative(x):
    """Derivative of the sigmoid, expressed in terms of the sigmoid's
    OUTPUT x (not the pre-activation): s'(z) = s(z) * (1 - s(z))."""
    one_minus = 1 - x
    return x * one_minus
def relu(x):
    """Rectified linear unit, applied elementwise: max(0, x)."""
    zero_floor = np.maximum(x, 0)
    return zero_floor
def relu_derivative(x):
    """Derivative of ReLU, applied elementwise: 1 where x > 0, else 0."""
    positive_mask = x > 0
    return np.where(positive_mask, 1, 0)
# Function to train the neural network
- def train(X, y, epochs, learning_rate, activation_hidden, activation_output, derivative_hidden, derivative_output):
- input_layer_neurons = X.shape[1]
- hidden_layer_neurons = 10
- output_layer_neurons = y.shape[1]
- hidden_weights = np.random.randn(input_layer_neurons, hidden_layer_neurons)
- hidden_bias = np.zeros((1, hidden_layer_neurons))
- output_weights = np.random.randn(hidden_layer_neurons, output_layer_neurons)
- output_bias = np.zeros((1, output_layer_neurons))
- for _ in range(epochs):
- hidden_layer_activation = np.dot(X, hidden_weights) + hidden_bias
- hidden_layer_output = activation_hidden(hidden_layer_activation)
- output_layer_activation = np.dot(hidden_layer_output, output_weights) + output_bias
- predicted_output = activation_output(output_layer_activation)
- error = y - predicted_output
- d_predicted_output = error * derivative_output(predicted_output)
- error_hidden_layer = d_predicted_output.dot(output_weights.T)
- d_hidden_layer = error_hidden_layer * derivative_hidden(hidden_layer_output)
- output_weights += hidden_layer_output.T.dot(d_predicted_output) * learning_rate
- output_bias += np.sum(d_predicted_output, axis=0, keepdims=True) * learning_rate
- hidden_weights += X.T.dot(d_hidden_layer) * learning_rate
- hidden_bias += np.sum(d_hidden_layer, axis=0, keepdims=True) * learning_rate
- return hidden_weights, hidden_bias, output_weights, output_bias
# Function to make predictions
- def predict(X, hidden_weights, hidden_bias, output_weights, output_bias, activation_hidden, activation_output):
- hidden_layer_activation = np.dot(X, hidden_weights) + hidden_bias
- hidden_layer_output = activation_hidden(hidden_layer_activation)
- output_layer_activation = np.dot(hidden_layer_output, output_weights) + output_bias
- predicted_output = activation_output(output_layer_activation)
- return predicted_output
# Prepare the data: Iris features with one-hot encoded class labels.
iris = datasets.load_iris()
X = iris.data
y = iris.target.reshape(-1, 1)
encoder = OneHotEncoder()
y = encoder.fit_transform(y).toarray()
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Read hyperparameters from stdin, formatted as "<epochs> <learning_rate>".
# (int()/float() are applied here once; the original re-cast epochs redundantly.)
console_input = input().split()
epochs, learning_rate = int(console_input[0]), float(console_input[1])

# Train and evaluate the network with ReLU in the hidden layer and
# sigmoid in the output layer.
hidden_weights_relu, hidden_bias_relu, output_weights_relu, output_bias_relu = train(
    X_train, y_train, epochs, learning_rate, relu, sigmoid, relu_derivative, sigmoid_derivative
)
predictions_relu = predict(X_test, hidden_weights_relu, hidden_bias_relu, output_weights_relu, output_bias_relu, relu, sigmoid)
accuracy_relu = accuracy_score(np.argmax(y_test, axis=1), np.argmax(predictions_relu, axis=1))

# Train and evaluate the network with sigmoid in both layers.
hidden_weights_sigmoid, hidden_bias_sigmoid, output_weights_sigmoid, output_bias_sigmoid = train(
    X_train, y_train, epochs, learning_rate, sigmoid, sigmoid, sigmoid_derivative, sigmoid_derivative
)
predictions_sigmoid = predict(X_test, hidden_weights_sigmoid, hidden_bias_sigmoid, output_weights_sigmoid, output_bias_sigmoid, sigmoid, sigmoid)
accuracy_sigmoid = accuracy_score(np.argmax(y_test, axis=1), np.argmax(predictions_sigmoid, axis=1))

# Print both accuracies, rounded to 4 decimal places.
print(f"{round(accuracy_relu,4)} {round(accuracy_sigmoid,4)}")
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement