makispaiktis

Kaggle 2 - Deep Neural Networks + Activation Functions

Jul 11th, 2023
import tensorflow as tf
import matplotlib.pyplot as plt
import pandas as pd
from tensorflow import keras
from tensorflow.keras import layers

# Set Matplotlib defaults
# ('seaborn-whitegrid' was renamed to 'seaborn-v0_8-whitegrid' in Matplotlib 3.6)
plt.style.use('seaborn-v0_8-whitegrid')
plt.rc('figure', autolayout=True)
plt.rc('axes', labelweight='bold', labelsize='large',
       titleweight='bold', titlesize=18, titlepad=10)

# 1. Read and print the dataset
concrete = pd.read_csv('../input/dl-course-data/concrete.csv')
print(concrete.head(), "Shape = {}".format(concrete.shape), sep='\n\n', end='\n\n')

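# (Added sketch, not in the original paste: split features from the target to
# see where input_shape = [8] below comes from. The column name is an
# assumption based on the Kaggle DL-course concrete.csv, where
# 'CompressiveStrength' is the target and the other 8 columns are inputs.)
X = concrete.drop('CompressiveStrength', axis=1)
y = concrete['CompressiveStrength']
print("Number of input features = {}".format(X.shape[1]))  # expected: 8
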
# 2. Create 3 hidden layers and 1 output layer
input_shape = [8]
hidden1 = layers.Dense(units=512, activation='relu', input_shape=input_shape)
hidden2 = layers.Dense(units=512, activation='relu')
hidden3 = layers.Dense(units=512, activation='relu')
output = layers.Dense(units=1)  # linear output unit for regression
model = keras.Sequential([hidden1, hidden2, hidden3, output])

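# (Added check, not in the original paste: since the first layer was given its
# input shape, the model is already built, so summary() can report each Dense
# layer's output shape and parameter count.)
model.summary()
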
# 3. Rewrite the model, attaching the activations as separate layers
model = keras.Sequential([
    layers.Dense(units=32, input_shape=[8]),
    layers.Activation('relu'),
    layers.Dense(units=32),
    layers.Activation('relu'),
    layers.Dense(units=1),
])

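# (Added note: Dense(units, activation='relu') and a plain Dense followed by
# Activation('relu') compute the same function; splitting them just makes the
# graph explicit, e.g. if you later want another layer between the linear
# step and its activation.)
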
# 4. Alternatives to ReLU
choices = ['relu', 'elu', 'selu', 'swish']

activation_layer = layers.Activation('swish')
x = tf.linspace(-3.0, 3.0, 100)
y = activation_layer(x)  # once created, a layer is callable, just like a function

plt.figure(dpi=100)
plt.plot(x, y)
plt.xlim(-3, 3)
plt.xlabel("Input")
plt.ylabel("Output")
plt.show()
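
# (Added sketch extending the plot above, not in the original paste: graph
# every activation in `choices` side by side over the same input range.)
fig, axes = plt.subplots(1, len(choices), figsize=(14, 3), dpi=100)
for ax, name in zip(axes, choices):
    ax.plot(x, layers.Activation(name)(x))
    ax.set_title(name)
    ax.set_xlim(-3, 3)
plt.show()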