Advertisement
makispaiktis

Kaggle 3 - Stochastic Gradient Descent and Animation of SGD

Jul 11th, 2023 (edited)
720
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 2.34 KB | None | 0 0
  1. # Setup plotting
  2. import matplotlib.pyplot as plt
  3. from learntools.deep_learning_intro.dltools import animate_sgd
  4. import numpy as np
  5. import pandas as pd
  6. from sklearn.preprocessing import StandardScaler, OneHotEncoder
  7. from sklearn.compose import make_column_transformer, make_column_selector
  8. from sklearn.model_selection import train_test_split
  9. from tensorflow import keras
  10. from tensorflow.keras import layers
  11. import pandas as pd
  12.  
# Set Matplotlib defaults for every plot produced below.
# NOTE(review): the 'seaborn-whitegrid' style was renamed to
# 'seaborn-v0_8-whitegrid' in Matplotlib 3.6+ — confirm the pinned
# Matplotlib version before changing this string.
plt.style.use('seaborn-whitegrid')
plt.rc('figure', autolayout=True)
# Bold, enlarged axis labels and titles for readability.
plt.rc('axes', labelweight='bold', labelsize='large', titleweight='bold', titlesize=18, titlepad=10)
# Render animations (used by animate_sgd at the end) as HTML5 video in notebooks.
plt.rc('animation', html='html5')

  19.  
# 1. Read the dataset
# Load the fuel-economy data; the 'FE' column is the regression target.
fuel = pd.read_csv('../input/dl-course-data/fuel.csv')
X = fuel.copy()
y = X.pop('FE')        # Remove target

# 2. Create a preprocessor for numbers and categorical columns
# Numeric columns are standardized; object (string) columns are one-hot encoded.
# NOTE(review): `sparse=False` was renamed `sparse_output` in scikit-learn 1.2
# and removed in 1.4 — confirm the pinned scikit-learn version.
preprocessor = make_column_transformer( (StandardScaler(), make_column_selector(dtype_include=np.number)),
                                        (OneHotEncoder(sparse=False), make_column_selector(dtype_include=object)) )

# X is rebound here from a DataFrame to a dense NumPy array.
X = preprocessor.fit_transform(X)
y = np.log(y) # log transform target instead of standardizing
# Network input width = number of columns after preprocessing.
input_shape = [X.shape[1]]
print("Input shape: {}".format(input_shape))

# Show raw vs. transformed data side by side for a sanity check.
print('Original data = \n', fuel.head(), end='\n\n\n')
print('Processed data = \n', pd.DataFrame(X[:10,:]).head(), end='\n\n\n')

  36.  
  37. # 3. Create and compile a DL model using layers and neurons
  38. model = keras.Sequential([  layers.Dense(128, activation='relu', input_shape=input_shape),
  39.                             layers.Dense(128, activation='relu'),    
  40.                             layers.Dense(64, activation='relu'),
  41.                             layers.Dense(1)  ])
  42. model.compile(optimizer='adam', loss='mae')
  43.  
  44. # 4. Train the model, see the history in epochs, visualize MAE
  45. history = model.fit(X, y, epochs=200, batch_size=128)
  46. history_df = pd.DataFrame(history.history)
  47. # See all the epochs (1-200)
  48. history_df['loss'].plot();
  49. # Start from epoch 5 (5-200)
  50. history_df.loc[5:, ['loss']].plot();
  51.  
  52.  
  53. # 5. Evaluate training - Animate SGD
  54.  
  55. learning_rate = 0.05
  56. batch_size = 128
  57. num_examples = 256
  58.  
  59. animate_sgd(
  60.     learning_rate=learning_rate,
  61.     batch_size=batch_size,
  62.     num_examples=num_examples,
  63.     # You can also change these, if you like
  64.     steps=50, # total training steps (batches seen)
  65.     true_w=3.0, # the slope of the data
  66.     true_b=2.0) # the bias of the data
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement