max2201111

progress bar

Oct 23rd, 2023
import tensorflow as tf
import numpy as np
from tensorflow import keras
from tqdm import tqdm

# Sample data: 300 examples with 20 features each, binary labels
X = np.random.rand(300, 20)
y = np.random.randint(0, 2, size=(300,))

# AdamW optimizer configuration
learning_rate = 0.001
weight_decay = 0.004

# Build a simple neural network
model = keras.Sequential([
    keras.Input(shape=(20,)),
    keras.layers.Dense(32, activation='relu'),
    keras.layers.Dense(1, activation='sigmoid')
])

# Create an AdamW optimizer (Adam with decoupled weight decay), built into
# TensorFlow as tf.keras.optimizers.AdamW since 2.11. It shrinks the weights
# on every optimizer step, so there is no need to push a hand-made "gradient"
# through apply_gradients: the original one-off loop did not implement weight
# decay, and its sign would have grown the kernels instead of shrinking them.
optimizer = tf.keras.optimizers.AdamW(
    learning_rate=learning_rate,
    weight_decay=weight_decay,
)

# Decay only the kernel (weight) matrices, mirroring the kernel-only
# intent of the original loop: bias variables are excluded.
optimizer.exclude_from_weight_decay(var_names=["bias"])

# Compile the model using the AdamW optimizer
model.compile(optimizer=optimizer, loss='binary_crossentropy', metrics=['accuracy'])

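# Fallback for older TensorFlow builds without tf.keras.optimizers.AdamW:
# a minimal sketch (not called anywhere in this script) of decoupled weight
# decay applied by hand to the kernel variables, which is what the original
# per-variable loop appears to have been aiming for. True AdamW shrinks the
# weights on every batch update; calling this once after each one-epoch
# fit() in the loop below would be a per-epoch simplification.
def decay_kernels(model, learning_rate, weight_decay):
    for var in model.trainable_variables:
        if 'kernel' in var.name:  # decay weights only, leave biases alone
            # var <- var - lr * wd * var  (decoupled decay, as in AdamW)
            var.assign_sub(learning_rate * weight_decay * var)
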
# Define the number of epochs
total_epochs = 1000

# Train the model with a custom progress bar: tqdm tracks the epochs, and
# Keras' own per-epoch log line is enabled only every 100th epoch
# (tqdm.write prints it without mangling the running bar).
for epoch in tqdm(range(total_epochs), unit="epoch"):
    if (epoch + 1) % 100 == 0:
        verbose = 1
        tqdm.write(f"Epoch {epoch + 1}/{total_epochs}")
    else:
        verbose = 0
    model.fit(X, y, epochs=1, batch_size=32, verbose=verbose)
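
# Alternative progress bar, a sketch assuming tqdm's bundled Keras
# integration (from tqdm.keras import TqdmCallback, shipped with recent
# tqdm releases): a single fit() call runs all the epochs and the callback
# draws the bar, avoiding the one-epoch-at-a-time loop above. Guarded by a
# flag and run on a fresh clone so the model trained above is untouched.
RUN_CALLBACK_VARIANT = False
if RUN_CALLBACK_VARIANT:
    from tqdm.keras import TqdmCallback
    alt_model = keras.models.clone_model(model)
    alt_model.compile(optimizer=tf.keras.optimizers.AdamW(
                          learning_rate=learning_rate,
                          weight_decay=weight_decay),
                      loss='binary_crossentropy', metrics=['accuracy'])
    alt_model.fit(X, y, epochs=total_epochs, batch_size=32, verbose=0,
                  callbacks=[TqdmCallback(verbose=0)])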