Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
"""Gradient-descent demo: iteratively minimise a 1-D function.

Refactored from a flat script into a reusable function; running the file
directly reproduces the original script's behaviour.
"""


def gradient_descent(gradient, start=2, lr=0.01, precision=0.000001,
                     max_iter=10000, verbose=True):
    """Minimise a 1-D function by stepping against its gradient.

    Args:
        gradient: callable returning the gradient (derivative) at x.
        start: initial value of x (starting point).
        lr: learning rate (step-size multiplier).
        precision: stop once the change in x is smaller than this.
        max_iter: maximum number of iterations to prevent an infinite loop.
        verbose: when True, print the value of x at every iteration.

    Returns:
        (x, history): the final x and the list of x values after each update.
    """
    x = start
    previous_step_size = 1  # initialise above `precision` so the loop runs
    iteration = 0  # renamed from `iter` to avoid shadowing the builtin
    history = []  # x after each update, kept for plotting by the caller

    while precision < previous_step_size and iteration < max_iter:
        prev = x
        x = x - lr * gradient(prev)  # gradient-descent update rule
        previous_step_size = abs(x - prev)
        iteration += 1
        if verbose:
            print('Iteration', iteration, 'Value:', x)
        history.append(x)

    return x, history


if __name__ == '__main__':
    # NOTE(review): if the objective is f(x) = (x + 3)**2, its true gradient
    # is 2*(x + 3); the original code uses f itself as the "gradient".  That
    # still converges to x = -3 (where this expression is zero), just more
    # slowly -- confirm which was intended.
    gf = lambda x: (x + 3)**2

    x_min, gd = gradient_descent(gf)
    print('Local Minima:', x_min)

    # Imported lazily so the numeric routine works without matplotlib.
    import matplotlib.pyplot as plt
    plt.plot(gd)
    plt.show()  # bug fix: the original never called show(), so no figure appeared
Advertisement
Add Comment
Please sign in to add a comment.
Advertisement