import numpy as np


def origin(x):
    """The function being maximised: f(x) = 2*x - x**2 (peak at x = 1)."""
    return 2 * x - x ** 2


def derivative(x):
    """Analytic derivative of ``origin``: f'(x) = 2 - 2*x."""
    return 2 - 2 * x


def gradient_ascent(start=0.0, alpha=0.1, precision=0.001, callback=None):
    """Climb ``origin`` by gradient ascent and return the extreme point.

    Parameters:
        start: initial value of the climb.
        alpha: step size, i.e. the learning rate.
        precision: tolerance — stop once the last step is smaller than this.
        callback: optional callable invoked with each visited point
            (the demo below uses it to scatter-plot the trajectory).

    Returns:
        The approximate maximiser of ``origin`` (close to 1.0 for the
        default settings).
    """
    point = start
    while True:
        if callback is not None:
            callback(point)
        step = alpha * derivative(point)  # climbing pace
        point += step                     # take the step uphill
        if abs(step) < precision:
            return point                  # step is tiny -> converged


if __name__ == "__main__":
    # Plotting stays out of module import so the maths above is reusable.
    import matplotlib.pyplot as mp

    xs = np.linspace(0, 2, 9999)
    mp.plot(xs, origin(xs), c='black')  # visualize the curve
    gradient_ascent(callback=lambda p: mp.scatter(p, origin(p)))
    mp.show()
# NOTE(review): this whole section duplicates the gradient-ascent demo earlier
# in this file (a copy/paste artifact of the source article) — one copy should
# be deleted.
import matplotlib.pyplot as mp
import numpy as np


def origin(x):
    """Original function: f(x) = 2*x - x**2, with its maximum at x = 1."""
    return 2 * x - x ** 2


def derivative(x):
    """Analytic derivative of the original function: f'(x) = 2 - 2*x."""
    return 2 - 2 * x


x = np.linspace(0, 2, 9999)
mp.plot(x, origin(x), c='black')  # visualize the curve

# Gradient ascent: repeatedly step uphill until the step is tiny.
extreme_point = 0      # initial value
alpha = 0.1            # step size, i.e. the learning rate
precision = 0.001      # tolerance for stopping
while True:
    mp.scatter(extreme_point, origin(extreme_point))  # mark visited point
    error = alpha * derivative(extreme_point)  # climbing pace
    extreme_point += error  # take the step uphill
    if abs(error) < precision:
        break  # exit the iteration once the step is smaller than the tolerance
mp.show()