SGD (gradient descent) code in Python

Solutions on MaxInterview for "sgd code python" by the best coders in the world.

Showing results for: "sgd code python"
Emmanuel
09 Jun 2020
import numpy as np

def gradient_descent(
    gradient, x, y, start, learn_rate=0.1, n_iter=50, tolerance=1e-06,
    dtype="float64"
):
    """Minimize a cost function with (batch) gradient descent.

    Parameters
    ----------
    gradient : callable
        Called as ``gradient(x, y, vector)``; must return the gradient of
        the cost function at ``vector`` (scalar or array-like, broadcastable
        against ``vector``).
    x, y : array-like
        Observation inputs and targets; must have matching first dimensions.
    start : scalar or array-like
        Initial value(s) of the decision variable(s).
    learn_rate : scalar or array-like, optional
        Step size(s); every element must be > 0.
    n_iter : int, optional
        Maximum number of iterations; must be > 0.
    tolerance : scalar or array-like, optional
        Stop early once every component of the update is <= tolerance
        in absolute value; every element must be > 0.
    dtype : str or numpy dtype, optional
        dtype used for all internal NumPy arrays.

    Returns
    -------
    numpy.ndarray or scalar
        The final value(s) of the decision variable(s). A scalar ``start``
        yields a plain Python scalar (via ``.item()``), not a 0-d array.

    Raises
    ------
    TypeError
        If ``gradient`` is not callable.
    ValueError
        If the lengths of ``x`` and ``y`` differ, or if ``learn_rate``,
        ``n_iter``, or ``tolerance`` is not strictly positive.
    """
    # Checking if the gradient is callable
    if not callable(gradient):
        raise TypeError("'gradient' must be callable")

    # Setting up the data type for NumPy arrays
    dtype_ = np.dtype(dtype)

    # Converting x and y to NumPy arrays
    x, y = np.array(x, dtype=dtype_), np.array(y, dtype=dtype_)
    if x.shape[0] != y.shape[0]:
        raise ValueError("'x' and 'y' lengths do not match")

    # Initializing the values of the variables
    vector = np.array(start, dtype=dtype_)

    # Setting up and checking the learning rate
    learn_rate = np.array(learn_rate, dtype=dtype_)
    if np.any(learn_rate <= 0):
        raise ValueError("'learn_rate' must be greater than zero")

    # Setting up and checking the maximal number of iterations
    n_iter = int(n_iter)
    if n_iter <= 0:
        raise ValueError("'n_iter' must be greater than zero")

    # Setting up and checking the tolerance
    tolerance = np.array(tolerance, dtype=dtype_)
    if np.any(tolerance <= 0):
        raise ValueError("'tolerance' must be greater than zero")

    # Performing the gradient descent loop
    for _ in range(n_iter):
        # Recalculating the step: move against the gradient, scaled by
        # the learning rate (dtype passed by keyword, consistent with
        # the other np.array calls above).
        diff = -learn_rate * np.array(gradient(x, y, vector), dtype=dtype_)

        # Stop early once every component of the step is within tolerance
        if np.all(np.abs(diff) <= tolerance):
            break

        # Updating the values of the variables
        vector += diff

    # 0-d result (scalar start) is unwrapped to a plain Python scalar
    return vector if vector.shape else vector.item()