import random                                   # shuffling of the training set (if not already imported earlier)
import numpy as np
from scipy.spatial.distance import euclidean    # assuming euclidean() comes from SciPy

def obj_f_der_point(w, obs_x, obs_y):
    """Contribution of a single observation (obs_x, obs_y) to the gradient of the objective function in the parameters w"""
    return np.dot(2 * np.array([1, obs_x]), diff(f(np.array([1, obs_x]), w), obs_y))
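# Derivation note: for the squared error on one observation, the gradient with respect to
# w is 2 * [1, obs_x] * (f([1, obs_x], w) - obs_y), which is what obj_f_der_point returns,
# assuming the helper diff(a, b) defined earlier computes the residual a - b.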
# Perform stochastic gradient descent to find the parameters of the fitted line
# (the learning rate alpha, max_epochs and the tolerance p are assumed to be set earlier)
training_set = [(x[i], y[i]) for i in range(len(x))]
former_w = np.array([10, 5])  # the chosen starting point for the descent
epoch = 0
converged = False
while epoch < max_epochs and not converged:
    random.shuffle(training_set)  # visit the observations in a fresh random order each epoch
    for point in training_set:
        w = former_w - alpha * obj_f_der_point(former_w, point[0], point[1])
        if euclidean(former_w, w) <= p:  # stop once a step moves the parameters less than the tolerance p
            print('Found parameters (intercept, slope):', w)
            converged = True
            break
        former_w = w  # the updated parameters become the starting point of the next step
    epoch += 1
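
# A minimal sanity check, sketched under the assumption that x and y are the 1-D arrays
# used above: compare the SGD estimate with NumPy's closed-form least-squares fit.
ls_slope, ls_intercept = np.polyfit(x, y, 1)  # np.polyfit returns the highest-degree coefficient first
print('Least-squares parameters (intercept, slope):', np.array([ls_intercept, ls_slope]))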