Solution
Algorithm Steps:
1. Differentiate f(x) = a*x^2 + b*x + c to get the gradient f'(x) = 2*a*x + b.
2. Start from an initial guess (here x = 0).
3. Repeatedly update x with x = x - learning_rate * f'(x) for the given number of iterations.
4. Return the final x; for a > 0 it approaches the minimiser of f(x).
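For a convex quadratic (a > 0), the result of the iteration can be checked against the closed-form minimiser obtained by setting the gradient to zero:

f'(x) = 2ax + b = 0 \;\Rightarrow\; x^{*} = -\dfrac{b}{2a}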
class Solution:
    def compute_gradient(self, a: float, b: float, c: float, x: float) -> float:
        """
        Compute the gradient of f(x) = a*x^2 + b*x + c with respect to x.
        Args:
            a, b, c : coefficients of the quadratic f(x) (c does not affect the gradient)
            x : point at which the gradient is evaluated
        Return:
            d_x : gradient of f w.r.t. x, i.e. f'(x) = 2*a*x + b
        """
        d_x = 2 * a * x + b
        return d_x

    def compute_minima(self, a: float, b: float, c: float, learning_rate: float, number_of_iteration: int):
        """
        Minimise f(x) = a*x^2 + b*x + c using gradient descent.
        Args:
            a, b, c : coefficients of the quadratic f(x)
            learning_rate : step size used in each gradient descent update
            number_of_iteration : number of gradient updates to perform
        Return:
            x value at which f(x) is (approximately) minimal
        """
        if a <= 0:
            # f(x) is linear or opens downward, so it has no finite minimum
            return "Minima cannot be found"
        x = 0  # initial value; other initialisation strategies are possible
        for _ in range(number_of_iteration):
            d_x = self.compute_gradient(a, b, c, x)
            x = x - learning_rate * d_x  # gradient descent update
        return x
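A minimal usage sketch of the class above; the coefficients, learning rate, and iteration count here are illustrative choices, not values from the original problem:

if __name__ == "__main__":
    solver = Solution()
    # f(x) = x^2 - 4x + 3 has its minimum at x = -b / (2a) = 2
    x_min = solver.compute_minima(a=1.0, b=-4.0, c=3.0,
                                  learning_rate=0.1,
                                  number_of_iteration=1000)
    print(x_min)  # converges to approximately 2.0

With a = 1 and learning_rate = 0.1 the update is x <- 0.8*x + 0.4, a contraction whose fixed point is exactly the analytical minimiser x = 2, so 1000 iterations are more than enough for convergence.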