BatchGradientDescent.py
# This script implements the batch gradient descent optimization algorithm
# for a single-variable cost function f(x) provided by the user.
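# The iteration performed below is the standard gradient-descent update,
#     x_{k+1} = x_k - learningRate * f'(x_k),
# repeated until successive iterates differ by less than the error tolerance.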
# numpy is used implicitly by lambdify's "numpy" backend and must be installed.
from sympy import Symbol, diff, sympify, lambdify
def batchGradientDescent(cost_function, f):
    x = Symbol('x')
    print("f(x) = ", cost_function)

    # Differentiate the cost function symbolically, then compile the
    # derivative to a numeric function once, before the loop starts.
    derivative_fx = diff(cost_function, x)
    print("df(x)/dx = ", derivative_fx)
    df = lambdify(x, derivative_fx, "numpy")

    initialApproximation = float(input("\n---> Enter initial approximation: "))
    x0 = initialApproximation
    learningRate = float(input("---> Enter learning rate: "))
    errorTolerance = float(input("---> Enter error tolerance: "))

    print("\n---------------------------------------------------------------")
    print(" *** Starting Batch Gradient Descent")
    print(" ---> x0 =", initialApproximation)
    print(" ---> f(x0) =", f(initialApproximation))
    #----------------------------------------------------------------------------------------------------
    numIterations = 0
    xk = x0
    while True:
        numIterations += 1
        # Gradient-descent step: move against the gradient.
        xk = xk - learningRate * df(xk)
        # Stop once successive iterates are closer than the tolerance.
        if abs(xk - x0) < errorTolerance:
            break
        x0 = xk
    #----------------------------------------------------------------------------------------------------
    print(" *** Number of Iterations =", numIterations)
    print(" ---> Minimum is at =", xk)
    print(" ---> Minimum value of Cost Function =", f(xk))
    print("---------------------------------------------------------------\n")
# Code execution section
def main():
    x = Symbol('x')
    cost_function = input("---> Enter cost function f(x): ").strip()
    c_f = sympify(cost_function)
    f = lambdify(x, c_f, "numpy")
    batchGradientDescent(c_f, f)

if __name__ == "__main__":
    main()
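# A minimal sketch of an example session (the cost function and numbers below
# are illustrative inputs, not part of the script; the exact iteration count
# depends on the chosen learning rate and tolerance):
#     ---> Enter cost function f(x): x**2 + 4*x
#     ---> Enter initial approximation: 0
#     ---> Enter learning rate: 0.1
#     ---> Enter error tolerance: 1e-6
# For this function f'(x) = 2*x + 4, so the true minimum is at x = -2 with
# f(-2) = -4, and the reported minimum should approach those values.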