
Commit 3461489

gradient descent dir created
1 parent 4b68047 commit 3461489

File tree

2 files changed: +31 -0 lines changed


G/gradient_descent_for_machine_learning/README.md

Whitespace-only changes.

Lines changed: 31 additions & 0 deletions
@@ -0,0 +1,31 @@
import numpy as np  # NumPy supplies the arrays and vectorized arithmetic used below


def gradient_descent(x, y):
    m_current = b_current = 0
    iterations = 1000  # Number of gradient steps; roughly 1000 to 10000 is typical for this example
    n = len(x)
    learning_rate = 0.01350  # Step size: too large and the updates diverge, too small and convergence is slow

    for i in range(iterations):
        y_pred = m_current * x + b_current
        cost = (1 / (2 * n)) * sum((y_pred - y) ** 2)  # Mean squared error (the 1/2 factor simplifies the gradient); differentiable everywhere, unlike mean absolute error

        # Calculate the gradients
        md = -(1 / n) * sum(x * (y - y_pred))  # Partial derivative of the cost with respect to m
        bd = -(1 / n) * sum(y - y_pred)  # Partial derivative of the cost with respect to b

        # Update the coefficients by stepping against the gradient
        m_current -= learning_rate * md
        b_current -= learning_rate * bd

        print(f"Iteration {i + 1}: m = {m_current}, b = {b_current}, cost = {cost}")  # Log progress each iteration

    return m_current, b_current  # Final values of m and b


# Example usage:
x = np.array([1, 2, 3, 4, 5])
y = np.array([2, 4, 5, 4, 5])

final_m, final_b = gradient_descent(x, y)
print(f"Final slope (m): {final_m}")
print(f"Final intercept (b): {final_b}")

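As a quick sanity check (illustrative, not part of the commit), the learned coefficients can be compared against NumPy's closed-form least-squares fit; np.polyfit with degree 1 returns the slope and intercept directly:

# Sanity check (assumes x, y, final_m, final_b from the example above)
ls_m, ls_b = np.polyfit(x, y, 1)  # Closed-form degree-1 least-squares fit: [slope, intercept]
print(f"Least-squares slope: {ls_m}, intercept: {ls_b}")
# With enough iterations and a stable learning rate, final_m and final_b
# should be close to ls_m and ls_b.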