feed_forward.py
import numpy as np
from activation_function_f import *  # provides sigmoid and relu


def linear_forward(A, W, b):
    """
    Linear part of a layer's forward propagation.

    Arguments:
    A : activations from the previous layer (or the input data)
    W : weights matrix
    b : bias vector

    Returns:
    Z : pre-activation value, np.dot(W, A) + b
    cache : (A, W, b), values needed for backpropagation
    """
    Z = np.dot(W, A) + b
    cache = (A, W, b)
    return Z, cache
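
# Shape note (illustrative, not from the original source): with W of shape
# (n_out, n_in), A of shape (n_in, m) for a batch of m examples, and b of
# shape (n_out, 1), broadcasting in linear_forward yields Z of shape
# (n_out, m).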
def linear_activation_forward(A_prev, W, b, activation):
    """
    Linear forward step followed by the activation function.

    Arguments:
    A_prev : activations from the previous layer
    W : weights matrix
    b : bias vector
    activation : "sigmoid" or "relu", the non-linearity to apply

    Returns:
    A : post-activation value
    cache : (linear_cache, activation_cache), values needed for backpropagation
    """
    if activation == "sigmoid":
        Z, linear_cache = linear_forward(A_prev, W, b)
        A, activation_cache = sigmoid(Z)
    elif activation == "relu":
        Z, linear_cache = linear_forward(A_prev, W, b)
        A, activation_cache = relu(Z)
    else:
        raise ValueError("Unknown activation: " + str(activation))
    cache = (linear_cache, activation_cache)
    return A, cache
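

# A minimal usage sketch, assuming activation_function_f defines sigmoid and
# relu that each return an (output, cache) pair, as the calls above expect.
# The shapes below are illustrative and not taken from the original code.
if __name__ == "__main__":
    np.random.seed(0)
    A_prev = np.random.randn(3, 2)  # 3 input features, batch of 2 examples
    W = np.random.randn(4, 3)       # layer with 4 units
    b = np.zeros((4, 1))
    A, cache = linear_activation_forward(A_prev, W, b, activation="relu")
    print(A.shape)                  # expected: (4, 2)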