# Simple multi-layer perceptron / neural network in Python and Numpy
# For the class Data Science: Practical Deep Learning Concepts in Theano and TensorFlow
# https://deeplearningcourses.com/c/data-science-deep-learning-in-theano-tensorflow
# https://www.udemy.com/data-science-deep-learning-in-theano-tensorflow
45
56import numpy as np
67
78def forward (X , W1 , b1 , W2 , b2 ):
8- # Z = 1 / (1 + np.exp(-( X.dot(W1) + b1 )))
9+ Z = 1 / (1 + np .exp (- ( X .dot (W1 ) + b1 )))
910
1011 # rectifier
11- Z = X .dot (W1 ) + b1
12- Z [Z < 0 ] = 0
12+ # Z = X.dot(W1) + b1
13+ # Z[Z < 0] = 0
1314 # print "Z:", Z
1415
1516 A = Z .dot (W2 ) + b2
@@ -26,9 +27,9 @@ def derivative_b2(T, Y):
2627 return (Y - T ).sum (axis = 0 )
2728
def derivative_w1(X, Z, T, Y, W2):
    """Gradient of the loss w.r.t. the input-to-hidden weight matrix W1.

    Backpropagates the output error through the second layer and the
    sigmoid hidden nonlinearity:  dJ/dW1 = X^T . ((Y - T) W2^T * Z(1 - Z)).

    Assumed shapes (TODO confirm against caller): X is (N, D) inputs,
    Z is (N, M) sigmoid hidden activations, T and Y are (N, K) targets
    and predictions, W2 is (M, K). Returns a (D, M) gradient.
    """
    # Z * (1 - Z) is the elementwise derivative of the sigmoid that
    # produced Z in forward().
    return X.T.dot((Y - T).dot(W2.T) * (Z * (1 - Z)))  # for sigmoid
    # return X.T.dot((Y - T).dot(W2.T) * np.sign(Z))   # for relu
3132
def derivative_b1(Z, T, Y, W2):
    """Gradient of the loss w.r.t. the hidden-layer bias b1.

    Same backprop term as derivative_w1 — (Y - T) W2^T * Z(1 - Z) —
    but summed over the batch axis instead of multiplied by X^T,
    because the bias enters every sample identically.

    Assumed shapes (TODO confirm against caller): Z is (N, M) sigmoid
    hidden activations, T and Y are (N, K), W2 is (M, K).
    Returns a length-M gradient vector.
    """
    return ((Y - T).dot(W2.T) * (Z * (1 - Z))).sum(axis=0)  # for sigmoid
    # return ((Y - T).dot(W2.T) * np.sign(Z)).sum(axis=0)   # for relu