
Commit 867aa1d

Merge pull request #52 from keineahnung2345/update-203-activation
update to torch 0.4
2 parents bf4cf1e + 59487ef commit 867aa1d

File tree

1 file changed: +7 -8 lines changed


tutorial-contents/203_activation.py

Lines changed: 7 additions & 8 deletions
@@ -3,7 +3,7 @@
 My Youtube Channel: https://www.youtube.com/user/MorvanZhou
 
 Dependencies:
-torch: 0.1.11
+torch: 0.4
 matplotlib
 """
 import torch
import torch
@@ -17,12 +17,11 @@
 x_np = x.data.numpy()    # numpy array for plotting
 
 # following are popular activation functions
-y_relu = F.relu(x).data.numpy()
-y_sigmoid = F.sigmoid(x).data.numpy()
-y_tanh = F.tanh(x).data.numpy()
-y_softplus = F.softplus(x).data.numpy()
-# y_softmax = F.softmax(x) softmax is a special kind of activation function, it is about probability
-
+y_relu = torch.relu(x).data.numpy()
+y_sigmoid = torch.sigmoid(x).data.numpy()
+y_tanh = torch.tanh(x).data.numpy()
+y_softplus = F.softplus(x).data.numpy()  # there's no softplus in torch
+# y_softmax = torch.softmax(x, dim=0).data.numpy() softmax is a special kind of activation function, it is about probability
 
 # plt to visualize these activation function
 plt.figure(1, figsize=(8, 6))
@@ -46,4 +45,4 @@
 plt.ylim((-0.2, 6))
 plt.legend(loc='best')
 
-plt.show()
+plt.show()
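
For reference, here is a minimal runnable sketch of the updated activation code under torch >= 0.4, the version this commit targets: the common activations are called from the top-level torch namespace, softplus stays in torch.nn.functional (it has no top-level counterpart, per the diff comment), and softmax takes an explicit dim argument. The input range and subplot layout below are illustrative assumptions, not taken verbatim from the rest of 203_activation.py.

import torch
import torch.nn.functional as F
import matplotlib.pyplot as plt

# fake data: 200 points in [-5, 5] (illustrative range, assumed here)
x = torch.linspace(-5, 5, 200)
x_np = x.data.numpy()    # numpy array for plotting

# torch 0.4: common activations live in the top-level torch namespace
y_relu = torch.relu(x).data.numpy()
y_sigmoid = torch.sigmoid(x).data.numpy()
y_tanh = torch.tanh(x).data.numpy()
y_softplus = F.softplus(x).data.numpy()    # softplus still lives in torch.nn.functional
# softmax outputs probabilities along the given axis, hence the explicit dim
y_softmax = torch.softmax(x, dim=0).data.numpy()

# plot the four curves in a 2x2 grid
plt.figure(1, figsize=(8, 6))
for i, (name, y) in enumerate([('relu', y_relu), ('sigmoid', y_sigmoid),
                               ('tanh', y_tanh), ('softplus', y_softplus)], start=1):
    plt.subplot(2, 2, i)
    plt.plot(x_np, y, c='red', label=name)
    plt.legend(loc='best')
plt.show()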
