XOR.py
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # registers the 3d projection on older matplotlib versions

from dense import Dense
from activations import Tanh, Relu, Sigmoid
from losses import mse, mse_prime, binary_cross_entropy, binary_cross_entropy_prime

# XOR truth table as column vectors: inputs of shape (2, 1), targets of shape (1, 1)
X = np.reshape([[0, 0], [0, 1], [1, 0], [1, 1]], (4, 2, 1))
Y = np.reshape([[0], [1], [1], [0]], (4, 1, 1))

epochs = 10000
learning_rate = 0.01

# 2-3-1 network: ReLU hidden layer, sigmoid output paired with binary cross-entropy
# (Tanh, mse and mse_prime are imported but unused in this configuration)
network = [
    Dense(2, 3),
    Relu(),
    Dense(3, 1),
    Sigmoid()
]

# train
for e in range(epochs):
    error = 0
    for x, y in zip(X, Y):
        # forward pass
        output = x
        for layer in network:
            output = layer.forward(output)

        # accumulate loss
        error += binary_cross_entropy(y, output)

        # backward pass: propagate the loss gradient and update each layer
        grad = binary_cross_entropy_prime(y, output)
        for layer in reversed(network):
            grad = layer.backward(grad, learning_rate)

    error /= len(X)
    print(f"{e + 1}/{epochs}, error={error}")

# prediction
print()
for x in X:
    z = x
    for layer in network:
        z = layer.forward(z)
    print(x.tolist(), "->", z)

# decision boundary plot: evaluate the network on a 20x20 grid over [0, 1]^2
points = []
for x in np.linspace(0, 1, 20):
    for y in np.linspace(0, 1, 20):
        z = np.reshape([x, y], (2, 1))
        for layer in network:
            z = layer.forward(z)
        points.append([x, y, z[0, 0]])

points = np.array(points)
fig = plt.figure()
ax = fig.add_subplot(111, projection="3d")
ax.scatter(points[:, 0], points[:, 1], points[:, 2], c=points[:, 2], cmap="winter")
plt.show()
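
The modules dense.py, activations.py and losses.py are imported above but not shown on this page. The sketch below is a minimal illustration of the interfaces the training loop assumes, reconstructed from how they are called (layers expose forward(input) and backward(output_gradient, learning_rate); losses take (y_true, y_pred)); it is not the repository's actual implementation.

# sketch_interfaces.py (hypothetical) -- assumed interfaces, not the repo's files
import numpy as np

class Layer:
    # Base interface used by the training loop in XOR.py.
    def forward(self, input):
        raise NotImplementedError
    def backward(self, output_gradient, learning_rate):
        raise NotImplementedError

class Dense(Layer):
    # Fully connected layer y = W x + b on column-vector inputs of shape
    # (input_size, 1), matching the (4, 2, 1) reshape of X in XOR.py.
    def __init__(self, input_size, output_size):
        self.weights = np.random.randn(output_size, input_size)
        self.bias = np.random.randn(output_size, 1)
    def forward(self, input):
        self.input = input
        return np.dot(self.weights, input) + self.bias
    def backward(self, output_gradient, learning_rate):
        weights_gradient = np.dot(output_gradient, self.input.T)
        input_gradient = np.dot(self.weights.T, output_gradient)
        self.weights -= learning_rate * weights_gradient
        self.bias -= learning_rate * output_gradient
        return input_gradient

class Activation(Layer):
    # Element-wise activation defined by a function and its derivative.
    def __init__(self, activation, activation_prime):
        self.activation = activation
        self.activation_prime = activation_prime
    def forward(self, input):
        self.input = input
        return self.activation(input)
    def backward(self, output_gradient, learning_rate):
        return output_gradient * self.activation_prime(self.input)

class Sigmoid(Activation):
    def __init__(self):
        sigmoid = lambda x: 1 / (1 + np.exp(-x))
        sigmoid_prime = lambda x: sigmoid(x) * (1 - sigmoid(x))
        super().__init__(sigmoid, sigmoid_prime)

class Relu(Activation):
    def __init__(self):
        relu = lambda x: np.maximum(0, x)
        relu_prime = lambda x: (x > 0).astype(float)
        super().__init__(relu, relu_prime)

def binary_cross_entropy(y_true, y_pred):
    return np.mean(-y_true * np.log(y_pred) - (1 - y_true) * np.log(1 - y_pred))

def binary_cross_entropy_prime(y_true, y_pred):
    return ((1 - y_true) / (1 - y_pred) - y_true / y_pred) / np.size(y_true)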