(*
   [nn ((x, y), (wxu, wyu))] returns the output [u] of the Multi-Layer Perceptron
   with inputs (x, y) and weights (wxu, wyu). [wxu] is the weight from [x] to
   [u] and [wyu] is the weight from [y] to [u]. We suggest you use the sigmoid
   as activation function:

     x ↦ 1 / (1 + e^{-x})
*)
let sigmoid (x : float) : float =
  inv (1 + exp (-x))
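
(*
   A minimal sanity check (not part of the original file; it assumes
   top-level value bindings are allowed, as with the definitions in this
   file): since e^0 = 1, [sigmoid 0] should evaluate to inv 2 = 0.5.
*)
let (sigmoid_at_zero : float) = sigmoid 0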
let nn (p :
        (* inputs  *) (float * float) *
        (* weights *) (float * float))
       (* output *) : float
  =
  let (x : float) = fst (fst p) in
  let (y : float) = snd (fst p) in
  let (wxu : float) = fst (snd p) in
  let (wyu : float) = snd (snd p) in
  sigmoid (x * wxu + y * wyu)
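
(*
   Usage sketch (hypothetical values, not part of the original file): with
   both weights set to 0, the weighted sum x * wxu + y * wyu is 0, so [nn]
   returns sigmoid 0 = 0.5 whatever the inputs are.
*)
let (nn_zero_weights : float) = nn ((1, 1), (0, 0))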
(*
   For a given input, the [error] function measures the distance between
   the network output and the expected output: half the squared difference.
*)
let error (p :
           (* inputs      *) (float * float) *
           (* weights     *) (float * float) *
           (* expectation *) float)
          : float
  =
  let (xu : float) = snd p in      (* expected output *)
  let (u : float) = nn (fst p) in  (* actual network output *)
  let (d : float) = -u + xu in     (* difference between the two *)
  0.5 * d * d
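
(*
   Usage sketch (hypothetical values, not part of the original file): with
   zero weights the network outputs u = 0.5; if the expected output is 1,
   then d = 0.5 and the error is 0.5 * 0.5 * 0.5 = 0.125.
*)
let (error_example : float) = error (((1, 1), (0, 0)), 1)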