Commit

Add module for Network / MLP
rounakdatta committed Apr 12, 2024
1 parent e041c7f commit 1ecf443
Showing 6 changed files with 46 additions and 6 deletions.
2 changes: 1 addition & 1 deletion lib/dune
@@ -1,4 +1,4 @@
 (library
  (name smolgrad)
  (public_name smolgrad)
- (modules variable neuron layer))
+ (modules variable neuron layer network))
4 changes: 2 additions & 2 deletions lib/layer.ml
@@ -3,8 +3,8 @@ module Layer = struct
     neurons : Neuron.Neuron.t list
   }

-  let create number_of_inputs number_of_neurons is_non_linear =
-    let neurons = List.init number_of_neurons (fun _ -> Neuron.Neuron.create number_of_inputs is_non_linear) in
+  let create number_of_input_dimensions number_of_neurons is_non_linear =
+    let neurons = List.init number_of_neurons (fun _ -> Neuron.Neuron.create number_of_input_dimensions is_non_linear) in
     { neurons = neurons }

   let propagate_input (layer: t) input_vector =
28 changes: 28 additions & 0 deletions lib/network.ml
@@ -0,0 +1,28 @@
+module Network = struct
+  type t = {
+    layers : Layer.Layer.t list;
+  }
+
+  (* the number of output parameters of a layer is the number of neurons in that layer *)
+  (* note that the input layer isn't truly a layer with weights and biases; it is just an abstraction *)
+  let create number_of_input_dimensions number_of_neurons_per_layer =
+    let size_of_each_layer = number_of_input_dimensions :: number_of_neurons_per_layer in
+
+    let rec build_layers stacked_layers sizes =
+      match sizes with
+      | input :: output :: rest ->
+        let layer = Layer.Layer.create input output true in
+
+        (* note how we are stacking the layers in reverse order *)
+        build_layers (layer :: stacked_layers) (output :: rest)
+
+      (* hence we have to reverse them at the end *)
+      | _ -> List.rev stacked_layers
+    in
+    { layers = build_layers [] size_of_each_layer }
+
+  (* the input vector, as it propagates through each layer, becomes that layer's intermediate
+     output and the next layer's input; the last layer's output is the final output vector *)
+  let propagate_input (network: t) input_vector =
+    List.fold_left (fun intermediate_output layer -> Layer.Layer.propagate_input layer intermediate_output) input_vector network.layers
+end
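
For orientation, a minimal usage sketch of the new module (hypothetical sizes and inputs, not part of this commit; it assumes the library modules are visible to the consumer, e.g. via an open of the smolgrad wrapper). Network.Network.create 3 [4; 1] stacks a 3-to-4 hidden layer and a 4-to-1 output layer, and propagate_input folds the input vector through them:

    (* a usage sketch with made-up values; module paths as used inside the library *)
    let () =
      (* 3 input dimensions, one hidden layer of 4 neurons, one output neuron *)
      let mlp = Network.Network.create 3 [4; 1] in
      let input = List.map Variable.Variable.create [3.0; 2.0; 1.0] in
      let output = Network.Network.propagate_input mlp input in
      Printf.printf "output vector has %d element(s)\n" (List.length output)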
8 changes: 8 additions & 0 deletions lib/network.mli
@@ -0,0 +1,8 @@
+(* Multi-Layer Perceptron, so that we can connect multiple stacked layers *)
+module Network : sig
+  type t
+
+  val create : int -> int list -> t
+
+  val propagate_input : t -> Variable.Variable.t list -> Variable.Variable.t list
+end
4 changes: 2 additions & 2 deletions lib/neuron.ml
@@ -20,8 +20,8 @@ module Neuron = struct
     bias = neuron.bias;
   }

-  let create number_of_inputs is_non_linear = {
-    weights = List.init number_of_inputs (fun _ -> Variable.Variable.create (random_weight_initializer));
+  let create number_of_input_dimensions is_non_linear = {
+    weights = List.init number_of_input_dimensions (fun _ -> Variable.Variable.create (random_weight_initializer));
     bias = Variable.Variable.create 0.0;
     is_non_linear = is_non_linear;
   }
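
The rename makes the first argument's meaning explicit: it is the dimensionality of the input vector, i.e. the number of weights to initialize. An illustrative call (not from this commit, mirroring the one in the tests):

    (* a neuron accepting 3-dimensional input: 3 random weights, bias 0.0, non-linear *)
    let n = Neuron.Neuron.create 3 true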
6 changes: 5 additions & 1 deletion test/neuron_operations.ml
@@ -26,13 +26,17 @@ let test_neuron_initialization () =

 let test_neuron_weights_reacting_to_input () =
   let n = Neuron.create 3 false in
-  let n_weight_values = Array.of_list (List.map (fun x -> Variable.data x) (Neuron.parameters n).weights) in

   (* make the same neuron react to two different inputs *)
   let neuron_activation_for_input_1 = Neuron.weigh_input n [Variable.create 3.0; Variable.create 2.0; Variable.create 1.0] in
   let neuron_activation_for_input_2 = Neuron.weigh_input n [Variable.create 8.0; Variable.create 13.0; Variable.create (-4.0)] in

+  let n_weight_values = Array.of_list (List.map (fun x -> Variable.data x) (Neuron.parameters n).weights) in
+  (* since it is a weighted sum, we'll cleverly subtract the two and compare the difference *)
+  (* remember that the weights are initialized to random values, hence all this witchery *)
   let remainder = (3.0 -. 8.0) *. n_weight_values.(0) +. (2.0 -. 13.0) *. n_weight_values.(1) +. (1.0 -. (-4.0)) *. n_weight_values.(2) in
+
+  (* 0.01 is the float tolerance / round-off *)
   Alcotest.(check (float 0.01))
     "Neuron: Neuron activation upon input happened correctly"
     remainder
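
The hunk is truncated here, but the arithmetic the test relies on is worth spelling out (an illustration, not part of the commit): the neuron is created with is_non_linear = false, so each activation is a plain affine map, and the bias cancels when the two activations are subtracted:

    activation_1 - activation_2
      = (w0*3.0 + w1*2.0 + w2*1.0 + b) - (w0*8.0 + w1*13.0 + w2*(-4.0) + b)
      = w0*(3.0 - 8.0) + w1*(2.0 - 13.0) + w2*(1.0 - (-4.0))
      = remainder

So the difference depends only on the (random) weights and the known input difference, which is exactly what remainder computes by hand.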
