added colored printing for fanciness
author     Brendan Hansen <brendan.f.hansen@gmail.com>
           Fri, 22 Jan 2021 18:10:11 +0000 (12:10 -0600)
committer  Brendan Hansen <brendan.f.hansen@gmail.com>
           Fri, 22 Jan 2021 18:10:11 +0000 (12:10 -0600)
src/mnist.onyx
src/neuralnet.onyx

diff --git a/src/mnist.onyx b/src/mnist.onyx
index 54fb416e718cfc6193e1723a2da3b8273359b71a..1099db155fd4d6750aaac581ac47315c599fe66b 100644
--- a/src/mnist.onyx
+++ b/src/mnist.onyx
@@ -56,17 +56,31 @@ stocastic_gradient_descent :: (nn: ^NeuralNet, mnist_data: ^MNIST_Data, training
             for i: input.count do input[i] = (cast(float) cast(u32) example[i]) / 255;
 
             neural_net_forward(nn, ~~ input);
+            neural_net_backward(nn, ~~ expected);
+
             if ex % 100 == 0 {
-                print_array(expected);
+                print_colored_array :: (arr: [] $T) {
+                    greatest_idx := 0;
+                    for i: arr.count do if arr[i] > arr[greatest_idx] do greatest_idx = i;
+
+                    for i: arr.count {
+                        if i == greatest_idx {
+                            printf("\x1b[94m%f\x1b[0m ", arr[i]);
+                        } else {
+                            printf("%f ", arr[i]);
+                        }
+                    }
+                    print("\n");
+                }
+
+                print_colored_array(cast([] f32) expected);
 
                 output := neural_net_get_output(nn);
-                print_array(output);
+                print_colored_array(output);
 
                 loss := neural_net_loss(nn, ~~ expected);
                 printf("MSE loss: %f\n", cast(f32) loss);
             }
-
-            neural_net_backward(nn, ~~ expected);
         }
     }
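
For reference, `\x1b[94m` is the ANSI escape sequence that switches the terminal foreground to bright blue, and `\x1b[0m` resets all attributes, so `print_colored_array` tints only the arg-max entry of each vector it prints. An illustrative line of output (the values here are hypothetical):

    0.042113 0.011628 0.913772 0.008841 ...   <- third entry rendered in bright blue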
 
diff --git a/src/neuralnet.onyx b/src/neuralnet.onyx
index 4b5361443f8384b89b6df028f2c2b0b04b26177c..48c1b3aabad525d543376f9cd25de7a4a521a2e7 100644
--- a/src/neuralnet.onyx
+++ b/src/neuralnet.onyx
@@ -138,7 +138,7 @@ init_layer :: (use layer: ^Layer, layer_size: u32, prev_layer_size: u32, allocat
 
     deltas = memory.make_slice(float, layer_size, allocator);
 
-    activation = tanh_activation;
+    activation = sigmoid_activation;
 
     if prev_layer_size > 0 {
         weights = memory.make_slice(#type [] float, layer_size, allocator);
@@ -154,7 +154,7 @@ init_layer :: (use layer: ^Layer, layer_size: u32, prev_layer_size: u32, allocat
 randomize_weights_and_biases :: (use layer: ^Layer) {
     for ^weight: weights {
         for ^w: *weight {
-            *w = cast(float) random.float(-1.0f, 1.0f);
+            *w = cast(float) random.float(-0.5f, 0.5f);
         }
     }
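
A quick sanity check on the narrower initialization range (the rationale is presumed, not stated in the commit): a uniform draw on (-a, a) has variance a^2/3, so tightening from (-1, 1) to (-0.5, 0.5) cuts each weight's variance from 1/3 to 1/12 and halves the standard deviation of every pre-activation sum. With the sigmoid units introduced above, this keeps early pre-activations nearer 0, where the derivative fx * (1 - fx) is largest, rather than out in the flat tails where learning stalls.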
 
@@ -184,8 +184,8 @@ layer_forward :: (use layer: ^Layer, prev_layer: ^Layer) {
 
 
 ActivationFunction :: struct {
-    forward  : proc (x : float)           -> float;
-    backward : proc (fx: float, x: float) -> float;
+    forward  : (x : float)           -> float;
+    backward : (fx: float, x: float) -> float;
 }
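
Given the two signatures above, the `sigmoid_activation` value referenced in the first neuralnet.onyx hunk presumably pairs the standard logistic function with its cached-output derivative. A minimal sketch follows; the procedure names and the `math.exp` call are assumptions for illustration, not code from this commit:

    // Hypothetical sketch, not taken from the repository.
    sigmoid_forward :: (x: float) -> float {
        ex := math.exp(x);       // assumes an exp procedure in the math package
        return ex / (1 + ex);    // algebraically equal to 1 / (1 + e^(-x))
    }

    sigmoid_backward :: (fx: float, x: float) -> float {
        return fx * (1 - fx);    // derivative expressed via the cached forward output fx
    }

The struct change itself only drops the old `proc` keyword from the function-type syntax; the field types are otherwise unchanged.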