small changes (main)
author     Brendan Hansen <brendan.f.hansen@gmail.com>
Wed, 14 Apr 2021 19:58:42 +0000 (14:58 -0500)
committer  Brendan Hansen <brendan.f.hansen@gmail.com>
Wed, 14 Apr 2021 20:02:35 +0000 (15:02 -0500)
src/mnist.onyx
src/neuralnet.onyx

src/mnist.onyx
index 620041878b13093d5786a0f35479f2a66d0de437..c224ced8a502780b4d41ee0e6b99b0290cf21ca8 100644
@@ -1,7 +1,6 @@
 #load "core/std"
 
-#load_path "src"
-#load "neuralnet"
+#load "src/neuralnet"
 
 use package core
 
@@ -144,43 +143,3 @@ main :: (args: [] cstr) {
 
     train(^nn, ^mnist_data.base, ^optimizer);
 }
-
-
-
-
-// Old code for printing the outputs fancily:
-/*
-            {
-                print_colored_array :: (arr: [] $T, color_idx: i32, color_code := 94) {
-                    for i: arr.count {
-                        if i == color_idx {
-                            printf("\x1b[%im", color_code);
-                            print(arr[i]);
-                            print("\x1b[0m ");
-                        } else {
-                            print(arr[i]);
-                            print(" ");
-                        }
-                    }
-                    print("\n");
-                }
-
-                color := 94;
-                if prediction != label do color = 91;
-
-                output := nn->get_output();
-
-                print_colored_array(sample.output, label, color);
-                print_colored_array(output, prediction, color);
-
-                loss := nn->get_loss(sample.output, criterion);
-                printf("Loss: %f         Correct: %i / 100\n", cast(f32) loss, past_100_correct);
-
-                past_100_correct = 0;
-                
-                // if ex % 10000 == 0 {
-                //     println("Saving neural network...");
-                //     neural_net_save(nn, "data/still_working.nn");
-                // }
-            }
-            */
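Note on the src/mnist.onyx change above: a minimal sketch restating only what the diff shows. Previously the source directory was added to the load path and the module was then loaded by name; now the file is loaded directly by its relative path, so one directive replaces two.

    // Before: two directives -- add "src" to the search path, then load by name.
    #load_path "src"
    #load "neuralnet"

    // After: one directive -- load the file by its relative path.
    #load "src/neuralnet"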

src/neuralnet.onyx
index 2c608b9537a3d0bd1d095dd0485e22f25a34ae3d..4289b03e4b2c793e1ba6234630cf9ddbbf8e2cc9 100644
@@ -516,12 +516,8 @@ Optimizer_Functions :: struct {
 optimizer_init :: (use optim: ^Optimizer, nn: ^NeuralNet, allocator := context.allocator) {
     network = nn;
 
-    #context_scope {
-        context.allocator = allocator;
-
-        variables       = array.make(#type ^Variable);
-        variable_arrays = array.make(#type ^[] Variable);
-    }
+    variables       = array.make(#type ^Variable,    allocator=allocator);
+    variable_arrays = array.make(#type ^[] Variable, allocator=allocator);
 }
 
 optimizer_step :: (use optim: ^Optimizer, scale: f32 = 1) {
@@ -577,4 +573,4 @@ sgd_optimizer_step :: (use optimizer: ^SGD_Optimizer, scale: f32) {
             variable.value += variable.delta * alpha;
         }
     }
-}
\ No newline at end of file
+}
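
Note on the src/neuralnet.onyx change: a sketch of the two allocator-passing styles, drawn only from the lines in the diff above. Instead of temporarily overriding the implicit context.allocator inside a #context_scope block, the allocator is now passed explicitly as a named argument to array.make.

    // Before: swap the implicit allocator for the duration of the block.
    #context_scope {
        context.allocator = allocator;
        variables       = array.make(#type ^Variable);
        variable_arrays = array.make(#type ^[] Variable);
    }

    // After: pass the allocator explicitly at each call site.
    variables       = array.make(#type ^Variable,    allocator=allocator);
    variable_arrays = array.make(#type ^[] Variable, allocator=allocator);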