From 018aea90439f101858dc5db814daab05091d0a43 Mon Sep 17 00:00:00 2001
From: Brendan Hansen
Date: Wed, 19 Jul 2023 14:45:23 -0500
Subject: [PATCH] random additions and rewrites

---
 src/mnist.onyx     | 40 +++++++++++++----------------------
 src/neuralnet.onyx | 52 +++++++++++++++++++---------------------------
 2 files changed, 35 insertions(+), 57 deletions(-)

diff --git a/src/mnist.onyx b/src/mnist.onyx
index c224ced..5fefa75 100644
--- a/src/mnist.onyx
+++ b/src/mnist.onyx
@@ -2,24 +2,18 @@
 
 #load "src/neuralnet"
 
-use package core
+use core {package, *}
 
 MNIST_DataLoader :: struct {
-    use base : DataLoader(MNIST_Sample);
-
-    images, labels : io.FileStream;
+    Sample_Type :: MNIST_Sample;
+
+    images, labels : os.File;
 
     make :: (image_path := "data/train-images-idx3-ubyte", label_path := "data/train-labels-idx1-ubyte") -> MNIST_DataLoader {
         mnist_data: MNIST_DataLoader;
-        mnist_data.vtable = ^mnist_dataloader_functions;
-
-        err : io.Error;
-        err, mnist_data.images = io.open(image_path);
-        assert(err == io.Error.None, "There was an error loading the image file");
-        err, mnist_data.labels = io.open(label_path);
-        assert(err == io.Error.None, "There was an error loading the label file");
-
+        mnist_data.images = os.open(image_path)->expect("There was an error loading the image file");
+        mnist_data.labels = os.open(label_path)->expect("There was an error loading the label file");
         return mnist_data;
     }
@@ -56,11 +50,6 @@ MNIST_DataLoader :: struct {
     }
 }
 
-mnist_dataloader_functions := .{
-    get_count = MNIST_DataLoader.get_count,
-    get_item = MNIST_DataLoader.get_item,
-}
-
 MNIST_Sample :: struct {
     input, output : [] f32;
@@ -77,7 +66,7 @@ MNIST_Sample :: struct {
 
 train :: (
     nn: ^NeuralNet,                             // The neural network.
-    dataloader: ^DataLoader($Sample_Type),      // Data loader that provides samples of type Sample_Type.
+    dataloader: ^$D/DataLoader,                 // Data loader that provides samples of type Sample_Type.
     optimizer: ^Optimizer,                      // The optimizer of choice that is expected to have neural net parameters initialized.
     criterion: Criterion = mean_squared_error,  // The criterion of choice.
     batch_size := 10,                           // How many samples per batch.
@@ -85,12 +74,12 @@ train :: (
     epochs := 5,                                // The number of epochs
 ) {
-    sample : Sample_Type;
+    sample : dataloader.Sample_Type;
     sample->init();
     defer sample->deinit();
 
-    training_example_count := dataloader_get_count(dataloader);
-    printf("Training sample count: %i\n", training_example_count);
+    training_example_count := dataloader->get_count();
+    printf("Training sample count: {}\n", training_example_count);
 
     if batches_per_epoch == -1 {
         batches_per_epoch = training_example_count / batch_size;
     }
@@ -100,14 +89,14 @@ train :: (
     past_100_correct := 0;
 
     for epoch: epochs {
-        printf("Staring epoch %i ===================================\n", epoch + 1);
+        printf("Starting epoch {} ===================================\n", epoch + 1);
 
         for batch_num: batches_per_epoch {
             optimizer_zero_gradient(optimizer);
 
             for batch: batch_size {
                 sample_num := random.between(0, training_example_count);
-                dataloader_get_item(dataloader, sample_num, ^sample);
+                dataloader->get_item(sample_num, ^sample);
 
                 nn->forward(sample.input);
                 nn->backward(sample.output, criterion);
@@ -121,7 +110,7 @@ train :: (
 
             if batch_num % (100 / batch_size) == 0 {
                 loss := nn->get_loss(sample.output, criterion);
-                printf("Loss: %f Correct: %i / 100\n", cast(f32) loss, past_100_correct);
+                printf("Loss: {} Correct: {} / 100\n", cast(f32) loss, past_100_correct);
                 past_100_correct = 0;
             }
 
@@ -139,7 +128,6 @@ main :: (args: [] cstr) {
     defer mnist_data->close();
 
     optimizer := sgd_optimizer_create(^nn, learning_rate = 0.01f);
-    nn->supply_parameters(^optimizer);
 
-    train(^nn, ^mnist_data.base, ^optimizer);
+    train(^nn, ^mnist_data, ^optimizer);
 }
diff --git a/src/neuralnet.onyx b/src/neuralnet.onyx
index 4289b03..7330680 100644
--- a/src/neuralnet.onyx
+++ b/src/neuralnet.onyx
@@ -1,4 +1,4 @@
-use package core
+use core {package, *}
 
 
 //
@@ -211,8 +211,7 @@ Layer :: struct {
 Onyx_NN_Magic_Number := 0x4E4E584F
 
 neural_net_save :: (use nn: ^NeuralNet, filename: str) {
-    err, output_file := io.open(filename, io.OpenMode.Write);
-    assert(err == io.Error.None, "Failed to open neural net save file for writing.");
+    output_file := os.open(filename, .Write)->expect("Failed to open neural net save file for writing.");
     defer io.stream_close(^output_file);
 
     writer := io.binary_writer_make(^output_file);
@@ -251,8 +250,7 @@ neural_net_save :: (use nn: ^NeuralNet, filename: str) {
 }
 
 neural_net_load :: (filename: str) -> NeuralNet {
-    err, input_file := io.open(filename, io.OpenMode.Read);
-    assert(err == io.Error.None, "Failed to open neural net save file for reading.");
+    input_file := os.open(filename, .Read)->expect("Failed to open neural net save file for reading.");
     defer io.stream_close(^input_file);
 
     reader := io.binary_reader_make(^input_file);
@@ -322,12 +320,10 @@ ActivationFunctionID :: enum (u8) {
 }
 
 activation_function_from_id :: (id: ActivationFunctionID) -> ActivationFunction {
-    use ActivationFunctionID;
-
     switch id {
-        case Sigmoid do return sigmoid_activation;
-        case Hyperbolic_Tangent do return tanh_activation;
-        case ReLU do return relu_activation;
+        case .Sigmoid do return sigmoid_activation;
+        case .Hyperbolic_Tangent do return tanh_activation;
+        case .ReLU do return relu_activation;
 
         case #default do return ActivationFunction.{
             ActivationFunctionID.Invalid,
@@ -466,27 +462,22 @@ mean_absolute_error := Criterion.{
 // Specifically, an input and output at a particular index.
 //
-DataLoader :: struct (Sample_Type: type_expr) {
-    vtable : ^DataLoader_Functions(Sample_Type);
-}
+DataLoader :: interface (t: $T) {
+    { T.Sample_Type } -> type_expr;
 
-DataLoader_Functions :: struct (Sample_Type: type_expr) {
-    get_count : (^DataLoader(Sample_Type)) -> u32;
-    get_item : (^DataLoader(Sample_Type), index: u32, sample: ^Sample_Type) -> bool;
-}
+    { t->get_count() } -> u32;
+    { t->get_item(u32.{}, (^T.Sample_Type).{}) } -> bool;
 
-dataloader_get_count :: (use data: ^DataLoader($Sample_Type)) -> u32 {
-    if vtable == null do return 0;
-    if vtable.get_count == null_proc do return 0;
-
-    return vtable.get_count(data);
+    // This should be easier to write...
+    do {
+        s :: macro (_: $S/Sample) ---
+        s(T.Sample_Type.{});
+    };
 }
 
-dataloader_get_item :: (use data: ^DataLoader($Sample_Type), index: u32, sample: ^Sample_Type) -> bool {
-    if vtable == null do return false;
-    if vtable.get_item == null_proc do return false;
-
-    return vtable.get_item(data, index, sample);
+#local Sample :: interface (t: $T) {
+    t->init();
+    t->deinit();
 }
@@ -496,7 +487,6 @@ dataloader_get_item :: (use data: ^DataLoader($Sample_Type), index: u32, sample:
 
 Optimizer :: struct {
     vtable : ^Optimizer_Functions;
-    network : ^NeuralNet;
 
     // TODO(Brendan Hansen): Make these fixed size slices?
     // This would require know the exact parameter count for the network.
@@ -514,10 +504,10 @@ Optimizer_Functions :: struct {
 }
 
 optimizer_init :: (use optim: ^Optimizer, nn: ^NeuralNet, allocator := context.allocator) {
-    network = nn;
+    variables = array.make(^Variable, allocator=allocator);
+    variable_arrays = array.make(^[] Variable, allocator=allocator);
 
-    variables = array.make(#type ^Variable, allocator=allocator);
-    variable_arrays = array.make(#type ^[] Variable, allocator=allocator);
+    if nn do nn->supply_parameters(optim);
 }
 
 optimizer_step :: (use optim: ^Optimizer, scale: f32 = 1) {
-- 
2.25.1
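
Note (editorial, not part of the patch): with the vtable-based DataLoader
replaced by a structural interface, any type that exposes a `Sample_Type`
constant plus `get_count` and `get_item` procedures should satisfy the
`DataLoader` constraint, provided its sample type also has `init`/`deinit`.
Below is a minimal sketch of a conforming loader; the names
`Constant_DataLoader` and `Constant_Sample` are hypothetical, the
`memory.make_slice`/`memory.free_slice` helpers are assumed from core, and
the whole thing only mirrors the shape of `MNIST_DataLoader`/`MNIST_Sample`
in this patch rather than any real data source.

    Constant_Sample :: struct {
        input, output : [] f32;

        // Allocate the sample buffers; the sizes here are arbitrary.
        init :: (use s: ^Constant_Sample) {
            input  = memory.make_slice(f32, 4);
            output = memory.make_slice(f32, 2);
        }

        deinit :: (use s: ^Constant_Sample) {
            memory.free_slice(^input);
            memory.free_slice(^output);
        }
    }

    Constant_DataLoader :: struct {
        // The associated type the interface looks up as T.Sample_Type.
        Sample_Type :: Constant_Sample;

        get_count :: (use d: ^Constant_DataLoader) -> u32 {
            return 100;
        }

        // Fill `sample` for `index`; report false when the index is out of range.
        get_item :: (use d: ^Constant_DataLoader, index: u32, sample: ^Constant_Sample) -> bool {
            if index >= 100 do return false;
            for ^x: sample.input  do *x = 1;
            for ^x: sample.output do *x = 0;
            return true;
        }
    }

Because `train` now takes `^$D/DataLoader`, such a loader would be passed
directly, e.g. `train(^nn, ^loader, ^optimizer);`, with no base struct or
function table to wire up.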