#load "src/neuralnet"
-use package core
+use core {package, *}
MNIST_DataLoader :: struct {
- use base : DataLoader(MNIST_Sample);
-
- images, labels : io.FileStream;
+ Sample_Type :: MNIST_Sample;
+
+ images, labels : os.File;
make :: (image_path := "data/train-images-idx3-ubyte", label_path := "data/train-labels-idx1-ubyte") -> MNIST_DataLoader {
mnist_data: MNIST_DataLoader;
- mnist_data.vtable = ^mnist_dataloader_functions;
-
- err : io.Error;
- err, mnist_data.images = io.open(image_path);
- assert(err == io.Error.None, "There was an error loading the image file");
- err, mnist_data.labels = io.open(label_path);
- assert(err == io.Error.None, "There was an error loading the label file");
-
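+ // Open both data files, reporting a clear error message if either one fails to open.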
+ mnist_data.images = os.open(image_path)->expect("There was an error loading the image file");
+ mnist_data.labels = os.open(label_path)->expect("There was an error loading the label file");
return mnist_data;
}
}
}
-mnist_dataloader_functions := <DataLoader_Functions(MNIST_Sample)>.{
- get_count = MNIST_DataLoader.get_count,
- get_item = MNIST_DataLoader.get_item,
-}
-
MNIST_Sample :: struct {
input, output : [] f32;
train :: (
nn: ^NeuralNet, // The neural network.
- dataloader: ^DataLoader($Sample_Type), // Data loader that provides samples of type Sample_Type.
+ dataloader: ^$D/DataLoader, // Any data loader satisfying the DataLoader interface; provides samples of its Sample_Type.
optimizer: ^Optimizer, // The optimizer of choice that is expected to have neural net parameters initialized.
criterion: Criterion = mean_squared_error, // The criterion of choice.
batch_size := 10, // How many samples per batch.
epochs := 5, // The number of epochs.
) {
- sample : Sample_Type;
+ sample : dataloader.Sample_Type;
sample->init();
defer sample->deinit();
- training_example_count := dataloader_get_count(dataloader);
- printf("Training sample count: %i\n", training_example_count);
+ training_example_count := dataloader->get_count();
+ printf("Training sample count: {}\n", training_example_count);
if batches_per_epoch == -1 {
batches_per_epoch = training_example_count / batch_size;
past_100_correct := 0;
for epoch: epochs {
- printf("Staring epoch %i ===================================\n", epoch + 1);
+ printf("Staring epoch {} ===================================\n", epoch + 1);
for batch_num: batches_per_epoch {
optimizer_zero_gradient(optimizer);
for batch: batch_size {
sample_num := random.between(0, training_example_count);
- dataloader_get_item(dataloader, sample_num, ^sample);
+ dataloader->get_item(sample_num, ^sample);
nn->forward(sample.input);
nn->backward(sample.output, criterion);
if batch_num % (100 / batch_size) == 0 {
loss := nn->get_loss(sample.output, criterion);
- printf("Loss: %f Correct: %i / 100\n", cast(f32) loss, past_100_correct);
+ printf("Loss: {} Correct: {} / 100\n", cast(f32) loss, past_100_correct);
past_100_correct = 0;
}
defer mnist_data->close();
optimizer := sgd_optimizer_create(^nn, learning_rate = 0.01f);
- nn->supply_parameters(^optimizer);
- train(^nn, ^mnist_data.base, ^optimizer);
+ train(^nn, ^mnist_data, ^optimizer);
}
-use package core
+use core {package, *}
//
Onyx_NN_Magic_Number := 0x4E4E584F
neural_net_save :: (use nn: ^NeuralNet, filename: str) {
- err, output_file := io.open(filename, io.OpenMode.Write);
- assert(err == io.Error.None, "Failed to open neural net save file for writing.");
+ output_file := os.open(filename, .Write)->expect("Failed to open neural net save file for writing.");
defer io.stream_close(^output_file);
writer := io.binary_writer_make(^output_file);
}
neural_net_load :: (filename: str) -> NeuralNet {
- err, input_file := io.open(filename, io.OpenMode.Read);
- assert(err == io.Error.None, "Failed to open neural net save file for reading.");
+ input_file := os.open(filename, .Read)->expect("Failed to open neural net save file for reading.");
defer io.stream_close(^input_file);
reader := io.binary_reader_make(^input_file);
}
activation_function_from_id :: (id: ActivationFunctionID) -> ActivationFunction {
- use ActivationFunctionID;
-
switch id {
- case Sigmoid do return sigmoid_activation;
- case Hyperbolic_Tangent do return tanh_activation;
- case ReLU do return relu_activation;
+ case .Sigmoid do return sigmoid_activation;
+ case .Hyperbolic_Tangent do return tanh_activation;
+ case .ReLU do return relu_activation;
case #default do return ActivationFunction.{
ActivationFunctionID.Invalid,
// Specifically, an input and output at a particular index.
//
-DataLoader :: struct (Sample_Type: type_expr) {
- vtable : ^DataLoader_Functions(Sample_Type);
-}
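+// Describes any data loader type: it must declare a Sample_Type and provide
+// get_count and get_item methods.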
+DataLoader :: interface (t: $T) {
+ { T.Sample_Type } -> type_expr;
-DataLoader_Functions :: struct (Sample_Type: type_expr) {
- get_count : (^DataLoader(Sample_Type)) -> u32;
- get_item : (^DataLoader(Sample_Type), index: u32, sample: ^Sample_Type) -> bool;
-}
+ { t->get_count() } -> u32;
+ { t->get_item(u32.{}, (^T.Sample_Type).{}) } -> bool;
-dataloader_get_count :: (use data: ^DataLoader($Sample_Type)) -> u32 {
- if vtable == null do return 0;
- if vtable.get_count == null_proc do return 0;
-
- return vtable.get_count(data);
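+ // Check that the loader's Sample_Type itself satisfies the Sample interface,
+ // by passing a default value of it to a macro constrained on Sample.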
+ // This should be easier to write...
+ do {
+ s :: macro (_: $S/Sample) ---
+ s(T.Sample_Type.{});
+ };
}
-dataloader_get_item :: (use data: ^DataLoader($Sample_Type), index: u32, sample: ^Sample_Type) -> bool {
- if vtable == null do return false;
- if vtable.get_item == null_proc do return false;
-
- return vtable.get_item(data, index, sample);
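+// A sample only needs to support in-place initialization and cleanup via init and deinit.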
+#local Sample :: interface (t: $T) {
+ t->init();
+ t->deinit();
}
Optimizer :: struct {
vtable : ^Optimizer_Functions;
- network : ^NeuralNet;
// TODO(Brendan Hansen): Make these fixed size slices?
// This would require knowing the exact parameter count for the network.
}
optimizer_init :: (use optim: ^Optimizer, nn: ^NeuralNet, allocator := context.allocator) {
- network = nn;
+ variables = array.make(^Variable, allocator=allocator);
+ variable_arrays = array.make(^[] Variable, allocator=allocator);
- variables = array.make(#type ^Variable, allocator=allocator);
- variable_arrays = array.make(#type ^[] Variable, allocator=allocator);
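+ // If a network is given, register its parameters with the optimizer immediately.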
+ if nn do nn->supply_parameters(optim);
}
optimizer_step :: (use optim: ^Optimizer, scale: f32 = 1) {