From c9411048a21d17602287314bc57966d846d4b34c Mon Sep 17 00:00:00 2001
From: Brendan Hansen
Date: Wed, 20 Jan 2021 20:27:23 -0600
Subject: [PATCH] Initial commit

---
 .gitignore         |  4 ++++
 src/mnist.onyx     | 15 ++++++++++++++
 src/neuralnet.onyx | 56 ++++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 75 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 src/mnist.onyx
 create mode 100644 src/neuralnet.onyx

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..4046358
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,4 @@
+*.wasm
+data/
+*.sublime-workspace
+*.sublime-project
diff --git a/src/mnist.onyx b/src/mnist.onyx
new file mode 100644
index 0000000..c22ab2b
--- /dev/null
+++ b/src/mnist.onyx
@@ -0,0 +1,15 @@
+#load "core/std/wasi"
+
+#load_path "src"
+#load "neuralnet"
+
+use package core
+
+
+
+// Load the data
+// Feed forward neural net
+
+main :: (args: [] cstr) {
+    println(sigmoid(-4.0f));
+}
\ No newline at end of file
diff --git a/src/neuralnet.onyx b/src/neuralnet.onyx
new file mode 100644
index 0000000..f354805
--- /dev/null
+++ b/src/neuralnet.onyx
@@ -0,0 +1,56 @@
+use package core
+
+// To easily change to 64-bit floats if needed.
+float :: #type f32;
+
+
+NeuralNet :: struct {
+    layers : [] Layer;
+
+    // CLEANUP: Move these to core.alloc, so the nesting isn't nearly as terrible.
+    layer_arena : alloc.arena.ArenaState;
+}
+
+make_neural_network :: (layer_sizes: ..i32) -> NeuralNet {
+    net : NeuralNet;
+
+    net.layer_arena = alloc.arena.make(alloc.heap_allocator, 64 * 1024 * 1024); // 64 MiB
+    layer_allocator := alloc.arena.make_allocator(^net.layer_arena);
+
+    net.layers = memory.make_slice(Layer, layer_sizes.count);
+
+    // BUGFIX: declared return type is NeuralNet but the original fell off the end.
+    return net;
+}
+
+
+
+
+
+Layer :: struct {
+    neurons : [] float;
+    weights : [] float;
+}
+
+// NOTE(review): weights[j] is indexed only by the input neuron j, so every
+// output neuron i reuses the same weights. For a fully-connected layer this
+// likely should be weights[i * prev_layer.neurons.count + j] — confirm layout.
+layer_forward :: (use layer: ^Layer, prev_layer: ^Layer) {
+    for i: 0 .. neurons.count {
+        neurons[i] = 0;
+        for j: 0 .. weights.count {
+            neurons[i] += prev_layer.neurons[j] * weights[j];
+        }
+
+        neurons[i] = sigmoid(neurons[i]);
+    }
+}
+
+
+
+
+sigmoid :: (x: float) -> float {
+    ex := math.exp(x);
+    return ex / (1 + ex);
+}
+
-- 
2.25.1