From: Brendan Hansen
Date: Fri, 22 Jan 2021 18:10:11 +0000 (-0600)
Subject: added colored printing for fanciness
X-Git-Url: https://git.brendanfh.com/?a=commitdiff_plain;h=de92469ef4be50c58b4d36b1b2ee1bceed21833c;p=onyx-mnist.git

added colored printing for fanciness
---

diff --git a/src/mnist.onyx b/src/mnist.onyx
index 54fb416..1099db1 100644
--- a/src/mnist.onyx
+++ b/src/mnist.onyx
@@ -56,17 +56,31 @@ stocastic_gradient_descent :: (nn: ^NeuralNet, mnist_data: ^MNIST_Data, training
         for i: input.count do input[i] = (cast(float) cast(u32) example[i]) / 255;
 
         neural_net_forward(nn, ~~ input);
+        neural_net_backward(nn, ~~ expected);
+
         if ex % 100 == 0 {
-            print_array(expected);
+            print_colored_array :: (arr: [] $T) {
+                greatest_idx := 0;
+                for i: arr.count do if arr[i] > arr[greatest_idx] do greatest_idx = i;
+
+                for i: arr.count {
+                    if i == greatest_idx {
+                        printf("\x1b[94m%f\x1b[0m ", arr[i]);
+                    } else {
+                        printf("%f ", arr[i]);
+                    }
+                }
+                print("\n");
+            }
+
+            print_colored_array(cast([] f32) expected);
 
             output := neural_net_get_output(nn);
-            print_array(output);
+            print_colored_array(output);
 
             loss := neural_net_loss(nn, ~~ expected);
             printf("MSE loss: %f\n", cast(f32) loss);
         }
-
-        neural_net_backward(nn, ~~ expected);
     }
 }
 
diff --git a/src/neuralnet.onyx b/src/neuralnet.onyx
index 4b53614..48c1b3a 100644
--- a/src/neuralnet.onyx
+++ b/src/neuralnet.onyx
@@ -138,7 +138,7 @@ init_layer :: (use layer: ^Layer, layer_size: u32, prev_layer_size: u32, allocat
 
     deltas = memory.make_slice(float, layer_size, allocator);
 
-    activation = tanh_activation;
+    activation = sigmoid_activation;
 
     if prev_layer_size > 0 {
         weights = memory.make_slice(#type [] float, layer_size, allocator);
@@ -154,7 +154,7 @@ init_layer :: (use layer: ^Layer, layer_size: u32, prev_layer_size: u32, allocat
 randomize_weights_and_biases :: (use layer: ^Layer) {
     for ^weight: weights {
         for ^w: *weight {
-            *w = cast(float) random.float(-1.0f, 1.0f);
+            *w = cast(float) random.float(-0.5f, 0.5f);
         }
     }
 
@@ -184,8 +184,8 @@ layer_forward :: (use layer: ^Layer, prev_layer: ^Layer) {
 
 ActivationFunction :: struct {
-    forward : proc (x : float) -> float;
-    backward : proc (fx: float, x: float) -> float;
+    forward : (x : float) -> float;
+    backward : (fx: float, x: float) -> float;
 }
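
For context on the escape sequences above: "\x1b[94m" switches the terminal foreground to bright blue and "\x1b[0m" resets it, so print_colored_array highlights the largest entry of each printed array on ANSI-capable terminals. The commit also swaps the default layer activation from tanh_activation to sigmoid_activation; that function's definition is not part of this diff, so the following C sketch of the usual sigmoid forward/backward pair, shaped like the ActivationFunction fields (backward receives the cached forward output fx), is an illustrative assumption rather than the repository's code:

    #include <math.h>
    #include <stdio.h>

    /* Sketch of a sigmoid pair matching the ActivationFunction layout:
     * forward(x), and backward(fx, x) where fx is the cached forward output. */
    static float sigmoid_forward(float x) {
        return 1.0f / (1.0f + expf(-x));
    }

    static float sigmoid_backward(float fx, float x) {
        (void) x;                 /* the derivative only needs the cached output */
        return fx * (1.0f - fx);  /* d/dx sigmoid(x) = sigmoid(x) * (1 - sigmoid(x)) */
    }

    int main(void) {
        float x  = 0.5f;
        float fx = sigmoid_forward(x);
        printf("sigmoid(%f) = %f, derivative = %f\n", x, fx, sigmoid_backward(fx, x));
        return 0;
    }

Passing fx into backward lets activations such as sigmoid and tanh reuse their own cached outputs (fx * (1 - fx) and 1 - fx * fx respectively) instead of recomputing the forward pass during backpropagation.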