Skip to content

Commit

Permalink
Add nn_layer_init_weights_gaussian and nn_layer_init_biases_zeros (#19)
Browse files Browse the repository at this point in the history
  • Loading branch information
devfacet authored Apr 13, 2024
1 parent ba1e061 commit f785eb4
Show file tree
Hide file tree
Showing 10 changed files with 132 additions and 2 deletions.
4 changes: 2 additions & 2 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ run-test:
@ARCH=$(ARCH) TECH=$(TECH) ARTIFACT=tests/$(TEST) ARGS="$(ARGS)" scripts/shell/run_artifact.sh
@echo " "

## build-example Build an example (e.g., make build-example ARCH=generic EXAMPLE=arch/generic/neuron)
## build-example Build an example (e.g., make build-example ARCH=generic EXAMPLE=arch/generic/layer)
build-example:
@echo building $(EXAMPLE)
@ARCH=$(ARCH) TECH=$(TECH) ARTIFACT=examples/$(EXAMPLE) scripts/shell/build_artifact.sh
Expand All @@ -68,7 +68,7 @@ build-examples:
$(MAKE) build-example ARCH=$(ARCH) TECH=$(TECH) EXAMPLE=$$example || exit 1; \
done

## run-example Run an example (e.g., make run-example ARCH=generic EXAMPLE=arch/generic/neuron)
## run-example Run an example (e.g., make run-example ARCH=generic EXAMPLE=arch/generic/layer)
run-example:
@echo running $(EXAMPLE) $(ARGS)
@ARCH=$(ARCH) ARTIFACT=examples/$(EXAMPLE) ARGS="$(ARGS)" scripts/shell/run_artifact.sh
Expand Down
66 changes: 66 additions & 0 deletions examples/arch/generic/layer/main.c
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
#include "nn_activation.h"
#include "nn_dot_product.h"
#include "nn_layer.h"
#include <stdio.h>
#include <stdlib.h>
#include <time.h>

int main() {
srand((unsigned int)time(NULL));

// Init vars
NNLayer layer;
NNError error;
const int input_size = 4;
const int output_size = 3;
const int batch_size = 2;

// Initialize a layer with the given input and output sizes, ReLU activation function, and dot product function
if (!nn_layer_init(&layer, input_size, output_size, nn_activation_func_relu, nn_dot_product, &error)) {
fprintf(stderr, "error: %s\n", error.message);
return 1;
}

// Initialize the weights of the layer with Gaussian random values scaled by 0.01
if (!nn_layer_init_weights_gaussian(&layer, 0.01f, &error)) {
fprintf(stderr, "error: %s\n", error.message);
return 1;
}

// Initialize the biases of the layer to zero
if (!nn_layer_init_biases_zeros(&layer, &error)) {
fprintf(stderr, "error: %s\n", error.message);
return 1;
}

// Generate random inputs
float inputs[NN_LAYER_MAX_BATCH_SIZE][NN_LAYER_MAX_INPUT_SIZE];
for (size_t i = 0; i < batch_size; ++i) {
for (size_t j = 0; j < input_size; ++j) {
inputs[i][j] = (float)rand() / (float)RAND_MAX;
}
}

// Compute the layer with the given inputs
float outputs[NN_LAYER_MAX_BATCH_SIZE][NN_LAYER_MAX_OUTPUT_SIZE];
if (!nn_layer_forward(&layer, inputs, outputs, 2, &error)) {
fprintf(stderr, "error: %s\n", error.message);
return 1;
}

// Print the inputs
for (size_t i = 0; i < batch_size; ++i) {
for (size_t j = 0; j < input_size; ++j) {
printf("inputs[%zu][%zu] = %f\n", i, j, inputs[i][j]);
}
}

// Print the outputs
for (size_t i = 0; i < batch_size; ++i) {
for (size_t j = 0; j < output_size; ++j) {
printf("outputs[%zu][%zu] = %f\n", i, j, outputs[i][j]);
}
}

return 0;
}
6 changes: 6 additions & 0 deletions include/nn_layer.h
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,12 @@ typedef struct {
// nn_layer_init initializes a layer with the given arguments.
bool nn_layer_init(NNLayer *layer, size_t input_size, size_t output_size, NNActivationFunction act_func, NNDotProductFunction dot_product_func, NNError *error);

// nn_layer_init_weights_gaussian initializes the weights of the layer with a Gaussian distribution.
bool nn_layer_init_weights_gaussian(NNLayer *layer, float scale, NNError *error);

// nn_layer_init_biases_zeros initializes the biases of the layer to zero.
bool nn_layer_init_biases_zeros(NNLayer *layer, NNError *error);

// nn_layer_set_weights sets the weights of the given layer.
bool nn_layer_set_weights(NNLayer *layer, const float weights[NN_LAYER_MAX_OUTPUT_SIZE][NN_LAYER_MAX_INPUT_SIZE], NNError *error);

Expand Down
48 changes: 48 additions & 0 deletions src/nn_layer.c
Original file line number Diff line number Diff line change
@@ -1,7 +1,14 @@
#include "nn_layer.h"
#include <math.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

// M_PI is not defined in some compilers.
#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif

// nn_layer_init initializes a layer with the given arguments.
bool nn_layer_init(NNLayer *layer, size_t input_size, size_t output_size, NNActivationFunction act_func, NNDotProductFunction dot_product_func, NNError *error) {
Expand All @@ -26,6 +33,44 @@ bool nn_layer_init(NNLayer *layer, size_t input_size, size_t output_size, NNActi
if (dot_product_func) {
layer->dot_product_func = dot_product_func;
}

return true;
}

// nn_layer_init_weights_gaussian initializes the weights of the layer with a Gaussian distribution.
// nn_layer_init_weights_gaussian initializes the weights of the layer with
// zero-mean Gaussian random values (Box-Muller transform) scaled by 'scale'.
// Returns false and sets 'error' if 'layer' is NULL; true otherwise.
bool nn_layer_init_weights_gaussian(NNLayer *layer, float scale, NNError *error) {
    nn_error_set(error, NN_ERROR_NONE, NULL);
    if (layer == NULL) {
        nn_error_set(error, NN_ERROR_INVALID_INSTANCE, "layer is NULL");
        return false;
    }

    // Initialize weights with Gaussian random values scaled by 'scale'
    for (size_t i = 0; i < layer->output_size; ++i) {
        for (size_t j = 0; j < layer->input_size; ++j) {
            // Box-Muller requires u1 in (0, 1]: log(0) is -inf and would
            // produce NaN/inf weights. rand() can return 0, so resample.
            float u1;
            do {
                u1 = (float)rand() / (float)RAND_MAX;
            } while (u1 <= 0.0f);
            float u2 = (float)rand() / (float)RAND_MAX;
            // Box-Muller transform; float math functions (sqrtf/logf/cosf)
            // avoid needless float<->double round trips.
            float rand_std_normal = sqrtf(-2.0f * logf(u1)) * cosf(2.0f * (float)M_PI * u2);
            layer->weights[i][j] = scale * rand_std_normal;
        }
    }

    return true;
}

// nn_layer_init_biases_zeros initializes the biases of the layer to zero.
bool nn_layer_init_biases_zeros(NNLayer *layer, NNError *error) {
nn_error_set(error, NN_ERROR_NONE, NULL);
if (layer == NULL) {
nn_error_set(error, NN_ERROR_INVALID_INSTANCE, "layer is NULL");
return false;
}

// Initialize biases to zero
for (size_t i = 0; i < layer->output_size; ++i) {
layer->biases[i] = 0.0;
}

return true;
}

Expand All @@ -41,6 +86,7 @@ bool nn_layer_set_weights(NNLayer *layer, const float weights[NN_LAYER_MAX_OUTPU
layer->weights[i][j] = weights[i][j];
}
}

return true;
}

Expand All @@ -54,6 +100,7 @@ bool nn_layer_set_biases(NNLayer *layer, const float biases[NN_LAYER_MAX_BIASES]
for (size_t i = 0; i < layer->output_size; ++i) {
layer->biases[i] = biases[i];
}

return true;
}

Expand Down Expand Up @@ -81,5 +128,6 @@ bool nn_layer_forward(const NNLayer *layer, const float inputs[NN_LAYER_MAX_BATC
}
}
}

return true;
}
1 change: 1 addition & 0 deletions tests/arch/arm/cmsis-dsp/dot_product_perf/main.c
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@

int main(int argc, char *argv[]) {
nn_init_app(argc, argv);
srand((unsigned int)time(NULL));

if (!nn_cmsis_dsp_available()) {
printf("ARM CMSIS-DSP not available\n");
Expand Down
2 changes: 2 additions & 0 deletions tests/arch/arm/cmsis-dsp/neuron_perf/main.c
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,11 @@
#include "nn_test.h"
#include <stdio.h>
#include <stdlib.h>
#include <time.h>

int main(int argc, char *argv[]) {
nn_init_app(argc, argv);
srand((unsigned int)time(NULL));

if (!nn_cmsis_dsp_available()) {
printf("ARM CMSIS-DSP not available\n");
Expand Down
1 change: 1 addition & 0 deletions tests/arch/arm/neon/dot_product_perf/main.c
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@

int main(int argc, char *argv[]) {
nn_init_app(argc, argv);
srand((unsigned int)time(NULL));

if (!nn_neon_available()) {
printf("ARM NEON not available\n");
Expand Down
2 changes: 2 additions & 0 deletions tests/arch/arm/neon/neuron_perf/main.c
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,11 @@
#include "nn_test.h"
#include <stdio.h>
#include <stdlib.h>
#include <time.h>

int main(int argc, char *argv[]) {
nn_init_app(argc, argv);
srand((unsigned int)time(NULL));

if (!nn_neon_available()) {
printf("ARM NEON not available\n");
Expand Down
2 changes: 2 additions & 0 deletions tests/arch/generic/dot_product_perf/main.c
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,11 @@
#include "nn_test.h"
#include <stdio.h>
#include <stdlib.h>
#include <time.h>

int main(int argc, char *argv[]) {
nn_init_app(argc, argv);
srand((unsigned int)time(NULL));

// Init vars
const int n_runs = 1000;
Expand Down
2 changes: 2 additions & 0 deletions tests/arch/generic/neuron_perf/main.c
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,11 @@
#include "nn_test.h"
#include <stdio.h>
#include <stdlib.h>
#include <time.h>

int main(int argc, char *argv[]) {
nn_init_app(argc, argv);
srand((unsigned int)time(NULL));

// Init vars
NNNeuron neuron;
Expand Down

0 comments on commit f785eb4

Please sign in to comment.