CX/src/neural.c

#include <cx.h>
#include <neural.h>
#include <stdlib.h> /* malloc, calloc */
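
/*
 * Note on layout (an assumption based on how the code below uses the types,
 * which are declared in neural.h): the network is a fully connected net
 * stored as a flat array of layer_size * layers Neurons, each with value,
 * threshold, in_values (float **), weights (float *), and in_values_size.
 */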
Neural_Network *
neural_new(size_t layer_size, size_t layers) {
    Neural_Network *self = malloc(sizeof(Neural_Network));
    Neuron *n = NULL;
    self->layer_size = layer_size;
    self->layers = layers;
    self->n = calloc(layer_size * layers, sizeof(Neuron));
    for (size_t j = 0; j < layers; j++) {
        for (size_t i = 0; i < layer_size; i++) {
            n = &(self->n[j * layer_size + i]);
            n->value = 0;
            n->threshold = 0;
            if (j) {
                /* Every neuron past the input layer reads the previous
                 * layer's values; weights start at 0.5 until randomized. */
                n->in_values = calloc(layer_size, sizeof(float *));
                n->weights = calloc(layer_size, sizeof(float));
                n->in_values_size = layer_size;
                for (size_t k = 0; k < layer_size; k++) {
                    n->in_values[k] = &(self->n[(j - 1) * layer_size + k].value);
                    n->weights[k] = 0.5;
                }
            }
            else {
                /* Input-layer neurons have no incoming connections. */
                n->in_values = NULL;
                n->weights = NULL;
            }
        }
    }
    return self;
}
void
neural_randomize(Neural_Network *self) {
    /* Placeholder: does not actually randomize yet, it just sets every
     * threshold to 0.5, which is good enough for now. */
    for (size_t i = 0; i < self->layers; i++) {
        Neuron *n = &(self->n[i * self->layer_size]);
        for (size_t j = 0; j < self->layer_size; j++) {
            n[j].threshold = 0.5;
        }
    }
}
float *
neural_process(Neural_Network *self, float *input) {
    float *retval = NULL;
    /* Load the input vector into the first layer. */
    for (size_t i = 0; i < self->layer_size; i++) {
        self->n[i].value = input[i];
    }
    /* Propagate forward: each neuron's value becomes the dot product of the
     * previous layer's values and its weights. */
    for (size_t i = 1; i < self->layers; i++) {
        for (size_t j = 0; j < self->layer_size; j++) {
            Neuron *n = &(self->n[i * self->layer_size + j]);
            float dot_prod = 0;
            for (size_t k = 0; k < n->in_values_size; k++) {
                dot_prod += *(n->in_values[k]) * n->weights[k];
            }
            n->value = dot_prod;
        }
    }
    /* Copy the last layer's values into a freshly allocated output buffer. */
    retval = malloc(self->layer_size * sizeof(float));
    for (size_t i = 0; i < self->layer_size; i++) {
        retval[i] = self->n[self->layer_size * (self->layers - 1) + i].value;
    }
    return retval;
}
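
/*
 * Usage sketch (illustrative, not part of the library): drives the API above
 * end to end. The layer size and depth are arbitrary, and no destructor is
 * called because this file does not define one.
 *
 *     float input[4] = {1.0f, 0.0f, 0.5f, 0.25f};
 *     Neural_Network *net = neural_new(4, 3);     // 3 layers of 4 neurons
 *     neural_randomize(net);                      // set thresholds
 *     float *output = neural_process(net, input); // forward pass
 *     // output holds the last layer's 4 values; the caller frees it
 *     free(output);
 */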