Add a basic neural interface.

This commit is contained in:
Marcel Plch 2024-07-06 17:59:22 +02:00
parent 29251ba60c
commit 87a03582cd
Signed by: dormouse
GPG key ID: 2CA77596BC4BDFFE
4 changed files with 100 additions and 0 deletions

View file

@@ -36,6 +36,7 @@ add_executable(
src/tensor.c
src/model.c
src/shader.c
src/neural.c
)
target_link_libraries(

View file

@@ -21,11 +21,15 @@
#include <model.h>
#include <tensor.h>
#include <shader.h>
#include <neural.h>

// Declare functions
int cx_glinit(GLFWwindow **);
int cx_glrun(GLFWwindow *);
int cx_nninit(Neural_Network **);
int cx_nnrun(Neural_Network *);

#endif

24
include/neural.h Normal file
View file

@ -0,0 +1,24 @@
#ifndef NEURAL_H
#define NEURAL_H

/* BUG FIX: the header used size_t/ssize_t without providing them, so it was
 * not self-contained. size_t comes from <stddef.h>; ssize_t is POSIX and
 * comes from <sys/types.h>. */
#include <stddef.h>
#include <sys/types.h>

/*
 * One neuron of a fully-connected network.
 * Neurons in the input layer (layer 0) have no inputs: in_values and
 * weights are NULL and in_values_size is 0.
 */
typedef struct _neuron {
    float value;            /* current activation value */
    float threshold;        /* firing threshold (set by neural_randomize;
                             * NOTE(review): not yet applied by
                             * neural_process — confirm intended use) */
    float **in_values;      /* pointers to the previous layer's values */
    float *weights;         /* one weight per entry of in_values */
    ssize_t in_values_size; /* number of entries in in_values / weights */
} Neuron;

/*
 * A network of `layers` layers with `layer_size` neurons each, stored as
 * one flat layer-major array: neuron (layer j, index i) lives at
 * n[j*layer_size + i].
 */
typedef struct _neural_network {
    Neuron *n;          /* flat neuron array, layer_size*layers entries */
    ssize_t layer_size; /* neurons per layer */
    ssize_t layers;     /* number of layers */
} Neural_Network;

/* Allocate and wire a network; caller owns the result. */
Neural_Network *neural_new(size_t layer_size, size_t layers);
/* Initialize thresholds (currently a fixed placeholder value). */
void neural_randomize(Neural_Network *);
/* Feed layer_size inputs through the net; returns a malloc'd output array. */
float *neural_process(Neural_Network *, float *input);

#endif

71
src/neural.c Normal file
View file

@ -0,0 +1,71 @@
#include <stdlib.h>
#include <sys/types.h>

#include <cx.h>
#include <neural.h>
Neural_Network *
neural_new(size_t layer_size, size_t layers) {
Neural_Network *self = malloc(sizeof(Neural_Network));
Neuron *n = NULL;
self->layer_size = layer_size;
self->layers = layers;
self->n = calloc(layer_size*layers, sizeof(Neuron));
for (int j = 0; j < layers; j++) {
n = &(self->n[j*layer_size]);
for (int i = 0; i < layers; i++) {
n->value = 0;
n->threshold = 0;
if (j) {
n->in_values = calloc(layer_size, sizeof(float *));
n->weights = calloc(layer_size, sizeof(float));
n->in_values_size = layer_size;
for (int k = 0; k < layer_size; k++) {
n->in_values[k] = &(self->n[(j-1)*layer_size + k].value);
n->weights[k] = 0.5;
}
}
else {
n->in_values = NULL;
n->weights = NULL;
}
}
}
return self;
}
/*
 * Initialize every neuron's firing threshold. Despite the name this does
 * not randomize yet: each threshold is set to the fixed placeholder 0.5
 * (per the original author, "it doesn't matter for now").
 */
void
neural_randomize(Neural_Network *self) {
    /* The neuron array is one contiguous block of layers*layer_size
     * entries, so a single flat sweep touches every neuron. */
    ssize_t total = self->layers * self->layer_size;

    for (ssize_t idx = 0; idx < total; idx++)
        self->n[idx].threshold = 0.5;
}
/*
 * Feed `input` (layer_size floats) through the network and return a
 * freshly malloc'd array of layer_size floats holding the final layer's
 * values. Returns NULL on allocation failure. Caller must free the result.
 *
 * NOTE(review): `threshold` is not applied anywhere here — presumably a
 * future activation step; confirm the intended semantics.
 */
float *
neural_process(Neural_Network *self, float *input) {
    /* Load the input layer. */
    for (ssize_t i = 0; i < self->layer_size; i++)
        self->n[i].value = input[i];

    /* Feed forward: each neuron's value becomes the dot product of the
     * previous layer's values with its weights.
     * BUG FIX: the original computed a single dot product per layer and
     * then discarded it — no neuron past the input layer was ever
     * updated, so the output could not depend on the input. */
    for (ssize_t i = 1; i < self->layers; i++) {
        for (ssize_t j = 0; j < self->layer_size; j++) {
            Neuron *cur = &self->n[i * self->layer_size + j];
            float dot_prod = 0;
            for (ssize_t k = 0; k < cur->in_values_size; k++)
                dot_prod += *cur->in_values[k] * cur->weights[k];
            cur->value = dot_prod;
        }
    }

    float *retval = malloc(self->layer_size * sizeof *retval);
    if (!retval)
        return NULL;
    /* BUG FIX: the original indexed the output layer without `+ i`,
     * copying the same (first) neuron into every slot of the result. */
    for (ssize_t i = 0; i < self->layer_size; i++)
        retval[i] = self->n[self->layer_size * (self->layers - 1) + i].value;
    return retval;
}