diff --git a/src/neural.c b/src/neural.c
index 22af43c..496453b 100644
--- a/src/neural.c
+++ b/src/neural.c
@@ -65,6 +65,66 @@ neural_randomize(Neural_Network *self) {
     }
 }
 
+/*
+ * neural_loadData: read an 8x8 grid of '0'/'1' characters (one newline
+ * per row) from `filename` and return a malloc'd array of 64 floats.
+ * Returns NULL on I/O, allocation, or parse failure; caller owns/frees.
+ */
+float *
+neural_loadData(Neural_Network *self, const char *filename) {
+    FILE *f;
+    char *file_data;
+    float *retval;
+    int read_cursor = 0;
+
+    (void)self;  /* reserved for future layer-aware loading */
+
+    f = fopen(filename, "r");
+    if (f == NULL) {
+        return NULL;
+    }
+
+    /* 8*8 value characters plus one newline per row of 8. */
+    file_data = malloc(9*8 * sizeof(char));
+    retval = malloc(8*8 * sizeof(float));
+    if (file_data == NULL || retval == NULL) {
+        free(file_data);
+        free(retval);
+        fclose(f);
+        return NULL;
+    }
+
+    if (fread(file_data, sizeof(char), 9*8, f) != 9*8) {
+        free(file_data);
+        free(retval);
+        fclose(f);
+        return NULL;
+    }
+    fclose(f);
+
+    for (int i = 0; i < 8*8; i++) {
+        if (file_data[read_cursor] == '\n') {
+            read_cursor++;
+        }
+        switch (file_data[read_cursor++]) { /* consume exactly one char */
+        case '0':
+            retval[i] = 0.0;
+            break;
+        case '1':
+            retval[i] = 1.0;
+            break;
+        default:
+            fprintf(stderr, "neural_loadData: unexpected byte in %s\n",
+                    filename);
+            free(file_data);
+            free(retval);
+            return NULL;
+        }
+    }
+    free(file_data);
+    return retval;
+}
+
 float *
 neural_process(Neural_Network *self, float *input) {
     float *retval = NULL;
@@ -99,6 +159,21 @@
     return retval;
 }
 
+/*
+ * neural_train: placeholder for the training pass over one sample.
+ * Returns 0; real implementation (e.g. backpropagation) is TODO.
+ */
+int
+neural_train(Neural_Network *self,
+             const char *testdata,
+             const float *testresult) {
+    /* TODO: implement the training algorithm. */
+    (void)self;
+    (void)testdata;
+    (void)testresult;
+    return 0;
+}
+
 int
 neural_getMesh(ModelRegistry *mr, Neural_Network *nn) {
     Model *model;