HolyFuckItsAlive #13

Merged
jastornig merged 105 commits from Delta-Error-Test into main 2023-09-23 22:27:54 +02:00
4 changed files with 42 additions and 7 deletions
Showing only changes of commit ed563e1e9e - Show all commits

View file

@ -249,6 +249,16 @@ Matrix* transpose(Matrix* matrix) {
}
double matrix_sum(Matrix* matrix) {
    // Accumulate every entry of the matrix into one total.
    double total = 0;
    for (int r = 0; r < matrix->rows; r++) {
        double* row = matrix->numbers[r];
        for (int c = 0; c < matrix->columns; c++) {
            total += row[c];
        }
    }
    return total;
}
void matrix_save(Matrix* matrix, char* file_string){
// open the file in append mode

View file

@ -37,6 +37,7 @@ Matrix* apply(double (*function)(double), Matrix* matrix);
Matrix* scale(Matrix* matrix, double value);
Matrix* addScalar(Matrix* matrix, double value);
Matrix* transpose(Matrix* matrix);
double matrix_sum(Matrix* matrix);

View file

@ -4,6 +4,11 @@
#include <time.h>
#include <math.h>
double relu(double input);
Matrix* softmax(Matrix* matrix);
double square(double input);
double loss_function(Matrix* output_matrix, int image_label);
Neural_Network* new_network(int input_size, int hidden_size, int output_size, double learning_rate){
Neural_Network *network = malloc(sizeof(Neural_Network));
// initialize networks variables
@ -19,9 +24,7 @@ Neural_Network* new_network(int input_size, int hidden_size, int output_size, do
network->bias_1 = matrix_create(hidden_size, 1);
network->bias_2 = matrix_create(hidden_size, 1);
network->bias_3 = matrix_create(hidden_size, 1);
network.bias_output = matrix_create(output_size, 1);
network->bias_output = matrix_create(output_size, 1);
return network;
}
@ -51,7 +54,6 @@ void free_network(Neural_Network* network){
free(network);
}
void save_network(Neural_Network* network) {
char* file_name = "../networks/newest_network.txt";
@ -168,6 +170,12 @@ Matrix* predict(Neural_Network* network, Matrix* image_data) {
return result;
}
// Squared-error cost of a prediction against a one-hot target.
// Mutates `calculated` in place: subtracts 1 from the entry at the
// expected label, squares every element, and returns the element sum.
// NOTE(review): `numbers` is a 2-D array (see matrix_sum) and network
// outputs are column vectors (matrix_create(output_size, 1)), so the
// target entry is numbers[expected][0]. The original code decremented
// the row *pointer* numbers[expected] and returned nothing (UB for a
// double-returning function whose value is used).
// NOTE(review): assumes apply() modifies its matrix argument in place,
// as the sibling loss_function also assumes — TODO confirm.
double cost_function(Matrix* calculated, int expected){
    calculated->numbers[expected][0] -= 1;
    apply(square, calculated);
    return matrix_sum(calculated);
}
//void train_network(Neural_Network* network, Matrix* input, Matrix* output);
//void batch_train_network(Neural_Network* network, Image** images, int size);
@ -178,6 +186,10 @@ double relu(double input) {
return input;
}
// Derivative of ReLU: 1 for strictly positive inputs, 0 otherwise.
double relu_derivative(double x) {
    if (x > 0) {
        return 1;
    }
    return 0;
}
Matrix* softmax(Matrix* matrix) {
double total = 0;
@ -193,4 +205,19 @@ Matrix* softmax(Matrix* matrix) {
}
}
return result_matrix;
}
// Square of the input value.
double square(double input) {
    double product = input * input;
    return product;
}
// Squared-error loss of the network output against a one-hot label.
// Works on a copy, so `output_matrix` is left untouched; the caller
// keeps ownership of it. Returns sum((output - onehot(label))^2).
double loss_function(Matrix* output_matrix, int image_label) {
    Matrix* temp = matrix_copy(output_matrix);
    // Subtract the one-hot target. Outputs are column vectors
    // (matrix_create(output_size, 1)), so the entry is [label][0].
    // The original `numbers[1, image_label]` used the comma operator,
    // yielding the row pointer numbers[image_label], and then did
    // pointer arithmetic on it instead of decrementing the value.
    temp->numbers[image_label][0] -= 1;
    apply(square, temp);
    // Compute the sum BEFORE freeing temp — the original freed first
    // and then read the freed matrix (use-after-free, UB).
    double result = matrix_sum(temp);
    matrix_free(temp);
    return result;
}

View file

@ -41,6 +41,3 @@ Matrix* predict(Neural_Network* network, Matrix* image_data);
void train_network(Neural_Network* network, Matrix* input, Matrix* output);
void batch_train_network(Neural_Network* network, Image** images, int size);
double relu(double input);
Matrix* softmax(Matrix* matrix);