with memleaks

This commit is contained in:
Jakob Stornig 2023-09-24 00:31:29 +02:00
parent 34a23c6eab
commit e0d9353d4b
3 changed files with 20 additions and 12 deletions

View file

@@ -6,10 +6,6 @@ typedef struct {
char label;
} Image;
// Read-only view over a contiguous batch of loaded images.
// NOTE(review): both members are const, so a container must be fully
// initialized at creation time — confirm against the loader that builds it.
typedef struct {
const Image* image;
const size_t size;
} Image_Container;
static const int MAGIC_NUMBER_LABEL = 2049;

4
main.c
View file

@@ -29,9 +29,7 @@ int main() {
//
// int pause;
int imported_count = 0;
Image** images = import_images("../data/train-images.idx3-ubyte", "../data/train-labels.idx1-ubyte", &imported_count, 60000);
matrix_save(images[0]->pixel_values, "image1.txt");
matrix_save(images[1]->pixel_values, "images2.txt");
Image** images = import_images("../data/train-images.idx3-ubyte", "../data/train-labels.idx1-ubyte", &imported_count, 10000);
Neural_Network * net = create_network(3, 28*28, 30, 10);
train_network_with_batches(net, images, imported_count, 1, 10, 3);
}

View file

@@ -7,6 +7,13 @@
#include <math.h>
#include "image.h"
// Bundles a network with its per-layer gradient scratch buffers.
// Fixes two defects in the original: `biases_delta` was missing its
// terminating semicolon, and the typedef had no declarator, so it
// declared nothing (the name is new and therefore backward compatible).
typedef struct {
Neural_Network * network;   // network being trained (not owned)
Matrix ** weights_delta;    // per-layer weight gradients
Matrix ** biases_delta;     // per-layer bias gradients
} Training_Context;
void evaluate(Neural_Network * network, Image** images, int imageCount){
int numCorrect = 0;
for(int i = 0; i <= imageCount; i++){
@@ -23,7 +30,7 @@ void evaluate(Neural_Network * network, Image** images, int imageCount){
}
double sigmoid(double input) {
return 1.0 / (1 + exp(-1 * input));
return 1.0 / (1 + exp(-input));
}
double sigmoid_prime(double input){
@@ -62,13 +69,15 @@ void back_prop(Neural_Network * network, Image* training_sample, Matrix ** weigh
//calculate delta for last layer;
//bias
Matrix * subtraction_result = subtract(layer_activations[network->layer_count-1], desired_result);
Matrix * delta = apply(sigmoid_prime, subtraction_result);
Matrix * s_prime = apply(sigmoid_prime, layer_activations_wo_sigmoid[network->layer_count-2]);
Matrix * delta = multiply(subtraction_result, s_prime);
matrix_free(s_prime);
matrix_free(subtraction_result);
biases_delta[network->layer_count-1] = delta;
biases_delta[network->layer_count-2] = delta;
//weights
Matrix * transposed = transpose(layer_activations[network->layer_count-2]);
weights_delta[network->layer_count-1] = dot(delta, transposed);
weights_delta[network->layer_count-2] = dot(delta, transposed);
matrix_free(transposed);
transposed = NULL;
@@ -146,7 +155,12 @@ void update_batch(Neural_Network * network, Image** training_data, int batch_sta
matrix_free(network->biases[i]);
network->biases[i] = new_biases;
}
//TODO: update mini batch
free(sum_weights_delta);
free(sum_biases_delta);
for(int i = 0; i < network->layer_count - 1; i++){
matrix_free(weights_delta[i]);
matrix_free(biases_delta[i]);
}
}