old

old stuff I made while in high school
git clone https://github.com/TanguyAndreani/old

neuron.c (5077B)


#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <math.h>

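/*
 * The network is a singly linked list of layers. Each Node keeps one weight
 * per node of the next layer, plus its net input and its activation (output).
 * Bias nodes only use their outgoing weights; their output is implicitly 1.
 */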
struct Node {
    double *weights;
    double net_input;
    double output;
};

struct Layer {
    int start; // 1 if this is the input layer.
    int n; // number of nodes in the layer
    struct Node *nodes;
    struct Layer *child;
    int b; // number of biases
    struct Node *biases;
};
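/* init_layer: allocate n nodes and b bias nodes and zero each node's state.
 * Weights are allocated later by init_weights, once the layers are linked. */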
void init_layer(struct Layer *l, int n, int b) {
  int i;
  l->n = n;
  l->b = b;
  l->start = 0;
  l->child = NULL;
  l->nodes = malloc(l->n * sizeof(struct Node));
  l->biases = malloc(l->b * sizeof(struct Node));
  for (i = 0; i < l->n; i++) {
    (l->nodes[i]).net_input = 0;
    (l->nodes[i]).output = 0;
  }
}

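/* init_weights: walk the layers recursively and give every node and bias of
 * each non-output layer one random weight in [-1, 1] per node of its child. */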
void init_weights(struct Layer *l) {
  if (l->child != NULL) {
    init_weights(l->child);
  } else {
    return;
  }
  int i = 0, k = 0;
  for (i = 0; i < l->n; i++) {
    (l->nodes[i]).weights = malloc(l->child->n * sizeof(double));
    for (k = 0; k < l->child->n; k++) {
      (l->nodes[i]).weights[k] = (double)rand()/RAND_MAX*2.0-1.0;
      //printf("Initializing some weights (%f)...\n", (l->nodes[i]).weights[k]);
    }
  }
  for (i = 0; i < l->b; i++) {
    (l->biases[i]).weights = malloc(l->child->n * sizeof(double));
    for (k = 0; k < l->child->n; k++) {
      (l->biases[i]).weights[k] = (double)rand()/RAND_MAX*2.0-1.0;
    }
  }
}

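/* f: logistic sigmoid activation. */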
double f(double x) {
  return 1.0 / (1.0 + exp(-x));
}

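/* final_output: forward pass. Walks the layers recursively, filling in each
 * child's net inputs, and returns a malloc'd array holding the output layer's
 * activations. The caller must free the returned array. */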
double *final_output(struct Layer *l) {
  int j = 0, k = 0;
  double sum = 0;
  double *z;
  if (l->child == NULL) {
    // output layer: apply the activation and return the results
    z = malloc(l->n * sizeof(double));
    for (k = 0; k < l->n; k++) {
      z[k] = f((l->nodes[k]).net_input);
    }
    return z;
  } else {
    // input or hidden layer: compute the net input of every node in the child
    for (j = 0; j < l->child->n; j++) {
      sum = 0;
      for (k = 0; k < l->n; k++) {
        if (l->start != 1) {
          (l->nodes[k]).output = f((l->nodes[k]).net_input);
        }
        sum += (l->nodes[k]).weights[j] * (l->nodes[k]).output;
      }
      for (k = 0; k < l->b; k++) {
        sum += (l->biases[k]).weights[j]; // bias output is implicitly 1
      }
      (l->child->nodes[j]).net_input = sum;
    }
    return final_output(l->child);
  }
}

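/* total_error: mean squared error between the network output z and the
 * target vector o. */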
// n is the number of output nodes
double total_error(double *z, double *o, int n) {
  int k = 0;
  double te = 0;
  for (k = 0; k < n; k++) {
    te += pow(o[k] - z[k], 2);
  }
  te = te / n;
  return te;
}

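/* update_weights: one gradient-descent step per weight using a numerical
 * gradient. Each weight is nudged by +tres and -tres, the central difference
 * (te1 - te2) / (2 * tres) estimates d(error)/d(weight), and the weight is
 * moved against that slope with step size tres. root is the input layer
 * (used to recompute the output), o the target and n the number of outputs. */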
void update_weights(struct Layer *root, struct Layer *l, double tres, double *o, int n) {
  if (l->child != NULL) {
    update_weights(root, l->child, tres, o, n);
  } else {
    return;
  }
  double te1, te2, *z;
  int i = 0, k = 0;
  for (i = 0; i < l->n; i++) {
    for (k = 0; k < l->child->n; k++) {
      (l->nodes[i]).weights[k] += tres;
      z = final_output(root);
      te1 = total_error(z, o, n);
      free(z);
      (l->nodes[i]).weights[k] -= 2*tres;
      z = final_output(root);
      te2 = total_error(z, o, n);
      free(z);
      (l->nodes[i]).weights[k] += tres;
      (l->nodes[i]).weights[k] -= tres * ((te1 - te2)/(2*tres));
    }
  }
  for (i = 0; i < l->b; i++) {
    for (k = 0; k < l->child->n; k++) {
      (l->biases[i]).weights[k] += tres;
      z = final_output(root);
      te1 = total_error(z, o, n);
      free(z);
      (l->biases[i]).weights[k] -= 2*tres;
      z = final_output(root);
      te2 = total_error(z, o, n);
      free(z);
      (l->biases[i]).weights[k] += tres;
      (l->biases[i]).weights[k] -= tres * ((te1 - te2)/(2*tres));
    }
  }
}

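/* load_inputs: copy one training example into the input layer's outputs. */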
void load_inputs(struct Layer *l, double *inputs) {
  int k = 0;
  for (k = 0; k < l->n; k++) {
    (l->nodes[k]).output = inputs[k];
  }
}

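/* clean_network: recursively free the weights of every non-output layer,
 * then each layer's node and bias arrays. */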
void clean_network(struct Layer *l) {
  int i = 0;
  if (l->child != NULL) {
    clean_network(l->child);
    for (i = 0; i < l->n; i++) {
      free((l->nodes[i]).weights);
    }
    for (i = 0; i < l->b; i++) {
      free((l->biases[i]).weights);
    }
  }
  free(l->nodes);
  free(l->biases);
}

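/* Train a 2-7-1 network on XOR and stop once the error of the last example
 * in a pass drops below 0.1. */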
int main(void) {
  int i = 0, j = 0;
  struct Layer input_layer, hidden_layer, output_layer;
  double inputs[][2] = {{0.0, 0.0}, {0.0, 1.0}, {1.0, 0.0}, {1.0, 1.0}};
  double outputs[][1] = {{0}, {1}, {1}, {0}};
  double *z;
  double te;
  srand(time(NULL));
  init_layer(&input_layer, 2, 2); // one input node per component of inputs[i]
  init_layer(&hidden_layer, 7, 2);
  init_layer(&output_layer, 1, 0);
  input_layer.child = &hidden_layer;
  hidden_layer.child = &output_layer;
  input_layer.start = 1;
  init_weights(&input_layer);
  do {
    for (i = 0; i < 4; i++, j++) {
      load_inputs(&input_layer, inputs[i]);
      z = final_output(&input_layer);
      te = total_error(z, outputs[i], output_layer.n);
      printf("te = %f\r", te);
      free(z);
      update_weights(&input_layer, &input_layer, 0.1, outputs[i], output_layer.n);
    }
  } while (te >= 0.1);
  printf("\nCompleted after %d passes.\n", j);
  clean_network(&input_layer);
  return 0;
}