/* mlayer.c
 * Multilayer Feed-Forward Neural Network Library
 * (c) Logan Kearsley
 */

#include <stdio.h>
#include <stdlib.h>
#include <time.h>   /* for the (commented-out) srand((unsigned)time(NULL)) seed in randNet */
#include "mlayer.h"

// printout- print a vector of l floats to stdout
void printout(float* o, int l){
    int i;
    for(i=0;i<l;i++) printf("%f ", o[i]);
    printf("\n");
}

// loadInput- copy an input vector into layer 0 of the passing structure
void loadInput(neuro_t* w, float* I){
    int k;
    for(k=0;k<w->D[0];k++) w->P[0][k]=I[k];
}

// loadTarget- copy a target vector into the target buffer
void loadTarget(neuro_t* w, float* T){
    int k;
    for(k=0;k<w->D[w->depth];k++) w->E[k]=T[k];
}

// copyOutput- copy the output layer of the passing structure into O
void copyOutput(neuro_t* w, float* O){
    int k;
    for(k=0;k<w->D[w->depth];k++) O[k] = w->P[w->depth][k];
}

// momentumOn- allocate and zero the weight-change cache and turn on momentum updates
void momentumOn(neuro_t* w){
    int d,i,o;
    int dep = w->depth;
    w->bits=1;
    w->C = (float***)malloc(sizeof(float**)*dep);
    for(d=0;d<dep;d++){
        w->C[d] = (float**)malloc(sizeof(float*)*(w->D[d]));
        for(i=0;i<w->D[d];i++){
            w->C[d][i] = (float*)malloc(sizeof(float)*(w->D[d+1]));
            for(o=0;o<w->D[d+1];o++) w->C[d][i][o]=0;
        }
    }
}

// momentumOff- free the weight-change cache and turn off momentum updates
void momentumOff(neuro_t* w){
    int d,i;
    int dep = w->depth;
    w->bits=0;
    for(d=0;d<dep;d++){
        for(i=0;i<w->D[d];i++) free(w->C[d][i]);
        free(w->C[d]);
    }
    free(w->C);
}

// swapOn- set up a list of swappable input buffers, saving the original layer-0 buffer in slot 0
void swapOn(neuro_t* w){
    w->I = (float**)malloc(sizeof(float*));
    w->I[0] = w->P[0];
    w->icount = 0;
}

// swapOff- restore the original layer-0 buffer and free the extra input buffers
void swapOff(neuro_t* w){
    int i;
    w->P[0] = w->I[0];
    for(i=1;i<=w->icount;i++) free(w->I[i]);
    free(w->I);
    w->icount=0;
}

// addInput- allocate one more swappable input buffer; returns the new total number of buffers
int addInput(neuro_t* w){
    w->I = (float**)realloc(w->I,sizeof(float*)*(++w->icount+1));
    w->I[w->icount] = (float*)malloc(sizeof(float)*w->D[0]);
    return w->icount+1;
}

/* mquery- query a multilayer net and put data in the passing structure
 * Arguments:
 *    neuro_t* w- pointer to network data
 */
void mquery(neuro_t* w){
    int i,o,k;
    for(k=1;k<=w->depth;k++){
        float* O=w->P[k];
        for(o=0;o<w->D[k];o++){
            O[o]=0;
            for(i=0;i<w->D[k-1];i++) O[o]+=w->L[k-1][i][o]*w->P[k-1][i];
            O[o] = w->act(O[o]);
        }
    }
}

/* backprop- modify weights in the input and hidden layers
 * Requires that loadInput, loadTarget, and mquery be run previously
 * Arguments:
 *    neuro_t* w- pointer to network data
 * Returns the summed squared error for the current pattern
 */
float backprop(neuro_t* w){
    int i,j,k;
    float error;
    float** deltas = (float**)malloc(sizeof(float*)*w->depth);
    // calculate error terms for output
    deltas[w->depth-1] = (float*)malloc(sizeof(float)*w->D[w->depth]);
    for(k=0;k<w->D[w->depth];k++){
        error = w->E[k]-w->P[w->depth][k];
        deltas[w->depth-1][k] = w->der(w->P[w->depth][k]) * error;
    }
    // calculate error terms for hidden
    for(k=w->depth-1;k>0;k--){
        deltas[k-1] = (float*)malloc(sizeof(float)*w->D[k]);
        for(i=0;i<w->D[k];i++){
            error = 0.0;
            for(j=0;j<w->D[k+1];j++) error += deltas[k][j]*w->L[k][i][j];
            deltas[k-1][i] = w->der(w->P[k][i]) * error;
        }
    }
    if(w->bits & 1){
        //printf("DB: momentum On.\n");
        for(k=0;k<w->depth;k++){    // update weights with momentum
            for(i=0;i<w->D[k];i++){
                for(j=0;j<w->D[k+1];j++){
                    float change = deltas[k][j]*w->P[k][i];
                    w->L[k][i][j] += w->eta*change + w->mu*w->C[k][i][j];
                    w->C[k][i][j] = change;
                }
            }
        }
    } else {
        //printf("DB: momentum Off.\n");
        for(k=0;k<w->depth;k++){    // update weights
            for(i=0;i<w->D[k];i++){
                for(j=0;j<w->D[k+1];j++) w->L[k][i][j] += w->eta*deltas[k][j]*w->P[k][i];
            }
        }
    }
    for(k=0;k<w->depth;k++) free(deltas[k]);    // release the per-call delta buffers
    free(deltas);
    error=0.0;
    for(k=0;k<w->D[w->depth];k++) error += (w->E[k]-w->P[w->depth][k])*(w->E[k]-w->P[w->depth][k]);
    return error;
}

// mprntnet- print every layer of the network via printnet
void mprntnet(neuro_t* w){
    int i;
    for(i=0;i<w->depth;i++){
        printf("\nLayer %d", i+1);
        printnet(w->L[i], w->P[i], w->P[i+1], w->D[i], w->D[i+1]);
        printf("\n~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~");
    }
}

/******************************
 * Saving and Generating Nets *
 ******************************/

static inline float** mkstore(int* D, int depth){
    //create vector for storing data passed between layers
    int i,k;
    float** P = (float**)malloc(sizeof(float*)*(depth+1));
    for(i=0;i<=depth;i++){
        P[i]=(float*)malloc(sizeof(float)*D[i]);
        for(k=0;k<D[i];k++) P[i][k]=0;
    }
    return P;
}

// mldnet- build a multilayer net from a text-format save file (see msvnet)
void mldnet(FILE* f, neuro_t* w){
    int i,o,d;
    int in,out;
    w->bits=0;
    fscanf(f,"%d",&(w->depth));
    w->L = (float***)malloc(sizeof(float**)*(w->depth));
    w->D = (int*)malloc(sizeof(int)*(w->depth+1));
    for(d=0;d<w->depth;d++){
        float** N;
fscanf(f,"%d %d",&in,&out); w->D[d] = in; N = (float**)malloc(sizeof(float*)*in); for(i=0;iL[d] = N; } w->D[w->depth]=out; w->P = mkstore(w->D, w->depth); w->E = (float*)malloc(sizeof(float)*w->D[w->depth]); w->act=sigma; w->der=sigder; } void bldnet(FILE* f, neuro_t* w){ int i,k,d; w->bits=0; fread(&d,sizeof(int),1,f); w->L = (float***)malloc(sizeof(float**)*d); w->D = (int*)malloc(sizeof(int)*(d+1)); fread(w->D,sizeof(int),d+1,f); w->depth = d; for(k=0;kD[k],out=w->D[k+1]; w->L[k] = (float**)malloc(sizeof(float*)*in); for(i=0;iL[k][i++],sizeof(float),out,f)) w->L[k][i] = (float*)malloc(sizeof(float)*out); } w->P = mkstore(w->D,d); w->E = (float*)malloc(sizeof(float)*w->D[d]); w->act=sigma; w->der=sigder; } int mrldnet(FILE* f, neuro_t* w){ int i,o,d; int in,out; w->bits=0; fscanf(f,"%d",&d); if(d!=w->depth){ rewind(f); return -1; } for(d=0;ddepth;d++){ fscanf(f,"%d %d",&in,&out); if(in!=w->D[d] || out!=w->D[d+1]){ rewind(f); return -1; } for(i=0;iL[d][i]+o); } w->act=sigma; w->der=sigder; return 0; } int brldnet(FILE* f, neuro_t* w){ int i,d; int in,out; w->bits=0; fread(&d,sizeof(int),1,f); if(d!=w->depth){ rewind(f); return -1; } for(d=0;d<=w->depth;d++){ fread(&i,sizeof(int),1,f); if(i!=w->D[d]){ rewind(f); return -1; } } for(d=0;ddepth;d++){ int in=w->D[d],out=w->D[d+1]; if(in!=w->D[d] || out!=w->D[d+1]){ rewind(f); return -1; } for(i=0;iL[d][i++],sizeof(float),out,f)); } w->act=sigma; w->der=sigder; return 0; } /* mrndnet- generate a multilayer network with random initial weights * * Arguments: * neuro_t* w- pointer to location for storing network data * int* dim- a pointer to a list in which the required layer dimensions are stored * int dep- the number of layers in the network */ static inline float** randNet(int in, int out){ int i,o; float** N = (float**)malloc(sizeof(float*)*in); //srand((unsigned)time(NULL)); for(i=0;ibits=0; w->L = (float***)malloc(sizeof(float**)*dep); w->D = (int*)malloc(sizeof(int)*dep); for(d=0;dL[d] = randNet(dim[d],dim[d+1]); for(d=0;d<=dep;d++) w->D[d] = dim[d]; w->depth = dep; w->P = mkstore(dim, dep); w->E = (float*)malloc(sizeof(float)*dim[dep]); w->act=sigma; w->der=sigder; } /* msvnet- save the weights for a multilayer network to a file * * Arguments- * FILE* f- a pointer to a filestream opened for writing * neuro_t* w- pointer to location for storing network data */ void msvnet(FILE* f, neuro_t* w){ int i,o,d; fprintf(f,"%d ", w->depth); for(d=0;ddepth;d++){ int in=w->D[d],out=w->D[d+1]; fprintf(f,"%d %d ",in,out); for(i=0;iL[d][i][o]); } } } void bsvnet(FILE* f, neuro_t* w){ int i,o,d; fwrite(&(w->depth),sizeof(int),1,f); fwrite(w->D,sizeof(int),w->depth+1,f); for(d=0;ddepth;d++){ int in=w->D[d],out=w->D[d+1]; for(i=0;iL[d][i++],sizeof(float),out,f)); } } void mdstrynet(neuro_t* w){ int d,i; for(d=0;ddepth;d++){ for(i=0;iD[d];i++) free(w->L[d][i]); free(w->P[d]); } free(w->L); free(w->P); free(w->D); free(w->E); w->depth=0; }