#include "paw/pilot.h" #include #include #include #include /* #define MLPHEADER */ #include "mlp_gen.h" /* This file contains routines which can be called for fortran or C code Their name follow the rules: - only lowercase characters - start with mlp - end with _ for being compatible with fortran calling -> to call them from fortran use the name WITHOUT _ -> to call them from C use the name WITH _ - all arguments passed by address J.Schwindling 14-NOV-98 */ #if defined(CERNLIB_QX_SC) #define mlpsetnet mlpsetnet_ #define mlpsetlearn mlpsetlearn_ #define mlpsetnpat mlpsetnpat_ #define mlpsetpat mlpsetpat_ #define mlpinitw mlpinitw_ #define mlpepoch mlpepoch_ #define mlpfree mlpfree_ #define mlpprpawf mlpprpawf_ #define mlpcompute mlpcompute_ #endif #if defined(CERNLIB_QXCAPT) #define mlpsetnet MLPSETNET #define mlpsetlearn MLPSETLEARN #define mlpsetnpat MLPSETNPAT #define mlpsetpat MLPSETPAT #define mlpinitw MLPINITW #define mlpepoch MLPEPOCH #define mlpfree MLPFREE #define mlpprpawf MLPPRPAWF #define mlpcompute MLPCOMPUTE #endif #if defined(CERNLIB_IBM) #pragma linkage (mlpsetnet ,FORTRAN) #pragma linkage (mlpsetlearn ,FORTRAN) #pragma linkage (mlpsetnpat ,FORTRAN) #pragma linkage (mlpsetpat ,FORTRAN) #pragma linkage (mlpinitw ,FORTRAN) #pragma linkage (mlpepoch ,FORTRAN) #pragma linkage (mlpfree ,FORTRAN) #pragma linkage (mlpprpawf ,FORTRAN) #pragma linkage (mlpcompute ,FORTRAN) #endif /***********************************************************/ /* mlpsetnet */ /* */ /* to set the structure of a neural network */ /* inputs: int *nl = number of layers */ /* int *nn = number of neurons */ /* */ /* return value (int) = error value: */ /* 0: no error */ /* 1: N layers > 4 */ /* 2: N layers < 2 */ /* 3: N neurons > NMAX */ /* */ /* Author: J.Schwindling 14-Apr-99 */ /***********************************************************/ int mlp_setnet(int nl, int *nn) { int il,nneu; if((nl)>4) return(1); if((nl)<2) return(2); /* set number of layers */ NET.Nlayer = (int) nl; /* set number of neurons */ for(il=0; ilNMAX) return(3); NET.Nneur[il] = nneu; } /* set transfer functions */ SetDefaultFuncs(); return(0); } int type_of_call mlpsetnet(nl, nn) int *nl, *nn; { return mlp_setnet(*nl, nn); } /***********************************************************/ /* mlplearnalloc */ /* */ /* to allocate memory for learning */ /* should be called after mlpsetnet */ /* */ /* Author: J.Schwindling 14-Apr-99 */ /***********************************************************/ void mlplearnalloc_() { LearnAlloc(); } /***********************************************************/ /* mlplearnfree */ /* */ /* to free memory for learning */ /* */ /* Author: J.Schwindling 14-Apr-99 */ /***********************************************************/ void mlplearnfree_() { LearnFree(); } /***********************************************************/ /* mlpsetlearn */ /* */ /* to define learning method and learning parameters */ /* */ /* inputs: int *lmet = learning method: */ /* 1 = stochastic minimozation */ /* 2 = steepest descent fixed steps */ /* 3 = steepest descent with L.search*/ /* 4 = Ribiere-Polak Conjugate Grad. */ /* 5 = Fletcher-Reeves Conj. Grad. */ /* 6 = BFGS */ /* 7 = Hybrid method */ /* float *eta = learning parameter */ /* (used by methods 1 and 2) */ /* float *decay = decrease factor (per epoch) */ /* of learning parameter */ /* float *epsilon = momentum term (meth. 1,2) */ /* float *Delta = flat spot elimination param. 
/***********************************************************/
/* mlpsetlearn                                             */
/*                                                         */
/* to define the learning method and its parameters        */
/*                                                         */
/* inputs: int *lmet = learning method:                    */
/*                   1 = stochastic minimization           */
/*                   2 = steepest descent, fixed steps     */
/*                   3 = steepest descent with line search */
/*                   4 = Ribiere-Polak Conjugate Grad.     */
/*                   5 = Fletcher-Reeves Conj. Grad.       */
/*                   6 = BFGS                              */
/*                   7 = Hybrid method                     */
/*         float *eta = learning parameter                 */
/*                      (used by methods 1 and 2)          */
/*         float *decay = decrease factor (per epoch)      */
/*                        of the learning parameter        */
/*         float *epsilon = momentum term (methods 1, 2)   */
/*         float *Delta = flat spot elimination parameter  */
/*                        (methods 1 and 2)                */
/*         int *nreset = frequency at which the direction  */
/*                       is reset to the steepest gradient */
/*                       (methods 4 to 7)                  */
/*         float *tau = governs the precision of the line  */
/*                      search (methods 4 to 7)            */
/*         float *lambda = size of regularisation term     */
/*                         (method 7)                      */
/*                                                         */
/* return value (int) = error code:                        */
/*                    0: no error                          */
/*                    1: method > 7                        */
/*                                                         */
/* Author: J.Schwindling   14-Apr-99                       */
/***********************************************************/

int mlp_setlearn(int lmet, float eta, float decay, float epsilon,
                 float Delta, int nreset, float tau, float lambda)
{
	int il, in;

	if(lmet>7) return(1);

	LEARN.Meth = lmet;
	LEARN.Nreset = nreset;
	LEARN.Tau = (dbl) tau;
	LEARN.Decay = (dbl) decay;
	LEARN.Lambda = (dbl) lambda;

	/* set the learning parameters of every neuron */
	for(il=0; il<NET.Nlayer; il++) {
		for(in=0; in<NET.Nneur[il]; in++) {
			LEARN.eta[il][in] = (dbl) eta;
			LEARN.epsilon[il][in] = (dbl) epsilon;
			LEARN.delta[il][in] = (dbl) Delta;
		}
	}
	return(0);
}

int type_of_call mlpsetlearn(int *lmet, float *eta, float *decay,
                             float *epsilon, float *Delta, int *nreset,
                             float *tau, float *lambda)
{
	return mlp_setlearn(*lmet, *eta, *decay, *epsilon, *Delta,
	                    *nreset, *tau, *lambda);
}

/***********************************************************/
/* mlpsetpat                                               */
/*                                                         */
/* to set one pattern (learning or test example)           */
/*                                                         */
/* inputs: int *ifile = 0: learn file, 1: test file        */
/*         int *ipat = pattern number (1 -> N)             */
/*         float *rin = inputs                             */
/*         float *rans = answers                           */
/*         float *pond = weight of the pattern             */
/*                                                         */
/* return value (int) = error code:                        */
/*                    0: no error                          */
/*                                                         */
/* Author: J.Schwindling   14-Nov-98                       */
/***********************************************************/

int mlp_setpat(int ifile, int ipat, float *rin, float *rans, float pond)
{
	int in;

	for(in=0; in<NET.Nneur[0]; in++) {
		PAT.Rin[ifile][ipat-1][in] = (type_pat) rin[in];
	}
	for(in=0; in<NET.Nneur[NET.Nlayer-1]; in++) {
		PAT.Rans[ifile][ipat-1][in] = (type_pat) rans[in];
	}
	PAT.Pond[ifile][ipat-1] = (type_pat) pond;
	return(0);
}

int type_of_call mlpsetpat(int *ifile, int *ipat, float *rin,
                           float *rans, float *pond)
{
	return mlp_setpat(*ifile, *ipat, rin, rans, *pond);
}

/***********************************************************/
/* mlpgetw                                                 */
/*                                                         */
/* to get the weights of a given neuron                    */
/*                                                         */
/* inputs: int *layer: layer number (2 -> N)               */
/*         int *neuron: neuron number (1 -> N)             */
/* output: float *weights: weights                         */
/*                                                         */
/* return value (int) : error code = 0: no error           */
/*                                   1: wrong layer        */
/*                                   2: wrong neuron       */
/*                                                         */
/* Author: J.Schwindling   31-May-99                       */
/***********************************************************/

int mlpgetw_(int *layer, int *neuron, float *weights)
{
	int i;

	if(*layer<2 || *layer>NET.Nlayer) return 1;
	if(*neuron<1 || *neuron>NET.Nneur[*layer-1]) return 2;

	/* weights[0] is the bias; weights[i] (i>=1) multiplies the
	   output of neuron i of the previous layer */
	for(i=0; i<=NET.Nneur[*layer-2]; i++) {
		weights[i] = NET.Weights[*layer-1][*neuron-1][i];
	}
	return 0;
}

/***********************************************************/
/* mlpgetdata                                              */
/*                                                         */
/* gets data for a given file / pattern                    */
/*                                                         */
/* input:  int *ifile : 0 = learn file                     */
/*                      1 = test file                      */
/*         int *ipat : example number (1 -> N)             */
/* output: float *rin: inputs                              */
/*         float *rout: outputs from the current net       */
/*         float *rans: desired answers                    */
/*         float *pond: event weight                       */
/*                                                         */
/* Author: J.Schwindling   26-APR-99                       */
/***********************************************************/

void mlpgetdata_(int *ifile, int *ipat, float *rin, float *rout,
                 float *rans, float *pond)
{
	type_pat *rrin;
	dbl *rrout;
	int in;

	rrin = (type_pat*) malloc(NET.Nneur[0]*sizeof(type_pat));
	rrout = (dbl*) malloc(NET.Nneur[NET.Nlayer-1]*sizeof(dbl));

	for(in=0; in<NET.Nneur[0]; in++) {
		rin[in] = (float) PAT.Rin[*ifile][*ipat-1][in];
		rrin[in] = PAT.Rin[*ifile][*ipat-1][in];
	}

	/* compute the outputs of the current network */
	MLP_Out(rrin, rrout);

	for(in=0; in<NET.Nneur[NET.Nlayer-1]; in++) {
		rout[in] = (float) rrout[in];
		rans[in] = (float) PAT.Rans[*ifile][*ipat-1][in];
	}
	*pond = (float) PAT.Pond[*ifile][*ipat-1];

	free(rrin);
	free(rrout);
}

/***********************************************************/
/* mlpprpawf                                               */
/*                                                         */
/* to print the current network as a fortran function      */
/* which can be compiled inside PAW (COMIS)                */
/*                                                         */
/***********************************************************/

void mlpprpawf_()
{
	int il, in, jn;
	FILE *W;

	/* NOTE: the output file name and the generated function
	   name below are placeholders */
	W = fopen("mlppaw.f","w");
	fprintf(W,"      REAL FUNCTION MLPPAW(RIN)\n");
	fprintf(W,"      DIMENSION RIN(%d)\n",NET.Nneur[0]);
	fprintf(W,"C\n");

	/* input layer: copy the inputs */
	for(in=0; in<NET.Nneur[0]; in++)
		fprintf(W,"      OUT%d = RIN(%d)\n",in+1,in+1);

	/* hidden layers: weighted sums, then sigmoids */
	for(il=1; il<NET.Nlayer-1; il++) {
		fprintf(W,"C\n");
		for(in=0; in<NET.Nneur[il]; in++) {
			fprintf(W,"      RIN%d = %lf\n",in+1,
				(double) NET.Weights[il][in][0]);
			for(jn=1; jn<=NET.Nneur[il-1]; jn++)
				fprintf(W,"     > +(%lf) * OUT%d\n",
					(double) NET.Weights[il][in][jn],jn);
		}
		fprintf(W,"C\n");
		for(in=0; in<NET.Nneur[il]; in++)
			fprintf(W,"      OUT%d = SIGMOID(RIN%d)\n",
				in+1,in+1);
	}

	/* output layer: a single output, returned as the function
	   value */
	il = NET.Nlayer-1;
	fprintf(W,"      MLPPAW = %lf\n",(double) NET.Weights[il][0][0]);
	for(jn=1; jn<=NET.Nneur[il-1]; jn++)
		fprintf(W,"     > +(%lf) * OUT%d\n",
			(double) NET.Weights[il][0][jn],jn);
	fprintf(W,"C\n");
	fprintf(W,"      END\n");
	fprintf(W,"      REAL FUNCTION SIGMOID(X)\n");
	fprintf(W,"      SIGMOID = 1./(1.+EXP(-X))\n");
	fprintf(W,"      END\n");
	fclose(W);
}

/***********************************************************/
/* mlpreadf                                                */
/*                                                         */
/* to read the learn.pat or test.pat file                  */
/*                                                         */
/* Author: J.Schwindling   26-Apr-99                       */
/***********************************************************/

void mlpreadf_(int *ifile)
{
	int idummy1, idummy2, idummy3;

	if(*ifile==0) {
		ReadPatterns("learn.pat", 0, &idummy1, &idummy2, &idummy3);
	}
	if(*ifile==1) {
		ReadPatterns("test.pat", 1, &idummy1, &idummy2, &idummy3);
	}
}
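/* ------------------------------------------------------------------ */
/* Illustrative usage sketch, not part of the library: loading one    */
/* learning pattern by hand and selecting the BFGS minimization.      */
/* The numerical values are arbitrary; eta, decay, epsilon and Delta  */
/* only matter for methods 1 and 2, so they are left at neutral       */
/* values here.  The MLP_EXAMPLES guard and the function name are     */
/* ours, so the sketch never enters a normal build.                   */
/* ------------------------------------------------------------------ */
#ifdef MLP_EXAMPLES
static int example_fill_and_configure(void)
{
	float rin[5]  = {0.1f, 0.2f, 0.3f, 0.4f, 0.5f};	/* 5 inputs */
	float rans[1] = {1.0f};				/* 1 answer */
	int err;

	/* pattern 1 of the learn file (ifile = 0), event weight 1 */
	err = mlp_setpat(0, 1, rin, rans, 1.0f);
	if(err != 0) return err;

	/* BFGS (method 6), reset to the steepest gradient every 50
	   epochs, line search precision tau = 1.5, no regularisation
	   term */
	return mlp_setlearn(6, 0.0f, 1.0f, 0.0f, 0.0f, 50, 1.5f, 0.0f);
}
#endif /* MLP_EXAMPLES */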
/***********************************************************/
/* mlpsettf                                                */
/*                                                         */
/* to set the transfer function of a given neuron          */
/*                                                         */
/* inputs: int *layer: layer number (2 -> Nlayer)          */
/*         int *neuron: neuron number (1 -> Nneur)         */
/*         int *tfunc = 0: neuron not activated            */
/*                    = 1: linear neuron                   */
/*                    = 2: sigmoid neuron                  */
/*                                                         */
/* return code (int) = 0: no error                         */
/*                   = 1: wrong layer number               */
/*                   = 2: wrong neuron number              */
/*                   = 3: wrong transfer function          */
/*                                                         */
/* Author: J.Schwindling   07-May-99                       */
/***********************************************************/

int mlpsettf_(int *layer, int *neuron, int *tfunc)
{
	if(*layer<2 || *layer>NET.Nlayer) return 1;
	if(*neuron<1 || *neuron>NET.Nneur[*layer-1]) return 2;
	if(*tfunc<0 || *tfunc>2) return 3;

	SetTransFunc(*layer, *neuron, *tfunc);
	return 0;
}

/***********************************************************/
/* mlpgettf                                                */
/*                                                         */
/* to get the transfer function of a given neuron          */
/*                                                         */
/* inputs: int *layer: layer number (2 -> Nlayer)          */
/*         int *neuron: neuron number (1 -> Nneur)         */
/* output: int *tfunc = 0: neuron not activated            */
/*                    = 1: linear neuron                   */
/*                    = 2: sigmoid neuron                  */
/*                                                         */
/* return code (int) = 0: no error                         */
/*                   = 1: wrong layer number               */
/*                   = 2: wrong neuron number              */
/*                                                         */
/* Author: J.Schwindling   07-May-99                       */
/***********************************************************/

int mlpgettf_(int *layer, int *neuron, int *tfunc)
{
	if(*layer<2 || *layer>NET.Nlayer) return 1;
	if(*neuron<1 || *neuron>NET.Nneur[*layer-1]) return 2;

	/* T_func is indexed [layer][neuron], as in SetTransFunc */
	*tfunc = NET.T_func[*layer-1][*neuron-1];
	return 0;
}

/***********************************************************/
/* mlpnorm                                                 */
/*                                                         */
/* to normalize the inputs to mean = 0, rms = 1            */
/*                                                         */
/* Author: J.Schwindling   18-May-99                       */
/***********************************************************/

void mlpnorm_()
{
	DIVERS.Norm = 1;
	NormalizeInputs();
}
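/* ------------------------------------------------------------------ */
/* Illustrative usage sketch, not part of the library: making the     */
/* single output neuron linear (tfunc = 1), reading the setting back  */
/* with mlpgettf_, and normalizing the inputs.  The MLP_EXAMPLES      */
/* guard and the function name are ours, so the sketch never enters   */
/* a normal build.                                                    */
/* ------------------------------------------------------------------ */
#ifdef MLP_EXAMPLES
static int example_transfer_and_norm(void)
{
	int layer = NET.Nlayer;	/* output layer (layers count 1 -> Nlayer) */
	int neuron = 1;		/* first (and only) output neuron          */
	int tfunc = 1;		/* 1 = linear                              */
	int err, got;

	err = mlpsettf_(&layer, &neuron, &tfunc);
	if(err != 0) return err;

	err = mlpgettf_(&layer, &neuron, &got);
	if(err != 0 || got != 1) return -1;	/* got should now be 1 */

	mlpnorm_();		/* inputs to mean = 0, rms = 1 */
	return 0;
}
#endif /* MLP_EXAMPLES */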