#ifndef MLPHEADER
#define MLPHEADER

/*
 * MLPfit neural-network library — public header.
 * Global state mimics Fortran COMMON blocks (trailing-underscore names).
 * Exactly ONE translation unit must define EXTERNMLP_GEN before including
 * this header: that TU owns the definitions of all globals; every other TU
 * sees `extern` declarations only.  (The previous revision relied on
 * tentative "common" definitions, which link-fail under -fno-common — the
 * default since GCC 10 / Clang 11 — and it also never declared
 * JacobianMemory as extern even though the generator TU defines it.)
 */

#define NMAX 100        /* max neurons per layer */
#define NPMAX 100000    /* max training patterns */

typedef double dbl;
typedef float type_pat;

/* definition du reseau — network topology, weights and activations */
struct net_st {
    int Nlayer, Nneur[4], Nweights;
    dbl Weights[4][NMAX][NMAX+1];   /* NMAX+1: slot 0 presumably the bias — confirm in NN_gen.c */
    dbl Outn[4][NMAX], Inn[NMAX][4];
    dbl Deriv1[4][NMAX], Deriv2[NMAX][4], Delta[4][NMAX];
/*  dbl ***Weights;*/
/*  dbl Temp[NMAX][4];*/
    int T_func[NMAX][4];            /* transfer-function code per neuron */
    int Rdwt, Debug;
};
#define NET net_

/* apprentissage — learning-method parameters and weight-update state */
struct learn_st {
    int Nepoch, Meth, Nreset;
    dbl Tau, Norm, Decay, Lambda, Alambda;
    dbl eta[NMAX][4], epsilon[NMAX][4], delta[NMAX][4];
    dbl Odw[NMAX+1][NMAX][4];
    dbl DeDw[NMAX+1][NMAX][4], ODeDw[NMAX+1][NMAX][4];
};
#define LEARN learn_

/* patterns — training/test examples (index 0 and 1: two pattern files) */
struct pat_st {
    int Npat[2], Iponde;
    type_pat ***Rin, ***Rans, **Pond;
    dbl Ponds[10];
};
#define PAT pat_

/* divers — miscellaneous flags and input-normalisation statistics */
struct divers_st {
    int Dbin;
    int Ihess;
    dbl mean[NMAX], sigma[NMAX];
    int Norm;
    char Outf;
};
#define DIVERS divers_

#if (defined(EXTERNMLP_GEN))
/* The single generating TU: real definitions live here. */
struct net_st net_;
struct learn_st learn_;
struct pat_st pat_;
struct divers_st divers_;
int MessLang = 0;
int OutputWeights = 100;
int ExamplesMemory = 0;
int WeightsMemory = 0;
int PatMemory[2] = {0, 0};
int BFGSMemory = 0;
int JacobianMemory = 0;
int LearnMemory = 0;
float MLPfitVersion = 1.33;
dbl LastAlpha = 0;
int NLineSearchFail = 0;
/* work areas allocated at run time (LearnAlloc & friends) */
dbl ***dir;
dbl *delta;
dbl **BFGSH;
dbl *Gamma;
dbl **JacobianMatrix;
int *ExamplesIndex;
dbl **Hessian;
#else
/* Every other TU: declarations only. */
extern struct net_st net_;
extern struct learn_st learn_;
extern struct pat_st pat_;
extern struct divers_st divers_;
extern int MessLang;
extern int OutputWeights;
extern int ExamplesMemory;
extern int WeightsMemory;
extern int PatMemory[2];
extern int BFGSMemory;
extern int JacobianMemory;  /* was missing: defined above but never declared */
extern int LearnMemory;
extern float MLPfitVersion;
extern dbl LastAlpha;
extern int NLineSearchFail;
extern dbl ***dir;
extern dbl *delta;
extern dbl **BFGSH;
extern dbl *Gamma;
extern dbl **JacobianMatrix;
extern int *ExamplesIndex;
extern dbl **Hessian;
#endif

/* --- network evaluation --- */
void MLP_Out(type_pat *rrin, dbl *rrout);
void MLP_Out2(type_pat *rrin, dbl *rrout, dbl *Deriv1);
void MLP_Out_T(type_pat *rrin);
dbl MLP_Test(int ifile, int regul);
dbl MLP_Sigmoide(dbl x);

/* --- training loop --- */
dbl MLP_Epoch(int iepoch, dbl *alpmin, int *ntest);
int MLP_Train(int *ipat, dbl *err);
dbl MLP_Stochastic(void);
int StochStep(void);
int StochStepHyb(void);
int FixedStep(dbl alpha);
void EtaDecay(void);

/* --- gradient bookkeeping --- */
dbl DeDwNorm(void);
dbl DeDwProd(void);
void DeDwZero(void);
void DeDwSaveZero(void);
void DeDwScale(int Nexamples);
void DeDwSave(void);
int DeDwSum(type_pat *ans, dbl *out, int ipat);

/* --- transfer functions --- */
int SetTransFunc(int layer, int neuron, int func);
void SetDefaultFuncs(void);

/* --- search directions and line search --- */
void SteepestDir(void);
void CGDir(dbl beta);
dbl DerivDir(void);
void GetGammaDelta(void);
void BFGSdir(int Nweights);
void InitBFGSH(int Nweights);
int GetBFGSH(int Nweights);
int LineSearch(dbl *alpmin, int *Ntest, dbl Err0);
int DecreaseSearch(dbl *alpmin, int *Ntest, dbl Err0);
void MLP_ResLin(void);
void MLP_Line(dbl ***w0, dbl alpha);
int LineSearchHyb(dbl *alpmin, int *Ntest);
void MLP_LineHyb(dbl ***w0, dbl alpha);

/* --- weights, patterns, I/O --- */
int ShuffleExamples(int n, int *index);
double MLP_Rand(dbl min, dbl max);
void InitWeights(void);
void NormalizeInputs(void);
void LoadWeights(char *filename, int *iepoch);
void SaveWeights(char *filename, int iepoch);
void SetLambda(double Wmax);
void PrintWeights(void);
int ReadPatterns(char *filename, int ifile, int *inet, int *ilearn,
                 int *iexamples);
int CountLexemes(char *string);
void getnLexemes(int n, char *s, char **ss);
void getLexemes(char *s, char **ss);
void LearnAlloc(void);
void LearnFree(void);
void MLP_PrFFun(char *filename);
void MLP_PrCFun(char *filename);
int AllocPatterns(int ifile, int npat, int nin, int nout, int iadd);
int FreePatterns(int ifile);
void AllocWeights(void);
void FreeWeights(void);

#endif /* MLPHEADER */

/* obsolete routines no longer / future routines not yet in NN_gen.c */
/*
dbl **Hessian;
void HessianAlloc(int Nweights);
void PrHessian(int Nweights);
void HessianZero(int Nweights);
void HessianSum(int Nweights);
dbl sigmoid(dbl *rin,dbl *rout);
int SetNet(int *,int *,int *,dbl *);
int SetTemp(int *, int *, int *);
int SetWeights(int *, int *, dbl *);
int GetNet(int *, int *);
int GetTFunc(int *, int *, dbl *);
int SetLearn(int *, int *, dbl *,dbl *, dbl *);
int InitLearn(dbl *,dbl *, dbl *);
int Qprop(int);
int SetPat(int *,int *,int *,dbl *,int *,dbl *);
int SetNPat(int *, int *);
int GetWeights(int *, int *, dbl *);
int GetOutL(int *, dbl *);
int Test1(int *, int *, dbl *, dbl *, dbl *);
void MLP_LAresLin();
void MLP_NAresLin();
*/