/*
 * NOTE(review): This copy of Layer.cpp is textually CORRUPTED and cannot
 * compile as-is; it must be restored from a pristine checkout rather than
 * hand-repaired.  Two kinds of damage are visible in the text below:
 *
 *   1. All original newlines were collapsed: the whole file now sits on a
 *      single physical line, so the '//' line comments ("// Destructor",
 *      "// New random layer") would swallow the code that follows them.
 *   2. Every span between a '<' and the next '>' was stripped, as if the
 *      file passed through an HTML tag remover.  Every loop header of the
 *      form "for(int n=0;n<Num_Nodes;n++){" is truncated at the '<', and
 *      everything up to the next '>' -- often an entire function body plus
 *      the start of the next function -- is missing.  E.g. the destructor
 *      body and the opening of the stream loader were fused into
 *      "for(int n=0;n> Num_Nodes >> Num_Inputs >> IsOutputLay;".
 *
 * What the surviving fragments still establish:
 *   - Layer::Layer(): default-constructs Num_Nodes/Num_Inputs/IsOutputLay
 *     to 0 and all owned arrays (Bias, Weights, Direct_Outputs,
 *     SigApp_Outputs, Gradient, Back_Prop, dBias, dWeights) to NULL.
 *   - Layer::~Layer(): iterated over nodes, presumably releasing the
 *     per-node weight rows -- the body itself is lost.
 *   - A loader taking an istream: reads "Num_Nodes Num_Inputs IsOutputLay",
 *     allocates Bias[Num_Nodes] and Weights[Num_Nodes][...], then per node
 *     reads the weights ("is >> Weights[n][i]") followed by one bias
 *     ("is >> Bias[n]"); afterwards calls check_viability() and allocates
 *     Direct_Outputs and SigApp_Outputs (one float per node each).
 *   - Layer::initialize(int nodes,int inputs): builds a fresh random layer
 *     (IsOutputLay=0) seeded through drand48(); the exact weight/bias
 *     initialization formula fell inside a stripped span -- TODO confirm
 *     against the release sources.
 *   - A propagation routine (name and signature lost) computing a
 *     softmax-style normalisation with overflow protection: it tracks
 *     max/argmax over the node activations, sets an overflow flag when an
 *     activation exceeds 85, and otherwise accumulates norm+=(float)exp(v).
 *     A second surviving ">85){ overflow=1; }" suggests a parallel guarded
 *     path (e.g. for the non-output case) -- TODO confirm.
 *   - Layer::check_viability(): validates the loaded model via a flag v
 *     (some check of the form "... > 1" survives); on failure prints
 *     "Inconsistent model, cannot proceed" to cerr and calls exit(1).
 *
 * Action: recover this file from the 1D-BRNN release 3.3 distribution
 * (see the banner; last modified 2015/07/01).  Do not edit the mangled
 * text below -- any reconstruction here would be guesswork.
 */
/****************************************************************************************/ /* */ /* Project : 1D-BRNN */ /* Release : 3.3 */ /* */ /* File : Layer.cpp */ /* Description : Single layer of a neural network */ /* */ /* Author(s) : Christophe Magnan (2013) - New generic version */ /* : Jianlin Cheng (2003) - New custom version for SCRATCH */ /* : Gianluca Pollastri (2001) - Customized version for SCRATCH */ /* : Paolo Frasconi (1997) - Initial generic version */ /* */ /* Copyright : Institute for Genomics and Bioinformatics */ /* University of California, Irvine */ /* */ /* Modified : 2015/07/01 */ /* */ /****************************************************************************************/ #include "Layer.h" /*******************************************/ /* Interface */ /*******************************************/ // Constructor Layer::Layer() : Num_Nodes(0),Num_Inputs(0),IsOutputLay(0), Bias(NULL),Weights(NULL),Direct_Outputs(NULL), SigApp_Outputs(NULL),Gradient(NULL), Back_Prop(NULL),dBias(NULL),dWeights(NULL){} // Destructor Layer::~Layer() { for(int n=0;n> Num_Nodes >> Num_Inputs >> IsOutputLay; Bias=new float[Num_Nodes]; Weights=new float*[Num_Nodes]; for(int n=0;n> Weights[n][i]; } is >> Bias[n]; } check_viability(); Direct_Outputs=new float[Num_Nodes]; SigApp_Outputs=new float[Num_Nodes]; } // New random layer void Layer::initialize(int nodes,int inputs) { Num_Nodes=nodes; Num_Inputs=inputs; IsOutputLay=0; drand48(); Bias=new float[Num_Nodes]; Weights=new float*[Num_Nodes]; for(int n=0;nmax){ max=v; argmax=n; } if(v>85){ overflow=1; } else{ norm+=(float)exp(v); } } for(int n=0;n85){ overflow=1; } } for(int n=0;n1)){ v=0; } if(!v){ cerr << "Inconsistent model, cannot proceed\n"; exit(1); } }