/****************************************************************************************/
/*                                                                                      */
/*  Project     : 1D-BRNN                                                              */
/*  Release     : 3.3                                                                  */
/*                                                                                      */
/*  File        : Layer.h                                                              */
/*  Description : Single layer of a neural network                                     */
/*                                                                                      */
/*  Author(s)   : Christophe Magnan (2013) - New generic version                       */
/*              : Jianlin Cheng (2003) - New custom version for SCRATCH                */
/*              : Gianluca Pollastri (2001) - Customized version for SCRATCH           */
/*              : Paolo Frasconi (1997) - Initial generic version                      */
/*                                                                                      */
/*  Copyright   : Institute for Genomics and Bioinformatics                            */
/*                University of California, Irvine                                     */
/*                                                                                      */
/*  Modified    : 2015/07/01                                                           */
/*                                                                                      */
/****************************************************************************************/

// Include guard: without it, including this header twice in one translation
// unit redefines the class and fails to compile.
#pragma once

#include "Import.h"

// One fully-connected layer of the BRNN: holds the weight matrix and bias
// vector, plus scratch buffers for the forward pass (propagation) and the
// backward pass (gradients). Serializable via write()/load().
//
// NOTE(review): the raw owning pointers below, together with a user-declared
// destructor and no deleted/defined copy operations, violate the Rule of
// Three — copying a Layer presumably double-frees these buffers. Fixing this
// requires the .cpp; flagged here rather than changed, to keep the interface
// byte-compatible with existing callers.
class Layer
{
 public:

  // Layer parameters
  int Num_Nodes;          // Number of nodes in the layer
  int Num_Inputs;         // Number of features in input of the layer
  int IsOutputLay;        // Flag (0/1) marking the output layer of the BRNN

  // Layer data (owned buffers; sized by initialize())
  float*  Bias;           // Bias value associated with each node in the layer [Num_Nodes]
  float** Weights;        // Matrix of weights for each node [Num_Nodes][Num_Inputs]

  // Propagation data (filled by propagate())
  float* Direct_Outputs;  // Outputs of the layer after the last propagation
  float* SigApp_Outputs;  // Sigmoid approximation of the direct outputs

  // Back-Propagation data (allocated by alloc_backpropagation())
  float*  Gradient;       // Gradient contributions computed during the last back-propagation
  float*  Back_Prop;      // Next values to back-propagate in lower layers
  float*  dBias;          // Accumulated gradients of the Bias
  float** dWeights;       // Accumulated gradients of the Weights

  // Interface (implementations in the corresponding .cpp)
  Layer();
  ~Layer();
  void write(ostream& os);                  // Serialize parameters and weights to a stream
  void load(istream& is);                   // Deserialize parameters and weights from a stream
  void initialize(int nodes,int inputs);    // Size the layer and allocate Bias/Weights
  void alloc_backpropagation();             // Allocate the gradient buffers for training
  void reset_gradient();                    // Zero the accumulated gradients
  void propagate(float* inputs);            // Forward pass over one input vector
  void back_propagate(float* target);       // Backward pass given the target/error signal
  void update_gradient(float* inputs);      // Accumulate dBias/dWeights for one example
  void update_weights(float epsilon);       // Apply accumulated gradients with learning rate epsilon
  void check_viability();                   // Sanity-check the layer's state

};