/****************************************************************************************/
/*                                                                                      */
/*   Project     : 1D-BRNN                                                              */
/*   Release     : 3.3                                                                  */
/*                                                                                      */
/*   File        : Network.cpp                                                          */
/*   Description : Single neural network with one hidden layer                          */
/*                                                                                      */
/*   Author(s)   : Christophe Magnan (2013) - New generic version                       */
/*               : Jianlin Cheng (2003) - New custom version for SCRATCH                */
/*               : Gianluca Pollastri (2001) - Customized version for SCRATCH           */
/*               : Paolo Frasconi (1997) - Initial generic version                      */
/*                                                                                      */
/*   Copyright   : Institute for Genomics and Bioinformatics                            */
/*                 University of California, Irvine                                     */
/*                                                                                      */
/*   Modified    : 2015/07/01                                                           */
/*                                                                                      */
/****************************************************************************************/

#include "Network.h"

/*******************************************/
/*                Interface                */
/*******************************************/

// Constructor
// Builds an empty, unusable network: layer sizes are set to the -1
// sentinel and every pointer to NULL until load() or initialize()
// allocates the two layers.
Network::Network()
  : Num_Inputs(-1),Num_Hidden(-1),Num_Outputs(-1),
    Output_Layer(NULL),Hidden_Layer(NULL),
    Outputs(NULL),Back_Prop(NULL){}

// Destructor
// The network owns its two layers and releases them here. Outputs and
// Back_Prop are non-owning views into buffers held by the layers (see
// propagate()/back_propagate()), so they must NOT be deleted.
// NOTE(review): the class appears to have no copy ctor / assignment
// operator (declared in Network.h, not visible here) — copying a
// Network would double-delete the layers; confirm copies never happen.
Network::~Network()
{
  delete Output_Layer;
  delete Hidden_Layer;
  // Defensive: avoid dangling pointers if the object is inspected
  // after destruction begins (e.g. from a debugger).
  Output_Layer=NULL;
  Hidden_Layer=NULL;
}

// Write network
// Serializes the network to 'os' as a header line
// "<outputs> <hidden> <inputs>" followed by the output layer then the
// hidden layer. Must mirror the read order in load().
void Network::write(ostream& os)
{
  os << Num_Outputs << " " << Num_Hidden << " " << Num_Inputs << "\n";
  Output_Layer->write(os);
  Hidden_Layer->write(os);
}

// Load network
// Reads a network in the exact format produced by write(), then
// verifies that the layer dimensions match the header via
// check_viability() (which terminates the program on mismatch).
void Network::load(istream& is)
{
  is >> Num_Outputs >> Num_Hidden >> Num_Inputs;
  // Release any previously owned layers so that reloading into an
  // already-initialized network does not leak (delete NULL is a no-op,
  // so this is free on a freshly constructed object).
  delete Output_Layer;
  Output_Layer=new Layer();
  Output_Layer->load(is);
  delete Hidden_Layer;
  Hidden_Layer=new Layer();
  Hidden_Layer->load(is);
  check_viability();
}

// New random network
// Allocates a fresh hidden and output layer with the requested
// dimensions; the layers perform their own weight initialization.
void Network::initialize(int inputs,int hidden,int outputs)
{
  Num_Inputs=inputs;
  Num_Hidden=hidden;
  Num_Outputs=outputs;
  // Guard against leaking layers when re-initializing an existing
  // network (delete NULL is a no-op on first use).
  delete Output_Layer;
  Output_Layer=new Layer();
  Output_Layer->initialize(Num_Outputs,Num_Hidden);
  delete Hidden_Layer;
  Hidden_Layer=new Layer();
  Hidden_Layer->initialize(Num_Hidden,Num_Inputs);
}

// Allocate memory for back-propagation
// Must be called before training; delegates to both layers.
void Network::alloc_backpropagation()
{
  Output_Layer->alloc_backpropagation();
  Hidden_Layer->alloc_backpropagation();
}

// Reset the gradient matrices
// Clears the accumulated gradients in both layers before a new
// accumulation pass.
void Network::reset_gradient()
{
  Hidden_Layer->reset_gradient();
  Output_Layer->reset_gradient();
}

// Propagate the input features into the network
// Forward pass: inputs -> hidden layer -> output layer. After the
// call, Outputs aliases the output layer's activation buffer
// (non-owning pointer, invalidated when the layer is destroyed).
void Network::propagate(float* inputs)
{
  Hidden_Layer->propagate(inputs);
  Output_Layer->propagate(Hidden_Layer->SigApp_Outputs);
  Outputs=Output_Layer->SigApp_Outputs;
}

// Back-propagate the target values into the network
// Backward pass: targets -> output layer -> hidden layer. After the
// call, Back_Prop aliases the hidden layer's back-propagated error
// buffer (non-owning pointer).
void Network::back_propagate(float* target)
{
  Output_Layer->back_propagate(target);
  Hidden_Layer->back_propagate(Output_Layer->Back_Prop);
  Back_Prop=Hidden_Layer->Back_Prop;
}

// Update the gradient matrices after back-propagation
// Accumulates gradients in each layer using that layer's own input:
// the hidden activations for the output layer, the raw features for
// the hidden layer.
void Network::update_gradient(float* inputs)
{
  Output_Layer->update_gradient(Hidden_Layer->SigApp_Outputs);
  Hidden_Layer->update_gradient(inputs);
}

// Update the network after training
// Applies the accumulated gradients to the weights of both layers,
// scaled by the learning rate 'epsilon'.
void Network::update_weights(float epsilon)
{
  Hidden_Layer->update_weights(epsilon);
  Output_Layer->update_weights(epsilon);
}

// Check the viability of a loaded network
// Verifies that the dimensions declared in the model header agree
// with the dimensions of the two loaded layers; any mismatch is an
// unrecoverable corrupt-model condition, reported on stderr followed
// by exit(1).
void Network::check_viability()
{
  int v=1;
  if(Output_Layer->Num_Nodes!=Num_Outputs){ v=0; }
  if(Hidden_Layer->Num_Nodes!=Num_Hidden){ v=0; }
  if(Output_Layer->Num_Inputs!=Num_Hidden){ v=0; }
  if(Hidden_Layer->Num_Inputs!=Num_Inputs){ v=0; }
  if(!v){ cerr << "Inconsistent model, cannot proceed\n"; exit(1); }
}