/** * Neuron base class. * Represents a basic element of neural network, node in the net's graph. * There are several possibilities for creation an object of type Neuron, different constructors suites for * different situations. */ template <typename T> class Neuron { public: /** * A default Neuron constructor. * - Description: Creates a Neuron; general purposes. * - Purpose: Creates a Neuron, linked to nothing, with a Linear network function. * - Prerequisites: None. */ Neuron( ) : mNetFunc( new Linear ), mSumOfCharges( 0.0 ) { }; /** * A Neuron constructor based on NetworkFunction. * - Description: Creates a Neuron; mostly designed to create an output kind of neurons. * @param inNetFunc - a network function which is producing neuron's output signal; * - Purpose: Creates a Neuron, linked to nothing, with a specific network function. * - Prerequisites: The existence of NetworkFunction object. */ Neuron( NetworkFunction * inNetFunc ) : mNetFunc( inNetFunc ), mSumOfCharges( 0.0 ){ }; Neuron( std::vector<NeuralLink<T > *>& inLinksToNeurons, NetworkFunction * inNetFunc ) : mNetFunc( inNetFunc ), mLinksToNeurons(inLinksToNeurons), mSumOfCharges(0.0){ }; /** * A Neuron constructor based on layer of Neurons. * - Description: Creates a Neuron; mostly designed to create an input and hidden kinds of neurons. * @param inNeuronsLinkTo - a vector of pointers to Neurons which is representing a layer; * @param inNetFunc - a network function which is producing neuron's output signal; * - Purpose: Creates a Neuron, linked to every Neuron in provided layer. * - Prerequisites: The existence of std::vector<Neuron *> and NetworkFunction. 
*/ Neuron( std::vector<Neuron *>& inNeuronsLinkTo, NetworkFunction * inNetFunc ); virtual ~Neuron( ); virtual std::vector<NeuralLink<T > *>& GetLinksToNeurons( ){ return mLinksToNeurons; }; virtual NeuralLink<T> * at( const int& inIndexOfNeuralLink ) { return mLinksToNeurons[ inIndexOfNeuralLink ]; }; virtual void SetLinkToNeuron( NeuralLink<T> * inNeuralLink ){ mLinksToNeurons.push_back( inNeuralLink ); }; virtual void Input( double inInputData ){ mSumOfCharges += inInputData; }; virtual double Fire( ); virtual int GetNumOfLinks( ) { return mLinksToNeurons.size( ); }; virtual double GetSumOfCharges( ); virtual void ResetSumOfCharges( ){ mSumOfCharges = 0.0; }; virtual double Process( ) { return mNetFunc->Process( mSumOfCharges ); }; virtual double Process( double inArg ){ return mNetFunc->Process( inArg ); }; virtual double Derivative( ){ return mNetFunc->Derivative( mSumOfCharges ); }; virtual void SetInputLink( NeuralLink<T> * inLink ){ mInputLinks.push_back( inLink ); }; virtual std::vector<NeuralLink<T > *>& GetInputLink( ){ return mInputLinks; }; virtual double PerformTrainingProcess( double inTarget ); virtual void PerformWeightsUpdating( ); virtual void ShowNeuronState( ); protected: NetworkFunction * mNetFunc; std::vector<NeuralLink<T > *> mInputLinks; std::vector<NeuralLink<T > *> mLinksToNeurons; double mSumOfCharges; }; template <typename T> class OutputLayerNeuronDecorator : public Neuron<T> { public: OutputLayerNeuronDecorator( Neuron<T> * inNeuron ){ mOutputCharge = 0; mNeuron = inNeuron; }; virtual ~OutputLayerNeuronDecorator( ); virtual std::vector<NeuralLink<T > *>& GetLinksToNeurons( ){ return mNeuron->GetLinksToNeurons( ) ;}; virtual NeuralLink<T> * at( const int& inIndexOfNeuralLink ){ return ( mNeuron->at( inIndexOfNeuralLink ) ) ;}; virtual void SetLinkToNeuron( NeuralLink<T> * inNeuralLink ){ mNeuron->SetLinkToNeuron( inNeuralLink ); }; virtual double GetSumOfCharges( ) { return mNeuron->GetSumOfCharges( ); }; virtual void ResetSumOfCharges( 
){ mNeuron->ResetSumOfCharges( ); }; virtual void Input( double inInputData ){ mNeuron->Input( inInputData ); }; virtual double Fire( ); virtual int GetNumOfLinks( ) { return mNeuron->GetNumOfLinks( ); }; virtual double Process( ) { return mNeuron->Process( ); }; virtual double Process( double inArg ){ return mNeuron->Process( inArg ); }; virtual double Derivative( ) { return mNeuron->Derivative( ); }; virtual void SetInputLink( NeuralLink<T> * inLink ){ mNeuron->SetInputLink( inLink ); }; virtual std::vector<NeuralLink<T > *>& GetInputLink( ) { return mNeuron->GetInputLink( ); }; virtual double PerformTrainingProcess( double inTarget ); virtual void PerformWeightsUpdating( ); virtual void ShowNeuronState( ) { mNeuron->ShowNeuronState( ); }; protected: double mOutputCharge; Neuron<T> * mNeuron; }; template <typename T> class HiddenLayerNeuronDecorator : public Neuron<T> { public: HiddenLayerNeuronDecorator( Neuron<T> * inNeuron ) { mNeuron = inNeuron; }; virtual ~HiddenLayerNeuronDecorator( ); virtual std::vector<NeuralLink<T > *>& GetLinksToNeurons( ){ return mNeuron->GetLinksToNeurons( ); }; virtual void SetLinkToNeuron( NeuralLink<T> * inNeuralLink ){ mNeuron->SetLinkToNeuron( inNeuralLink ); }; virtual double GetSumOfCharges( ){ return mNeuron->GetSumOfCharges( ) ;}; virtual void ResetSumOfCharges( ){mNeuron->ResetSumOfCharges( ); }; virtual void Input( double inInputData ){ mNeuron->Input( inInputData ); }; virtual double Fire( ); virtual int GetNumOfLinks( ){ return mNeuron->GetNumOfLinks( ); }; virtual NeuralLink<T> * ( const int& inIndexOfNeuralLink ){ return ( mNeuron->at( inIndexOfNeuralLink) ); }; virtual double Process( ){ return mNeuron->Process( ); }; virtual double Process( double inArg ){ return mNeuron->Process( inArg ); }; virtual double Derivative( ){ return mNeuron->Derivative( ); }; virtual void SetInputLink( NeuralLink<T> * inLink ){ mNeuron->SetInputLink( inLink ); }; virtual std::vector<NeuralLink<T > *>& GetInputLink( ){ return 
mNeuron->GetInputLink( ); }; virtual double PerformTrainingProcess( double inTarget ); virtual void PerformWeightsUpdating( ); virtual void ShowNeuronState( ){ mNeuron->ShowNeuronState( ); }; protected: Neuron<T> * mNeuron; };
template <typename T>
class Neuron;

/**
 * NeuralLink: a weighted, directed edge pointing at a Neuron.
 * Besides the weight itself it keeps the bookkeeping values used by the
 * training algorithms: the pending weight-correction term, the
 * error-information term and the last signal translated through the link.
 */
template <typename T>
class NeuralLink
{
public:
    /// Detached link: zero weight, no target neuron.
    NeuralLink( )
        : mWeightToNeuron( 0.0 ),
          mNeuronLinkedTo( 0 ),
          mWeightCorrectionTerm( 0 ),
          mErrorInformationTerm( 0 ),
          mLastTranslatedSignal( 0 )
    {
    };

    /**
     * Link attached to a target neuron.
     * @param inNeuronLinkedTo the neuron this link points at (not owned);
     * @param inWeightToNeuron initial weight, zero by default.
     */
    NeuralLink( Neuron<T> * inNeuronLinkedTo, double inWeightToNeuron = 0.0 )
        : mWeightToNeuron( inWeightToNeuron ),
          mNeuronLinkedTo( inNeuronLinkedTo ),
          mWeightCorrectionTerm( 0 ),
          mErrorInformationTerm( 0 ),
          mLastTranslatedSignal( 0 )
    {
    };

    void SetWeight( const double& inWeight ) { mWeightToNeuron = inWeight; };
    const double& GetWeight( ) { return mWeightToNeuron; };

    void SetNeuronLinkedTo( Neuron<T> * inNeuronLinkedTo ) { mNeuronLinkedTo = inNeuronLinkedTo; };
    Neuron<T> * GetNeuronLinkedTo( ) { return mNeuronLinkedTo; };

    void SetWeightCorrectionTerm( double inWeightCorrectionTerm ) { mWeightCorrectionTerm = inWeightCorrectionTerm; };
    double GetWeightCorrectionTerm( ) { return mWeightCorrectionTerm; };

    /// Applies the pending correction term to the weight.
    void UpdateWeight( ) { mWeightToNeuron += mWeightCorrectionTerm; };

    double GetErrorInFormationTerm( ) { return mErrorInformationTerm; };
    void SetErrorInFormationTerm( double inEITerm ) { mErrorInformationTerm = inEITerm; };

    void SetLastTranslatedSignal( double inLastTranslatedSignal ) { mLastTranslatedSignal = inLastTranslatedSignal; };
    double GetLastTranslatedSignal( ) { return mLastTranslatedSignal; };

protected:
    double mWeightToNeuron;        /*!< Weight of the connection. */
    Neuron<T> * mNeuronLinkedTo;   /*!< Target neuron (not owned). */
    double mWeightCorrectionTerm;  /*!< Pending delta applied by UpdateWeight(). */
    double mErrorInformationTerm;  /*!< Error term used by backpropagation. */
    double mLastTranslatedSignal;  /*!< Last signal that went through the link. */
};
/**
 * Abstract interface of a neuron's activation (network) function.
 */
class NetworkFunction
{
public:
    NetworkFunction( ) { };
    virtual ~NetworkFunction( ) { };
    /// Maps the accumulated charge to the output signal.
    virtual double Process( double inParam ) = 0;
    /// Derivative of Process at inParam; used by gradient-based training.
    virtual double Derivative( double inParam ) = 0;
};

/** Identity activation: f(x) = x. */
class Linear : public NetworkFunction
{
public:
    Linear( ) { };
    virtual ~Linear( ) { };
    virtual double Process( double inParam ) { return inParam; };
    // BUG FIX: the derivative of f(x) = x is 1, not 0.  Returning 0 would
    // zero out every gradient flowing through a linear neuron during
    // backpropagation, making such neurons untrainable.
    virtual double Derivative( double inParam ) { return 1; };
};

/** Logistic sigmoid: f(x) = 1 / (1 + e^-x), output in (0, 1). */
class Sigmoid : public NetworkFunction
{
public:
    Sigmoid( ) { };
    virtual ~Sigmoid( ) { };
    virtual double Process( double inParam ) { return ( 1 / ( 1 + std::exp( -inParam ) ) ); };
    /// f'(x) = f(x) * (1 - f(x)).
    virtual double Derivative( double inParam ) { return ( this->Process( inParam ) * ( 1 - this->Process( inParam ) ) ); };
};

/** Bipolar sigmoid: f(x) = 2 / (1 + e^-x) - 1, output in (-1, 1). */
class BipolarSigmoid : public NetworkFunction
{
public:
    BipolarSigmoid( ) { };
    virtual ~BipolarSigmoid( ) { };
    virtual double Process( double inParam ) { return ( 2 / ( 1 + std::exp( -inParam ) ) - 1 ); };
    /// f'(x) = 0.5 * (1 + f(x)) * (1 - f(x)).
    virtual double Derivative( double inParam ) { return ( 0.5 * ( 1 + this->Process( inParam ) ) * ( 1 - this->Process( inParam ) ) ); };
};
/**
 * Abstract factory that builds the three kinds of neurons a network is
 * composed of: input, hidden and output neurons.
 */
template <typename T>
class NeuronFactory
{
public:
    NeuronFactory( ) { };
    virtual ~NeuronFactory( ) { };

    /// Builds an input neuron linked to every neuron of the given layer.
    virtual Neuron<T> * CreateInputNeuron( std::vector<Neuron<T > *>& inNeuronsLinkTo, NetworkFunction * inNetFunc ) = 0;
    /// Builds an output neuron with the given network function.
    virtual Neuron<T> * CreateOutputNeuron( NetworkFunction * inNetFunc ) = 0;
    /// Builds a hidden neuron linked to every neuron of the given layer.
    virtual Neuron<T> * CreateHiddenNeuron( std::vector<Neuron<T > *>& inNeuronsLinkTo, NetworkFunction * inNetFunc ) = 0;
};

/**
 * Concrete factory producing neurons for perceptron networks.
 * Output and hidden neurons are wrapped into their respective decorators;
 * ownership of every created neuron is passed to the caller.
 */
template <typename T>
class PerceptronNeuronFactory : public NeuronFactory<T>
{
public:
    PerceptronNeuronFactory( ) { };
    virtual ~PerceptronNeuronFactory( ) { };

    virtual Neuron<T> * CreateInputNeuron( std::vector<Neuron<T > *>& inNeuronsLinkTo, NetworkFunction * inNetFunc )
    {
        return new Neuron<T>( inNeuronsLinkTo, inNetFunc );
    };
    virtual Neuron<T> * CreateOutputNeuron( NetworkFunction * inNetFunc )
    {
        return new OutputLayerNeuronDecorator<T>( new Neuron<T>( inNetFunc ) );
    };
    virtual Neuron<T> * CreateHiddenNeuron( std::vector<Neuron<T > *>& inNeuronsLinkTo, NetworkFunction * inNetFunc )
    {
        return new HiddenLayerNeuronDecorator<T>( new Neuron<T>( inNeuronsLinkTo, inNetFunc ) );
    };
};
// Forward declarations: these templates are defined elsewhere in the file,
// but NeuralNetwork only needs their names here (pointer members, reference
// return types and friend declarations).
template <typename T> class Neuron;
template <typename T> class NeuronFactory;
template <typename T> class TrainAlgorithm;
// BUG FIX: Hebb<T> and Backpropagation<T> are named in friend declarations
// below; a friend declaration may only name a template specialization that
// has already been declared, so the templates are forward-declared here.
template <typename T> class Hebb;
template <typename T> class Backpropagation;

/**
 * Neural network class.
 *
 * An object of this type represents a neural network of one of the
 * supported kinds: a single layer perceptron or a multiple layers
 * perceptron.  Two training algorithms are available — Hebb (perceptron
 * rule) and Backpropagation — and the algorithm can be changed at run
 * time via SetAlgorithm.
 *
 * Typical usage: construct the network with the desired numbers of input,
 * output and hidden units, optionally tune the target MSE with SetMinMSE
 * (defaults to 0.01), call Train with training data and targets, then
 * query responses with GetNetResponse.
 */
template <typename T>
class NeuralNetwork
{
public:
    /**
     * Constructs a network.  T is the data type all the nodes operate with.
     * @param inInputs number of input neurons;
     * @param inOutputs number of output neurons;
     * @param inNumOfHiddenLayers number of hidden layers, default 0;
     * @param inNumOfNeuronsInHiddenLayers neurons per hidden layer (every
     *        hidden layer has the same amount), default 0;
     * @param inTypeOfNeuralNetwork network type name; currently
     *        "MultiLayerPerceptron" (the default) is supported.
     */
    NeuralNetwork( const int& inInputs, const int& inOutputs,
                   const int& inNumOfHiddenLayers = 0,
                   const int& inNumOfNeuronsInHiddenLayers = 0,
                   const char * inTypeOfNeuralNetwork = "MultiLayerPerceptron" );
    ~NeuralNetwork( );

    /**
     * Trains the network so that the link weights adjust towards solving
     * the problem represented by the samples.
     * @param inData training samples;
     * @param inTarget target vectors — one per sample, same order.
     * @return true when the training succeeded.
     */
    bool Train( const std::vector<std::vector<T > >& inData, const std::vector<std::vector<T > >& inTarget );

    /**
     * Feeds the network with data and returns its response.
     * @param inData one input vector.
     */
    std::vector<int> GetNetResponse( const std::vector<T>& inData );

    /// Dynamically replaces the training algorithm.
    void SetAlgorithm( TrainAlgorithm<T> * inTrainingAlgorithm ) { mTrainingAlgoritm = inTrainingAlgorithm; };

    /// Dynamically replaces the factory that builds the neurons.
    void SetNeuronFactory( NeuronFactory<T> * inNeuronFactory ) { mNeuronFactory = inNeuronFactory; };

    /// Prints the weight of every link to standard output; useful for
    /// monitoring weight changes during training.
    void ShowNetworkState( );

    /// The largest MSE required to achieve during the training phase.
    const double& GetMinMSE( ) { return mMinMSE; };
    /// Sets the largest MSE required to achieve during the training phase.
    void SetMinMSE( const double& inMinMse ) { mMinMSE = inMinMse; };

    /// Training algorithms need access to the protected internals.
    friend class Hebb<T>;
    friend class Backpropagation<T>;

protected:
    /// Layer accessor by index; used by the inner implementation.
    std::vector<Neuron<T > *>& GetLayer( const int& inInd ) { return mLayers[inInd]; };
    /// Number of layers in the network.
    unsigned int size( ) { return mLayers.size( ); };
    /// The last (output) layer.
    std::vector<Neuron<T > *>& GetOutputLayer( ) { return mLayers[mLayers.size( ) - 1]; };
    /// The first (input) layer.
    std::vector<Neuron<T > *>& GetInputLayer( ) { return mLayers[0]; };
    /// The bias neurons.
    std::vector<Neuron<T > *>& GetBiasLayer( ) { return mBiasLayer; };

    /// Updates the weight of every link; only meaningful during training.
    void UpdateWeights( );
    /// Resets the charges accumulated during one training iteration.
    void ResetCharges( );
    /// Accumulates a portion of MSE produced during the current iteration.
    /// @param inPortion amount of MSE to add.
    void AddMSE( double inPortion ) { mMeanSquaredError += inPortion; };
    /// Current accumulated MSE.
    double GetMSE( ) { return mMeanSquaredError; };
    /// Resets the accumulated MSE between iterations.
    void ResetMSE( ) { mMeanSquaredError = 0; };

    NeuronFactory<T> * mNeuronFactory;       /*!< Creates the neurons @see SetNeuronFactory */
    TrainAlgorithm<T> * mTrainingAlgoritm;   /*!< Training strategy @see SetAlgorithm */
    std::vector<std::vector<Neuron<T > *> > mLayers; /*!< Inner representation of the network */
    std::vector<Neuron<T > *> mBiasLayer;    /*!< Container for biases */
    unsigned int mInputs, mOutputs, mHidden; /*!< Numbers of input, output and hidden units */
    double mMeanSquaredError;                /*!< MSE, updated on every training iteration */
    double mMinMSE;                          /*!< Largest MSE allowed for training to stop */
};
template <typename T>
class NeuralNetwork;

/**
 * Interface of a network training algorithm.
 */
template <typename T>
class TrainAlgorithm
{
public:
    virtual ~TrainAlgorithm( ) { };
    /// Performs one training pass over a data/target sample pair, returning the error.
    virtual double Train( const std::vector<T>& inData, const std::vector<T>& inTarget ) = 0;
    /// Initializes the weights of the network before training starts.
    virtual void WeightsInitialization( ) = 0;
};

/**
 * Hebbian (perceptron rule) training algorithm.
 */
template <typename T>
class Hebb : public TrainAlgorithm<T>
{
public:
    Hebb( NeuralNetwork<T> * inNeuralNetwork ) : mNeuralNetwork( inNeuralNetwork ) { };
    virtual ~Hebb( ) { };
    virtual double Train( const std::vector<T>& inData, const std::vector<T>& inTarget );
    virtual void WeightsInitialization( );

protected:
    NeuralNetwork<T> * mNeuralNetwork;  /*!< Network being trained (not owned). */
};

/**
 * Backpropagation training algorithm.
 */
template <typename T>
class Backpropagation : public TrainAlgorithm<T>
{
public:
    Backpropagation( NeuralNetwork<T> * inNeuralNetwork );
    virtual ~Backpropagation( ) { };
    virtual double Train( const std::vector<T>& inData, const std::vector<T>& inTarget );
    virtual void WeightsInitialization( );

protected:
    /// Nguyen-Widrow scheme for the initial weights.
    void NguyenWidrowWeightsInitialization( );
    /// Initialization steps shared by all schemes.
    void CommonInitialization( );

    NeuralNetwork<T> * mNeuralNetwork;  /*!< Network being trained (not owned). */
};
// Source: https://habr.com/ru/post/198268/