API Reference > perceptron_t

perceptron_t

class perceptron_t

This is an implementation of a Perceptron Neural Network which learns by example.
You can give it examples of what you want the network to do and the learning algorithm (delta-rule) changes the network's weights. When training is finished, the net will give you the required output for a particular input.



Header


Namespace

  • nu


Constructors

  • perceptron_t() = default;
    Default constructor. Creates an uninitialized neural network.
    You can initialize the net later by loading its status from a string stream.

  • perceptron_t( const size_t& n_of_inputs, double learning_rate = 0.1, step_func_t step_f = step_func_t());
    Creates a perceptron neural network
    • n_of_inputs represents the input layer size.
      If (n_of_inputs < 1) this method will throw an exception exception_t::size_mismatch
    • learning_rate should be a number within the range (0..1].
      The learning rate determines how aggressively the delta-rule training
      algorithm adjusts the weights: larger values make training faster but
      may prevent convergence, smaller values make it slower but more stable.
      (Note: unlike the multilayer net, this constructor takes no momentum
      parameter.)
    • step_func_t step_f = step_func_t() is an optional step function used by the get_sharp_output() method.
  • perceptron_t(const perceptron_t& nn);
    Copy Constructor
  • perceptron_t(perceptron_t&& nn);
    Move Constructor

Assignment operators

  • perceptron_t& operator=(const perceptron_t& nn) = default;
    Copy-assignment operator
  • perceptron_t& operator=(perceptron_t&& nn)
    Move-assignment operator



String Stream Operators

  • friend std::stringstream& operator>>(std::stringstream& ss, perceptron_t& net);
    Loads and reinitializes the neural network using the data of the given string stream.
    In case of an invalid stream format this method will throw an exception exception_t::invalid_sstream_format
  • friend std::stringstream& operator<<(std::stringstream& ss, perceptron_t& net) noexcept;
    Saves the net status into the given string stream

Output Stream Operators 

  • friend std::ostream& operator<<(std::ostream& os, perceptron_t& net);
    Print the net status out to the given standard output stream

Public methods

  • void reshuffle_weights() noexcept;
    Reset all net weights by using new random values 
  • void set_inputs(const rvector_t& inputs);
    const rvector_t& inputs: [in] input vector
    Sets a new network input vector.
    If inputs.size() != get_inputs_count() this method will throw an exception exception_t::size_mismatch
  • void get_inputs(rvector_t& inputs) const noexcept;
    Get net inputs
  • double get_output() const noexcept;
    Get net output
  • double get_sharp_output() const noexcept;
    Get f(output) where f is the step function
  • void feed_forward() noexcept;
    Fire all neurons of the net and calculate the outputs 
  • void back_propagate(const double& target) noexcept;
  • void back_propagate(const double& target, double& output) noexcept;
    const double& target: [in]  expected output value
    double& output:       [out] net output calculated during the feed-forward step

    Fire the neuron, calculate the output then apply the BP algorithm to the net
  • double error(const double& target) const noexcept;
    Compute global error

  • virtual std::stringstream& load(std::stringstream& ss);
    std::stringstream& ss: [in/out] string stream

    Builds the net using the data of the given string stream.
    In case of an invalid stream format this method will throw an exception exception_t::invalid_sstream_format
  • virtual std::stringstream& save(std::stringstream& ss) noexcept;
    std::stringstream& ss: [in/out] string stream
    Saves the net status into the given string stream
  • virtual std::ostream& dump(std::ostream& os) noexcept;
    std::ostream& os: [in/out] output stream

    Prints the net state out to the given output stream

Example

/*
 * AND function implemented using a Perceptron neural net
 *
 * AND is a typical example of linearly separable function. This type
 * of function can be learned by a single Perceptron neural net
 *
 * AND takes two input arguments with values in [0,1] 
 * and returns one output in [0,1], as specified in the following table:
 *
 *  x1 | x2 |  y
 * ----+----+----
 *   0 |  0 |  0
 *   0 |  1 |  0
 *   1 |  0 |  0
 *   1 |  1 |  1
 *
 * It computes the logical-AND, which yields 1 if and only if the two 
 * inputs have 1 values.
 *
 */


/* -------------------------------------------------------------------------- */

#include "nu_perceptron.h"
#include <iostream>


/* -------------------------------------------------------------------------- */

int main(int argc, char* argv[])
{
   try {
      nu::step_func_t step_f(
         0.5 /* Lo/Hi-threshold */, 
         0   /* Lo - Output */, 
         1   /* Hi - Output */);

      nu::perceptron_t nn(
         2   /* inputs */, 
         0.2 /* learning rate */, 
         step_f);

      // This is the bipolar-and function used for the training
      auto and_function = [](int a, int b) { return a & b; };


      // ---- TRAINING ---------------------------------------------------------

      nu::perceptron_trainer_t trainer(
         nn, 
         2000,  // Max number of epochs
         0.01     // Min error 
      );

      std::cout
         << "AND training start ( Max epochs count=" << trainer.get_epochs()
         << " Minimum error=" << trainer.get_min_err() << " )"
         << std::endl;
      
      size_t epoch_n = 0;

      for ( auto & training_epoch : trainer )
      {
         bool training_completed = false;

         double err = 0.0;

         for ( int a = 0; a < 2; ++a )
         {
            for ( int b = 0; b < 2; ++b )
            {          
               training_epoch.train(
                  { double(a),double(b) },           // input vector
                  { double(and_function(a, b)) },    // target

                  // cost function
                  [&err](nu::perceptron_t& net, const double & target) 
                  {  
                     err = net.error(target); 
                     return err;
                  }
               );
            }
         }

         if ( epoch_n++ % 100 == 0 )
            std::cout
            << "Epoch #" << epoch_n
            << " Err = " << err 
            << std::endl;

         if ( err < trainer.get_min_err() )
            break;
      }

      // ---- TEST -------------------------------------------------------------
      
      std::cout << " AND Test " << std::endl;

      for ( int a = 0; a < 2; ++a )
      {
         for ( int b = 0; b < 2; ++b )
         {
            double output=0.0;
            nu::vector_t<double> input_vec{ double(a), double(b) };

            nn.set_inputs(input_vec);
            nn.feed_forward();
            output = nn.get_sharp_output();

            // Dump the network status
            std::cout << nn;

            std::cout << "-------------------------------" << std::endl;

            std::cout
               << a << " and " << b << " = " << output << std::endl;

            auto and_res = and_function(a, b);

            // In case you'd play with configuration parameters 
            // and break the code :-)
            if ( int(and_res) != int(output) )
            {
               std::cerr
                  << "ERROR!: and(" << a << "," << b << ") !="
                  << and_res
                  << std::endl;

               return 1;
            }

            std::cout << "-------------------------------" << std::endl;
         }
      }

      std::cout << "Test completed successfully" << std::endl;
   }
   catch ( nu::perceptron_t::exception_t & e )
   {
      std::cerr 
         << "nu::perceptron_t::exception_t n# " << int(e) << std::endl;
      
      std::cerr
         << "Check for configuration parameters and retry" << std::endl;

      return 1;
   }
   catch ( ... )
   {
      std::cerr
         << "Fatal error. Check for configuration parameters and retry" 
         << std::endl;

      return 1;
   }

   return 0;
}

/* -------------------------------------------------------------------------- */

Comments