LTI-Lib latest version v1.9 - last update 10 Apr 2010

ltiLvq.h

/*
 * Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006
 * Lehrstuhl fuer Technische Informatik, RWTH-Aachen, Germany
 *
 * This file is part of the LTI-Computer Vision Library (LTI-Lib)
 *
 * The LTI-Lib is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License (LGPL)
 * as published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * The LTI-Lib is distributed in the hope that it will be
 * useful, but WITHOUT ANY WARRANTY; without even the implied warranty
 * of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with the LTI-Lib; see the file LICENSE.  If
 * not, write to the Free Software Foundation, Inc., 59 Temple Place -
 * Suite 330, Boston, MA 02111-1307, USA.
 */


/*----------------------------------------------------------------
 * project ....: LTI Digital Image/Signal Processing Library
 * file .......: ltiLvq.h
 * authors ....: Peter Doerfler, Pablo Alvarado
 * organization: LTI, RWTH Aachen
 * creation ...: 11.8.2000
 * revisions ..: $Id: ltiLvq.h,v 1.4 2006/02/07 18:19:51 ltilib Exp $
 */

#ifndef _LTI_LVQ_H_
#define _LTI_LVQ_H_

#include <map>
#include <string>

#include "ltiVector.h"
#include "ltiMatrix.h"
#include "ltiClassifier.h"
#include "ltiSupervisedInstanceClassifier.h"
#include "ltiStdLayer.h"

namespace lti {
  /**
   * LVQ-Net: a Learning Vector Quantization (LVQ) network classifier.
   * Training can be done with the LVQ1/OLVQ1 and LVQ3/OLVQ3 variants
   * (see lvq::parameters).
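   *
   * A minimal usage sketch (the parameter values are only illustrative, and
   * setParameters() as well as the outputVector result type are assumed to
   * be provided by the classifier base classes):
   *
   * \code
   * lti::lvq::parameters param;
   * param.nbNeuronsPerClass = 4;     // illustrative value
   * param.nbPresentations1  = 100;   // illustrative value
   *
   * lti::lvq net;
   * net.setParameters(param);        // assumed inherited from the classifier base
   *
   * lti::dmatrix features;           // one training vector per row
   * lti::ivector ids;                // class id for each row
   * // ... fill features and ids with the training data ...
   * net.train(features, ids);
   *
   * lti::dvector unknown;            // feature vector to classify
   * lti::lvq::outputVector result;   // result type from the classifier hierarchy
   * net.classify(unknown, result);
   * \endcode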
   */
  class lvq : public supervisedInstanceClassifier {
  public:

    // --------------------------------------------------
    // lvq::parameters
    // --------------------------------------------------

    /**
     * the parameters for the class lvq
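     *
     * A typical configuration might look like the following sketch (all
     * values are only illustrative choices for this example):
     *
     * \code
     * lti::lvq::parameters param;
     *
     * // two-phase training: OLVQ1 first, then OLVQ3
     * param.flagOlvq1        = true;
     * param.flagOlvq3        = true;
     * param.nbPresentations1 = 100;   // illustrative value
     * param.nbPresentations2 = 200;   // illustrative value
     *
     * // learning behaviour
     * param.learnRate1      = 0.1;    // illustrative value
     * param.learnRate2      = 0.01;   // illustrative value
     * param.learnRateFactor = 0.3;    // illustrative value
     * param.windowSize      = 0.2;    // illustrative value
     *
     * // codebook layout and initialization
     * param.nbNeuronsPerClass = 4;    // illustrative value
     * param.initType = lti::lvq::parameters::LvqMaxDist;
     * \endcode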
     */
    class parameters : public supervisedInstanceClassifier::parameters {
    public:

      /**
       * enumeration to specify the network initialization types.
       */
      enum eLvqInit {
        LvqRand,   /*!< random initialization of the codebook vectors */
        LvqMaxDist /*!< initialization with the maximum distance */
      };

      /**
       * default constructor
       */
      parameters();

      /**
       * copy constructor
       * @param other the parameters object to be copied
       */
      parameters(const parameters& other);

      /**
       * destructor
       */
      virtual ~parameters();

      /**
       * returns name of this type
       */
      const char* getTypeName() const;

      /**
       * copy the contents of a parameters object
       * @param other the parameters object to be copied
       * @return a reference to this parameters object
       */
      parameters& copy(const parameters& other);

      /**
       * Alias for copy.
       * @param other the parameters object to be copied
       * @return a reference to this parameters object
       */
      inline parameters& operator=(const parameters& other) {
        return copy(other);
      }

      /**
       * returns a pointer to a clone of the parameters
       */
      virtual classifier::parameters* clone() const;

      // --------------
      // the parameters
      // --------------

      /**
       * learn rate for LVQ1 or OLVQ1
       */
      double learnRate1;

      /**
       * learn rate for LVQ3 or OLVQ3
       */
      double learnRate2;

      /**
       * learn rate factor
       * used in LVQ3 and OLVQ3 to change the learn rate
       */
      double learnRateFactor;

      /**
       * window size
       */
      double windowSize;

      /**
       * number of neurons per class
       */
      int nbNeuronsPerClass;

      /**
       * number of presentations for LVQ1 or OLVQ1
       */
      int nbPresentations1;

      /**
       * number of presentations for LVQ3 or OLVQ3
       */
      int nbPresentations2;

      /**
       * the norm type (L1-, L2-norm)
       */
      eNormType norm;

      /**
       * specify the way the codebook vectors should be initialized
       */
      eLvqInit initType;

      /**
       * if true, the OLVQ1 algorithm will be used for the first presentation
       * set; otherwise, LVQ1 will be used.
       */
      bool flagOlvq1;

      /**
       * if true, the OLVQ3 algorithm will be used for the second presentation
       * set; otherwise, LVQ3 will be used.
       */
      bool flagOlvq3;

      /**
       * sigma factor
       */
      double sigmaFactor;

      /**
       * generate training statistics
       */
      bool doStatistics;

      /**
       * name of the file where the statistics will be saved
       */
      std::string statisticsFilename;

      /**
       * file name for the network
       */
      std::string netFilename;

      /**
       * if true, the second (disjunct) training set (see setTrainSet2()) is
       * also used when generating the training statistics
       */
      bool doTrain2;

      /**
       * if true, the "best" network will be saved (see also correctVs3Best)
       */
      bool saveBest;

      /**
       * if saveBest is true, and correctVs3Best is also true, the best
       * net with the correct result will be saved.  If correctVs3Best is
       * false, the best "three-best" network will be saved.  If saveBest
       * is false, this parameter will be ignored.
       */
      bool correctVs3Best;
    };

    // --------------------------------------------------
    // lvq::layer
    // --------------------------------------------------

    /**
     * lvq layer
     */
    class layer : public stdLayer {
    public:
      /**
       * constructor
       */
      layer(const std::string& theName = "LVQ Layer");

      /**
       * destructor
       */
      virtual ~layer() {};

      /**
       * Find sigmas for weighting excitation of layer with exp()
       * @param nbNeurCl number of neurons per class
       * @param sigFac   sigma factor
       * @param sigma the resulting vector will be left here!
       */
      void findSigmas(const int& nbNeurCl,
                      const double& sigFac,
                      dvector& sigma);

    };

    // --------------------------------------------------
    // lvq::trainFunctor
    // --------------------------------------------------

    /**
     * base class of training functors for the lvq networks
     */
    class trainFunctor : public stdLayer::trainFunctor {
    public:
      /**
       * constructor
       * @param theProp the distance propagation functor
       */
      trainFunctor(const stdLayer::distancePropFunctor& theProp)
        : prop(theProp), firstUse(true) {};

      /**
       * set the learn rate factor
       */
      virtual void setLearnFactor(const double& lrFac);

      /**
       * set the window size
       */
      virtual void setWindowSize(const double& winSize);

    protected:
      /**
       * distance propagation functor
       */
      stdLayer::distancePropFunctor prop;

      /**
       * delta vector
       */
      dvector delta;

      /**
       * delta2 vector
       */
      dvector delta2;

      /**
       * calls vector
       */
      ivector calls;

      /**
       * learning rate
       */
      double learnFactor;

      /**
       * window size
       */
      double windowSize;

      /**
       * first use of the functor
       */
      bool firstUse;

      /**
       * get the indices of the two minimum values of the vector
       * min1 <= min2
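       *
       * One straightforward way such a helper can work, shown only as an
       * illustrative sketch (not necessarily the actual implementation):
       * \code
       * // assumes vct.size() >= 2
       * min1 = 0; min2 = 1;
       * if (vct[min2] < vct[min1]) { const int t = min1; min1 = min2; min2 = t; }
       * for (int i = 2; i < vct.size(); ++i) {
       *   if (vct[i] < vct[min1])      { min2 = min1; min1 = i; } // new smallest
       *   else if (vct[i] < vct[min2]) { min2 = i; }              // new 2nd smallest
       * }
       * \endcode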
       */
      void twoMinIndex(const dvector& vct,int& min1,int& min2);

    };

    /**
     * LVQ1
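     *
     * For reference, the classical LVQ1 update rule (Kohonen) moves only the
     * codebook vector \f$w_c\f$ nearest to the training vector \f$x\f$:
     * \f[ w_c(t+1) = w_c(t) \pm \alpha(t)\,[x(t) - w_c(t)] \f]
     * with "+" if \f$w_c\f$ and \f$x\f$ belong to the same class and "-"
     * otherwise; all other codebook vectors stay unchanged.  (Sketch of the
     * standard algorithm, not necessarily a literal description of this
     * implementation.)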
     */
    class lvq1TrainFunctor : public trainFunctor {
    public:
      /**
       * constructor
       * @param theProp the distance propagation functor
       */
      lvq1TrainFunctor(stdLayer::distancePropFunctor& theProp)
        : trainFunctor(theProp) {};

      /**
       * apply operator
       */
      virtual bool operator()(const dvector& input,
                              dmatrix& weights,
                              dvector& outLayer,
                              const ivector& outID,
                              const int& trainID,
                              bool& modified);
    };

    /**
     * OLVQ1
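     *
     * For reference, OLVQ1 is LVQ1 with an individually optimized learning
     * rate per codebook vector (kept here in the vector lr); the usual
     * recursion (Kohonen) is
     * \f[ \alpha_c(t) = \frac{\alpha_c(t-1)}{1 + s(t)\,\alpha_c(t-1)} \f]
     * where \f$s(t)=+1\f$ if the winning codebook vector classifies the
     * sample correctly and \f$s(t)=-1\f$ otherwise.  (Sketch of the standard
     * algorithm, not necessarily a literal description of this
     * implementation.)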
     */
    class olvq1TrainFunctor : public trainFunctor {
    public:
      /**
       * constructor
       */
      olvq1TrainFunctor(stdLayer::distancePropFunctor& theProp)
        : trainFunctor(theProp) {};

      /**
       * set the learn rate
       */
      void setLearnRate(const double &theLr);

      /**
       * apply operator
       */
      bool operator()(const dvector& input,
                      dmatrix& weights,
                      dvector& outLayer,
                      const ivector& outID,
                      const int& trainID,
                      bool& modified);

    protected:
      /**
       * learning rate vector
       */
      dvector lr;
    };

    /**
     * LVQ3
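     *
     * For reference, the classical LVQ3 rule (Kohonen) considers the two
     * codebook vectors \f$w_i\f$ and \f$w_j\f$ nearest to the training
     * vector \f$x\f$ and updates them only if \f$x\f$ falls into a window
     * around their decision border, i.e. if
     * \f[ \min\left(\frac{d_i}{d_j},\frac{d_j}{d_i}\right) > s,
     *     \qquad s=\frac{1-w}{1+w} \f]
     * with \f$w\f$ the window size.  If exactly one of them has the correct
     * class, it is moved towards \f$x\f$ and the other away from it; if both
     * have the correct class, both are moved towards \f$x\f$ with the
     * reduced rate \f$\epsilon\alpha\f$ (see parameters::learnRateFactor).
     * (Sketch of the standard algorithm, not necessarily a literal
     * description of this implementation.)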
     */
    class lvq3TrainFunctor : public trainFunctor {
    public:
      /**
       * constructor
       */
      lvq3TrainFunctor(stdLayer::distancePropFunctor& theProp)
        : trainFunctor(theProp) {};

      /**
       * apply operator
       */
      bool operator()(const dvector& input,
                      dmatrix& weights,
                      dvector& outLayer,
                      const ivector& outID,
                      const int& trainID,
                      bool& modified);


    };

    /**
     * OLVQ3
     */
    class olvq3TrainFunctor : public trainFunctor {
    public:
      /**
       * constructor
       */
      olvq3TrainFunctor(stdLayer::distancePropFunctor& theProp)
        : trainFunctor(theProp) {};

      /**
       * set the learn rate
       */
      void setLearnRate(const double &lRate);

      /**
       * apply operator
       */
      bool operator()(const dvector& input,
                      dmatrix& weights,
                      dvector& outLayer,
                      const ivector& outID,
                      const int& trainID,
                      bool& modified);
    protected:
      /**
       * learning rate vector
       */
      dvector lr;

    };

    /**
     * LVQ4
     */
    class lvq4TrainFunctor : public trainFunctor {
    public:
      /**
       * constructor
       */
      lvq4TrainFunctor(stdLayer::distancePropFunctor& theProp)
        : trainFunctor(theProp) {};

      /**
       * apply operator
       */
      bool operator()(const dvector& input,
                      dmatrix& weights,
                      dvector& outLayer,
                      const ivector& outID,
                      const int& trainID,
                      bool& modified);
    };

    // --------------------------------------------------
    // lvq::initFunctor
    // --------------------------------------------------

    /**
     * random initialization
     */
    class randInitFunctor : public stdLayer::initFunctor {
    public:
      /**
       * LVQ ANNs are best initialized with the training data;
       * hence the somewhat bulky constructor.
       */
      randInitFunctor(const dmatrix& theFeatures,
                      const ivector& theTrainIDs,
                      const ivector& theNbViewsObj,
                      const int& theNbObj,
                      const int& theNbNeurObj);

      bool operator()(dmatrix& weights, ivector& outID);

    protected:
      /**
       * reference matrix to the features
       */
      const dmatrix& features;

      /**
       * reference vector to the train IDs
       */
      const ivector& trainIDs;

      /**
       * number of views per object
       */
      const ivector& nbViewsObj;

      /**
       * number of objects
       */
      const int& nbObj;

      /**
       * number of neurons per object
       */
      const int& nbNeurObj;
    };

    /**
     * maximum distance initialization
     */
    class maxDistInitFunctor : public stdLayer::initFunctor {
    public:
      /**
       * LVQ ANNs are best initialized with the training data;
       * hence the somewhat bulky constructor.
       */
      maxDistInitFunctor(const dmatrix& theFeatures,
                         const ivector& theTrainIDs,
                         const ivector& theNbViewsObj,
                         const int& theNbObj,
                         const int& theNbNeurObj,
                         const eNormType& theNorm);

      bool operator()(dmatrix& weights, ivector& outID);

    protected:
      /**
       * reference to the matrix with the feature vectors
       */
      const dmatrix& features;

      /**
       * vector with the train IDs
       */
      const ivector& trainIDs;

      /**
       * vector with the number of views per object
       */
      const ivector& nbViewsObj;

      /**
       * number of objects
       */
      const int& nbObj;

      /**
       * number of neurons per object
       */
      const int& nbNeurObj;

      /**
       * norm
       */
      const eNormType& norm;
    };

    // --------------------------------------------------
    // lvq
    // --------------------------------------------------

    /**
     * Constructor
     */
    lvq();

    /**
     * Copy constructor
     */
    lvq(const lvq& other);


    /**
     * Destructor
     */
    ~lvq();


    /**
     * Supervised training (LVQ).
     * The rows of <code>input</code> are used as training vectors; the
     * "known" class of each vector is given by the corresponding entry
     * of <code>ids</code>.
     * @param input the matrix with the input vectors (one vector per row)
     * @param ids the class ids for the input vectors
     */
    bool train(const dmatrix& input,
               const ivector& ids);

    /**
     * Propagation.
     * Propagates the feature vector through the net and leaves the
     * classification result in <code>result</code>.
     */
    bool classify(const dvector& featurem, outputVector& result) const;

    /**
     * dimension of the output layer
     */
    inline int getDimOutputLayer() const;

    /**
     * Sets the test data used for the training statistics
     */
    void setTestSet(const dmatrix& input,
                    const ivector& ids);

    /**
     * Sets the disjunct training data used to compute the object probabilities
     */
    void setTrainSet2(const dmatrix& input,
                      const ivector& ids);

    /**
     * Calculate object probabilities with given features
     */
    void calcObjProbs(const ivector& ids,
                      const dmatrix& feats,
                      stdLayer::distancePropFunctor& distProp);

    /**
     * copy data of "other" functor.
     * @param other the functor to be copied
     * @return a reference to this functor object
     */
    lvq& copy(const lvq& other);

    /**
     * Alias for copy.
     * @param other the functor to be copied
     * @return a reference to this functor object
     */
    inline lvq& operator=(const lvq& other) {
      return copy(other);
    }

    /**
     * returns a pointer to a clone of this functor.
     */
    virtual classifier* clone() const;

    /**
     * returns used parameters
     */
    const parameters& getParameters() const;


#ifdef _INCLUDE_DEPRECATED
    /**
     * Load network. Gets network information from a '.lvq' file
     */
//      bool load(std::istream& instream);

    /**
     * Save network. Saves relevant information in a '.lvq' file
     */
//      bool save(std::ostream& outstream);
#endif

  protected:

    /**
     * returns used parameters
     */
//  parameters& getParameters();

  private:
    /**
     * lvq layer functor
     */
    layer lvqLayer;

    /**
     * pointer to the used distance propagation functor
     */
    stdLayer::distancePropFunctor* distProp;

    /**
     * linear activation functor
     */
    layer::linearActFunctor linearAct;

    /**
     * pointer to the used gaussian activation functor instance
     */
    stdLayer::gaussActFunctor* gaussAct;

    /**
     * classification statistics functor for training data
     */
//      classifyStatFunctor trainStat;

    /**
     * classification statistics functor for test data
     */
//      classifyStatFunctor testStat;

    /**
     * input size
     */
    int sizeIn;

    /**
     * number of objects
     */
    int nbObj;

    /**
     * the code-book vectors
     */
    dmatrix features;

    /**
     * sigma vector
     */
    dvector sigma;

    /**
     * internal id to real id
     */
    ivector internToReal;

    /**
     * real to intern id
     */
    std::map<int,int> realToIntern;


    /**
     * vector with IDs
     */
    ivector trainID;

    /**
     * second set of training feature vectors
     */
    dmatrix train2Features;

    /**
     * IDs for the second training set
     */
    ivector train2IDs;


    /**
     * set of test feature vectors
     */
    dmatrix testFeatures;

    /**
     * IDs for the test feature set
     */
    ivector testIDs;

    /**
     * generate statistics for the given feature set
     * @param stat statistics functor
     * @param ids IDs for the feature vector set
     * @param feats feature vector set.  Each row of the matrix is a feature
     *              vector
     * @param save if true the statistics will be saved.
     */
//      void statistics(classifyStatFunctor& stat,
//                      const ivector& ids,
//                      const dmatrix& feats,
//                      const bool save);
  };

  /**
   * returns the dimension of the output layer
   */
  int lvq::getDimOutputLayer() const {
    return nbObj*getParameters().nbNeuronsPerClass;
  }
}

#endif //_LTI_LVQ_H_
