diff --git a/tmva/tmva/inc/TMVA/Envelope.h b/tmva/tmva/inc/TMVA/Envelope.h
index 315a1ba9367d9979143570d6a8a83edf87200a68..ae6f663316e19eeeba3a5d7027a5c85c93d6ee8a 100644
--- a/tmva/tmva/inc/TMVA/Envelope.h
+++ b/tmva/tmva/inc/TMVA/Envelope.h
@@ -44,23 +44,17 @@
 #endif
 
 
-namespace TMVA {    
-    
-       /**
-        *      \class Envelope
-        *         Base class for all machine learning algorithms
-        *      \ingroup TMVA
-        */
-      
+namespace TMVA {
+
       class Envelope:public Configurable
       {
       protected:
           OptionMap                    fMethod;           //Booked method information
-          std::shared_ptr<DataLoader>  fDataLoader;       //data 
+          std::shared_ptr<DataLoader>  fDataLoader;       //data
           std::shared_ptr<TFile>       fFile;             //!file to save the results
           Bool_t                       fModelPersistence; //flag to save the trained model
           Bool_t                       fVerbose;          //flag for extra information
-          
+
           /**
            Constructor for the initialization of Envelopes,
            differents Envelopes may needs differents constructors then
@@ -72,13 +66,13 @@ namespace TMVA {
           */
 
           Envelope(const TString &name,DataLoader *dataloader=nullptr,TFile *file=nullptr,const TString options="");
-          
+
       public:
           /**
            Default destructor
            */
           ~Envelope();
-          
+
           /**
             Method to book the machine learning method to perform the algorithm.
             \param methodname String with the name of the mva method
@@ -110,13 +104,13 @@ namespace TMVA {
             \param file pointer to TFile object.
           */
           void   SetFile(TFile *file);
-          
+
           /**
             Method to get the pointer to TMVA::DataLoader object.
             \return  pointer to TMVA::DataLoader object.
           */
           DataLoader *GetDataLoader();
-          
+
           /**
             Method to set the pointer to TMVA::DataLoader object.
             \param file pointer to TFile object.
@@ -128,56 +122,56 @@ namespace TMVA {
             \return Boolean with the status.
           */
           Bool_t IsModelPersistence();
-          
+
           /**
             Method enable model persistence, then algorithms model is saved in xml or serialized files.
             \param status Boolean with the status.
           */
           void SetModelPersistence(Bool_t status=kTRUE);
-          
+
           /**
             Method to see if the algorithm should print extra information.
             \return Boolean with the status.
           */
           Bool_t IsVerbose();
-          
+
           /**
             Method enable print extra information in the algorithms.
             \param status Boolean with the status.
           */
           void SetVerbose(Bool_t status);
-          
+
           /**
-            Virtual method to be implmented with your algorithm.
+            Virtual method to be implemented with your algorithm.
           */
           virtual void Evaluate() = 0;
-          
+
       protected:
           /**
             Method get the Booked method in a option map object.
             \return TMVA::OptionMap with the information of the Booked method
           */
           OptionMap &GetMethod();
-          
+
           /**
             Utility method to get TMVA::DataInputHandler reference from the DataLoader.
             \return TMVA::DataInputHandler reference.
           */
           DataInputHandler&        GetDataLoaderDataInput() { return *fDataLoader->fDataInputHandler; }
-          
+
           /**
             Utility method to get TMVA::DataSetInfo reference from the DataLoader.
             \return TMVA::DataSetInfo reference.
           */
           DataSetInfo&             GetDataLoaderDataSetInfo(){return fDataLoader->DefaultDataSetInfo();}
-          
+
           /**
             Utility method to get TMVA::DataSetManager pointer from the DataLoader.
             \return TMVA::DataSetManager pointer.
           */
           DataSetManager*          GetDataLoaderDataSetManager(){return fDataLoader->fDataSetManager;}
           ClassDef(Envelope,0);
-          
+
       };
 }
 
diff --git a/tmva/tmva/inc/TMVA/ExpectedErrorPruneTool.h b/tmva/tmva/inc/TMVA/ExpectedErrorPruneTool.h
index 12fce3c48afee7d16553cd62d92288c8e93e20e4..754182f912c9426c1051aea1121a2b8a40b6c2b2 100644
--- a/tmva/tmva/inc/TMVA/ExpectedErrorPruneTool.h
+++ b/tmva/tmva/inc/TMVA/ExpectedErrorPruneTool.h
@@ -33,7 +33,7 @@
 // Uses an upper limit on the error made by the classification done by each node. If the S/S+B of the node //
 // is f, then according to the training sample, the error rate (fraction of misclassified events by this   //
 // node) is (1-f). Now f has a statistical error according to the binomial distribution hence the error on //
-// f can be estimated (same error as the binomial error for efficency calculations                         //
+// f can be estimated (same error as the binomial error for efficiency calculations                        //
 // ( sigma = sqrt(eff(1-eff)/nEvts ) )                                                                     //
 //                                                                                                         //
 // This tool prunes branches from a tree if the expected error of a node is less than that of the sum  of  //
diff --git a/tmva/tmva/src/Envelope.cxx b/tmva/tmva/src/Envelope.cxx
index 6557e294afadf26fa0aff4aad02685130367e7d6..a764c16d23755c8f4f29fe95b3a7aae94e3f8503 100644
--- a/tmva/tmva/src/Envelope.cxx
+++ b/tmva/tmva/src/Envelope.cxx
@@ -1,6 +1,13 @@
 // @(#)root/tmva $Id$
 // Author: Omar Zapata
 
+/*! \class TMVA::Envelope
+\ingroup TMVA
+
+Base class for all machine learning algorithms
+
+*/
+
 #include "TMVA/Envelope.h"
 
 #include "TMVA/Configurable.h"
@@ -20,55 +27,69 @@
 
 using namespace TMVA;
 
-//______________________________________________________________________________
+////////////////////////////////////////////////////////////////////////////////
+
 Envelope::Envelope(const TString &name,DataLoader *dalaloader,TFile *file,const TString options):Configurable(options),fDataLoader(dalaloader),fFile(file),fVerbose(kFALSE)
 {
     SetName(name.Data());
 }
 
-//______________________________________________________________________________
+////////////////////////////////////////////////////////////////////////////////
+
 Envelope::~Envelope()
 {}
 
-//______________________________________________________________________________
+////////////////////////////////////////////////////////////////////////////////
+
 Bool_t  Envelope::IsSilentFile(){return fFile==nullptr;}
 
-//______________________________________________________________________________
+////////////////////////////////////////////////////////////////////////////////
+
 TFile* Envelope::GetFile(){return fFile.get();}
 // TFile* Envelope::GetFile(){return fFile==nullptr?0:fFile.get();}
 
-//______________________________________________________________________________
+////////////////////////////////////////////////////////////////////////////////
+
 void   Envelope::SetFile(TFile *file){fFile=std::shared_ptr<TFile>(file);}
 
-//______________________________________________________________________________
+////////////////////////////////////////////////////////////////////////////////
+
 Bool_t Envelope::IsVerbose(){return fVerbose;}
 
-//______________________________________________________________________________
+////////////////////////////////////////////////////////////////////////////////
+
 void Envelope::SetVerbose(Bool_t status){fVerbose=status;}
 
-//______________________________________________________________________________
+////////////////////////////////////////////////////////////////////////////////
+
 OptionMap &Envelope::GetMethod(){     return fMethod;}
 
-//______________________________________________________________________________
+////////////////////////////////////////////////////////////////////////////////
+
 DataLoader *Envelope::GetDataLoader(){    return fDataLoader.get();}
 
-//______________________________________________________________________________
+////////////////////////////////////////////////////////////////////////////////
+
 void Envelope::SetDataLoader(DataLoader *dalaloader){
         fDataLoader=std::shared_ptr<DataLoader>(dalaloader) ;
 }
 
-//______________________________________________________________________________
+////////////////////////////////////////////////////////////////////////////////
+
 Bool_t TMVA::Envelope::IsModelPersistence(){return fModelPersistence; }
 
-//______________________________________________________________________________
+////////////////////////////////////////////////////////////////////////////////
+
 void TMVA::Envelope::SetModelPersistence(Bool_t status){fModelPersistence=status;}
 
-//______________________________________________________________________________
+////////////////////////////////////////////////////////////////////////////////
+
 void TMVA::Envelope::BookMethod(Types::EMVA method, TString methodTitle, TString options){
     return BookMethod(Types::Instance().GetMethodName( method ),methodTitle,options);
 }
 
-//______________________________________________________________________________
+////////////////////////////////////////////////////////////////////////////////
+
 void TMVA::Envelope::BookMethod(TString methodName, TString methodTitle, TString options){
     fMethod["MethodName"]    = methodName;
     fMethod["MethodTitle"]   = methodTitle;
diff --git a/tmva/tmva/src/ExpectedErrorPruneTool.cxx b/tmva/tmva/src/ExpectedErrorPruneTool.cxx
index 87709a4a9277dff0aaab77a731975dfe1893eb37..a4a064c42d9aec108d7e1994391f83361f8ba406 100644
--- a/tmva/tmva/src/ExpectedErrorPruneTool.cxx
+++ b/tmva/tmva/src/ExpectedErrorPruneTool.cxx
@@ -24,6 +24,24 @@
  *                                                                                *
  **********************************************************************************/
 
+/*! \class TMVA::ExpectedErrorPruneTool
+\ingroup TMVA
+
+A helper class to prune a decision tree using the expected error (C4.5) method
+
+Uses an upper limit on the error made by the classification done by each node.
+If the \f$ \frac{S}{S+B} \f$ of the node is \f$ f \f$, then according to the
+training sample, the error rate (fraction of misclassified events by this
+node) is \f$ (1-f) \f$. Now \f$ f \f$ has a statistical error according to the
+binomial distribution hence the error on \f$ f \f$ can be estimated (same error
+as the binomial error for efficiency calculations
+\f$ (\sigma = \sqrt{\frac{eff(1-eff)}{nEvts}}) \f$
+
+This tool prunes branches from a tree if the expected error of a node is less
+than that of the sum of the error in its descendants.
+
+*/
+
 #include "TMVA/ExpectedErrorPruneTool.h"
 #include "TMVA/DecisionTree.h"
 #include "TMVA/IPruneTool.h"
@@ -62,7 +80,7 @@ TMVA::ExpectedErrorPruneTool::CalculatePruningInfo( DecisionTree* dt,
    if( isAutomatic ) {
       //SetAutomatic( );
       isAutomatic = kFALSE;
-      Log() << kWARNING << "Sorry autmoatic pruning strength determination is not implemented yet" << Endl;
+      Log() << kWARNING << "Sorry automatic pruning strength determination is not implemented yet" << Endl;
    }
    if( dt == NULL || (IsAutomatic() && validationSample == NULL) ) {
       // must have a valid decision tree to prune, and if the prune strength
@@ -72,8 +90,8 @@ TMVA::ExpectedErrorPruneTool::CalculatePruningInfo( DecisionTree* dt,
    }
    fNodePurityLimit = dt->GetNodePurityLimit();
 
-   if(IsAutomatic()) { 
-      Log() << kFATAL << "Sorry autmoatic pruning strength determination is not implemented yet" << Endl;
+   if(IsAutomatic()) {
+      Log() << kFATAL << "Sorry automatic pruning strength determination is not implemented yet" << Endl;
       /*
         dt->ApplyValidationSample(validationSample);
         Double_t weights = dt->GetSumWeights(validationSample);
@@ -87,8 +105,8 @@ TMVA::ExpectedErrorPruneTool::CalculatePruningInfo( DecisionTree* dt,
         Int_t errCount = 0,
         lastNodeCount = nnodes;
 
-        // find the maxiumum prune strength that still leaves the root's daughter nodes
-      
+        // find the maximum prune strength that still leaves the root's daughter nodes
+
         while ( nnodes > 1 && !forceStop ) {
         fPruneStrength += fDeltaPruneStrength;
         Log() << "----------------------------------------------------" << Endl;
@@ -151,7 +169,7 @@ TMVA::ExpectedErrorPruneTool::CalculatePruningInfo( DecisionTree* dt,
 ////////////////////////////////////////////////////////////////////////////////
 /// recursive pruning of nodes using the Expected Error Pruning (EEP)
 
-void TMVA::ExpectedErrorPruneTool::FindListOfNodes( DecisionTreeNode* node ) 
+void TMVA::ExpectedErrorPruneTool::FindListOfNodes( DecisionTreeNode* node )
 {
    TMVA::DecisionTreeNode *l = (TMVA::DecisionTreeNode*)node->GetLeft();
    TMVA::DecisionTreeNode *r = (TMVA::DecisionTreeNode*)node->GetRight();
@@ -169,7 +187,7 @@ void TMVA::ExpectedErrorPruneTool::FindListOfNodes( DecisionTreeNode* node )
 /// calculate the expected statistical error on the subtree below "node"
 /// which is used in the expected error pruning
 
-Double_t TMVA::ExpectedErrorPruneTool::GetSubTreeError( DecisionTreeNode* node ) const 
+Double_t TMVA::ExpectedErrorPruneTool::GetSubTreeError( DecisionTreeNode* node ) const
 {
    DecisionTreeNode *l = (DecisionTreeNode*)node->GetLeft();
    DecisionTreeNode *r = (DecisionTreeNode*)node->GetRight();
@@ -192,9 +210,10 @@ Double_t TMVA::ExpectedErrorPruneTool::GetSubTreeError( DecisionTreeNode* node )
 /// this node) is (1-f)
 /// Now f has a statistical error according to the binomial distribution
 /// hence the error on f can be estimated (same error as the binomial error
-/// for efficency calculations ( sigma = sqrt(eff(1-eff)/nEvts ) )
+/// for efficiency calculations
+/// \f$ (\sigma = \sqrt{\frac{eff(1-eff)}{nEvts}}) \f$
 
-Double_t TMVA::ExpectedErrorPruneTool::GetNodeError( DecisionTreeNode *node ) const 
+Double_t TMVA::ExpectedErrorPruneTool::GetNodeError( DecisionTreeNode *node ) const
 {
    Double_t errorRate = 0;
 
@@ -224,13 +243,13 @@ Double_t TMVA::ExpectedErrorPruneTool::GetNodeError( DecisionTreeNode *node ) co
    //          leaf node ( N_leaf / N_parent)
    // step 3:
 
-   // Minimum Error Pruning (MEP) accordig to Niblett/Bratko
+   // Minimum Error Pruning (MEP) according to Niblett/Bratko
    //# of correctly classified events by this node:
    //Double_t n=f*nEvts ;
    //Double_t p_apriori = 0.5, m=100;
    //errorRate = (nEvts  - n + (1-p_apriori) * m ) / (nEvts  + m);
 
-   // Pessimistic error Pruing (proposed by Quinlan (error estimat with continuity approximation)
+   // Pessimistic error Pruning (proposed by Quinlan, error estimate with continuity approximation)
    //# of correctly classified events by this node:
    //Double_t n=f*nEvts ;
    //errorRate = (nEvts  - n + 0.5) / nEvts ;
@@ -246,4 +265,3 @@ Double_t TMVA::ExpectedErrorPruneTool::GetNodeError( DecisionTreeNode *node ) co
    return errorRate;
 }
 
-
diff --git a/tmva/tmva/src/FitterBase.cxx b/tmva/tmva/src/FitterBase.cxx
index 55e57429ae3033b15d2a1df1d63d0a6e49303562..7c87288de15210a84803629d5f7c6d36baf9f40a 100644
--- a/tmva/tmva/src/FitterBase.cxx
+++ b/tmva/tmva/src/FitterBase.cxx
@@ -1,4 +1,4 @@
-// @(#)root/tmva $Id$ 
+// @(#)root/tmva $Id$
 // Author: Andreas Hoecker, Peter Speckmayer, Joerg Stelzer, Helge Voss
 
 /**********************************************************************************
@@ -17,22 +17,20 @@
  *      Helge Voss       <Helge.Voss@cern.ch>     - MPI-K Heidelberg, Germany     *
  *                                                                                *
  * Copyright (c) 2005:                                                            *
- *      CERN, Switzerland                                                         * 
- *      MPI-K Heidelberg, Germany                                                 * 
+ *      CERN, Switzerland                                                         *
+ *      MPI-K Heidelberg, Germany                                                 *
  *                                                                                *
  * Redistribution and use in source and binary forms, with or without             *
  * modification, are permitted according to the terms listed in LICENSE           *
  * (http://tmva.sourceforge.net/LICENSE)                                          *
  **********************************************************************************/
 
-////////////////////////////////////////////////////////////////////////////////
+/*! \class TMVA::FitterBase
+\ingroup TMVA
 
-/*
-  FitterBase
+Base class for TMVA fitters. Also defines generalised fitting interface
 
-  Baseclass for TMVA fitters. Also defines generalised fitting interface
 */
-//_______________________________________________________________________
 
 #include "TMVA/FitterBase.h"
 #ifndef ROOT_TMVA_Interval
@@ -57,12 +55,12 @@ ClassImp(TMVA::FitterBase)
 #endif
 
 ////////////////////////////////////////////////////////////////////////////////
-/// constructor   
+/// constructor
 
-TMVA::FitterBase::FitterBase( IFitterTarget& target, 
-                              const TString& name, 
-                              const std::vector<Interval*> ranges, 
-                              const TString& theOption ) 
+TMVA::FitterBase::FitterBase( IFitterTarget& target,
+                              const TString& name,
+                              const std::vector<Interval*> ranges,
+                              const TString& theOption )
 : Configurable( theOption ),
    fFitterTarget( target ),
    fRanges( ranges ),
@@ -75,7 +73,7 @@ TMVA::FitterBase::FitterBase( IFitterTarget& target,
 }
 
 ////////////////////////////////////////////////////////////////////////////////
-/// estimator function interface for fitting 
+/// estimator function interface for fitting
 
 Double_t TMVA::FitterBase::Run()
 {
@@ -83,13 +81,13 @@ Double_t TMVA::FitterBase::Run()
    for (std::vector<Interval*>::const_iterator parIt = fRanges.begin(); parIt != fRanges.end(); parIt++) {
       pars.push_back( (*parIt)->GetMean() );
    }
-                                                                   
+
    //   delete fLogger;
    return this->Run( pars );
 }
 
 ////////////////////////////////////////////////////////////////////////////////
-/// estimator function interface for fitting 
+/// estimator function interface for fitting
 
 Double_t TMVA::FitterBase::EstimatorFunction( std::vector<Double_t>& parameters )
 {
diff --git a/tmva/tmva/src/GeneticAlgorithm.cxx b/tmva/tmva/src/GeneticAlgorithm.cxx
index 0083f5d9f634068890ea993b7a2b5f54cc66a115..09c4745f591ccd2cc90b2c6fb5b142bc72139c19 100644
--- a/tmva/tmva/src/GeneticAlgorithm.cxx
+++ b/tmva/tmva/src/GeneticAlgorithm.cxx
@@ -1,4 +1,4 @@
-// @(#)root/tmva $Id$    
+// @(#)root/tmva $Id$
 // Author: Peter Speckmayer
 
 /**********************************************************************************
@@ -22,10 +22,12 @@
  * (http://tmva.sourceforge.net/LICENSE)                                          *
  **********************************************************************************/
 
-//_______________________________________________________________________
-//                                                                      
-// Base definition for genetic algorithm                                
-//_______________________________________________________________________
+/*! \class TMVA::GeneticAlgorithm
+\ingroup TMVA
+
+Base definition for genetic algorithm
+
+*/
 
 #include <iostream>
 #include <algorithm>
@@ -50,19 +52,22 @@ namespace TMVA {
 }
 
 ClassImp(TMVA::GeneticAlgorithm)
-   
+
 ////////////////////////////////////////////////////////////////////////////////
 /// Constructor
-/// Parameters: 
-///     int populationSize : defines the number of "Individuals" which are created and tested 
+///
+/// Parameters:
+///
+///  - int populationSize : defines the number of "Individuals" which are created and tested
 ///                          within one Generation (Iteration of the Evolution)
-///     std::vector<TMVA::Interval*> ranges : Interval holds the information of an interval, where the GetMin 
+///  - std::vector<TMVA::Interval*> ranges : Interval holds the information of an interval, where the GetMin
 ///                          gets the low and GetMax gets the high constraint of the variable
 ///                          the size of "ranges" is the number of coefficients which are optimised
-/// Purpose: 
+/// Purpose:
+///
 ///     Creates a random population with individuals of the size ranges.size()
 
-TMVA::GeneticAlgorithm::GeneticAlgorithm( IFitterTarget& target, Int_t populationSize, 
+TMVA::GeneticAlgorithm::GeneticAlgorithm( IFitterTarget& target, Int_t populationSize,
                                           const std::vector<Interval*>& ranges, UInt_t seed )
 : fConvCounter(-1),
    fFitterTarget( target ),
@@ -81,7 +86,7 @@ TMVA::GeneticAlgorithm::GeneticAlgorithm( IFitterTarget& target, Int_t populatio
    fPopulation.SetRandomSeed( seed );
 }
 
-TMVA::GeneticAlgorithm::~GeneticAlgorithm() 
+TMVA::GeneticAlgorithm::~GeneticAlgorithm()
 {
    // destructor; deletes fLogger
    delete fLogger;
@@ -89,10 +94,10 @@ TMVA::GeneticAlgorithm::~GeneticAlgorithm()
 
 
 ////////////////////////////////////////////////////////////////////////////////
-/// calls evolution, but if it is not the first time. 
+/// calls evolution, but if it is not the first time.
 /// If it's the first time, the random population created by the
+/// constructor is still not evaluated, therefore we wait for the
-/// second time init is called. 
+/// constructor is still not evaluated, .. therefore we wait for the
+/// second time init is called.
 
 void TMVA::GeneticAlgorithm::Init()
 {
@@ -103,16 +108,16 @@ void TMVA::GeneticAlgorithm::Init()
 }
 
 ////////////////////////////////////////////////////////////////////////////////
-/// if the "fitnessFunction" is called multiple times for one set of 
-/// factors (because i.e. each event of a TTree has to be assessed with 
-/// each set of Factors proposed by the Genetic Algorithm) the value 
+/// if the "fitnessFunction" is called multiple times for one set of
+/// factors (because i.e. each event of a TTree has to be assessed with
+/// each set of Factors proposed by the Genetic Algorithm) the value
 /// of the current calculation has to be added(? or else) to the value
-/// obtained up to now. 
-/// example: some chi-square is calculated for every event, 
+/// obtained up to now.
+/// example: some chi-square is calculated for every event,
 /// after every event the new chi-square (newValue) has to be simply
-/// added to the oldValue. 
+/// added to the oldValue.
 ///
-/// this function has to be overridden eventually 
+/// this function has to be overridden eventually
 /// it might contain only the following return statement.
 ///        return oldValue + newValue;
 
@@ -123,10 +128,10 @@ Double_t TMVA::GeneticAlgorithm::NewFitness( Double_t /*oldValue*/, Double_t new
 
 ////////////////////////////////////////////////////////////////////////////////
 /// starts the evaluation of the fitness of all different individuals of
-/// the population. 
+/// the population.
 ///
 /// this function calls implicitly (many times) the "fitnessFunction" which
-/// has been overridden by the user. 
+/// has been overridden by the user.
 
 Double_t TMVA::GeneticAlgorithm::CalculateFitness()
 {
@@ -145,46 +150,47 @@ Double_t TMVA::GeneticAlgorithm::CalculateFitness()
       for ( int index = 0; index < fPopulation.GetPopulationSize(); ++index )
          {
             GeneticGenes* genes = fPopulation.GetGenes(index);
-            Double_t fitness = NewFitness( genes->GetFitness(), 
+            Double_t fitness = NewFitness( genes->GetFitness(),
                                            fFitterTarget.EstimatorFunction(genes->GetFactors()) );
             genes->SetFitness( fitness );
-         
+
             if ( bests[thread_number] > fitness )
                bests[thread_number] = fitness;
          }
    }
-   
+
    fBestFitness = *std::min_element(bests, bests+nt);
 
-#else 
+#else
 
    for ( int index = 0; index < fPopulation.GetPopulationSize(); ++index ) {
       GeneticGenes* genes = fPopulation.GetGenes(index);
       Double_t fitness = NewFitness( genes->GetFitness(),
                                      fFitterTarget.EstimatorFunction(genes->GetFactors()) );
       genes->SetFitness( fitness );
-      
+
       if ( fBestFitness  > fitness )
          fBestFitness = fitness;
-      
+
    }
 
 #endif
 
    fPopulation.Sort();
 
-   return fBestFitness; 
+   return fBestFitness;
 }
 
 ////////////////////////////////////////////////////////////////////////////////
-/// this function is called from "init" and controls the evolution of the 
-/// individuals. 
-/// the function can be overridden to change the parameters for mutation rate
+/// this function is called from "init" and controls the evolution of the
+/// individuals.
+///
+/// The function can be overridden to change the parameters for mutation rate
 /// sexual reproduction and so on.
 
 void TMVA::GeneticAlgorithm::Evolution()
 {
-   if ( fMakeCopies ) 
+   if ( fMakeCopies )
       fPopulation.MakeCopies( 5 );
    fPopulation.MakeChildren();
 
@@ -194,27 +200,28 @@ void TMVA::GeneticAlgorithm::Evolution()
 
 ////////////////////////////////////////////////////////////////////////////////
 /// this function provides the ability to change the stepSize of a mutation according to
-/// the success of the last generations. 
-/// 
+/// the success of the last generations.
+///
 /// Parameters:
-///      int ofSteps :  = if OF the number of STEPS given in this variable (ofSteps)
-///      int successSteps : >sucessSteps Generations could improve the result
-///      double factor : than multiply the stepSize ( spread ) by this factor
+///
+///  - int ofSteps :  = if OF the number of STEPS given in this variable (ofSteps)
+///  - int successSteps : >successSteps Generations could improve the result
+///  - double factor : then multiply the stepSize ( spread ) by this factor
+///
 /// (if ofSteps == successSteps nothing is changed, if ofSteps < successSteps, the spread
-/// is divided by the factor) 
+/// is divided by the factor)
 ///
-/// using this function one can increase the stepSize of the mutation when we have 
+/// using this function one can increase the stepSize of the mutation when we have
 /// good success (to pass fast through the easy phase-space) and reduce the stepSize
-/// if we are in a difficult "territory" of the phase-space. 
-///
+/// if we are in a difficult "territory" of the phase-space.
 
 Double_t TMVA::GeneticAlgorithm::SpreadControl( Int_t ofSteps, Int_t successSteps, Double_t factor )
 {
    // < is valid for "less" comparison
-   if ( fBestFitness < fLastResult || fSuccessList.size() <=0 ) { 
+   if ( fBestFitness < fLastResult || fSuccessList.size() <=0 ) {
       fLastResult = fBestFitness;
       fSuccessList.push_front( 1 ); // it got better
-   } 
+   }
    else {
       fSuccessList.push_front( 0 ); // it stayed the same
    }
@@ -247,10 +254,9 @@ Double_t TMVA::GeneticAlgorithm::SpreadControl( Int_t ofSteps, Int_t successStep
 ////////////////////////////////////////////////////////////////////////////////
 /// gives back true if the last "steps" steps have lead to an improvement of the
 /// "fitness" of the "individuals" of at least "improvement"
-/// 
-/// this gives a simple measure of if the fitness of the individuals is
-/// converging and no major improvement is to be expected soon. 
 ///
+/// this gives a simple measure of if the fitness of the individuals is
+/// converging and no major improvement is to be expected soon.
 
 Bool_t TMVA::GeneticAlgorithm::HasConverged( Int_t steps, Double_t improvement )
 {
@@ -259,7 +265,7 @@ Bool_t TMVA::GeneticAlgorithm::HasConverged( Int_t steps, Double_t improvement )
    }
    if (TMath::Abs(fBestFitness - fConvValue) <= improvement || steps<0) {
       fConvCounter ++;
-   } 
+   }
    else {
       fConvCounter = 0;
       fConvValue = fBestFitness;