Commit 7029bfa58cfe2d8958c9c14abf660d8c80309f22

Authored by Marcel Hohmann
1 parent c5732764

added basic stats and basic BDT classification for Belle 2 data

DualTrackTagger/include/DualTrackTaggerModule.h
... ... @@ -28,7 +28,8 @@
28 28 #include "TH1F.h"
29 29 #include "TString.h"
30 30  
31   -
  31 +// Include the standalone TMVA classifier generated for Belle 2 data (MethodBase::MakeClass output)
  32 +#include <analysis/modules/DualTrackTagger/TMVAClassification_BDT5.class.h>
32 33  
33 34  
34 35 namespace Belle2 {
... ... @@ -83,7 +84,8 @@ namespace Belle2 {
83 84  
84 85 std::vector<Particle*> m_candidateParticles; // particles with low pt
85 86 std::vector<Particle*> m_allParticlesInList; // used for statistics
86   - StoreArray<Particle> m_particles;
  87 +
  88 + StoreArray<Particle> m_particles; // all particles in event
87 89  
88 90 // statistics - currently unused.
89 91 long int total_particles;
... ... @@ -95,11 +97,15 @@ namespace Belle2 {
95 97 bool m_BelleFlag;
96 98 bool m_MCFlag;
97 99 std::vector<std::string> m_strParticleLists;
  100 + double m_MVAResponseCut; // cut applied to the BDT classifier response
  101 +
  102 + // BDT classifier trained on Belle 2 data (ReadBDT5 from TMVAClassification_BDT5.class.h)
  103 + ReadBDT5 *Belle2ClassifierResponse;
98 104  
99 105 //histograms
100 106 TH1F *h_genParticleIndexMulti;
101   - TH1F *h_taggedDuplicate;
102   - TH1F *h_taggedSingle;
  107 + TH1F *h_Duplicates;
  108 + TH1F *h_Singles;
103 109 };
104 110 }
105 111 #endif
... ...
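The commit adds the classifier members to the header, but the module implementation (.cc) is not part of this diff. Below is a minimal sketch of how the new members could plausibly be wired together; the variable names and their order come from the generated ReadBDT5 header, while the helper computeInputs() and the quantities filled into the histograms are hypothetical.

// Sketch only -- not code from this commit. ReadBDT5 and GetMvaValue() come from the
// generated header included by the module header; computeInputs() is a hypothetical helper.
#include <analysis/modules/DualTrackTagger/DualTrackTaggerModule.h>

using namespace Belle2;

void DualTrackTaggerModule::initialize()
{
  // the order must match the training variables of BDT5
  std::vector<std::string> vars = {"magDiffP", "chargeMult", "phi", "pTp1", "pTp2"};
  Belle2ClassifierResponse = new ReadBDT5(vars);
}

void DualTrackTaggerModule::event()
{
  // compare all pairs of low-pT candidates and cut on the BDT response
  for (size_t i = 0; i < m_candidateParticles.size(); ++i) {
    for (size_t j = i + 1; j < m_candidateParticles.size(); ++j) {
      std::vector<double> inputs =
        computeInputs(m_candidateParticles[i], m_candidateParticles[j]);  // hypothetical helper
      const double response = Belle2ClassifierResponse->GetMvaValue(inputs);
      if (response > m_MVAResponseCut)
        h_Duplicates->Fill(response);   // pair tagged as duplicate tracks (histogrammed quantity is assumed)
      else
        h_Singles->Fill(response);      // pair consistent with two distinct tracks
    }
  }
}

void DualTrackTaggerModule::terminate()
{
  delete Belle2ClassifierResponse;  // ReadBDT5's destructor cleans up its forest via Clear()
}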
DualTrackTagger/include/TMVAClassification_BDT5.class.h 0 → 100644
... ... @@ -0,0 +1,16716 @@
  1 +// Class: ReadBDT5
  2 +// Automatically generated by MethodBase::MakeClass
  3 +//
  4 +
  5 +/* configuration options =====================================================
  6 +
  7 +#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-
  8 +
  9 +Method : BDT::BDT5
  10 +TMVA Release : 4.2.1 [262657]
  11 +ROOT Release : 6.08/06 [395270]
  12 +Creator : mhohmann
  13 +Date : Fri Dec 15 02:14:21 2017
  14 +Host : Linux centos6 4.4.0-97-generic #120-Ubuntu SMP Tue Sep 19 17:28:18 UTC 2017 x86_64 x86_64 x86_64 GNU/Linux
  15 +Dir : /imports/rcs5_data/mhohmann/1_analysis/3_DualTrackTaggerTesting
  16 +Training events: 2357607
  17 +Analysis type : [Classification]
  18 +
  19 +
  20 +#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-
  21 +
  22 +# Set by User:
  23 +V: "False" [Verbose output (short form of "VerbosityLevel" below - overrides the latter one)]
  24 +H: "False" [Print method-specific help message]
  25 +NTrees: "500" [Number of trees in the forest]
  26 +MaxDepth: "3" [Max depth of the decision tree allowed]
  27 +MinNodeSize: "2.5%" [Minimum percentage of training events required in a leaf node (default: Classification: 5%, Regression: 0.2%)]
  28 +nCuts: "20" [Number of grid points in variable range used in finding optimal cut in node splitting]
  29 +BoostType: "AdaBoost" [Boosting type for the trees in the forest (note: AdaCost is still experimental)]
  30 +UseBaggedBoost: "True" [Use only a random subsample of all events for growing the trees in each boost iteration.]
  31 +AdaBoostBeta: "5.000000e-01" [Learning rate for AdaBoost algorithm]
  32 +BaggedSampleFraction: "5.000000e-01" [Relative size of bagged event sample to original size of the data sample (used whenever bagging is used (i.e. UseBaggedBoost, Bagging,)]
  33 +SeparationType: "giniindex" [Separation criterion for node splitting]
  34 +# Default:
  35 +VerbosityLevel: "Default" [Verbosity level]
  36 +VarTransform: "None" [List of variable transformations performed before training, e.g., "D_Background,P_Signal,G,N_AllClasses" for: "Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)"]
  37 +CreateMVAPdfs: "False" [Create PDFs for classifier outputs (signal and background)]
  38 +IgnoreNegWeightsInTraining: "False" [Events with negative weights are ignored in the training (but are included for testing and performance evaluation)]
  39 +AdaBoostR2Loss: "quadratic" [Type of Loss function in AdaBoostR2]
  40 +Shrinkage: "1.000000e+00" [Learning rate for GradBoost algorithm]
  41 +UseRandomisedTrees: "False" [Determine at each node splitting the cut variable only as the best out of a random subset of variables (like in RandomForests)]
  42 +UseNvars: "2" [Size of the subset of variables used with RandomisedTree option]
  43 +UsePoissonNvars: "True" [Interpret "UseNvars" not as a fixed number but as the mean of a Poisson distribution in each split with the RandomisedTree option]
  44 +UseYesNoLeaf: "True" [Use Sig or Bkg categories, or the purity=S/(S+B) as classification of the leaf node -> Real-AdaBoost]
  45 +NegWeightTreatment: "inverseboostnegweights" [How to treat events with negative weights in the BDT training (in particular the boosting): IgnoreInTraining; Boost with inverse boost weight; Pair events with negative and positive weights in the training sample and *annihilate* them (experimental!)]
  46 +Css: "1.000000e+00" [AdaCost: cost of true signal selected signal]
  47 +Cts_sb: "1.000000e+00" [AdaCost: cost of true signal selected bkg]
  48 +Ctb_ss: "1.000000e+00" [AdaCost: cost of true bkg selected signal]
  49 +Cbb: "1.000000e+00" [AdaCost: cost of true bkg selected bkg ]
  50 +NodePurityLimit: "5.000000e-01" [In boosting/pruning, nodes with purity > NodePurityLimit are signal; background otherwise.]
  51 +RegressionLossFunctionBDTG: "huber" [Loss function for BDTG regression.]
  52 +HuberQuantile: "7.000000e-01" [In the Huber loss function this is the quantile that separates the core from the tails in the residuals distribution.]
  53 +DoBoostMonitor: "False" [Create control plot with ROC integral vs tree number]
  54 +UseFisherCuts: "False" [Use multivariate splits using the Fisher criterion]
  55 +MinLinCorrForFisher: "8.000000e-01" [The minimum linear correlation between two variables demanded for use in Fisher criterion in node splitting]
  56 +UseExclusiveVars: "False" [Variables already used in fisher criterion are not anymore analysed individually for node splitting]
  57 +DoPreselection: "False" [Apply automatic pre-selection for 100% efficient signal (bkg) cuts prior to training]
  58 +SigToBkgFraction: "1.000000e+00" [Sig to Bkg ratio used in training (similar to NodePurityLimit, which cannot be used in real AdaBoost)]
  59 +PruneMethod: "nopruning" [Method used for pruning (removal) of statistically insignificant branches; note: for BDTs use small trees (e.g. MaxDepth=3) and NoPruning]
  60 +PruneStrength: "0.000000e+00" [Pruning strength]
  61 +PruningValFraction: "5.000000e-01" [Fraction of events to use for optimizing automatic pruning.]
  62 +SkipNormalization: "False" [Skip normalization at initialization, to keep expectation value of BDT output according to the fraction of events]
  63 +nEventsMin: "0" [deprecated: Use MinNodeSize (in % of training events) instead]
  64 +UseBaggedGrad: "False" [deprecated: Use *UseBaggedBoost* instead: Use only a random subsample of all events for growing the trees in each iteration.]
  65 +GradBaggingFraction: "5.000000e-01" [deprecated: Use *BaggedSampleFraction* instead: Defines the fraction of events to be used in each iteration, e.g. when UseBaggedGrad=kTRUE. ]
  66 +UseNTrainEvents: "0" [deprecated: Use *BaggedSampleFraction* instead: Number of randomly picked training events used in randomised (and bagged) trees]
  67 +NNodesMax: "0" [deprecated: Use MaxDepth instead to limit the tree size]
  68 +##
  69 +
  70 +
  71 +#VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*-
  72 +
  73 +NVar 5
  74 +magDiffP magDiffP magDiffP magDiffP F 'F' [4.32103188359e-05,5.54336166382]
  75 +chargeMult chargeMult chargeMult chargeMult F 'F' [-1,1]
  76 +phi phi phi phi F 'F' [0,3.14042448997]
  77 +pTp1 pTp1 pTp1 pTp1 F 'F' [0,0.5]
  78 +pTp2 pTp2 pTp2 pTp2 F 'F' [0,0.5]
  79 +NSpec 0
  80 +
  81 +
  82 +============================================================================ */
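For orientation, the option block above maps directly onto a standard TMVA booking string. Below is a sketch of how a BDT with exactly these user-set options would typically be trained (TMVA 4.2.x / ROOT 6.08 API); the input file, tree names and the empty preselection cut are placeholders, not taken from this commit.

// Sketch only: standard TMVA booking reproducing the user-set options listed above.
// File name, tree names and the preselection cut are placeholders.
#include "TCut.h"
#include "TFile.h"
#include "TTree.h"
#include "TMVA/DataLoader.h"
#include "TMVA/Factory.h"
#include "TMVA/Types.h"

void trainBDT5()
{
  TFile* input  = TFile::Open("dualTrackPairs.root");            // placeholder input file
  TFile* output = TFile::Open("TMVA_BDT5.root", "RECREATE");

  TMVA::Factory factory("TMVAClassification", output, "!V:!Silent:AnalysisType=Classification");
  TMVA::DataLoader loader("dataset");

  // the five training variables, in the order expected by the generated ReadBDT5 class
  loader.AddVariable("magDiffP",   'F');
  loader.AddVariable("chargeMult", 'F');
  loader.AddVariable("phi",        'F');
  loader.AddVariable("pTp1",       'F');
  loader.AddVariable("pTp2",       'F');

  loader.AddSignalTree((TTree*)input->Get("duplicatePairs"), 1.0);     // placeholder tree name
  loader.AddBackgroundTree((TTree*)input->Get("distinctPairs"), 1.0);  // placeholder tree name

  TCut presel = "";  // no preselection in this sketch
  loader.PrepareTrainingAndTestTree(presel, "SplitMode=Random:NormMode=NumEvents:!V");

  // user-set options exactly as documented in the #OPT block above
  factory.BookMethod(&loader, TMVA::Types::kBDT, "BDT5",
                     "NTrees=500:MaxDepth=3:MinNodeSize=2.5%:nCuts=20:"
                     "BoostType=AdaBoost:UseBaggedBoost:AdaBoostBeta=0.5:"
                     "BaggedSampleFraction=0.5:SeparationType=GiniIndex");

  factory.TrainAllMethods();
  factory.TestAllMethods();
  factory.EvaluateAllMethods();
  output->Close();
}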
  83 +
  84 +#include <vector>
  85 +#include <cmath>
  86 +#include <string>
  87 +#include <iostream>
  88 +
  89 +#define NN new BDT5Node
  90 +
  91 +#ifndef BDT5Node__def
  92 +#define BDT5Node__def
  93 +
  94 +class BDT5Node {
  95 +
  96 +public:
  97 +
  98 + // constructor of an essentially "empty" node floating in space
  99 + BDT5Node ( BDT5Node* left,BDT5Node* right,
  100 + int selector, double cutValue, bool cutType,
  101 + int nodeType, double purity, double response ) :
  102 + fLeft ( left ),
  103 + fRight ( right ),
  104 + fSelector ( selector ),
  105 + fCutValue ( cutValue ),
  106 + fCutType ( cutType ),
  107 + fNodeType ( nodeType ),
  108 + fPurity ( purity ),
  109 + fResponse ( response ){
  110 + }
  111 +
  112 + virtual ~BDT5Node();
  113 +
  114 + // test whether the event descends the tree to the right at this node
  115 + virtual bool GoesRight( const std::vector<double>& inputValues ) const;
  116 + BDT5Node* GetRight( void ) {return fRight; };
  117 +
  118 + // test whether the event descends the tree to the left at this node
  119 + virtual bool GoesLeft ( const std::vector<double>& inputValues ) const;
  120 + BDT5Node* GetLeft( void ) { return fLeft; };
  121 +
  122 + // return S/(S+B) (purity) at this node (from training)
  123 +
  124 + double GetPurity( void ) const { return fPurity; }
  125 + // return the node type
  126 + int GetNodeType( void ) const { return fNodeType; }
  127 + double GetResponse(void) const {return fResponse;}
  128 +
  129 +private:
  130 +
  131 + BDT5Node* fLeft; // pointer to the left daughter node
  132 + BDT5Node* fRight; // pointer to the right daughter node
  133 + int fSelector; // index of variable used in node selection (decision tree)
  134 + double fCutValue; // cut value applied at this node to discriminate bkg against sig
  135 + bool fCutType; // true: if event variable > cutValue ==> signal, false otherwise
  136 + int fNodeType; // Type of node: -1 == Bkg-leaf, 1 == Signal-leaf, 0 = internal
  137 + double fPurity; // Purity of node from training
  138 + double fResponse; // Regression response value of node
  139 +};
  140 +
  141 +//_______________________________________________________________________
  142 + BDT5Node::~BDT5Node()
  143 +{
  144 + if (fLeft != NULL) delete fLeft;
  145 + if (fRight != NULL) delete fRight;
  146 +};
  147 +
  148 +//_______________________________________________________________________
  149 +bool BDT5Node::GoesRight( const std::vector<double>& inputValues ) const
  150 +{
  151 + // test whether the event descends the tree to the right at this node
  152 + bool result;
  153 + result = (inputValues[fSelector] > fCutValue );
  154 + if (fCutType == true) return result; //the cuts are selecting Signal ;
  155 + else return !result;
  156 +}
  157 +
  158 +//_______________________________________________________________________
  159 +bool BDT5Node::GoesLeft( const std::vector<double>& inputValues ) const
  160 +{
  161 + // test whether the event descends the tree to the left at this node
  162 + if (!this->GoesRight(inputValues)) return true;
  163 + else return false;
  164 +}
  165 +
  166 +#endif
  167 +
  168 +#ifndef IClassifierReader__def
  169 +#define IClassifierReader__def
  170 +
  171 +class IClassifierReader {
  172 +
  173 + public:
  174 +
  175 + // constructor
  176 + IClassifierReader() : fStatusIsClean( true ) {}
  177 + virtual ~IClassifierReader() {}
  178 +
  179 + // return classifier response
  180 + virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0;
  181 +
  182 + // returns classifier status
  183 + bool IsStatusClean() const { return fStatusIsClean; }
  184 +
  185 + protected:
  186 +
  187 + bool fStatusIsClean;
  188 +};
  189 +
  190 +#endif
  191 +
  192 +class ReadBDT5 : public IClassifierReader {
  193 +
  194 + public:
  195 +
  196 + // constructor
  197 + ReadBDT5( std::vector<std::string>& theInputVars )
  198 + : IClassifierReader(),
  199 + fClassName( "ReadBDT5" ),
  200 + fNvars( 5 ),
  201 + fIsNormalised( false )
  202 + {
  203 + // the training input variables
  204 + const char* inputVars[] = { "magDiffP", "chargeMult", "phi", "pTp1", "pTp2" };
  205 +
  206 + // sanity checks
  207 + if (theInputVars.size() <= 0) {
  208 + std::cout << "Problem in class \"" << fClassName << "\": empty input vector" << std::endl;
  209 + fStatusIsClean = false;
  210 + }
  211 +
  212 + if (theInputVars.size() != fNvars) {
  213 + std::cout << "Problem in class \"" << fClassName << "\": mismatch in number of input values: "
  214 + << theInputVars.size() << " != " << fNvars << std::endl;
  215 + fStatusIsClean = false;
  216 + }
  217 +
  218 + // validate input variables
  219 + for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {
  220 + if (theInputVars[ivar] != inputVars[ivar]) {
  221 + std::cout << "Problem in class \"" << fClassName << "\": mismatch in input variable names" << std::endl
  222 + << " for variable [" << ivar << "]: " << theInputVars[ivar].c_str() << " != " << inputVars[ivar] << std::endl;
  223 + fStatusIsClean = false;
  224 + }
  225 + }
  226 +
  227 + // initialize min and max vectors (for normalisation)
  228 + fVmin[0] = 0;
  229 + fVmax[0] = 0;
  230 + fVmin[1] = 0;
  231 + fVmax[1] = 0;
  232 + fVmin[2] = 0;
  233 + fVmax[2] = 0;
  234 + fVmin[3] = 0;
  235 + fVmax[3] = 0;
  236 + fVmin[4] = 0;
  237 + fVmax[4] = 0;
  238 +
  239 + // initialize input variable types
  240 + fType[0] = 'F';
  241 + fType[1] = 'F';
  242 + fType[2] = 'F';
  243 + fType[3] = 'F';
  244 + fType[4] = 'F';
  245 +
  246 + // initialize constants
  247 + Initialize();
  248 +
  249 + }
  250 +
  251 + // destructor
  252 + virtual ~ReadBDT5() {
  253 + Clear(); // method-specific
  254 + }
  255 +
  256 + // the classifier response
  257 + // "inputValues" is a vector of input values in the same order as the
  258 + // variables given to the constructor
  259 + double GetMvaValue( const std::vector<double>& inputValues ) const;
  260 +
  261 + private:
  262 +
  263 + // method-specific destructor
  264 + void Clear();
  265 +
  266 + // common member variables
  267 + const char* fClassName;
  268 +
  269 + const size_t fNvars;
  270 + size_t GetNvar() const { return fNvars; }
  271 + char GetType( int ivar ) const { return fType[ivar]; }
  272 +
  273 + // normalisation of input variables
  274 + const bool fIsNormalised;
  275 + bool IsNormalised() const { return fIsNormalised; }
  276 + double fVmin[5];
  277 + double fVmax[5];
  278 + double NormVariable( double x, double xmin, double xmax ) const {
  279 + // normalise to output range: [-1, 1]
  280 + return 2*(x - xmin)/(xmax - xmin) - 1.0;
  281 + }
  282 +
  283 + // type of input variable: 'F' or 'I'
  284 + char fType[5];
  285 +
  286 + // initialize internal variables
  287 + void Initialize();
  288 + double GetMvaValue__( const std::vector<double>& inputValues ) const;
  289 +
  290 + // private members (method specific)
  291 + std::vector<BDT5Node*> fForest; // i.e. root nodes of decision trees
  292 + std::vector<double> fBoostWeights; // the weights applied in the individual boosts
  293 +};
  294 +
  295 +double ReadBDT5::GetMvaValue__( const std::vector<double>& inputValues ) const
  296 +{
  297 + double myMVA = 0;
  298 + double norm = 0;
  299 + for (unsigned int itree=0; itree<fForest.size(); itree++){
  300 + BDT5Node *current = fForest[itree];
  301 + while (current->GetNodeType() == 0) { //intermediate node
  302 + if (current->GoesRight(inputValues)) current=(BDT5Node*)current->GetRight();
  303 + else current=(BDT5Node*)current->GetLeft();
  304 + }
  305 + myMVA += fBoostWeights[itree] * current->GetNodeType();
  306 + norm += fBoostWeights[itree];
  307 + }
  308 + return myMVA / norm;
  309 +}
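For clarity, GetMvaValue__ above implements the usual normalized AdaBoost vote over the forest: each tree contributes the type of the leaf it reaches, $h_i(\mathbf{x}) \in \{-1, +1\}$, weighted by its boost weight $w_i$ stored in fBoostWeights,

$$ y(\mathbf{x}) = \frac{\sum_{i=1}^{N_{\text{trees}}} w_i\, h_i(\mathbf{x})}{\sum_{i=1}^{N_{\text{trees}}} w_i} \in [-1, +1], $$

so the m_MVAResponseCut added to the module header presumably acts as a threshold on this bounded response.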
  310 +
  311 +void ReadBDT5::Initialize()
  312 +{
  313 + // itree = 0
  314 + fBoostWeights.push_back(0.497828079332584);
  315 + fForest.push_back(
  316 +NN(
  317 +NN(
  318 +NN(
  319 +0,
  320 +0,
  321 +-1, 0, 1, 1, 0.932563,-99) ,
  322 +NN(
  323 +0,
  324 +0,
  325 +-1, 0, 1, -1, 0.48961,-99) ,
  326 +1, -0.904762, 0, 0, 0.878587,-99) ,
  327 +NN(
  328 +NN(
  329 +0,
  330 +0,
  331 +-1, 0.550806, 1, 1, 0.733887,-99) ,
  332 +NN(
  333 +0,
  334 +0,
  335 +-1, 0.281482, 1, -1, 0.324074,-99) ,
  336 +2, 2.62756, 0, 0, 0.385742,-99) ,
  337 +2, 0.448607, 1, 0, 0.500017,-99) );
  338 + // itree = 1
  339 + fBoostWeights.push_back(0.398511);
  340 + fForest.push_back(
  341 +NN(
  342 +NN(
  343 +0,
  344 +0,
  345 +-1, 0, 1, 1, 0.923228,-99) ,
  346 +NN(
  347 +NN(
  348 +0,
  349 +0,
  350 +-1, 0, 1, 1, 0.733221,-99) ,
  351 +NN(
  352 +NN(
  353 +0,
  354 +0,
  355 +-1, 0, 1, 1, 0.557183,-99) ,
  356 +NN(
  357 +0,
  358 +0,
  359 +-1, 0, 1, -1, 0.350306,-99) ,
  360 +0, 0.27829, 1, 0, 0.402708,-99) ,
  361 +2, 2.73452, 0, 0, 0.438446,-99) ,
  362 +2, 0.299088, 1, 0, 0.525942,-99) );
  363 + // itree = 2
  364 + fBoostWeights.push_back(0.3654);
  365 + fForest.push_back(
  366 +NN(
  367 +NN(
  368 +0,
  369 +0,
  370 +-1, 0, 1, 1, 0.905779,-99) ,
  371 +NN(
  372 +NN(
  373 +NN(
  374 +0,
  375 +0,
  376 +-1, 0, 1, 1, 0.755213,-99) ,
  377 +NN(
  378 +0,
  379 +0,
  380 +-1, 0, 1, -1, 0.430182,-99) ,
  381 +0, 0.550806, 1, 0, 0.628503,-99) ,
  382 +NN(
  383 +NN(
  384 +0,
  385 +0,
  386 +-1, 0, 1, 1, 0.600232,-99) ,
  387 +NN(
  388 +0,
  389 +0,
  390 +-1, 0, 1, -1, 0.382493,-99) ,
  391 +2, 0.608338, 1, 0, 0.413663,-99) ,
  392 +2, 2.46378, 0, 0, 0.454119,-99) ,
  393 +2, 0.299071, 1, 0, 0.530749,-99) );
  394 + // itree = 3
  395 + fBoostWeights.push_back(0.276761);
  396 + fForest.push_back(
  397 +NN(
  398 +NN(
  399 +0,
  400 +0,
  401 +-1, 0, 1, 1, 0.888477,-99) ,
  402 +NN(
  403 +NN(
  404 +NN(
  405 +0,
  406 +0,
  407 +-1, 0, 1, 1, 0.705535,-99) ,
  408 +NN(
  409 +0,
  410 +0,
  411 +-1, 0, 1, -1, 0.427951,-99) ,
  412 +1, -0.904762, 1, 0, 0.622742,-99) ,
  413 +NN(
  414 +NN(
  415 +0,
  416 +0,
  417 +-1, 0, 1, 1, 0.521813,-99) ,
  418 +NN(
  419 +0,
  420 +0,
  421 +-1, 0, 1, -1, 0.408934,-99) ,
  422 +2, 1.02066, 1, 0, 0.447592,-99) ,
  423 +2, 2.46378, 0, 0, 0.480177,-99) ,
  424 +2, 0.299071, 1, 0, 0.546651,-99) );
  425 + // itree = 4
  426 + fBoostWeights.push_back(0.240433);
  427 + fForest.push_back(
  428 +NN(
  429 +NN(
  430 +0,
  431 +0,
  432 +-1, 0, 1, 1, 0.875178,-99) ,
  433 +NN(
  434 +NN(
  435 +0,
  436 +0,
  437 +-1, 0, 1, 1, 0.668288,-99) ,
  438 +NN(
  439 +NN(
  440 +0,
  441 +0,
  442 +-1, 0, 1, 1, 0.61339,-99) ,
  443 +NN(
  444 +0,
  445 +0,
  446 +-1, 0, 1, -1, 0.44741,-99) ,
  447 +0, 0.808863, 0, 0, 0.465817,-99) ,
  448 +2, 2.73423, 0, 0, 0.486182,-99) ,
  449 +2, 0.299056, 1, 0, 0.546991,-99) );
  450 + // itree = 5
  451 + fBoostWeights.push_back(0.201683);
  452 + fForest.push_back(
  453 +NN(
  454 +NN(
  455 +0,
  456 +0,
  457 +-1, 0, 1, 1, 0.859036,-99) ,
  458 +NN(
  459 +NN(
  460 +0,
  461 +0,
  462 +-1, 2.32849, 0, 1, 0.539087,-99) ,
  463 +NN(
  464 +0,
  465 +0,
  466 +-1, 0.309545, 1, -1, 0.404701,-99) ,
  467 +4, 0.309545, 1, 0, 0.504454,-99) ,
  468 +2, 0.299071, 1, 0, 0.557761,-99) );
  469 + // itree = 6
  470 + fBoostWeights.push_back(0.183354);
  471 + fForest.push_back(
  472 +NN(
  473 +NN(
  474 +0,
  475 +0,
  476 +-1, 0, 1, 1, 0.846396,-99) ,
  477 +NN(
  478 +NN(
  479 +0,
  480 +0,
  481 +-1, 2.73452, 0, 1, 0.5263,-99) ,
  482 +NN(
  483 +0,
  484 +0,
  485 +-1, 0.142897, 0, -1, 0.397073,-99) ,
  486 +3, 0.309545, 1, 0, 0.492516,-99) ,
  487 +2, 0.299088, 1, 0, 0.544797,-99) );
  488 + // itree = 7
  489 + fBoostWeights.push_back(0.224759);
  490 + fForest.push_back(
  491 +NN(
  492 +NN(
  493 +0,
  494 +0,
  495 +-1, 0, 1, 1, 0.835769,-99) ,
  496 +NN(
  497 +NN(
  498 +NN(
  499 +0,
  500 +0,
  501 +-1, 0, 1, 1, 0.641252,-99) ,
  502 +NN(
  503 +0,
  504 +0,
  505 +-1, 0, 1, -1, 0.483364,-99) ,
  506 +0, 0.570665, 1, 0, 0.574736,-99) ,
  507 +NN(
  508 +NN(
  509 +0,
  510 +0,
  511 +-1, 0, 1, 1, 0.611058,-99) ,
  512 +NN(
  513 +0,
  514 +0,
  515 +-1, 0, 1, -1, 0.436797,-99) ,
  516 +0, 0.808863, 0, 0, 0.453673,-99) ,
  517 +2, 2.32849, 0, 0, 0.481113,-99) ,
  518 +2, 0.299071, 1, 0, 0.532247,-99) );
  519 + // itree = 8
  520 + fBoostWeights.push_back(0.183517);
  521 + fForest.push_back(
  522 +NN(
  523 +NN(
  524 +0,
  525 +0,
  526 +-1, 0, 1, 1, 0.81667,-99) ,
  527 +NN(
  528 +NN(
  529 +NN(
  530 +0,
  531 +0,
  532 +-1, 0, 1, 1, 0.632465,-99) ,
  533 +NN(
  534 +0,
  535 +0,
  536 +-1, 0, 1, -1, 0.45333,-99) ,
  537 +1, -0.904762, 1, 0, 0.574208,-99) ,
  538 +NN(
  539 +NN(
  540 +0,
  541 +0,
  542 +-1, 0, 1, 1, 0.520551,-99) ,
  543 +NN(
  544 +0,
  545 +0,
  546 +-1, 0, 1, -1, 0.433906,-99) ,
  547 +1, -0.904762, 0, 0, 0.47844,-99) ,
  548 +2, 2.46378, 0, 0, 0.495802,-99) ,
  549 +2, 0.299071, 1, 0, 0.540786,-99) );
  550 + // itree = 9
  551 + fBoostWeights.push_back(0.163718);
  552 + fForest.push_back(
  553 +NN(
  554 +NN(
  555 +0,
  556 +0,
  557 +-1, 0, 1, 1, 0.801789,-99) ,
  558 +NN(
  559 +NN(
  560 +0,
  561 +0,
  562 +-1, 2.81828, 0, 1, 0.562205,-99) ,
  563 +NN(
  564 +NN(
  565 +0,
  566 +0,
  567 +-1, 0, 1, 1, 0.577203,-99) ,
  568 +NN(
  569 +0,
  570 +0,
  571 +-1, 0, 1, -1, 0.461497,-99) ,
  572 +0, 0.808863, 0, 0, 0.473293,-99) ,
  573 +2, 2.46392, 0, 0, 0.489338,-99) ,
  574 +2, 0.299088, 1, 0, 0.532242,-99) );
  575 + // itree = 10
  576 + fBoostWeights.push_back(0.134938);
  577 + fForest.push_back(
  578 +NN(
  579 +NN(
  580 +0,
  581 +0,
  582 +-1, 0, 1, 1, 0.787294,-99) ,
  583 +NN(
  584 +NN(
  585 +0,
  586 +0,
  587 +-1, 2.73437, 0, 1, 0.526177,-99) ,
  588 +NN(
  589 +0,
  590 +0,
  591 +-1, 0.142897, 0, -1, 0.452645,-99) ,
  592 +4, 0.285738, 1, 0, 0.502958,-99) ,
  593 +2, 0.299071, 1, 0, 0.541175,-99) );
  594 + // itree = 11
  595 + fBoostWeights.push_back(0.141965);
  596 + fForest.push_back(
  597 +NN(
  598 +NN(
  599 +0,
  600 +0,
  601 +-1, 0, 1, 1, 0.778298,-99) ,
  602 +NN(
  603 +NN(
  604 +0,
  605 +0,
  606 +-1, 0, 1, 1, 0.668606,-99) ,
  607 +NN(
  608 +NN(
  609 +0,
  610 +0,
  611 +-1, 0, 1, 1, 0.633471,-99) ,
  612 +NN(
  613 +0,
  614 +0,
  615 +-1, 0, 1, -1, 0.477199,-99) ,
  616 +0, 0.170165, 1, 0, 0.491633,-99) ,
  617 +0, 1.07196, 0, 0, 0.499169,-99) ,
  618 +2, 0.299071, 1, 0, 0.53613,-99) );
  619 + // itree = 12
  620 + fBoostWeights.push_back(0.129943);
  621 + fForest.push_back(
  622 +NN(
  623 +NN(
  624 +0,
  625 +0,
  626 +-1, 0, 1, 1, 0.765529,-99) ,
  627 +NN(
  628 +NN(
  629 +0,
  630 +0,
  631 +-1, 0.204114, 0, 1, 0.532011,-99) ,
  632 +NN(
  633 +0,
  634 +0,
  635 +-1, 0.148023, 0, -1, 0.460282,-99) ,
  636 +3, 0.285738, 1, 0, 0.5093,-99) ,
  637 +2, 0.299071, 1, 0, 0.542739,-99) );
  638 + // itree = 13
  639 + fBoostWeights.push_back(0.111797);
  640 + fForest.push_back(
  641 +NN(
  642 +NN(
  643 +0,
  644 +0,
  645 +-1, 0, 1, 1, 0.756246,-99) ,
  646 +NN(
  647 +NN(
  648 +0,
  649 +0,
  650 +-1, 0, 1, 1, 0.654251,-99) ,
  651 +NN(
  652 +NN(
  653 +0,
  654 +0,
  655 +-1, 0, 1, 1, 0.59493,-99) ,
  656 +NN(
  657 +0,
  658 +0,
  659 +-1, 0, 1, -1, 0.48815,-99) ,
  660 +2, 2.73452, 0, 0, 0.498508,-99) ,
  661 +0, 1.05348, 0, 0, 0.505486,-99) ,
  662 +2, 0.299088, 1, 0, 0.537864,-99) );
  663 + // itree = 14
  664 + fBoostWeights.push_back(0.0935175);
  665 + fForest.push_back(
  666 +NN(
  667 +0,
  668 +0,
  669 +-1, 0.29907, 1, 1, 0.546623,-99) );
  670 + // itree = 15
  671 + fBoostWeights.push_back(0.101413);
  672 + fForest.push_back(
  673 +NN(
  674 +NN(
  675 +0,
  676 +0,
  677 +-1, 0, 1, 1, 0.736405,-99) ,
  678 +NN(
  679 +NN(
  680 +0,
  681 +0,
  682 +-1, 0, 1, 1, 0.612248,-99) ,
  683 +NN(
  684 +NN(
  685 +0,
  686 +0,
  687 +-1, 0, 1, 1, 0.605289,-99) ,
  688 +NN(
  689 +0,
  690 +0,
  691 +-1, 0, 1, -1, 0.48885,-99) ,
  692 +0, 0.167449, 1, 0, 0.499112,-99) ,
  693 +0, 1.05348, 0, 0, 0.503948,-99) ,
  694 +2, 0.299071, 1, 0, 0.533547,-99) );
  695 + // itree = 16
  696 + fBoostWeights.push_back(0.107771);
  697 + fForest.push_back(
  698 +NN(
  699 +NN(
  700 +0,
  701 +0,
  702 +-1, 0, 1, 1, 0.724258,-99) ,
  703 +NN(
  704 +NN(
  705 +NN(
  706 +0,
  707 +0,
  708 +-1, 0, 1, 1, 0.574167,-99) ,
  709 +NN(
  710 +0,
  711 +0,
  712 +-1, 0, 1, -1, 0.499533,-99) ,
  713 +3, 0.204114, 0, 0, 0.528748,-99) ,
  714 +NN(
  715 +NN(
  716 +0,
  717 +0,
  718 +-1, 0, 1, 1, 0.505945,-99) ,
  719 +NN(
  720 +0,
  721 +0,
  722 +-1, 0, 1, -1, 0.370302,-99) ,
  723 +4, 0.142897, 0, 0, 0.480702,-99) ,
  724 +3, 0.285738, 1, 0, 0.513673,-99) ,
  725 +2, 0.299071, 1, 0, 0.540091,-99) );
  726 + // itree = 17
  727 + fBoostWeights.push_back(0.112022);
  728 + fForest.push_back(
  729 +NN(
  730 +NN(
  731 +0,
  732 +0,
  733 +-1, 0, 1, 1, 0.714708,-99) ,
  734 +NN(
  735 +NN(
  736 +NN(
  737 +0,
  738 +0,
  739 +-1, 0, 1, 1, 0.529231,-99) ,
  740 +NN(
  741 +0,
  742 +0,
  743 +-1, 0, 1, -1, 0.457739,-99) ,
  744 +4, 0.404773, 1, 0, 0.522046,-99) ,
  745 +NN(
  746 +0,
  747 +0,
  748 +-1, 0.591641, 0, -1, 0.442759,-99) ,
  749 +3, 0.404773, 1, 0, 0.513776,-99) ,
  750 +2, 0.299088, 1, 0, 0.538648,-99) );
  751 + // itree = 18
  752 + fBoostWeights.push_back(0.123865);
  753 + fForest.push_back(
  754 +NN(
  755 +NN(
  756 +0,
  757 +0,
  758 +-1, 0, 1, 1, 0.699558,-99) ,
  759 +NN(
  760 +NN(
  761 +NN(
  762 +0,
  763 +0,
  764 +-1, 0, 1, 1, 0.588157,-99) ,
  765 +NN(
  766 +0,
  767 +0,
  768 +-1, 0, 1, -1, 0.45325,-99) ,
  769 +1, -0.904762, 1, 0, 0.539766,-99) ,
  770 +NN(
  771 +NN(
  772 +0,
  773 +0,
  774 +-1, 0, 1, 1, 0.538209,-99) ,
  775 +NN(
  776 +0,
  777 +0,
  778 +-1, 0, 1, -1, 0.471945,-99) ,
  779 +2, 0.84027, 1, 0, 0.490918,-99) ,
  780 +2, 2.1932, 0, 0, 0.504089,-99) ,
  781 +2, 0.299071, 1, 0, 0.528015,-99) );
  782 + // itree = 19
  783 + fBoostWeights.push_back(0.0737033);
  784 + fForest.push_back(
  785 +NN(
  786 +NN(
  787 +0,
  788 +0,
  789 +-1, 0, 1, 1, 0.688017,-99) ,
  790 +NN(
  791 +NN(
  792 +0,
  793 +0,
  794 +-1, 0, 1, 1, 0.617101,-99) ,
  795 +NN(
  796 +NN(
  797 +0,
  798 +0,
  799 +-1, 0, 1, 1, 0.554507,-99) ,
  800 +NN(
  801 +0,
  802 +0,
  803 +-1, 0, 1, -1, 0.496106,-99) ,
  804 +2, 2.59908, 0, 0, 0.504187,-99) ,
  805 +0, 1.03925, 0, 0, 0.509417,-99) ,
  806 +2, 0.299071, 1, 0, 0.531165,-99) );
  807 + // itree = 20
  808 + fBoostWeights.push_back(0.075109);
  809 + fForest.push_back(
  810 +NN(
  811 +NN(
  812 +0,
  813 +0,
  814 +-1, 0, 1, 1, 0.679155,-99) ,
  815 +NN(
  816 +NN(
  817 +0,
  818 +0,
  819 +-1, 0.972899, 0, 1, 0.515254,-99) ,
  820 +NN(
  821 +0,
  822 +0,
  823 +-1, 0, 1, -1, 0.420421,-99) ,
  824 +3, 0.452386, 1, 0, 0.511045,-99) ,
  825 +2, 0.299088, 1, 0, 0.53127,-99) );
  826 + // itree = 21
  827 + fBoostWeights.push_back(0.0874082);
  828 + fForest.push_back(
  829 +NN(
  830 +NN(
  831 +0,
  832 +0,
  833 +-1, 0, 1, 1, 0.670729,-99) ,
  834 +NN(
  835 +NN(
  836 +NN(
  837 +0,
  838 +0,
  839 +-1, 0, 1, 1, 0.548739,-99) ,
  840 +NN(
  841 +0,
  842 +0,
  843 +-1, 0, 1, -1, 0.493678,-99) ,
  844 +3, 0.190511, 0, 0, 0.518889,-99) ,
  845 +NN(
  846 +0,
  847 +0,
  848 +-1, 0.166704, 0, -1, 0.472475,-99) ,
  849 +3, 0.285738, 1, 0, 0.504333,-99) ,
  850 +2, 0.299071, 1, 0, 0.524254,-99) );
  851 + // itree = 22
  852 + fBoostWeights.push_back(0.0766845);
  853 + fForest.push_back(
  854 +NN(
  855 +NN(
  856 +0,
  857 +0,
  858 +-1, 0, 1, 1, 0.657657,-99) ,
  859 +NN(
  860 +NN(
  861 +NN(
  862 +0,
  863 +0,
  864 +-1, 0, 1, 1, 0.520786,-99) ,
  865 +NN(
  866 +0,
  867 +0,
  868 +-1, 0, 1, -1, 0.481589,-99) ,
  869 +4, 0.150833, 0, 0, 0.510501,-99) ,
  870 +NN(
  871 +0,
  872 +0,
  873 +-1, 0, 1, -1, 0.433904,-99) ,
  874 +4, 0.452386, 1, 0, 0.50708,-99) ,
  875 +2, 0.299088, 1, 0, 0.524927,-99) );
  876 + // itree = 23
  877 + fBoostWeights.push_back(0.0686);
  878 + fForest.push_back(
  879 +NN(
  880 +NN(
  881 +0,
  882 +0,
  883 +-1, 0, 1, 1, 0.651711,-99) ,
  884 +NN(
  885 +NN(
  886 +NN(
  887 +0,
  888 +0,
  889 +-1, 0, 1, 1, 0.522527,-99) ,
  890 +NN(
  891 +0,
  892 +0,
  893 +-1, 0, 1, -1, 0.493323,-99) ,
  894 +4, 0.193912, 0, 0, 0.510787,-99) ,
  895 +NN(
  896 +0,
  897 +0,
  898 +-1, 0, 1, -1, 0.433903,-99) ,
  899 +4, 0.452386, 1, 0, 0.507439,-99) ,
  900 +2, 0.299071, 1, 0, 0.524654,-99) );
  901 + // itree = 24
  902 + fBoostWeights.push_back(0.138603);
  903 + fForest.push_back(
  904 +NN(
  905 +NN(
  906 +NN(
  907 +0,
  908 +0,
  909 +-1, 0.135901, 1, 1, 0.692282,-99) ,
  910 +NN(
  911 +NN(
  912 +0,
  913 +0,
  914 +-1, 0, 1, 1, 0.545734,-99) ,
  915 +NN(
  916 +0,
  917 +0,
  918 +-1, 0, 1, -1, 0.279512,-99) ,
  919 +4, 0.119023, 1, 0, 0.382093,-99) ,
  920 +1, -0.904762, 0, 0, 0.595536,-99) ,
  921 +NN(
  922 +NN(
  923 +0,
  924 +0,
  925 +-1, 0, 1, 1, 0.586041,-99) ,
  926 +NN(
  927 +0,
  928 +0,
  929 +-1, 0.285714, 1, -1, 0.481661,-99) ,
  930 +0, 1.00045, 0, 0, 0.488526,-99) ,
  931 +0, 0.259409, 1, 0, 0.51987,-99) );
  932 + // itree = 25
  933 + fBoostWeights.push_back(0.0750074);
  934 + fForest.push_back(
  935 +NN(
  936 +NN(
  937 +0,
  938 +0,
  939 +-1, 0, 1, 1, 0.642923,-99) ,
  940 +NN(
  941 +NN(
  942 +0,
  943 +0,
  944 +-1, 2.05802, 0, 1, 0.524404,-99) ,
  945 +NN(
  946 +NN(
  947 +0,
  948 +0,
  949 +-1, 0, 1, 1, 0.502681,-99) ,
  950 +NN(
  951 +0,
  952 +0,
  953 +-1, 0, 1, -1, 0.395704,-99) ,
  954 +3, 0.142897, 0, 0, 0.483258,-99) ,
  955 +4, 0.285738, 1, 0, 0.511472,-99) ,
  956 +2, 0.299088, 1, 0, 0.52693,-99) );