Commit ccf70c31956249f72aacff17dc68b9078f2e77e2
1 parent: f0fa644a
updated MVA
Showing 6 changed files with 50788 additions and 14 deletions
@@ -0,0 +1,15 @@
+#ifndef DUALTRACKCLUSTER_H
+#define DUALTRACKCLUSTER_H
+
+#include <vector>
+#include <analysis/dataobjects/Particle.h>
+#include <framework/datastore/StoreArray.h>
+
+class TrackCluster{
+ public:
+ void addToCluster();
+ private:
+ //std::vector<Particle*> m_particles;
+};
+
+#endif
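
The new cluster header is only a skeleton: addToCluster() takes no arguments yet and the particle container is still commented out. A minimal sketch of how the stub could be completed, assuming the intent is to collect Belle2::Particle pointers per cluster (the argument, accessor, and inline bodies are illustrative guesses, not part of the commit):

#include <vector>
#include <analysis/dataobjects/Particle.h>

class TrackCluster {
 public:
  // hypothetical signature: register one particle with this cluster
  void addToCluster(const Belle2::Particle* particle) { m_particles.push_back(particle); }
  // hypothetical accessor for the collected particles
  const std::vector<const Belle2::Particle*>& particles() const { return m_particles; }
 private:
  std::vector<const Belle2::Particle*> m_particles;  // mirrors the commented-out member above
};
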
DualTrackTagger/include/DualTrackTaggerModule.h
@@ -31,7 +31,9 @@
 #include "TFile.h"
 
 // Include MVA classes - TEST
-#include <analysis/modules/DualTrackTagger/TMVAClassification_BDT5.class.h>
+#include <analysis/modules/DualTrackTagger/TMVAClassification_B2_011_BDT_5.class.h>
+#include <analysis/modules/DualTrackTagger/TMVAClassification_B2_013_BDT_5.class.h>
+#include <analysis/modules/DualTrackTagger/TMVAClassification_B2_211_BDT_5.class.h>
 
 
 namespace Belle2 {
@@ -133,11 +135,22 @@ namespace Belle2 {
 bool m_BelleFlag;
 bool m_MCFlag;
 std::vector<std::string> m_strParticleLists;
- double m_MVAResponseCut;
 TString m_histoFileName;
 
+
 //Belle 2 Classifier
- ReadBDT5 *Belle2ClassifierResponse;
+ double m_B2_011_responseCut;
+ double m_B2_013_responseCut;
+ double m_B2_211_responseCut;
+
+ ReadB2_011_BDT_5 *B2_011_MVA;
+ ReadB2_013_BDT_5 *B2_013_MVA;
+ ReadB2_211_BDT_5 *B2_211_MVA;
+
+ //Belle Classifier
+ double m_B_011_responseCut;
+ double m_B_013_responseCut;
+ double m_B_211_responseCut;
 
 //histograms
 TFile *m_histoFile;
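
The module now keeps one reader and one response cut per channel (011, 013, 211) in place of the single ReadBDT5. Going by the reader interface in the generated headers below (a constructor taking the training variable names and GetMvaValue() taking the values in the same order), a minimal sketch of how the Belle II readers might be created and queried; the helper function is hypothetical, the variable list is the five B2_011 training variables, and the assumption that the 013/211 trainings use the same set is not confirmed by this diff:

// in initialize() (sketch)
std::vector<std::string> vars = {"magDiffP", "chargeMult", "phi", "pTp1", "pTp2"};
B2_011_MVA = new ReadB2_011_BDT_5(vars);
B2_013_MVA = new ReadB2_013_BDT_5(vars);  // assumed: same variables as B2_011
B2_211_MVA = new ReadB2_211_BDT_5(vars);

// hypothetical helper: evaluate the 011 response for one track pair
double response011(ReadB2_011_BDT_5* reader, double magDiffP, double chargeMult,
                   double phi, double pTp1, double pTp2)
{
  std::vector<double> values = {magDiffP, chargeMult, phi, pTp1, pTp2};  // same order as training
  return reader->GetMvaValue(values);
}

A candidate would then be tagged, for example, when the response exceeds the corresponding m_B2_011_responseCut / m_B2_013_responseCut / m_B2_211_responseCut.
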
DualTrackTagger/include/TMVAClassification_B2_011_BDT_5.class.h
new file mode 100644
@@ -0,0 +1,17322 @@
1 | +// Class: ReadB2_011_BDT_5 | |
2 | +// Automatically generated by MethodBase::MakeClass | |
3 | +// | |
4 | + | |
5 | +/* configuration options ===================================================== | |
6 | + | |
7 | +#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*- | |
8 | + | |
9 | +Method : BDT::B2_011_BDT_5 | |
10 | +TMVA Release : 4.2.1 [262657] | |
11 | +ROOT Release : 6.08/06 [395270] | |
12 | +Creator : mhohmann | |
13 | +Date : Mon Jan 15 13:54:36 2018 | |
14 | +Host : Linux centos6 4.4.0-97-generic #120-Ubuntu SMP Tue Sep 19 17:28:18 UTC 2017 x86_64 x86_64 x86_64 GNU/Linux | |
15 | +Dir : /gpfs/home/belle2/mhohmann/3_iVub/1_DualTrack | |
16 | +Training events: 349041 | |
17 | +Analysis type : [Classification] | |
18 | + | |
19 | + | |
20 | +#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*- | |
21 | + | |
22 | +# Set by User: | |
23 | +V: "False" [Verbose output (short form of "VerbosityLevel" below - overrides the latter one)] | |
24 | +H: "False" [Print method-specific help message] | |
25 | +NTrees: "500" [Number of trees in the forest] | |
26 | +MaxDepth: "3" [Max depth of the decision tree allowed] | |
27 | +MinNodeSize: "2.5%" [Minimum percentage of training events required in a leaf node (default: Classification: 5%, Regression: 0.2%)] | |
28 | +nCuts: "20" [Number of grid points in variable range used in finding optimal cut in node splitting] | |
29 | +BoostType: "AdaBoost" [Boosting type for the trees in the forest (note: AdaCost is still experimental)] | |
30 | +UseBaggedBoost: "True" [Use only a random subsample of all events for growing the trees in each boost iteration.] | |
31 | +AdaBoostBeta: "5.000000e-01" [Learning rate for AdaBoost algorithm] | |
32 | +BaggedSampleFraction: "5.000000e-01" [Relative size of bagged event sample to original size of the data sample (used whenever bagging is used (i.e. UseBaggedBoost, Bagging,)] | |
33 | +SeparationType: "giniindex" [Separation criterion for node splitting] | |
34 | +# Default: | |
35 | +VerbosityLevel: "Default" [Verbosity level] | |
36 | +VarTransform: "None" [List of variable transformations performed before training, e.g., "D_Background,P_Signal,G,N_AllClasses" for: "Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)"] | |
37 | +CreateMVAPdfs: "False" [Create PDFs for classifier outputs (signal and background)] | |
38 | +IgnoreNegWeightsInTraining: "False" [Events with negative weights are ignored in the training (but are included for testing and performance evaluation)] | |
39 | +AdaBoostR2Loss: "quadratic" [Type of Loss function in AdaBoostR2] | |
40 | +Shrinkage: "1.000000e+00" [Learning rate for GradBoost algorithm] | |
41 | +UseRandomisedTrees: "False" [Determine at each node splitting the cut variable only as the best out of a random subset of variables (like in RandomForests)] | |
42 | +UseNvars: "2" [Size of the subset of variables used with RandomisedTree option] | |
43 | +UsePoissonNvars: "True" [Interpret "UseNvars" not as fixed number but as mean of a Poisson distribution in each split with RandomisedTree option] | |
44 | +UseYesNoLeaf: "True" [Use Sig or Bkg categories, or the purity=S/(S+B) as classification of the leaf node -> Real-AdaBoost] | |
45 | +NegWeightTreatment: "inverseboostnegweights" [How to treat events with negative weights in the BDT training (in particular the boosting): IgnoreInTraining; Boost with inverse boostweight; Pair events with negative and positive weights in training sample and *annihilate* them (experimental!)] | |
46 | +Css: "1.000000e+00" [AdaCost: cost of true signal selected signal] | |
47 | +Cts_sb: "1.000000e+00" [AdaCost: cost of true signal selected bkg] | |
48 | +Ctb_ss: "1.000000e+00" [AdaCost: cost of true bkg selected signal] | |
49 | +Cbb: "1.000000e+00" [AdaCost: cost of true bkg selected bkg ] | |
50 | +NodePurityLimit: "5.000000e-01" [In boosting/pruning, nodes with purity > NodePurityLimit are signal; background otherwise.] | |
51 | +RegressionLossFunctionBDTG: "huber" [Loss function for BDTG regression.] | |
52 | +HuberQuantile: "7.000000e-01" [In the Huber loss function this is the quantile that separates the core from the tails in the residuals distribution.] | |
53 | +DoBoostMonitor: "False" [Create control plot with ROC integral vs tree number] | |
54 | +UseFisherCuts: "False" [Use multivariate splits using the Fisher criterion] | |
55 | +MinLinCorrForFisher: "8.000000e-01" [The minimum linear correlation between two variables demanded for use in Fisher criterion in node splitting] | |
56 | +UseExclusiveVars: "False" [Variables already used in fisher criterion are not anymore analysed individually for node splitting] | |
57 | +DoPreselection: "False" [apply automatic pre-selection for 100% efficient signal (bkg) cuts prior to training] | |
58 | +SigToBkgFraction: "1.000000e+00" [Sig to Bkg ratio used in Training (similar to NodePurityLimit, which cannot be used in real AdaBoost)] | |
59 | +PruneMethod: "nopruning" [Note: for BDTs use small trees (e.g. MaxDepth=3) and NoPruning. Pruning: Method used for pruning (removal) of statistically insignificant branches] | |
60 | +PruneStrength: "0.000000e+00" [Pruning strength] | |
61 | +PruningValFraction: "5.000000e-01" [Fraction of events to use for optimizing automatic pruning.] | |
62 | +SkipNormalization: "False" [Skip normalization at initialization, to keep expectation value of BDT output according to the fraction of events] | |
63 | +nEventsMin: "0" [deprecated: Use MinNodeSize (in % of training events) instead] | |
64 | +UseBaggedGrad: "False" [deprecated: Use *UseBaggedBoost* instead: Use only a random subsample of all events for growing the trees in each iteration.] | |
65 | +GradBaggingFraction: "5.000000e-01" [deprecated: Use *BaggedSampleFraction* instead: Defines the fraction of events to be used in each iteration, e.g. when UseBaggedGrad=kTRUE. ] | |
66 | +UseNTrainEvents: "0" [deprecated: Use *BaggedSampleFraction* instead: Number of randomly picked training events used in randomised (and bagged) trees] | |
67 | +NNodesMax: "0" [deprecated: Use MaxDepth instead to limit the tree size] | |
68 | +## | |
69 | + | |
70 | + | |
71 | +#VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*- | |
72 | + | |
73 | +NVar 5 | |
74 | +magDiffP magDiffP magDiffP magDiffP F 'F' [0,5.71271848679] | |
75 | +chargeMult chargeMult chargeMult chargeMult F 'F' [-1,1] | |
76 | +phi phi phi phi F 'F' [0,3.13901996613] | |
77 | +pTp1 pTp1 pTp1 pTp1 F 'F' [0.00120415294077,0.499999076128] | |
78 | +pTp2 pTp2 pTp2 pTp2 F 'F' [0.0014912445331,0.499998599291] | |
79 | +NSpec 0 | |
80 | + | |
81 | + | |
82 | +============================================================================ */ | |
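
The option dump above records the user-set training configuration (500 trees, depth 3, AdaBoost with bagging, Gini-index splitting). For orientation, a minimal sketch of the TMVA booking that such a dump corresponds to; the factory and dataloader names, the output file, and the tree-loading step are assumptions, only the option values and variable names are taken from the block above:

#include "TFile.h"
#include "TMVA/Factory.h"
#include "TMVA/DataLoader.h"
#include "TMVA/Types.h"

void bookB2_011_BDT_5()
{
  TFile* outputFile = TFile::Open("tmva_B2_011.root", "RECREATE");  // assumed file name
  TMVA::Factory factory("TMVAClassification", outputFile, "AnalysisType=Classification");
  TMVA::DataLoader loader("dataset");
  for (const char* v : {"magDiffP", "chargeMult", "phi", "pTp1", "pTp2"})
    loader.AddVariable(v, 'F');
  // signal/background trees would be registered here with AddSignalTree()/AddBackgroundTree()
  factory.BookMethod(&loader, TMVA::Types::kBDT, "B2_011_BDT_5",
                     "NTrees=500:MaxDepth=3:MinNodeSize=2.5%:nCuts=20:BoostType=AdaBoost:"
                     "UseBaggedBoost=True:AdaBoostBeta=0.5:BaggedSampleFraction=0.5:"
                     "SeparationType=GiniIndex");
}
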
83 | + | |
84 | +#include <vector> | |
85 | +#include <cmath> | |
86 | +#include <string> | |
87 | +#include <iostream> | |
88 | + | |
89 | +#define B2_011_NN new B2_011_BDT_5Node | |
90 | + | |
91 | +#ifndef B2_011_BDT_5Node__def | |
92 | +#define B2_011_BDT_5Node__def | |
93 | + | |
94 | +class B2_011_BDT_5Node { | |
95 | + | |
96 | +public: | |
97 | + | |
98 | + // constructor of an essentially "empty" node floating in space | |
99 | + B2_011_BDT_5Node ( B2_011_BDT_5Node* left,B2_011_BDT_5Node* right, | |
100 | + int selector, double cutValue, bool cutType, | |
101 | + int nodeType, double purity, double response ) : | |
102 | + fLeft ( left ), | |
103 | + fRight ( right ), | |
104 | + fSelector ( selector ), | |
105 | + fCutValue ( cutValue ), | |
106 | + fCutType ( cutType ), | |
107 | + fNodeType ( nodeType ), | |
108 | + fPurity ( purity ), | |
109 | + fResponse ( response ){ | |
110 | + } | |
111 | + | |
112 | + virtual ~B2_011_BDT_5Node(); | |
113 | + | |
114 | + // test event if it descends the tree at this node to the right | |
115 | + virtual bool GoesRight( const std::vector<double>& inputValues ) const; | |
116 | + B2_011_BDT_5Node* GetRight( void ) {return fRight; }; | |
117 | + | |
118 | + // test event if it descends the tree at this node to the left | |
119 | + virtual bool GoesLeft ( const std::vector<double>& inputValues ) const; | |
120 | + B2_011_BDT_5Node* GetLeft( void ) { return fLeft; }; | |
121 | + | |
122 | + // return S/(S+B) (purity) at this node (from training) | |
123 | + | |
124 | + double GetPurity( void ) const { return fPurity; } | |
125 | + // return the node type | |
126 | + int GetNodeType( void ) const { return fNodeType; } | |
127 | + double GetResponse(void) const {return fResponse;} | |
128 | + | |
129 | +private: | |
130 | + | |
131 | + B2_011_BDT_5Node* fLeft; // pointer to the left daughter node | |
132 | + B2_011_BDT_5Node* fRight; // pointer to the right daughter node | |
133 | + int fSelector; // index of variable used in node selection (decision tree) | |
134 | + double fCutValue; // cut value applied on this node to discriminate bkg against sig | |
135 | + bool fCutType; // true: if event variable > cutValue ==> signal , false otherwise | |
136 | + int fNodeType; // Type of node: -1 == Bkg-leaf, 1 == Signal-leaf, 0 = internal | |
137 | + double fPurity; // Purity of node from training | |
138 | + double fResponse; // Regression response value of node | |
139 | +}; | |
140 | + | |
141 | +//_______________________________________________________________________ | |
142 | + B2_011_BDT_5Node::~B2_011_BDT_5Node() | |
143 | +{ | |
144 | + if (fLeft != NULL) delete fLeft; | |
145 | + if (fRight != NULL) delete fRight; | |
146 | +}; | |
147 | + | |
148 | +//_______________________________________________________________________ | |
149 | +bool B2_011_BDT_5Node::GoesRight( const std::vector<double>& inputValues ) const | |
150 | +{ | |
151 | + // test event if it descends the tree at this node to the right | |
152 | + bool result; | |
153 | + result = (inputValues[fSelector] > fCutValue ); | |
154 | + if (fCutType == true) return result; //the cuts are selecting Signal ; | |
155 | + else return !result; | |
156 | +} | |
157 | + | |
158 | +//_______________________________________________________________________ | |
159 | +bool B2_011_BDT_5Node::GoesLeft( const std::vector<double>& inputValues ) const | |
160 | +{ | |
161 | + // test event if it descends the tree at this node to the left | |
162 | + if (!this->GoesRight(inputValues)) return true; | |
163 | + else return false; | |
164 | +} | |
165 | + | |
166 | +#endif | |
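
Every tree that Initialize() constructs further down is spelled out as nested B2_011_NN(...) calls; the macro defined above expands to new B2_011_BDT_5Node, so the positional arguments follow the constructor signature (left, right, selector, cutValue, cutType, nodeType, purity, response). Annotated with those names (annotation only, the numbers are copied from the forest below), the first leaf of itree 0 and its parent node's own arguments read:

// signal leaf: no daughters, selector -1, nodeType +1, purity 0.881302
B2_011_NN(/*left*/ 0, /*right*/ 0, /*selector*/ -1, /*cutValue*/ 0.0733936,
          /*cutType*/ 1, /*nodeType*/ 1, /*purity*/ 0.881302, /*response*/ -99)

// its parent is an internal node (nodeType 0) cutting on variable 0 (magDiffP) at 0.154132:
// ..., /*selector*/ 0, /*cutValue*/ 0.154132, /*cutType*/ 1, /*nodeType*/ 0, /*purity*/ 0.713761, /*response*/ -99)

Selectors 0-4 index the training variables in the order magDiffP, chargeMult, phi, pTp1, pTp2, and GoesRight() compares the selected input against cutValue.
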
167 | + | |
168 | +#ifndef IClassifierReader__def | |
169 | +#define IClassifierReader__def | |
170 | + | |
171 | +class IClassifierReader { | |
172 | + | |
173 | + public: | |
174 | + | |
175 | + // constructor | |
176 | + IClassifierReader() : fStatusIsClean( true ) {} | |
177 | + virtual ~IClassifierReader() {} | |
178 | + | |
179 | + // return classifier response | |
180 | + virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0; | |
181 | + | |
182 | + // returns classifier status | |
183 | + bool IsStatusClean() const { return fStatusIsClean; } | |
184 | + | |
185 | + protected: | |
186 | + | |
187 | + bool fStatusIsClean; | |
188 | +}; | |
189 | + | |
190 | +#endif | |
191 | + | |
192 | +class ReadB2_011_BDT_5 : public IClassifierReader { | |
193 | + | |
194 | + public: | |
195 | + | |
196 | + // constructor | |
197 | + ReadB2_011_BDT_5( std::vector<std::string>& theInputVars ) | |
198 | + : IClassifierReader(), | |
199 | + fClassName( "ReadB2_011_BDT_5" ), | |
200 | + fNvars( 5 ), | |
201 | + fIsNormalised( false ) | |
202 | + { | |
203 | + // the training input variables | |
204 | + const char* inputVars[] = { "magDiffP", "chargeMult", "phi", "pTp1", "pTp2" }; | |
205 | + | |
206 | + // sanity checks | |
207 | + if (theInputVars.size() <= 0) { | |
208 | + std::cout << "Problem in class \"" << fClassName << "\": empty input vector" << std::endl; | |
209 | + fStatusIsClean = false; | |
210 | + } | |
211 | + | |
212 | + if (theInputVars.size() != fNvars) { | |
213 | + std::cout << "Problem in class \"" << fClassName << "\": mismatch in number of input values: " | |
214 | + << theInputVars.size() << " != " << fNvars << std::endl; | |
215 | + fStatusIsClean = false; | |
216 | + } | |
217 | + | |
218 | + // validate input variables | |
219 | + for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) { | |
220 | + if (theInputVars[ivar] != inputVars[ivar]) { | |
221 | + std::cout << "Problem in class \"" << fClassName << "\": mismatch in input variable names" << std::endl | |
222 | + << " for variable [" << ivar << "]: " << theInputVars[ivar].c_str() << " != " << inputVars[ivar] << std::endl; | |
223 | + fStatusIsClean = false; | |
224 | + } | |
225 | + } | |
226 | + | |
227 | + // initialize min and max vectors (for normalisation) | |
228 | + fVmin[0] = 0; | |
229 | + fVmax[0] = 0; | |
230 | + fVmin[1] = 0; | |
231 | + fVmax[1] = 0; | |
232 | + fVmin[2] = 0; | |
233 | + fVmax[2] = 0; | |
234 | + fVmin[3] = 0; | |
235 | + fVmax[3] = 0; | |
236 | + fVmin[4] = 0; | |
237 | + fVmax[4] = 0; | |
238 | + | |
239 | + // initialize input variable types | |
240 | + fType[0] = 'F'; | |
241 | + fType[1] = 'F'; | |
242 | + fType[2] = 'F'; | |
243 | + fType[3] = 'F'; | |
244 | + fType[4] = 'F'; | |
245 | + | |
246 | + // initialize constants | |
247 | + Initialize(); | |
248 | + | |
249 | + } | |
250 | + | |
251 | + // destructor | |
252 | + virtual ~ReadB2_011_BDT_5() { | |
253 | + Clear(); // method-specific | |
254 | + } | |
255 | + | |
256 | + // the classifier response | |
257 | + // "inputValues" is a vector of input values in the same order as the | |
258 | + // variables given to the constructor | |
259 | + double GetMvaValue( const std::vector<double>& inputValues ) const; | |
260 | + | |
261 | + private: | |
262 | + | |
263 | + // method-specific destructor | |
264 | + void Clear(); | |
265 | + | |
266 | + // common member variables | |
267 | + const char* fClassName; | |
268 | + | |
269 | + const size_t fNvars; | |
270 | + size_t GetNvar() const { return fNvars; } | |
271 | + char GetType( int ivar ) const { return fType[ivar]; } | |
272 | + | |
273 | + // normalisation of input variables | |
274 | + const bool fIsNormalised; | |
275 | + bool IsNormalised() const { return fIsNormalised; } | |
276 | + double fVmin[5]; | |
277 | + double fVmax[5]; | |
278 | + double NormVariable( double x, double xmin, double xmax ) const { | |
279 | + // normalise to output range: [-1, 1] | |
280 | + return 2*(x - xmin)/(xmax - xmin) - 1.0; | |
281 | + } | |
282 | + | |
283 | + // type of input variable: 'F' or 'I' | |
284 | + char fType[5]; | |
285 | + | |
286 | + // initialize internal variables | |
287 | + void Initialize(); | |
288 | + double GetMvaValue__( const std::vector<double>& inputValues ) const; | |
289 | + | |
290 | + // private members (method specific) | |
291 | + std::vector<B2_011_BDT_5Node*> fForest; // i.e. root nodes of decision trees | |
292 | + std::vector<double> fBoostWeights; // the weights applied in the individual boosts | |
293 | +}; | |
294 | + | |
295 | +double ReadB2_011_BDT_5::GetMvaValue__( const std::vector<double>& inputValues ) const | |
296 | +{ | |
297 | + double myMVA = 0; | |
298 | + double norm = 0; | |
299 | + for (unsigned int itree=0; itree<fForest.size(); itree++){ | |
300 | + B2_011_BDT_5Node *current = fForest[itree]; | |
301 | + while (current->GetNodeType() == 0) { //intermediate node | |
302 | + if (current->GoesRight(inputValues)) current=(B2_011_BDT_5Node*)current->GetRight(); | |
303 | + else current=(B2_011_BDT_5Node*)current->GetLeft(); | |
304 | + } | |
305 | + myMVA += fBoostWeights[itree] * current->GetNodeType(); | |
306 | + norm += fBoostWeights[itree]; | |
307 | + } | |
308 | + return myMVA /= norm; | |
309 | +}; | |
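
GetMvaValue__ walks each tree to a leaf and returns the boost-weight-weighted vote of the leaf node types (+1 signal, -1 background), so the raw response lies between -1 and +1. A worked example using the boost weights of itree 0 and itree 1 from Initialize() below, with the leaf outcomes assumed for illustration:

// tree 0 lands on a signal leaf (+1), tree 1 on a background leaf (-1)
double response = (0.505505 * (+1.0) + 0.481284 * (-1.0)) / (0.505505 + 0.481284);
// response ≈ +0.025, i.e. only marginally signal-like
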
310 | + | |
311 | +void ReadB2_011_BDT_5::Initialize() | |
312 | +{ | |
313 | + // itree = 0 | |
314 | + fBoostWeights.push_back(0.505504531943869); | |
315 | + fForest.push_back( | |
316 | +B2_011_NN( | |
317 | +B2_011_NN( | |
318 | +B2_011_NN( | |
319 | +0, | |
320 | +0, | |
321 | +-1, 0.0733936, 1, 1, 0.881302,-99) , | |
322 | +B2_011_NN( | |
323 | +B2_011_NN( | |
324 | +0, | |
325 | +0, | |
326 | +-1, 0, 1, 1, 0.85983,-99) , | |
327 | +B2_011_NN( | |
328 | +0, | |
329 | +0, | |
330 | +-1, 0, 1, -1, 0.492521,-99) , | |
331 | +2, 2.24141, 0, 0, 0.580143,-99) , | |
332 | +0, 0.154132, 1, 0, 0.713761,-99) , | |
333 | +B2_011_NN( | |
334 | +B2_011_NN( | |
335 | +0, | |
336 | +0, | |
337 | +-1, 0, 1, 1, 0.506231,-99) , | |
338 | +B2_011_NN( | |
339 | +0, | |
340 | +0, | |
341 | +-1, 0.647992, 1, -1, 0.224334,-99) , | |
342 | +2, 2.69191, 0, 0, 0.252646,-99) , | |
343 | +0, 0.359642, 1, 0, 0.50155,-99) ); | |
344 | + // itree = 1 | |
345 | + fBoostWeights.push_back(0.481284); | |
346 | + fForest.push_back( | |
347 | +B2_011_NN( | |
348 | +B2_011_NN( | |
349 | +0, | |
350 | +0, | |
351 | +-1, 0.11988, 1, 1, 0.7151,-99) , | |
352 | +B2_011_NN( | |
353 | +B2_011_NN( | |
354 | +B2_011_NN( | |
355 | +0, | |
356 | +0, | |
357 | +-1, 0, 1, 1, 0.614762,-99) , | |
358 | +B2_011_NN( | |
359 | +0, | |
360 | +0, | |
361 | +-1, 0, 1, -1, 0.305662,-99) , | |
362 | +2, 2.38648, 0, 0, 0.359264,-99) , | |
363 | +B2_011_NN( | |
364 | +0, | |
365 | +0, | |
366 | +-1, 2.53848, 0, -1, 0.226698,-99) , | |
367 | +0, 0.522339, 1, 0, 0.283109,-99) , | |
368 | +0, 0.359642, 1, 0, 0.518211,-99) ); | |
369 | + // itree = 2 | |
370 | + fBoostWeights.push_back(0.37938); | |
371 | + fForest.push_back( | |
372 | +B2_011_NN( | |
373 | +B2_011_NN( | |
374 | +0, | |
375 | +0, | |
376 | +-1, 0.0782737, 1, 1, 0.723921,-99) , | |
377 | +B2_011_NN( | |
378 | +B2_011_NN( | |
379 | +B2_011_NN( | |
380 | +0, | |
381 | +0, | |
382 | +-1, 0, 1, 1, 0.676693,-99) , | |
383 | +B2_011_NN( | |
384 | +0, | |
385 | +0, | |
386 | +-1, 0, 1, -1, 0.449839,-99) , | |
387 | +3, 0.240815, 1, 0, 0.58927,-99) , | |
388 | +B2_011_NN( | |
389 | +0, | |
390 | +0, | |
391 | +-1, 0.528532, 1, -1, 0.343758,-99) , | |
392 | +2, 2.6917, 0, 0, 0.372373,-99) , | |
393 | +0, 0.271229, 1, 0, 0.511755,-99) ); | |
394 | + // itree = 3 | |
395 | + fBoostWeights.push_back(0.315202); | |
396 | + fForest.push_back( | |
397 | +B2_011_NN( | |
398 | +B2_011_NN( | |
399 | +0, | |
400 | +0, | |
401 | +-1, 0.0777213, 1, 1, 0.685681,-99) , | |
402 | +B2_011_NN( | |
403 | +B2_011_NN( | |
404 | +B2_011_NN( | |
405 | +0, | |
406 | +0, | |
407 | +-1, 0, 1, 1, 0.703098,-99) , | |
408 | +B2_011_NN( | |
409 | +0, | |
410 | +0, | |
411 | +-1, 0, 1, -1, 0.425548,-99) , | |
412 | +2, 2.5379, 0, 0, 0.468738,-99) , | |
413 | +B2_011_NN( | |
414 | +0, | |
415 | +0, | |
416 | +-1, 0.287993, 1, -1, 0.318614,-99) , | |
417 | +0, 0.531125, 1, 0, 0.406181,-99) , | |
418 | +0, 0.272034, 1, 0, 0.517452,-99) ); | |
419 | + // itree = 4 | |
420 | + fBoostWeights.push_back(0.223758); | |
421 | + fForest.push_back( | |
422 | +B2_011_NN( | |
423 | +B2_011_NN( | |
424 | +B2_011_NN( | |
425 | +0, | |
426 | +0, | |
427 | +-1, 0, 1, 1, 0.86758,-99) , | |
428 | +B2_011_NN( | |
429 | +B2_011_NN( | |
430 | +0, | |
431 | +0, | |
432 | +-1, 0, 1, 1, 0.702847,-99) , | |
433 | +B2_011_NN( | |
434 | +0, | |
435 | +0, | |
436 | +-1, 0, 1, -1, 0.485237,-99) , | |
437 | +1, -0.904762, 0, 0, 0.614692,-99) , | |
438 | +0, 0.0770658, 1, 0, 0.724655,-99) , | |
439 | +B2_011_NN( | |
440 | +B2_011_NN( | |
441 | +0, | |
442 | +0, | |
443 | +-1, 0, 1, 1, 0.720866,-99) , | |
444 | +B2_011_NN( | |
445 | +0, | |
446 | +0, | |
447 | +-1, 0.522342, 1, -1, 0.439517,-99) , | |
448 | +2, 2.84032, 0, 0, 0.459453,-99) , | |
449 | +0, 0.179821, 1, 0, 0.522943,-99) ); | |
450 | + // itree = 5 | |
451 | + fBoostWeights.push_back(0.19133); | |
452 | + fForest.push_back( | |
453 | +B2_011_NN( | |
454 | +B2_011_NN( | |
455 | +0, | |
456 | +0, | |
457 | +-1, -0.904762, 0, 1, 0.769421,-99) , | |
458 | +B2_011_NN( | |
459 | +B2_011_NN( | |
460 | +0, | |
461 | +0, | |
462 | +-1, 2.59617, 0, 1, 0.543103,-99) , | |
463 | +B2_011_NN( | |
464 | +0, | |
465 | +0, | |
466 | +-1, 0.287993, 1, -1, 0.367797,-99) , | |
467 | +0, 0.55962, 1, 0, 0.498678,-99) , | |
468 | +2, 0.298947, 1, 0, 0.536993,-99) ); | |
469 | + // itree = 6 | |
470 | + fBoostWeights.push_back(0.186058); | |
471 | + fForest.push_back( | |
472 | +B2_011_NN( | |
473 | +B2_011_NN( | |
474 | +B2_011_NN( | |
475 | +0, | |
476 | +0, | |
477 | +-1, 0, 1, 1, 0.852023,-99) , | |
478 | +B2_011_NN( | |
479 | +B2_011_NN( | |
480 | +0, | |
481 | +0, | |
482 | +-1, 0, 1, 1, 0.670531,-99) , | |
483 | +B2_011_NN( | |
484 | +0, | |
485 | +0, | |
486 | +-1, 0, 1, -1, 0.486455,-99) , | |
487 | +1, -0.904762, 0, 0, 0.595024,-99) , | |
488 | +0, 0.0773979, 1, 0, 0.703871,-99) , | |
489 | +B2_011_NN( | |
490 | +B2_011_NN( | |
491 | +0, | |
492 | +0, | |
493 | +-1, 0, 1, 1, 0.700608,-99) , | |
494 | +B2_011_NN( | |
495 | +0, | |
496 | +0, | |
497 | +-1, -0.904762, 0, -1, 0.454893,-99) , | |
498 | +2, 2.84042, 0, 0, 0.471624,-99) , | |
499 | +0, 0.179422, 1, 0, 0.526124,-99) ); | |
500 | + // itree = 7 | |
501 | + fBoostWeights.push_back(0.186803); | |
502 | + fForest.push_back( | |
503 | +B2_011_NN( | |
504 | +B2_011_NN( | |
505 | +B2_011_NN( | |
506 | +0, | |
507 | +0, | |
508 | +-1, 0, 1, 1, 0.877752,-99) , | |
509 | +B2_011_NN( | |
510 | +0, | |
511 | +0, | |
512 | +-1, 0, 1, -1, 0.471862,-99) , | |
513 | +1, -0.904762, 0, 0, 0.752067,-99) , | |
514 | +B2_011_NN( | |
515 | +B2_011_NN( | |
516 | +0, | |
517 | +0, | |
518 | +-1, 2.19252, 0, 1, 0.562944,-99) , | |
519 | +B2_011_NN( | |
520 | +0, | |
521 | +0, | |
522 | +-1, 0.673055, 1, -1, 0.436703,-99) , | |
523 | +0, 0.36198, 1, 0, 0.497274,-99) , | |
524 | +2, 0.299643, 1, 0, 0.53333,-99) ); | |
525 | + // itree = 8 | |
526 | + fBoostWeights.push_back(0.189879); | |
527 | + fForest.push_back( | |
528 | +B2_011_NN( | |
529 | +B2_011_NN( | |
530 | +0, | |
531 | +0, | |
532 | +-1, -0.904762, 0, 1, 0.74081,-99) , | |
533 | +B2_011_NN( | |
534 | +B2_011_NN( | |
535 | +B2_011_NN( | |
536 | +0, | |
537 | +0, | |
538 | +-1, 0, 1, 1, 0.751201,-99) , | |
539 | +B2_011_NN( | |
540 | +0, | |
541 | +0, | |
542 | +-1, 0, 1, -1, 0.415872,-99) , | |
543 | +1, -0.904762, 1, 0, 0.651637,-99) , | |
544 | +B2_011_NN( | |
545 | +B2_011_NN( | |
546 | +0, | |
547 | +0, | |
548 | +-1, 0, 1, 1, 0.571369,-99) , | |
549 | +B2_011_NN( | |
550 | +0, | |
551 | +0, | |
552 | +-1, 0, 1, -1, 0.454385,-99) , | |
553 | +3, 0.0962126, 1, 0, 0.47882,-99) , | |
554 | +2, 2.59802, 0, 0, 0.501918,-99) , | |
555 | +2, 0.299014, 1, 0, 0.533865,-99) ); | |
556 | + // itree = 9 | |
557 | + fBoostWeights.push_back(0.165773); | |
558 | + fForest.push_back( | |
559 | +B2_011_NN( | |
560 | +B2_011_NN( | |
561 | +B2_011_NN( | |
562 | +0, | |
563 | +0, | |
564 | +-1, 0, 1, 1, 0.845938,-99) , | |
565 | +B2_011_NN( | |
566 | +0, | |
567 | +0, | |
568 | +-1, 0, 1, -1, 0.480988,-99) , | |
569 | +1, -0.904762, 0, 0, 0.724292,-99) , | |
570 | +B2_011_NN( | |
571 | +B2_011_NN( | |
572 | +B2_011_NN( | |
573 | +0, | |
574 | +0, | |
575 | +-1, 0, 1, 1, 0.728358,-99) , | |
576 | +B2_011_NN( | |
577 | +0, | |
578 | +0, | |
579 | +-1, 0, 1, -1, 0.370176,-99) , | |
580 | +0, 0.612225, 1, 0, 0.635305,-99) , | |
581 | +B2_011_NN( | |
582 | +B2_011_NN( | |
583 | +0, | |
584 | +0, | |
585 | +-1, 0, 1, 1, 0.570321,-99) , | |
586 | +B2_011_NN( | |
587 | +0, | |
588 | +0, | |
589 | +-1, 0, 1, -1, 0.472792,-99) , | |
590 | +4, 0.0982407, 1, 0, 0.493516,-99) , | |
591 | +2, 2.59801, 0, 0, 0.512146,-99) , | |
592 | +2, 0.298948, 1, 0, 0.540519,-99) ); | |
593 | + // itree = 10 | |
594 | + fBoostWeights.push_back(0.140565); | |
595 | + fForest.push_back( | |
596 | +B2_011_NN( | |
597 | +B2_011_NN( | |
598 | +0, | |
599 | +0, | |
600 | +-1, 0, 1, 1, 0.782157,-99) , | |
601 | +B2_011_NN( | |
602 | +B2_011_NN( | |
603 | +0, | |
604 | +0, | |
605 | +-1, 0, 1, 1, 0.702224,-99) , | |
606 | +B2_011_NN( | |
607 | +B2_011_NN( | |
608 | +0, | |
609 | +0, | |
610 | +-1, 0, 1, 1, 0.560924,-99) , | |
611 | +B2_011_NN( | |
612 | +0, | |
613 | +0, | |
614 | +-1, 0, 1, -1, 0.476627,-99) , | |
615 | +1, -0.904762, 0, 0, 0.521644,-99) , | |
616 | +2, 2.85426, 0, 0, 0.531617,-99) , | |
617 | +2, 0.149474, 1, 0, 0.550821,-99) ); | |
618 | + // itree = 11 | |
619 | + fBoostWeights.push_back(0.100242); | |
620 | + fForest.push_back( | |
621 | +B2_011_NN( | |
622 | +B2_011_NN( | |
623 | +0, | |
624 | +0, | |
625 | +-1, 0, 1, 1, 0.77912,-99) , | |
626 | +B2_011_NN( | |
627 | +B2_011_NN( | |
628 | +0, | |
629 | +0, | |
630 | +-1, 0, 1, 1, 0.685001,-99) , | |
631 | +B2_011_NN( | |
632 | +B2_011_NN( | |
633 | +0, | |
634 | +0, | |
635 | +-1, 0, 1, 1, 0.589972,-99) , | |
636 | +B2_011_NN( | |
637 | +0, | |
638 | +0, | |
639 | +-1, 0, 1, -1, 0.497896,-99) , | |
640 | +3, 0.0972131, 1, 0, 0.518485,-99) , | |
641 | +2, 2.85416, 0, 0, 0.527705,-99) , | |
642 | +2, 0.149469, 1, 0, 0.547106,-99) ); | |
643 | + // itree = 12 | |
644 | + fBoostWeights.push_back(0.120855); | |
645 | + fForest.push_back( | |
646 | +B2_011_NN( | |
647 | +B2_011_NN( | |
648 | +0, | |
649 | +0, | |
650 | +-1, -0.904762, 0, 1, 0.712326,-99) , | |
651 | +B2_011_NN( | |
652 | +B2_011_NN( | |
653 | +B2_011_NN( | |
654 | +0, | |
655 | +0, | |
656 | +-1, 0, 1, 1, 0.724668,-99) , | |
657 | +B2_011_NN( | |
658 | +0, | |
659 | +0, | |
660 | +-1, 0, 1, -1, 0.41775,-99) , | |
661 | +1, -0.904762, 1, 0, 0.627358,-99) , | |
662 | +B2_011_NN( | |
663 | +0, | |
664 | +0, | |
665 | +-1, 0.0972129, 1, 1, 0.516311,-99) , | |
666 | +2, 2.59638, 0, 0, 0.530633,-99) , | |
667 | +2, 0.298762, 1, 0, 0.554284,-99) ); | |
668 | + // itree = 13 | |
669 | + fBoostWeights.push_back(0.139772); | |
670 | + fForest.push_back( | |
671 | +B2_011_NN( | |
672 | +B2_011_NN( | |
673 | +B2_011_NN( | |
674 | +0, | |
675 | +0, | |
676 | +-1, 0, 1, 1, 0.816806,-99) , | |
677 | +B2_011_NN( | |
678 | +0, | |
679 | +0, | |
680 | +-1, 0, 1, -1, 0.47441,-99) , | |
681 | +1, -0.904762, 0, 0, 0.694885,-99) , | |
682 | +B2_011_NN( | |
683 | +B2_011_NN( | |
684 | +B2_011_NN( | |
685 | +0, | |
686 | +0, | |
687 | +-1, 0, 1, 1, 0.695339,-99) , | |
688 | +B2_011_NN( | |
689 | +0, | |
690 | +0, | |
691 | +-1, 0, 1, -1, 0.412249,-99) , | |
692 | +0, 0.628634, 1, 0, 0.620659,-99) , | |
693 | +B2_011_NN( | |
694 | +B2_011_NN( | |
695 | +0, | |
696 | +0, | |
697 | +-1, 0, 1, 1, 0.527662,-99) , | |
698 | +B2_011_NN( | |
699 | +0, | |
700 | +0, | |
701 | +-1, 0, 1, -1, 0.461793,-99) , | |
702 | +4, 0.191399, 0, 0, 0.500767,-99) , | |
703 | +2, 2.59799, 0, 0, 0.515972,-99) , | |
704 | +2, 0.298947, 1, 0, 0.538167,-99) ); | |
705 | + // itree = 14 | |
706 | + fBoostWeights.push_back(0.116839); | |
707 | + fForest.push_back( | |
708 | +B2_011_NN( | |
709 | +B2_011_NN( | |
710 | +0, | |
711 | +0, | |
712 | +-1, -0.904762, 0, 1, 0.69294,-99) , | |
713 | +B2_011_NN( | |
714 | +B2_011_NN( | |
715 | +B2_011_NN( | |
716 | +0, | |
717 | +0, | |
718 | +-1, 0, 1, 1, 0.687185,-99) , | |
719 | +B2_011_NN( | |
720 | +0, | |
721 | +0, | |
722 | +-1, 0, 1, -1, 0.422279,-99) , | |
723 | +1, -0.904762, 1, 0, 0.599848,-99) , | |
724 | +B2_011_NN( | |
725 | +B2_011_NN( | |
726 | +0, | |
727 | +0, | |
728 | +-1, 0, 1, 1, 0.551762,-99) , | |
729 | +B2_011_NN( | |
730 | +0, | |
731 | +0, | |
732 | +-1, 0, 1, -1, 0.484408,-99) , | |
733 | +4, 0.0978127, 1, 0, 0.498359,-99) , | |
734 | +2, 2.598, 0, 0, 0.510878,-99) , | |
735 | +2, 0.298948, 1, 0, 0.533662,-99) ); | |
736 | + // itree = 15 | |
737 | + fBoostWeights.push_back(0.0873668); | |
738 | + fForest.push_back( | |
739 | +B2_011_NN( | |
740 | +B2_011_NN( | |
741 | +0, | |
742 | +0, | |
743 | +-1, 0, 1, 1, 0.729829,-99) , | |
744 | +B2_011_NN( | |
745 | +B2_011_NN( | |
746 | +B2_011_NN( | |
747 | +0, | |
748 | +0, | |
749 | +-1, 0, 1, 1, 0.694619,-99) , | |
750 | +B2_011_NN( | |
751 | +0, | |
752 | +0, | |
753 | +-1, 0, 1, -1, 0.447516,-99) , | |
754 | +1, -0.904762, 1, 0, 0.613686,-99) , | |
755 | +B2_011_NN( | |
756 | +0, | |
757 | +0, | |
758 | +-1, 0.379979, 1, 1, 0.511932,-99) , | |
759 | +2, 2.56953, 0, 0, 0.525021,-99) , | |
760 | +2, 0.149474, 1, 0, 0.539462,-99) ); | |
761 | + // itree = 16 | |
762 | + fBoostWeights.push_back(0.104279); | |
763 | + fForest.push_back( | |
764 | +B2_011_NN( | |
765 | +B2_011_NN( | |
766 | +0, | |
767 | +0, | |
768 | +-1, 0, 1, 1, 0.733156,-99) , | |
769 | +B2_011_NN( | |
770 | +B2_011_NN( | |
771 | +B2_011_NN( | |
772 | +0, | |
773 | +0, | |
774 | +-1, 0, 1, 1, 0.685023,-99) , | |
775 | +B2_011_NN( | |
776 | +0, | |
777 | +0, | |
778 | +-1, 0, 1, -1, 0.429265,-99) , | |
779 | +0, 0.57676, 1, 0, 0.604054,-99) , | |
780 | +B2_011_NN( | |
781 | +B2_011_NN( | |
782 | +0, | |
783 | +0, | |
784 | +-1, 0, 1, 1, 0.567046,-99) , | |
785 | +B2_011_NN( | |
786 | +0, | |
787 | +0, | |
788 | +-1, 0, 1, -1, 0.485937,-99) , | |
789 | +2, 0.495818, 1, 0, 0.498243,-99) , | |
790 | +2, 2.56966, 0, 0, 0.511451,-99) , | |
791 | +2, 0.150164, 1, 0, 0.527383,-99) ); | |
792 | + // itree = 17 | |
793 | + fBoostWeights.push_back(0.0945523); | |
794 | + fForest.push_back( | |
795 | +B2_011_NN( | |
796 | +B2_011_NN( | |
797 | +0, | |
798 | +0, | |
799 | +-1, 0, 1, 1, 0.715071,-99) , | |
800 | +B2_011_NN( | |
801 | +B2_011_NN( | |
802 | +0, | |
803 | +0, | |
804 | +-1, 2.56953, 0, 1, 0.535031,-99) , | |
805 | +B2_011_NN( | |
806 | +0, | |
807 | +0, | |
808 | +-1, 1.85926, 1, -1, 0.466141,-99) , | |
809 | +0, 0.549401, 1, 0, 0.517784,-99) , | |
810 | +2, 0.149474, 1, 0, 0.531347,-99) ); | |
811 | + // itree = 18 | |
812 | + fBoostWeights.push_back(0.087061); | |
813 | + fForest.push_back( | |
814 | +B2_011_NN( | |
815 | +B2_011_NN( | |
816 | +0, | |
817 | +0, | |
818 | +-1, 0, 1, 1, 0.712079,-99) , | |
819 | +B2_011_NN( | |
820 | +B2_011_NN( | |
821 | +B2_011_NN( | |
822 | +0, | |
823 | +0, | |
824 | +-1, 0, 1, 1, 0.665596,-99) , | |
825 | +B2_011_NN( | |
826 | +0, | |
827 | +0, | |
828 | +-1, 0, 1, -1, 0.461418,-99) , | |
829 | +1, -0.904762, 1, 0, 0.595953,-99) , | |
830 | +B2_011_NN( | |
831 | +B2_011_NN( | |
832 | +0, | |
833 | +0, | |
834 | +-1, 0, 1, 1, 0.553867,-99) , | |
835 | +B2_011_NN( | |
836 | +0, | |
837 | +0, | |
838 | +-1, 0, 1, -1, 0.488641,-99) , | |
839 | +3, 0.0724606, 1, 0, 0.497798,-99) , | |
840 | +2, 2.56962, 0, 0, 0.510077,-99) , | |
841 | +2, 0.149477, 1, 0, 0.524498,-99) ); | |
842 | + // itree = 19 | |
843 | + fBoostWeights.push_back(0.119934); | |
844 | + fForest.push_back( | |
845 | +B2_011_NN( | |
846 | +B2_011_NN( | |
847 | +B2_011_NN( | |
848 | +0, | |
849 | +0, | |
850 | +-1, 0, 1, 1, 0.766155,-99) , | |
851 | +B2_011_NN( | |
852 | +B2_011_NN( | |
853 | +0, | |
854 | +0, | |
855 | +-1, 0, 1, 1, 0.58663,-99) , | |
856 | +B2_011_NN( | |
857 | +0, | |
858 | +0, | |
859 | +-1, 0, 1, -1, 0.477464,-99) , | |
860 | +3, 0.104083, 1, 0, 0.52766,-99) , | |
861 | +0, 0.0765313, 1, 0, 0.618126,-99) , | |
862 | +B2_011_NN( | |
863 | +B2_011_NN( | |
864 | +B2_011_NN( | |
865 | +0, | |
866 | +0, | |
867 | +-1, 0, 1, 1, 0.650403,-99) , | |
868 | +B2_011_NN( | |
869 | +0, | |
870 | +0, | |
871 | +-1, 0, 1, -1, 0.452755,-99) , | |
872 | +1, -0.904762, 1, 0, 0.582645,-99) , | |
873 | +B2_011_NN( | |
874 | +B2_011_NN( | |
875 | +0, | |
876 | +0, | |
877 | +-1, 0, 1, 1, 0.513415,-99) , | |
878 | +B2_011_NN( | |
879 | +0, | |
880 | +0, | |
881 | +-1, 0, 1, -1, 0.445464,-99) , | |
882 | +3, 0.167469, 0, 0, 0.493526,-99) , | |
883 | +2, 2.54129, 0, 0, 0.507013,-99) , | |
884 | +0, 0.178579, 1, 0, 0.53139,-99) ); | |
885 | + // itree = 20 | |
886 | + fBoostWeights.push_back(0.0962438); | |
887 | + fForest.push_back( | |
888 | +B2_011_NN( | |
889 | +B2_011_NN( | |
890 | +0, | |
891 | +0, | |
892 | +-1, 0, 1, 1, 0.694749,-99) , | |
893 | +B2_011_NN( | |
894 | +B2_011_NN( | |
895 | +0, | |
896 | +0, | |
897 | +-1, 2.28345, 0, 1, 0.535757,-99) , | |
898 | +B2_011_NN( | |
899 | +0, | |
900 | +0, | |
901 | +-1, 0.191986, 0, -1, 0.459913,-99) , | |
902 | +3, 0.28623, 1, 0, 0.513034,-99) , | |
903 | +2, 0.149547, 1, 0, 0.525617,-99) ); | |
904 | + // itree = 21 | |
905 | + fBoostWeights.push_back(0.0777192); | |
906 | + fForest.push_back( | |
907 | +B2_011_NN( | |
908 | +B2_011_NN( | |
909 | +0, | |
910 | +0, | |
911 | +-1, 0, 1, 1, 0.680271,-99) , | |
912 | +B2_011_NN( | |
913 | +B2_011_NN( | |
914 | +B2_011_NN( | |
915 | +0, | |
916 | +0, | |
917 | +-1, 0, 1, 1, 0.647709,-99) , | |
918 | +B2_011_NN( | |
919 | +0, | |
920 | +0, | |
921 | +-1, 0, 1, -1, 0.484686,-99) , | |
922 | +1, -0.904762, 1, 0, 0.591853,-99) , | |
923 | +B2_011_NN( | |
924 | +B2_011_NN( | |
925 | +0, | |
926 | +0, | |
927 | +-1, 0, 1, 1, 0.565534,-99) , | |
928 | +B2_011_NN( | |
929 | +0, | |
930 | +0, | |
931 | +-1, 0, 1, -1, 0.490448,-99) , | |
932 | +3, 0.0724604, 1, 0, 0.501693,-99) , | |
933 | +2, 2.56961, 0, 0, 0.512985,-99) , | |
934 | +2, 0.149547, 1, 0, 0.524309,-99) ); | |
935 | + // itree = 22 | |
936 | + fBoostWeights.push_back(0.087121); | |
937 | + fForest.push_back( | |
938 | +B2_011_NN( | |
939 | +B2_011_NN( | |