165 <<
" kNN = \n" <<
fnkNN 170 <<
" Trim = \n" <<
fTrim 218 for (kNN::EventVec::const_iterator event =
fEvent.begin();
event !=
fEvent.end(); ++event) {
236 Log() <<
kINFO <<
"Input events are normalized - setting ScaleFrac to 0" <<
Endl;
245 Log() <<
kFATAL <<
"MethodKNN::Train() - mismatched or wrong number of event variables" <<
Endl;
275 kNN::Event event_knn(vvec, weight, event_type);
277 fEvent.push_back(event_knn);
306 for (
Int_t ivar = 0; ivar < nvar; ++ivar) {
317 if (rlist.size() != knn + 2) {
327 Bool_t use_gaus =
false, use_poln =
false;
329 if (
fKernel ==
"Gaus") use_gaus =
true;
330 else if (
fKernel ==
"Poln") use_poln =
true;
340 if (!(kradius > 0.0)) {
351 std::vector<Double_t> rms_vec;
355 if (rms_vec.empty() || rms_vec.size() != event_knn.
GetNVar()) {
362 Double_t weight_all = 0, weight_sig = 0, weight_bac = 0;
364 for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit) {
371 if (lit->second < 0.0) {
372 Log() <<
kFATAL <<
"A neighbor has negative distance to query event" <<
Endl;
374 else if (!(lit->second > 0.0)) {
375 Log() <<
kVERBOSE <<
"A neighbor has zero distance to query event" <<
Endl;
386 if (node.
GetEvent().GetType() == 1) {
390 else if (node.
GetEvent().GetType() == 2) {
395 Log() <<
kFATAL <<
"Unknown type for training event" <<
Endl;
401 if (count_all >= knn) {
407 if (!(count_all > 0)) {
408 Log() <<
kFATAL <<
"Size kNN result list is not positive" <<
Endl;
413 if (count_all < knn) {
414 Log() <<
kDEBUG <<
"count_all and kNN have different size: " << count_all <<
" < " << knn <<
Endl;
418 if (!(weight_all > 0.0)) {
419 Log() <<
kFATAL <<
"kNN result total weight is not positive" <<
Endl;
423 return weight_sig/weight_all;
445 std::vector<float> reg_vec;
449 for (
Int_t ivar = 0; ivar < nvar; ++ivar) {
460 if (rlist.size() != knn + 2) {
469 for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit) {
476 if (reg_vec.empty()) {
480 for(
UInt_t ivar = 0; ivar < tvec.size(); ++ivar) {
481 if (
fUseWeight) reg_vec[ivar] += tvec[ivar]*weight;
482 else reg_vec[ivar] += tvec[ivar];
491 if (count_all == knn) {
497 if (!(weight_all > 0.0)) {
498 Log() <<
kFATAL <<
"Total weight sum is not positive: " << weight_all <<
Endl;
502 for (
UInt_t ivar = 0; ivar < reg_vec.size(); ++ivar) {
503 reg_vec[ivar] /= weight_all;
529 for (kNN::EventVec::const_iterator event =
fEvent.begin();
event !=
fEvent.end(); ++event) {
531 std::stringstream s(
"");
533 for (
UInt_t ivar = 0; ivar <
event->GetNVar(); ++ivar) {
534 if (ivar>0) s <<
" ";
535 s << std::scientific <<
event->GetVar(ivar);
538 for (
UInt_t itgt = 0; itgt <
event->GetNTgt(); ++itgt) {
539 s <<
" " << std::scientific <<
event->GetTgt(itgt);
552 UInt_t nvar = 0, ntgt = 0;
567 std::stringstream s(
gTools().GetContent(ch) );
569 for(
UInt_t ivar=0; ivar<nvar; ivar++)
572 for(
UInt_t itgt=0; itgt<ntgt; itgt++)
577 kNN::Event event_knn(vvec, evtWeight, evtType, tvec);
578 fEvent.push_back(event_knn);
590 Log() <<
kINFO <<
"Starting ReadWeightsFromStream(std::istream& is) function..." <<
Endl;
601 std::getline(is, line);
603 if (line.empty() || line.find(
"#") != std::string::npos) {
608 std::string::size_type pos=0;
609 while( (pos=line.find(
',',pos)) != std::string::npos ) { count++; pos++; }
614 if (count < 3 || nvar != count - 2) {
625 std::string::size_type prev = 0;
627 for (std::string::size_type ipos = 0; ipos < line.size(); ++ipos) {
628 if (line[ipos] !=
',' && ipos + 1 != line.size()) {
632 if (!(ipos > prev)) {
636 std::string vstring = line.substr(prev, ipos - prev);
637 if (ipos + 1 == line.size()) {
638 vstring = line.substr(prev, ipos - prev + 1);
641 if (vstring.empty()) {
648 else if (vcount == 1) {
649 type = std::atoi(vstring.c_str());
651 else if (vcount == 2) {
652 weight = std::atof(vstring.c_str());
654 else if (vcount - 3 < vvec.size()) {
655 vvec[vcount - 3] = std::atof(vstring.c_str());
679 Log() <<
kINFO <<
"Starting WriteWeightsToStream(TFile &rf) function..." <<
Endl;
689 tree->
Branch(
"event",
"TMVA::kNN::Event", &event);
692 for (kNN::EventVec::const_iterator it =
fEvent.begin(); it !=
fEvent.end(); ++it) {
694 size += tree->
Fill();
704 <<
" events to ROOT file" <<
Endl;
715 Log() <<
kINFO <<
"Starting ReadWeightsFromStream(TFile &rf) function..." <<
Endl;
735 for (
Int_t i = 0; i < nevent; ++i) {
744 <<
" events from ROOT file" <<
Endl;
757 fout <<
" // not implemented for class: \"" << className <<
"\"" << std::endl;
758 fout <<
"};" << std::endl;
772 Log() <<
"The k-nearest neighbor (k-NN) algorithm is a multi-dimensional classification" <<
Endl 773 <<
"and regression algorithm. Similarly to other TMVA algorithms, k-NN uses a set of" <<
Endl 774 <<
"training events for which a classification category/regression target is known. " <<
Endl 775 <<
"The k-NN method compares a test event to all training events using a distance " <<
Endl 776 <<
"function, which is an Euclidean distance in a space defined by the input variables. "<<
Endl 777 <<
"The k-NN method, as implemented in TMVA, uses a kd-tree algorithm to perform a" <<
Endl 778 <<
"quick search for the k events with shortest distance to the test event. The method" <<
Endl 779 <<
"returns a fraction of signal events among the k neighbors. It is recommended" <<
Endl 780 <<
"that a histogram which stores the k-NN decision variable is binned with k+1 bins" <<
Endl 781 <<
"between 0 and 1." <<
Endl;
784 Log() <<
gTools().
Color(
"bold") <<
"--- Performance tuning via configuration options: " 787 Log() <<
"The k-NN method estimates a density of signal and background events in a "<< Endl
788 <<
"neighborhood around the test event. The method assumes that the density of the " << Endl
789 <<
"signal and background events is uniform and constant within the neighborhood. " << Endl
790 <<
"k is an adjustable parameter and it determines an average size of the " << Endl
791 <<
"neighborhood. Small k values (less than 10) are sensitive to statistical " << Endl
792 <<
"fluctuations and large (greater than 100) values might not sufficiently capture " << Endl
793 <<
"local differences between events in the training set. The speed of the k-NN" << Endl
794 <<
"method also increases with larger values of k. " <<
Endl;
796 Log() <<
"The k-NN method assigns equal weight to all input variables. Different scales " << Endl
797 <<
"among the input variables is compensated using ScaleFrac parameter: the input " << Endl
798 <<
"variables are scaled so that the widths for central ScaleFrac*100% events are " << Endl
799 <<
"equal among all the input variables." <<
Endl;
802 Log() <<
gTools().
Color(
"bold") <<
"--- Additional configuration options: " 805 Log() <<
"The method inclues an option to use a Gaussian kernel to smooth out the k-NN" << Endl
806 <<
"response. The kernel re-weights events using a distance to the test event." <<
Endl;
816 if (!(avalue < 1.0)) {
820 const Double_t prod = 1.0 - avalue * avalue * avalue;
822 return (prod * prod * prod);
829 const kNN::Event &event,
const std::vector<Double_t> &svec)
const 831 if (event_knn.
GetNVar() !=
event.GetNVar() || event_knn.
GetNVar() != svec.size()) {
832 Log() <<
kFATAL <<
"Mismatched vectors in Gaussian kernel function" <<
Endl;
839 double sum_exp = 0.0;
841 for(
unsigned int ivar = 0; ivar < event_knn.
GetNVar(); ++ivar) {
843 const Double_t diff_ =
event.GetVar(ivar) - event_knn.
GetVar(ivar);
845 if (!(sigm_ > 0.0)) {
850 sum_exp += diff_*diff_/(2.0*sigm_*sigm_);
872 for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit)
874 if (!(lit->second > 0.0))
continue;
876 if (kradius < lit->second || kradius < 0.0) kradius = lit->second;
879 if (kcount >= knn)
break;
892 std::vector<Double_t> rvec;
896 for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit)
898 if (!(lit->second > 0.0))
continue;
904 rvec.insert(rvec.end(), event_.
GetNVar(), 0.0);
906 else if (rvec.size() != event_.
GetNVar()) {
907 Log() <<
kFATAL <<
"Wrong number of variables, should never happen!" <<
Endl;
912 for(
unsigned int ivar = 0; ivar < event_.
GetNVar(); ++ivar) {
914 rvec[ivar] += diff_*diff_;
918 if (kcount >= knn)
break;
927 for(
unsigned int ivar = 0; ivar < rvec.size(); ++ivar) {
928 if (!(rvec[ivar] > 0.0)) {
946 for (kNN::List::const_iterator lit = rlist.begin(); lit != rlist.end(); ++lit) {
952 if (node.
GetEvent().GetType() == 1) {
953 sig_vec.push_back(tvec);
955 else if (node.
GetEvent().GetType() == 2) {
956 bac_vec.push_back(tvec);
959 Log() <<
kFATAL <<
"Unknown type for training event" <<
Endl;
void ProcessOptions()
process the options specified by the user
void AddWeightsXMLTo(void *parent) const
write weights to XML
MsgLogger & Endl(MsgLogger &ml)
void MakeClassSpecific(std::ostream &, const TString &) const
write specific classifier response
void DeclareOptions()
MethodKNN options.
virtual Int_t Fill()
Fill all branches.
const List & GetkNNList() const
OptionBase * DeclareOptionRef(T &ref, const TString &name, const TString &desc="")
A ROOT file is a suite of consecutive data records (TKey instances) with a well defined format...
void Train(void)
kNN training
virtual TObject * Get(const char *namecycle)
Return pointer to object identified by namecycle.
void MakeKNN(void)
create kNN
virtual Int_t GetEntry(Long64_t entry=0, Int_t getall=0)
Read all branches of entry and return total number of bytes read.
Bool_t Find(Event event, UInt_t nfind=100, const std::string &option="count") const
find in tree; if the tree has been filled then search for the nfind closest events if metric (fVarScale map) is...
VarType GetVar(UInt_t i) const
void DeclareCompatibilityOptions()
options that are used ONLY for the READER to ensure backward compatibility
virtual Int_t WriteTObject(const TObject *obj, const char *name=0, Option_t *option="", Int_t bufsize=0)
Write object obj to this directory.
void ReadWeightsFromStream(std::istream &istr)
read the weights
const std::vector< Double_t > getRMS(const kNN::List &rlist, const kNN::Event &event_knn) const
Get RMS vector of the variable differences among the k-nearest neighbors.
MethodKNN(const TString &jobName, const TString &methodTitle, DataSetInfo &theData, const TString &theOption="KNN", TDirectory *theTargetDir=NULL)
standard constructor
void Init(void)
Initialization.
virtual Int_t SetBranchAddress(const char *bname, void *add, TBranch **ptr=0)
Change branch address, dealing with clone trees properly.
Double_t GetWeight() const
const Event * GetEvent() const
Float_t GetProb(const std::vector< Float_t > &x, Int_t k)
Signal probability with Gaussian approximation.
static Vc_ALWAYS_INLINE Vector< T > abs(const Vector< T > &x)
Double_t GausKernel(const kNN::Event &event_knn, const kNN::Event &event, const std::vector< Double_t > &svec) const
Gaussian kernel.
DataSetInfo & DataInfo() const
LDA fLDA
(untouched) events used for learning
void SetTargets(const VarVec &tvec)
Double_t GetWeight() const
return the event weight - depending on whether the flag IgnoreNegWeightsInTraining is or not...
Bool_t Fill(const UShort_t odepth, UInt_t ifrac, const std::string &option="")
fill the tree
Double_t PolnKernel(Double_t value) const
polynomial kernel
void WriteWeightsToStream(TFile &rf) const
save weights to ROOT file
std::vector< Float_t > & GetTargets()
UInt_t GetNEvents() const
temporary event used when testing on a different DataSet than its own one
Double_t GetMvaValue(Double_t *err=0, Double_t *errUpper=0)
Compute classifier response.
virtual ~MethodKNN(void)
destructor
Int_t fTreeOptDepth
Experimental feature for local knn analysis.
void Initialize(const LDAEvents &inputSignal, const LDAEvents &inputBackground)
Create LDA matrix using local events found by knn method.
void Add(const Event &event)
add an event to tree
const Ranking * CreateRanking()
no ranking available
void GetHelpMessage() const
get help message text
UInt_t GetNVariables() const
Float_t GetValue(UInt_t ivar) const
return value of i'th variable
Bool_t IgnoreEventsWithNegWeightsInTraining() const
void ReadWeightsFromXML(void *wghtnode)
virtual void SetDirectory(TDirectory *dir)
Change the tree's directory.
Describe directory structure in memory.
Bool_t IsNormalised() const
virtual Long64_t GetEntries() const
virtual Bool_t HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets)
FDA can handle classification with 2 classes and regression with one regression-target.
const VarVec & GetVars() const
virtual Int_t Branch(TCollection *list, Int_t bufsize=32000, Int_t splitlevel=99, const char *name="")
Create one branch for each element in the collection.
#define REGISTER_METHOD(CLASS)
for example
Abstract ClassifierFactory template that handles arbitrary types.
virtual void DeclareCompatibilityOptions()
options that are used ONLY for the READER to ensure backward compatibility they are hence without any...
std::vector< std::vector< Float_t > > LDAEvents
Double_t getKernelRadius(const kNN::List &rlist) const
Get polynomial kernel radius.
std::vector< Float_t > * fRegressionReturnVal
A TTree object has a header with a name and a title.
Double_t Sqrt(Double_t x)
const std::vector< Float_t > & GetRegressionValues()
Return vector of averages for target values of k-nearest neighbors.
std::vector< VarType > VarVec
double getLDAValue(const kNN::List &rlist, const kNN::Event &event_knn)
Int_t fnkNN
module where all work is done
void NoErrorCalc(Double_t *const err, Double_t *const errUpper)
const T & GetEvent() const