source: branches/MetisMQI/src/main/java/weka/classifiers/trees/FT.java

Last change on this file was r29, checked in by gnappo, 14 years ago

Tagged the version for the demo and added a branch.

File size: 23.7 KB
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    FT.java
 *    Copyright (C) 2007 University of Porto, Porto, Portugal
 *
 */

package weka.classifiers.trees;

import weka.classifiers.Classifier;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.trees.ft.FTInnerNode;
import weka.classifiers.trees.ft.FTLeavesNode;
import weka.classifiers.trees.ft.FTNode;
import weka.classifiers.trees.ft.FTtree;
import weka.core.AdditionalMeasureProducer;
import weka.core.Capabilities;
import weka.core.Drawable;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.RevisionUtils;
import weka.core.SelectedTag;
import weka.core.Tag;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.filters.Filter;
import weka.filters.supervised.attribute.NominalToBinary;
import weka.filters.unsupervised.attribute.ReplaceMissingValues;

import java.util.Enumeration;
import java.util.Vector;

/**
 <!-- globalinfo-start -->
 * Classifier for building 'Functional trees', which are classification trees that can have logistic regression functions at the inner nodes and/or leaves. The algorithm can deal with binary and multi-class target variables, numeric and nominal attributes and missing values.<br/>
 * <br/>
 * For more information see: <br/>
 * <br/>
 * Joao Gama (2004). Functional Trees.<br/>
 * <br/>
 * Niels Landwehr, Mark Hall, Eibe Frank (2005). Logistic Model Trees.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;article{Gama2004,
 *    author = {Joao Gama},
 *    journal = {Machine Learning},
 *    number = {3},
 *    pages = {219-250},
 *    title = {Functional Trees},
 *    volume = {55},
 *    year = {2004}
 * }
 *
 * &#64;article{Landwehr2005,
 *    author = {Niels Landwehr and Mark Hall and Eibe Frank},
 *    journal = {Machine Learning},
 *    number = {1-2},
 *    pages = {161-205},
 *    title = {Logistic Model Trees},
 *    volume = {59},
 *    year = {2005}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -B
 *  Binary splits (convert nominal attributes to binary ones)</pre>
 *
 * <pre> -P
 *  Use error on probabilities instead of misclassification error for stopping criterion of LogitBoost.</pre>
 *
 * <pre> -I &lt;numIterations&gt;
 *  Set fixed number of iterations for LogitBoost (instead of using cross-validation)</pre>
 *
 * <pre> -F &lt;modelType&gt;
 *  Set the Functional Tree type to generate: 0 for FT, 1 for FTLeaves and 2 for FTInner</pre>
 *
 * <pre> -M &lt;numInstances&gt;
 *  Set minimum number of instances at which a node can be split (default 15)</pre>
 *
 * <pre> -W &lt;beta&gt;
 *  Set beta for weight trimming for LogitBoost. Set to 0 (default) for no weight trimming.</pre>
 *
 * <pre> -A
 *  The AIC is used to choose the best iteration.</pre>
 *
 <!-- options-end -->
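 *
 * Example of programmatic use (an illustrative sketch, not part of the
 * original source; <code>data</code> is assumed to be an already-loaded
 * weka.core.Instances object with its class index set):
 * <pre>
 * FT ft = new FT();
 * ft.setModelType(new SelectedTag(FT.MODEL_FTLeaves, FT.TAGS_MODEL));
 * ft.setMinNumInstances(30);
 * ft.buildClassifier(data);
 * double[] dist = ft.distributionForInstance(data.instance(0));
 * </pre>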
 *
 * @author João Gama
 * @author Carlos Ferreira
 * @version $Revision: 5928 $
 */
public class FT
  extends AbstractClassifier
  implements OptionHandler, AdditionalMeasureProducer, Drawable,
             TechnicalInformationHandler {

  /** for serialization */
  static final long serialVersionUID = -1113212459618105000L;

  /** Filter to replace missing values */
  protected ReplaceMissingValues m_replaceMissing;

  /** Filter to convert nominal attributes to binary ones */
  protected NominalToBinary m_nominalToBinary;

  /** root of the functional tree */
  protected FTtree m_tree;

  /** convert nominal attributes to binary? */
  protected boolean m_convertNominal;

  /** use error on probabilities instead of misclassification error for the stopping criterion of LogitBoost? */
  protected boolean m_errorOnProbabilities;

  /** minimum number of instances at which a node is considered for splitting */
  protected int m_minNumInstances;

  /** if non-zero, use a fixed number of iterations for LogitBoost */
  protected int m_numBoostingIterations;

  /** model type, value: 0 is FT, 1 is FTLeaves, 2 is FTInner */
  protected int m_modelType;

  /** Threshold for trimming weights. Instances with a weight lower than this (as a percentage
   * of total weights) are not included in the regression fit.
   */
  protected double m_weightTrimBeta;

  /** If true, the AIC is used to choose the best LogitBoost iteration */
  protected boolean m_useAIC;

  /** model types */
  public static final int MODEL_FT = 0;
  public static final int MODEL_FTLeaves = 1;
  public static final int MODEL_FTInner = 2;

  /** possible model types. */
  public static final Tag[] TAGS_MODEL = {
    new Tag(MODEL_FT, "FT"),
    new Tag(MODEL_FTLeaves, "FTLeaves"),
    new Tag(MODEL_FTInner, "FTInner")
  };

  /**
   * Creates an instance of FT with standard options.
   */
  public FT() {
    m_numBoostingIterations = 15;
    m_minNumInstances = 15;
    m_weightTrimBeta = 0;
    m_useAIC = false;
    m_modelType = 0;
  }

  /**
   * Returns default capabilities of the classifier.
   *
   * @return      the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result = super.getCapabilities();
    result.disableAll();

    // attributes
    result.enable(Capability.NOMINAL_ATTRIBUTES);
    result.enable(Capability.NUMERIC_ATTRIBUTES);
    result.enable(Capability.DATE_ATTRIBUTES);
    result.enable(Capability.MISSING_VALUES);

    // class
    result.enable(Capability.NOMINAL_CLASS);
    result.enable(Capability.MISSING_CLASS_VALUES);

    return result;
  }

  /**
   * Builds the classifier.
   *
   * @param data the data to train with
   * @throws Exception if classifier can't be built successfully
   */
  public void buildClassifier(Instances data) throws Exception {

    // can classifier handle the data?
    getCapabilities().testWithFail(data);

    // remove instances with missing class
    Instances filteredData = new Instances(data);
    filteredData.deleteWithMissingClass();

    // replace missing values
    m_replaceMissing = new ReplaceMissingValues();
    m_replaceMissing.setInputFormat(filteredData);
    filteredData = Filter.useFilter(filteredData, m_replaceMissing);

    // possibly convert nominal attributes globally
    if (m_convertNominal) {
      m_nominalToBinary = new NominalToBinary();
      m_nominalToBinary.setInputFormat(filteredData);
      filteredData = Filter.useFilter(filteredData, m_nominalToBinary);
    }

    // create the tree root for the selected model type
    if (m_modelType == 0) {
      m_tree = new FTNode(m_errorOnProbabilities, m_numBoostingIterations, m_minNumInstances,
                          m_weightTrimBeta, m_useAIC);
    } else if (m_modelType == 1) {
      m_tree = new FTLeavesNode(m_errorOnProbabilities, m_numBoostingIterations, m_minNumInstances,
                                m_weightTrimBeta, m_useAIC);
    } else if (m_modelType == 2) {
      m_tree = new FTInnerNode(m_errorOnProbabilities, m_numBoostingIterations, m_minNumInstances,
                               m_weightTrimBeta, m_useAIC);
    }

    // build tree
    m_tree.buildClassifier(filteredData);
    // prune tree
    m_tree.prune();
    m_tree.assignIDs(0);
    m_tree.cleanup();
  }
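
  // Illustrative sketch (not part of the original source): evaluating a
  // built FT with weka.classifiers.Evaluation; "data" is assumed to be a
  // loaded Instances object with its class index set.
  //
  //   FT ft = new FT();
  //   Evaluation eval = new Evaluation(data);
  //   eval.crossValidateModel(ft, data, 10, new java.util.Random(1));
  //   System.out.println(eval.toSummaryString());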

  /**
   * Returns class probabilities for an instance.
   *
   * @param instance the instance to compute the distribution for
   * @return the class probabilities
   * @throws Exception if distribution can't be computed successfully
   */
  public double[] distributionForInstance(Instance instance) throws Exception {

    // replace missing values
    m_replaceMissing.input(instance);
    instance = m_replaceMissing.output();

    // possibly convert nominal attributes
    if (m_convertNominal) {
      m_nominalToBinary.input(instance);
      instance = m_nominalToBinary.output();
    }
    return m_tree.distributionForInstance(instance);
  }

  /**
   * Classifies an instance.
   *
   * @param instance the instance to classify
   * @return the classification
   * @throws Exception if instance can't be classified successfully
   */
  public double classifyInstance(Instance instance) throws Exception {

    double maxProb = -1;
    int maxIndex = 0;

    // classify by maximum probability
    double[] probs = distributionForInstance(instance);
    for (int j = 0; j < instance.numClasses(); j++) {
      if (Utils.gr(probs[j], maxProb)) {
        maxIndex = j;
        maxProb = probs[j];
      }
    }
    return (double) maxIndex;
  }

  /**
   * Returns a description of the classifier.
   *
   * @return a string representation of the classifier
   */
  public String toString() {
    if (m_tree != null) {
      if (m_modelType == 0)
        return "FT tree \n------------------\n" + m_tree.toString();
      else if (m_modelType == 1)
        return "FT Leaves tree \n------------------\n" + m_tree.toString();
      else
        return "FT Inner tree \n------------------\n" + m_tree.toString();
    } else {
      return "No tree built";
    }
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {
    Vector newVector = new Vector(8);

    newVector.addElement(new Option("\tBinary splits (convert nominal attributes to binary ones)",
                                    "B", 0, "-B"));

    newVector.addElement(new Option("\tUse error on probabilities instead of misclassification error "+
                                    "for stopping criterion of LogitBoost.",
                                    "P", 0, "-P"));

    newVector.addElement(new Option("\tSet fixed number of iterations for LogitBoost (instead of using "+
                                    "cross-validation)",
                                    "I", 1, "-I <numIterations>"));

    newVector.addElement(new Option("\tSet the Functional Tree type to generate: "+
                                    "0 for FT, 1 for FTLeaves and 2 for FTInner",
                                    "F", 1, "-F <modelType>"));

    newVector.addElement(new Option("\tSet minimum number of instances at which a node can be split (default 15)",
                                    "M", 1, "-M <numInstances>"));

    newVector.addElement(new Option("\tSet beta for weight trimming for LogitBoost. Set to 0 (default) for no weight trimming.",
                                    "W", 1, "-W <beta>"));

    newVector.addElement(new Option("\tThe AIC is used to choose the best iteration.",
                                    "A", 0, "-A"));

    return newVector.elements();
  }

  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -B
   *  Binary splits (convert nominal attributes to binary ones)</pre>
   *
   * <pre> -P
   *  Use error on probabilities instead of misclassification error for stopping criterion of LogitBoost.</pre>
   *
   * <pre> -I &lt;numIterations&gt;
   *  Set fixed number of iterations for LogitBoost (instead of using cross-validation)</pre>
   *
   * <pre> -F &lt;modelType&gt;
   *  Set the Functional Tree type to generate: 0 for FT, 1 for FTLeaves and 2 for FTInner</pre>
   *
   * <pre> -M &lt;numInstances&gt;
   *  Set minimum number of instances at which a node can be split (default 15)</pre>
   *
   * <pre> -W &lt;beta&gt;
   *  Set beta for weight trimming for LogitBoost. Set to 0 (default) for no weight trimming.</pre>
   *
   * <pre> -A
   *  The AIC is used to choose the best iteration.</pre>
   *
   <!-- options-end -->
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    setBinSplit(Utils.getFlag('B', options));
    setErrorOnProbabilities(Utils.getFlag('P', options));

    String optionString = Utils.getOption('I', options);
    if (optionString.length() != 0) {
      setNumBoostingIterations(Integer.parseInt(optionString));
    }

    optionString = Utils.getOption('F', options);
    if (optionString.length() != 0) {
      setModelType(new SelectedTag(Integer.parseInt(optionString), TAGS_MODEL));
    }

    optionString = Utils.getOption('M', options);
    if (optionString.length() != 0) {
      setMinNumInstances(Integer.parseInt(optionString));
    }

    optionString = Utils.getOption('W', options);
    if (optionString.length() != 0) {
      setWeightTrimBeta(Double.parseDouble(optionString));
    }

    setUseAIC(Utils.getFlag('A', options));

    Utils.checkForRemainingOptions(options);
  }
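
  // Illustrative sketch (not part of the original source): setting options
  // from a command-line style string via weka.core.Utils.splitOptions.
  //
  //   FT ft = new FT();
  //   ft.setOptions(Utils.splitOptions("-F 2 -I 30 -M 30 -A"));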

  /**
   * Gets the current settings of the Classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {
    String[] options = new String[11];
    int current = 0;

    if (getBinSplit()) {
      options[current++] = "-B";
    }

    if (getErrorOnProbabilities()) {
      options[current++] = "-P";
    }

    options[current++] = "-I";
    options[current++] = "" + getNumBoostingIterations();

    options[current++] = "-F";
    options[current++] = "" + getModelType().getSelectedTag().getID();

    options[current++] = "-M";
    options[current++] = "" + getMinNumInstances();

    options[current++] = "-W";
    options[current++] = "" + getWeightTrimBeta();

    if (getUseAIC()) {
      options[current++] = "-A";
    }

    while (current < options.length) {
      options[current++] = "";
    }
    return options;
  }

  /**
   * Get the value of weightTrimBeta.
   *
   * @return Value of weightTrimBeta.
   */
  public double getWeightTrimBeta() {
    return m_weightTrimBeta;
  }

  /**
   * Get the value of useAIC.
   *
   * @return Value of useAIC.
   */
  public boolean getUseAIC() {
    return m_useAIC;
  }

  /**
   * Set the value of weightTrimBeta.
   *
   * @param n Value to assign to weightTrimBeta.
   */
  public void setWeightTrimBeta(double n) {
    m_weightTrimBeta = n;
  }

  /**
   * Set the value of useAIC.
   *
   * @param c Value to assign to useAIC.
   */
  public void setUseAIC(boolean c) {
    m_useAIC = c;
  }

  /**
   * Get the value of binarySplits.
   *
   * @return Value of binarySplits.
   */
  public boolean getBinSplit() {
    return m_convertNominal;
  }

  /**
   * Get the value of errorOnProbabilities.
   *
   * @return Value of errorOnProbabilities.
   */
  public boolean getErrorOnProbabilities() {
    return m_errorOnProbabilities;
  }

  /**
   * Get the value of numBoostingIterations.
   *
   * @return Value of numBoostingIterations.
   */
  public int getNumBoostingIterations() {
    return m_numBoostingIterations;
  }

  /**
   * Get the type of functional tree model being used.
   *
   * @return the type of functional tree model.
   */
  public SelectedTag getModelType() {
    return new SelectedTag(m_modelType, TAGS_MODEL);
  }

  /**
   * Set the Functional Tree type.
   *
   * @param newMethod Value corresponding to tree type.
   */
  public void setModelType(SelectedTag newMethod) {
    if (newMethod.getTags() == TAGS_MODEL) {
      int c = newMethod.getSelectedTag().getID();
      if (c == 0 || c == 1 || c == 2) {
        m_modelType = c;
      } else {
        throw new IllegalArgumentException("Wrong model type, -F value should be: 0 for FT, 1 " +
                                           "for FTLeaves, and 2 for FTInner");
      }
    }
  }
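
  // Illustrative sketch (not part of the original source): selecting the
  // FTInner variant via a SelectedTag.
  //
  //   ft.setModelType(new SelectedTag(FT.MODEL_FTInner, FT.TAGS_MODEL));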

  /**
   * Get the value of minNumInstances.
   *
   * @return Value of minNumInstances.
   */
  public int getMinNumInstances() {
    return m_minNumInstances;
  }

  /**
   * Set the value of binarySplits.
   *
   * @param c Value to assign to binarySplits.
   */
  public void setBinSplit(boolean c) {
    m_convertNominal = c;
  }

  /**
   * Set the value of errorOnProbabilities.
   *
   * @param c Value to assign to errorOnProbabilities.
   */
  public void setErrorOnProbabilities(boolean c) {
    m_errorOnProbabilities = c;
  }

  /**
   * Set the value of numBoostingIterations.
   *
   * @param c Value to assign to numBoostingIterations.
   */
  public void setNumBoostingIterations(int c) {
    m_numBoostingIterations = c;
  }

  /**
   * Set the value of minNumInstances.
   *
   * @param c Value to assign to minNumInstances.
   */
  public void setMinNumInstances(int c) {
    m_minNumInstances = c;
  }

  /**
   * Returns the type of graph this classifier represents.
   *
   * @return Drawable.TREE
   */
  public int graphType() {
    return Drawable.TREE;
  }

  /**
   * Returns graph describing the tree.
   *
   * @return the graph describing the tree
   * @throws Exception if graph can't be computed
   */
  public String graph() throws Exception {

    return m_tree.graph();
  }

  /**
   * Returns the size of the tree.
   *
   * @return the size of the tree
   */
  public int measureTreeSize() {
    return m_tree.numNodes();
  }

  /**
   * Returns the number of leaves in the tree.
   *
   * @return the number of leaves in the tree
   */
  public int measureNumLeaves() {
    return m_tree.numLeaves();
  }

  /**
   * Returns an enumeration of the additional measure names.
   *
   * @return an enumeration of the measure names
   */
  public Enumeration enumerateMeasures() {
    Vector newVector = new Vector(2);
    newVector.addElement("measureTreeSize");
    newVector.addElement("measureNumLeaves");

    return newVector.elements();
  }

  /**
   * Returns the value of the named measure.
   *
   * @param additionalMeasureName the name of the measure to query for its value
   * @return the value of the named measure
   * @throws IllegalArgumentException if the named measure is not supported
   */
  public double getMeasure(String additionalMeasureName) {
    if (additionalMeasureName.compareToIgnoreCase("measureTreeSize") == 0) {
      return measureTreeSize();
    } else if (additionalMeasureName.compareToIgnoreCase("measureNumLeaves") == 0) {
      return measureNumLeaves();
    } else {
      throw new IllegalArgumentException(additionalMeasureName
                                         + " not supported (FT)");
    }
  }
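
  // Illustrative sketch (not part of the original source): querying the
  // additional measures once a tree has been built.
  //
  //   double nodes  = ft.getMeasure("measureTreeSize");
  //   double leaves = ft.getMeasure("measureNumLeaves");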

  /**
   * Returns a string describing the classifier.
   *
   * @return a description suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {
    return "Classifier for building 'Functional trees', which are classification trees that can have "
      +"logistic regression functions at the inner nodes and/or leaves. The algorithm can deal with "
      +"binary and multi-class target variables, numeric and nominal attributes and missing values.\n\n"
      +"For more information see: \n\n"
      + getTechnicalInformation().toString();
  }


  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation        result;
    TechnicalInformation        additional;

    result = new TechnicalInformation(Type.ARTICLE);
    result.setValue(Field.AUTHOR, "Joao Gama");
    result.setValue(Field.TITLE, "Functional Trees");
    result.setValue(Field.JOURNAL, "Machine Learning");
    result.setValue(Field.YEAR, "2004");
    result.setValue(Field.VOLUME, "55");
    result.setValue(Field.PAGES, "219-250");
    result.setValue(Field.NUMBER, "3");

    additional = result.add(Type.ARTICLE);
    additional.setValue(Field.AUTHOR, "Niels Landwehr and Mark Hall and Eibe Frank");
    additional.setValue(Field.TITLE, "Logistic Model Trees");
    additional.setValue(Field.JOURNAL, "Machine Learning");
    additional.setValue(Field.YEAR, "2005");
    additional.setValue(Field.VOLUME, "59");
    additional.setValue(Field.PAGES, "161-205");
    additional.setValue(Field.NUMBER, "1-2");

    return result;
  }

  /**
   * Returns the tip text for this property.
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String modelTypeTipText() {
    return "The type of FT model: 0 for FT, 1 " +
      "for FTLeaves, and 2 for FTInner.";
  }

  /**
   * Returns the tip text for this property.
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String binSplitTipText() {
    return "Convert all nominal attributes to binary ones before building the tree. "
      +"This means that all splits in the final tree will be binary.";
  }

  /**
   * Returns the tip text for this property.
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String errorOnProbabilitiesTipText() {
    return "Minimize error on probabilities instead of misclassification error when cross-validating the number "
      +"of LogitBoost iterations. When set, the number of LogitBoost iterations is chosen that minimizes "
      +"the root mean squared error instead of the misclassification error.";
  }

  /**
   * Returns the tip text for this property.
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String numBoostingIterationsTipText() {
    return "Set a fixed number of iterations for LogitBoost. If >= 0, this sets a fixed number of LogitBoost "
      +"iterations that is used everywhere in the tree. If < 0, the number is cross-validated.";
  }

  /**
   * Returns the tip text for this property.
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String minNumInstancesTipText() {
    return "Set the minimum number of instances at which a node is considered for splitting. "
      +"The default value is 15.";
  }

  /**
   * Returns the tip text for this property.
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String weightTrimBetaTipText() {
    return "Set the beta value used for weight trimming in LogitBoost. "
      +"Only instances carrying (1 - beta)% of the weight from the previous iteration "
      +"are used in the next iteration. Set to 0 for no weight trimming. "
      +"The default value is 0.";
  }

  /**
   * Returns the tip text for this property.
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String useAICTipText() {
    return "The AIC is used to determine when to stop LogitBoost iterations. "
      +"The default is not to use AIC.";
  }

  /**
   * Returns the revision string.
   *
   * @return            the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 5928 $");
  }

  /**
   * Main method for testing this class.
   *
   * @param argv the commandline options
   */
  public static void main(String[] argv) {
    runClassifier(new FT(), argv);
  }
}