/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    SimpleLogistic.java
 *    Copyright (C) 2003 University of Waikato, Hamilton, New Zealand
 *
 */

package weka.classifiers.functions;

import weka.classifiers.Classifier;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.trees.lmt.LogisticBase;
import weka.core.AdditionalMeasureProducer;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.NominalToBinary;
import weka.filters.unsupervised.attribute.ReplaceMissingValues;

import java.util.Enumeration;
import java.util.Vector;

/**
 <!-- globalinfo-start -->
 * Classifier for building linear logistic regression models. LogitBoost with simple regression functions as base learners is used for fitting the logistic models. The optimal number of LogitBoost iterations to perform is cross-validated, which leads to automatic attribute selection. For more information see:<br/>
 * Niels Landwehr, Mark Hall, Eibe Frank (2005). Logistic Model Trees.<br/>
 * <br/>
 * Marc Sumner, Eibe Frank, Mark Hall: Speeding up Logistic Model Tree Induction. In: 9th European Conference on Principles and Practice of Knowledge Discovery in Databases, 675-683, 2005.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;article{Landwehr2005,
 *    author = {Niels Landwehr and Mark Hall and Eibe Frank},
 *    booktitle = {Machine Learning},
 *    number = {1-2},
 *    pages = {161-205},
 *    title = {Logistic Model Trees},
 *    volume = {95},
 *    year = {2005}
 * }
 *
 * &#64;inproceedings{Sumner2005,
 *    author = {Marc Sumner and Eibe Frank and Mark Hall},
 *    booktitle = {9th European Conference on Principles and Practice of Knowledge Discovery in Databases},
 *    pages = {675-683},
 *    publisher = {Springer},
 *    title = {Speeding up Logistic Model Tree Induction},
 *    year = {2005}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -I &lt;iterations&gt;
 *  Set fixed number of iterations for LogitBoost</pre>
 *
 * <pre> -S
 *  Use stopping criterion on training set (instead of
 *  cross-validation)</pre>
 *
 * <pre> -P
 *  Use error on probabilities (rmse) instead of
 *  misclassification error for stopping criterion</pre>
 *
 * <pre> -M &lt;iterations&gt;
 *  Set maximum number of boosting iterations</pre>
 *
 * <pre> -H &lt;iterations&gt;
 *  Set parameter for heuristic for early stopping of
 *  LogitBoost.
 *  If enabled, the minimum is selected greedily, stopping
 *  if the current minimum has not changed for iter iterations.
 *  By default, heuristic is enabled with value 50. Set to
 *  zero to disable heuristic.</pre>
 *
 * <pre> -W &lt;beta&gt;
 *  Set beta for weight trimming for LogitBoost. Set to 0 for no weight trimming.
 * </pre>
 *
 * <pre> -A
 *  The AIC is used to choose the best iteration (instead of CV or training error).
 * </pre>
 *
 <!-- options-end -->
 *
 * @author Niels Landwehr
 * @author Marc Sumner
 * @version $Revision: 5928 $
 */
public class SimpleLogistic 
  extends AbstractClassifier
  implements OptionHandler, AdditionalMeasureProducer, WeightedInstancesHandler,
             TechnicalInformationHandler {

    /** for serialization */
    static final long serialVersionUID = 7397710626304705059L;

    /**The actual logistic regression model */
    protected LogisticBase m_boostedModel;

    /**Filter for converting nominal attributes to binary ones*/
    protected NominalToBinary m_NominalToBinary = null;

    /**Filter for replacing missing values*/
    protected ReplaceMissingValues m_ReplaceMissingValues = null;

    /**If non-negative, use this as fixed number of LogitBoost iterations*/
    protected int m_numBoostingIterations;

    /**Maximum number of iterations for LogitBoost*/
    protected int m_maxBoostingIterations = 500;

    /**Parameter for the heuristic for early stopping of LogitBoost*/
    protected int m_heuristicStop = 50;

    /**If true, cross-validate number of LogitBoost iterations*/
    protected boolean m_useCrossValidation;

    /**If true, minimize error on probabilities instead of misclassification error*/
    protected boolean m_errorOnProbabilities;

    /**Threshold for trimming weights. Instances with a weight lower than this (as a percentage
     * of total weights) are not included in the regression fit.
     */
    protected double m_weightTrimBeta = 0;

    /** If true, the AIC is used to choose the best iteration*/
    private boolean m_useAIC = false;

    /**
     * Constructor for creating SimpleLogistic object with standard options.
     */
    public SimpleLogistic() {
        m_numBoostingIterations = 0;
        m_useCrossValidation = true;
        m_errorOnProbabilities = false;
        m_weightTrimBeta = 0;
        m_useAIC = false;
    }

    /**
     * Constructor for creating SimpleLogistic object.
     * @param numBoostingIterations if non-negative, use this as fixed number of iterations for LogitBoost
     * @param useCrossValidation cross-validate number of LogitBoost iterations.
     * @param errorOnProbabilities minimize error on probabilities instead of misclassification error
     */
    public SimpleLogistic(int numBoostingIterations, boolean useCrossValidation, 
                              boolean errorOnProbabilities) { 
        m_numBoostingIterations = numBoostingIterations;
        m_useCrossValidation = useCrossValidation;
        m_errorOnProbabilities = errorOnProbabilities;
        m_weightTrimBeta = 0;
        m_useAIC = false;
    }

    /**
     * Returns default capabilities of the classifier.
     *
     * @return      the capabilities of this classifier
     */
    public Capabilities getCapabilities() {
      Capabilities result = super.getCapabilities();
      result.disableAll();

      // attributes
      result.enable(Capability.NOMINAL_ATTRIBUTES);
      result.enable(Capability.NUMERIC_ATTRIBUTES);
      result.enable(Capability.DATE_ATTRIBUTES);
      result.enable(Capability.MISSING_VALUES);

      // class
      result.enable(Capability.NOMINAL_CLASS);
      result.enable(Capability.MISSING_CLASS_VALUES);

      return result;
    }

    /**
     * Builds the logistic regression using LogitBoost.
     * @param data the training data
     * @throws Exception if something goes wrong
     */
    public void buildClassifier(Instances data) throws Exception {

      // can classifier handle the data?
      getCapabilities().testWithFail(data);

      // remove instances with missing class
      data = new Instances(data);
      data.deleteWithMissingClass();

        //replace missing values
        m_ReplaceMissingValues = new ReplaceMissingValues();
        m_ReplaceMissingValues.setInputFormat(data);
        data = Filter.useFilter(data, m_ReplaceMissingValues);

        //convert nominal attributes
        m_NominalToBinary = new NominalToBinary();
        m_NominalToBinary.setInputFormat(data);
        data = Filter.useFilter(data, m_NominalToBinary);

        //create actual logistic model
        m_boostedModel = new LogisticBase(m_numBoostingIterations, m_useCrossValidation, m_errorOnProbabilities);
        m_boostedModel.setMaxIterations(m_maxBoostingIterations);
        m_boostedModel.setHeuristicStop(m_heuristicStop);
        m_boostedModel.setWeightTrimBeta(m_weightTrimBeta);
        m_boostedModel.setUseAIC(m_useAIC);

        //build logistic model
        m_boostedModel.buildClassifier(data);
    }

    /**
     * Returns class probabilities for an instance.
     *
     * @param inst the instance to compute the probabilities for
     * @return the probabilities
     * @throws Exception if distribution can't be computed successfully
     */
    public double[] distributionForInstance(Instance inst) 
        throws Exception {

        //replace missing values / convert nominal atts
        m_ReplaceMissingValues.input(inst);
        inst = m_ReplaceMissingValues.output();
        m_NominalToBinary.input(inst);
        inst = m_NominalToBinary.output();

        //obtain probs from logistic model
        return m_boostedModel.distributionForInstance(inst);
    }
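
    /**
     * Minimal usage sketch (illustrative only; the method and parameter names
     * are placeholders, not part of the public API): builds the model on a
     * training set that has a nominal class attribute with its class index
     * set, then queries class probabilities for the first instance. Missing
     * values and nominal attributes are handled internally by buildClassifier.
     */
    protected static double[] exampleUsage(Instances train) throws Exception {
        SimpleLogistic model = new SimpleLogistic();
        model.setUseCrossValidation(true);    // cross-validate the number of LogitBoost iterations
        model.setMaxBoostingIterations(200);  // cap the boosting iterations (default is 500)
        model.buildClassifier(train);         // filtering and the LogitBoost fit happen here
        return model.distributionForInstance(train.instance(0));
    }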

    /**
     * Returns an enumeration describing the available options.
     *
     * @return an enumeration of all the available options.
     */
    public Enumeration listOptions() {
        Vector newVector = new Vector();

        newVector.addElement(new Option(
            "\tSet fixed number of iterations for LogitBoost",
            "I",1,"-I <iterations>"));

        newVector.addElement(new Option(
            "\tUse stopping criterion on training set (instead of\n"
            + "\tcross-validation)",
            "S",0,"-S"));

        newVector.addElement(new Option(
            "\tUse error on probabilities (rmse) instead of\n"
            + "\tmisclassification error for stopping criterion",
            "P",0,"-P"));

        newVector.addElement(new Option(
            "\tSet maximum number of boosting iterations",
            "M",1,"-M <iterations>"));

        newVector.addElement(new Option(
            "\tSet parameter for heuristic for early stopping of\n"
            + "\tLogitBoost.\n"
            + "\tIf enabled, the minimum is selected greedily, stopping\n"
            + "\tif the current minimum has not changed for iter iterations.\n"
            + "\tBy default, heuristic is enabled with value 50. Set to\n"
            + "\tzero to disable heuristic.",
            "H",1,"-H <iterations>"));

        newVector.addElement(new Option("\tSet beta for weight trimming for LogitBoost. Set to 0 for no weight trimming.\n",
                                        "W",1,"-W <beta>"));

        newVector.addElement(new Option("\tThe AIC is used to choose the best iteration (instead of CV or training error).\n",
                                        "A", 0, "-A"));

        return newVector.elements();
    }


    /**
     * Parses a given list of options. <p/>
     *
     <!-- options-start -->
     * Valid options are: <p/>
     *
     * <pre> -I &lt;iterations&gt;
     *  Set fixed number of iterations for LogitBoost</pre>
     *
     * <pre> -S
     *  Use stopping criterion on training set (instead of
     *  cross-validation)</pre>
     *
     * <pre> -P
     *  Use error on probabilities (rmse) instead of
     *  misclassification error for stopping criterion</pre>
     *
     * <pre> -M &lt;iterations&gt;
     *  Set maximum number of boosting iterations</pre>
     *
     * <pre> -H &lt;iterations&gt;
     *  Set parameter for heuristic for early stopping of
     *  LogitBoost.
     *  If enabled, the minimum is selected greedily, stopping
     *  if the current minimum has not changed for iter iterations.
     *  By default, heuristic is enabled with value 50. Set to
     *  zero to disable heuristic.</pre>
     *
     * <pre> -W &lt;beta&gt;
     *  Set beta for weight trimming for LogitBoost. Set to 0 for no weight trimming.
     * </pre>
     *
     * <pre> -A
     *  The AIC is used to choose the best iteration (instead of CV or training error).
     * </pre>
     *
     <!-- options-end -->
     *
     * @param options the list of options as an array of strings
     * @throws Exception if an option is not supported
     */
    public void setOptions(String[] options) throws Exception {

        String optionString = Utils.getOption('I', options);
        if (optionString.length() != 0) {
            setNumBoostingIterations((new Integer(optionString)).intValue());
        }

        setUseCrossValidation(!Utils.getFlag('S', options));
        setErrorOnProbabilities(Utils.getFlag('P', options));

        optionString = Utils.getOption('M', options);
        if (optionString.length() != 0) {
            setMaxBoostingIterations((new Integer(optionString)).intValue());
        }

        optionString = Utils.getOption('H', options);
        if (optionString.length() != 0) {
            setHeuristicStop((new Integer(optionString)).intValue());
        }

        optionString = Utils.getOption('W', options);
        if (optionString.length() != 0) {
            setWeightTrimBeta((new Double(optionString)).doubleValue());
        }

        setUseAIC(Utils.getFlag('A', options));

        Utils.checkForRemainingOptions(options);
    }

    /**
     * Gets the current settings of the Classifier.
     *
     * @return an array of strings suitable for passing to setOptions
     */
    public String[] getOptions() {
        String[] options = new String[11];
        int current = 0;

        options[current++] = "-I"; 
        options[current++] = ""+getNumBoostingIterations();

        if (!getUseCrossValidation()) {
            options[current++] = "-S";
        } 

        if (getErrorOnProbabilities()) {
            options[current++] = "-P";
        } 

        options[current++] = "-M"; 
        options[current++] = ""+getMaxBoostingIterations();

        options[current++] = "-H"; 
        options[current++] = ""+getHeuristicStop();

        options[current++] = "-W";
        options[current++] = ""+getWeightTrimBeta();

        if (getUseAIC()) {
            options[current++] = "-A";
        }

        while (current < options.length) {
            options[current++] = "";
        } 
        return options;
    }
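
    /**
     * Illustrative sketch (helper name and option values are examples only):
     * configures the classifier from a command-line style option string,
     * using the flags documented in setOptions above.
     */
    protected static SimpleLogistic exampleFromOptionString() throws Exception {
        SimpleLogistic model = new SimpleLogistic();
        // -I 0: no fixed iteration count, -M 500: iteration cap,
        // -H 50: early-stopping heuristic, -W 0.0: no weight trimming
        model.setOptions(Utils.splitOptions("-I 0 -M 500 -H 50 -W 0.0"));
        return model;
    }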

    /**
     * Get the value of numBoostingIterations.
     *
     * @return the number of boosting iterations
     */
    public int getNumBoostingIterations(){
        return m_numBoostingIterations;
    }
    /**
     * Get the value of useCrossValidation.
     *
     * @return true if cross-validation is used
     */
    public boolean getUseCrossValidation(){
        return m_useCrossValidation;
    }

    /**
     * Get the value of errorOnProbabilities.
     *
     * @return  true if the error on probabilities is minimized instead of
     *          the misclassification error
     */
    public boolean getErrorOnProbabilities(){
        return m_errorOnProbabilities;
    }

    /**
     * Get the value of maxBoostingIterations.
     *
     * @return the maximum number of boosting iterations
     */
    public int getMaxBoostingIterations(){
        return m_maxBoostingIterations;
    }

    /**
     * Get the value of heuristicStop.
     *
     * @return the value of heuristicStop
     */
    public int getHeuristicStop(){
        return m_heuristicStop;
    }

    /**
     * Get the value of weightTrimBeta.
     */
    public double getWeightTrimBeta(){
        return m_weightTrimBeta;
    }

    /**
     * Get the value of useAIC.
     *
     * @return Value of useAIC.
     */
    public boolean getUseAIC(){
        return m_useAIC;
    }

    /**
     * Set the value of numBoostingIterations.
     *
     * @param n the number of boosting iterations
     */
    public void setNumBoostingIterations(int n){
        m_numBoostingIterations = n;
    }

    /**
     * Set the value of useCrossValidation.
     *
     * @param l whether to use cross-validation
     */
    public void setUseCrossValidation(boolean l){
        m_useCrossValidation = l;
    }

    /**
     * Set the value of errorOnProbabilities.
     *
     * @param l if true, minimize error on probabilities instead of
     *          misclassification error
     */
    public void setErrorOnProbabilities(boolean l){
        m_errorOnProbabilities = l;
    }

    /**
     * Set the value of maxBoostingIterations.
     *
     * @param n the maximum number of boosting iterations
     */
    public void setMaxBoostingIterations(int n){
        m_maxBoostingIterations = n;
    } 

    /**
     * Set the value of heuristicStop.
     *
     * @param n the value of heuristicStop
     */
    public void setHeuristicStop(int n){
        if (n == 0) 
          m_heuristicStop = m_maxBoostingIterations; 
        else 
          m_heuristicStop = n;
    }

    /**
     * Set the value of weightTrimBeta.
     */
    public void setWeightTrimBeta(double n){
        m_weightTrimBeta = n;
    }

    /**
     * Set the value of useAIC.
     *
     * @param c Value to assign to useAIC.
     */
    public void setUseAIC(boolean c){
        m_useAIC = c;
    }

    /**
     * Get the number of LogitBoost iterations performed (= the number of
     * regression functions fit by LogitBoost).
     *
     * @return the number of LogitBoost iterations performed
     */
    public int getNumRegressions(){
        return m_boostedModel.getNumRegressions();
    }

    /**
     * Returns a description of the logistic model (attributes/coefficients).
     *
     * @return the model as string
     */
    public String toString(){
        if (m_boostedModel == null) return "No model built";
        return "SimpleLogistic:\n" + m_boostedModel.toString();
    }

    /**
     * Returns the fraction of all attributes in the data that are used in the
     * logistic model (in percent). An attribute is used in the model if it is
     * used in any of the models for the different classes.
     *
     * @return percentage of attributes used in the model
     */
    public double measureAttributesUsed(){
        return m_boostedModel.percentAttributesUsed();
    }

    /**
     * Returns an enumeration of the additional measure names
     * @return an enumeration of the measure names
     */
    public Enumeration enumerateMeasures() {
        Vector newVector = new Vector(3);
        newVector.addElement("measureAttributesUsed");
        newVector.addElement("measureNumIterations");
        return newVector.elements();
    }

    /**
     * Returns the value of the named measure
     * @param additionalMeasureName the name of the measure to query for its value
     * @return the value of the named measure
     * @throws IllegalArgumentException if the named measure is not supported
     */
    public double getMeasure(String additionalMeasureName) {
        if (additionalMeasureName.compareToIgnoreCase("measureAttributesUsed") == 0) {
            return measureAttributesUsed();
        } else if(additionalMeasureName.compareToIgnoreCase("measureNumIterations") == 0){
            return getNumRegressions();
        } else {
            throw new IllegalArgumentException(additionalMeasureName
                                               + " not supported (SimpleLogistic)");
        }
    }
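
    /**
     * Illustrative sketch (helper name is an example only): reads the two
     * additional measures exposed above from an already trained model.
     */
    protected static void exampleMeasures(SimpleLogistic trained) {
        double attsUsed = trained.getMeasure("measureAttributesUsed"); // percentage of attributes used in the model
        double iters = trained.getMeasure("measureNumIterations");     // LogitBoost iterations performed
        System.out.println(attsUsed + "% attributes used, " + iters + " iterations");
    }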


    /**
     * Returns a string describing classifier
     * @return a description suitable for
     * displaying in the explorer/experimenter gui
     */
    public String globalInfo() {
        return "Classifier for building linear logistic regression models. LogitBoost with simple regression "
            +"functions as base learners is used for fitting the logistic models. The optimal number of LogitBoost "
            +"iterations to perform is cross-validated, which leads to automatic attribute selection. "
            +"For more information see:\n"
            + getTechnicalInformation().toString();
    }

    /**
     * Returns an instance of a TechnicalInformation object, containing
     * detailed information about the technical background of this class,
     * e.g., paper reference or book this class is based on.
     *
     * @return the technical information about this class
     */
    public TechnicalInformation getTechnicalInformation() {
      TechnicalInformation      result;
      TechnicalInformation      additional;

      result = new TechnicalInformation(Type.ARTICLE);
      result.setValue(Field.AUTHOR, "Niels Landwehr and Mark Hall and Eibe Frank");
      result.setValue(Field.TITLE, "Logistic Model Trees");
      result.setValue(Field.BOOKTITLE, "Machine Learning");
      result.setValue(Field.YEAR, "2005");
      result.setValue(Field.VOLUME, "95");
      result.setValue(Field.PAGES, "161-205");
      result.setValue(Field.NUMBER, "1-2");

      additional = result.add(Type.INPROCEEDINGS);
      additional.setValue(Field.AUTHOR, "Marc Sumner and Eibe Frank and Mark Hall");
      additional.setValue(Field.TITLE, "Speeding up Logistic Model Tree Induction");
      additional.setValue(Field.BOOKTITLE, "9th European Conference on Principles and Practice of Knowledge Discovery in Databases");
      additional.setValue(Field.YEAR, "2005");
      additional.setValue(Field.PAGES, "675-683");
      additional.setValue(Field.PUBLISHER, "Springer");

      return result;
    }

    /**
     * Returns the tip text for this property
     * @return tip text for this property suitable for
     * displaying in the explorer/experimenter gui
     */
    public String numBoostingIterationsTipText() {
        return "Set fixed number of iterations for LogitBoost. If >= 0, this sets the number of LogitBoost iterations "
            +"to perform. If < 0, the number is cross-validated or a stopping criterion on the training set is used "
            +"(depending on the value of useCrossValidation).";
    }

    /**
     * Returns the tip text for this property
     * @return tip text for this property suitable for
     * displaying in the explorer/experimenter gui
     */
    public String useCrossValidationTipText() {
        return "Sets whether the number of LogitBoost iterations is to be cross-validated or the stopping criterion "
            +"on the training set should be used. If not set (and no fixed number of iterations was given), "
            +"the number of LogitBoost iterations is used that minimizes the error on the training set "
            +"(misclassification error or error on probabilities depending on errorOnProbabilities).";
    }

    /**
     * Returns the tip text for this property
     * @return tip text for this property suitable for
     * displaying in the explorer/experimenter gui
     */
    public String errorOnProbabilitiesTipText() {
        return "Use error on the probabilities as error measure when determining the best number of LogitBoost iterations. "
            +"If set, the number of LogitBoost iterations is chosen that minimizes the root mean squared error "
            +"(either on the training set or in the cross-validation, depending on useCrossValidation).";
    }

    /**
     * Returns the tip text for this property
     * @return tip text for this property suitable for
     * displaying in the explorer/experimenter gui
     */
    public String maxBoostingIterationsTipText() {
        return "Sets the maximum number of iterations for LogitBoost. Default value is 500, for very small/large "
            +"datasets a lower/higher value might be preferable.";
    }

    /**
     * Returns the tip text for this property
     * @return tip text for this property suitable for
     * displaying in the explorer/experimenter gui
     */
    public String heuristicStopTipText() {
        return "If heuristicStop > 0, the heuristic for greedy stopping while cross-validating the number of "
            +"LogitBoost iterations is enabled. This means LogitBoost is stopped if no new error minimum "
            +"has been reached in the last heuristicStop iterations. It is recommended to use this heuristic, "
            +"as it gives a large speed-up, especially on small datasets. The default value is 50.";
    }

    /**
     * Returns the tip text for this property
     * @return tip text for this property suitable for
     * displaying in the explorer/experimenter gui
     */
    public String weightTrimBetaTipText() {
        return "Set the beta value used for weight trimming in LogitBoost. "
        +"Only instances carrying (1 - beta)% of the weight from previous iteration "
        +"are used in the next iteration. Set to 0 for no weight trimming. "
        +"The default value is 0.";
    }

    /**
     * Returns the tip text for this property
     * @return tip text for this property suitable for
     * displaying in the explorer/experimenter gui
     */
    public String useAICTipText() {
        return "The AIC is used to determine when to stop LogitBoost iterations "
        +"(instead of cross-validation or training error).";
    }

    /**
     * Returns the revision string.
     *
     * @return          the revision
     */
    public String getRevision() {
      return RevisionUtils.extract("$Revision: 5928 $");
    }

    /**
     * Main method for testing this class
     *
     * @param argv commandline options
     */
    public static void main(String[] argv) {
        runClassifier(new SimpleLogistic(), argv);
    }
}