source: src/main/java/weka/classifiers/meta/AdaBoostM1.java @ 18

Last change on this file since 18 was 4, checked in by gnappo, 14 years ago

Import of weka.

File size: 23.3 KB
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    AdaBoostM1.java
 *    Copyright (C) 1999 University of Waikato, Hamilton, New Zealand
 *
 */

package weka.classifiers.meta;

import weka.classifiers.Classifier;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.Evaluation;
import weka.classifiers.RandomizableIteratedSingleClassifierEnhancer;
import weka.classifiers.Sourcable;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.Randomizable;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;

import java.util.Enumeration;
import java.util.Random;
import java.util.Vector;

/**
 <!-- globalinfo-start -->
 * Class for boosting a nominal class classifier using the Adaboost M1 method. Only nominal class problems can be tackled. Often dramatically improves performance, but sometimes overfits.<br/>
 * <br/>
 * For more information, see<br/>
 * <br/>
 * Yoav Freund, Robert E. Schapire: Experiments with a new boosting algorithm. In: Thirteenth International Conference on Machine Learning, San Francisco, 148-156, 1996.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;inproceedings{Freund1996,
 *    address = {San Francisco},
 *    author = {Yoav Freund and Robert E. Schapire},
 *    booktitle = {Thirteenth International Conference on Machine Learning},
 *    pages = {148-156},
 *    publisher = {Morgan Kaufmann},
 *    title = {Experiments with a new boosting algorithm},
 *    year = {1996}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -P &lt;num&gt;
 *  Percentage of weight mass to base training on.
 *  (default 100, reduce to around 90 to speed up)</pre>
 *
 * <pre> -Q
 *  Use resampling for boosting.</pre>
 *
 * <pre> -S &lt;num&gt;
 *  Random number seed.
 *  (default 1)</pre>
 *
 * <pre> -I &lt;num&gt;
 *  Number of iterations.
 *  (default 10)</pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 * <pre> -W
 *  Full name of base classifier.
 *  (default: weka.classifiers.trees.DecisionStump)</pre>
 *
 * <pre>
 * Options specific to classifier weka.classifiers.trees.DecisionStump:
 * </pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 <!-- options-end -->
 *
 * Options after -- are passed to the designated classifier.<p>
 *
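 * For example, an illustrative command line (data.arff is a placeholder for
 * a training file; -t is the standard Weka option naming that file):<p>
 *
 * <pre> java weka.classifiers.meta.AdaBoostM1 -t data.arff -I 20 -P 90 </pre>
 *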
 * @author Eibe Frank (eibe@cs.waikato.ac.nz)
 * @author Len Trigg (trigg@cs.waikato.ac.nz)
 * @version $Revision: 5928 $
 */
public class AdaBoostM1 
  extends RandomizableIteratedSingleClassifierEnhancer
  implements WeightedInstancesHandler, Sourcable, TechnicalInformationHandler {

  /** for serialization */
  static final long serialVersionUID = -7378107808933117974L;

  /** Max num iterations tried to find classifier with non-zero error. */
  private static int MAX_NUM_RESAMPLING_ITERATIONS = 10;

  /** Array for storing the weights for the votes. */
  protected double [] m_Betas;

  /** The number of successfully generated base classifiers. */
  protected int m_NumIterationsPerformed;

  /** Weight Threshold. The percentage of weight mass used in training */
  protected int m_WeightThreshold = 100;

  /** Use resampling instead of reweighting? */
  protected boolean m_UseResampling;

  /** The number of classes */
  protected int m_NumClasses;

  /** a ZeroR model in case no model can be built from the data */
  protected Classifier m_ZeroR;

  /**
   * Constructor.
   */
  public AdaBoostM1() {

    m_Classifier = new weka.classifiers.trees.DecisionStump();
  }

  /**
   * Returns a string describing classifier
   * @return a description suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {

    return "Class for boosting a nominal class classifier using the Adaboost "
      + "M1 method. Only nominal class problems can be tackled. Often "
      + "dramatically improves performance, but sometimes overfits.\n\n"
      + "For more information, see\n\n"
      + getTechnicalInformation().toString();
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation        result;

    result = new TechnicalInformation(Type.INPROCEEDINGS);
    result.setValue(Field.AUTHOR, "Yoav Freund and Robert E. Schapire");
    result.setValue(Field.TITLE, "Experiments with a new boosting algorithm");
    result.setValue(Field.BOOKTITLE, "Thirteenth International Conference on Machine Learning");
    result.setValue(Field.YEAR, "1996");
    result.setValue(Field.PAGES, "148-156");
    result.setValue(Field.PUBLISHER, "Morgan Kaufmann");
    result.setValue(Field.ADDRESS, "San Francisco");

    return result;
  }

  /**
   * String describing default classifier.
   *
   * @return the default classifier classname
   */
  protected String defaultClassifierString() {

    return "weka.classifiers.trees.DecisionStump";
  }

  /**
   * Select only instances with weights that contribute to
   * the specified quantile of the weight distribution
   *
   * @param data the input instances
   * @param quantile the specified quantile, e.g. 0.9 to select
   * 90% of the weight mass
   * @return the selected instances
   */
  protected Instances selectWeightQuantile(Instances data, double quantile) {

    int numInstances = data.numInstances();
    Instances trainData = new Instances(data, numInstances);
    double [] weights = new double [numInstances];

    double sumOfWeights = 0;
    for(int i = 0; i < numInstances; i++) {
      weights[i] = data.instance(i).weight();
      sumOfWeights += weights[i];
    }
    double weightMassToSelect = sumOfWeights * quantile;
    int [] sortedIndices = Utils.sort(weights);

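    // Utils.sort returns indices into weights in ascending order, so walking
    // the indices from the back visits instances from heaviest to lightest;
    // the loop keeps adding instances until the requested share of the total
    // weight mass is covered, and continues past the threshold while
    // successive weights are tied so equal-weight instances are treated alike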
    // Select the instances
    sumOfWeights = 0;
    for(int i = numInstances - 1; i >= 0; i--) {
      Instance instance = (Instance)data.instance(sortedIndices[i]).copy();
      trainData.add(instance);
      sumOfWeights += weights[sortedIndices[i]];
      if ((sumOfWeights > weightMassToSelect) && 
          (i > 0) && 
          (weights[sortedIndices[i]] != weights[sortedIndices[i - 1]])) {
        break;
      }
    }
    if (m_Debug) {
      System.err.println("Selected " + trainData.numInstances()
                         + " out of " + numInstances);
    }
    return trainData;
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {

    Vector newVector = new Vector();

    newVector.addElement(new Option(
        "\tPercentage of weight mass to base training on.\n"
        +"\t(default 100, reduce to around 90 to speed up)",
        "P", 1, "-P <num>"));

    newVector.addElement(new Option(
        "\tUse resampling for boosting.",
        "Q", 0, "-Q"));

    Enumeration enu = super.listOptions();
    while (enu.hasMoreElements()) {
      newVector.addElement(enu.nextElement());
    }

    return newVector.elements();
  }


  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -P &lt;num&gt;
   *  Percentage of weight mass to base training on.
   *  (default 100, reduce to around 90 to speed up)</pre>
   *
   * <pre> -Q
   *  Use resampling for boosting.</pre>
   *
   * <pre> -S &lt;num&gt;
   *  Random number seed.
   *  (default 1)</pre>
   *
   * <pre> -I &lt;num&gt;
   *  Number of iterations.
   *  (default 10)</pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   * <pre> -W
   *  Full name of base classifier.
   *  (default: weka.classifiers.trees.DecisionStump)</pre>
   *
   * <pre>
   * Options specific to classifier weka.classifiers.trees.DecisionStump:
   * </pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   <!-- options-end -->
   *
   * Options after -- are passed to the designated classifier.<p>
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    String thresholdString = Utils.getOption('P', options);
    if (thresholdString.length() != 0) {
      setWeightThreshold(Integer.parseInt(thresholdString));
    } else {
      setWeightThreshold(100);
    }

    setUseResampling(Utils.getFlag('Q', options));

    super.setOptions(options);
  }

  /**
   * Gets the current settings of the Classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {
    Vector        result;
    String[]      options;
    int           i;

    result = new Vector();

    if (getUseResampling())
      result.add("-Q");

    result.add("-P");
    result.add("" + getWeightThreshold());

    options = super.getOptions();
    for (i = 0; i < options.length; i++)
      result.add(options[i]);

    return (String[]) result.toArray(new String[result.size()]);
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String weightThresholdTipText() {
    return "Weight threshold for weight pruning.";
  }

  /**
   * Set weight threshold
   *
   * @param threshold the percentage of weight mass used for training
   */
  public void setWeightThreshold(int threshold) {

    m_WeightThreshold = threshold;
  }

  /**
   * Get the degree of weight thresholding
   *
   * @return the percentage of weight mass used for training
   */
  public int getWeightThreshold() {

    return m_WeightThreshold;
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String useResamplingTipText() {
    return "Whether resampling is used instead of reweighting.";
  }

  /**
   * Set resampling mode
   *
   * @param r true if resampling should be done
   */
  public void setUseResampling(boolean r) {

    m_UseResampling = r;
  }

  /**
   * Get whether resampling is turned on
   *
   * @return true if resampling is turned on
   */
  public boolean getUseResampling() {

    return m_UseResampling;
  }

  /**
   * Returns default capabilities of the classifier.
   *
   * @return      the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result = super.getCapabilities();

    // class
    result.disableAllClasses();
    result.disableAllClassDependencies();
    if (super.getCapabilities().handles(Capability.NOMINAL_CLASS))
      result.enable(Capability.NOMINAL_CLASS);
    if (super.getCapabilities().handles(Capability.BINARY_CLASS))
      result.enable(Capability.BINARY_CLASS);

    return result;
  }

  /**
   * Boosting method.
   *
   * @param data the training data to be used for generating the
   * boosted classifier.
   * @throws Exception if the classifier could not be built successfully
   */
  public void buildClassifier(Instances data) throws Exception {

    super.buildClassifier(data);

    // can classifier handle the data?
    getCapabilities().testWithFail(data);

    // remove instances with missing class
    data = new Instances(data);
    data.deleteWithMissingClass();

    // only class? -> build ZeroR model
    if (data.numAttributes() == 1) {
      System.err.println(
          "Cannot build model (only class attribute present in data!), "
          + "using ZeroR model instead!");
      m_ZeroR = new weka.classifiers.rules.ZeroR();
      m_ZeroR.buildClassifier(data);
      return;
    }
    else {
      m_ZeroR = null;
    }

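    // Reweighting is only possible if the base classifier can make use of
    // instance weights directly; otherwise, or when resampling was requested
    // explicitly via -Q, boosting falls back to weighted resampling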
    m_NumClasses = data.numClasses();
    if ((!m_UseResampling) && 
        (m_Classifier instanceof WeightedInstancesHandler)) {
      buildClassifierWithWeights(data);
    } else {
      buildClassifierUsingResampling(data);
    }
  }

  /**
   * Boosting method. Boosts using resampling
   *
   * @param data the training data to be used for generating the
   * boosted classifier.
   * @throws Exception if the classifier could not be built successfully
   */
  protected void buildClassifierUsingResampling(Instances data) 
    throws Exception {

    Instances trainData, sample, training;
    double epsilon, reweight, sumProbs;
    Evaluation evaluation;
    int numInstances = data.numInstances();
    Random randomInstance = new Random(m_Seed);
    int resamplingIterations = 0;

    // Initialize data
    m_Betas = new double [m_Classifiers.length];
    m_NumIterationsPerformed = 0;
    // Create a copy of the data so that when the weights are diddled
    // with it doesn't mess up the weights for anyone else
    training = new Instances(data, 0, numInstances);
    sumProbs = training.sumOfWeights();
    for (int i = 0; i < training.numInstances(); i++) {
      training.instance(i).setWeight(training.instance(i).
                                      weight() / sumProbs);
    }

    // Do boosting iterations
    for (m_NumIterationsPerformed = 0; m_NumIterationsPerformed < m_Classifiers.length; 
         m_NumIterationsPerformed++) {
      if (m_Debug) {
        System.err.println("Training classifier " + (m_NumIterationsPerformed + 1));
      }

      // Select instances to train the classifier on
      if (m_WeightThreshold < 100) {
        trainData = selectWeightQuantile(training, 
                                         (double)m_WeightThreshold / 100);
      } else {
        trainData = new Instances(training);
      }

      // Resample
      resamplingIterations = 0;
      double[] weights = new double[trainData.numInstances()];
      for (int i = 0; i < weights.length; i++) {
        weights[i] = trainData.instance(i).weight();
      }
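      // Draw a bootstrap sample according to the current weights and train a
      // base classifier on it; if the classifier fits the training data
      // perfectly (zero error, which would make beta undefined), redraw and
      // retrain, up to MAX_NUM_RESAMPLING_ITERATIONS times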
      do {
        sample = trainData.resampleWithWeights(randomInstance, weights);

        // Build and evaluate classifier
        m_Classifiers[m_NumIterationsPerformed].buildClassifier(sample);
        evaluation = new Evaluation(data);
        evaluation.evaluateModel(m_Classifiers[m_NumIterationsPerformed], 
                                 training);
        epsilon = evaluation.errorRate();
        resamplingIterations++;
      } while (Utils.eq(epsilon, 0) && 
              (resamplingIterations < MAX_NUM_RESAMPLING_ITERATIONS));

      // Stop if the error is too large (>= 0.5, no better than chance) or zero
      if (Utils.grOrEq(epsilon, 0.5) || Utils.eq(epsilon, 0)) {
        if (m_NumIterationsPerformed == 0) {
          m_NumIterationsPerformed = 1; // If we're the first we have to use it
        }
        break;
      }

      // Determine the weight to assign to this model:
      // beta = ln((1 - epsilon) / epsilon) is the model's vote in the final
      // weighted majority, and the same ratio reweights the misclassified
      // training instances below
      m_Betas[m_NumIterationsPerformed] = Math.log((1 - epsilon) / epsilon);
      reweight = (1 - epsilon) / epsilon;
      if (m_Debug) {
        System.err.println("\terror rate = " + epsilon
                           +"  beta = " + m_Betas[m_NumIterationsPerformed]);
      }

      // Update instance weights
      setWeights(training, reweight);
    }
  }

  /**
   * Sets the weights for the next iteration.
   *
   * @param training the training instances
   * @param reweight the reweighting factor
   * @throws Exception if something goes wrong
   */
  protected void setWeights(Instances training, double reweight) 
    throws Exception {

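    // AdaBoost.M1 update: each misclassified instance has its weight
    // multiplied by reweight = (1 - epsilon) / epsilon (> 1 whenever
    // epsilon < 0.5), after which all weights are rescaled so the total
    // weight mass stays the same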
    double oldSumOfWeights, newSumOfWeights;

    oldSumOfWeights = training.sumOfWeights();
    Enumeration enu = training.enumerateInstances();
    while (enu.hasMoreElements()) {
      Instance instance = (Instance) enu.nextElement();
      if (!Utils.eq(m_Classifiers[m_NumIterationsPerformed].classifyInstance(instance), 
                    instance.classValue()))
        instance.setWeight(instance.weight() * reweight);
    }

    // Renormalize weights
    newSumOfWeights = training.sumOfWeights();
    enu = training.enumerateInstances();
    while (enu.hasMoreElements()) {
      Instance instance = (Instance) enu.nextElement();
      instance.setWeight(instance.weight() * oldSumOfWeights
                         / newSumOfWeights);
    }
  }

  /**
   * Boosting method. Boosts any classifier that can handle weighted
   * instances.
   *
   * @param data the training data to be used for generating the
   * boosted classifier.
   * @throws Exception if the classifier could not be built successfully
   */
  protected void buildClassifierWithWeights(Instances data) 
    throws Exception {

    Instances trainData, training;
    double epsilon, reweight;
    Evaluation evaluation;
    int numInstances = data.numInstances();
    Random randomInstance = new Random(m_Seed);

    // Initialize data
    m_Betas = new double [m_Classifiers.length];
    m_NumIterationsPerformed = 0;

    // Create a copy of the data so that when the weights are diddled
    // with it doesn't mess up the weights for anyone else
    training = new Instances(data, 0, numInstances);

    // Do boosting iterations
    for (m_NumIterationsPerformed = 0; m_NumIterationsPerformed < m_Classifiers.length; 
         m_NumIterationsPerformed++) {
      if (m_Debug) {
        System.err.println("Training classifier " + (m_NumIterationsPerformed + 1));
      }
      // Select instances to train the classifier on
      if (m_WeightThreshold < 100) {
        trainData = selectWeightQuantile(training, 
                                         (double)m_WeightThreshold / 100);
      } else {
        trainData = new Instances(training, 0, numInstances);
      }

      // Build the classifier
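      // (a Randomizable base classifier gets a fresh seed from the boosting
      // run's random number generator, so repeated iterations on the same
      // weighted data need not produce identical models)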
      if (m_Classifiers[m_NumIterationsPerformed] instanceof Randomizable)
        ((Randomizable) m_Classifiers[m_NumIterationsPerformed]).setSeed(randomInstance.nextInt());
      m_Classifiers[m_NumIterationsPerformed].buildClassifier(trainData);

      // Evaluate the classifier
      evaluation = new Evaluation(data);
      evaluation.evaluateModel(m_Classifiers[m_NumIterationsPerformed], training);
      epsilon = evaluation.errorRate();

      // Stop if the error is zero or too large (>= 0.5); this model is not
      // used unless it is the first one
      if (Utils.grOrEq(epsilon, 0.5) || Utils.eq(epsilon, 0)) {
        if (m_NumIterationsPerformed == 0) {
          m_NumIterationsPerformed = 1; // If we're the first we have to use it
        }
        break;
      }
      // Determine the weight to assign to this model
      m_Betas[m_NumIterationsPerformed] = Math.log((1 - epsilon) / epsilon);
      reweight = (1 - epsilon) / epsilon;
      if (m_Debug) {
        System.err.println("\terror rate = " + epsilon
                           +"  beta = " + m_Betas[m_NumIterationsPerformed]);
      }

      // Update instance weights
      setWeights(training, reweight);
    }
  }

  /**
   * Calculates the class membership probabilities for the given test instance.
   *
   * @param instance the instance to be classified
   * @return predicted class probability distribution
   * @throws Exception if instance could not be classified
   * successfully
   */
  public double [] distributionForInstance(Instance instance) 
    throws Exception {

    // default model?
    if (m_ZeroR != null) {
      return m_ZeroR.distributionForInstance(instance);
    }

    if (m_NumIterationsPerformed == 0) {
      throw new Exception("No model built");
    }
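    // Weighted majority vote: each base classifier adds its beta (the
    // log-odds of its training accuracy) to the class it predicts, and
    // Utils.logs2probs exponentiates and normalizes the accumulated
    // scores into a probability distribution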
    double [] sums = new double [instance.numClasses()];

    if (m_NumIterationsPerformed == 1) {
      return m_Classifiers[0].distributionForInstance(instance);
    } else {
      for (int i = 0; i < m_NumIterationsPerformed; i++) {
        sums[(int)m_Classifiers[i].classifyInstance(instance)] += m_Betas[i];
      }
      return Utils.logs2probs(sums);
    }
  }

  /**
   * Returns the boosted model as Java source code.
   *
   * @param className the classname of the generated class
   * @return the tree as Java source code
   * @throws Exception if something goes wrong
   */
  public String toSource(String className) throws Exception {

    if (m_NumIterationsPerformed == 0) {
      throw new Exception("No model built yet");
    }
    if (!(m_Classifiers[0] instanceof Sourcable)) {
      throw new Exception("Base learner " + m_Classifier.getClass().getName()
                          + " is not Sourcable");
    }

    StringBuffer text = new StringBuffer("class ");
    text.append(className).append(" {\n\n");

    text.append("  public static double classify(Object[] i) {\n");

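    // The generated classify(Object[]) mirrors distributionForInstance():
    // a single model simply delegates, otherwise each sub-model's beta is
    // accumulated in sums[] and the index of the largest entry is returned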
    if (m_NumIterationsPerformed == 1) {
      text.append("    return " + className + "_0.classify(i);\n");
    } else {
      text.append("    double [] sums = new double [" + m_NumClasses + "];\n");
      for (int i = 0; i < m_NumIterationsPerformed; i++) {
        text.append("    sums[(int) " + className + '_' + i
                    + ".classify(i)] += " + m_Betas[i] + ";\n");
      }
      text.append("    double maxV = sums[0];\n" +
                  "    int maxI = 0;\n"+
                  "    for (int j = 1; j < " + m_NumClasses + "; j++) {\n"+
                  "      if (sums[j] > maxV) { maxV = sums[j]; maxI = j; }\n"+
                  "    }\n    return (double) maxI;\n");
    }
    text.append("  }\n}\n");

    // Only the models actually used by classify() are emitted; classifiers
    // beyond m_NumIterationsPerformed may never have been trained
    for (int i = 0; i < m_NumIterationsPerformed; i++) {
      text.append(((Sourcable)m_Classifiers[i])
                  .toSource(className + '_' + i));
    }
    return text.toString();
  }

  /**
   * Returns description of the boosted classifier.
   *
   * @return description of the boosted classifier as a string
   */
  public String toString() {

    // only ZeroR model?
    if (m_ZeroR != null) {
      StringBuffer buf = new StringBuffer();
      buf.append(this.getClass().getName().replaceAll(".*\\.", "") + "\n");
      buf.append(this.getClass().getName().replaceAll(".*\\.", "").replaceAll(".", "=") + "\n\n");
      buf.append("Warning: No model could be built, hence ZeroR model is used:\n\n");
      buf.append(m_ZeroR.toString());
      return buf.toString();
    }

    StringBuffer text = new StringBuffer();

    if (m_NumIterationsPerformed == 0) {
      text.append("AdaBoostM1: No model built yet.\n");
    } else if (m_NumIterationsPerformed == 1) {
      text.append("AdaBoostM1: No boosting possible, one classifier used!\n");
      text.append(m_Classifiers[0].toString() + "\n");
    } else {
      text.append("AdaBoostM1: Base classifiers and their weights: \n\n");
      for (int i = 0; i < m_NumIterationsPerformed ; i++) {
        text.append(m_Classifiers[i].toString() + "\n\n");
        text.append("Weight: " + Utils.roundDouble(m_Betas[i], 2) + "\n\n");
      }
      text.append("Number of performed iterations: " 
                  + m_NumIterationsPerformed + "\n");
    }

    return text.toString();
  }

  /**
   * Returns the revision string.
   *
   * @return            the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 5928 $");
  }

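  /*
   * A minimal usage sketch (illustrative only; assumes a nominal-class
   * dataset already loaded into an Instances object named "data"):
   *
   *   data.setClassIndex(data.numAttributes() - 1);
   *   AdaBoostM1 booster = new AdaBoostM1();
   *   booster.setNumIterations(20);     // same as -I 20
   *   booster.setWeightThreshold(90);   // same as -P 90
   *   booster.buildClassifier(data);
   *   double[] dist = booster.distributionForInstance(data.instance(0));
   */
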
  /**
   * Main method for testing this class.
   *
   * @param argv the options
   */
  public static void main(String [] argv) {
    runClassifier(new AdaBoostM1(), argv);
  }
}