source: branches/MetisMQI/src/main/java/weka/classifiers/meta/RealAdaBoost.java @ 37

Last change on this file since 37 was 29, checked in by gnappo, 14 years ago

Tagged the version for the demo and added a branch.

File size: 21.5 KB
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    RealAdaBoost.java
 *    Copyright (C) 1999, 2009 University of Waikato, Hamilton, New Zealand
 *
 */

package weka.classifiers.meta;

import weka.classifiers.Classifier;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.RandomizableIteratedSingleClassifierEnhancer;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.Randomizable;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;

import java.util.Enumeration;
import java.util.Random;
import java.util.Vector;

/**
 <!-- globalinfo-start -->
 * Class for boosting a 2-class classifier using the Real Adaboost method.<br/>
 * <br/>
 * For more information, see<br/>
 * <br/>
 * J. Friedman, T. Hastie, R. Tibshirani (2000). Additive Logistic Regression: a Statistical View of Boosting. Annals of Statistics. 28(2):337-407.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;article{Friedman2000,
 *    author = {J. Friedman and T. Hastie and R. Tibshirani},
 *    journal = {Annals of Statistics},
 *    number = {2},
 *    pages = {337-407},
 *    title = {Additive Logistic Regression: a Statistical View of Boosting},
 *    volume = {28},
 *    year = {2000}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -P &lt;num&gt;
 *  Percentage of weight mass to base training on.
 *  (default 100, reduce to around 90 to speed up)</pre>
 *
 * <pre> -Q
 *  Use resampling for boosting.</pre>
 *
 * <pre> -H &lt;num&gt;
 *  Shrinkage parameter.
 *  (default 1)</pre>
 *
 * <pre> -S &lt;num&gt;
 *  Random number seed.
 *  (default 1)</pre>
 *
 * <pre> -I &lt;num&gt;
 *  Number of iterations.
 *  (default 10)</pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 * <pre> -W
 *  Full name of base classifier.
 *  (default: weka.classifiers.trees.DecisionStump)</pre>
 *
 * <pre>
 * Options specific to classifier weka.classifiers.trees.DecisionStump:
 * </pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 <!-- options-end -->
 *
 * Options after -- are passed to the designated classifier.<p>
 *
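 * A minimal command-line usage sketch (the dataset file name and the
 * option values here are illustrative only, not taken from this codebase):
 *
 * <pre>
 * java weka.classifiers.meta.RealAdaBoost -I 10 -H 0.1 \
 *     -W weka.classifiers.trees.DecisionStump -t my-binary-data.arff
 * </pre>
 * <p>
 *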
 * @author Eibe Frank (eibe@cs.waikato.ac.nz)
 * @author Len Trigg (trigg@cs.waikato.ac.nz)
 * @version $Revision: 6136 $
 */
public class RealAdaBoost 
  extends RandomizableIteratedSingleClassifierEnhancer
  implements WeightedInstancesHandler, TechnicalInformationHandler {

  /** for serialization */
  static final long serialVersionUID = -7378109809933197974L;

  /** The number of successfully generated base classifiers. */
  protected int m_NumIterationsPerformed;

  /** Weight threshold. The percentage of weight mass used in training */
  protected int m_WeightThreshold = 100;

  /** The value of the shrinkage parameter */
  protected double m_Shrinkage = 1;

  /** Use boosting with resampling? */
  protected boolean m_UseResampling;

  /** A ZeroR model, used as the first (prior) model of the ensemble */
  protected Classifier m_ZeroR;

  /** Sum of weights on training data */
  protected double m_SumOfWeights;

  /**
   * Constructor.
   */
  public RealAdaBoost() {

    m_Classifier = new weka.classifiers.trees.DecisionStump();
  }

  /**
   * Returns a string describing this classifier.
   * @return a description suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {

    return "Class for boosting a 2-class classifier using the Real Adaboost method.\n\n"
      + "For more information, see\n\n"
      + getTechnicalInformation().toString();
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation        result;

    result = new TechnicalInformation(Type.ARTICLE);
    result.setValue(Field.AUTHOR, "J. Friedman and T. Hastie and R. Tibshirani");
    result.setValue(Field.TITLE, "Additive Logistic Regression: a Statistical View of Boosting");
    result.setValue(Field.JOURNAL, "Annals of Statistics");
    result.setValue(Field.VOLUME, "28");
    result.setValue(Field.NUMBER, "2");
    result.setValue(Field.PAGES, "337-407");
    result.setValue(Field.YEAR, "2000");

    return result;
  }

  /**
   * String describing default classifier.
   *
   * @return the default classifier classname
   */
  protected String defaultClassifierString() {

    return "weka.classifiers.trees.DecisionStump";
  }

  /**
   * Select only instances with weights that contribute to
   * the specified quantile of the weight distribution.
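   * <p>
   * A worked example with illustrative numbers (not from the original
   * source): for weights {4, 3, 2, 1} and quantile 0.8 the target mass is
   * 0.8 * 10 = 8. Scanning from the heaviest instance gives running sums
   * 4, 7, 9, so the loop stops once 9 &gt; 8 (the next weight differs from
   * the current one) and the instances weighted 4, 3 and 2 are selected.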
   *
   * @param data the input instances
   * @param quantile the specified quantile, e.g., 0.9 to select
   * 90% of the weight mass
   * @return the selected instances
   */
  protected Instances selectWeightQuantile(Instances data, double quantile) { 

    int numInstances = data.numInstances();
    Instances trainData = new Instances(data, numInstances);
    double [] weights = new double [numInstances];

    double sumOfWeights = 0;
    for(int i = 0; i < numInstances; i++) {
      weights[i] = data.instance(i).weight();
      sumOfWeights += weights[i];
    }
    double weightMassToSelect = sumOfWeights * quantile;
    int [] sortedIndices = Utils.sort(weights);

    // Select the instances
    sumOfWeights = 0;
    for(int i = numInstances - 1; i >= 0; i--) {
      Instance instance = (Instance)data.instance(sortedIndices[i]).copy();
      trainData.add(instance);
      sumOfWeights += weights[sortedIndices[i]];
      if ((sumOfWeights > weightMassToSelect) && 
          (i > 0) && 
          (weights[sortedIndices[i]] != weights[sortedIndices[i - 1]])) {
        break;
      }
    }
    if (m_Debug) {
      System.err.println("Selected " + trainData.numInstances()
                         + " out of " + numInstances);
    }
    return trainData;
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {

    Vector newVector = new Vector();

    newVector.addElement(new Option(
        "\tPercentage of weight mass to base training on.\n"
        +"\t(default 100, reduce to around 90 to speed up)",
        "P", 1, "-P <num>"));

    newVector.addElement(new Option(
        "\tUse resampling for boosting.",
        "Q", 0, "-Q"));

    newVector.addElement(new Option(
              "\tShrinkage parameter.\n"
              +"\t(default 1)",
              "H", 1, "-H <num>"));

    Enumeration enu = super.listOptions();
    while (enu.hasMoreElements()) {
      newVector.addElement(enu.nextElement());
    }

    return newVector.elements();
  }

  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -P &lt;num&gt;
   *  Percentage of weight mass to base training on.
   *  (default 100, reduce to around 90 to speed up)</pre>
   *
   * <pre> -Q
   *  Use resampling for boosting.</pre>
   *
   * <pre> -H &lt;num&gt;
   *  Shrinkage parameter.
   *  (default 1)</pre>
   *
   * <pre> -S &lt;num&gt;
   *  Random number seed.
   *  (default 1)</pre>
   *
   * <pre> -I &lt;num&gt;
   *  Number of iterations.
   *  (default 10)</pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   * <pre> -W
   *  Full name of base classifier.
   *  (default: weka.classifiers.trees.DecisionStump)</pre>
   *
   * <pre>
   * Options specific to classifier weka.classifiers.trees.DecisionStump:
   * </pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   <!-- options-end -->
   *
   * Options after -- are passed to the designated classifier.<p>
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    String thresholdString = Utils.getOption('P', options);
    if (thresholdString.length() != 0) {
      setWeightThreshold(Integer.parseInt(thresholdString));
    } else {
      setWeightThreshold(100);
    }

    String shrinkageString = Utils.getOption('H', options);
    if (shrinkageString.length() != 0) {
      setShrinkage(Double.parseDouble(shrinkageString));
    } else {
      setShrinkage(1.0);
    }

    setUseResampling(Utils.getFlag('Q', options));

    super.setOptions(options);
  }

  /**
   * Gets the current settings of the Classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {
    Vector        result;
    String[]      options;
    int           i;

    result = new Vector();

    if (getUseResampling())
      result.add("-Q");

    result.add("-P");
    result.add("" + getWeightThreshold());

    result.add("-H");
    result.add("" + getShrinkage());

    options = super.getOptions();
    for (i = 0; i < options.length; i++)
      result.add(options[i]);

    return (String[]) result.toArray(new String[result.size()]);
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String shrinkageTipText() {
    return "Shrinkage parameter (use small value like 0.1 to reduce "
      + "overfitting).";
  }

  /**
   * Get the value of Shrinkage.
   *
   * @return Value of Shrinkage.
   */
  public double getShrinkage() {

    return m_Shrinkage;
  }

  /**
   * Set the value of Shrinkage.
   *
   * @param newShrinkage Value to assign to Shrinkage.
   */
  public void setShrinkage(double newShrinkage) {

    m_Shrinkage = newShrinkage;
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String weightThresholdTipText() {
    return "Weight threshold for weight pruning.";
  }

  /**
   * Set weight threshold
   *
   * @param threshold the percentage of weight mass used for training
   */
  public void setWeightThreshold(int threshold) {

    m_WeightThreshold = threshold;
  }

  /**
   * Get the degree of weight thresholding
   *
   * @return the percentage of weight mass used for training
   */
  public int getWeightThreshold() {

    return m_WeightThreshold;
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String useResamplingTipText() {
    return "Whether resampling is used instead of reweighting.";
  }

  /**
   * Set resampling mode
   *
   * @param r true if resampling should be done
   */
  public void setUseResampling(boolean r) {

    m_UseResampling = r;
  }

  /**
   * Get whether resampling is turned on
   *
   * @return true if resampling is used
   */
  public boolean getUseResampling() {

    return m_UseResampling;
  }

  /**
   * Returns default capabilities of the classifier.
   *
   * @return      the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result = super.getCapabilities();

    // class: only binary class problems are supported
    result.disableAllClasses();
    result.disableAllClassDependencies();
    if (super.getCapabilities().handles(Capability.BINARY_CLASS))
      result.enable(Capability.BINARY_CLASS);

    return result;
  }

  /**
   * Boosting method.
   *
   * @param data the training data to be used for generating the
   * boosted classifier.
   * @throws Exception if the classifier could not be built successfully
   */
  public void buildClassifier(Instances data) throws Exception {

    super.buildClassifier(data);

    // can classifier handle the data?
    getCapabilities().testWithFail(data);

    // remove instances with missing class
    data = new Instances(data);
    data.deleteWithMissingClass();

    m_SumOfWeights = data.sumOfWeights();

    if ((!m_UseResampling) && 
        (m_Classifier instanceof WeightedInstancesHandler)) {
      buildClassifierWithWeights(data);
    } else {
      buildClassifierUsingResampling(data);
    }
  }

  /**
   * Boosting method. Boosts using resampling.
   *
   * @param data the training data to be used for generating the
   * boosted classifier.
   * @throws Exception if the classifier could not be built successfully
   */
  protected void buildClassifierUsingResampling(Instances data) 
    throws Exception {

    Instances trainData, sample, training, trainingWeightsNotNormalized;
    int numInstances = data.numInstances();
    Random randomInstance = new Random(m_Seed);
    double minLoss = Double.MAX_VALUE;

    // Create a copy of the data so that when the weights are diddled
    // with it doesn't mess up the weights for anyone else
    trainingWeightsNotNormalized = new Instances(data, 0, numInstances);

    // Do bootstrap iterations
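    // (A sketch of the loop structure below: iteration -1 fits a ZeroR
    // "prior" model on the raw data; iterations 0 .. numIterations-1 fit
    // the actual base classifiers on reweighted bootstrap samples.)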
    for (m_NumIterationsPerformed = -1; m_NumIterationsPerformed < m_Classifiers.length; 
         m_NumIterationsPerformed++) {
      if (m_Debug) {
        System.err.println("Training classifier " + (m_NumIterationsPerformed + 1));
      }

      training = new Instances(trainingWeightsNotNormalized);
      normalizeWeights(training, 1.0);

      // Select instances to train the classifier on
      if (m_WeightThreshold < 100) {
        trainData = selectWeightQuantile(training, 
                                         (double)m_WeightThreshold / 100);
      } else {
        trainData = new Instances(training);
      }

      // Resample
      double[] weights = new double[trainData.numInstances()];
      for (int i = 0; i < weights.length; i++) {
        weights[i] = trainData.instance(i).weight();
      }

      sample = trainData.resampleWithWeights(randomInstance, weights);

      // Build classifier
      if (m_NumIterationsPerformed == -1) {
        m_ZeroR = new weka.classifiers.rules.ZeroR();
        m_ZeroR.buildClassifier(data);
      } else {
        m_Classifiers[m_NumIterationsPerformed].buildClassifier(sample);
      }

      // Update instance weights
      setWeights(trainingWeightsNotNormalized, m_NumIterationsPerformed);

      // Has progress been made?
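      // (The sum of log instance weights serves as a progress monitor:
      // if it increases from one iteration to the next, boosting stops
      // early and keeps the classifiers built so far.)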
      double loss = 0;
      for (Instance inst : trainingWeightsNotNormalized) {
        loss += Math.log(inst.weight());
      }
      if (m_Debug) {
        System.err.println("Current loss on log scale: " + loss);
      }
      if ((m_NumIterationsPerformed > -1) && (loss > minLoss)) {
        if (m_Debug) {
          System.err.println("Loss has increased: bailing out.");
        }
        break;
      }
      minLoss = loss;
    }
  }

  /**
   * Sets the weights for the next iteration.
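   * <p>
   * A note on the update implemented below: with p the probability the
   * current model assigns to class 0, each instance's weight is multiplied
   * by exp(shrinkage * 0.5 * ln(p_wrong / p_correct)), where p_correct is
   * the probability of the instance's true class, so misclassified
   * instances gain weight.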
   *
   * @param training the training instances
   * @param iteration the boosting iteration (-1 refers to the initial ZeroR model)
   * @throws Exception if something goes wrong
   */
  protected void setWeights(Instances training, int iteration) 
    throws Exception {

    for (Instance instance: training) {
      double reweight = 1;
      double prob = 1, shrinkage = m_Shrinkage;

      if (iteration == -1) {
        prob = m_ZeroR.distributionForInstance(instance)[0]; 
        shrinkage = 1.0;
      } else {
        prob = m_Classifiers[iteration].distributionForInstance(instance)[0]; 

        // Make sure that probabilities are never 0 or 1 using ad-hoc smoothing
        prob = (m_SumOfWeights * prob + 1) / (m_SumOfWeights + 2);
      }

      if (instance.classValue() == 1) {
        reweight = shrinkage * 0.5 * (Math.log(prob) - Math.log(1 - prob));
      } else {
        reweight = shrinkage * 0.5 * (Math.log(1 - prob) - Math.log(prob));
      }
      instance.setWeight(instance.weight() * Math.exp(reweight));
    }
  }

  /**
   * Normalize the weights for the next iteration.
   *
   * @param training the training instances
   * @param oldSumOfWeights the sum the instance weights should add up to after normalization
   * @throws Exception if something goes wrong
   */
  protected void normalizeWeights(Instances training, double oldSumOfWeights) 
    throws Exception {

    // Renormalize weights
    double newSumOfWeights = training.sumOfWeights();
    for (Instance instance: training) {
      instance.setWeight(instance.weight() * oldSumOfWeights / newSumOfWeights);
    }
  }

  /**
   * Boosting method. Boosts any classifier that can handle weighted
   * instances.
   *
   * @param data the training data to be used for generating the
   * boosted classifier.
   * @throws Exception if the classifier could not be built successfully
   */
  protected void buildClassifierWithWeights(Instances data) 
    throws Exception {

    Instances trainData, training, trainingWeightsNotNormalized;
    int numInstances = data.numInstances();
    Random randomInstance = new Random(m_Seed);
    double minLoss = Double.MAX_VALUE;

    // Create a copy of the data so that when the weights are diddled
    // with it doesn't mess up the weights for anyone else
    trainingWeightsNotNormalized = new Instances(data, 0, numInstances);

    // Do bootstrap iterations
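    // (This mirrors buildClassifierUsingResampling, except that the
    // weighted instances are handed directly to base classifiers that
    // implement WeightedInstancesHandler instead of being resampled.)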
    for (m_NumIterationsPerformed = -1; m_NumIterationsPerformed < m_Classifiers.length; 
         m_NumIterationsPerformed++) {
      if (m_Debug) {
        System.err.println("Training classifier " + (m_NumIterationsPerformed + 1));
      }

      training = new Instances(trainingWeightsNotNormalized);
      normalizeWeights(training, m_SumOfWeights);

      // Select instances to train the classifier on
      if (m_WeightThreshold < 100) {
        trainData = selectWeightQuantile(training, 
                                         (double)m_WeightThreshold / 100);
      } else {
        trainData = new Instances(training, 0, numInstances);
      }

      // Build classifier
      if (m_NumIterationsPerformed == -1) {
        m_ZeroR = new weka.classifiers.rules.ZeroR();
        m_ZeroR.buildClassifier(data);
      } else {
        if (m_Classifiers[m_NumIterationsPerformed] instanceof Randomizable)
          ((Randomizable) m_Classifiers[m_NumIterationsPerformed]).setSeed(randomInstance.nextInt());
        m_Classifiers[m_NumIterationsPerformed].buildClassifier(trainData);
      }

      // Update instance weights
      setWeights(trainingWeightsNotNormalized, m_NumIterationsPerformed);

      // Has progress been made?
      double loss = 0;
      for (Instance inst : trainingWeightsNotNormalized) {
        loss += Math.log(inst.weight());
      }
      if (m_Debug) {
        System.err.println("Current loss on log scale: " + loss);
      }
      if ((m_NumIterationsPerformed > -1) && (loss > minLoss)) {
        if (m_Debug) {
          System.err.println("Loss has increased: bailing out.");
        }
        break;
      }
      minLoss = loss;
    }
  }

  /**
   * Calculates the class membership probabilities for the given test instance.
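   * <p>
   * A note on the computation below: the ensemble output is
   * F(x) = sum over models of shrinkage * 0.5 * ln(p / (1 - p)), with p
   * each base model's (smoothed) probability for class 0; Utils.logs2probs
   * then normalizes {F, -F}, giving P(class 0) = e^F / (e^F + e^-F).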
   *
   * @param instance the instance to be classified
   * @return predicted class probability distribution
   * @throws Exception if instance could not be classified
   * successfully
   */
  public double [] distributionForInstance(Instance instance) 
    throws Exception {

    double [] sums = new double [instance.numClasses()]; 
    for (int i = -1; i < m_NumIterationsPerformed; i++) {
      double prob = 1, shrinkage = m_Shrinkage;
      if (i == -1) {
        prob = m_ZeroR.distributionForInstance(instance)[0]; 
        shrinkage = 1.0;
      } else {
        prob = m_Classifiers[i].distributionForInstance(instance)[0]; 

        // Make sure that probabilities are never 0 or 1 using ad-hoc smoothing
        prob = (m_SumOfWeights * prob + 1) / (m_SumOfWeights + 2);
      }
      sums[0] += shrinkage * 0.5 * (Math.log(prob) - Math.log(1 - prob));
    }
    sums[1] = -sums[0];
    return Utils.logs2probs(sums);
  }

  /**
   * Returns description of the boosted classifier.
   *
   * @return description of the boosted classifier as a string
   */
  public String toString() {

    StringBuffer text = new StringBuffer();

    if (m_ZeroR == null) {
      text.append("No model built yet.\n\n");
    } else {
      text.append("RealAdaBoost: Base classifiers: \n\n");
      text.append(m_ZeroR.toString() + "\n\n");
      for (int i = 0; i < m_NumIterationsPerformed; i++) {
        text.append(m_Classifiers[i].toString() + "\n\n");
      }
      text.append("Number of performed iterations: " 
                  + m_NumIterationsPerformed + "\n");
    }

    return text.toString();
  }

  /**
   * Returns the revision string.
   *
   * @return            the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 6136 $");
  }

  /**
   * Main method for testing this class.
   *
   * @param argv the options
   */
  public static void main(String [] argv) {
    runClassifier(new RealAdaBoost(), argv);
  }
}