source: branches/MetisMQI/src/main/java/weka/classifiers/meta/MultiScheme.java @ 38

Last change on this file since 38 was 29, checked in by gnappo, 14 years ago

Tagged the version for the demo and added a branch.

File size: 13.2 KB
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    MultiScheme.java
 *    Copyright (C) 1999 University of Waikato, Hamilton, New Zealand
 *
 */

package weka.classifiers.meta;

import weka.classifiers.Classifier;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.Evaluation;
import weka.classifiers.RandomizableMultipleClassifiersCombiner;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.RevisionUtils;
import weka.core.Utils;

import java.util.Enumeration;
import java.util.Random;
import java.util.Vector;
/**
 <!-- globalinfo-start -->
 * Class for selecting a classifier from among several using cross validation on the training data or the performance on the training data. Performance is measured based on percent correct (classification) or mean-squared error (regression).
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -X &lt;number of folds&gt;
 *  Use cross validation for model selection using the
 *  given number of folds. (default 0, i.e. use
 *  the error on the training data)</pre>
 *
 * <pre> -S &lt;num&gt;
 *  Random number seed.
 *  (default 1)</pre>
 *
 * <pre> -B &lt;classifier specification&gt;
 *  Full class name of classifier to include, followed
 *  by scheme options. May be specified multiple times.
 *  (default: "weka.classifiers.rules.ZeroR")</pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 <!-- options-end -->
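 *
 * Example command line (illustrative; the dataset file and the base
 * schemes shown are placeholders, not defaults of this class): <p/>
 *
 * <pre> java weka.classifiers.meta.MultiScheme -t train.arff -X 5 \
 *    -B "weka.classifiers.rules.ZeroR" -B "weka.classifiers.trees.J48" </pre>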
 *
 * @author Len Trigg (trigg@cs.waikato.ac.nz)
 * @version $Revision: 5928 $
 */
public class MultiScheme 
  extends RandomizableMultipleClassifiersCombiner {

  /** for serialization */
  static final long serialVersionUID = 5710744346128957520L;

  /** The classifier that performed best during model selection. */
  protected Classifier m_Classifier;

  /** The index of the selected scheme in the array of classifiers */
  protected int m_ClassifierIndex;

  /**
   * Number of folds to use for cross validation (0 means use training
   * error for selection)
   */
  protected int m_NumXValFolds;

  /**
   * Returns a string describing the classifier
   * @return a description suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {

    return  "Class for selecting a classifier from among several using cross "
      + "validation on the training data or the performance on the "
      + "training data. Performance is measured based on percent correct "
      + "(classification) or mean-squared error (regression).";
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {

    Vector newVector = new Vector(1);
    newVector.addElement(new Option(
              "\tUse cross validation for model selection using the\n"
              + "\tgiven number of folds. (default 0, i.e. use\n"
              + "\tthe error on the training data)",
              "X", 1, "-X <number of folds>"));

    Enumeration enu = super.listOptions();
    while (enu.hasMoreElements()) {
      newVector.addElement(enu.nextElement());
    }
    return newVector.elements();
  }

  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -X &lt;number of folds&gt;
   *  Use cross validation for model selection using the
   *  given number of folds. (default 0, i.e. use
   *  the error on the training data)</pre>
   *
   * <pre> -S &lt;num&gt;
   *  Random number seed.
   *  (default 1)</pre>
   *
   * <pre> -B &lt;classifier specification&gt;
   *  Full class name of classifier to include, followed
   *  by scheme options. May be specified multiple times.
   *  (default: "weka.classifiers.rules.ZeroR")</pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   <!-- options-end -->
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    String numFoldsString = Utils.getOption('X', options);
    if (numFoldsString.length() != 0) {
      setNumFolds(Integer.parseInt(numFoldsString));
    } else {
      setNumFolds(0);
    }
    super.setOptions(options);
  }
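
  // Illustrative call (the -B scheme below is a placeholder, not a default):
  //   setOptions(new String[] {"-X", "5", "-B", "weka.classifiers.trees.J48"});
  // chooses among the -B schemes by 5-fold cross-validation; the -B and -S
  // options are parsed by the superclass chain.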

  /**
   * Gets the current settings of the Classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String [] getOptions() {

    String [] superOptions = super.getOptions();
    String [] options = new String [superOptions.length + 2];

    int current = 0;
    options[current++] = "-X"; options[current++] = "" + getNumFolds();

    System.arraycopy(superOptions, 0, options, current, 
                     superOptions.length);

    return options;
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String classifiersTipText() {
    return "The classifiers to be chosen from.";
  }

  /**
   * Sets the list of possible classifiers to choose from.
   *
   * @param classifiers an array of classifiers with all options set.
   */
  public void setClassifiers(Classifier [] classifiers) {

    m_Classifiers = classifiers;
  }

  /**
   * Gets the list of possible classifiers to choose from.
   *
   * @return the array of Classifiers
   */
  public Classifier [] getClassifiers() {

    return m_Classifiers;
  }

  /**
   * Gets a single classifier from the set of available classifiers.
   *
   * @param index the index of the classifier wanted
   * @return the Classifier
   */
  public Classifier getClassifier(int index) {

    return m_Classifiers[index];
  }

  /**
   * Gets the classifier specification string, which contains the class name of
   * the classifier and any options to the classifier
   *
   * @param index the index of the classifier string to retrieve, starting from 0.
   * @return the classifier string, or the empty string if no classifier
   * has been assigned (or the index given is out of range).
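   * For example (illustrative), a J48 entry with default options would yield
   * "weka.classifiers.trees.J48 -C 0.25 -M 2".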
   */
  protected String getClassifierSpec(int index) {

    if ((index < 0) || (index >= m_Classifiers.length)) {
      return "";
    }
    Classifier c = getClassifier(index);
    if (c instanceof OptionHandler) {
      return c.getClass().getName() + " "
        + Utils.joinOptions(((OptionHandler)c).getOptions());
    }
    return c.getClass().getName();
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String seedTipText() {
    return "The seed used for randomizing the data " +
      "for cross-validation.";
  }

  /**
   * Sets the seed for random number generation.
   *
   * @param seed the random number seed
   */
  public void setSeed(int seed) {

    m_Seed = seed;
  }

  /**
   * Gets the random number seed.
   *
   * @return the random number seed
   */
  public int getSeed() {

    return m_Seed;
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String numFoldsTipText() {
    return "The number of folds used for cross-validation (if 0, " +
      "performance on training data will be used).";
  }

  /**
   * Gets the number of folds for cross-validation. A number less
   * than 2 specifies using training error rather than cross-validation.
   *
   * @return the number of folds for cross-validation
   */
  public int getNumFolds() {

    return m_NumXValFolds;
  }

  /**
   * Sets the number of folds for cross-validation. A number less
   * than 2 specifies using training error rather than cross-validation.
   *
   * @param numFolds the number of folds for cross-validation
   */
  public void setNumFolds(int numFolds) {

    m_NumXValFolds = numFolds;
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String debugTipText() {
    return "Whether debug information is output to console.";
  }

  /**
   * Set debugging mode
   *
   * @param debug true if debug output should be printed
   */
  public void setDebug(boolean debug) {

    m_Debug = debug;
  }

  /**
   * Get whether debugging is turned on
   *
   * @return true if debugging output is on
   */
  public boolean getDebug() {

    return m_Debug;
  }

  /**
   * Get the index of the classifier that was determined best during
   * model selection (cross-validation, or error on the training data).
   *
   * @return the index in the classifier array
   */
  public int getBestClassifierIndex() {
    return m_ClassifierIndex;
  }

  /**
   * buildClassifier selects a classifier from the set of classifiers
   * by minimising error on the training data.
   *
   * @param data the training data to be used for selecting the
   * best classifier.
   * @throws Exception if the classifier could not be built successfully
   */
  public void buildClassifier(Instances data) throws Exception {

    if (m_Classifiers.length == 0) {
      throw new Exception("No base classifiers have been set!");
    }

    // can classifier handle the data?
    getCapabilities().testWithFail(data);

    // remove instances with missing class
    Instances newData = new Instances(data);
    newData.deleteWithMissingClass();

    Random random = new Random(m_Seed);
    newData.randomize(random);
    if (newData.classAttribute().isNominal() && (m_NumXValFolds > 1)) {
      newData.stratify(m_NumXValFolds);
    }
    Instances train = newData;               // train on all data by default
    Instances test = newData;                // test on training data by default
    Classifier bestClassifier = null;
    int bestIndex = -1;
    double bestPerformance = Double.NaN;
    int numClassifiers = m_Classifiers.length;
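
    // Each candidate is evaluated on identical folds (trainCV/testCV below are
    // seeded with the same Random), so all schemes are compared on the same
    // splits; on ties in error rate the earlier classifier is kept.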
    for (int i = 0; i < numClassifiers; i++) {
      Classifier currentClassifier = getClassifier(i);
      Evaluation evaluation;
      if (m_NumXValFolds > 1) {
        evaluation = new Evaluation(newData);
        for (int j = 0; j < m_NumXValFolds; j++) {

          // We want to randomize the data the same way for every
          // learning scheme.
          train = newData.trainCV(m_NumXValFolds, j, new Random(1));
          test = newData.testCV(m_NumXValFolds, j);
          currentClassifier.buildClassifier(train);
          evaluation.setPriors(train);
          evaluation.evaluateModel(currentClassifier, test);
        }
      } else {
        currentClassifier.buildClassifier(train);
        evaluation = new Evaluation(train);
        evaluation.evaluateModel(currentClassifier, test);
      }

      double error = evaluation.errorRate();
      if (m_Debug) {
        System.err.println("Error rate: " + Utils.doubleToString(error, 6, 4)
                           + " for classifier "
                           + currentClassifier.getClass().getName());
      }

      if ((i == 0) || (error < bestPerformance)) {
        bestClassifier = currentClassifier;
        bestPerformance = error;
        bestIndex = i;
      }
    }
    m_ClassifierIndex = bestIndex;
    if (m_NumXValFolds > 1) {
      bestClassifier.buildClassifier(newData);
    }
    m_Classifier = bestClassifier;
  }

  /**
   * Returns class probabilities.
   *
   * @param instance the instance to be classified
   * @return the distribution for the instance
   * @throws Exception if instance could not be classified
   * successfully
   */
  public double[] distributionForInstance(Instance instance) throws Exception {

    return m_Classifier.distributionForInstance(instance);
  }

  /**
   * Output a representation of this classifier
   * @return a string representation of the classifier
   */
  public String toString() {

    if (m_Classifier == null) {
      return "MultiScheme: No model built yet.";
    }

    String result = "MultiScheme selection using";
    if (m_NumXValFolds > 1) {
      result += " cross validation error";
    } else {
      result += " error on training data";
    }
    result += " from the following:\n";
    for (int i = 0; i < m_Classifiers.length; i++) {
      result += '\t' + getClassifierSpec(i) + '\n';
    }

    result += "Selected scheme: "
      + getClassifierSpec(m_ClassifierIndex)
      + "\n\n"
      + m_Classifier.toString();
    return result;
  }

  /**
   * Returns the revision string.
   *
   * @return            the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 5928 $");
  }

  /**
   * Main method for testing this class.
   *
   * @param argv should contain the following arguments:
   * -t training file [-T test file] [-c class index]
   */
  public static void main(String [] argv) {
    runClassifier(new MultiScheme(), argv);
  }
}
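
For reference, a minimal sketch of driving this class programmatically (the dataset path, fold count, and base schemes below are illustrative placeholders, not part of this file):

import weka.classifiers.Classifier;
import weka.classifiers.meta.MultiScheme;
import weka.classifiers.rules.ZeroR;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class MultiSchemeDemo {
  public static void main(String[] args) throws Exception {
    // Load a dataset; treat the last attribute as the class (placeholder path).
    Instances data = DataSource.read("train.arff");
    data.setClassIndex(data.numAttributes() - 1);

    // Candidate schemes to choose from, selected by 5-fold cross-validation.
    MultiScheme ms = new MultiScheme();
    ms.setClassifiers(new Classifier[] { new ZeroR(), new J48() });
    ms.setNumFolds(5);
    ms.setSeed(1);

    ms.buildClassifier(data);
    System.out.println(ms);  // prints the candidates and the selected scheme
  }
}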