source: src/main/java/weka/experiment/RegressionSplitEvaluator.java @ 13

Last change on this file since 13 was 4, checked in by gnappo, 14 years ago

Import of weka.

File size: 23.0 KB
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    RegressionSplitEvaluator.java
 *    Copyright (C) 1999 University of Waikato, Hamilton, New Zealand
 *
 */

package weka.experiment;

import weka.classifiers.Classifier;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.Evaluation;
import weka.classifiers.rules.ZeroR;
import weka.core.AdditionalMeasureProducer;
import weka.core.Attribute;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.RevisionHandler;
import weka.core.RevisionUtils;
import weka.core.Summarizable;
import weka.core.Utils;

import java.io.ByteArrayOutputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamClass;
import java.io.Serializable;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadMXBean;
import java.util.Enumeration;
import java.util.Vector;

/**
 <!-- globalinfo-start -->
 * A SplitEvaluator that produces results for a classification scheme on a numeric class attribute.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -W &lt;class name&gt;
 *  The full class name of the classifier.
 *  eg: weka.classifiers.bayes.NaiveBayes</pre>
 *
 * <pre>
 * Options specific to classifier weka.classifiers.rules.ZeroR:
 * </pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 <!-- options-end -->
 *
 * @author Len Trigg (trigg@cs.waikato.ac.nz)
 * @version $Revision: 5987 $
 */
public class RegressionSplitEvaluator
  implements SplitEvaluator, OptionHandler, AdditionalMeasureProducer,
             RevisionHandler {

  /** for serialization */
  static final long serialVersionUID = -328181640503349202L;

  /** The template classifier */
  protected Classifier m_Template = new ZeroR();

  /** The classifier used for evaluation */
  protected Classifier m_Classifier;

  /** The names of any additional measures to look for in SplitEvaluators */
  protected String [] m_AdditionalMeasures = null;

  /** Array of booleans corresponding to the measures in m_AdditionalMeasures
      indicating which of the AdditionalMeasures the current classifier
      can produce */
  protected boolean [] m_doesProduce = null;

  /** Holds the statistics for the most recent application of the classifier */
  protected String m_result = null;

  /** The classifier options (if any) */
  protected String m_ClassifierOptions = "";

  /** The classifier version */
  protected String m_ClassifierVersion = "";

  /** The length of a key */
  private static final int KEY_SIZE = 3;

  /** The length of a result */
  private static final int RESULT_SIZE = 23;

  /**
   * No args constructor.
   */
  public RegressionSplitEvaluator() {

    updateOptions();
  }

  /**
   * Returns a string describing this split evaluator
   * @return a description of the split evaluator suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {
    return "A SplitEvaluator that produces results for a classification "
      +"scheme on a numeric class attribute.";
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {

    Vector newVector = new Vector(1);

    newVector.addElement(new Option(
             "\tThe full class name of the classifier.\n"
              +"\teg: weka.classifiers.bayes.NaiveBayes",
             "W", 1,
             "-W <class name>"));

    if ((m_Template != null) &&
        (m_Template instanceof OptionHandler)) {
      newVector.addElement(new Option(
             "",
             "", 0, "\nOptions specific to classifier "
             + m_Template.getClass().getName() + ":"));
      Enumeration enu = ((OptionHandler)m_Template).listOptions();
      while (enu.hasMoreElements()) {
        newVector.addElement(enu.nextElement());
      }
    }
    return newVector.elements();
  }

  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -W &lt;class name&gt;
   *  The full class name of the classifier.
   *  eg: weka.classifiers.bayes.NaiveBayes</pre>
   *
   * <pre>
   * Options specific to classifier weka.classifiers.rules.ZeroR:
   * </pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   <!-- options-end -->
   *
   * All options after -- will be passed to the classifier.
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    String cName = Utils.getOption('W', options);
    if (cName.length() == 0) {
      throw new Exception("A classifier must be specified with"
                          + " the -W option.");
    }
    // Do it first without options, so if an exception is thrown during
    // the option setting, listOptions will contain options for the actual
    // Classifier.
    setClassifier(AbstractClassifier.forName(cName, null));
    if (getClassifier() instanceof OptionHandler) {
      ((OptionHandler) getClassifier())
        .setOptions(Utils.partitionOptions(options));
      updateOptions();
    }
  }

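  // Example of the option format handled above (the classifier and its "-S 1"
  // option are only an illustration):
  //   -W weka.classifiers.functions.LinearRegression -- -S 1
  // Utils.partitionOptions() extracts everything after "--", and those options
  // are forwarded to the classifier's own setOptions().
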
  /**
   * Gets the current settings of the Classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String [] getOptions() {

    String [] classifierOptions = new String [0];
    if ((m_Template != null) &&
        (m_Template instanceof OptionHandler)) {
      classifierOptions = ((OptionHandler)m_Template).getOptions();
    }

    String [] options = new String [classifierOptions.length + 3];
    int current = 0;

    if (getClassifier() != null) {
      options[current++] = "-W";
      options[current++] = getClassifier().getClass().getName();
    }
    options[current++] = "--";

    System.arraycopy(classifierOptions, 0, options, current,
                     classifierOptions.length);
    current += classifierOptions.length;
    while (current < options.length) {
      options[current++] = "";
    }
    return options;
  }

  /**
   * Set a list of method names for additional measures to look for
   * in Classifiers. This could contain many measures (of which only a
   * subset may be producible by the current Classifier) if an experiment
   * is the type that iterates over a set of properties.
   * @param additionalMeasures an array of method names.
   */
  public void setAdditionalMeasures(String [] additionalMeasures) {
    m_AdditionalMeasures = additionalMeasures;

    // determine which (if any) of the additional measures this classifier
    // can produce
    if (m_AdditionalMeasures != null && m_AdditionalMeasures.length > 0) {
      m_doesProduce = new boolean [m_AdditionalMeasures.length];

      if (m_Template instanceof AdditionalMeasureProducer) {
        Enumeration en = ((AdditionalMeasureProducer)m_Template).
          enumerateMeasures();
        while (en.hasMoreElements()) {
          String mname = (String)en.nextElement();
          for (int j=0;j<m_AdditionalMeasures.length;j++) {
            if (mname.compareToIgnoreCase(m_AdditionalMeasures[j]) == 0) {
              m_doesProduce[j] = true;
            }
          }
        }
      }
    } else {
      m_doesProduce = null;
    }
  }

  /**
   * Returns an enumeration of any additional measure names that might be
   * in the classifier
   * @return an enumeration of the measure names
   */
  public Enumeration enumerateMeasures() {
    Vector newVector = new Vector();
    if (m_Template instanceof AdditionalMeasureProducer) {
      Enumeration en = ((AdditionalMeasureProducer)m_Template).
        enumerateMeasures();
      while (en.hasMoreElements()) {
        String mname = (String)en.nextElement();
        newVector.addElement(mname);
      }
    }
    return newVector.elements();
  }

  /**
   * Returns the value of the named measure
   * @param additionalMeasureName the name of the measure to query for its value
   * @return the value of the named measure
   * @throws IllegalArgumentException if the named measure is not supported
   */
  public double getMeasure(String additionalMeasureName) {
    if (m_Template instanceof AdditionalMeasureProducer) {
      if (m_Classifier == null) {
        throw new IllegalArgumentException("RegressionSplitEvaluator: " +
                                           "Can't return result for measure, " +
                                           "classifier has not been built yet.");
      }
      return ((AdditionalMeasureProducer)m_Classifier).
        getMeasure(additionalMeasureName);
    } else {
      throw new IllegalArgumentException("RegressionSplitEvaluator: "
                          +"Can't return value for: "+additionalMeasureName
                          +". "+m_Template.getClass().getName()+" "
                          +"is not an AdditionalMeasureProducer");
    }
  }

  /**
   * Gets the data types of each of the key columns produced for a single run.
   * The number of key fields must be constant
   * for a given SplitEvaluator.
   *
   * @return an array containing objects of the type of each key column. The
   * objects should be Strings, or Doubles.
   */
  public Object [] getKeyTypes() {

    Object [] keyTypes = new Object[KEY_SIZE];
    keyTypes[0] = "";
    keyTypes[1] = "";
    keyTypes[2] = "";
    return keyTypes;
  }

  /**
   * Gets the names of each of the key columns produced for a single run.
   * The number of key fields must be constant
   * for a given SplitEvaluator.
   *
   * @return an array containing the name of each key column
   */
  public String [] getKeyNames() {

    String [] keyNames = new String[KEY_SIZE];
    keyNames[0] = "Scheme";
    keyNames[1] = "Scheme_options";
    keyNames[2] = "Scheme_version_ID";
    return keyNames;
  }

  /**
   * Gets the key describing the current SplitEvaluator. For example,
   * this may contain the name of the classifier used for classifier
   * predictive evaluation. The number of key fields must be constant
   * for a given SplitEvaluator.
   *
   * @return an array of objects containing the key.
   */
  public Object [] getKey(){

    Object [] key = new Object[KEY_SIZE];
    key[0] = m_Template.getClass().getName();
    key[1] = m_ClassifierOptions;
    key[2] = m_ClassifierVersion;
    return key;
  }

  /**
   * Gets the data types of each of the result columns produced for a
   * single run. The number of result fields must be constant
   * for a given SplitEvaluator.
   *
   * @return an array containing objects of the type of each result column.
   * The objects should be Strings, or Doubles.
   */
  public Object [] getResultTypes() {
    int addm = (m_AdditionalMeasures != null)
      ? m_AdditionalMeasures.length
      : 0;
    Object [] resultTypes = new Object[RESULT_SIZE+addm];
    Double doub = new Double(0);
    int current = 0;
    resultTypes[current++] = doub;
    resultTypes[current++] = doub;

    resultTypes[current++] = doub;
    resultTypes[current++] = doub;
    resultTypes[current++] = doub;
    resultTypes[current++] = doub;
    resultTypes[current++] = doub;

    resultTypes[current++] = doub;
    resultTypes[current++] = doub;
    resultTypes[current++] = doub;
    resultTypes[current++] = doub;
    resultTypes[current++] = doub;
    resultTypes[current++] = doub;

    // Timing stats
    resultTypes[current++] = doub;
    resultTypes[current++] = doub;
    resultTypes[current++] = doub;
    resultTypes[current++] = doub;

    // sizes
    resultTypes[current++] = doub;
    resultTypes[current++] = doub;
    resultTypes[current++] = doub;

    // Prediction interval statistics
    resultTypes[current++] = doub;
    resultTypes[current++] = doub;

    resultTypes[current++] = "";

    // add any additional measures
    for (int i=0;i<addm;i++) {
      resultTypes[current++] = doub;
    }
    if (current != RESULT_SIZE+addm) {
      throw new Error("ResultTypes didn't fit RESULT_SIZE");
    }
    return resultTypes;
  }

  /**
   * Gets the names of each of the result columns produced for a single run.
   * The number of result fields must be constant
   * for a given SplitEvaluator.
   *
   * @return an array containing the name of each result column
   */
  public String [] getResultNames() {
    int addm = (m_AdditionalMeasures != null)
      ? m_AdditionalMeasures.length
      : 0;
    String [] resultNames = new String[RESULT_SIZE+addm];
    int current = 0;
    resultNames[current++] = "Number_of_training_instances";
    resultNames[current++] = "Number_of_testing_instances";

    // Sensitive stats - certainty of predictions
    resultNames[current++] = "Mean_absolute_error";
    resultNames[current++] = "Root_mean_squared_error";
    resultNames[current++] = "Relative_absolute_error";
    resultNames[current++] = "Root_relative_squared_error";
    resultNames[current++] = "Correlation_coefficient";

    // SF stats
    resultNames[current++] = "SF_prior_entropy";
    resultNames[current++] = "SF_scheme_entropy";
    resultNames[current++] = "SF_entropy_gain";
    resultNames[current++] = "SF_mean_prior_entropy";
    resultNames[current++] = "SF_mean_scheme_entropy";
    resultNames[current++] = "SF_mean_entropy_gain";

    // Timing stats
    resultNames[current++] = "Elapsed_Time_training";
    resultNames[current++] = "Elapsed_Time_testing";
    resultNames[current++] = "UserCPU_Time_training";
    resultNames[current++] = "UserCPU_Time_testing";

    // sizes
    resultNames[current++] = "Serialized_Model_Size";
    resultNames[current++] = "Serialized_Train_Set_Size";
    resultNames[current++] = "Serialized_Test_Set_Size";

    // Prediction interval statistics
    resultNames[current++] = "Coverage_of_Test_Cases_By_Regions";
    resultNames[current++] = "Size_of_Predicted_Regions";

    // Classifier defined extras
    resultNames[current++] = "Summary";
    // add any additional measures
    for (int i=0;i<addm;i++) {
      resultNames[current++] = m_AdditionalMeasures[i];
    }
    if (current != RESULT_SIZE+addm) {
      throw new Error("ResultNames didn't fit RESULT_SIZE");
    }
    return resultNames;
  }

  /**
   * Gets the results for the supplied train and test datasets. Now performs
   * a deep copy of the classifier before it is built and evaluated (just in case
   * the classifier is not initialized properly in buildClassifier()).
   *
   * @param train the training Instances.
   * @param test the testing Instances.
   * @return the results stored in an array. The objects stored in
   * the array may be Strings, Doubles, or null (for the missing value).
   * @throws Exception if a problem occurs while getting the results
   */
  public Object [] getResult(Instances train, Instances test)
    throws Exception {

    if (train.classAttribute().type() != Attribute.NUMERIC) {
      throw new Exception("Class attribute is not numeric!");
    }
    if (m_Template == null) {
      throw new Exception("No classifier has been specified");
    }
    ThreadMXBean thMonitor = ManagementFactory.getThreadMXBean();
    boolean canMeasureCPUTime = thMonitor.isThreadCpuTimeSupported();
    if(!thMonitor.isThreadCpuTimeEnabled())
      thMonitor.setThreadCpuTimeEnabled(true);

    int addm = (m_AdditionalMeasures != null) ? m_AdditionalMeasures.length : 0;
    Object [] result = new Object[RESULT_SIZE+addm];
    long thID = Thread.currentThread().getId();
    long CPUStartTime=-1, trainCPUTimeElapsed=-1, testCPUTimeElapsed=-1,
         trainTimeStart, trainTimeElapsed, testTimeStart, testTimeElapsed;
    Evaluation eval = new Evaluation(train);
    m_Classifier = AbstractClassifier.makeCopy(m_Template);

    trainTimeStart = System.currentTimeMillis();
    if(canMeasureCPUTime)
      CPUStartTime = thMonitor.getThreadUserTime(thID);
    m_Classifier.buildClassifier(train);
    if(canMeasureCPUTime)
      trainCPUTimeElapsed = thMonitor.getThreadUserTime(thID) - CPUStartTime;
    trainTimeElapsed = System.currentTimeMillis() - trainTimeStart;
    testTimeStart = System.currentTimeMillis();
    if(canMeasureCPUTime)
      CPUStartTime = thMonitor.getThreadUserTime(thID);
    eval.evaluateModel(m_Classifier, test);
    if(canMeasureCPUTime)
      testCPUTimeElapsed = thMonitor.getThreadUserTime(thID) - CPUStartTime;
    testTimeElapsed = System.currentTimeMillis() - testTimeStart;
    thMonitor = null;

    m_result = eval.toSummaryString();
    // The results stored are all per instance -- can be multiplied by the
    // number of instances to get absolute numbers
    int current = 0;
    result[current++] = new Double(train.numInstances());
    result[current++] = new Double(eval.numInstances());

    result[current++] = new Double(eval.meanAbsoluteError());
    result[current++] = new Double(eval.rootMeanSquaredError());
    result[current++] = new Double(eval.relativeAbsoluteError());
    result[current++] = new Double(eval.rootRelativeSquaredError());
    result[current++] = new Double(eval.correlationCoefficient());

    result[current++] = new Double(eval.SFPriorEntropy());
    result[current++] = new Double(eval.SFSchemeEntropy());
    result[current++] = new Double(eval.SFEntropyGain());
    result[current++] = new Double(eval.SFMeanPriorEntropy());
    result[current++] = new Double(eval.SFMeanSchemeEntropy());
    result[current++] = new Double(eval.SFMeanEntropyGain());

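    // Wall-clock times above were measured in milliseconds and thread user CPU
    // times in nanoseconds, so both are converted to seconds before storing.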
    // Timing stats
    result[current++] = new Double(trainTimeElapsed / 1000.0);
    result[current++] = new Double(testTimeElapsed / 1000.0);
    if(canMeasureCPUTime) {
      result[current++] = new Double((trainCPUTimeElapsed/1000000.0) / 1000.0);
      result[current++] = new Double((testCPUTimeElapsed /1000000.0) / 1000.0);
    }
    else {
      result[current++] = new Double(Utils.missingValue());
      result[current++] = new Double(Utils.missingValue());
    }

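    // Serialized sizes are obtained by writing each object to an in-memory
    // stream and recording the number of bytes produced.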
    // sizes
    ByteArrayOutputStream bastream = new ByteArrayOutputStream();
    ObjectOutputStream oostream = new ObjectOutputStream(bastream);
    oostream.writeObject(m_Classifier);
    result[current++] = new Double(bastream.size());
    bastream = new ByteArrayOutputStream();
    oostream = new ObjectOutputStream(bastream);
    oostream.writeObject(train);
    result[current++] = new Double(bastream.size());
    bastream = new ByteArrayOutputStream();
    oostream = new ObjectOutputStream(bastream);
    oostream.writeObject(test);
    result[current++] = new Double(bastream.size());

    // Prediction interval statistics
    result[current++] = new Double(eval.coverageOfTestCasesByPredictedRegions());
    result[current++] = new Double(eval.sizeOfPredictedRegions());

    if (m_Classifier instanceof Summarizable) {
      result[current++] = ((Summarizable)m_Classifier).toSummaryString();
    } else {
      result[current++] = null;
    }

    for (int i=0;i<addm;i++) {
      if (m_doesProduce[i]) {
        try {
          double dv = ((AdditionalMeasureProducer)m_Classifier).
          getMeasure(m_AdditionalMeasures[i]);
          if (!Utils.isMissingValue(dv)) {
            Double value = new Double(dv);
            result[current++] = value;
          } else {
            result[current++] = null;
          }
        } catch (Exception ex) {
          System.err.println(ex);
        }
      } else {
        result[current++] = null;
      }
    }

    if (current != RESULT_SIZE+addm) {
      throw new Error("Results didn't fit RESULT_SIZE");
    }
    return result;
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String classifierTipText() {
    return "The classifier to use.";
  }

  /**
   * Get the value of Classifier.
   *
   * @return Value of Classifier.
   */
  public Classifier getClassifier() {

    return m_Template;
  }

  /**
   * Sets the classifier.
   *
   * @param newClassifier the new classifier to use.
   */
  public void setClassifier(Classifier newClassifier) {

    m_Template = newClassifier;
    updateOptions();

    System.err.println("RegressionSplitEvaluator: In set classifier");
  }

  /**
   * Updates the options that the current classifier is using.
   */
  protected void updateOptions() {

    if (m_Template instanceof OptionHandler) {
      m_ClassifierOptions = Utils.joinOptions(((OptionHandler)m_Template)
                                              .getOptions());
    } else {
      m_ClassifierOptions = "";
    }
    if (m_Template instanceof Serializable) {
      ObjectStreamClass obs = ObjectStreamClass.lookup(m_Template
                                                       .getClass());
      m_ClassifierVersion = "" + obs.getSerialVersionUID();
    } else {
      m_ClassifierVersion = "";
    }
  }

  /**
   * Set the Classifier to use, given its class name. A new classifier will be
   * instantiated.
   *
   * @param newClassifierName the Classifier class name.
   * @throws Exception if the class name is invalid.
   */
  public void setClassifierName(String newClassifierName) throws Exception {

    try {
      setClassifier((Classifier)Class.forName(newClassifierName)
                    .newInstance());
    } catch (Exception ex) {
      throw new Exception("Can't find Classifier with class name: "
                          + newClassifierName);
    }
  }

  /**
   * Gets the raw output from the classifier
   * @return the raw output from the classifier
   */
  public String getRawResultOutput() {
    StringBuffer result = new StringBuffer();

    if (m_Classifier == null) {
      return "<null> classifier";
    }
    result.append(toString());
    result.append("Classifier model: \n"+m_Classifier.toString()+'\n');

    // append the performance statistics
    if (m_result != null) {
      result.append(m_result);

      if (m_doesProduce != null) {
        for (int i=0;i<m_doesProduce.length;i++) {
          if (m_doesProduce[i]) {
            try {
              double dv = ((AdditionalMeasureProducer)m_Classifier).
                getMeasure(m_AdditionalMeasures[i]);
              if (!Utils.isMissingValue(dv)) {
                Double value = new Double(dv);
                result.append(m_AdditionalMeasures[i]+" : "+value+'\n');
              } else {
                result.append(m_AdditionalMeasures[i]+" : "+'?'+'\n');
              }
            } catch (Exception ex) {
              System.err.println(ex);
            }
          }
        }
      }
    }
    return result.toString();
  }

  /**
   * Returns a text description of the split evaluator.
   *
   * @return a text description of the split evaluator.
   */
  public String toString() {

    String result = "RegressionSplitEvaluator: ";
    if (m_Template == null) {
      return result + "<null> classifier";
    }
    return result + m_Template.getClass().getName() + " "
      + m_ClassifierOptions + "(version " + m_ClassifierVersion + ")";
  }

  /**
   * Returns the revision string.
   *
   * @return            the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 5987 $");
  }
} // RegressionSplitEvaluator
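
/*
 * Minimal usage sketch (not part of the original Weka source). It assumes a
 * hypothetical ARFF file path and uses LinearRegression purely as an example;
 * any scheme able to handle a numeric class attribute would work the same way.
 */
class RegressionSplitEvaluatorExample {

  public static void main(String[] args) throws Exception {
    // Load a dataset whose last attribute is the numeric class.
    Instances data = new Instances(new java.io.FileReader("/path/to/data.arff"));
    data.setClassIndex(data.numAttributes() - 1);

    // A simple deterministic split: fold 0 of a 3-fold partition is held out for testing.
    Instances train = data.trainCV(3, 0);
    Instances test = data.testCV(3, 0);

    RegressionSplitEvaluator se = new RegressionSplitEvaluator();
    se.setClassifier(new weka.classifiers.functions.LinearRegression());

    // The result columns line up one-to-one with getResultNames().
    String[] names = se.getResultNames();
    Object[] result = se.getResult(train, test);
    for (int i = 0; i < names.length; i++) {
      System.out.println(names[i] + " = " + result[i]);
    }
  }
}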