source: src/main/java/weka/classifiers/functions/SMOreg.java @ 9

Last change on this file since 9 was 4, checked in by gnappo, 14 years ago

Import of weka.

File size: 24.2 KB
RevLine 
[4]1/*
2 *    This program is free software; you can redistribute it and/or modify
3 *    it under the terms of the GNU General Public License as published by
4 *    the Free Software Foundation; either version 2 of the License, or
5 *    (at your option) any later version.
6 *
7 *    This program is distributed in the hope that it will be useful,
8 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
9 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
10 *    GNU General Public License for more details.
11 *
12 *    You should have received a copy of the GNU General Public License
13 *    along with this program; if not, write to the Free Software
14 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
15 */
16
17/*
18 *    SMOreg.java
19 *    Copyright (C) 2006 University of Waikato, Hamilton, New Zealand
20 *
21 */
22
23package weka.classifiers.functions;
24
25import weka.classifiers.Classifier;
26import weka.classifiers.AbstractClassifier;
27import weka.classifiers.functions.supportVector.Kernel;
28import weka.classifiers.functions.supportVector.PolyKernel;
29import weka.classifiers.functions.supportVector.RegOptimizer;
30import weka.classifiers.functions.supportVector.RegSMOImproved;
31import weka.core.AdditionalMeasureProducer;
32import weka.core.Capabilities;
33import weka.core.Instance;
34import weka.core.Instances;
35import weka.core.Option;
36import weka.core.OptionHandler;
37import weka.core.RevisionUtils;
38import weka.core.SelectedTag;
39import weka.core.Tag;
40import weka.core.TechnicalInformation;
41import weka.core.TechnicalInformationHandler;
42import weka.core.Utils;
43import weka.core.WeightedInstancesHandler;
44import weka.core.Capabilities.Capability;
45import weka.core.TechnicalInformation.Field;
46import weka.core.TechnicalInformation.Type;
47import weka.filters.Filter;
48import weka.filters.unsupervised.attribute.NominalToBinary;
49import weka.filters.unsupervised.attribute.Normalize;
50import weka.filters.unsupervised.attribute.ReplaceMissingValues;
51import weka.filters.unsupervised.attribute.Standardize;
52
53import java.util.Enumeration;
54import java.util.Vector;
55
56/**
57 <!-- globalinfo-start -->
58 * SMOreg implements the support vector machine for regression. The parameters can be learned using various algorithms. The algorithm is selected by setting the RegOptimizer. The most popular algorithm (RegSMOImproved) is due to Shevade, Keerthi et al and this is the default RegOptimizer.<br/>
59 * <br/>
60 * For more information see:<br/>
61 * <br/>
62 * S.K. Shevade, S.S. Keerthi, C. Bhattacharyya, K.R.K. Murthy: Improvements to the SMO Algorithm for SVM Regression. In: IEEE Transactions on Neural Networks, 1999.<br/>
63 * <br/>
64 * A.J. Smola, B. Schoelkopf (1998). A tutorial on support vector regression.
65 * <p/>
66 <!-- globalinfo-end -->
67 *
68 <!-- technical-bibtex-start -->
69 * BibTeX:
70 * <pre>
71 * &#64;inproceedings{Shevade1999,
72 *    author = {S.K. Shevade and S.S. Keerthi and C. Bhattacharyya and K.R.K. Murthy},
73 *    booktitle = {IEEE Transactions on Neural Networks},
74 *    title = {Improvements to the SMO Algorithm for SVM Regression},
75 *    year = {1999},
76 *    PS = {http://guppy.mpe.nus.edu.sg/\~mpessk/svm/ieee_smo_reg.ps.gz}
77 * }
78 *
79 * &#64;techreport{Smola1998,
80 *    author = {A.J. Smola and B. Schoelkopf},
81 *    note = {NeuroCOLT2 Technical Report NC2-TR-1998-030},
82 *    title = {A tutorial on support vector regression},
83 *    year = {1998}
84 * }
85 * </pre>
86 * <p/>
87 <!-- technical-bibtex-end -->
88 *
89 <!-- options-start -->
90 * Valid options are: <p/>
91 *
92 * <pre> -C &lt;double&gt;
93 *  The complexity constant C.
94 *  (default 1)</pre>
95 *
96 * <pre> -N
97 *  Whether to 0=normalize/1=standardize/2=neither.
98 *  (default 0=normalize)</pre>
99 *
100 * <pre> -I &lt;classname and parameters&gt;
101 *  Optimizer class used for solving quadratic optimization problem
102 *  (default weka.classifiers.functions.supportVector.RegSMOImproved)</pre>
103 *
104 * <pre> -K &lt;classname and parameters&gt;
105 *  The Kernel to use.
106 *  (default: weka.classifiers.functions.supportVector.PolyKernel)</pre>
107 *
108 * <pre>
109 * Options specific to optimizer ('-I') weka.classifiers.functions.supportVector.RegSMOImproved:
110 * </pre>
111 *
112 * <pre> -T &lt;double&gt;
113 *  The tolerance parameter for checking the stopping criterion.
114 *  (default 0.001)</pre>
115 *
116 * <pre> -V
117 *  Use variant 1 of the algorithm when true, otherwise use variant 2.
118 *  (default true)</pre>
119 *
120 * <pre> -P &lt;double&gt;
121 *  The epsilon for round-off error.
122 *  (default 1.0e-12)</pre>
123 *
124 * <pre> -L &lt;double&gt;
125 *  The epsilon parameter in epsilon-insensitive loss function.
126 *  (default 1.0e-3)</pre>
127 *
128 * <pre> -W &lt;double&gt;
129 *  The random number seed.
130 *  (default 1)</pre>
131 *
132 * <pre>
133 * Options specific to kernel ('-K') weka.classifiers.functions.supportVector.PolyKernel:
134 * </pre>
135 *
136 * <pre> -D
137 *  Enables debugging output (if available) to be printed.
138 *  (default: off)</pre>
139 *
140 * <pre> -no-checks
141 *  Turns off all checks - use with caution!
142 *  (default: checks on)</pre>
143 *
144 * <pre> -C &lt;num&gt;
145 *  The size of the cache (a prime number), 0 for full cache and
146 *  -1 to turn it off.
147 *  (default: 250007)</pre>
148 *
149 * <pre> -E &lt;num&gt;
150 *  The Exponent to use.
151 *  (default: 1.0)</pre>
152 *
153 * <pre> -L
154 *  Use lower-order terms.
155 *  (default: no)</pre>
156 *
157 <!-- options-end -->
158 *
159 * @author  Remco Bouckaert (remco@cs.waikato.ac.nz,rrb@xm.co.nz)
160 * @version $Revision: 5928 $
161 */
162public class SMOreg 
163  extends AbstractClassifier
164  implements WeightedInstancesHandler, AdditionalMeasureProducer, 
165             TechnicalInformationHandler {
166 
  /** for serialization */
  private static final long serialVersionUID = -7149606251113102827L;
 
  /** The filter to apply to the training data: Normalize */
  public static final int FILTER_NORMALIZE = 0;
  /** The filter to apply to the training data: Standardize */
  public static final int FILTER_STANDARDIZE = 1;
  /** The filter to apply to the training data: None */
  public static final int FILTER_NONE = 2;
  /** Tags describing the available filter types (used by the GUI and option handling) */
  public static final Tag[] TAGS_FILTER =
  {
    new Tag(FILTER_NORMALIZE, "Normalize training data"),
    new Tag(FILTER_STANDARDIZE, "Standardize training data"),
    new Tag(FILTER_NONE, "No normalization/standardization"),
  };
 
  /** Whether to normalize/standardize/neither (one of the FILTER_* constants) */
  protected int m_filterType = FILTER_NORMALIZE;
 
  /** The filter used to make attributes numeric. */
  protected NominalToBinary m_NominalToBinary;
 
  /** The filter used to standardize/normalize all values; null when FILTER_NONE. */
  protected Filter m_Filter = null;
 
  /** The filter used to get rid of missing values. */
  protected ReplaceMissingValues m_Missing;
 
  /** Only numeric attributes in the dataset? If so, less need to filter */
  protected boolean m_onlyNumeric;
 
  /** capacity parameter C (complexity constant of the SVM) **/
  protected double m_C = 1.0;
 
  /** coefficients used by normalization filter for doing its linear transformation
   * so that result = svmoutput * m_x1 + m_x0 (identity when no filter is applied) **/
  protected double m_x1 = 1.0;
  protected double m_x0 = 0.0;
 
  /** contains the algorithm used for learning **/
  protected RegOptimizer m_optimizer = new RegSMOImproved();

  /** the configured kernel */
  protected Kernel m_kernel = new PolyKernel();
212 
213  /**
214   * Returns a string describing classifier
215   *
216   * @return            a description suitable for
217   *                    displaying in the explorer/experimenter gui
218   */
219  public String globalInfo() {
220    return
221        "SMOreg implements the support vector machine for regression. "
222      + "The parameters can be learned using various algorithms. The "
223      + "algorithm is selected by setting the RegOptimizer. The most "
224      + "popular algorithm (" 
225      + RegSMOImproved.class.getName().replaceAll(".*\\.", "") 
226      + ") is due to Shevade, Keerthi " 
227      + "et al and this is the default RegOptimizer.\n\n"
228      + "For more information see:\n\n"
229      + getTechnicalInformation().toString();
230  }
231
232  /**
233   * Returns an instance of a TechnicalInformation object, containing
234   * detailed information about the technical background of this class,
235   * e.g., paper reference or book this class is based on.
236   *
237   * @return the technical information about this class
238   */
239  public TechnicalInformation getTechnicalInformation() {
240    TechnicalInformation        result;
241    TechnicalInformation        additional;
242   
243    result = new TechnicalInformation(Type.INPROCEEDINGS);
244    result.setValue(Field.AUTHOR, "S.K. Shevade and S.S. Keerthi and C. Bhattacharyya and K.R.K. Murthy");
245    result.setValue(Field.TITLE, "Improvements to the SMO Algorithm for SVM Regression");
246    result.setValue(Field.BOOKTITLE, "IEEE Transactions on Neural Networks");
247    result.setValue(Field.YEAR, "1999");
248    result.setValue(Field.PS, "http://guppy.mpe.nus.edu.sg/~mpessk/svm/ieee_smo_reg.ps.gz");
249
250    additional = result.add(Type.TECHREPORT);
251    additional.setValue(Field.AUTHOR, "A.J. Smola and B. Schoelkopf");
252    additional.setValue(Field.TITLE, "A tutorial on support vector regression");
253    additional.setValue(Field.NOTE, "NeuroCOLT2 Technical Report NC2-TR-1998-030");
254    additional.setValue(Field.YEAR, "1998");
255
256    return result;
257  }
258 
259  /**
260   * Returns an enumeration describing the available options.
261   *
262   * @return an enumeration of all the available options.
263   */
264  public Enumeration listOptions() {
265    Enumeration enm;
266    Vector result = new Vector();
267   
268    result.addElement(new Option(
269        "\tThe complexity constant C.\n"
270        + "\t(default 1)", 
271        "C", 1, "-C <double>"));
272   
273    result.addElement(new Option(
274        "\tWhether to 0=normalize/1=standardize/2=neither.\n" 
275        + "\t(default 0=normalize)", 
276        "N", 1, "-N"));
277   
278    result.addElement(new Option(
279        "\tOptimizer class used for solving quadratic optimization problem\n" 
280        + "\t(default " + RegSMOImproved.class.getName() + ")",
281        "I", 1, "-I <classname and parameters>"));
282   
283    result.addElement(new Option(
284        "\tThe Kernel to use.\n"
285        + "\t(default: weka.classifiers.functions.supportVector.PolyKernel)",
286        "K", 1, "-K <classname and parameters>"));
287
288    result.addElement(new Option(
289        "",
290        "", 0, "\nOptions specific to optimizer ('-I') "
291        + getRegOptimizer().getClass().getName() + ":"));
292
293    enm = ((OptionHandler) getRegOptimizer()).listOptions();
294    while (enm.hasMoreElements())
295      result.addElement(enm.nextElement());
296
297    result.addElement(new Option(
298        "",
299        "", 0, "\nOptions specific to kernel ('-K') "
300        + getKernel().getClass().getName() + ":"));
301   
302    enm = ((OptionHandler) getKernel()).listOptions();
303    while (enm.hasMoreElements())
304      result.addElement(enm.nextElement());
305
306    return result.elements();
307  }
308 
309  /**
310   * Parses a given list of options. <p/>
311   *
312   <!-- options-start -->
313   * Valid options are: <p/>
314   *
315   * <pre> -C &lt;double&gt;
316   *  The complexity constant C.
317   *  (default 1)</pre>
318   *
319   * <pre> -N
320   *  Whether to 0=normalize/1=standardize/2=neither.
321   *  (default 0=normalize)</pre>
322   *
323   * <pre> -I &lt;classname and parameters&gt;
324   *  Optimizer class used for solving quadratic optimization problem
325   *  (default weka.classifiers.functions.supportVector.RegSMOImproved)</pre>
326   *
327   * <pre> -K &lt;classname and parameters&gt;
328   *  The Kernel to use.
329   *  (default: weka.classifiers.functions.supportVector.PolyKernel)</pre>
330   *
331   * <pre>
332   * Options specific to optimizer ('-I') weka.classifiers.functions.supportVector.RegSMOImproved:
333   * </pre>
334   *
335   * <pre> -T &lt;double&gt;
336   *  The tolerance parameter for checking the stopping criterion.
337   *  (default 0.001)</pre>
338   *
339   * <pre> -V
340   *  Use variant 1 of the algorithm when true, otherwise use variant 2.
341   *  (default true)</pre>
342   *
343   * <pre> -P &lt;double&gt;
344   *  The epsilon for round-off error.
345   *  (default 1.0e-12)</pre>
346   *
347   * <pre> -L &lt;double&gt;
348   *  The epsilon parameter in epsilon-insensitive loss function.
349   *  (default 1.0e-3)</pre>
350   *
351   * <pre> -W &lt;double&gt;
352   *  The random number seed.
353   *  (default 1)</pre>
354   *
355   * <pre>
356   * Options specific to kernel ('-K') weka.classifiers.functions.supportVector.PolyKernel:
357   * </pre>
358   *
359   * <pre> -D
360   *  Enables debugging output (if available) to be printed.
361   *  (default: off)</pre>
362   *
363   * <pre> -no-checks
364   *  Turns off all checks - use with caution!
365   *  (default: checks on)</pre>
366   *
367   * <pre> -C &lt;num&gt;
368   *  The size of the cache (a prime number), 0 for full cache and
369   *  -1 to turn it off.
370   *  (default: 250007)</pre>
371   *
372   * <pre> -E &lt;num&gt;
373   *  The Exponent to use.
374   *  (default: 1.0)</pre>
375   *
376   * <pre> -L
377   *  Use lower-order terms.
378   *  (default: no)</pre>
379   *
380   <!-- options-end -->
381   *
382   * @param options the list of options as an array of strings
383   * @throws Exception if an option is not supported
384   */
385  public void setOptions(String[] options) throws Exception {
386    String      tmpStr;
387    String[]    tmpOptions;
388   
389    tmpStr = Utils.getOption('C', options);
390    if (tmpStr.length() != 0) {
391      setC(Double.parseDouble(tmpStr));
392    } else {
393      setC(1.0);
394    }
395   
396    String nString = Utils.getOption('N', options);
397    if (nString.length() != 0) {
398      setFilterType(new SelectedTag(Integer.parseInt(nString), TAGS_FILTER));
399    } else {
400      setFilterType(new SelectedTag(FILTER_NORMALIZE, TAGS_FILTER));
401    }
402   
403    tmpStr = Utils.getOption('I', options);
404    tmpOptions = Utils.splitOptions(tmpStr);
405    if (tmpOptions.length != 0) {
406      tmpStr        = tmpOptions[0];
407      tmpOptions[0] = "";
408      setRegOptimizer(
409          (RegOptimizer) Utils.forName(RegOptimizer.class, tmpStr, tmpOptions));
410    }
411    else {
412      setRegOptimizer(new RegSMOImproved());
413    }
414
415    tmpStr     = Utils.getOption('K', options);
416    tmpOptions = Utils.splitOptions(tmpStr);
417    if (tmpOptions.length != 0) {
418      tmpStr        = tmpOptions[0];
419      tmpOptions[0] = "";
420      setKernel(Kernel.forName(tmpStr, tmpOptions));
421    }
422    else {
423      setKernel(new PolyKernel());
424    }
425  }
426 
427  /**
428   * Gets the current settings of the classifier.
429   *
430   * @return an array of strings suitable for passing to setOptions
431   */
432  public String[] getOptions() {
433    int         i;
434    Vector      result;
435    String[]    options;
436
437    result = new Vector();
438
439    options = super.getOptions();
440    for (i = 0; i < options.length; i++)
441      result.add(options[i]);
442   
443    result.add("-C");
444    result.add("" + getC());
445   
446    result.add("-N");
447    result.add("" + m_filterType);
448   
449    result.add("-I");
450    result.add("" + getRegOptimizer().getClass().getName() + " " + Utils.joinOptions(getRegOptimizer().getOptions()));
451
452    result.add("-K");
453    result.add("" + getKernel().getClass().getName() + " " + Utils.joinOptions(getKernel().getOptions()));
454
455    return (String[]) result.toArray(new String[result.size()]);         
456  }
457 
  /**
   * Returns default capabilities of the classifier. Starts from the
   * kernel's capabilities and restricts the class capabilities to
   * numeric/date classes (this is a regression scheme).
   *
   * @return            the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    // base the capabilities on what the configured kernel can handle
    Capabilities result = getKernel().getCapabilities();
    result.setOwner(this);

    // attribute
    result.enableAllAttributeDependencies();
    // with NominalToBinary we can also handle nominal attributes, but only
    // if the kernel can handle numeric attributes
    if (result.handles(Capability.NUMERIC_ATTRIBUTES))
      result.enable(Capability.NOMINAL_ATTRIBUTES);
    // missing values are replaced in buildClassifier via ReplaceMissingValues
    result.enable(Capability.MISSING_VALUES);
   
    // class: regression only, so wipe the kernel's class capabilities first
    result.disableAllClasses();
    result.disableAllClassDependencies();
    result.enable(Capability.NUMERIC_CLASS);
    result.enable(Capability.DATE_CLASS);
    result.enable(Capability.MISSING_CLASS_VALUES);
   
    return result;
  }
484 
  /**
   * Method for building the classifier. Cleans the training data
   * (missing classes, zero-weight instances), applies the configured
   * filter chain (missing-value replacement, nominal-to-binary,
   * normalize/standardize), derives the linear back-transformation for
   * predictions, and delegates the optimization to the RegOptimizer.
   *
   * @param instances the set of training instances
   * @throws Exception if the classifier can't be built successfully
   */
  public void buildClassifier(Instances instances) throws Exception {
    // can classifier handle the data?
    getCapabilities().testWithFail(instances);

    // remove instances with missing class
    instances = new Instances(instances);
    instances.deleteWithMissingClass();

    // Removes all the instances with weight equal to 0.
    // MUST be done since condition (8) of Keerthi's paper
    // is made with the assertion Ci > 0 (See equation (3a).
    Instances data = new Instances(instances, 0);
    for (int i = 0; i < instances.numInstances(); i++) {
      if (instances.instance(i).weight() > 0) {
        data.add(instances.instance(i));
      }
    }

    if (data.numInstances() == 0) {
      throw new Exception("No training instances left after removing " + "instance with either a weight null or a missing class!");
    }
    instances = data;

    // determine whether any non-class attribute is non-numeric; if all are
    // numeric, the NominalToBinary filter can be skipped here and at
    // prediction time
    m_onlyNumeric = true;
    for (int i = 0; i < instances.numAttributes(); i++) {
      if (i != instances.classIndex()) {
        if (!instances.attribute(i).isNumeric()) {
          m_onlyNumeric = false;
          break;
        }
      }
    }
    // replace missing values first ...
    m_Missing = new ReplaceMissingValues();
    m_Missing.setInputFormat(instances);
    instances = Filter.useFilter(instances, m_Missing);

    // ... then binarize nominal attributes (if there are any)
    if (!m_onlyNumeric) {
      m_NominalToBinary = new NominalToBinary();
      m_NominalToBinary.setInputFormat(instances);
      instances = Filter.useFilter(instances, m_NominalToBinary);
    } else {
      m_NominalToBinary = null;
    }

    // retrieve two different class values used to determine filter transformation
    double y0 = instances.instance(0).classValue();
    int index = 1;
    while (index < instances.numInstances() && instances.instance(index).classValue() == y0) {
      index++;
    }
    if (index == instances.numInstances()) {
      // degenerate case, all class values are equal
      // we don't want to deal with this, too much hassle
      throw new Exception("All class values are the same. At least two class values should be different");
    }
    double y1 = instances.instance(index).classValue();

    // apply filters
    if (m_filterType == FILTER_STANDARDIZE) {
      m_Filter = new Standardize();
      ((Standardize)m_Filter).setIgnoreClass(true);
      m_Filter.setInputFormat(instances);
      instances = Filter.useFilter(instances, m_Filter);
    } else if (m_filterType == FILTER_NORMALIZE) {
      m_Filter = new Normalize();
      ((Normalize)m_Filter).setIgnoreClass(true);
      m_Filter.setInputFormat(instances);
      instances = Filter.useFilter(instances, m_Filter);
    } else {
      m_Filter = null;
    }
    // derive the linear mapping (prediction = svmOutput * m_x1 + m_x0) that
    // undoes the filter's class transformation, by comparing how the two
    // distinct class values moved under the filter
    if (m_Filter != null) {
      double z0 = instances.instance(0).classValue();
      double z1 = instances.instance(index).classValue();
      m_x1 = (y0-y1) / (z0 - z1); // no division by zero, since y0 != y1 guaranteed => z0 != z1 ???
      m_x0 = (y0 - m_x1 * z0); // = y1 - m_x1 * z1
    } else {
      m_x1 = 1.0;
      m_x0 = 0.0;
    }

    // delegate the actual learning to the configured optimizer
    m_optimizer.setSMOReg(this);
    m_optimizer.buildClassifier(instances);
  }
575 
  /**
   * Classifies the given instance using the learned regression function.
   *
   * The instance is pushed through the same filter chain that was applied
   * to the training data in buildClassifier (missing-value replacement,
   * nominal-to-binary, normalize/standardize) before the SVM output is
   * computed and mapped back to the original class scale.
   *
   * @param instance the test instance
   * @return the classification
   * @throws Exception if classification can't be done successfully
   */
  public double classifyInstance(Instance instance) throws Exception {
    // Filter instance
    m_Missing.input(instance);
    m_Missing.batchFinished();
    instance = m_Missing.output();

    if (!m_onlyNumeric) {
      m_NominalToBinary.input(instance);
      m_NominalToBinary.batchFinished();
      instance = m_NominalToBinary.output();
    }

    if (m_Filter != null) {
      m_Filter.input(instance);
      m_Filter.batchFinished();
      instance = m_Filter.output();
    }

    // map the SVM output back to the original class scale
    // (m_x1/m_x0 undo the normalization/standardization of the class)
    double result = m_optimizer.SVMOutput(instance);
    return result * m_x1 + m_x0;
  }
604 
605  /**
606   * Returns the tip text for this property
607   *
608   * @return            tip text for this property suitable for
609   *                    displaying in the explorer/experimenter gui
610   */
611  public String regOptimizerTipText() {
612    return "The learning algorithm.";
613  }
614 
615  /**
616   * sets the learning algorithm
617   *
618   * @param regOptimizer        the learning algorithm
619   */
620  public void setRegOptimizer(RegOptimizer regOptimizer) {
621    m_optimizer = regOptimizer;
622  }
623 
624  /**
625   * returns the learning algorithm
626   *
627   * @return            the learning algorithm
628   */
629  public RegOptimizer getRegOptimizer() {
630    return m_optimizer;
631  }
632 
633  /**
634   * Returns the tip text for this property
635   *
636   * @return            tip text for this property suitable for
637   *                    displaying in the explorer/experimenter gui
638   */
639  public String kernelTipText() {
640    return "The kernel to use.";
641  }
642 
643  /**
644   * sets the kernel to use
645   *
646   * @param value       the kernel to use
647   */
648  public void setKernel(Kernel value) {
649    m_kernel = value;
650  }
651 
652  /**
653   * Returns the kernel to use
654   *
655   * @return            the current kernel
656   */
657  public Kernel getKernel() {
658    return m_kernel;
659  }
660 
661  /**
662   * Returns the tip text for this property
663   *
664   * @return            tip text for this property suitable for
665   *                    displaying in the explorer/experimenter gui
666   */
667  public String cTipText() {
668    return "The complexity parameter C.";
669  }
670 
671  /**
672   * Get the value of C.
673   *
674   * @return            Value of C.
675   */
676  public double getC() {
677    return m_C;
678  }
679 
680  /**
681   * Set the value of C.
682   *
683   * @param v           Value to assign to C.
684   */
685  public void setC(double v) {
686    m_C = v;
687  }
688 
689  /**
690   * Returns the tip text for this property
691   *
692   * @return            tip text for this property suitable for
693   *                    displaying in the explorer/experimenter gui
694   */
695  public String filterTypeTipText() {
696    return "Determines how/if the data will be transformed.";
697  }
698 
699  /**
700   * Gets how the training data will be transformed. Will be one of
701   * FILTER_NORMALIZE, FILTER_STANDARDIZE, FILTER_NONE.
702   *
703   * @return            the filtering mode
704   */
705  public SelectedTag getFilterType() {
706    return new SelectedTag(m_filterType, TAGS_FILTER);
707  }
708 
709  /**
710   * Sets how the training data will be transformed. Should be one of
711   * FILTER_NORMALIZE, FILTER_STANDARDIZE, FILTER_NONE.
712   *
713   * @param newType     the new filtering mode
714   */
715  public void setFilterType(SelectedTag newType) {
716    if (newType.getTags() == TAGS_FILTER) {
717      m_filterType = newType.getSelectedTag().getID();
718    }
719  }
720 
721  /**
722   * Prints out the classifier.
723   *
724   * @return a description of the classifier as a string
725   */
726  public String toString() {
727    StringBuffer text = new StringBuffer();
728   
729    if (m_optimizer == null || !m_optimizer.modelBuilt()) {
730      return "SMOreg: No model built yet.";
731    }
732   
733    try {
734      text.append(m_optimizer.toString());
735    } 
736    catch (Exception e) {
737      return "Can't print SMVreg classifier.";
738    }
739   
740    return text.toString();
741  }
742 
743  /**
744   * Returns an enumeration of the measure names. Additional measures
745   * must follow the naming convention of starting with "measure", eg.
746   * double measureBlah()
747   *
748   * @return an enumeration of the measure names
749   */
750  public Enumeration enumerateMeasures() {
751    Vector result = new Vector();
752   
753    result.addElement("measureKernelEvaluations");
754    result.addElement("measureCacheHits");
755   
756    return result.elements();
757  }
758 
759  /**
760   * Returns the value of the named measure
761   * @param measureName the name of the measure to query for its value
762   * @return the value of the named measure
763   * @throws IllegalArgumentException if the named measure is not supported
764   */
765  public double getMeasure(String measureName) {
766    if (measureName.equals("measureKernelEvaluations"))
767      return measureKernelEvaluations();
768    else if (measureName.equals("measureCacheHits"))
769      return measureCacheHits();
770    else
771      throw new IllegalArgumentException("Measure '" +  measureName + "' is not supported!");
772  }
773 
774  /**
775   * number of kernel evaluations used in learing
776   *
777   * @return            the number of kernel evaluations
778   */
779  protected double measureKernelEvaluations() {
780    if (m_optimizer != null) {
781      return m_optimizer.getKernelEvaluations();
782    } else {
783      return 0; 
784    }
785  }
786 
787  /**
788   * number of kernel cache hits used during learing
789   *
790   * @return            the number of kernel cache hits
791   */
792  protected double measureCacheHits() {
793    if (m_optimizer != null) {
794      return m_optimizer.getCacheHits();
795    } else {
796      return 0; 
797    }
798  }
799 
800  /**
801   * Returns the revision string.
802   *
803   * @return            the revision
804   */
805  public String getRevision() {
806    return RevisionUtils.extract("$Revision: 5928 $");
807  }
808 
809  /**
810   * Main method for running this classifier.
811   *
812   * @param args        the commandline options
813   */
814  public static void main(String[] args) {
815    runClassifier(new SMOreg(), args);
816  }
817}
Note: See TracBrowser for help on using the repository browser.