source: src/main/java/weka/classifiers/functions/PaceRegression.java @ 10

Last change on this file since 10 was 4, checked in by gnappo, 14 years ago

Import of weka.

File size: 24.9 KB
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.  */

/*
 *    PaceRegression.java
 *    Copyright (C) 2002 University of Waikato, Hamilton, New Zealand
 */

package weka.classifiers.functions;

import weka.classifiers.Classifier;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.functions.pace.ChisqMixture;
import weka.classifiers.functions.pace.MixtureDistribution;
import weka.classifiers.functions.pace.NormalMixture;
import weka.classifiers.functions.pace.PaceMatrix;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.NoSupportForMissingValuesException;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.RevisionUtils;
import weka.core.SelectedTag;
import weka.core.Tag;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;
import weka.core.WekaException;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.core.matrix.DoubleVector;
import weka.core.matrix.IntVector;

import java.util.Enumeration;
import java.util.Vector;

/**
 <!-- globalinfo-start -->
 * Class for building pace regression linear models and using them for prediction. <br/>
 * <br/>
 * Under regularity conditions, pace regression is provably optimal when the number of coefficients tends to infinity. It consists of a group of estimators that are either overall optimal or optimal under certain conditions.<br/>
 * <br/>
 * The current work on pace regression theory, and therefore also this implementation, does not handle: <br/>
 * <br/>
 * - missing values <br/>
 * - non-binary nominal attributes <br/>
 * - the case that n - k is small, where n is the number of instances and k is the number of coefficients (the threshold used in this implementation is 20)<br/>
 * <br/>
 * For more information see:<br/>
 * <br/>
 * Wang, Y (2000). A new approach to fitting linear models in high dimensional spaces. Hamilton, New Zealand.<br/>
 * <br/>
 * Wang, Y., Witten, I. H.: Modeling for optimal probability prediction. In: Proceedings of the Nineteenth International Conference in Machine Learning, Sydney, Australia, 650-657, 2002.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;phdthesis{Wang2000,
 *    address = {Hamilton, New Zealand},
 *    author = {Wang, Y},
 *    school = {Department of Computer Science, University of Waikato},
 *    title = {A new approach to fitting linear models in high dimensional spaces},
 *    year = {2000}
 * }
 *
 * &#64;inproceedings{Wang2002,
 *    address = {Sydney, Australia},
 *    author = {Wang, Y. and Witten, I. H.},
 *    booktitle = {Proceedings of the Nineteenth International Conference in Machine Learning},
 *    pages = {650-657},
 *    title = {Modeling for optimal probability prediction},
 *    year = {2002}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -D
 *  Produce debugging output.
 *  (default no debugging output)</pre>
 *
 * <pre> -E &lt;estimator&gt;
 *  The estimator can be one of the following:
 *   eb -- Empirical Bayes estimator for normal mixture (default)
 *   nested -- Optimal nested model selector for normal mixture
 *   subset -- Optimal subset selector for normal mixture
 *   pace2 -- PACE2 for Chi-square mixture
 *   pace4 -- PACE4 for Chi-square mixture
 *   pace6 -- PACE6 for Chi-square mixture
 *
 *   ols -- Ordinary least squares estimator
 *   aic -- AIC estimator
 *   bic -- BIC estimator
 *   ric -- RIC estimator
 *   olsc -- Ordinary least squares subset selector with a threshold</pre>
 *
 * <pre> -S &lt;threshold value&gt;
 *  Threshold value for the OLSC estimator</pre>
 *
 <!-- options-end -->
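 *
 * A minimal usage sketch (assuming an already loaded weka.core.Instances object
 * named "data" whose class attribute has been set; the variable names are illustrative):
 * <pre>
 * PaceRegression pace = new PaceRegression();
 * pace.buildClassifier(data);                                   // fit the linear model
 * double prediction = pace.classifyInstance(data.instance(0));  // predict for one instance
 * </pre>
 * <p/>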
 *
 * @author Yong Wang (yongwang@cs.waikato.ac.nz)
 * @author Gabi Schmidberger (gabi@cs.waikato.ac.nz)
 * @version $Revision: 5928 $
 */
public class PaceRegression 
  extends AbstractClassifier
  implements OptionHandler, WeightedInstancesHandler, TechnicalInformationHandler {

  /** for serialization */
  static final long serialVersionUID = 7230266976059115435L;

  /** The model used */
  Instances m_Model = null;

  /** Array for storing coefficients of linear regression. */
  private double[] m_Coefficients;

  /** The index of the class attribute */
  private int m_ClassIndex;

  /** True if debug output will be printed */
  private boolean m_Debug;

  /** estimator type: Ordinary least squares */
  private static final int olsEstimator = 0;
  /** estimator type: Empirical Bayes */
  private static final int ebEstimator = 1;
  /** estimator type: Nested model selector */
  private static final int nestedEstimator = 2;
  /** estimator type: Subset selector */
  private static final int subsetEstimator = 3;
  /** estimator type: PACE2 */
  private static final int pace2Estimator = 4;
  /** estimator type: PACE4 */
  private static final int pace4Estimator = 5;
  /** estimator type: PACE6 */
  private static final int pace6Estimator = 6;
  /** estimator type: Ordinary least squares selection */
  private static final int olscEstimator = 7;
  /** estimator type: AIC */
  private static final int aicEstimator = 8;
  /** estimator type: BIC */
  private static final int bicEstimator = 9;
  /** estimator type: RIC */
  private static final int ricEstimator = 10;
  /** estimator types */
  public static final Tag [] TAGS_ESTIMATOR = {
    new Tag(olsEstimator, "Ordinary least squares"),
    new Tag(ebEstimator, "Empirical Bayes"),
    new Tag(nestedEstimator, "Nested model selector"),
    new Tag(subsetEstimator, "Subset selector"),
    new Tag(pace2Estimator, "PACE2"),
    new Tag(pace4Estimator, "PACE4"),
    new Tag(pace6Estimator, "PACE6"),
    new Tag(olscEstimator, "Ordinary least squares selection"),
    new Tag(aicEstimator, "AIC"),
    new Tag(bicEstimator, "BIC"),
    new Tag(ricEstimator, "RIC")
  };

  /** the estimator */
  private int paceEstimator = ebEstimator;

  private double olscThreshold = 2;  // AIC

  /**
   * Returns a string describing this classifier
   * @return a description of the classifier suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {
    return "Class for building pace regression linear models and using them for "
      +"prediction. \n\n"
      +"Under regularity conditions, pace regression is provably optimal when "
      +"the number of coefficients tends to infinity. It consists of a group of "
      +"estimators that are either overall optimal or optimal under certain "
      +"conditions.\n\n"
      +"The current work on pace regression theory, and therefore also this "
      +"implementation, does not handle: \n\n"
      +"- missing values \n"
      +"- non-binary nominal attributes \n"
      +"- the case that n - k is small, where n is the number of instances and k is "
      +"the number of coefficients (the threshold used in this implementation is 20)\n\n"
      +"For more information see:\n\n"
      + getTechnicalInformation().toString();
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation        result;
    TechnicalInformation        additional;

    result = new TechnicalInformation(Type.PHDTHESIS);
    result.setValue(Field.AUTHOR, "Wang, Y");
    result.setValue(Field.YEAR, "2000");
    result.setValue(Field.TITLE, "A new approach to fitting linear models in high dimensional spaces");
    result.setValue(Field.SCHOOL, "Department of Computer Science, University of Waikato");
    result.setValue(Field.ADDRESS, "Hamilton, New Zealand");

    additional = result.add(Type.INPROCEEDINGS);
    additional.setValue(Field.AUTHOR, "Wang, Y. and Witten, I. H.");
    additional.setValue(Field.YEAR, "2002");
    additional.setValue(Field.TITLE, "Modeling for optimal probability prediction");
    additional.setValue(Field.BOOKTITLE, "Proceedings of the Nineteenth International Conference in Machine Learning");
    additional.setValue(Field.PAGES, "650-657");
    additional.setValue(Field.ADDRESS, "Sydney, Australia");

    return result;
  }

  /**
   * Returns default capabilities of the classifier.
   *
   * @return      the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result = super.getCapabilities();
    result.disableAll();

    // attributes
    result.enable(Capability.BINARY_ATTRIBUTES);
    result.enable(Capability.NUMERIC_ATTRIBUTES);

    // class
    result.enable(Capability.NUMERIC_CLASS);
    result.enable(Capability.DATE_CLASS);
    result.enable(Capability.MISSING_CLASS_VALUES);

    return result;
  }

  /**
   * Builds a pace regression model for the given data.
   *
   * @param data the training data to be used for generating the
   * linear regression function
   * @throws Exception if the classifier could not be built successfully
   */
  public void buildClassifier(Instances data) throws Exception {

    // can classifier handle the data?
    Capabilities cap = getCapabilities();
    cap.setMinimumNumberInstances(20 + data.numAttributes());
    cap.testWithFail(data);
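
    // The minimum-instances requirement above amounts to demanding at least
    // data.numAttributes() + 20 training instances, which corresponds to the
    // "n - k is small" restriction (threshold 20) described in the class documentation.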

    // remove instances with missing class
    data = new Instances(data);
    data.deleteWithMissingClass();

    /*
     * initialize the following
     */
    m_Model = new Instances(data, 0);
    m_ClassIndex = data.classIndex();
    double[][] transformedDataMatrix = 
      getTransformedDataMatrix(data, m_ClassIndex);
    double[] classValueVector = data.attributeToDoubleArray(m_ClassIndex);

    m_Coefficients = null;

    /*
     * Perform pace regression
     */
    m_Coefficients = pace(transformedDataMatrix, classValueVector);
  }

  /**
   * pace regression
   *
   * @param matrix_X matrix with observations
   * @param vector_Y vector with class values
   * @return vector with coefficients
   */
  private double [] pace(double[][] matrix_X, double [] vector_Y) {

    PaceMatrix X = new PaceMatrix( matrix_X );
    PaceMatrix Y = new PaceMatrix( vector_Y, vector_Y.length );
    IntVector pvt = IntVector.seq(0, X.getColumnDimension()-1);
    int n = X.getRowDimension();
    int kr = X.getColumnDimension();

    X.lsqrSelection( Y, pvt, 1 );
    X.positiveDiagonal( Y, pvt );

    PaceMatrix sol = (PaceMatrix) Y.clone();
    X.rsolve( sol, pvt, pvt.size() );
    DoubleVector r = Y.getColumn( pvt.size(), n-1, 0);
    double sde = Math.sqrt(r.sum2() / r.size());

    DoubleVector aHat = Y.getColumn( 0, pvt.size()-1, 0).times( 1./sde );

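    // At this point aHat holds the first pvt.size() entries of the orthogonally
    // transformed response, divided by the residual-based noise estimate sde.
    // The switch below shrinks or thresholds these standardized values according
    // to the selected estimator before they are mapped back to coefficients.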
    DoubleVector aTilde = null;
    switch( paceEstimator) {
    case ebEstimator: 
    case nestedEstimator:
    case subsetEstimator:
      NormalMixture d = new NormalMixture();
      d.fit( aHat, MixtureDistribution.NNMMethod ); 
      if( paceEstimator == ebEstimator ) 
        aTilde = d.empiricalBayesEstimate( aHat );
      else if( paceEstimator == subsetEstimator ) 
        aTilde = d.subsetEstimate( aHat );
      else aTilde = d.nestedEstimate( aHat );
      break;
    case pace2Estimator: 
    case pace4Estimator:
    case pace6Estimator:
      DoubleVector AHat = aHat.square();
      ChisqMixture dc = new ChisqMixture();
      dc.fit( AHat, MixtureDistribution.NNMMethod ); 
      DoubleVector ATilde; 
      if( paceEstimator == pace6Estimator ) 
        ATilde = dc.pace6( AHat );
      else if( paceEstimator == pace2Estimator ) 
        ATilde = dc.pace2( AHat );
      else ATilde = dc.pace4( AHat );
      aTilde = ATilde.sqrt().times( aHat.sign() );
      break;
    case olsEstimator: 
      aTilde = aHat.copy();
      break;
    case aicEstimator: 
    case bicEstimator:
    case ricEstimator: 
    case olscEstimator:
      if(paceEstimator == aicEstimator) olscThreshold = 2;
      else if(paceEstimator == bicEstimator) olscThreshold = Math.log( n );
      else if(paceEstimator == ricEstimator) olscThreshold = 2*Math.log( kr );
      aTilde = aHat.copy();
      for( int i = 0; i < aTilde.size(); i++ )
        if( Math.abs(aTilde.get(i)) < Math.sqrt(olscThreshold) ) 
          aTilde.set(i, 0);
    }
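    // Undo the standardization (multiply by sde), back-solve the triangular system,
    // and reverse the column pivoting to obtain coefficients in the original
    // attribute order.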
    PaceMatrix YTilde = new PaceMatrix((new PaceMatrix(aTilde)).times( sde ));
    X.rsolve( YTilde, pvt, pvt.size() );
    DoubleVector betaTilde = YTilde.getColumn(0).unpivoting( pvt, kr );

    return betaTilde.getArrayCopy();
  }

  /**
   * Checks if an instance has a missing value.
   * @param instance the instance
   * @param model the data
   * @return true if missing value is present
   */
  public boolean checkForMissing(Instance instance, Instances model) {

    for (int j = 0; j < instance.numAttributes(); j++) {
      if (j != model.classIndex()) {
        if (instance.isMissing(j)) {
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Transforms dataset into a two-dimensional array.
   *
   * @param data dataset
   * @param classIndex index of the class attribute
   * @return the transformed data
   */
  private double [][] getTransformedDataMatrix(Instances data, 
                                               int classIndex) {
    int numInstances = data.numInstances();
    int numAttributes = data.numAttributes();
    int middle = classIndex;
    if (middle < 0) { 
      middle = numAttributes;
    }
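
    // Layout of each resulting row: column 0 holds a constant 1.0 for the intercept,
    // attributes before the class are shifted right by one column, attributes after
    // the class keep their original column index, and the class value itself is dropped.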

    double[][] result = new double[numInstances][numAttributes];
    for (int i = 0; i < numInstances; i++) {
      Instance inst = data.instance(i);

      result[i][0] = 1.0;

      // the class value (lies on index middle) is left out
      for (int j = 0; j < middle; j++) {
        result[i][j + 1] = inst.value(j);
      }
      for (int j = middle + 1; j < numAttributes; j++) {
        result[i][j] = inst.value(j);
      }
    }
    return result;
  }


  /**
   * Classifies the given instance using the linear regression function.
   *
   * @param instance the test instance
   * @return the classification
   * @throws Exception if classification can't be done successfully
   */
  public double classifyInstance(Instance instance) throws Exception {

    if (m_Coefficients == null) {
      throw new Exception("Pace Regression: No model built yet.");
    }

    // check for missing data and throw exception if some are found
    if (checkForMissing(instance, m_Model)) {
      throw new NoSupportForMissingValuesException("Can't handle missing values!");
    }

    // Calculate the dependent variable from the regression model
    return regressionPrediction(instance,
                                m_Coefficients);
  }

  /**
   * Outputs the linear regression model as a string.
   *
   * @return the model as string
   */
  public String toString() {

    if (m_Coefficients == null) {
      return "Pace Regression: No model built yet.";
    }

    StringBuffer text = new StringBuffer();

    text.append("\nPace Regression Model\n\n");

    text.append(m_Model.classAttribute().name()+" =\n\n");
    int index = 0;

    text.append(Utils.doubleToString(m_Coefficients[0],
                                     12, 4) );

    for (int i = 1; i < m_Coefficients.length; i++) {

      // jump over the class attribute
      if (index == m_ClassIndex) index++;

      if (m_Coefficients[i] != 0.0) {
        // output a coefficient if unequal zero
        text.append(" +\n");
        text.append(Utils.doubleToString(m_Coefficients[i], 12, 4)
                    + " * ");
        text.append(m_Model.attribute(index).name());
      }
      index++;
    }

    return text.toString();
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {

    Vector newVector = new Vector(2);
    newVector.addElement(new Option("\tProduce debugging output.\n"
                                    + "\t(default no debugging output)",
                                    "D", 0, "-D"));
    newVector.addElement(new Option("\tThe estimator can be one of the following:\n" + 
                                    "\t\teb -- Empirical Bayes estimator for normal mixture (default)\n" +
                                    "\t\tnested -- Optimal nested model selector for normal mixture\n" + 
                                    "\t\tsubset -- Optimal subset selector for normal mixture\n" +
                                    "\t\tpace2 -- PACE2 for Chi-square mixture\n" +
                                    "\t\tpace4 -- PACE4 for Chi-square mixture\n" +
                                    "\t\tpace6 -- PACE6 for Chi-square mixture\n\n" + 
                                    "\t\tols -- Ordinary least squares estimator\n" + 
                                    "\t\taic -- AIC estimator\n" + 
                                    "\t\tbic -- BIC estimator\n" + 
                                    "\t\tric -- RIC estimator\n" + 
                                    "\t\tolsc -- Ordinary least squares subset selector with a threshold", 
                                    "E", 0, "-E <estimator>"));
    newVector.addElement(new Option("\tThreshold value for the OLSC estimator",
                                    "S", 0, "-S <threshold value>"));
    return newVector.elements();
  }

  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -D
   *  Produce debugging output.
   *  (default no debugging output)</pre>
   *
   * <pre> -E &lt;estimator&gt;
   *  The estimator can be one of the following:
   *   eb -- Empirical Bayes estimator for normal mixture (default)
   *   nested -- Optimal nested model selector for normal mixture
   *   subset -- Optimal subset selector for normal mixture
   *   pace2 -- PACE2 for Chi-square mixture
   *   pace4 -- PACE4 for Chi-square mixture
   *   pace6 -- PACE6 for Chi-square mixture
   *
   *   ols -- Ordinary least squares estimator
   *   aic -- AIC estimator
   *   bic -- BIC estimator
   *   ric -- RIC estimator
   *   olsc -- Ordinary least squares subset selector with a threshold</pre>
   *
   * <pre> -S &lt;threshold value&gt;
   *  Threshold value for the OLSC estimator</pre>
   *
   <!-- options-end -->
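   *
   * An illustrative programmatic call (assuming a PaceRegression instance named
   * "classifier"):
   * <pre>
   * classifier.setOptions(new String[] {"-E", "olsc", "-S", "4.0"});
   * </pre>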
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    setDebug(Utils.getFlag('D', options));

    String estimator = Utils.getOption('E', options);
    if ( estimator.equals("ols") ) paceEstimator = olsEstimator;
    else if ( estimator.equals("olsc") ) paceEstimator = olscEstimator;
    else if( estimator.equals("eb") || estimator.equals("") ) 
      paceEstimator = ebEstimator;
    else if ( estimator.equals("nested") ) paceEstimator = nestedEstimator;
    else if ( estimator.equals("subset") ) paceEstimator = subsetEstimator;
    else if ( estimator.equals("pace2") ) paceEstimator = pace2Estimator; 
    else if ( estimator.equals("pace4") ) paceEstimator = pace4Estimator;
    else if ( estimator.equals("pace6") ) paceEstimator = pace6Estimator;
    else if ( estimator.equals("aic") ) paceEstimator = aicEstimator;
    else if ( estimator.equals("bic") ) paceEstimator = bicEstimator;
    else if ( estimator.equals("ric") ) paceEstimator = ricEstimator;
    else throw new WekaException("unknown estimator " + estimator + 
                                 " for -E option" );

    String string = Utils.getOption('S', options);
    if( ! string.equals("") ) olscThreshold = Double.parseDouble( string );
  }

  /**
   * Returns the coefficients for this linear model.
   *
   * @return the coefficients for this linear model
   */
  public double[] coefficients() {

    double[] coefficients = new double[m_Coefficients.length];
    for (int i = 0; i < coefficients.length; i++) {
      coefficients[i] = m_Coefficients[i];
    }
    return coefficients;
  }

  /**
   * Gets the current settings of the classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String [] getOptions() {

    String [] options = new String [6];
    int current = 0;

    if (getDebug()) {
      options[current++] = "-D";
    }

    options[current++] = "-E";
    switch (paceEstimator) {
    case olsEstimator: options[current++] = "ols";
      break;
    case olscEstimator: options[current++] = "olsc";
      options[current++] = "-S";
      options[current++] = "" + olscThreshold;
      break;
    case ebEstimator: options[current++] = "eb";
      break;
    case nestedEstimator: options[current++] = "nested";
      break;
    case subsetEstimator: options[current++] = "subset";
      break;
    case pace2Estimator: options[current++] = "pace2";
      break; 
    case pace4Estimator: options[current++] = "pace4";
      break;
    case pace6Estimator: options[current++] = "pace6";
      break;
    case aicEstimator: options[current++] = "aic";
      break;
    case bicEstimator: options[current++] = "bic";
      break;
    case ricEstimator: options[current++] = "ric";
      break;
    }

    while (current < options.length) {
      options[current++] = "";
    }
    return options;
  }


  /**
   * Get the number of coefficients used in the model
   *
   * @return the number of coefficients
   */
  public int numParameters() {
    return m_Coefficients.length-1;
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String debugTipText() {
    return "Output debug information to the console.";
  }

  /**
   * Controls whether debugging output will be printed
   *
   * @param debug true if debugging output should be printed
   */
  public void setDebug(boolean debug) {

    m_Debug = debug;
  }

  /**
   * Controls whether debugging output will be printed
   *
   * @return true if debugging output should be printed
   */
  public boolean getDebug() {

    return m_Debug;
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String estimatorTipText() {
    return "The estimator to use.\n\n"
      +"eb -- Empirical Bayes estimator for normal mixture (default)\n"
      +"nested -- Optimal nested model selector for normal mixture\n"
      +"subset -- Optimal subset selector for normal mixture\n"
      +"pace2 -- PACE2 for Chi-square mixture\n"
      +"pace4 -- PACE4 for Chi-square mixture\n"
      +"pace6 -- PACE6 for Chi-square mixture\n"
      +"ols -- Ordinary least squares estimator\n"
      +"aic -- AIC estimator\n"
      +"bic -- BIC estimator\n"
      +"ric -- RIC estimator\n"
      +"olsc -- Ordinary least squares subset selector with a threshold";
  }

  /**
   * Gets the estimator
   *
   * @return the estimator
   */
  public SelectedTag getEstimator() {

    return new SelectedTag(paceEstimator, TAGS_ESTIMATOR);
  }

  /**
   * Sets the estimator.
   *
   * @param estimator the new estimator
   */
  public void setEstimator(SelectedTag estimator) {

    if (estimator.getTags() == TAGS_ESTIMATOR) {
      paceEstimator = estimator.getSelectedTag().getID();
    }
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String thresholdTipText() {
    return "Threshold for the olsc estimator.";
  }

  /**
   * Set threshold for the olsc estimator
   *
   * @param newThreshold the threshold for the olsc estimator
   */
  public void setThreshold(double newThreshold) {

    olscThreshold = newThreshold;
  }

  /**
   * Gets the threshold for olsc estimator
   *
   * @return the threshold
   */
  public double getThreshold() {

    return olscThreshold;
  }


  /**
   * Calculate the dependent value for a given instance for a
   * given regression model.
   *
   * @param transformedInstance the input instance
   * @param coefficients an array of coefficients for the regression
   * model
   * @return the regression value for the instance.
   * @throws Exception if the class attribute of the input instance
   * is not assigned
   */
  private double regressionPrediction(Instance transformedInstance,
                                      double [] coefficients) 
    throws Exception {

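    // coefficients[0] is the intercept; the remaining entries are applied, in order,
    // to the values of the non-class attributes of the instance.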
    int column = 0;
    double result = coefficients[column];
    for (int j = 0; j < transformedInstance.numAttributes(); j++) {
      if (m_ClassIndex != j) {
        column++;
        result += coefficients[column] * transformedInstance.value(j);
      }
    }

    return result;
  }

  /**
   * Returns the revision string.
   *
   * @return            the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 5928 $");
  }

  /**
   * Generates a linear regression function predictor.
   *
   * @param argv the options
   */
  public static void main(String argv[]) {
    runClassifier(new PaceRegression(), argv);
  }
}