source: branches/MetisMQI/src/main/java/weka/classifiers/lazy/LWL.java @ 38

Last change on this file since 38 was 29, checked in by gnappo, 14 years ago

Tagged version for the demo and added a branch.

File size: 23.1 KB
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    LWL.java
 *    Copyright (C) 1999, 2002, 2003 University of Waikato, Hamilton, New Zealand
 *
 */

package weka.classifiers.lazy;

import weka.classifiers.Classifier;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.SingleClassifierEnhancer;
import weka.classifiers.UpdateableClassifier;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.neighboursearch.LinearNNSearch;
import weka.core.neighboursearch.NearestNeighbourSearch;
import weka.core.Option;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;

import java.util.Enumeration;
import java.util.Vector;

/**
 <!-- globalinfo-start -->
 * Locally weighted learning. Uses an instance-based algorithm to assign instance weights which are then used by a specified WeightedInstancesHandler.<br/>
 * Can do classification (e.g. using naive Bayes) or regression (e.g. using linear regression).<br/>
 * <br/>
 * For more info, see<br/>
 * <br/>
 * Eibe Frank, Mark Hall, Bernhard Pfahringer: Locally Weighted Naive Bayes. In: 19th Conference in Uncertainty in Artificial Intelligence, 249-256, 2003.<br/>
 * <br/>
 * C. Atkeson, A. Moore, S. Schaal (1996). Locally weighted learning. AI Review.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;inproceedings{Frank2003,
 *    author = {Eibe Frank and Mark Hall and Bernhard Pfahringer},
 *    booktitle = {19th Conference in Uncertainty in Artificial Intelligence},
 *    pages = {249-256},
 *    publisher = {Morgan Kaufmann},
 *    title = {Locally Weighted Naive Bayes},
 *    year = {2003}
 * }
 *
 * &#64;article{Atkeson1996,
 *    author = {C. Atkeson and A. Moore and S. Schaal},
 *    journal = {AI Review},
 *    title = {Locally weighted learning},
 *    year = {1996}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -A
 *  The nearest neighbour search algorithm to use (default: weka.core.neighboursearch.LinearNNSearch).
 * </pre>
 *
 * <pre> -K &lt;number of neighbours&gt;
 *  Set the number of neighbours used to set the kernel bandwidth.
 *  (default all)</pre>
 *
 * <pre> -U &lt;number of weighting method&gt;
 *  Set the weighting kernel shape to use. 0=Linear, 1=Epanechnikov,
 *  2=Tricube, 3=Inverse, 4=Gaussian.
 *  (default 0 = Linear)</pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 * <pre> -W
 *  Full name of base classifier.
 *  (default: weka.classifiers.trees.DecisionStump)</pre>
 *
 * <pre>
 * Options specific to classifier weka.classifiers.trees.DecisionStump:
 * </pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 <!-- options-end -->
 *
 * @author Len Trigg (trigg@cs.waikato.ac.nz)
 * @author Eibe Frank (eibe@cs.waikato.ac.nz)
 * @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
 * @version $Revision: 6055 $
 */
public class LWL 
  extends SingleClassifierEnhancer
  implements UpdateableClassifier, WeightedInstancesHandler, 
             TechnicalInformationHandler {
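
  /*
   * A minimal usage sketch (not part of the original source; the dataset path
   * and parameter values below are hypothetical):
   *
   *   Instances data = new Instances(
   *       new java.io.BufferedReader(new java.io.FileReader("/path/to/data.arff")));
   *   data.setClassIndex(data.numAttributes() - 1);
   *
   *   LWL lwl = new LWL();
   *   lwl.setKNN(50);                      // bandwidth from the 50th neighbour
   *   lwl.setWeightingKernel(LWL.GAUSS);   // Gaussian weighting kernel
   *   lwl.buildClassifier(data);
   *   double[] dist = lwl.distributionForInstance(data.instance(0));
   */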

  /** for serialization. */
  static final long serialVersionUID = 1979797405383665815L;

  /** The training instances used for classification. */
  protected Instances m_Train;

  /** The number of neighbours used to select the kernel bandwidth. */
  protected int m_kNN = -1;

  /** The weighting kernel method currently selected. */
  protected int m_WeightKernel = LINEAR;

  /** True if m_kNN should be set to all instances. */
  protected boolean m_UseAllK = true;

  /** The nearest neighbour search algorithm to use.
   * (Default: weka.core.neighboursearch.LinearNNSearch)
   */
  protected NearestNeighbourSearch m_NNSearch = new LinearNNSearch();

  /** The available kernel weighting methods. */
  public static final int LINEAR       = 0;
  public static final int EPANECHNIKOV = 1;
  public static final int TRICUBE      = 2;
  public static final int INVERSE      = 3;
  public static final int GAUSS        = 4;
  public static final int CONSTANT     = 5;

  /** a ZeroR model in case no model can be built from the data. */
  protected Classifier m_ZeroR;

  /**
   * Returns a string describing classifier.
   * @return a description suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {
    return 
        "Locally weighted learning. Uses an instance-based algorithm to "
      + "assign instance weights which are then used by a specified "
      + "WeightedInstancesHandler.\n"
      + "Can do classification (e.g. using naive Bayes) or regression "
      + "(e.g. using linear regression).\n\n"
      + "For more info, see\n\n"
      + getTechnicalInformation().toString();
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation        result;
    TechnicalInformation        additional;

    result = new TechnicalInformation(Type.INPROCEEDINGS);
    result.setValue(Field.AUTHOR, "Eibe Frank and Mark Hall and Bernhard Pfahringer");
    result.setValue(Field.YEAR, "2003");
    result.setValue(Field.TITLE, "Locally Weighted Naive Bayes");
    result.setValue(Field.BOOKTITLE, "19th Conference in Uncertainty in Artificial Intelligence");
    result.setValue(Field.PAGES, "249-256");
    result.setValue(Field.PUBLISHER, "Morgan Kaufmann");

    additional = result.add(Type.ARTICLE);
    additional.setValue(Field.AUTHOR, "C. Atkeson and A. Moore and S. Schaal");
    additional.setValue(Field.YEAR, "1996");
    additional.setValue(Field.TITLE, "Locally weighted learning");
    additional.setValue(Field.JOURNAL, "AI Review");

    return result;
  }

  /**
   * Constructor.
   */
  public LWL() {
    m_Classifier = new weka.classifiers.trees.DecisionStump();
  }

  /**
   * String describing default classifier.
   *
   * @return the default classifier classname
   */
  protected String defaultClassifierString() {

    return "weka.classifiers.trees.DecisionStump";
  }

  /**
   * Returns an enumeration of the additional measure names
   * produced by the neighbour search algorithm.
   * @return an enumeration of the measure names
   */
  public Enumeration enumerateMeasures() {
    return m_NNSearch.enumerateMeasures();
  }

  /**
   * Returns the value of the named measure from the
   * neighbour search algorithm.
   * @param additionalMeasureName the name of the measure to query for its value
   * @return the value of the named measure
   * @throws IllegalArgumentException if the named measure is not supported
   */
  public double getMeasure(String additionalMeasureName) {
    return m_NNSearch.getMeasure(additionalMeasureName);
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {

    Vector newVector = new Vector(3);
    newVector.addElement(new Option("\tThe nearest neighbour search " +
                                    "algorithm to use " +
                                    "(default: weka.core.neighboursearch.LinearNNSearch).\n",
                                    "A", 0, "-A"));
    newVector.addElement(new Option("\tSet the number of neighbours used to set"
                                    +" the kernel bandwidth.\n"
                                    +"\t(default all)",
                                    "K", 1, "-K <number of neighbours>"));
    newVector.addElement(new Option("\tSet the weighting kernel shape to use."
                                    +" 0=Linear, 1=Epanechnikov,\n"
                                    +"\t2=Tricube, 3=Inverse, 4=Gaussian.\n"
                                    +"\t(default 0 = Linear)",
                                    "U", 1,"-U <number of weighting method>"));

    Enumeration enu = super.listOptions();
    while (enu.hasMoreElements()) {
      newVector.addElement(enu.nextElement());
    }

    return newVector.elements();
  }

  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -A
   *  The nearest neighbour search algorithm to use (default: weka.core.neighboursearch.LinearNNSearch).
   * </pre>
   *
   * <pre> -K &lt;number of neighbours&gt;
   *  Set the number of neighbours used to set the kernel bandwidth.
   *  (default all)</pre>
   *
   * <pre> -U &lt;number of weighting method&gt;
   *  Set the weighting kernel shape to use. 0=Linear, 1=Epanechnikov,
   *  2=Tricube, 3=Inverse, 4=Gaussian.
   *  (default 0 = Linear)</pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   * <pre> -W
   *  Full name of base classifier.
   *  (default: weka.classifiers.trees.DecisionStump)</pre>
   *
   * <pre>
   * Options specific to classifier weka.classifiers.trees.DecisionStump:
   * </pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   <!-- options-end -->
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    String knnString = Utils.getOption('K', options);
    if (knnString.length() != 0) {
      setKNN(Integer.parseInt(knnString));
    } else {
      setKNN(-1);
    }

    String weightString = Utils.getOption('U', options);
    if (weightString.length() != 0) {
      setWeightingKernel(Integer.parseInt(weightString));
    } else {
      setWeightingKernel(LINEAR);
    }

    String nnSearchClass = Utils.getOption('A', options);
    if (nnSearchClass.length() != 0) {
      String nnSearchClassSpec[] = Utils.splitOptions(nnSearchClass);
      if (nnSearchClassSpec.length == 0) {
        throw new Exception("Invalid NearestNeighbourSearch algorithm " +
                            "specification string.");
      }
      String className = nnSearchClassSpec[0];
      nnSearchClassSpec[0] = "";

      setNearestNeighbourSearchAlgorithm( (NearestNeighbourSearch)
                  Utils.forName( NearestNeighbourSearch.class, 
                                 className, 
                                 nnSearchClassSpec)
                                        );
    }
    else 
      this.setNearestNeighbourSearchAlgorithm(new LinearNNSearch());

    super.setOptions(options);
  }
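
  /*
   * Equivalent option-string configuration (a sketch using only the -K, -U
   * and -A options documented above):
   *
   *   LWL lwl = new LWL();
   *   lwl.setOptions(weka.core.Utils.splitOptions(
   *       "-K 10 -U 2 -A weka.core.neighboursearch.LinearNNSearch"));
   */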

  /**
   * Gets the current settings of the classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String [] getOptions() {

    String [] superOptions = super.getOptions();
    String [] options = new String [superOptions.length + 6];

    int current = 0;

    options[current++] = "-U"; options[current++] = "" + getWeightingKernel();
    if ( (getKNN() == 0) && m_UseAllK) {
      options[current++] = "-K"; options[current++] = "-1";
    }
    else {
      options[current++] = "-K"; options[current++] = "" + getKNN();
    }
    options[current++] = "-A";
    options[current++] = m_NNSearch.getClass().getName()+" "+Utils.joinOptions(m_NNSearch.getOptions());

    System.arraycopy(superOptions, 0, options, current,
                     superOptions.length);

    return options;
  }

  /**
   * Returns the tip text for this property.
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String KNNTipText() {
    return "How many neighbours are used to determine the width of the "
      + "weighting function (<= 0 means all neighbours).";
  }

  /**
   * Sets the number of neighbours used for kernel bandwidth setting.
   * The bandwidth is taken as the distance to the kth neighbour.
   *
   * @param knn the number of neighbours included inside the kernel
   * bandwidth, or 0 to specify using all neighbours.
   */
  public void setKNN(int knn) {

    m_kNN = knn;
    if (knn <= 0) {
      m_kNN = 0;
      m_UseAllK = true;
    } else {
      m_UseAllK = false;
    }
  }
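
  /*
   * Worked example (illustrative values only): with setKNN(3) and neighbour
   * distances {1, 2, 4}, the bandwidth is the 3rd distance, 4, so the
   * normalised distances passed to the weighting kernel are {0.25, 0.5, 1}.
   */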

  /**
   * Gets the number of neighbours used for kernel bandwidth setting.
   * The bandwidth is taken as the distance to the kth neighbour.
   *
   * @return the number of neighbours included inside the kernel
   * bandwidth, or 0 for all neighbours
   */
  public int getKNN() {

    return m_kNN;
  }

  /**
   * Returns the tip text for this property.
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String weightingKernelTipText() {
    return "Determines weighting function [0 = Linear, 1 = Epanechnikov, "
      + "2 = Tricube, 3 = Inverse, 4 = Gaussian, 5 = Constant] "
      + "(default 0 = Linear).";
  }

  /**
   * Sets the kernel weighting method to use. Must be one of LINEAR,
   * EPANECHNIKOV, TRICUBE, INVERSE, GAUSS or CONSTANT, other values
   * are ignored.
   *
   * @param kernel the new kernel method to use. Must be one of LINEAR,
   * EPANECHNIKOV, TRICUBE, INVERSE, GAUSS or CONSTANT.
   */
  public void setWeightingKernel(int kernel) {

    if ((kernel != LINEAR)
        && (kernel != EPANECHNIKOV)
        && (kernel != TRICUBE)
        && (kernel != INVERSE)
        && (kernel != GAUSS)
        && (kernel != CONSTANT)) {
      return;
    }
    m_WeightKernel = kernel;
  }

  /**
   * Gets the kernel weighting method to use.
   *
   * @return the new kernel method to use. Will be one of LINEAR,
   * EPANECHNIKOV, TRICUBE, INVERSE, GAUSS or CONSTANT.
   */
  public int getWeightingKernel() {

    return m_WeightKernel;
  }

  /**
   * Returns the tip text for this property.
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String nearestNeighbourSearchAlgorithmTipText() {
    return "The nearest neighbour search algorithm to use (Default: LinearNN).";
  }

  /**
   * Returns the current nearestNeighbourSearch algorithm in use.
   * @return the NearestNeighbourSearch algorithm currently in use.
   */
  public NearestNeighbourSearch getNearestNeighbourSearchAlgorithm() {
    return m_NNSearch;
  }

  /**
   * Sets the nearestNeighbourSearch algorithm to be used for finding nearest
   * neighbour(s).
   * @param nearestNeighbourSearchAlgorithm - The NearestNeighbourSearch class.
   */
  public void setNearestNeighbourSearchAlgorithm(NearestNeighbourSearch nearestNeighbourSearchAlgorithm) {
    m_NNSearch = nearestNeighbourSearchAlgorithm;
  }

  /**
   * Returns default capabilities of the classifier.
   *
   * @return      the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result;

    if (m_Classifier != null) {
      result = m_Classifier.getCapabilities();
    } else {
      result = super.getCapabilities();
    }

    result.setMinimumNumberInstances(0);

    // set dependencies
    for (Capability cap: Capability.values())
      result.enableDependency(cap);

    return result;
  }

  /**
   * Generates the classifier.
   *
   * @param instances set of instances serving as training data
   * @throws Exception if the classifier has not been generated successfully
   */
  public void buildClassifier(Instances instances) throws Exception {

    if (!(m_Classifier instanceof WeightedInstancesHandler)) {
      throw new IllegalArgumentException("Classifier must be a "
                                         + "WeightedInstancesHandler!");
    }

    // can classifier handle the data?
    getCapabilities().testWithFail(instances);

    // remove instances with missing class
    instances = new Instances(instances);
    instances.deleteWithMissingClass();

    // only class? -> build ZeroR model
    if (instances.numAttributes() == 1) {
      System.err.println(
          "Cannot build model (only class attribute present in data!), "
          + "using ZeroR model instead!");
      m_ZeroR = new weka.classifiers.rules.ZeroR();
      m_ZeroR.buildClassifier(instances);
      return;
    }
    else {
      m_ZeroR = null;
    }

    m_Train = new Instances(instances, 0, instances.numInstances());

    m_NNSearch.setInstances(m_Train);
  }
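
  /*
   * Note: LWL is lazy. buildClassifier() only stores the training data and
   * initialises the nearest neighbour search; the base classifier is trained
   * per query in distributionForInstance(), on the locally weighted
   * neighbours of the test instance.
   */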

  /**
   * Adds the supplied instance to the training set.
   *
   * @param instance the instance to add
   * @throws Exception if instance could not be incorporated
   * successfully
   */
  public void updateClassifier(Instance instance) throws Exception {

    if (m_Train == null) {
      throw new Exception("No training instance structure set!");
    }
    else if (m_Train.equalHeaders(instance.dataset()) == false) {
      throw new Exception("Incompatible instance types\n" + m_Train.equalHeadersMsg(instance.dataset()));
    }
    if (!instance.classIsMissing()) {
      m_NNSearch.update(instance);
      m_Train.add(instance);
    }
  }
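
  /*
   * Incremental usage sketch (hypothetical "stream" Instances object with the
   * same header as the training data):
   *
   *   for (int i = 0; i < stream.numInstances(); i++)
   *     lwl.updateClassifier(stream.instance(i));
   */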

  /**
   * Calculates the class membership probabilities for the given test instance.
   *
   * @param instance the instance to be classified
   * @return predicted class probability distribution
   * @throws Exception if distribution can't be computed successfully
   */
  public double[] distributionForInstance(Instance instance) throws Exception {

    // default model?
    if (m_ZeroR != null) {
      return m_ZeroR.distributionForInstance(instance);
    }

    if (m_Train.numInstances() == 0) {
      throw new Exception("No training instances!");
    }

    m_NNSearch.addInstanceInfo(instance);

    int k = m_Train.numInstances();
    if( (!m_UseAllK && (m_kNN < k)) /*&&
       !(m_WeightKernel==INVERSE ||
         m_WeightKernel==GAUSS)*/ ) {
      k = m_kNN;
    }

    Instances neighbours = m_NNSearch.kNearestNeighbours(instance, k);
    double distances[] = m_NNSearch.getDistances();

    if (m_Debug) {
      System.out.println("Test Instance: "+instance);
      System.out.println("For "+k+" kept " + neighbours.numInstances() + " out of " + 
                         m_Train.numInstances() + " instances.");
    }

    // If the search has skipped so many instances that fewer than k neighbours remain.
    if(k>distances.length)
      k = distances.length;

    if (m_Debug) {
      System.out.println("Instance Distances");
      for (int i = 0; i < distances.length; i++) {
        System.out.println("" + distances[i]);
      }
    }

    // Determine the bandwidth
    double bandwidth = distances[k-1];

    // Check for bandwidth zero
    if (bandwidth <= 0) {
      // if the kth distance is zero then give all instances the same weight
      for(int i=0; i < distances.length; i++)
        distances[i] = 1;
    } else {
      // Rescale the distances by the bandwidth
      for (int i = 0; i < distances.length; i++)
        distances[i] = distances[i] / bandwidth;
    }

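    // Kernel shapes applied to each normalised distance d (the 1.0001 offset
    // keeps the weight of the kth neighbour, at d = 1, strictly positive):
    //   LINEAR:       1.0001 - d
    //   EPANECHNIKOV: 0.75 * (1.0001 - d^2)
    //   TRICUBE:      (1.0001 - d^3)^3
    //   INVERSE:      1 / (1 + d)
    //   GAUSS:        exp(-d^2)
    //   CONSTANT:     1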
    // Pass the distances through a weighting kernel
    for (int i = 0; i < distances.length; i++) {
      switch (m_WeightKernel) {
        case LINEAR:
          distances[i] = 1.0001 - distances[i];
          break;
        case EPANECHNIKOV:
          distances[i] = 3/4D*(1.0001 - distances[i]*distances[i]);
          break;
        case TRICUBE:
          distances[i] = Math.pow( (1.0001 - Math.pow(distances[i], 3)), 3 );
          break;
        case CONSTANT:
          //System.err.println("using constant kernel");
          distances[i] = 1;
          break;
        case INVERSE:
          distances[i] = 1.0 / (1.0 + distances[i]);
          break;
        case GAUSS:
          distances[i] = Math.exp(-distances[i] * distances[i]);
          break;
      }
    }

    if (m_Debug) {
      System.out.println("Instance Weights");
      for (int i = 0; i < distances.length; i++) {
        System.out.println("" + distances[i]);
      }
    }

    // Set the weights on the training data
    double sumOfWeights = 0, newSumOfWeights = 0;
    for (int i = 0; i < distances.length; i++) {
      double weight = distances[i];
      Instance inst = (Instance) neighbours.instance(i);
      sumOfWeights += inst.weight();
      newSumOfWeights += inst.weight() * weight;
      inst.setWeight(inst.weight() * weight);
      //weightedTrain.add(newInst);
    }

    // Rescale weights
    for (int i = 0; i < neighbours.numInstances(); i++) {
      Instance inst = neighbours.instance(i);
      inst.setWeight(inst.weight() * sumOfWeights / newSumOfWeights);
    }
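
    // Note: the rescaling above preserves the total weight of the neighbours.
    // Each weight was multiplied by its kernel value (newSumOfWeights is the
    // kernel-weighted sum), so scaling by sumOfWeights / newSumOfWeights
    // restores the original sum of weights.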

    // Create a weighted classifier
    m_Classifier.buildClassifier(neighbours);

    if (m_Debug) {
      System.out.println("Classifying test instance: " + instance);
      System.out.println("Built base classifier:\n" 
                         + m_Classifier.toString());
    }

    // Return the classifier's predictions
    return m_Classifier.distributionForInstance(instance);
  }

  /**
   * Returns a description of this classifier.
   *
   * @return a description of this classifier as a string.
   */
  public String toString() {

    // only ZeroR model?
    if (m_ZeroR != null) {
      StringBuffer buf = new StringBuffer();
      buf.append(this.getClass().getName().replaceAll(".*\\.", "") + "\n");
      buf.append(this.getClass().getName().replaceAll(".*\\.", "").replaceAll(".", "=") + "\n\n");
      buf.append("Warning: No model could be built, hence ZeroR model is used:\n\n");
      buf.append(m_ZeroR.toString());
      return buf.toString();
    }

    if (m_Train == null) {
      return "Locally weighted learning: No model built yet.";
    }
    String result = "Locally weighted learning\n"
      + "===========================\n";

    result += "Using classifier: " + m_Classifier.getClass().getName() + "\n";

    switch (m_WeightKernel) {
    case LINEAR:
      result += "Using linear weighting kernels\n";
      break;
    case EPANECHNIKOV:
      result += "Using epanechnikov weighting kernels\n";
      break;
    case TRICUBE:
      result += "Using tricube weighting kernels\n";
      break;
    case INVERSE:
      result += "Using inverse-distance weighting kernels\n";
      break;
    case GAUSS:
      result += "Using gaussian weighting kernels\n";
      break;
    case CONSTANT:
      result += "Using constant weighting kernels\n";
      break;
    }
    result += "Using " + (m_UseAllK ? "all" : "" + m_kNN) + " neighbours";
    return result;
  }

  /**
   * Returns the revision string.
   *
   * @return            the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 6055 $");
  }

  /**
   * Main method for testing this class.
   *
   * @param argv the options
   */
  public static void main(String [] argv) {
    runClassifier(new LWL(), argv);
  }
}