source: src/main/java/weka/classifiers/bayes/WAODE.java @ 11

/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    WAODE.java
 *    Copyright 2006 Liangxiao Jiang
 */

package weka.classifiers.bayes;

import weka.classifiers.Classifier;
import weka.classifiers.AbstractClassifier;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;

import java.util.Enumeration;
import java.util.Vector;

/**
 <!-- globalinfo-start -->
 * WAODE constructs the model called Weightily Averaged One-Dependence Estimators.<br/>
 * <br/>
 * For more information, see<br/>
 * <br/>
 * L. Jiang, H. Zhang: Weightily Averaged One-Dependence Estimators. In: Proceedings of the 9th Biennial Pacific Rim International Conference on Artificial Intelligence, PRICAI 2006, 970-974, 2006.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;inproceedings{Jiang2006,
 *    author = {L. Jiang and H. Zhang},
 *    booktitle = {Proceedings of the 9th Biennial Pacific Rim International Conference on Artificial Intelligence, PRICAI 2006},
 *    pages = {970-974},
 *    series = {LNAI},
 *    title = {Weightily Averaged One-Dependence Estimators},
 *    volume = {4099},
 *    year = {2006}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 * <pre> -I
 *  Whether to print some more internals.
 *  (default: no)</pre>
 *
 <!-- options-end -->
 *
 * @author  Liangxiao Jiang (ljiang@cug.edu.cn)
 * @author  H. Zhang (hzhang@unb.ca)
 * @version $Revision: 5928 $
 */
public class WAODE
  extends AbstractClassifier
  implements TechnicalInformationHandler {

  /** for serialization */
  private static final long serialVersionUID = 2170978824284697882L;

  /** The number of times each class value occurs in the dataset */
  private double[] m_ClassCounts;

  /** The number of times each attribute value occurs in the dataset */
  private double[] m_AttCounts;

  /** The number of times each pair of attribute values occurs in the dataset */
  private double[][] m_AttAttCounts;

  /** The number of times each class value occurs together with each pair of attribute values in the dataset */
  private double[][][] m_ClassAttAttCounts;

  /** The number of values for each attribute in the dataset */
  private int[] m_NumAttValues;

  /** The total number of values over all attributes in the dataset */
  private int m_TotalAttValues;

  /** The number of classes in the dataset */
  private int m_NumClasses;

  /** The number of attributes, including the class, in the dataset */
  private int m_NumAttributes;

  /** The number of instances in the dataset */
  private int m_NumInstances;

  /** The index of the class attribute in the dataset */
  private int m_ClassIndex;

  /** The starting index of each attribute in the dataset */
  private int[] m_StartAttIndex;

  /** The mutual information between each attribute and the class */
  private double[] m_mutualInformation;

  /** the header information of the training data */
  private Instances m_Header = null;

  /** whether to print more internals in the toString method
   * @see #toString() */
  private boolean m_Internals = false;

  /** a ZeroR model in case no model can be built from the data */
  private Classifier m_ZeroR;

  /**
   * Returns a string describing this classifier
   *
   * @return            a description of the classifier suitable for
   *                    displaying in the explorer/experimenter gui
   */
  public String globalInfo() {
    return
        "WAODE constructs the model called Weightily Averaged One-Dependence "
      + "Estimators.\n\n"
      + "For more information, see\n\n"
      + getTechnicalInformation().toString();
  }

  /**
   * Gets an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {
    Vector result = new Vector();
    Enumeration enm = super.listOptions();
    while (enm.hasMoreElements())
      result.add(enm.nextElement());

    result.addElement(new Option(
        "\tWhether to print some more internals.\n"
        + "\t(default: no)",
        "I", 0, "-I"));

    return result.elements();
  }

  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   * <pre> -I
   *  Whether to print some more internals.
   *  (default: no)</pre>
   *
   <!-- options-end -->
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {
    super.setOptions(options);

    setInternals(Utils.getFlag('I', options));
  }

  /**
   * Gets the current settings of the classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {
    Vector        result;
    String[]      options;
    int           i;

    result = new Vector();

    options = super.getOptions();
    for (i = 0; i < options.length; i++)
      result.add(options[i]);

    if (getInternals())
      result.add("-I");

    return (String[]) result.toArray(new String[result.size()]);
  }

  /**
   * Returns the tip text for this property
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String internalsTipText() {
    return "Prints more internals of the classifier.";
  }

  /**
   * Sets whether internals of the classifier are printed via toString().
   *
   * @param value if internals should be printed
   * @see #toString()
   */
  public void setInternals(boolean value) {
    m_Internals = value;
  }

  /**
   * Gets whether more internals of the classifier are printed.
   *
   * @return true if more internals are printed
   */
  public boolean getInternals() {
    return m_Internals;
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation        result;

    result = new TechnicalInformation(Type.INPROCEEDINGS);
    result.setValue(Field.AUTHOR, "L. Jiang and H. Zhang");
    result.setValue(Field.TITLE, "Weightily Averaged One-Dependence Estimators");
    result.setValue(Field.BOOKTITLE, "Proceedings of the 9th Biennial Pacific Rim International Conference on Artificial Intelligence, PRICAI 2006");
    result.setValue(Field.YEAR, "2006");
    result.setValue(Field.PAGES, "970-974");
    result.setValue(Field.SERIES, "LNAI");
    result.setValue(Field.VOLUME, "4099");

    return result;
  }

  /**
   * Returns default capabilities of the classifier.
   *
   * @return      the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result = super.getCapabilities();
    result.disableAll();

    // attributes
    result.enable(Capability.NOMINAL_ATTRIBUTES);

    // class
    result.enable(Capability.NOMINAL_CLASS);

    return result;
  }

  /**
   * Generates the classifier.
   *
   * @param instances set of instances serving as training data
   * @throws Exception if the classifier has not been generated successfully
   */
  public void buildClassifier(Instances instances) throws Exception {

    // can classifier handle the data?
    getCapabilities().testWithFail(instances);

    // only class? -> build ZeroR model
    if (instances.numAttributes() == 1) {
      System.err.println(
          "Cannot build model (only class attribute present in data!), "
          + "using ZeroR model instead!");
      m_ZeroR = new weka.classifiers.rules.ZeroR();
      m_ZeroR.buildClassifier(instances);
      return;
    }
    else {
      m_ZeroR = null;
    }

    // reset variables
    m_NumClasses = instances.numClasses();
    m_ClassIndex = instances.classIndex();
    m_NumAttributes = instances.numAttributes();
    m_NumInstances = instances.numInstances();
    m_TotalAttValues = 0;

    // allocate space for attribute reference arrays
    m_StartAttIndex = new int[m_NumAttributes];
    m_NumAttValues = new int[m_NumAttributes];

    // set the starting index and the number of values for each attribute,
    // and the total number of values over all attributes (not including the class)
    for (int i = 0; i < m_NumAttributes; i++) {
      if (i != m_ClassIndex) {
        m_StartAttIndex[i] = m_TotalAttValues;
        m_NumAttValues[i] = instances.attribute(i).numValues();
        m_TotalAttValues += m_NumAttValues[i];
      }
      else {
        m_StartAttIndex[i] = -1;
        m_NumAttValues[i] = m_NumClasses;
      }
    }
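
    // Note: the loop above flattens all non-class attribute values into a single
    // index range, so value v of attribute i is addressed as m_StartAttIndex[i] + v.
    // For example (illustrative numbers only), two nominal attributes with 3 and 2
    // values get start indices 0 and 3, and m_TotalAttValues becomes 5.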

    // allocate space for counts and frequencies
    m_ClassCounts = new double[m_NumClasses];
    m_AttCounts = new double[m_TotalAttValues];
    m_AttAttCounts = new double[m_TotalAttValues][m_TotalAttValues];
    m_ClassAttAttCounts = new double[m_NumClasses][m_TotalAttValues][m_TotalAttValues];
    m_Header = new Instances(instances, 0);

    // Calculate the counts
    for (int k = 0; k < m_NumInstances; k++) {
      int classVal = (int) instances.instance(k).classValue();
      m_ClassCounts[classVal]++;
      int[] attIndex = new int[m_NumAttributes];
      for (int i = 0; i < m_NumAttributes; i++) {
        if (i == m_ClassIndex) {
          attIndex[i] = -1;
        }
        else {
          attIndex[i] = m_StartAttIndex[i] + (int) instances.instance(k).value(i);
          m_AttCounts[attIndex[i]]++;
        }
      }
      for (int Att1 = 0; Att1 < m_NumAttributes; Att1++) {
        if (attIndex[Att1] == -1) continue;
        for (int Att2 = 0; Att2 < m_NumAttributes; Att2++) {
          if (attIndex[Att2] != -1) {
            m_AttAttCounts[attIndex[Att1]][attIndex[Att2]]++;
            m_ClassAttAttCounts[classVal][attIndex[Att1]][attIndex[Att2]]++;
          }
        }
      }
    }
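
    // At this point m_ClassAttAttCounts[c][x][y] holds the number of training
    // instances of class c whose (flattened) attribute values include both x and y;
    // the diagonal entry m_ClassAttAttCounts[c][x][x] is therefore the joint count
    // of class c and attribute value x, which mutualInfo() and
    // distributionForInstance() read off below.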

    // compute the mutual information between each attribute and the class
    m_mutualInformation = new double[m_NumAttributes];
    for (int att = 0; att < m_NumAttributes; att++) {
      if (att == m_ClassIndex) continue;
      m_mutualInformation[att] = mutualInfo(att);
    }
  }

  /**
   * Computes the mutual information between the given attribute and the class attribute.
   *
   * @param att the index of the attribute
   * @return the mutual information between the attribute and the class
   */
  private double mutualInfo(int att) {

    double mutualInfo = 0;
    int attIndex = m_StartAttIndex[att];
    double[] PriorsClass = new double[m_NumClasses];
    double[] PriorsAttribute = new double[m_NumAttValues[att]];
    double[][] PriorsClassAttribute = new double[m_NumClasses][m_NumAttValues[att]];

    for (int i = 0; i < m_NumClasses; i++) {
      PriorsClass[i] = m_ClassCounts[i] / m_NumInstances;
    }

    for (int j = 0; j < m_NumAttValues[att]; j++) {
      PriorsAttribute[j] = m_AttCounts[attIndex + j] / m_NumInstances;
    }

    for (int i = 0; i < m_NumClasses; i++) {
      for (int j = 0; j < m_NumAttValues[att]; j++) {
        PriorsClassAttribute[i][j] = m_ClassAttAttCounts[i][attIndex + j][attIndex + j] / m_NumInstances;
      }
    }

    for (int i = 0; i < m_NumClasses; i++) {
      for (int j = 0; j < m_NumAttValues[att]; j++) {
        mutualInfo += PriorsClassAttribute[i][j] * log2(PriorsClassAttribute[i][j], PriorsClass[i] * PriorsAttribute[j]);
      }
    }
    return mutualInfo;
  }
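
  // In formula form, the method above estimates the mutual information
  //   I(A; C) = sum_{a, c} P(a, c) * log2( P(a, c) / (P(a) * P(c)) )
  // where the probabilities are the empirical frequencies taken from the
  // count tables built in buildClassifier().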

  /**
   * Computes the logarithm of x/y to base 2.
   *
   * @param x numerator of the fraction
   * @param y denominator of the fraction
   * @return the base-2 logarithm of x/y, or 0 if either value is very small
   */
  private double log2(double x, double y) {

    if (x < Utils.SMALL || y < Utils.SMALL)
      return 0.0;
    else
      return Math.log(x / y) / Math.log(2);
  }

  /**
   * Calculates the class membership probabilities for the given test instance
   *
   * @param instance the instance to be classified
   * @return predicted class probability distribution
   * @throws Exception if there is a problem generating the prediction
   */
  public double[] distributionForInstance(Instance instance) throws Exception {

    // default model?
    if (m_ZeroR != null) {
      return m_ZeroR.distributionForInstance(instance);
    }

    // definition of local variables
    double[] probs = new double[m_NumClasses];
    double prob;
    double mutualInfoSum;

    // store the instance's att values in an int array
    int[] attIndex = new int[m_NumAttributes];
    for (int att = 0; att < m_NumAttributes; att++) {
      if (att == m_ClassIndex)
        attIndex[att] = -1;
      else
        attIndex[att] = m_StartAttIndex[att] + (int) instance.value(att);
    }

    // calculate probabilities for each possible class value
    for (int classVal = 0; classVal < m_NumClasses; classVal++) {
      probs[classVal] = 0;
      prob = 1;
      mutualInfoSum = 0.0;
      for (int parent = 0; parent < m_NumAttributes; parent++) {
        if (attIndex[parent] == -1) continue;
        prob = (m_ClassAttAttCounts[classVal][attIndex[parent]][attIndex[parent]]
            + 1.0 / (m_NumClasses * m_NumAttValues[parent])) / (m_NumInstances + 1.0);
        for (int son = 0; son < m_NumAttributes; son++) {
          if (attIndex[son] == -1 || son == parent) continue;
          prob *= (m_ClassAttAttCounts[classVal][attIndex[parent]][attIndex[son]]
              + 1.0 / m_NumAttValues[son])
              / (m_ClassAttAttCounts[classVal][attIndex[parent]][attIndex[parent]] + 1.0);
        }
        mutualInfoSum += m_mutualInformation[parent];
        probs[classVal] += m_mutualInformation[parent] * prob;
      }
      probs[classVal] /= mutualInfoSum;
    }
    if (!Double.isNaN(Utils.sum(probs)))
      Utils.normalize(probs);
    return probs;
  }
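
  // The loop above is the weighted average of one-dependence estimators: for a
  // class c and a test instance x = (a_1, ..., a_n) it scores
  //   P(c, x)  as  sum_i I(A_i; C) * P(c, a_i) * prod_{j != i} P(a_j | c, a_i)
  //                ---------------------------------------------------------
  //                                  sum_i I(A_i; C)
  // with each probability estimated from the count tables using the smoothing
  // terms visible in the code (e.g. + 1/(numClasses * numValues) over N + 1
  // for P(c, a_i)).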

  /**
   * returns a string representation of the classifier
   *
   * @return string representation of the classifier
   */
  public String toString() {
    StringBuffer        result;
    String              classname;
    int                 i;

    // only ZeroR model?
    if (m_ZeroR != null) {
      result = new StringBuffer();
      result.append(this.getClass().getName().replaceAll(".*\\.", "") + "\n");
      result.append(this.getClass().getName().replaceAll(".*\\.", "").replaceAll(".", "=") + "\n\n");
      result.append("Warning: No model could be built, hence ZeroR model is used:\n\n");
      result.append(m_ZeroR.toString());
    }
    else {
      classname = this.getClass().getName().replaceAll(".*\\.", "");
      result    = new StringBuffer();
      result.append(classname + "\n");
      result.append(classname.replaceAll(".", "=") + "\n\n");

      if (m_Header == null) {
        result.append("No Model built yet.\n");
      }
      else {
        if (getInternals()) {
          result.append("Mutual information of attributes with class attribute:\n");
          for (i = 0; i < m_Header.numAttributes(); i++) {
            // skip class
            if (i == m_Header.classIndex())
              continue;

            result.append(
                (i+1) + ". " + m_Header.attribute(i).name() + ": "
                + Utils.doubleToString(m_mutualInformation[i], 6) + "\n");
          }
        }
        else {
          result.append("Model built successfully.\n");
        }
      }
    }

    return result.toString();
  }

  /**
   * Returns the revision string.
   *
   * @return            the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 5928 $");
  }

  /**
   * Main method for testing this class.
   *
   * @param argv the commandline options, use -h to list all options
   */
  public static void main(String[] argv) {
    runClassifier(new WAODE(), argv);
  }
}
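
/*
 * A minimal usage sketch, assuming the standard Weka DataSource/Evaluation API
 * and an ARFF file containing only nominal attributes (the file name below is
 * just an illustrative placeholder); run inside a method that declares
 * "throws Exception":
 *
 *   import weka.classifiers.Evaluation;
 *   import weka.core.Instances;
 *   import weka.core.converters.ConverterUtils.DataSource;
 *
 *   Instances data = DataSource.read("weather.nominal.arff");
 *   data.setClassIndex(data.numAttributes() - 1);
 *
 *   WAODE waode = new WAODE();
 *   waode.setInternals(true);    // report mutual information in toString()
 *   waode.buildClassifier(data);
 *
 *   Evaluation eval = new Evaluation(data);
 *   eval.crossValidateModel(waode, data, 10, new java.util.Random(1));
 *   System.out.println(waode);
 *   System.out.println(eval.toSummaryString());
 */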