source: src/main/java/weka/classifiers/mi/MISVM.java @ 4

Last change on this file since 4 was 4, checked in by gnappo, 14 years ago

Import di weka.

File size: 23.6 KB
Line 
1/*
2 *    This program is free software; you can redistribute it and/or modify
3 *    it under the terms of the GNU General Public License as published by
4 *    the Free Software Foundation; either version 2 of the License, or
5 *    (at your option) any later version.
6 *
7 *    This program is distributed in the hope that it will be useful,
8 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
9 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
10 *    GNU General Public License for more details.
11 *
12 *    You should have received a copy of the GNU General Public License
13 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
15 */
16
17/*
18 * MISVM.java
19 * Copyright (C) 2005 University of Waikato, Hamilton, New Zealand
20 *
21 */
22
23package weka.classifiers.mi;
24
25import weka.classifiers.Classifier;
26import weka.classifiers.AbstractClassifier;
27import weka.classifiers.functions.SMO;
28import weka.classifiers.functions.supportVector.Kernel;
29import weka.classifiers.functions.supportVector.PolyKernel;
30import weka.core.Capabilities;
31import weka.core.Instance;
32import weka.core.Instances;
33import weka.core.MultiInstanceCapabilitiesHandler;
34import weka.core.Option;
35import weka.core.OptionHandler;
36import weka.core.RevisionUtils;
37import weka.core.SelectedTag;
38import weka.core.Tag;
39import weka.core.TechnicalInformation;
40import weka.core.TechnicalInformationHandler;
41import weka.core.Utils;
42import weka.core.Capabilities.Capability;
43import weka.core.TechnicalInformation.Field;
44import weka.core.TechnicalInformation.Type;
45import weka.filters.Filter;
46import weka.filters.unsupervised.attribute.MultiInstanceToPropositional;
47import weka.filters.unsupervised.attribute.Normalize;
48import weka.filters.unsupervised.attribute.Standardize;
49import weka.filters.unsupervised.instance.SparseToNonSparse;
50
51import java.util.Enumeration;
52import java.util.Vector;
53
54
55/**
56 <!-- globalinfo-start -->
 * Implements Stuart Andrews' mi_SVM (Maximum pattern Margin Formulation of MIL), applying weka.classifiers.functions.SMO to solve the multiple-instance problem.<br/>
 * The algorithm first assigns the bag label to each instance in the bag as its initial class label. After that it applies SMO to compute the SVM solution for all instances in positive bags, and then reassigns the class label of each instance in the positive bags according to the SVM result. It keeps iterating until the labels do not change anymore.<br/>
59 * <br/>
60 * For more information see:<br/>
61 * <br/>
62 * Stuart Andrews, Ioannis Tsochantaridis, Thomas Hofmann: Support Vector Machines for Multiple-Instance Learning. In: Advances in Neural Information Processing Systems 15, 561-568, 2003.
63 * <p/>
64 <!-- globalinfo-end -->
65 *
66 <!-- technical-bibtex-start -->
67 * BibTeX:
68 * <pre>
69 * &#64;inproceedings{Andrews2003,
70 *    author = {Stuart Andrews and Ioannis Tsochantaridis and Thomas Hofmann},
71 *    booktitle = {Advances in Neural Information Processing Systems 15},
72 *    pages = {561-568},
73 *    publisher = {MIT Press},
74 *    title = {Support Vector Machines for Multiple-Instance Learning},
75 *    year = {2003}
76 * }
77 * </pre>
78 * <p/>
79 <!-- technical-bibtex-end -->
80 *
81 <!-- options-start -->
82 * Valid options are: <p/>
83 *
84 * <pre> -D
85 *  If set, classifier is run in debug mode and
86 *  may output additional info to the console</pre>
87 *
88 * <pre> -C &lt;double&gt;
89 *  The complexity constant C. (default 1)</pre>
90 *
91 * <pre> -N &lt;default 0&gt;
92 *  Whether to 0=normalize/1=standardize/2=neither.
93 *  (default: 0=normalize)</pre>
94 *
95 * <pre> -I &lt;num&gt;
96 *  The maximum number of iterations to perform.
97 *  (default: 500)</pre>
98 *
99 * <pre> -K &lt;classname and parameters&gt;
100 *  The Kernel to use.
101 *  (default: weka.classifiers.functions.supportVector.PolyKernel)</pre>
102 *
103 * <pre>
104 * Options specific to kernel weka.classifiers.functions.supportVector.PolyKernel:
105 * </pre>
106 *
107 * <pre> -D
108 *  Enables debugging output (if available) to be printed.
109 *  (default: off)</pre>
110 *
111 * <pre> -no-checks
112 *  Turns off all checks - use with caution!
113 *  (default: checks on)</pre>
114 *
115 * <pre> -C &lt;num&gt;
116 *  The size of the cache (a prime number), 0 for full cache and
117 *  -1 to turn it off.
118 *  (default: 250007)</pre>
119 *
120 * <pre> -E &lt;num&gt;
121 *  The Exponent to use.
122 *  (default: 1.0)</pre>
123 *
124 * <pre> -L
125 *  Use lower-order terms.
126 *  (default: no)</pre>
127 *
128 <!-- options-end -->
129 *
130 * @author Lin Dong (ld21@cs.waikato.ac.nz)
131 * @version $Revision: 5928 $
132 * @see weka.classifiers.functions.SMO
133 */
public class MISVM 
  extends AbstractClassifier
  implements OptionHandler, MultiInstanceCapabilitiesHandler,
             TechnicalInformationHandler {

  /** for serialization */
  static final long serialVersionUID = 7622231064035278145L;
 
  /** The filter used to transform sparse datasets to non-sparse ones */
  protected Filter m_SparseFilter = new SparseToNonSparse();

  /** The adapted SMO classifier used to compute the SVM solution (w, b);
   * rebuilt on every iteration of buildClassifier */
  protected SVM m_SVM;

  /** the kernel to use (default: polynomial kernel) */
  protected Kernel m_kernel = new PolyKernel();

  /** The complexity parameter C of the SVM. */
  protected double m_C = 1.0;

  /** The filter used to standardize/normalize all values;
   * null when m_filterType is FILTER_NONE */
  protected Filter m_Filter =null;

  /** Whether to normalize/standardize/neither; one of the FILTER_* constants */
  protected int m_filterType = FILTER_NORMALIZE;

  /** Normalize training data */
  public static final int FILTER_NORMALIZE = 0;
  /** Standardize training data */
  public static final int FILTER_STANDARDIZE = 1;
  /** No normalization/standardization */
  public static final int FILTER_NONE = 2;
  /** The filter choices that can be applied to the training data */
  public static final Tag [] TAGS_FILTER = {
    new Tag(FILTER_NORMALIZE, "Normalize training data"),
    new Tag(FILTER_STANDARDIZE, "Standardize training data"),
    new Tag(FILTER_NONE, "No normalization/standardization"),
  };

  /** the maximum number of iterations to perform */
  protected int m_MaxIterations = 500;
 
  /** filter used to convert the MI dataset into a single-instance dataset */
  protected MultiInstanceToPropositional m_ConvertToProp = new MultiInstanceToPropositional();
179  /**
180   * Returns a string describing this filter
181   *
182   * @return a description of the filter suitable for
183   * displaying in the explorer/experimenter gui
184   */
185  public String globalInfo() {
186    return 
187         "Implements Stuart Andrews' mi_SVM (Maximum pattern Margin "
188       + "Formulation of MIL). Applying weka.classifiers.functions.SMO "
189       + "to solve multiple instances problem.\n"
190       + "The algorithm first assign the bag label to each instance in the "
191       + "bag as its initial class label.  After that applying SMO to compute "
192       + "SVM solution for all instances in positive bags And then reassign "
193       + "the class label of each instance in the positive bag according to "
194       + "the SVM result Keep on iteration until labels do not change "
195       + "anymore.\n\n"
196       + "For more information see:\n\n"
197       + getTechnicalInformation().toString();
198  }
199
200  /**
201   * Returns an instance of a TechnicalInformation object, containing
202   * detailed information about the technical background of this class,
203   * e.g., paper reference or book this class is based on.
204   *
205   * @return the technical information about this class
206   */
207  public TechnicalInformation getTechnicalInformation() {
208    TechnicalInformation        result;
209   
210    result = new TechnicalInformation(Type.INPROCEEDINGS);
211    result.setValue(Field.AUTHOR, "Stuart Andrews and Ioannis Tsochantaridis and Thomas Hofmann");
212    result.setValue(Field.YEAR, "2003");
213    result.setValue(Field.TITLE, "Support Vector Machines for Multiple-Instance Learning");
214    result.setValue(Field.BOOKTITLE, "Advances in Neural Information Processing Systems 15");
215    result.setValue(Field.PUBLISHER, "MIT Press");
216    result.setValue(Field.PAGES, "561-568");
217   
218    return result;
219  }
220
221  /**
222   * Returns an enumeration describing the available options
223   *
224   * @return an enumeration of all the available options
225   */
226  public Enumeration listOptions() {
227    Vector result = new Vector();
228   
229    Enumeration enm = super.listOptions();
230    while (enm.hasMoreElements())
231      result.addElement(enm.nextElement());
232
233    result.addElement(new Option(
234          "\tThe complexity constant C. (default 1)",
235          "C", 1, "-C <double>"));
236   
237    result.addElement(new Option(
238        "\tWhether to 0=normalize/1=standardize/2=neither.\n"
239        + "\t(default: 0=normalize)",
240        "N", 1, "-N <default 0>"));
241 
242    result.addElement(new Option(
243        "\tThe maximum number of iterations to perform.\n"
244        + "\t(default: 500)",
245        "I", 1, "-I <num>"));
246 
247    result.addElement(new Option(
248        "\tThe Kernel to use.\n"
249        + "\t(default: weka.classifiers.functions.supportVector.PolyKernel)",
250        "K", 1, "-K <classname and parameters>"));
251
252    result.addElement(new Option(
253        "",
254        "", 0, "\nOptions specific to kernel "
255        + getKernel().getClass().getName() + ":"));
256   
257    enm = ((OptionHandler) getKernel()).listOptions();
258    while (enm.hasMoreElements())
259      result.addElement(enm.nextElement());
260
261    return result.elements();
262  }
263
264
265
266  /**
267   * Parses a given list of options. <p/>
268   *
269   <!-- options-start -->
270   * Valid options are: <p/>
271   *
272   * <pre> -D
273   *  If set, classifier is run in debug mode and
274   *  may output additional info to the console</pre>
275   *
276   * <pre> -C &lt;double&gt;
277   *  The complexity constant C. (default 1)</pre>
278   *
279   * <pre> -N &lt;default 0&gt;
280   *  Whether to 0=normalize/1=standardize/2=neither.
281   *  (default: 0=normalize)</pre>
282   *
283   * <pre> -I &lt;num&gt;
284   *  The maximum number of iterations to perform.
285   *  (default: 500)</pre>
286   *
287   * <pre> -K &lt;classname and parameters&gt;
288   *  The Kernel to use.
289   *  (default: weka.classifiers.functions.supportVector.PolyKernel)</pre>
290   *
291   * <pre>
292   * Options specific to kernel weka.classifiers.functions.supportVector.PolyKernel:
293   * </pre>
294   *
295   * <pre> -D
296   *  Enables debugging output (if available) to be printed.
297   *  (default: off)</pre>
298   *
299   * <pre> -no-checks
300   *  Turns off all checks - use with caution!
301   *  (default: checks on)</pre>
302   *
303   * <pre> -C &lt;num&gt;
304   *  The size of the cache (a prime number), 0 for full cache and
305   *  -1 to turn it off.
306   *  (default: 250007)</pre>
307   *
308   * <pre> -E &lt;num&gt;
309   *  The Exponent to use.
310   *  (default: 1.0)</pre>
311   *
312   * <pre> -L
313   *  Use lower-order terms.
314   *  (default: no)</pre>
315   *
316   <!-- options-end -->
317   *
318   * @param options the list of options as an array of strings
319   * @throws Exception if an option is not supported
320   */
321  public void setOptions(String[] options) throws Exception {
322    String      tmpStr;
323    String[]    tmpOptions;
324   
325    tmpStr = Utils.getOption('C', options);
326    if (tmpStr.length() != 0)
327      setC(Double.parseDouble(tmpStr));
328    else
329      setC(1.0);
330   
331    tmpStr = Utils.getOption('N', options);
332    if (tmpStr.length() != 0)
333      setFilterType(new SelectedTag(Integer.parseInt(tmpStr), TAGS_FILTER));
334    else
335      setFilterType(new SelectedTag(FILTER_NORMALIZE, TAGS_FILTER));
336
337    tmpStr = Utils.getOption('I', options);
338    if (tmpStr.length() != 0)
339      setMaxIterations(Integer.parseInt(tmpStr));
340    else
341      setMaxIterations(500);
342   
343    tmpStr     = Utils.getOption('K', options);
344    tmpOptions = Utils.splitOptions(tmpStr);
345    if (tmpOptions.length != 0) {
346      tmpStr        = tmpOptions[0];
347      tmpOptions[0] = "";
348      setKernel(Kernel.forName(tmpStr, tmpOptions));
349    }
350   
351    super.setOptions(options);
352  }
353
354
355  /**
356   * Gets the current settings of the classifier.
357   *
358   * @return an array of strings suitable for passing to setOptions
359   */
360  public String[] getOptions() {
361    Vector        result;
362   
363    result = new Vector();
364
365    if (getDebug())
366      result.add("-D");
367   
368    result.add("-C");
369    result.add("" + getC());
370   
371    result.add("-N");
372    result.add("" + m_filterType);
373
374    result.add("-K");
375    result.add("" + getKernel().getClass().getName() + " " + Utils.joinOptions(getKernel().getOptions()));
376
377    return (String[]) result.toArray(new String[result.size()]);
378  }
379 
  /**
   * Returns the tip text for the kernel property.
   *
   * @return            tip text for this property suitable for
   *                    displaying in the explorer/experimenter gui
   */
  public String kernelTipText() {
    return "The kernel to use.";
  }

  /**
   * Gets the kernel to use (default: PolyKernel).
   *
   * @return            the kernel
   */
  public Kernel getKernel() {
    return m_kernel;
  }
   
  /**
   * Sets the kernel to use.
   *
   * @param value       the kernel
   */
  public void setKernel(Kernel value) {
    m_kernel = value;
  }
407
  /**
   * Returns the tip text for the filterType property.
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String filterTypeTipText() {
    return "The filter type for transforming the training data.";
  }

  /**
   * Sets how the training data will be transformed. Should be one of
   * FILTER_NORMALIZE, FILTER_STANDARDIZE, FILTER_NONE.
   * A tag from any other tag set is silently ignored.
   *
   * @param newType the new filtering mode
   */
  public void setFilterType(SelectedTag newType) {

    if (newType.getTags() == TAGS_FILTER) {
      m_filterType = newType.getSelectedTag().getID();
    }
  }

  /**
   * Gets how the training data will be transformed. Will be one of
   * FILTER_NORMALIZE, FILTER_STANDARDIZE, FILTER_NONE.
   *
   * @return the filtering mode
   */
  public SelectedTag getFilterType() {

    return new SelectedTag(m_filterType, TAGS_FILTER);
  }
441
  /**
   * Returns the tip text for the C property.
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String cTipText() {
    return "The value for C.";
  }

  /**
   * Get the value of the complexity constant C.
   *
   * @return Value of C.
   */
  public double getC() {

    return m_C;
  }

  /**
   * Set the value of the complexity constant C (default: 1.0).
   *
   * @param v  Value to assign to C.
   */
  public void setC(double v) {
    m_C = v;
  }
470
471  /**
472   * Returns the tip text for this property
473   *
474   * @return            tip text for this property suitable for
475   *                    displaying in the explorer/experimenter gui
476   */
477  public String maxIterationsTipText() {
478    return "The maximum number of iterations to perform.";
479  }
480
481  /**
482   * Gets the maximum number of iterations.
483   *
484   * @return            the maximum number of iterations.
485   */
486  public int getMaxIterations() {
487    return m_MaxIterations;
488  }
489
490  /**
491   * Sets the maximum number of iterations.
492   *
493   * @param value       the maximum number of iterations.
494   */
495  public void setMaxIterations(int value) {
496    if (value < 1)
497      System.out.println(
498          "At least 1 iteration is necessary (provided: " + value + ")!");
499    else
500      m_MaxIterations = value;
501  }
502
  /**
   * Adapted version of SMO that exposes the raw SVM output
   * f(x) = (w,x) + b for a single instance.
   */
  private class SVM
    extends SMO {
   
    /** for serialization */
    static final long serialVersionUID = -8325638229658828931L;
   
    /**
     * Constructor
     */
    protected SVM (){
      super();
    }

    /**
     * Computes the SVM output for a given instance.
     *
     * @param index the index of the instance in the training data, or -1
     *        to compute the output directly from the instance
     *        (callers in this file always pass -1 — NOTE(review): verify
     *        against SMO.BinarySMO.SVMOutput semantics)
     * @param inst the instance
     * @return the raw (uncalibrated) output of the SVM for the instance
     * @throws Exception in case of an error
     */
    protected double output(int index, Instance inst) throws Exception {
      double output = 0;
      // m_classifiers[0][1] is the single pairwise binary SMO model
      // (class 0 vs class 1) built by SMO for a binary class attribute
      output = m_classifiers[0][1].SVMOutput(index, inst);
      return output;
    }
   
    /**
     * Returns the revision string.
     *
     * @return          the revision
     */
    public String getRevision() {
      return RevisionUtils.extract("$Revision: 5928 $");
    }
  }
542
543  /**
544   * Returns default capabilities of the classifier.
545   *
546   * @return      the capabilities of this classifier
547   */
548  public Capabilities getCapabilities() {
549    Capabilities result = super.getCapabilities();
550    result.disableAll();
551
552    // attributes
553    result.enable(Capability.NOMINAL_ATTRIBUTES);
554    result.enable(Capability.RELATIONAL_ATTRIBUTES);
555    result.enable(Capability.MISSING_VALUES);
556
557    // class
558    result.disableAllClasses();
559    result.disableAllClassDependencies();
560    result.enable(Capability.BINARY_CLASS);
561    result.enable(Capability.MISSING_CLASS_VALUES);
562   
563    // other
564    result.enable(Capability.ONLY_MULTIINSTANCE);
565   
566    return result;
567  }
568
569  /**
570   * Returns the capabilities of this multi-instance classifier for the
571   * relational data.
572   *
573   * @return            the capabilities of this object
574   * @see               Capabilities
575   */
576  public Capabilities getMultiInstanceCapabilities() {
577    SVM                 classifier;
578    Capabilities        result;
579
580    classifier = null;
581    result     = null;
582   
583    try {
584      classifier = new SVM();
585      classifier.setKernel(Kernel.makeCopy(getKernel()));
586      result = classifier.getCapabilities();
587      result.setOwner(this);
588    }
589    catch (Exception e) {
590      e.printStackTrace();
591    }
592   
593    // class
594    result.disableAllClasses();
595    result.enable(Capability.NO_CLASS);
596   
597    return result;
598  }
599
  /**
   * Builds the classifier.
   *
   * Implements the iterative mi_SVM scheme: every instance initially
   * inherits the label of its bag; an SMO model is trained, the labels of
   * instances in positive bags are re-assigned according to the sign of
   * the SVM output, and the process repeats until the labels stabilize or
   * the maximum number of iterations is reached.
   *
   * @param train the training data to be used for generating the
   * boosted classifier.
   * @throws Exception if the classifier could not be built successfully
   */
  public void buildClassifier(Instances train) throws Exception {
    // can classifier handle the data?
    getCapabilities().testWithFail(train);

    // remove instances with missing class (work on a copy)
    train = new Instances(train);
    train.deleteWithMissingClass();
   
    int numBags = train.numInstances(); //number of bags
    int []bagSize= new int [numBags];   // number of instances per bag
    int classes [] = new int [numBags]; // class label of each bag

    Vector instLabels = new Vector();  //store the class label assigned to each single instance
    Vector pre_instLabels=new Vector(); // labels from the previous iteration

    for(int h=0; h<numBags; h++)  {//h_th bag
      classes[h] = (int) train.instance(h).classValue(); 
      bagSize[h]=train.instance(h).relationalValue(1).numInstances();
      // every instance starts with the label of its bag
      for (int i=0; i<bagSize[h];i++)
        instLabels.addElement(new Double(classes[h]));           
    }

    // convert the training dataset into single-instance dataset
    m_ConvertToProp.setWeightMethod(
        new SelectedTag(
          MultiInstanceToPropositional.WEIGHTMETHOD_1, 
          MultiInstanceToPropositional.TAGS_WEIGHTMETHOD)); 
    m_ConvertToProp.setInputFormat(train);
    train = Filter.useFilter( train, m_ConvertToProp);
    train.deleteAttributeAt(0); //remove the bagIndex attribute;

    // choose the attribute pre-processing filter
    if (m_filterType == FILTER_STANDARDIZE) 
      m_Filter = new Standardize();
    else if (m_filterType == FILTER_NORMALIZE)
      m_Filter = new Normalize();
    else 
      m_Filter = null;

    if (m_Filter!=null) {
      m_Filter.setInputFormat(train);
      train = Filter.useFilter(train, m_Filter);
    }   

    if (m_Debug) {
      System.out.println("\nIteration History..." );
    }

    if (getDebug())
      System.out.println("\nstart building model ...");

    int index;              // index of the current instance in 'train'
    double sum, max_output; 
    Vector max_index = new Vector();
    Instance inst=null;

    int loopNum=0;          // iteration counter
    do {
      loopNum++;
      index=-1;
      if (m_Debug)
        System.out.println("=====================loop: "+loopNum);

      //store the previous label information, to detect convergence below
      pre_instLabels=(Vector)instLabels.clone();   

      // set the proper SMO options in order to build a SVM model
      m_SVM = new SVM();
      m_SVM.setC(getC());
      m_SVM.setKernel(Kernel.makeCopy(getKernel()));
      // the SVM model must not normalize / standardize the input dataset,
      // as the dataset has already been processed above
      m_SVM.setFilterType(new SelectedTag(FILTER_NONE, TAGS_FILTER)); 

      m_SVM.buildClassifier(train); 

      for(int h=0; h<numBags; h++)  {//h_th bag
        if (classes[h]==1) { //positive bag
          if (m_Debug)
            System.out.println("--------------- "+h+" ----------------");
          sum=0;

          //compute outputs f=(w,x)+b for all instances in positive bags
          //and flip each instance label to match the sign of the output
          for (int i=0; i<bagSize[h]; i++){
            index ++; 

            inst=train.instance(index); 
            double output =m_SVM.output(-1, inst); //System.out.println(output);
            if (output<=0){
              if (inst.classValue()==1.0) {     
                train.instance(index).setClassValue(0.0);
                instLabels.set(index, new Double(0.0));

                if (m_Debug)
                  System.out.println( index+ "- changed to 0");
              }
            }
            else { 
              if (inst.classValue()==0.0) {
                train.instance(index).setClassValue(1.0);
                instLabels.set(index, new Double(1.0));

                if (m_Debug)
                  System.out.println(index+ "+ changed to 1");
              }
            }
            // count positive instances remaining in this bag
            sum += train.instance(index).classValue(); 
          }

          /* if class value of all instances in a positive bag
             are changed to 0.0, find the instance with max SVMOutput value
             and assign the class value 1.0 to it, so every positive bag
             keeps at least one positive instance (MI constraint)
             */
          if (sum==0){ 
            //find the instance with max SVMOutput value 
            max_output=-Double.MAX_VALUE;
            max_index.clear();
            for (int j=index-bagSize[h]+1; j<index+1; j++){
              inst=train.instance(j);
              double output = m_SVM.output(-1, inst);
              if(max_output<output) {
                max_output=output;
                max_index.clear();
                max_index.add(new Integer(j));
              }
              else if(max_output==output) 
                max_index.add(new Integer(j));
            }

            //assign the class value 1.0 to the instances with max SVMOutput
            //(all of them, in case of ties)
            for (int vecIndex=0; vecIndex<max_index.size(); vecIndex ++) {
              Integer i =(Integer)max_index.get(vecIndex);
              train.instance(i.intValue()).setClassValue(1.0);
              instLabels.set(i.intValue(), new Double(1.0));

              if (m_Debug)
                System.out.println("##change to 1 ###outpput: "+max_output+" max_index: "+i+ " bag: "+h);
            }

          }
        }else   //negative bags: labels never change, just skip the instances
          index += bagSize[h];
      }
    }while(!instLabels.equals(pre_instLabels) && loopNum < m_MaxIterations);

    if (getDebug())
      System.out.println("finish building model.");
  }
753
754  /**
755   * Computes the distribution for a given exemplar
756   *
757   * @param exmp the exemplar for which distribution is computed
758   * @return the distribution
759   * @throws Exception if the distribution can't be computed successfully
760   */
761  public double[] distributionForInstance(Instance exmp)
762    throws Exception {
763
764    double sum=0;
765    double classValue;
766    double[] distribution = new double[2];
767
768    Instances testData = new Instances(exmp.dataset(), 0);
769    testData.add(exmp);
770
771    // convert the training dataset into single-instance dataset
772    testData = Filter.useFilter(testData, m_ConvertToProp);     
773    testData.deleteAttributeAt(0); //remove the bagIndex attribute     
774
775    if (m_Filter != null)       
776      testData = Filter.useFilter(testData, m_Filter); 
777
778    for(int j = 0; j < testData.numInstances(); j++){
779      Instance inst = testData.instance(j);
780      double output = m_SVM.output(-1, inst); 
781      if (output <= 0)
782        classValue = 0.0;
783      else
784        classValue = 1.0;
785      sum += classValue;
786    }
787    if (sum == 0)
788      distribution[0] = 1.0;
789    else 
790      distribution[0] = 0.0;
791    distribution [1] = 1.0 - distribution[0];
792
793    return distribution;
794  }
795 
  /**
   * Returns the revision string of this classifier.
   *
   * @return            the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 5928 $");
  }
804
805  /**
806   * Main method for testing this class.
807   *
808   * @param argv should contain the command line arguments to the
809   * scheme (see Evaluation)
810   */
811  public static void main(String[] argv) {
812    runClassifier(new MISVM(), argv);
813  }
814}
Note: See TracBrowser for help on using the repository browser.