/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 * Grading.java
 * Copyright (C) 2000 University of Waikato
 *
 */

package weka.classifiers.meta;

import weka.classifiers.Classifier;
import weka.classifiers.AbstractClassifier;
import weka.core.Attribute;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.DenseInstance;
import weka.core.Instances;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;

import java.util.Random;

/**
 <!-- globalinfo-start -->
 * Implements Grading. The base classifiers are "graded".<br/>
 * <br/>
 * For more information, see<br/>
 * <br/>
 * A.K. Seewald, J. Fuernkranz: An Evaluation of Grading Classifiers. In: Advances in Intelligent Data Analysis: 4th International Conference, Berlin/Heidelberg/New York/Tokyo, 115-124, 2001.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * @inproceedings{Seewald2001,
 *    address = {Berlin/Heidelberg/New York/Tokyo},
 *    author = {A.K. Seewald and J. Fuernkranz},
 *    booktitle = {Advances in Intelligent Data Analysis: 4th International Conference},
 *    editor = {F. Hoffmann et al.},
 *    pages = {115-124},
 *    publisher = {Springer},
 *    title = {An Evaluation of Grading Classifiers},
 *    year = {2001}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -M <scheme specification>
 *  Full name of meta classifier, followed by options.
 *  (default: "weka.classifiers.rules.ZeroR")</pre>
 *
 * <pre> -X <number of folds>
 *  Sets the number of cross-validation folds.</pre>
 *
 * <pre> -S <num>
 *  Random number seed.
 *  (default 1)</pre>
 *
 * <pre> -B <classifier specification>
 *  Full class name of classifier to include, followed
 *  by scheme options. May be specified multiple times.
 *  (default: "weka.classifiers.rules.ZeroR")</pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 <!-- options-end -->
 *
 * @author Alexander K. Seewald (alex@seewald.at)
 * @author Eibe Frank (eibe@cs.waikato.ac.nz)
 * @version $Revision: 5987 $
 */
public class Grading
  extends Stacking
  implements TechnicalInformationHandler {

  /** for serialization */
  static final long serialVersionUID = 5207837947890081170L;

  /** The meta classifiers, one for each base classifier. */
  protected Classifier [] m_MetaClassifiers = new Classifier[0];

  /** The number of training instances in each class. */
  protected double [] m_InstPerClass = null;

  /**
   * Returns a string describing this classifier.
   *
   * @return a description suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {

    return
        "Implements Grading. The base classifiers are \"graded\".\n\n"
      + "For more information, see\n\n"
      + getTechnicalInformation().toString();
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation result;

    result = new TechnicalInformation(Type.INPROCEEDINGS);
    result.setValue(Field.AUTHOR, "A.K. Seewald and J. Fuernkranz");
    result.setValue(Field.TITLE, "An Evaluation of Grading Classifiers");
    result.setValue(Field.BOOKTITLE, "Advances in Intelligent Data Analysis: 4th International Conference");
    result.setValue(Field.EDITOR, "F. Hoffmann et al.");
    result.setValue(Field.YEAR, "2001");
    result.setValue(Field.PAGES, "115-124");
    result.setValue(Field.PUBLISHER, "Springer");
    result.setValue(Field.ADDRESS, "Berlin/Heidelberg/New York/Tokyo");

    return result;
  }

  /**
   * Generates the meta data
   *
   * @param newData the data to work on
   * @param random the random number generator used in the generation
   * @throws Exception if generation fails
   */
  protected void generateMetaLevel(Instances newData, Random random)
    throws Exception {

    m_MetaFormat = metaFormat(newData);
    Instances [] metaData = new Instances[m_Classifiers.length];
    for (int i = 0; i < m_Classifiers.length; i++) {
      metaData[i] = metaFormat(newData);
    }
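    // For each cross-validation fold, every base classifier is trained on the
    // training split and then graded on the held-out split, so each base
    // classifier accumulates its own level-1 ("graded") data set.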
    for (int j = 0; j < m_NumFolds; j++) {

      Instances train = newData.trainCV(m_NumFolds, j, random);
      Instances test = newData.testCV(m_NumFolds, j);

      // Build base classifiers
      for (int i = 0; i < m_Classifiers.length; i++) {
        getClassifier(i).buildClassifier(train);
        for (int k = 0; k < test.numInstances(); k++) {
          metaData[i].add(metaInstance(test.instance(k),i));
        }
      }
    }

    // calculate InstPerClass
    m_InstPerClass = new double[newData.numClasses()];
    for (int i=0; i < newData.numClasses(); i++) m_InstPerClass[i]=0.0;
    for (int i=0; i < newData.numInstances(); i++) {
      m_InstPerClass[(int)newData.instance(i).classValue()]++;
    }

    m_MetaClassifiers = AbstractClassifier.makeCopies(m_MetaClassifier,
        m_Classifiers.length);

    for (int i = 0; i < m_Classifiers.length; i++) {
      m_MetaClassifiers[i].buildClassifier(metaData[i]);
    }
  }

  /**
   * Returns class probabilities for a given instance using the stacked classifier.
   * One class will always get all the probability mass (i.e. probability one).
   *
   * @param instance the instance to be classified
   * @throws Exception if instance could not be classified
   * successfully
   * @return the class distribution for the given instance
   */
  public double[] distributionForInstance(Instance instance) throws Exception {

    double maxPreds;
    int numPreds=0;
    int numClassifiers=m_Classifiers.length;
    int idxPreds;
    double [] predConfs = new double[numClassifiers];
    double [] preds;

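    // Grade each base classifier on this instance: predConfs[i] is the meta
    // classifier's confidence in its grade, +P(correct) if it grades base
    // classifier i as correct and -P(incorrect) if it grades it as incorrect.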
    for (int i=0; i<numClassifiers; i++) {
      preds = m_MetaClassifiers[i].distributionForInstance(metaInstance(instance,i));
      if (m_MetaClassifiers[i].classifyInstance(metaInstance(instance,i))==1)
        predConfs[i]=preds[1];
      else
        predConfs[i]=-preds[0];
    }
    if (predConfs[Utils.maxIndex(predConfs)]<0.0) { // no correct classifiers
      for (int i=0; i<numClassifiers; i++)   // use neg. confidences instead
        predConfs[i]=1.0+predConfs[i];
    } else {
      for (int i=0; i<numClassifiers; i++)   // otherwise ignore neg. conf
        if (predConfs[i]<0) predConfs[i]=0.0;
    }

    /*System.out.print(preds[0]);
    System.out.print(":");
    System.out.print(preds[1]);
    System.out.println("#");*/

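    // Aggregate the grades: for each class, sum the confidences of the base
    // classifiers that predict that class for this instance.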
    preds=new double[instance.numClasses()];
    for (int i=0; i<instance.numClasses(); i++) preds[i]=0.0;
    for (int i=0; i<numClassifiers; i++) {
      idxPreds=(int)(m_Classifiers[i].classifyInstance(instance));
      preds[idxPreds]+=predConfs[i];
    }

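    // If several classes tie for the highest summed confidence, prefer the
    // class that is most frequent in the training data (m_InstPerClass).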
    maxPreds=preds[Utils.maxIndex(preds)];
    int MaxInstPerClass=-100;
    int MaxClass=-1;
    for (int i=0; i<instance.numClasses(); i++) {
      if (preds[i]==maxPreds) {
        numPreds++;
        if (m_InstPerClass[i]>MaxInstPerClass) {
          MaxInstPerClass=(int)m_InstPerClass[i];
          MaxClass=i;
        }
      }
    }

    int predictedIndex;
    if (numPreds==1)
      predictedIndex = Utils.maxIndex(preds);
    else
    {
      // System.out.print("?");
      // System.out.print(instance.toString());
      // for (int i=0; i<instance.numClasses(); i++) {
      //   System.out.print("/");
      //   System.out.print(preds[i]);
      // }
      // System.out.println(MaxClass);
      predictedIndex = MaxClass;
    }
    double[] classProbs = new double[instance.numClasses()];
    classProbs[predictedIndex] = 1.0;
    return classProbs;
  }

  /**
   * Output a representation of this classifier
   *
   * @return a string representation of the classifier
   */
  public String toString() {

    if (m_Classifiers.length == 0) {
      return "Grading: No base schemes entered.";
    }
    if (m_MetaClassifiers.length == 0) {
      return "Grading: No meta scheme selected.";
    }
    if (m_MetaFormat == null) {
      return "Grading: No model built yet.";
    }
    String result = "Grading\n\nBase classifiers\n\n";
    for (int i = 0; i < m_Classifiers.length; i++) {
      result += getClassifier(i).toString() +"\n\n";
    }

    result += "\n\nMeta classifiers\n\n";
    for (int i = 0; i < m_Classifiers.length; i++) {
      result += m_MetaClassifiers[i].toString() +"\n\n";
    }

    return result;
  }

  /**
   * Makes the format for the level-1 data.
   *
   * @param instances the level-0 format
   * @return the format for the meta data
   * @throws Exception if an error occurs
   */
  protected Instances metaFormat(Instances instances) throws Exception {

    FastVector attributes = new FastVector();
    Instances metaFormat;

    for (int i = 0; i < instances.numAttributes(); i++) {
      if ( i != instances.classIndex() ) {
        attributes.addElement(instances.attribute(i));
      }
    }

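    // The level-1 class attribute is a binary nominal flag "PredConf":
    // 1 if the base classifier's prediction for the instance is correct,
    // 0 otherwise (see metaInstance below).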
    FastVector nomElements = new FastVector(2);
    nomElements.addElement("0");
    nomElements.addElement("1");
    attributes.addElement(new Attribute("PredConf",nomElements));

    metaFormat = new Instances("Meta format", attributes, 0);
    metaFormat.setClassIndex(metaFormat.numAttributes()-1);
    return metaFormat;
  }

  /**
   * Makes a level-1 instance from the given instance.
   *
   * @param instance the instance to be transformed
   * @param k index of the classifier
   * @return the level-1 instance
   * @throws Exception if an error occurs
   */
  protected Instance metaInstance(Instance instance, int k) throws Exception {

    double[] values = new double[m_MetaFormat.numAttributes()];
    Instance metaInstance;
    double predConf;
    int i;
    int maxIdx;
    double maxVal;

    int idx = 0;
    for (i = 0; i < instance.numAttributes(); i++) {
      if (i != instance.classIndex()) {
        values[idx] = instance.value(i);
        idx++;
      }
    }

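    // Grade base classifier k on this instance: the level-1 class value is 1
    // if its most probable predicted class matches the true class, 0 otherwise.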
    Classifier classifier = getClassifier(k);

    if (m_BaseFormat.classAttribute().isNumeric()) {
      throw new Exception("Class Attribute must not be numeric!");
    } else {
      double[] dist = classifier.distributionForInstance(instance);

      maxIdx=0;
      maxVal=dist[0];
      for (int j = 1; j < dist.length; j++) {
        if (dist[j]>maxVal) {
          maxVal=dist[j];
          maxIdx=j;
        }
      }
      predConf= (instance.classValue()==maxIdx) ? 1:0;
    }

    values[idx]=predConf;
    metaInstance = new DenseInstance(1, values);
    metaInstance.setDataset(m_MetaFormat);
    return metaInstance;
  }

  /**
   * Returns the revision string.
   *
   * @return the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 5987 $");
  }

  /**
   * Main method for testing this class.
   *
   * @param argv should contain the following arguments:
   * -t training file [-T test file] [-c class index]
   */
  public static void main(String [] argv) {
    runClassifier(new Grading(), argv);
  }
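
  // Example command-line invocation of the options documented above. This is
  // only an illustrative sketch: the data file name and the particular base
  // and meta classifiers are hypothetical choices, not defaults of this class.
  //
  //   java weka.classifiers.meta.Grading -t data.arff \
  //     -B "weka.classifiers.trees.J48" \
  //     -B "weka.classifiers.bayes.NaiveBayes" \
  //     -M "weka.classifiers.lazy.IBk" \
  //     -X 10 -S 1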
}