/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 * StackingC.java
 * Copyright (C) 1999 University of Waikato, Hamilton, New Zealand
 *
 */

package weka.classifiers.meta;

import weka.classifiers.Classifier;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.functions.LinearRegression;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.OptionHandler;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.MakeIndicator;
import weka.filters.unsupervised.attribute.Remove;

import java.util.Random;

/**
 <!-- globalinfo-start -->
 * Implements StackingC (more efficient version of stacking).<br/>
 * <br/>
 * For more information, see<br/>
 * <br/>
 * A.K. Seewald: How to Make Stacking Better and Faster While Also Taking Care of an Unknown Weakness. In: Nineteenth International Conference on Machine Learning, 554-561, 2002.<br/>
 * <br/>
 * Note: requires meta classifier to be a numeric prediction scheme.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;inproceedings{Seewald2002,
 *    author = {A.K. Seewald},
 *    booktitle = {Nineteenth International Conference on Machine Learning},
 *    editor = {C. Sammut and A. Hoffmann},
 *    pages = {554-561},
 *    publisher = {Morgan Kaufmann Publishers},
 *    title = {How to Make Stacking Better and Faster While Also Taking Care of an Unknown Weakness},
 *    year = {2002}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -M &lt;scheme specification&gt;
 *  Full name of meta classifier, followed by options.
 *  Must be a numeric prediction scheme. Default: Linear Regression.</pre>
 *
 * <pre> -X &lt;number of folds&gt;
 *  Sets the number of cross-validation folds.</pre>
 *
 * <pre> -S &lt;num&gt;
 *  Random number seed.
 *  (default 1)</pre>
 *
 * <pre> -B &lt;classifier specification&gt;
 *  Full class name of classifier to include, followed
 *  by scheme options. May be specified multiple times.
 *  (default: "weka.classifiers.rules.ZeroR")</pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 <!-- options-end -->
 *
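 * <p>
 * A minimal usage sketch. The training set <code>train</code> and the
 * choice of base classifiers below are illustrative assumptions, not
 * defaults of this class:
 * <pre>
 * Instances train = ...;   // training data with its class attribute set
 * StackingC stacker = new StackingC();
 * stacker.setClassifiers(new Classifier[] {
 *   new weka.classifiers.trees.J48(),
 *   new weka.classifiers.bayes.NaiveBayes()
 * });
 * stacker.setNumFolds(10);
 * stacker.buildClassifier(train);
 * double[] dist = stacker.distributionForInstance(train.instance(0));
 * </pre>
 * <p/>
 *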
 * @author Eibe Frank (eibe@cs.waikato.ac.nz)
 * @author Alexander K. Seewald (alex@seewald.at)
 * @version $Revision: 5928 $
 */
public class StackingC
  extends Stacking
  implements OptionHandler, TechnicalInformationHandler {

  /** for serialization */
  static final long serialVersionUID = -6717545616603725198L;

  /** The meta classifiers (one for each class, like in ClassificationViaRegression) */
  protected Classifier [] m_MetaClassifiers = null;

  /** Filter to transform metaData - Remove */
  protected Remove m_attrFilter = null;

  /** Filter to transform metaData - MakeIndicator */
  protected MakeIndicator m_makeIndicatorFilter = null;

  /**
   * The constructor.
   */
  public StackingC() {
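    // Default meta classifier: LinearRegression, with its attribute selection
    // method set via tag index 1 of TAGS_SELECTION (no attribute selection).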
    m_MetaClassifier = new weka.classifiers.functions.LinearRegression();
    ((LinearRegression)(getMetaClassifier())).setAttributeSelectionMethod(
      new weka.core.SelectedTag(1, LinearRegression.TAGS_SELECTION));
  }

  /**
   * Returns a string describing this classifier.
   *
   * @return a description suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {

    return "Implements StackingC (more efficient version of stacking).\n\n"
      + "For more information, see\n\n"
      + getTechnicalInformation().toString() + "\n\n"
      + "Note: requires meta classifier to be a numeric prediction scheme.";
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation result;

    result = new TechnicalInformation(Type.INPROCEEDINGS);
    result.setValue(Field.AUTHOR, "A.K. Seewald");
    result.setValue(Field.TITLE, "How to Make Stacking Better and Faster While Also Taking Care of an Unknown Weakness");
    result.setValue(Field.BOOKTITLE, "Nineteenth International Conference on Machine Learning");
    result.setValue(Field.EDITOR, "C. Sammut and A. Hoffmann");
    result.setValue(Field.YEAR, "2002");
    result.setValue(Field.PAGES, "554-561");
    result.setValue(Field.PUBLISHER, "Morgan Kaufmann Publishers");

    return result;
  }

  /**
   * String describing the option for setting the meta classifier.
   *
   * @return string describing the option
   */
  protected String metaOption() {

    return "\tFull name of meta classifier, followed by options.\n"
      + "\tMust be a numeric prediction scheme. Default: Linear Regression.";
  }

  /**
   * Processes the options for setting the meta classifier.
   *
   * @param options the meta options to parse
   * @throws Exception if parsing fails
   */
  protected void processMetaOptions(String[] options) throws Exception {

    String classifierString = Utils.getOption('M', options);
    String [] classifierSpec = Utils.splitOptions(classifierString);
    if (classifierSpec.length != 0) {
      String classifierName = classifierSpec[0];
      classifierSpec[0] = "";
      setMetaClassifier(AbstractClassifier.forName(classifierName, classifierSpec));
    } else {
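      // No -M option given: assume the default LinearRegression meta
      // classifier and re-apply the attribute selection setting used
      // in the constructor.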
      ((LinearRegression)(getMetaClassifier())).setAttributeSelectionMethod(
        new weka.core.SelectedTag(1, LinearRegression.TAGS_SELECTION));
    }
  }

  /**
   * Builds the meta level.
   *
   * @param newData the data to work with
   * @param random the random number generator to use for cross-validation
   * @throws Exception if generation fails
   */
  protected void generateMetaLevel(Instances newData, Random random)
    throws Exception {

    Instances metaData = metaFormat(newData);
    m_MetaFormat = new Instances(metaData, 0);
    for (int j = 0; j < m_NumFolds; j++) {
      Instances train = newData.trainCV(m_NumFolds, j, random);

      // Build base classifiers
      for (int i = 0; i < m_Classifiers.length; i++) {
        getClassifier(i).buildClassifier(train);
      }

      // Classify test instances and add to meta data
      Instances test = newData.testCV(m_NumFolds, j);
      for (int i = 0; i < test.numInstances(); i++) {
        metaData.add(metaInstance(test.instance(i)));
      }
    }

    m_MetaClassifiers = AbstractClassifier.makeCopies(m_MetaClassifier,
        m_BaseFormat.numClasses());

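    // The meta-level data holds, for each base classifier j, one probability
    // attribute per class (attribute index numClasses * j + i for class i),
    // followed by the class attribute. One meta regressor is trained per
    // class value on just the columns that predict that class, with the
    // class turned into a numeric 0/1 indicator.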
    int [] arrIdc = new int[m_Classifiers.length + 1];
    arrIdc[m_Classifiers.length] = metaData.numAttributes() - 1;
    Instances newInsts;
    for (int i = 0; i < m_MetaClassifiers.length; i++) {
      for (int j = 0; j < m_Classifiers.length; j++) {
        arrIdc[j] = m_BaseFormat.numClasses() * j + i;
      }
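      // Turn the nominal class into a numeric 0/1 indicator for class i so
      // that a numeric prediction scheme can be trained on it, then keep
      // only the base-classifier columns selected in arrIdc.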
      m_makeIndicatorFilter = new weka.filters.unsupervised.attribute.MakeIndicator();
      m_makeIndicatorFilter.setAttributeIndex("" + (metaData.classIndex() + 1));
      m_makeIndicatorFilter.setNumeric(true);
      m_makeIndicatorFilter.setValueIndex(i);
      m_makeIndicatorFilter.setInputFormat(metaData);
      newInsts = Filter.useFilter(metaData, m_makeIndicatorFilter);

      m_attrFilter = new weka.filters.unsupervised.attribute.Remove();
      m_attrFilter.setInvertSelection(true);
      m_attrFilter.setAttributeIndicesArray(arrIdc);
      m_attrFilter.setInputFormat(m_makeIndicatorFilter.getOutputFormat());
      newInsts = Filter.useFilter(newInsts, m_attrFilter);

      newInsts.setClassIndex(newInsts.numAttributes() - 1);

      m_MetaClassifiers[i].buildClassifier(newInsts);
    }
  }

  /**
   * Returns class probabilities for a given instance using the stacked classifiers.
   *
   * @param instance the instance to be classified
   * @return the distribution
   * @throws Exception if instance could not be classified
   * successfully
   */
  public double[] distributionForInstance(Instance instance) throws Exception {

    int [] arrIdc = new int[m_Classifiers.length + 1];
    arrIdc[m_Classifiers.length] = m_MetaFormat.numAttributes() - 1;
    double [] classProbs = new double[m_BaseFormat.numClasses()];
    Instance newInst;
    double sum = 0;

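    // For each class, push the meta-level instance through the same
    // MakeIndicator/Remove pipeline used during training and let the
    // corresponding meta regressor estimate that class's probability.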
    for (int i = 0; i < m_MetaClassifiers.length; i++) {
      for (int j = 0; j < m_Classifiers.length; j++) {
        arrIdc[j] = m_BaseFormat.numClasses() * j + i;
      }
      m_makeIndicatorFilter.setAttributeIndex("" + (m_MetaFormat.classIndex() + 1));
      m_makeIndicatorFilter.setNumeric(true);
      m_makeIndicatorFilter.setValueIndex(i);
      m_makeIndicatorFilter.setInputFormat(m_MetaFormat);
      m_makeIndicatorFilter.input(metaInstance(instance));
      m_makeIndicatorFilter.batchFinished();
      newInst = m_makeIndicatorFilter.output();

      m_attrFilter.setAttributeIndicesArray(arrIdc);
      m_attrFilter.setInvertSelection(true);
      m_attrFilter.setInputFormat(m_makeIndicatorFilter.getOutputFormat());
      m_attrFilter.input(newInst);
      m_attrFilter.batchFinished();
      newInst = m_attrFilter.output();

      classProbs[i] = m_MetaClassifiers[i].classifyInstance(newInst);
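      // A regression scheme may predict values outside [0, 1]; clip
      // before normalizing the distribution below.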
      if (classProbs[i] > 1) { classProbs[i] = 1; }
      if (classProbs[i] < 0) { classProbs[i] = 0; }
      sum += classProbs[i];
    }

    if (sum != 0) Utils.normalize(classProbs, sum);

    return classProbs;
  }

  /**
   * Outputs a string representation of this classifier.
   *
   * @return a string representation of the classifier
   */
  public String toString() {

    if (m_MetaFormat == null) {
      return "StackingC: No model built yet.";
    }
    String result = "StackingC\n\nBase classifiers\n\n";
    for (int i = 0; i < m_Classifiers.length; i++) {
      result += getClassifier(i).toString() + "\n\n";
    }

    result += "\n\nMeta classifiers (one for each class)\n\n";
    for (int i = 0; i < m_MetaClassifiers.length; i++) {
      result += m_MetaClassifiers[i].toString() + "\n\n";
    }

    return result;
  }

  /**
   * Returns the revision string.
   *
   * @return the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 5928 $");
  }

  /**
   * Main method for testing this class.
   *
   * @param argv should contain the following arguments:
   * -t training file [-T test file] [-c class index]
   */
  public static void main(String [] argv) {
    runClassifier(new StackingC(), argv);
  }
}