/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 * Winnow.java
 * Copyright (C) 2002 J. Lindgren
 *
 */
package weka.classifiers.functions;

import weka.classifiers.Classifier;
import weka.classifiers.AbstractClassifier;
import weka.classifiers.UpdateableClassifier;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.NominalToBinary;
import weka.filters.unsupervised.attribute.ReplaceMissingValues;

import java.util.Enumeration;
import java.util.Random;
import java.util.Vector;

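// Minimal programmatic usage sketch (illustrative only; "train" stands for a
// weka.core.Instances object whose class index has already been set):
//
//   Winnow w = new Winnow();
//   w.setBalanced(true);       // Balanced Winnow (-L)
//   w.setNumIterations(5);     // -I 5
//   w.buildClassifier(train);
//   double pred = w.classifyInstance(train.instance(0));
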
---|
/**
 <!-- globalinfo-start -->
 * Implements Winnow and Balanced Winnow algorithms by Littlestone.<br/>
 * <br/>
 * For more information, see<br/>
 * <br/>
 * N. Littlestone (1988). Learning quickly when irrelevant attributes abound: A new linear threshold algorithm. Machine Learning. 2:285-318.<br/>
 * <br/>
 * N. Littlestone (1989). Mistake bounds and logarithmic linear-threshold learning algorithms. University of California, Santa Cruz.<br/>
 * <br/>
 * Does classification for problems with nominal attributes (which it converts into binary attributes).
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * @article{Littlestone1988,
 *    author = {N. Littlestone},
 *    journal = {Machine Learning},
 *    pages = {285-318},
 *    title = {Learning quickly when irrelevant attributes abound: A new linear threshold algorithm},
 *    volume = {2},
 *    year = {1988}
 * }
 *
 * @techreport{Littlestone1989,
 *    address = {University of California, Santa Cruz},
 *    author = {N. Littlestone},
 *    institution = {University of California},
 *    note = {Technical Report UCSC-CRL-89-11},
 *    title = {Mistake bounds and logarithmic linear-threshold learning algorithms},
 *    year = {1989}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -L
 *  Use the baLanced version
 *  (default false)</pre>
 *
 * <pre> -I <int>
 *  The number of iterations to be performed.
 *  (default 1)</pre>
 *
 * <pre> -A <double>
 *  Promotion coefficient alpha.
 *  (default 2.0)</pre>
 *
 * <pre> -B <double>
 *  Demotion coefficient beta.
 *  (default 0.5)</pre>
 *
 * <pre> -H <double>
 *  Prediction threshold.
 *  (default -1.0 == number of attributes)</pre>
 *
 * <pre> -W <double>
 *  Starting weights.
 *  (default 2.0)</pre>
 *
 * <pre> -S <int>
 *  Default random seed.
 *  (default 1)</pre>
 *
 <!-- options-end -->
 *
 * @author J. Lindgren (jtlindgr at cs.helsinki.fi)
 * @version $Revision: 5928 $
 */
---|
public class Winnow
  extends AbstractClassifier
  implements UpdateableClassifier, TechnicalInformationHandler {

  /** for serialization */
  static final long serialVersionUID = 3543770107994321324L;

  /** Use the balanced variant? **/
  protected boolean m_Balanced;

  /** The number of iterations **/
  protected int m_numIterations = 1;

  /** The promotion coefficient **/
  protected double m_Alpha = 2.0;

  /** The demotion coefficient **/
  protected double m_Beta = 0.5;

  /** Prediction threshold, <0 == numAttributes **/
  protected double m_Threshold = -1.0;

  /** Random seed used for shuffling the dataset, -1 == disable **/
  protected int m_Seed = 1;

  /** Accumulated mistake count (for statistics) **/
  protected int m_Mistakes;

  /** Starting weights for the prediction vector(s) **/
  protected double m_defaultWeight = 2.0;

  /** The weight vector for prediction (pos) */
  private double[] m_predPosVector = null;

  /** The weight vector for prediction (neg) */
  private double[] m_predNegVector = null;

  /** The true threshold used for prediction **/
  private double m_actualThreshold;

  /** The training instances */
  private Instances m_Train = null;

  /** The filter used to make attributes numeric. */
  private NominalToBinary m_NominalToBinary;

  /** The filter used to get rid of missing values. */
  private ReplaceMissingValues m_ReplaceMissingValues;

---|
  /**
   * Returns a string describing classifier
   * @return a description suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {

    return "Implements Winnow and Balanced Winnow algorithms by "
      + "Littlestone.\n\n"
      + "For more information, see\n\n"
      + getTechnicalInformation().toString()
      + "\n\n"
      + "Does classification for problems with nominal attributes "
      + "(which it converts into binary attributes).";
  }

---|
  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation result;
    TechnicalInformation additional;

    result = new TechnicalInformation(Type.ARTICLE);
    result.setValue(Field.AUTHOR, "N. Littlestone");
    result.setValue(Field.YEAR, "1988");
    result.setValue(Field.TITLE, "Learning quickly when irrelevant attributes abound: A new linear threshold algorithm");
    result.setValue(Field.JOURNAL, "Machine Learning");
    result.setValue(Field.VOLUME, "2");
    result.setValue(Field.PAGES, "285-318");

    additional = result.add(Type.TECHREPORT);
    additional.setValue(Field.AUTHOR, "N. Littlestone");
    additional.setValue(Field.YEAR, "1989");
    additional.setValue(Field.TITLE, "Mistake bounds and logarithmic linear-threshold learning algorithms");
    additional.setValue(Field.INSTITUTION, "University of California");
    additional.setValue(Field.ADDRESS, "University of California, Santa Cruz");
    additional.setValue(Field.NOTE, "Technical Report UCSC-CRL-89-11");

    return result;
  }

---|
  /**
   * Returns an enumeration describing the available options
   *
   * @return an enumeration of all the available options
   */
  public Enumeration listOptions() {

    Vector newVector = new Vector(7);

    newVector.addElement(new Option("\tUse the baLanced version\n"
                                    + "\t(default false)",
                                    "L", 0, "-L"));
    newVector.addElement(new Option("\tThe number of iterations to be performed.\n"
                                    + "\t(default 1)",
                                    "I", 1, "-I <int>"));
    newVector.addElement(new Option("\tPromotion coefficient alpha.\n"
                                    + "\t(default 2.0)",
                                    "A", 1, "-A <double>"));
    newVector.addElement(new Option("\tDemotion coefficient beta.\n"
                                    + "\t(default 0.5)",
                                    "B", 1, "-B <double>"));
    newVector.addElement(new Option("\tPrediction threshold.\n"
                                    + "\t(default -1.0 == number of attributes)",
                                    "H", 1, "-H <double>"));
    newVector.addElement(new Option("\tStarting weights.\n"
                                    + "\t(default 2.0)",
                                    "W", 1, "-W <double>"));
    newVector.addElement(new Option("\tDefault random seed.\n"
                                    + "\t(default 1)",
                                    "S", 1, "-S <int>"));

    return newVector.elements();
  }

---|
  /**
   * Parses a given list of options.<p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -L
   *  Use the baLanced version
   *  (default false)</pre>
   *
   * <pre> -I <int>
   *  The number of iterations to be performed.
   *  (default 1)</pre>
   *
   * <pre> -A <double>
   *  Promotion coefficient alpha.
   *  (default 2.0)</pre>
   *
   * <pre> -B <double>
   *  Demotion coefficient beta.
   *  (default 0.5)</pre>
   *
   * <pre> -H <double>
   *  Prediction threshold.
   *  (default -1.0 == number of attributes)</pre>
   *
   * <pre> -W <double>
   *  Starting weights.
   *  (default 2.0)</pre>
   *
   * <pre> -S <int>
   *  Default random seed.
   *  (default 1)</pre>
   *
   <!-- options-end -->
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    m_Balanced = Utils.getFlag('L', options);

    String iterationsString = Utils.getOption('I', options);
    if (iterationsString.length() != 0) {
      m_numIterations = Integer.parseInt(iterationsString);
    }
    String alphaString = Utils.getOption('A', options);
    if (alphaString.length() != 0) {
      m_Alpha = (new Double(alphaString)).doubleValue();
    }
    String betaString = Utils.getOption('B', options);
    if (betaString.length() != 0) {
      m_Beta = (new Double(betaString)).doubleValue();
    }
    String tString = Utils.getOption('H', options);
    if (tString.length() != 0) {
      m_Threshold = (new Double(tString)).doubleValue();
    }
    String wString = Utils.getOption('W', options);
    if (wString.length() != 0) {
      m_defaultWeight = (new Double(wString)).doubleValue();
    }
    String rString = Utils.getOption('S', options);
    if (rString.length() != 0) {
      m_Seed = Integer.parseInt(rString);
    }
  }

---|
  /**
   * Gets the current settings of the classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {

    String[] options = new String[20];
    int current = 0;

    if (m_Balanced) {
      options[current++] = "-L";
    }

    options[current++] = "-I"; options[current++] = "" + m_numIterations;
    options[current++] = "-A"; options[current++] = "" + m_Alpha;
    options[current++] = "-B"; options[current++] = "" + m_Beta;
    options[current++] = "-H"; options[current++] = "" + m_Threshold;
    options[current++] = "-W"; options[current++] = "" + m_defaultWeight;
    options[current++] = "-S"; options[current++] = "" + m_Seed;
    while (current < options.length) {
      options[current++] = "";
    }
    return options;
  }

---|
  /**
   * Returns default capabilities of the classifier.
   *
   * @return the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result = super.getCapabilities();
    result.disableAll();

    // attributes
    result.enable(Capability.NOMINAL_ATTRIBUTES);
    result.enable(Capability.MISSING_VALUES);

    // class
    result.enable(Capability.BINARY_CLASS);
    result.enable(Capability.MISSING_CLASS_VALUES);

    // instances
    result.setMinimumNumberInstances(0);

    return result;
  }

---|
  /**
   * Builds the classifier
   *
   * @param insts the data to train the classifier with
   * @throws Exception if something goes wrong during building
   */
  public void buildClassifier(Instances insts) throws Exception {

    // can classifier handle the data?
    getCapabilities().testWithFail(insts);

    // remove instances with missing class
    insts = new Instances(insts);
    insts.deleteWithMissingClass();

    // Filter data
    m_Train = new Instances(insts);

    m_ReplaceMissingValues = new ReplaceMissingValues();
    m_ReplaceMissingValues.setInputFormat(m_Train);
    m_Train = Filter.useFilter(m_Train, m_ReplaceMissingValues);
    m_NominalToBinary = new NominalToBinary();
    m_NominalToBinary.setInputFormat(m_Train);
    m_Train = Filter.useFilter(m_Train, m_NominalToBinary);

    // Randomize training data
    if (m_Seed != -1) {
      m_Train.randomize(new Random(m_Seed));
    }

    // Make space to store weights
    m_predPosVector = new double[m_Train.numAttributes()];

    if (m_Balanced) {
      m_predNegVector = new double[m_Train.numAttributes()];
    }

    // Initialize the weights to starting values
    for (int i = 0; i < m_Train.numAttributes(); i++) {
      m_predPosVector[i] = m_defaultWeight;
    }

    if (m_Balanced) {
      for (int i = 0; i < m_Train.numAttributes(); i++) {
        m_predNegVector[i] = m_defaultWeight;
      }
    }

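    // Note: numAttributes() still counts the class attribute here, so the
    // default threshold chosen below, numAttributes() - 1, equals the number
    // of predictor attributes.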
---|
    // Set actual prediction threshold
    if (m_Threshold < 0) {
      m_actualThreshold = (double) m_Train.numAttributes() - 1;
    } else {
      m_actualThreshold = m_Threshold;
    }

    m_Mistakes = 0;

    // Compute the weight vectors
    if (m_Balanced) {
      for (int it = 0; it < m_numIterations; it++) {
        for (int i = 0; i < m_Train.numInstances(); i++) {
          actualUpdateClassifierBalanced(m_Train.instance(i));
        }
      }
    } else {
      for (int it = 0; it < m_numIterations; it++) {
        for (int i = 0; i < m_Train.numInstances(); i++) {
          actualUpdateClassifier(m_Train.instance(i));
        }
      }
    }
  }

---|
  /**
   * Updates the classifier with a new learning example
   *
   * @param instance the instance to update the classifier with
   * @throws Exception if something goes wrong
   */
  public void updateClassifier(Instance instance) throws Exception {

    m_ReplaceMissingValues.input(instance);
    m_ReplaceMissingValues.batchFinished();
    Instance filtered = m_ReplaceMissingValues.output();
    m_NominalToBinary.input(filtered);
    m_NominalToBinary.batchFinished();
    filtered = m_NominalToBinary.output();

    if (m_Balanced) {
      actualUpdateClassifierBalanced(filtered);
    } else {
      actualUpdateClassifier(filtered);
    }
  }

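  // Mistake-driven multiplicative update (plain Winnow): when the prediction
  // disagrees with the class value, every active attribute (sparse value == 1,
  // class index excluded) has its weight multiplied by alpha after a false
  // negative (promotion) or by beta after a false positive (demotion);
  // weights of inactive attributes are left unchanged.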
---|
  /**
   * Actual update routine for prefiltered instances
   *
   * @param inst the instance to update the classifier with
   * @throws Exception if something goes wrong
   */
  private void actualUpdateClassifier(Instance inst) throws Exception {

    double posmultiplier;

    if (!inst.classIsMissing()) {
      double prediction = makePrediction(inst);

      if (prediction != inst.classValue()) {
        m_Mistakes++;

        if (prediction == 0) {
          /* false neg: promote */
          posmultiplier = m_Alpha;
        } else {
          /* false pos: demote */
          posmultiplier = m_Beta;
        }
        int n1 = inst.numValues();
        int classIndex = m_Train.classIndex();
        for (int l = 0; l < n1; l++) {
          if (inst.index(l) != classIndex && inst.valueSparse(l) == 1) {
            m_predPosVector[inst.index(l)] *= posmultiplier;
          }
        }
        //Utils.normalize(m_predPosVector);
      }
    } else {
      System.out.println("CLASS MISSING");
    }
  }

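  // Balanced Winnow keeps two weight vectors: on a mistake, the positive and
  // negative weights of every active attribute are scaled in opposite
  // directions (one by alpha, the other by beta), so the effective weight
  // (positive minus negative) used for prediction can move either way.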
---|
  /**
   * Actual update routine (balanced) for prefiltered instances
   *
   * @param inst the instance to update the classifier with
   * @throws Exception if something goes wrong
   */
  private void actualUpdateClassifierBalanced(Instance inst) throws Exception {

    double posmultiplier, negmultiplier;

    if (!inst.classIsMissing()) {
      double prediction = makePredictionBalanced(inst);

      if (prediction != inst.classValue()) {
        m_Mistakes++;

        if (prediction == 0) {
          /* false neg: promote positive, demote negative */
          posmultiplier = m_Alpha;
          negmultiplier = m_Beta;
        } else {
          /* false pos: demote positive, promote negative */
          posmultiplier = m_Beta;
          negmultiplier = m_Alpha;
        }
        int n1 = inst.numValues();
        int classIndex = m_Train.classIndex();
        for (int l = 0; l < n1; l++) {
          if (inst.index(l) != classIndex && inst.valueSparse(l) == 1) {
            m_predPosVector[inst.index(l)] *= posmultiplier;
            m_predNegVector[inst.index(l)] *= negmultiplier;
          }
        }
        //Utils.normalize(m_predPosVector);
        //Utils.normalize(m_predNegVector);
      }
    } else {
      System.out.println("CLASS MISSING");
    }
  }

---|
  /**
   * Outputs the prediction for the given instance.
   *
   * @param inst the instance for which prediction is to be computed
   * @return the prediction
   * @throws Exception if something goes wrong
   */
  public double classifyInstance(Instance inst) throws Exception {

    m_ReplaceMissingValues.input(inst);
    m_ReplaceMissingValues.batchFinished();
    Instance filtered = m_ReplaceMissingValues.output();
    m_NominalToBinary.input(filtered);
    m_NominalToBinary.batchFinished();
    filtered = m_NominalToBinary.output();

    if (m_Balanced) {
      return makePredictionBalanced(filtered);
    } else {
      return makePrediction(filtered);
    }
  }

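  // Decision rule (plain Winnow): predict class 1 iff the summed weight of
  // the active attributes exceeds the prediction threshold, otherwise 0.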
---|
  /**
   * Compute the actual prediction for prefiltered instance
   *
   * @param inst the instance for which prediction is to be computed
   * @return the prediction
   * @throws Exception if something goes wrong
   */
  private double makePrediction(Instance inst) throws Exception {

    double total = 0;

    int n1 = inst.numValues();
    int classIndex = m_Train.classIndex();

    for (int i = 0; i < n1; i++) {
      if (inst.index(i) != classIndex && inst.valueSparse(i) == 1) {
        total += m_predPosVector[inst.index(i)];
      }
    }

    if (total > m_actualThreshold) {
      return 1;
    } else {
      return 0;
    }
  }

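  // Decision rule (Balanced Winnow): predict class 1 iff the sum of
  // (positive weight - negative weight) over the active attributes exceeds
  // the prediction threshold, otherwise 0.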
---|
  /**
   * Compute our prediction (Balanced) for prefiltered instance
   *
   * @param inst the instance for which prediction is to be computed
   * @return the prediction
   * @throws Exception if something goes wrong
   */
  private double makePredictionBalanced(Instance inst) throws Exception {

    double total = 0;

    int n1 = inst.numValues();
    int classIndex = m_Train.classIndex();

    for (int i = 0; i < n1; i++) {
      if (inst.index(i) != classIndex && inst.valueSparse(i) == 1) {
        total += (m_predPosVector[inst.index(i)] - m_predNegVector[inst.index(i)]);
      }
    }

    if (total > m_actualThreshold) {
      return 1;
    } else {
      return 0;
    }
  }

---|
  /**
   * Returns textual description of the classifier.
   *
   * @return textual description of the classifier
   */
  public String toString() {

    if (m_predPosVector == null) {
      return "Winnow: No model built yet.";
    }

    String result = "Winnow\n\nAttribute weights\n\n";

    int classIndex = m_Train.classIndex();

    if (!m_Balanced) {
      for (int i = 0; i < m_Train.numAttributes(); i++) {
        if (i != classIndex) {
          result += "w" + i + " " + m_predPosVector[i] + "\n";
        }
      }
    } else {
      for (int i = 0; i < m_Train.numAttributes(); i++) {
        if (i != classIndex) {
          result += "w" + i + " p " + m_predPosVector[i];
          result += " n " + m_predNegVector[i];

          double wdiff = m_predPosVector[i] - m_predNegVector[i];

          result += " d " + wdiff + "\n";
        }
      }
    }
    result += "\nCumulated mistake count: " + m_Mistakes + "\n\n";

    return result;
  }

---|
  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String balancedTipText() {
    return "Whether to use the balanced version of the algorithm.";
  }

  /**
   * Get the value of Balanced.
   *
   * @return Value of Balanced.
   */
  public boolean getBalanced() {

    return m_Balanced;
  }

  /**
   * Set the value of Balanced.
   *
   * @param b Value to assign to Balanced.
   */
  public void setBalanced(boolean b) {

    m_Balanced = b;
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String alphaTipText() {
    return "Promotion coefficient alpha.";
  }

  /**
   * Get the value of Alpha.
   *
   * @return Value of Alpha.
   */
  public double getAlpha() {

    return m_Alpha;
  }

  /**
   * Set the value of Alpha.
   *
   * @param a Value to assign to Alpha.
   */
  public void setAlpha(double a) {

    m_Alpha = a;
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String betaTipText() {
    return "Demotion coefficient beta.";
  }

  /**
   * Get the value of Beta.
   *
   * @return Value of Beta.
   */
  public double getBeta() {

    return m_Beta;
  }

  /**
   * Set the value of Beta.
   *
   * @param b Value to assign to Beta.
   */
  public void setBeta(double b) {

    m_Beta = b;
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String thresholdTipText() {
    return "Prediction threshold (-1 means: set to number of attributes).";
  }

  /**
   * Get the value of Threshold.
   *
   * @return Value of Threshold.
   */
  public double getThreshold() {

    return m_Threshold;
  }

  /**
   * Set the value of Threshold.
   *
   * @param t Value to assign to Threshold.
   */
  public void setThreshold(double t) {

    m_Threshold = t;
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String defaultWeightTipText() {
    return "Initial value of weights/coefficients.";
  }

  /**
   * Get the value of defaultWeight.
   *
   * @return Value of defaultWeight.
   */
  public double getDefaultWeight() {

    return m_defaultWeight;
  }

  /**
   * Set the value of defaultWeight.
   *
   * @param w Value to assign to defaultWeight.
   */
  public void setDefaultWeight(double w) {

    m_defaultWeight = w;
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String numIterationsTipText() {
    return "The number of iterations to be performed.";
  }

  /**
   * Get the value of numIterations.
   *
   * @return Value of numIterations.
   */
  public int getNumIterations() {

    return m_numIterations;
  }

  /**
   * Set the value of numIterations.
   *
   * @param v Value to assign to numIterations.
   */
  public void setNumIterations(int v) {

    m_numIterations = v;
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String seedTipText() {
    return "Random number seed used for data shuffling (-1 means no "
      + "randomization).";
  }

  /**
   * Get the value of Seed.
   *
   * @return Value of Seed.
   */
  public int getSeed() {

    return m_Seed;
  }

  /**
   * Set the value of Seed.
   *
   * @param v Value to assign to Seed.
   */
  public void setSeed(int v) {

    m_Seed = v;
  }

---|
  /**
   * Returns the revision string.
   *
   * @return the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 5928 $");
  }

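  // Example command-line invocation (illustrative; "train.arff" is a
  // placeholder dataset, and -t is the standard Weka evaluation option
  // naming the training file):
  //
  //   java weka.classifiers.functions.Winnow -t train.arff -L -I 5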
---|
  /**
   * Main method.
   *
   * @param argv the commandline options
   */
  public static void main(String[] argv) {
    runClassifier(new Winnow(), argv);
  }
}