/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
/*
* MIEMDD.java
* Copyright (C) 2005 University of Waikato, Hamilton, New Zealand
*
*/
package weka.classifiers.mi;
import weka.classifiers.RandomizableClassifier;
import weka.core.Capabilities;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.MultiInstanceCapabilitiesHandler;
import weka.core.Optimization;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.RevisionUtils;
import weka.core.SelectedTag;
import weka.core.Tag;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.Normalize;
import weka.filters.unsupervised.attribute.ReplaceMissingValues;
import weka.filters.unsupervised.attribute.Standardize;
import java.util.Enumeration;
import java.util.Random;
import java.util.Vector;
/**
 * The EMDD model builds heavily upon Dietterich's Diverse Density (DD) algorithm.
 * It is a general framework for MI learning that converts the MI problem to a
 * single-instance setting using EM. In this implementation, we use the
 * most-likely-cause DD model and only use 3 randomly selected positive bags as
 * initial starting points for EM.
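 * <p>
 * A minimal usage sketch (the data file name is hypothetical; any multi-instance
 * ARFF file with a bag identifier, a relational bag attribute and a class
 * attribute should work):
 * <pre>
 * Instances data = weka.core.converters.ConverterUtils.DataSource.read("musk1.arff");
 * data.setClassIndex(data.numAttributes() - 1);
 *
 * MIEMDD emdd = new MIEMDD();
 * emdd.setFilterType(new SelectedTag(MIEMDD.FILTER_STANDARDIZE, MIEMDD.TAGS_FILTER));
 * emdd.setSeed(1);
 * emdd.buildClassifier(data);
 * </pre>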
*
* For more information see:
*
* Qi Zhang, Sally A. Goldman: EM-DD: An Improved Multiple-Instance Learning Technique. In: Advances in Neural Information Processing Systems 14, 1073-108, 2001.
*
 * <p>
 * BibTeX:
 * <pre>
 * &#64;inproceedings{Zhang2001,
 *    author = {Qi Zhang and Sally A. Goldman},
 *    booktitle = {Advances in Neural Information Processing Systems 14},
 *    pages = {1073-108},
 *    publisher = {MIT Press},
 *    title = {EM-DD: An Improved Multiple-Instance Learning Technique},
 *    year = {2001}
 * }
 * </pre>
 * <p>
 * Valid options are:
 *
 * <pre> -N &lt;num&gt;
 *  Whether to 0=normalize/1=standardize/2=neither.
 *  (default 1=standardize)</pre>
 *
 * <pre> -S &lt;num&gt;
 *  Random number seed.
 *  (default 1)</pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 * @author Eibe Frank (eibe@cs.waikato.ac.nz)
 * @author Lin Dong (ld21@cs.waikato.ac.nz)
 * @version $Revision: 5481 $
 */
public class MIEMDD
  extends RandomizableClassifier
  implements OptionHandler, MultiInstanceCapabilitiesHandler,
             TechnicalInformationHandler {

  /** for serialization */
  static final long serialVersionUID = 3899547154866223734L;

  /** The index of the class attribute */
  protected int m_ClassIndex;

  /** Fitted parameters: target point and scale for each attribute */
  protected double[] m_Par;

  /** The number of the class labels */
  protected int m_NumClasses;

  /** Class labels for each bag */
  protected int[] m_Classes;

  /** MI data */
  protected double[][][] m_Data;

  /** All attribute names */
  protected Instances m_Attributes;

  /** MI data: one selected instance per bag (used during EM) */
  protected double[][] m_emData;

  /** The filter used to standardize/normalize all values. */
  protected Filter m_Filter = null;

  /** Whether to normalize/standardize/neither, default: standardize */
  protected int m_filterType = FILTER_STANDARDIZE;

  /** Normalize training data */
  public static final int FILTER_NORMALIZE = 0;

  /** Standardize training data */
  public static final int FILTER_STANDARDIZE = 1;

  /** No normalization/standardization */
  public static final int FILTER_NONE = 2;

  /** The filter to apply to the training data */
  public static final Tag[] TAGS_FILTER = {
    new Tag(FILTER_NORMALIZE, "Normalize training data"),
    new Tag(FILTER_STANDARDIZE, "Standardize training data"),
    new Tag(FILTER_NONE, "No normalization/standardization"),
  };

  /** The filter used to get rid of missing values. */
  protected ReplaceMissingValues m_Missing = new ReplaceMissingValues();

  /**
   * Returns a string describing this classifier.
   *
   * @return a description of the classifier suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {
    return
        "The EMDD model builds heavily upon Dietterich's Diverse Density (DD) "
      + "algorithm.\nIt is a general framework for MI learning that converts "
      + "the MI problem to a single-instance setting using EM. In this "
      + "implementation, we use the most-likely-cause DD model and only use 3 "
      + "randomly selected positive bags as initial starting points for EM.\n\n"
      + "For more information see:\n\n"
      + getTechnicalInformation().toString();
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation result;

    result = new TechnicalInformation(Type.INPROCEEDINGS);
    result.setValue(Field.AUTHOR, "Qi Zhang and Sally A. Goldman");
    result.setValue(Field.TITLE, "EM-DD: An Improved Multiple-Instance Learning Technique");
    result.setValue(Field.BOOKTITLE, "Advances in Neural Information Processing Systems 14");
    result.setValue(Field.YEAR, "2001");
    result.setValue(Field.PAGES, "1073-108");
    result.setValue(Field.PUBLISHER, "MIT Press");

    return result;
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options
   */
  public Enumeration listOptions() {
    Vector result = new Vector();

    result.addElement(new Option(
        "\tWhether to 0=normalize/1=standardize/2=neither.\n"
        + "\t(default 1=standardize)",
        "N", 1, "-N <num>"));

    Enumeration enu = super.listOptions();
    while (enu.hasMoreElements()) {
      result.addElement(enu.nextElement());
    }

    return result.elements();
  }
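  /*
   * A small usage sketch of the option handling below (the option values are
   * arbitrary; -S and -D are passed on to super.setOptions):
   *
   *   MIEMDD classifier = new MIEMDD();
   *   classifier.setOptions(new String[]{"-N", "0", "-S", "2"}); // normalize, seed 2
   *   System.out.println(Utils.joinOptions(classifier.getOptions())); // contains "-N 0" and "-S 2"
   */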
  /**
   * Parses a given list of options.
   *
   * Valid options are:
   *
   * <pre> -N &lt;num&gt;
   *  Whether to 0=normalize/1=standardize/2=neither.
   *  (default 1=standardize)</pre>
   *
   * <pre> -S &lt;num&gt;
   *  Random number seed.
   *  (default 1)</pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {
    String tmpStr;

    tmpStr = Utils.getOption('N', options);
    if (tmpStr.length() != 0) {
      setFilterType(new SelectedTag(Integer.parseInt(tmpStr), TAGS_FILTER));
    } else {
      setFilterType(new SelectedTag(FILTER_STANDARDIZE, TAGS_FILTER));
    }

    super.setOptions(options);
  }

  /**
   * Gets the current settings of the classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {
    Vector result;
    String[] options;
    int i;

    result = new Vector();

    options = super.getOptions();
    for (i = 0; i < options.length; i++)
      result.add(options[i]);

    result.add("-N");
    result.add("" + m_filterType);

    return (String[]) result.toArray(new String[result.size()]);
  }

  /**
   * Returns the tip text for this property.
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String filterTypeTipText() {
    return "The filter type for transforming the training data.";
  }

  /**
   * Gets how the training data will be transformed. Will be one of
   * FILTER_NORMALIZE, FILTER_STANDARDIZE, FILTER_NONE.
   *
   * @return the filtering mode
   */
  public SelectedTag getFilterType() {
    return new SelectedTag(m_filterType, TAGS_FILTER);
  }

  /**
   * Sets how the training data will be transformed. Should be one of
   * FILTER_NORMALIZE, FILTER_STANDARDIZE, FILTER_NONE.
   *
   * @param newType the new filtering mode
   */
  public void setFilterType(SelectedTag newType) {
    if (newType.getTags() == TAGS_FILTER) {
      m_filterType = newType.getSelectedTag().getID();
    }
  }

  private class OptEng
    extends Optimization {

    /**
     * Evaluate objective function
     * @param x the current values of variables
     * @return the value of the objective function
     */
    protected double objectiveFunction(double[] x) {
      double nll = 0; // -LogLikelihood
      for (int i = 0; i