\name{entropy.based}
\alias{information.gain}
\alias{gain.ratio}
\alias{symmetrical.uncertainty}
\title{ Entropy-based filters }
\description{
  The algorithms find weights of attributes based on their correlation with the class attribute. Continuous attributes are discretised before the entropies are computed.
}
\usage{
information.gain(formula, data, unit)
gain.ratio(formula, data, unit)
symmetrical.uncertainty(formula, data, unit)
}
\arguments{
  \item{formula}{ A symbolic description of a model. }
  \item{data}{ Data to process. }
  \item{unit}{ Unit for computing entropy (passed to \code{\link[entropy]{entropy}}). Default is \code{"log"}.}
}
\details{
  \code{information.gain} is \deqn{H(Class) + H(Attribute) - H(Class, Attribute)}{H(Class) + H(Attribute) - H(Class, Attribute)}.
  
  \code{gain.ratio} is \deqn{\frac{H(Class) + H(Attribute) - H(Class, Attribute)}{H(Attribute)}}{(H(Class) + H(Attribute) - H(Class, Attribute)) / H(Attribute)}

  \code{symmetrical.uncertainty} is \deqn{2\frac{H(Class) + H(Attribute) - H(Class, Attribute)}{H(Attribute) + H(Class)}}{2 * (H(Class) + H(Attribute) - H(Class, Attribute)) / (H(Attribute) + H(Class))}
}
\value{
A \code{data.frame} containing the worth of the attributes in the first column, with the attribute names as row names.
}
\author{ Piotr Romanski, Lars Kotthoff }
\examples{
  data(iris)

  weights <- information.gain(Species~., iris)
  print(weights)
  subset <- cutoff.k(weights, 2)
  f <- as.simple.formula(subset, "Species")
  print(f)

  weights <- information.gain(Species~., iris, unit = "log2")
  print(weights)

  weights <- gain.ratio(Species~., iris)
  print(weights)
  subset <- cutoff.k(weights, 2)
  f <- as.simple.formula(subset, "Species")
  print(f)

  weights <- symmetrical.uncertainty(Species~., iris)
  print(weights)
  subset <- cutoff.biggest.diff(weights)
  f <- as.simple.formula(subset, "Species")
  print(f)
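
  # A minimal hand-computed sketch of the information gain formula from the
  # Details section, assuming a simple equal-width discretisation of one
  # attribute; FSelector discretises continuous attributes internally, so
  # its exact weights will differ from this illustration.
  H <- function(x) {                     # Shannon entropy in nats
    p <- table(x) / length(x)
    -sum(p * log(p))
  }
  disc <- cut(iris$Petal.Length, breaks = 3)
  # H(Class) + H(Attribute) - H(Class, Attribute)
  H(iris$Species) + H(disc) - H(paste(iris$Species, disc))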

}