mlogl-unco.Rout.save

R : Copyright 2006, The R Foundation for Statistical Computing
Version 2.3.1 (2006-06-01)
ISBN 3-900051-07-0

R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
Type 'license()' or 'licence()' for distribution details.

R is a collaborative project with many contributors.
Type 'contributors()' for more information and
'citation()' on how to cite R or R packages in publications.

Type 'demo()' for some demos, 'help()' for on-line help, or
'help.start()' for an HTML browser interface to help.
Type 'q()' to quit R.

> 
>  library(aster)
Loading required package: trust
> 
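>  # Test of mlogl(type = "unco"), the minus log likelihood for the
>  # unconditional aster model parameterization, and of its 'origin'
>  # argument: results computed with an explicit zero origin should match
>  # the default origin after the corresponding shift of the coefficients.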
>  set.seed(42)
>  nind <- 25
>  nnode <- 5
>  ncoef <- nnode + 1
> 
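>  # Graph and family structure: fam[j] selects the exponential family for
>  # node j from famlist (here fam.default()), and pred[j] gives the
>  # predecessor node of node j, with 0 meaning the node sits on root data.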
>  famlist <- fam.default()
>  fam <- c(1, 1, 2, 3, 3)
>  pred <- c(0, 1, 1, 2, 3)
> 
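>  # Model matrix: an nind x nnode x ncoef array with an intercept,
>  # indicator variables for nodes 2 through nnode, and one standard
>  # normal covariate in the last coefficient slot.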
>  modmat <- array(0, c(nind, nnode, ncoef))
>  modmat[ , , 1] <- 1
>  for (i in 2:nnode)
+      modmat[ , i, i] <- 1
>  modmat[ , , ncoef] <- rnorm(nind * nnode)
> 
>  beta <- rnorm(ncoef) / 10
> 
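>  # Unconditional canonical parameter phi = M %*% beta, reshaped to
>  # an nind x nnode matrix.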
>  phi <- matrix(modmat, ncol = ncoef) %*% beta
>  phi <- matrix(phi, ncol = nnode)
> 
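>  # aster:::setfam registers the family list with the package's compiled
>  # code; the C entry point aster_phi2theta then maps the unconditional
>  # canonical parameter phi to the conditional canonical parameter theta.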
>  aster:::setfam(fam.default())
> 
>  theta <- .C("aster_phi2theta",
+      nind = as.integer(nind),
+      nnode = as.integer(nnode),
+      pred = as.integer(pred),
+      fam = as.integer(fam),
+      phi = as.double(phi),
+      theta = matrix(as.double(0), nind, nnode))$theta
> 
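>  # Sample root data and simulate the response x from the aster model
>  # with conditional canonical parameter theta (raster is the aster
>  # model simulator).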
>  root <- sample(1:3, nind * nnode, replace = TRUE)
>  root <- matrix(root, nind, nnode)
> 
>  x <- raster(theta, pred, fam, root)
>  
>  zip <- rep(0, nind * nnode)
> 
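>  # Minus log likelihood, gradient, and Hessian at beta for the
>  # unconditional parameterization, with the origin set to zero
>  # (origin = zip).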
>  out <- mlogl(beta, pred, fam, x, root, modmat, deriv = 2,
+      type = "unco", origin = zip)
>  print(out)
$value
[1] 123.9680

$gradient
[1] 41.9553321  0.5679575 12.9399758 -1.4608417 28.3767383 27.9290757

$hessian
          [,1]      [,2]       [,3]      [,4]       [,5]       [,6]
[1,] 618.58754 40.278506 173.359839 72.121848 305.696826  26.306053
[2,]  40.27851 11.702546   3.353047 17.380877   5.231394  -2.212379
[3,] 173.35984  3.353047  61.948882  4.980101  96.895585  10.514502
[4,]  72.12185 17.380877   4.980101 38.114502   7.770363  -1.323664
[5,] 305.69683  5.231394  96.895585  7.770363 186.157029  20.455780
[6,]  26.30605 -2.212379  10.514502 -1.323664  20.455780 303.041693

> 
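>  # Recover the default origin in coefficient space: a is the phi value
>  # corresponding to theta = 0 (via the inverse map aster_theta2phi), and
>  # alpha is its least-squares representation, M %*% alpha ~= a.
>  # Evaluating mlogl at beta - alpha with the default origin should then
>  # reproduce the origin = zip result above.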
>  aster:::setfam(fam.default())
> 
>  a <- .C("aster_theta2phi",
+      nind = as.integer(nind),
+      nnode = as.integer(nnode),
+      pred = as.integer(pred),
+      fam = as.integer(fam),
+      theta = as.double(zip),
+      phi = matrix(as.double(0), nind, nnode),
+      PACKAGE = "aster")$phi
> 
>  M <- matrix(modmat, ncol = ncoef)
> 
>  alpha <- as.numeric(lm(as.numeric(a) ~ 0 + M)$coefficients)
>  
>  out.too <- mlogl(beta - alpha, pred, fam, x, root, modmat, deriv = 2,
+      type = "unco")
>  all.equal(out, out.too)
[1] TRUE
> 
>  beta.old <- beta
>  beta <- beta - alpha
> 
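>  # Check the value against a direct computation of the minus log
>  # likelihood, minus the sum over i and j of
>  # x[i, j] * theta[i, j] - xpred[i] * c(theta[i, j]),
>  # where xpred is the predecessor (or root) data and c is the cumulant
>  # function of the node's family, famfun(famlist[[ifam]], 0, theta).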
>  my.value <- 0
>  for (j in 1:nnode) {
+      ifam <- fam[j]
+      k <- pred[j]
+      if (k > 0)
+          xpred <- x[ , k]
+      else
+          xpred <- root[ , j]
+      for (i in 1:nind)
+          my.value <- my.value -
+              sum(x[i, j] * theta[i, j] -
+              xpred[i] * famfun(famlist[[ifam]], 0, theta[i, j]))
+  }
>  all.equal(out$value, my.value)
[1] TRUE
> 
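>  # Check the gradient against one-sided finite differences of the value.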
>  my.grad <- NaN * out$gradient
>  epsilon <- 1e-9
>  for (i in 1:ncoef) {
+      beta.eps <- beta
+      beta.eps[i] <- beta[i] + epsilon
+      out.eps <- mlogl(beta.eps, pred, fam, x, root, modmat, deriv = 0,
+          type = "unco")
+      my.grad[i] <- (out.eps$value - out$value) / epsilon
+  }
> 
>  all.equal(out$gradient, my.grad, tolerance = sqrt(epsilon))
[1] TRUE
> 
>  ##########
> 
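>  # Maximum likelihood: minimize mlogl with nlm, supplying the analytic
>  # gradient through the "gradient" attribute.  Restarting nlm from the
>  # solution should converge immediately (code 1, zero iterations).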
>  objfun <- function(beta) {
+      out <- mlogl(beta, pred, fam, x, root, modmat, deriv = 1,
+          type = "unco")
+      result <- out$value
+      attr(result, "gradient") <- out$gradient
+      return(result)
+  }
>  nout <- nlm(objfun, beta, fscale = nind)
>  print(nout)
$minimum
[1] 119.1048

$estimate
[1]  1.62506234 -1.45889278 -0.96558882 -1.61557689 -2.06951752 -0.08435998

$gradient
[1]  8.452131e-06 -2.515249e-05 -3.569333e-06 -2.927416e-06  1.135145e-05
[6] -6.876573e-07

$code
[1] 1

$iterations
[1] 34

>  nout <- nlm(objfun, nout$estimate, fscale = nind)
>  print(nout)
$minimum
[1] 119.1048

$estimate
[1]  1.62506234 -1.45889278 -0.96558882 -1.61557689 -2.06951752 -0.08435998

$gradient
[1]  8.452131e-06 -2.515249e-05 -3.569333e-06 -2.927416e-06  1.135145e-05
[6] -6.876573e-07

$code
[1] 1

$iterations
[1] 0

> 
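>  # The MLE under the default origin and the MLE under origin zero differ
>  # by alpha; the two mlogl evaluations at the respective optima should
>  # agree up to numerical error.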
>  beta.mle.new <- nout$estimate
>  beta.mle.old <- beta.mle.new + alpha
>  mout.new <- mlogl(beta.mle.new, pred, fam, x, root, modmat, deriv = 1,
+          type = "unco")
>  mout.old <- mlogl(beta.mle.old, pred, fam, x, root, modmat, deriv = 1,
+          type = "unco", origin = zip)
>  all.equal(mout.new, mout.old)
[1] "Component 2: Mean relative  difference: 3.221245e-08"
> 
>  ##########
> 
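>  # Check the Hessian against one-sided finite differences of the gradient.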
>  my.hess <- matrix(NaN, ncoef, ncoef)
>  for (i in 1:ncoef) {
+      beta.eps <- beta
+      beta.eps[i] <- beta[i] + epsilon
+      out.eps <- mlogl(beta.eps, pred, fam, x, root, modmat, deriv = 1,
+          type = "unco")
+      my.hess[ , i] <- (out.eps$gradient - out$gradient) / epsilon
+  }
> 
>  all.equal(out$hessian, my.hess, tolerance = sqrt(epsilon))
[1] TRUE
> 
>  ##########
> 
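>  # Same fit, now also supplying the analytic Hessian through the
>  # "hessian" attribute.  The first run stops at the default iteration
>  # limit (code 4); the restart with iterlim = 1000 reaches essentially
>  # the same minimum as before.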
>  objfun <- function(beta) {
+      out <- mlogl(beta, pred, fam, x, root, modmat, deriv = 2,
+          type = "unco")
+      result <- out$value
+      attr(result, "gradient") <- out$gradient
+      attr(result, "hessian") <- out$hessian
+      return(result)
+  }
>  nout <- try(nlm(objfun, beta, fscale = nind))
>  print(nout)
$minimum
[1] 119.1066

$estimate
[1]  1.61154159 -1.43994211 -0.97003887 -1.60248718 -2.04287896 -0.08345522

$gradient
[1] -0.027848502  0.008147191 -0.078372174  0.004814663  0.103718207
[6]  0.050151163

$code
[1] 4

$iterations
[1] 100

>  nout <- nlm(objfun, nout$estimate, fscale = nind, iterlim = 1000)
>  print(nout)
$minimum
[1] 119.1048

$estimate
[1]  1.62499634 -1.45880221 -0.96551285 -1.61551376 -2.06945265 -0.08435909

$gradient
[1] -8.888587e-05  1.875676e-05  5.313826e-05  2.266534e-05  6.445778e-05
[6]  4.977037e-05

$code
[1] 2

$iterations
[1] 972

> 
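>  # Refit in the old (origin = zip) parameterization starting from the
>  # shifted MLE: the minima should agree, and the estimates should differ
>  # by alpha up to optimizer tolerance.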
>  objfun.old <- function(beta) {
+      out <- mlogl(beta, pred, fam, x, root, modmat, deriv = 2,
+          type = "unco", origin = zip)
+      result <- out$value
+      attr(result, "gradient") <- out$gradient
+      attr(result, "hessian") <- out$hessian
+      return(result)
+  }
>  nout.old <- nlm(objfun.old, beta.mle.old, fscale = nind, iterlim = 1000)
>  print(nout.old)
$minimum
[1] 119.1048

$estimate
[1] -0.06808484 -0.30707046  0.18623351  0.07757029 -0.37637034 -0.08435998

$gradient
[1]  8.452130e-06 -2.515249e-05 -3.569333e-06 -2.927416e-06  1.135145e-05
[6] -6.876573e-07

$code
[1] 1

$iterations
[1] 0

>  all.equal(nout$minimum, nout.old$minimum)
[1] TRUE
>  all.equal(nout$estimate, nout.old$estimate - alpha)
[1] "Mean relative  difference: 4.622505e-05"
> 
> 