https://github.com/cran/gstat
allier.Rout.save (gstat version 2.0-3; revision 6452e9e, Edzer Pebesma, 26 September 2019)

R version 3.2.2 (2015-08-14) -- "Fire Safety"
Copyright (C) 2015 The R Foundation for Statistical Computing
Platform: x86_64-pc-linux-gnu (64-bit)

R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
Type 'license()' or 'licence()' for distribution details.

R is a collaborative project with many contributors.
Type 'contributors()' for more information and
'citation()' on how to cite R or R packages in publications.

Type 'demo()' for some demos, 'help()' for on-line help, or
'help.start()' for an HTML browser interface to help.
Type 'q()' to quit R.

> # Sytze de Bruin's post to r-sig-geo, Nov 16, 2015:
> library(sp)
> library(gstat)
> 
> # some data
> x <- c(215, 330, 410, 470, 545)
> y <- c(230, 310, 330, 340, 365)
> fc <- c(0.211, 0.251, 0.281, 0.262, 0.242)
> por <- c(0.438, 0.457, 0.419, 0.430, 0.468)
> Allier <- data.frame(x, y, fc, por)
> coordinates(Allier) = ~x+y
> 
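For reference when reading the covariance dumps below, the pairwise distances between the five locations can be recomputed from the coordinates; a minimal base-R sketch (not part of the original test):

# pairwise Euclidean distances between the five sample locations
as.matrix(dist(coordinates(Allier)))   # e.g. points 1 and 2 are about 140.1 units apart
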
> # gstat object for co-kriging using linear model of co-regionalization
> g <- gstat(id=c("fc"), formula=fc~1, data=Allier,
+            model=vgm(0.00247, "Sph", 480, 0.00166))
> g <- gstat(g, id="por", formula=por~1, data=Allier,
+            model=vgm(0.00239, "Sph", 480, 0.00118))
> g <- gstat(g, id=c("fc", "por"),
+            model=vgm(0.00151, "Sph", 480, -0.00124))
> 
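The three vgm() calls above define a linear model of coregionalization: both direct variograms and the cross variogram share the same spherical structure (range 480) plus a nugget, and the "Linear Model of Coregionalization found. Good." message below shows that gstat accepts the coregionalization matrices as positive (semi-)definite. A minimal base-R sketch of that condition, using the partial sills and nuggets entered above (illustration only, not gstat's internal code):

# coregionalization matrices [fc, fc.por; fc.por, por] for each structure
B_sph <- matrix(c(0.00247, 0.00151, 0.00151, 0.00239), 2, 2)    # spherical partial sills
B_nug <- matrix(c(0.00166, -0.00124, -0.00124, 0.00118), 2, 2)  # nugget components
eigen(B_sph)$values   # both positive, so the spherical structure is positive definite
eigen(B_nug)$values   # both positive, so the nugget structure is positive definite
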
> # predict at single point
> g$set = list(choleski = 0) # LDL'
> A <- predict(g, SpatialPoints(data.frame(x=450, y=350)), debug = 32)
Linear Model of Coregionalization found. Good.
[using ordinary cokriging]
we're at location X: 450 Y: 350 Z: 0
zero block size
we're at point X: 450 Y: 350 Z: 0

# X:
Matrix: 10 by 2
rbind(
c(        1,         0), # row 1
c(        1,         0), # row 2
c(        1,         0), # row 3
c(        1,         0), # row 4
c(        1,         0), # row 5
c(        0,         1), # row 6
c(        0,         1), # row 7
c(        0,         1), # row 8
c(        0,         1), # row 9
c(        0,         1)  # row 10
)
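
The dumped X is the ordinary-cokriging design matrix: one constant-mean indicator column per variable, with the five fc observations (rows 1-5) stacked above the five por observations (rows 6-10). A base-R construction of the same 10 by 2 matrix, for illustration only:

# indicator design matrix for the two unknown means (fc, por)
vars <- factor(rep(c("fc", "por"), each = 5))
model.matrix(~ vars - 1)   # reproduces the matrix dumped above
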
[using generalized covariances: max_val - semivariance()]
# Covariances (x_i, x_j) matrix C (upper triangle):
Matrix: 10 by 10
rbind(
c(  0.00413, 0.00141939, 0.000895995, 0.000565582, 0.000224073,   0.00027, 0.000867723, 0.000547754, 0.000345761, 0.000136984), # row 1
c(0.00141939,   0.00413, 0.00183976, 0.00139762, 0.000879083, 0.000867723,   0.00027, 0.00112471, 0.000854416, 0.000537415), # row 2
c(0.000895995, 0.00183976,   0.00413,  0.002003, 0.00142381, 0.000547754, 0.00112471,   0.00027, 0.00122451, 0.000870426), # row 3
c(0.000565582, 0.00139762,  0.002003,   0.00413, 0.0018653, 0.000345761, 0.000854416, 0.00122451,   0.00027, 0.00114032), # row 4
c(0.000224073, 0.000879083, 0.00142381, 0.0018653,   0.00413, 0.000136984, 0.000537415, 0.000870426, 0.00114032,   0.00027), # row 5
c(  0.00027, 0.000867723, 0.000547754, 0.000345761, 0.000136984,   0.00357, 0.00137342, 0.000866975, 0.000547264, 0.000216816), # row 6
c(0.000867723,   0.00027, 0.00112471, 0.000854416, 0.000537415, 0.00137342,   0.00357, 0.00178017, 0.00135235, 0.000850611), # row 7
c(0.000547754, 0.00112471,   0.00027, 0.00122451, 0.000870426, 0.000866975, 0.00178017,   0.00357, 0.00193813, 0.00137769), # row 8
c(0.000345761, 0.000854416, 0.00122451,   0.00027, 0.00114032, 0.000547264, 0.00135235, 0.00193813,   0.00357, 0.00180488), # row 9
c(0.000136984, 0.000537415, 0.000870426, 0.00114032,   0.00027, 0.000216816, 0.000850611, 0.00137769, 0.00180488,   0.00357)  # row 10
)
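
As the bracketed line above states, the entries of C are generalized covariances, max_val - semivariance(h), with max_val the total sill of the corresponding (cross) variogram; e.g. the diagonal for fc is 0.00247 + 0.00166 = 0.00413. A minimal base-R check of C[1,2] from the fc variogram and the distance between the first two points (not part of the original test):

# spherical semivariogram with nugget c0, partial sill c1, range a
gamma_sph <- function(h, c0, c1, a)
  ifelse(h == 0, 0, c0 + c1 * ifelse(h < a, 1.5 * h/a - 0.5 * (h/a)^3, 1))
h12 <- sqrt((330 - 215)^2 + (310 - 230)^2)        # distance between points 1 and 2
0.00413 - gamma_sph(h12, 0.00166, 0.00247, 480)   # should reproduce C[1,2] = 0.00141939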

# glm->C, LDL' decomposed::
Matrix: 10 by 10
rbind(
c(0.00339858,  0.372269,  0.208612,  0.127967, 0.0167461, -0.0258472,  0.223862,  0.148767,  0.104047, 0.0383709), # row 1
c(        0, 0.00266093,  0.483725,  0.327484,  0.153742,  0.233021, -0.125118,   0.26241,  0.219271,  0.150536), # row 2
c(        0,         0, 0.00227574,  0.540203,  0.303585, 0.0580706,  0.314376, -0.175899,  0.295181,  0.243817), # row 3
c(        0,         0,         0, 0.00248407,  0.483068, 0.000300872,  0.115816,  0.377362, -0.115338,  0.319418), # row 4
c(        0,         0,         0,         0, 0.00369007, -0.0381423, 0.000474341,  0.120915,   0.37773, 0.0756303), # row 5
c(        0,         0,         0,         0,         0, 0.00301913,  0.354325,  0.235465,  0.164684, 0.0607327), # row 6
c(        0,         0,         0,         0,         0,         0, 0.00262317,  0.415337,  0.347059,  0.238266), # row 7
c(        0,         0,         0,         0,         0,         0,         0, 0.00245825,  0.467207,  0.385909), # row 8
c(        0,         0,         0,         0,         0,         0,         0,         0, 0.00265751,  0.505569), # row 9
c(        0,         0,         0,         0,         0,         0,         0,         0,         0,   0.00357)  # row 10
)
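
gstat labels this dump an LDL' (root-free Cholesky) factor of C. From the printed numbers, the diagonal appears to hold the D entries and the strict upper triangle the unit-triangular factor, with elimination running from the last row upward, so a trailing principal block can be checked on its own. A minimal base-R check of the bottom-right 3 by 3 block (the layout is inferred from the dump, not from gstat's source):

# trailing 3 x 3 block of C (rows/columns 8-10), read from the dump above
C3 <- matrix(c(0.00357,    0.00193813, 0.00137769,
               0.00193813, 0.00357,    0.00180488,
               0.00137769, 0.00180488, 0.00357), 3, 3)
# same block of the printed factor: D on the diagonal, unit-triangular part above it
Dd <- diag(c(0.00245825, 0.00265751, 0.00357))
U  <- rbind(c(1, 0.467207, 0.385909),
            c(0, 1,        0.505569),
            c(0, 0,        1))
U %*% Dd %*% t(U)   # reproduces C3 up to rounding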

# X'C-1 X:
Matrix: 2 by 2
rbind(
c(  665.434,  -232.313), # row 1
c( -232.313,   729.441)  # row 2
)

# beta:
Vector: dim: 2
c( 0.244216,  0.440933)
# Cov(beta), (X'C-1 X)-1:
Matrix: 2 by 2
rbind(
c(0.00169077, 0.000538479), # row 1
c(0.000538479, 0.00154241)  # row 2
)

# Corr(beta):
Matrix: 2 by 2
rbind(
c(        1,  0.333447), # row 1
c( 0.333447,         1)  # row 2
)
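
Cov(beta) is the inverse of the X'C-1 X matrix printed above, and Corr(beta) is that covariance rescaled to unit diagonal; a quick base-R check (illustration only):

XtCiX <- matrix(c(665.434, -232.313, -232.313, 729.441), 2, 2)
V <- solve(XtCiX)
V            # should reproduce Cov(beta)
cov2cor(V)   # should reproduce Corr(beta)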

# X0 (X values at prediction location x0):
Matrix: 2 by 2
rbind(
c(        1,         0), # row 1
c(        0,         1)  # row 2
)

# BLUE(mu), E(y(x0)) = X0'beta:
Vector: dim: 2
c( 0.244216,  0.440933)
# Covariances (x_i, x_0), C0:
Matrix: 10 by 2
rbind(
c(0.000638447, 0.000390306), # row 1
c(0.00151625, 0.000926937), # row 2
c(0.00212581, 0.00129958), # row 3
c(0.00229753, 0.00140456), # row 4
c(0.00173757, 0.00106224), # row 5
c(0.000390306, 0.000617769), # row 6
c(0.000926937, 0.00146714), # row 7
c(0.00129958, 0.00205695), # row 8
c(0.00140456, 0.00222311), # row 9
c(0.00106224, 0.00168129)  # row 10
)

# C-1 C0:
Matrix: 10 by 2
rbind(
c(0.00892694, -0.0179946), # row 1
c( 0.061935, -0.0285501), # row 2
c( 0.225195, 0.0783437), # row 3
c( 0.356735,  0.190177), # row 4
c(0.0941282, -0.0231359), # row 5
c(-0.0211192, 0.00493471), # row 6
c(-0.0335076, 0.0556009), # row 7
c(0.0919475,  0.242576), # row 8
c(   0.2232,  0.398927), # row 9
c(-0.0271532, 0.0889953)  # row 10
)

# [a] Cov_ij(B,B) or Cov_ij(0,0):
Matrix: 2 by 2
rbind(
c(  0.00413,   0.00027), # row 1
c(  0.00027,   0.00357)  # row 2
)

# [c] (x0-X'C-1 c0)'(X'C-1 X)-1(x0-X'C-1 c0):
Matrix: 2 by 2
rbind(
c(0.000128687, -0.000106836), # row 1
c(-0.000106836, 8.94523e-05)  # row 2
)

# [b] c0'C-1 c0:
Matrix: 2 by 2
rbind(
c(0.00192634, 0.00153207), # row 1
c(0.00153207, 0.00193094)  # row 2
)

# Best Linear Unbiased Predictor:
Vector: dim: 2
c(  0.25309,  0.441258)
# MSPE ([a]-[b]+[c]):
Matrix: 2 by 2
rbind(
c(0.00233235, -0.0013689), # row 1
c(-0.0013689, 0.00172851)  # row 2
)
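
The MSPE (the cokriging prediction variance-covariance matrix at x0) is assembled exactly as the header says, [a] - [b] + [c]; a quick check using the three 2 by 2 blocks printed above:

a  <- matrix(c(0.00413, 0.00027, 0.00027, 0.00357), 2, 2)                      # [a]
b  <- matrix(c(0.00192634, 0.00153207, 0.00153207, 0.00193094), 2, 2)          # [b]
cc <- matrix(c(0.000128687, -0.000106836, -0.000106836, 8.94523e-05), 2, 2)    # [c]
a - b + cc   # should reproduce the MSPE matrix above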

# kriging weights:
Matrix: 10 by 2
rbind(
c(0.0711772, -0.064988), # row 1
c( 0.108451, -0.066123), # row 2
c(  0.26816, 0.0431405), # row 3
c( 0.401959,  0.153312), # row 4
c( 0.150253, -0.0653416), # row 5
c(-0.0762727, 0.0567592), # row 6
c(-0.0776047, 0.0937809), # row 7
c(0.0506315,  0.277731), # row 8
c( 0.179933,  0.435972), # row 9
c(-0.0766876,  0.135756)  # row 10
)
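
The Best Linear Unbiased Predictor can be recovered from these kriging weights: each column is applied to the stacked observation vector (five fc values followed by five por values); a minimal base-R check (not part of the original test):

W <- cbind(c(0.0711772, 0.108451, 0.26816, 0.401959, 0.150253,
             -0.0762727, -0.0776047, 0.0506315, 0.179933, -0.0766876),
           c(-0.064988, -0.066123, 0.0431405, 0.153312, -0.0653416,
             0.0567592, 0.0937809, 0.277731, 0.435972, 0.135756))
obs <- c(fc, por)    # observations in the same stacked order
drop(t(W) %*% obs)   # should reproduce the BLUP c(0.25309, 0.441258)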


> g$set = list(choleski = 1) # Choleski
> B <- predict(g, SpatialPoints(data.frame(x=450, y=350)), debug = 32)
Linear Model of Coregionalization found. Good.
[using ordinary cokriging]
we're at location X: 450 Y: 350 Z: 0
zero block size
we're at point X: 450 Y: 350 Z: 0

# X:
Matrix: 10 by 2
rbind(
c(        1,         0), # row 1
c(        1,         0), # row 2
c(        1,         0), # row 3
c(        1,         0), # row 4
c(        1,         0), # row 5
c(        0,         1), # row 6
c(        0,         1), # row 7
c(        0,         1), # row 8
c(        0,         1), # row 9
c(        0,         1)  # row 10
)
[using generalized covariances: max_val - semivariance()]
# Covariances (x_i, x_j) matrix C (upper triangle):
Matrix: 10 by 10
rbind(
c(  0.00413, 0.00141939, 0.000895995, 0.000565582, 0.000224073,   0.00027, 0.000867723, 0.000547754, 0.000345761, 0.000136984), # row 1
c(0.00141939,   0.00413, 0.00183976, 0.00139762, 0.000879083, 0.000867723,   0.00027, 0.00112471, 0.000854416, 0.000537415), # row 2
c(0.000895995, 0.00183976,   0.00413,  0.002003, 0.00142381, 0.000547754, 0.00112471,   0.00027, 0.00122451, 0.000870426), # row 3
c(0.000565582, 0.00139762,  0.002003,   0.00413, 0.0018653, 0.000345761, 0.000854416, 0.00122451,   0.00027, 0.00114032), # row 4
c(0.000224073, 0.000879083, 0.00142381, 0.0018653,   0.00413, 0.000136984, 0.000537415, 0.000870426, 0.00114032,   0.00027), # row 5
c(  0.00027, 0.000867723, 0.000547754, 0.000345761, 0.000136984,   0.00357, 0.00137342, 0.000866975, 0.000547264, 0.000216816), # row 6
c(0.000867723,   0.00027, 0.00112471, 0.000854416, 0.000537415, 0.00137342,   0.00357, 0.00178017, 0.00135235, 0.000850611), # row 7
c(0.000547754, 0.00112471,   0.00027, 0.00122451, 0.000870426, 0.000866975, 0.00178017,   0.00357, 0.00193813, 0.00137769), # row 8
c(0.000345761, 0.000854416, 0.00122451,   0.00027, 0.00114032, 0.000547264, 0.00135235, 0.00193813,   0.00357, 0.00180488), # row 9
c(0.000136984, 0.000537415, 0.000870426, 0.00114032,   0.00027, 0.000216816, 0.000850611, 0.00137769, 0.00180488,   0.00357)  # row 10
)

# glm->C, Choleski decomposed::
Matrix: 10 by 10
rbind(
c(0.0642651, 0.0220865, 0.0139422, 0.00880077, 0.0034867, 0.00420135, 0.0135022, 0.00852336, 0.00538023, 0.00213155), # row 1
c(        0, 0.0603505, 0.0253821, 0.0199376, 0.0132903, 0.0128405, -0.000467545,  0.015517, 0.0121885, 0.00812481), # row 2
c(        0,         0, 0.0573704, 0.0239538, 0.0180906, 0.00284571, 0.0165299, -0.0042302, 0.0146438, 0.0110594), # row 3
c(        0,         0,         0,  0.055509, 0.0204705, -0.000277214, 0.00628642, 0.0169603, -0.00668605, 0.0125144), # row 4
c(        0,         0,         0,         0, 0.0565235, -0.00166522, 0.00121771, 0.00643667, 0.0147112, -0.00533688), # row 5
c(        0,         0,         0,         0,         0, 0.0581079, 0.0220181, 0.0113475, 0.00600821, 0.00114691), # row 6
c(        0,         0,         0,         0,         0,         0, 0.0508767, 0.0270843, 0.0183812, 0.0107199), # row 7
c(        0,         0,         0,         0,         0,         0,         0, 0.0452468,  0.026914, 0.0176584), # row 8
c(        0,         0,         0,         0,         0,         0,         0,         0,  0.042645,  0.023811), # row 9
c(        0,         0,         0,         0,         0,         0,         0,         0,         0, 0.0468724)  # row 10
)
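
With choleski = 1 the only change in the dump is the factorization of C: the printed matrix now appears to be the upper-triangular Cholesky factor R with t(R) %*% R = C, i.e. the same factor base R's chol() returns. A quick check on the leading 2 by 2 block (illustration only):

C2 <- matrix(c(0.00413, 0.00141939, 0.00141939, 0.00413), 2, 2)   # leading block of C
R2 <- rbind(c(0.0642651, 0.0220865),
            c(0,         0.0603505))                              # leading block of the factor
crossprod(R2)   # t(R2) %*% R2, should reproduce C2
chol(C2)        # base R's upper-triangular Cholesky factor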

# X'C-1 X:
Matrix: 2 by 2
rbind(
c(  665.434,  -232.313), # row 1
c( -232.313,   729.441)  # row 2
)

# beta:
Vector: dim: 2
c( 0.244216,  0.440933)
# Cov(beta), (X'C-1 X)-1:
Matrix: 2 by 2
rbind(
c(0.00169077, 0.000538479), # row 1
c(0.000538479, 0.00154241)  # row 2
)

# Corr(beta):
Matrix: 2 by 2
rbind(
c(        1,  0.333447), # row 1
c( 0.333447,         1)  # row 2
)

# X0 (X values at prediction location x0):
Matrix: 2 by 2
rbind(
c(        1,         0), # row 1
c(        0,         1)  # row 2
)

# BLUE(mu), E(y(x0)) = X0'beta:
Vector: dim: 2
c( 0.244216,  0.440933)
# Covariances (x_i, x_0), C0:
Matrix: 10 by 2
rbind(
c(0.000638447, 0.000390306), # row 1
c(0.00151625, 0.000926937), # row 2
c(0.00212581, 0.00129958), # row 3
c(0.00229753, 0.00140456), # row 4
c(0.00173757, 0.00106224), # row 5
c(0.000390306, 0.000617769), # row 6
c(0.000926937, 0.00146714), # row 7
c(0.00129958, 0.00205695), # row 8
c(0.00140456, 0.00222311), # row 9
c(0.00106224, 0.00168129)  # row 10
)

# C-1 C0:
Matrix: 10 by 2
rbind(
c(0.00892694, -0.0179946), # row 1
c( 0.061935, -0.0285501), # row 2
c( 0.225195, 0.0783437), # row 3
c( 0.356735,  0.190177), # row 4
c(0.0941282, -0.0231359), # row 5
c(-0.0211192, 0.00493471), # row 6
c(-0.0335076, 0.0556009), # row 7
c(0.0919475,  0.242576), # row 8
c(   0.2232,  0.398927), # row 9
c(-0.0271532, 0.0889953)  # row 10
)

# [a] Cov_ij(B,B) or Cov_ij(0,0):
Matrix: 2 by 2
rbind(
c(  0.00413,   0.00027), # row 1
c(  0.00027,   0.00357)  # row 2
)

# [c] (x0-X'C-1 c0)'(X'C-1 X)-1(x0-X'C-1 c0):
Matrix: 2 by 2
rbind(
c(0.000128687, -0.000106836), # row 1
c(-0.000106836, 8.94523e-05)  # row 2
)

# [b] c0'C-1 c0:
Matrix: 2 by 2
rbind(
c(0.00192634, 0.00153207), # row 1
c(0.00153207, 0.00193094)  # row 2
)

# Best Linear Unbiased Predictor:
Vector: dim: 2
c(  0.25309,  0.441258)
# MSPE ([a]-[b]+[c]):
Matrix: 2 by 2
rbind(
c(0.00233235, -0.0013689), # row 1
c(-0.0013689, 0.00172851)  # row 2
)

# kriging weights:
Matrix: 10 by 2
rbind(
c(0.0711772, -0.064988), # row 1
c( 0.108451, -0.066123), # row 2
c(  0.26816, 0.0431405), # row 3
c( 0.401959,  0.153312), # row 4
c( 0.150253, -0.0653416), # row 5
c(-0.0762727, 0.0567592), # row 6
c(-0.0776047, 0.0937809), # row 7
c(0.0506315,  0.277731), # row 8
c( 0.179933,  0.435972), # row 9
c(-0.0766876,  0.135756)  # row 10
)


> identical(A,B)
[1] FALSE
> 
> # TRUE
> 
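The two solvers produce identical dumps to the precision printed above, yet identical(A, B) is FALSE: identical() requires exact, bit-for-bit equality, so a difference in the last floating-point digit (or in any attribute) makes it fail. A tolerance-based comparison would be (a sketch, not part of the original test):

all.equal(as.data.frame(A), as.data.frame(B))   # TRUE when the predictions agree within tolerance
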
> proc.time()
   user  system elapsed 
  0.732   0.308   0.692 