Load libraries, functions and data.
# Set knitr chunk options
knitr::opts_chunk$set(fig.path = "figs/", echo = TRUE, message = FALSE, warning = FALSE)
# load helper functions, data and do preliminary processing of the ensemble.
source('JULES-ES-1p0-common.R')
library(sensitivity)
# Helper functions
# Rotate a matrix 90 degrees clockwise, so that image() draws it in its printed orientation
rotate <- function(x) t(apply(x, 2, rev))
sensvar <- function(oaat_pred, n, d){
# Calculate the variance of the emulator mean along each one-at-a-time
# transect, as a simple global sensitivity measure.
# oaat_pred ... emulator prediction over the stacked OAAT design
# n ........... number of design points per input
# d ........... number of inputs
out <- rep(NA, d)
for(i in 1:d){
ix <- seq(from = ((i*n) - (n-1)), to = (i*n), by = 1)
out[i] <- var(oaat_pred$mean[ix])
}
out
}
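A quick toy check of sensvar() (made-up numbers): with d = 2 inputs and n = 3 OAAT points per input, predictions arrive stacked in blocks of n.
toy_pred <- list(mean = c(1, 2, 3, 10, 10, 10))
sensvar(toy_pred, n = 3, d = 2) # c(1, 0): the first input moves the output, the second doesn't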
twoStep_sens <- function(X, y, n=21, predtype = 'UK', nugget=NULL, nuggetEstim=FALSE, noiseVar=NULL, seed=NULL, trace=FALSE, maxit=100,
REPORT=10, factr=1e7, pgtol=0.0, parinit=NULL, popsize=100){
# Sensitivity analysis with the twoStep emulator:
# calculate the variance of the emulated output as each input is varied
# one at a time across its range, with the others held at their medians.
d = ncol(X)
X_norm <- normalize(X)
X_oaat <- oaat_design(X_norm, n, med = TRUE)
colnames(X_oaat) = colnames(X)
twoStep_em = twoStep_glmnet(X=X, y=y, nugget=nugget, nuggetEstim=nuggetEstim, noiseVar=noiseVar,
seed=seed, trace=trace, maxit=maxit,
REPORT=REPORT, factr=factr, pgtol=pgtol,
parinit=parinit, popsize=popsize)
oaat_pred = predict(twoStep_em$emulator, newdata = X_oaat, type = predtype)
sens = sensvar(oaat_pred = oaat_pred, n=n, d=d)
out = sens
out
}
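Hypothetical usage (not run here, since the two-step fit is relatively slow; twoStep_glmnet() comes from the sourced helpers, and the column name is illustrative):
# sens_npp <- twoStep_sens(X = X_level1a, y = Y_level1a[, 'npp_nlim_lnd_sum'])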
oaatSensvarKm <- function(X, y, n = 21, med = TRUE, hold = NULL, formula = ~., predtype = 'UK', ...){
# One-at-a-time sensitivity summary with a standard DiceKriging emulator.
# NB this assumes X is already on [0, 1], so that the emulator (fitted on X)
# can predict at the normalized OAAT design.
d = ncol(X)
X_norm <- normalize(X)
X_oaat <- oaat_design(X_norm, n, med = med, hold = hold)
colnames(X_oaat) = colnames(X)
em <- km(formula = formula, design = X, response = y, ...)
oaat_pred = predict(em, newdata = X_oaat, type = predtype)
sens = sensvar(oaat_pred = oaat_pred, n=n, d=d)
out = sens
out
}
oaatSensvarKmList <- function(X, em_list, i, n = 21, med = TRUE, hold = NULL, predtype = 'UK', ...){
# One-at-a-time sensitivity summary using the i-th of a list of pre-fitted
# DiceKriging emulators (one per output). The index is passed explicitly
# rather than relying on an i found in the calling environment.
d <- ncol(X)
X_oaat <- oaat_design(X, n, med = med, hold = hold)
colnames(X_oaat) <- colnames(X)
em <- em_list[[i]]
oaat_pred <- predict(em, newdata = X_oaat, type = predtype)
sensvar(oaat_pred = oaat_pred, n = n, d = d)
}
oaatSensvarSummaryPlot <- function(oat_sens_mat){
# Relies on rotate() and the fields package; these dependencies need tidying before transfer to a package.
ynames <- rownames(oat_sens_mat)
xnames <- colnames(oat_sens_mat)
normsens <- normalize(t(oat_sens_mat))
normsens_mean <- apply(normsens,1, mean)
sort_ix <- sort(normsens_mean, decreasing = TRUE, index.return = TRUE)
par(mar = c(15,12,5,1), mfrow = c(1,2))
layout(matrix(c(1,1,2), ncol = 3, nrow = 1))
image(rotate(normsens[sort_ix$ix, ]), axes = FALSE, col = blues)
axis(1, at = seq(from = 0, to = 1, length.out = length(ynames)), labels = ynames, las = 3, cex.axis = 1.2)
axis(2, at = seq(from = 1, to = 0, length.out = length(xnames)), labels = xnames[sort_ix$ix], las = 1, cex.axis = 1.2)
mtext('One-at-a-time sensitivity', side = 3, adj = 0, line = 2, cex = 1)
lab_ix <- (1:length(xnames)) - 0.5
par(yaxs = 'i', mar = c(15,1,5,5))
plot(rev(normsens_mean[sort_ix$ix]), lab_ix, xlab = 'mean oaat variance (normalized)', ylab = '', ylim = c(0,length(xnames)), type = 'n', yaxt = 'n')
abline(h = lab_ix, col = 'grey', lty = 'dashed')
points( rev(normsens_mean[sort_ix$ix]),lab_ix, col = zissou5[1], pch = 19, cex = 1.5)
image.plot(legend.only = TRUE,
zlim = c(0,1),
col = blues,
legend.args = list(text = 'Relative sensitivity', side = 3, line = 1),
horizontal = TRUE
)
}
sensMatSummaryPlot <- function(sens_mat, col = blues, maintext = 'Sensitivity Matrix', xlab = 'sensitivity summary'){
# Summary plots of a sensitivity matrix
# Relies on rotate() and the fields package; these dependencies need tidying before transfer to a package.
ynames <- rownames(sens_mat)
xnames <- colnames(sens_mat)
normsens <- normalize(t(sens_mat))
normsens_mean <- apply(normsens,1, mean)
sort_ix <- sort(normsens_mean, decreasing = TRUE, index.return = TRUE)
par(mar = c(15,12,5,1), mfrow = c(1,2))
layout(matrix(c(1,1,2), ncol = 3, nrow = 1))
image(rotate(normsens[sort_ix$ix, ]), axes = FALSE, col = col)
axis(1, at = seq(from = 0, to = 1, length.out = length(ynames)), labels = ynames, las = 3, cex.axis = 1.2)
axis(2, at = seq(from = 1, to = 0, length.out = length(xnames)), labels = xnames[sort_ix$ix], las = 1, cex.axis = 1.2)
mtext(maintext, side = 3, adj = 0, line = 2, cex = 1)
lab_ix <- (1:length(xnames)) - 0.5
par(yaxs = 'i', mar = c(15,1,5,5))
plot(rev(normsens_mean[sort_ix$ix]), lab_ix, xlab = xlab, ylab = '', ylim = c(0,length(xnames)), type = 'n', yaxt = 'n')
abline(h = lab_ix, col = 'grey', lty = 'dashed')
points( rev(normsens_mean[sort_ix$ix]),lab_ix, col = zissou5[1], pch = 19, cex = 1.5)
image.plot(legend.only = TRUE,
zlim = c(0,1),
col = col,
legend.args = list(text = 'Relative sensitivity', side = 3, line = 1),
horizontal = TRUE
)
}
bp_convert <- function(fastmodel){
# Get the FAST summary into an easier format for barplot(): row 1 is the
# first-order index, row 2 the interaction part (total minus first order).
# print() on a fast99 object returns the summary table.
fast_summ <- print(fastmodel)
fast_diff <- fast_summ[, 2] - fast_summ[, 1]
fast_bp <- t(cbind(fast_summ[, 1], fast_diff))
fast_bp
}
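Hypothetical usage, assuming fast_tell is a single fast99 object that has been given model output via tell():
# fast_bp <- bp_convert(fast_tell)
# barplot(fast_bp, las = 2, legend.text = c('first order', 'interactions'))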
multiFAST <- function(X, Y, fit_list = NULL, n = 1000){
# Run a FAST99 sensitivity analysis for each column of Y, using a list of
# pre-fitted km emulators (fitted from scratch if not supplied).
# Generate a design for the FAST99 analysis
X_fast <- fast99(model = NULL, factors = colnames(X), n = n,
q = "qunif", q.arg = list(min = 0, max = 1))
if(is.null(fit_list)){
fit_list <- createKmFitList(X = X, Y = Y)
}
fast_tell_list <- vector(mode = 'list', length = ncol(Y))
for(i in 1:ncol(Y)){
fit <- fit_list[[i]]
# Predict the response at the FAST99 design points using the emulator
pred_fast <- predict(fit, newdata = X_fast$X, type = 'UK')
# Calculate the sensitivity indices
fast_tell <- tell(X_fast, pred_fast$mean)
fast_tell_list[[i]] <- fast_tell
}
return(list(fit_list = fit_list, fast_tell_list = fast_tell_list))
}
oaatSensvarRank <- function(oat_sens_mat){
# Normalise an OAT sensitivity matrix and rank the inputs by mean sensitivity.
normsens <- normalize(t(oat_sens_mat))
normsens_mean <- apply(normsens, 1, mean)
rank <- rank(-normsens_mean)
return(list(mean = normsens_mean, rank = rank))
}
SensRank <- function(sens_mat){
# Summarise and rank parameters in a sensitivity matrix
# (identical in effect to oaatSensvarRank(); kept separate pending a refactor).
normsens <- normalize(t(sens_mat))
normsens_mean <- apply(normsens, 1, mean)
rank <- rank(-normsens_mean)
return(list(mean = normsens_mean, rank = rank))
}
To find the “standard” value in normalized space, we can normalize a vector of 1s (the standard multiplication factors) with respect to the original design.
X_standard <- matrix(rep(1,d), ncol = d, nrow = 1)
X_standard_norm <- normalize(X_standard, wrt = lhs)
X_level1a_unnorm <- unnormalize(X_level1a, un.mins = lhs_min, un.maxes = lhs_max)
X_level1a_wave01_unnorm <- unnormalize(X_level1a_wave01, un.mins = lhs_min, un.maxes = lhs_max)
Define constraint level 1a as those members that run to completion and have F0_io < 0.9 & b_wl_io > 0.15 (normalised).
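A sketch of how such an index might be built (illustrative names; the actual subsetting is done in the sourced preprocessing script):
# level1a_ix <- which(run_complete & X_norm[, 'f0_io'] < 0.9 & X_norm[, 'b_wl_io'] > 0.15)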
#if (file.exists("oaat_level1a_Y.rdata")) {
# load("oaat_level1a_Y.rdata")
#} else {
oat_var_sensmat_level1a_Y <- matrix(NA, nrow = length(y_names_sum), ncol = ncol(X_level1a))
for(i in 1:length(y_names_sum)){
oat_var_sensmat_level1a_Y[i, ] <- oaatSensvarKmList(X = X_level1a, em_list = emlist_km_Y_level1a,
i = i, med = FALSE, hold = X_standard_norm)
}
#save(y_names_sum, oat_var_sensmat_level1a_Y, file = "oaat_level1a_Y.rdata")
#}
rownames(oat_var_sensmat_level1a_Y) <- y_names_sum
colnames(oat_var_sensmat_level1a_Y) <- colnames(X_level1a)
#normsens_level1a_Y <- normalize(t(oat_var_sensmat_level1a_Y))
#pdf(file = 'figs/oat_var_sensmat_level1a_Y.pdf', width = 7, height = 8)
oaatSensvarSummaryPlot(oat_var_sensmat_level1a_Y)
#dev.off()
#if (file.exists("oaat_level1a_YAnom.rdata")) {
# load("oaat_level1a_YAnom.rdata")
#} else {
oat_var_sensmat_level1a_YAnom <- matrix(NA, nrow = length(y_names_sum), ncol = ncol(X_level1a))
for(i in 1:length(y_names_sum)){
oat_var_sensmat_level1a_YAnom[i, ] <- oaatSensvarKmList(X = X_level1a, em_list = emlist_km_YAnom_level1a,
i = i, med = FALSE, hold = X_standard_norm)
}
#save(y_names_sum, oat_var_sensmat_level1a_YAnom, file = "oaat_level1a_YAnom.rdata")
#}
rownames(oat_var_sensmat_level1a_YAnom) <- y_names_sum
colnames(oat_var_sensmat_level1a_YAnom) <- colnames(X_level1a)
# Normalise sensitivities
#normsens_level1a_YAnom <- normalize(t(oat_var_sensmat_level1a_YAnom))
#pdf(file = 'figs/oat_var_sensmat_level1a_YAnom.pdf', width = 7, height = 8)
oaatSensvarSummaryPlot(oat_var_sensmat_level1a_YAnom)
#dev.off()
The “constraining variables” are those we use to constrain the model (npp, nbp, cSoil and cVeg). It’s hard to maintain a high vegetation carbon in particular.
Further idea: What parameter values might you choose to do this, and what might be the trade-offs you have to make?
#fit_list_const_level1a <- createKmFitList(X = X_level1a, Y = Y_const_level1a_scaled)
Y_const_level1a_scaled_list <- mat2list(Y_const_level1a_scaled)
fit_list_const_level1a <- mclapply(X = Y_const_level1a_scaled_list, FUN = km, formula = ~., design = X_level1a,
mc.cores = 4, control = list(trace = FALSE))
# Check that oatSensVar and the plotting make sense
oaat_sens_cVeg <- oaatSensvarKm(X = X_level1a, y = Y_const_level1a_scaled[,"cVeg_lnd_sum"])
[km fit output, truncated: BFGS maximum-likelihood estimation of the 32 matern5_2 length-scales (no nugget, linear trend in all inputs); best initial criterion value -2313.187, final value 2302.148961, stopped after 101 iterations.]
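# Quick look at the result: name the vector and list the inputs with the
# largest OAT variance for cVeg.
names(oaat_sens_cVeg) <- colnames(X_level1a)
round(sort(oaat_sens_cVeg, decreasing = TRUE)[1:5], 3)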
X_oaat_level1a <- oaat_design(X_level1a, n=21, med = FALSE, hold = X_standard_norm)
colnames(X_oaat_level1a) <- colnames(X_level1a)
y_oaat <- predict.km(fit_list_const_level1a[[4]], newdata = X_oaat_level1a, type = 'UK')
First, what parameters affect vegetation carbon, and how? How sure are we about that?
oaatLinePlot(X_oaat = X_oaat_level1a, y_oaat_mean = y_oaat$mean, y_oaat_sd = y_oaat$sd,
n_oaat = 21,nr = 6, nc = 6)
Y_oaat_const_level1a_scaled <- matrix(ncol = ncol(Y_const_level1a_scaled), nrow = nrow(X_oaat_level1a))
for(i in 1:ncol(Y_const_level1a_scaled)){
y_oaat <- predict.km(fit_list_const_level1a[[i]], newdata = X_oaat_level1a, type = 'UK')
Y_oaat_const_level1a_scaled[,i] <- y_oaat$mean
}
What might be the trade-offs for a high (or accurate) vegetation carbon, and are they acceptable? Plot the OAT sensitivity of the other three outputs we’re calibrating on.
Plotting these graphs in the original input space (multiplication factors), with the standard value marked, has the pleasing side effect of showing what you could do to the standard inputs to increase or decrease a particular output.
Y_oaat_const_level1a_scaled_norm <- normalize(Y_oaat_const_level1a_scaled)
oaatLinePlotMulti <- function(X_oaat, Y_oaat, n_oaat, nr, nc, cols, ...){
# Overlay the OAT transects of several outputs, one panel per input.
par(mfrow = c(nr,nc), oma = c(0.1,0.1,3,0.1), mar = c(2,2,3,1), las = 1)
for(i in 1:ncol(X_oaat)){
ix <- seq(from = ((i*n_oaat) - (n_oaat-1)), to = (i*n_oaat), by = 1)
plot(X_oaat[ix,i], Y_oaat[ix,1],
ylim = c(0,1),
xlab = colnames(X_oaat)[i],
type = 'n',
bty = 'n')
for(j in 1:ncol(Y_oaat)){
lines(X_oaat[ix,i], Y_oaat[ix, j], lty = 'solid', col = cols[j], ...)
}
# mark the standard value of each input
abline(v = 1, lty = 'dashed', col = 'grey')
mtext(colnames(X_oaat)[i], side = 3, line = 0.5)
}
}
X_oaat_level1a_unnorm <- unnormalize(X_oaat_level1a, un.mins = lhs_min, un.maxes = lhs_max)
#pdf(file = 'figs/Y_oaat_const_level1a_scaled_norm.pdf', width = 10, height = 10)
oaatLinePlotMulti(X_oaat = X_oaat_level1a_unnorm, Y_oaat = Y_oaat_const_level1a_scaled_norm, n_oaat = 21, nr = 6, nc = 6,
lwd = 3, cols = cbPal[c(1,2,6,8)])
reset()
legend('top', c('nbp', 'npp', 'cSoil', 'cVeg'), col = cbPal[c(1,2,6,8)], lty = 'solid', lwd = 3, horiz = TRUE)
#dev.off()
# Build list of emulators for both waves, standard constraint parameters.
Y_const_level1a_wave01_scaled_list <- mat2list(Y_const_level1a_wave01_scaled)
fit_list_const_level1a_wave01 <- mclapply(X = Y_const_level1a_wave01_scaled_list, FUN = km, formula = ~., design = X_level1a_wave01,
mc.cores = 4, control = list(trace = FALSE))
Y_oaat_const_level1a_wave01_scaled <- matrix(ncol = ncol(Y_const_level1a_wave01_scaled), nrow = nrow(X_oaat_level1a))
for(i in 1:ncol(Y_oaat_const_level1a_wave01_scaled)){
y_oaat <- predict.km(fit_list_const_level1a_wave01[[i]], newdata = X_oaat_level1a, type = 'UK')
Y_oaat_const_level1a_wave01_scaled[,i] <- y_oaat$mean
}
Y_oaat_const_level1a_wave01_scaled_norm <- normalize(Y_oaat_const_level1a_wave01_scaled)
#pdf(file = 'figs/Y_oaat_const_level1a_wave01_scaled_norm.pdf', width = 10, height = 10)
oaatLinePlotMulti(X_oaat = X_oaat_level1a_unnorm, Y_oaat = Y_oaat_const_level1a_wave01_scaled_norm, n_oaat = 21, nr = 6, nc = 6,
lwd = 3, cols = cbPal[c(1,2,6,8)])
reset()
legend('top', c('nbp', 'npp', 'cSoil', 'cVeg'), col = cbPal[c(1,2,6,8)], lty = 'solid', lwd = 3, horiz = TRUE)
#dev.off()
Y_sum_level1a_wave01_list <- mat2list(Y_sum_level1a_wave01)
if (file.exists("emlist_km_Y_level1a_wave01_2022-05-24.rdata")) {
load("emlist_km_Y_level1a_wave01_2022-05-24.rdata")
} else {
# Y_sum_level1a_wave01_list is a list version of the matrix Y_sum_level1a_wave01
emlist_km_Y_level1a_wave01 <- mclapply(X = Y_sum_level1a_wave01_list, FUN = km, formula = ~., design = X_level1a_wave01, mc.cores = 4)
save(emlist_km_Y_level1a_wave01, file = "emlist_km_Y_level1a_wave01_2022-05-24.rdata")
}
#if (file.exists("oaat_level1a_Y.rdata")) {
# load("oaat_level1a_Y.rdata")
#} else {
oat_var_sensmat_level1a_wave01_Y <- matrix(NA, nrow = length(y_names_sum), ncol = ncol(X_level1a))
for(i in 1:length(y_names_sum)){
oat_var_sensmat_level1a_wave01_Y[i, ] <- oaatSensvarKmList(X = X_level1a_wave01, em_list = emlist_km_Y_level1a_wave01,
i = i, med = FALSE, hold = X_standard_norm)
}
#save(y_names_sum, oat_var_sensmat_level1a_wave01_Y, file = "oaat_level1a_wave01_Y.rdata")
#}
rownames(oat_var_sensmat_level1a_wave01_Y) <- y_names_sum
colnames(oat_var_sensmat_level1a_wave01_Y) <- colnames(X_level1a)
#normsens_level1a_wave01_Y <- normalize(t(oat_var_sensmat_level1a_wave01_Y))
#pdf(file = 'figs/oat_var_sensmat_level1a_wave01_Y.pdf', width = 7, height = 8)
oaatSensvarSummaryPlot(oat_var_sensmat_level1a_wave01_Y)
#dev.off()
YAnom_sum_level1a_wave01_list <- mat2list(YAnom_sum_level1a_wave01)
if (file.exists("emlist_km_Y_level1a_wave01_2022-05-25.rdata")) {
load("emlist_km_Y_level1a_wave01_2022-05-25.rdata")
} else {
# Here, the list is a list version of the matrix Y_
emlist_km_YAnom_level1a_wave01 <- mclapply(X = YAnom_sum_level1a_wave01_list, FUN = km, formula = ~., design = X_level1a_wave01, mc.cores = 4)
save( emlist_km_YAnom_level1a_wave01, file = "emlist_km_YAnom_level1a_2022-05-25.rdata")
}
oat_var_sensmat_level1a_wave01_YAnom <- matrix(NA, nrow = length(y_names_sum), ncol = ncol(X_level1a))
for(i in 1:length(y_names_sum)){
oat_var_sensmat_level1a_wave01_YAnom[i, ] <- oaatSensvarKmList(X = X_level1a_wave01, em_list = emlist_km_YAnom_level1a_wave01,
i = i, med = FALSE, hold = X_standard_norm)
}
#save(y_names_sum, oat_var_sensmat_level1a_wave01_YAnom, file = "oaat_level1a_wave01_YAnom.rdata")
rownames(oat_var_sensmat_level1a_wave01_YAnom) <- y_names_sum
colnames(oat_var_sensmat_level1a_wave01_YAnom) <- colnames(X_level1a)
#normsens_level1a_wave01_YAnom <- normalize(t(oat_var_sensmat_level1a_wave01_YAnom))
#pdf(file = 'figs/oat_var_sensmat_level1a_wave01_YAnom.pdf', width = 7, height = 8)
oaatSensvarSummaryPlot(oat_var_sensmat_level1a_wave01_YAnom)
#dev.off()
We use the FAST99 algorithm of Saltelli et al. (2000), as implemented in the R package “sensitivity”.
# Need to think about how mins and maxes are dealt with - we have a truncated input design.
# Generate a design for the FAST99 analysis (for reference; multiFAST() below
# generates its own design internally).
X_fast <- fast99(model = NULL, factors = colnames(X_level1a_wave01), n = 3000,
q = "qunif", q.arg = list(min = 0, max = 1))
Create a list of sensitivity analyses, one for each column of the “sum” (modern) output matrix. (This now uses the wave01 data.)
MF_Y_sum_level1a <- multiFAST(X = X_level1a_wave01, Y = Y_sum_level1a_wave01, fit_list = emlist_km_Y_level1a_wave01, n = 1000)
Create a sensitivity summary matrix from the list of sensitivity analyses.
FAST_total_Y_sum_level1a <- matrix(nrow = length(MF_Y_sum_level1a$fast_tell_list), ncol = d)
for(i in 1:length(MF_Y_sum_level1a$fast_tell_list)){
# sum the direct effect and interaction terms to get a total
FAST_total_Y_sum_level1a[i, ] <- apply(bp_convert(MF_Y_sum_level1a$fast_tell_list[[i]]),2,sum)
}
colnames(FAST_total_Y_sum_level1a) <- colnames(X_level1a)
rownames(FAST_total_Y_sum_level1a) <- colnames(Y_sum_level1a)
Plot the summary matrix
#pdf(file = 'figs/FAST_sensmat_Y_level1a_wave01.pdf', width = 7, height = 8)
sensMatSummaryPlot(FAST_total_Y_sum_level1a)
#dev.off()
Now create a list of sensitivity analyses for the anomaly at the end of the run.
MF_YAnom_sum_level1a <- multiFAST(X = X_level1a, Y = YAnom_sum_level1a, fit_list = emlist_km_YAnom_level1a)
Create the sensitivity summary matrix for the anomaly
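# (Mirrors the modern-value case above.)
FAST_total_YAnom_sum_level1a <- matrix(nrow = length(MF_YAnom_sum_level1a$fast_tell_list), ncol = d)
for(i in 1:length(MF_YAnom_sum_level1a$fast_tell_list)){
# sum the direct effect and interaction terms to get a total
FAST_total_YAnom_sum_level1a[i, ] <- apply(bp_convert(MF_YAnom_sum_level1a$fast_tell_list[[i]]), 2, sum)
}
colnames(FAST_total_YAnom_sum_level1a) <- colnames(X_level1a)
rownames(FAST_total_YAnom_sum_level1a) <- colnames(YAnom_sum_level1a)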
Plot the sensitivity summary matrix
#pdf(file = 'figs/FAST_sensmat_YAnom_level1a_wave01.pdf', width = 7, height = 8)
sensMatSummaryPlot(FAST_total_YAnom_sum_level1a)
#dev.off()
knit_exit()
# ---------------------------------------------------------------------------------
# Monte carlo filtering for sensitivity analysis
# ---------------------------------------------------------------------------------
# Uniform sample from across parameter space.
# Split the sample into 'behavioural' (NROY) and 'non-behavioural' (ruled out).
# Build cdfs of the marginal distributions in each case.
# Perform a KS test to see if the samples are drawn from different distributions.
# The KS statistic is an indicator of the importance of the parameter in
# splitting the samples.
# "Not in" function
'%!in%' <- function(x,y)!('%in%'(x,y))
mcf = function(X, nroy_ix){
## Monte Carlo Filtering function
## X ............ complete sample from input space
## nroy_ix ...... indices of rows of X which are NROY (Not Ruled Out Yet), or 'behavioural'
## Produces the KS statistic for each column of the input matrix X.
## A larger KS statistic means that input is more important for
## determining whether a sample is NROY or not.
X_nroy = X[nroy_ix, ]
ref = 1:nrow(X)
ro_ix = which(ref %!in% nroy_ix)
X_ro = X[ro_ix, ]
kss = rep(NA, length = ncol(X))
for(i in 1:ncol(X)){
ks = ks.test(X_ro[,i], X_nroy[,i])
kss[i] = ks$statistic
}
out = kss
out
}
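A toy check of mcf() on synthetic data: only the first input determines membership of the behavioural set, so it should get by far the largest KS statistic.
set.seed(42)
X_toy <- matrix(runif(200 * 3), ncol = 3)
round(mcf(X_toy, which(X_toy[, 1] > 0.5)), 2)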
This repeats some code from the constraint analysis in order to do MCF using the observations (constraints) we have.
# nbp npp csoil cveg
Y_lower <- c(-10, 35, 750, 300)
Y_upper <- c(10, 80, 3000, 800)
# I'm going to set it so that the target +/- 4 sd aligns approximately with the
# original limits given by Andy Wiltshire. This gives room for uncertainty from the emulator.
Y_target = Y_upper - (abs(Y_upper - Y_lower) / 2) # central target; abs() guards against negative limits
# The standard deviation is derived from the limits and the central target
# (this distance is assumed to be 4 standard deviations).
Y_sd = (Y_upper - Y_target) / 4
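# Worked check (column 4 is cVeg, per the ordering comment above): limits
# (300, 800) give a central target of 800 - 500/2 = 550 and sd (800 - 550)/4 = 62.5.
stopifnot(Y_target[4] == 550, Y_sd[4] == 62.5)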
names(Y_sd) = colnames(Y_const_level1a_wave01_scaled)
p = ncol(Y_const_level1a_wave01_scaled)
obs_sd_list = as.list(rep(0.01,p))
disc_list = as.list(rep(0,p))
disc_sd_list = as.list(Y_sd)
thres = 3
mins_aug = apply(X_level1a, 2, FUN = min)
maxes_aug = apply(X_level1a, 2, FUN = max)
# convert Y_target for ingestion into function
Y_target = matrix(Y_target, nrow = 1)
# First build an emulator list for the Y
emlist_Y_const_level1a_wave01_scaled <- mclapply(X = Y_const_level1a_wave01_scaled_list, FUN = km, formula = ~.,
design = X_level1a_wave01, mc.cores = 4,
control = list(trace = FALSE))
# Samples from a uniform distribution across all of input space
nsamp_unif <- 10000
X_unif <- samp_unif(nsamp_unif, mins = (rep(0, d)), maxes = rep(1,d))
Y_unif <- matrix(nrow = nsamp_unif, ncol = ncol(Y_const_level1a_wave01_scaled))
colnames(Y_unif) <- colnames(Y_const_level1a_wave01_scaled)
# Predict each output at the uniform sample using its emulator
for(i in 1:ncol(Y_const_level1a_wave01_scaled)){
em <- emlist_Y_const_level1a_wave01_scaled[[i]]
pred <- predict(em, newdata = X_unif, type = 'UK')
Y_unif[,i] <- pred$mean
}
# This uses MCF with the constraints set by AW, rather than with a formal history match.
mcf_nbp = mcf(X_unif, which(Y_unif[,'nbp_lnd_sum'] > 0))
mcf_npp = mcf(X_unif, which(Y_unif[,'npp_nlim_lnd_sum'] > 35 & Y_unif[,'npp_nlim_lnd_sum'] < 80))
mcf_cSoil = mcf(X_unif, which(Y_unif[,'cSoil_lnd_sum'] > 750 & Y_unif[,'cSoil_lnd_sum'] < 3000))
mcf_cVeg <- mcf(X_unif, which(Y_unif[,'cVeg_lnd_sum'] > 300 & Y_unif[,'cVeg_lnd_sum'] < 800))
nroy_all_ix <- which(Y_unif[,'cVeg_lnd_sum'] > 300 & Y_unif[,'cVeg_lnd_sum'] < 800 &
Y_unif[,'cSoil_lnd_sum'] > 750 & Y_unif[,'cSoil_lnd_sum'] < 3000 &
Y_unif[,'npp_nlim_lnd_sum'] > 35 & Y_unif[,'npp_nlim_lnd_sum'] < 80 &
Y_unif[,'nbp_lnd_sum'] > 0)
mcf_all_const <- mcf(X_unif, nroy_all_ix)
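# What fraction of the uniform sample survives all four constraints? A small
# NROY set makes the KS statistics above noisier.
length(nroy_all_ix) / nsamp_unif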
mcf_summary <- rbind(mcf_nbp, mcf_npp, mcf_cSoil, mcf_cVeg, mcf_all_const)
colnames(mcf_summary) <- colnames(X_level1a)
rownames(mcf_summary) <- c('nbp', 'npp', 'cSoil', 'cVeg', 'all')
#pdf(file = 'figs/MCF_sensmat_Yconst_level1a_wave01.pdf', width = 6, height = 8 )
sensMatSummaryPlot(mcf_summary)
#dev.off()
# Using all the constraints together gives a picture quite similar to the
# mean of the individual constraints.
plot(1:ncol(X_unif), mcf_all_const, ylim = c(0,0.7), pch = 19)
points(1:ncol(X_unif), mcf_npp, col = 'red', pch = 19)
points(1:ncol(X_unif), mcf_cVeg, col = 'green', pch = 19)
points(1:ncol(X_unif), mcf_cSoil, col = 'brown', pch = 19)
points(1:ncol(X_unif), mcf_nbp, col = 'gold', pch = 19)
legend('topleft', legend = c('all', 'npp', 'cVeg', 'cSoil', 'nbp'),
col = c('black','red','green', 'brown', 'gold' ),
pch = 19)
sensrank_Y_level1a_mcf <- SensRank(mcf_summary[1:4, ])
The idea here is to summarise the relative importance of the input parameters across the different sensitivity methods. The sensitivity measures are normalised before ranking.
sensrank_Y_level1a_oat <- SensRank(oat_var_sensmat_level1a_wave01_Y)
sensrank_YAnom_level1a_oat <- SensRank(oat_var_sensmat_level1a_wave01_YAnom)
sensrank_FAST <- SensRank(FAST_total_Y_sum_level1a)
sensrank_FAST_YAnom <- SensRank(FAST_total_YAnom_sum_level1a)
sens_ranks <- cbind(sensrank_Y_level1a_oat$rank,sensrank_FAST$rank, sensrank_YAnom_level1a_oat$rank, sensrank_FAST_YAnom$rank, sensrank_Y_level1a_mcf$rank)
colnames(sens_ranks) <- c('OAT_modern_value', 'FAST_modern_value', 'OAT_anomaly', 'FAST_anomaly', 'MCF_modern_value')
min_rank <- apply(sens_ranks,1, min)
all_ranks <- cbind(sens_ranks, min_rank)
#plot(sens_ranks[,1], sens_ranks[,2], xlab = 'modern value rank', ylab = 'anomaly rank')
rank_ix <- sort(min_rank, decreasing = FALSE, index.return = TRUE)
# all_ranks is the table of rankings; min_rank gives each parameter its best ranking across the methods
sens_table <- all_ranks[rank_ix$ix, ]
sens_table
library(xtable)
xtable(sens_table, digits = 0)
knit_exit()
The code from here on is for reference only (to be amended).
# First, use MCF on the design
#
# Calculate the implausibility of each input point for
# each set of observations
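# A minimal sketch of an implausibility function, consistent with how impl()
# is called below; the real helper comes from elsewhere (e.g. the sourced
# file) and may differ in detail.
# impl <- function(em, em.sd, disc, disc.sd, obs, obs.sd){
# abs(obs + disc - em) / sqrt(em.sd^2 + disc.sd^2 + obs.sd^2)
# }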
run.impl.amaz = impl(em = Y_tropics,
em.sd = 0,
disc = 0,
disc.sd = 0,
obs = obs_amazon,
obs.sd = 0.075)
run.nroy.ix.amaz = which(run.impl.amaz < 3)
run.impl.seasia = impl(em = Y_tropics,
em.sd = 0,
disc = 0,
disc.sd = 0,
obs = obs_seasia,
obs.sd = 0.075)
run.nroy.ix.seasia = which(run.impl.seasia < 3)
run.impl.congo = impl(em = Y_tropics,
em.sd = 0,
disc = 0,
disc.sd = 0,
obs = obs_congo,
obs.sd = 0.075)
run.nroy.ix.congo = which(run.impl.congo < 3)
mcf.amaz = mcf(X_tropics, run.nroy.ix.amaz)
mcf.seasia = mcf(X_tropics, run.nroy.ix.seasia)
mcf.congo = mcf(X_tropics, run.nroy.ix.congo)
dev.new(width = 6, height = 6)
par(mar = c(8,4,3,1))
plot(1:ncol(X_tropics), mcf.amaz, col = col.amaz, pch = 19,
ylim = c(0,0.4),
ylab = 'MCF sensitivity', xlab = '',
axes = FALSE,
type = 'n'
)
abline(v = 1:ncol(X_tropics), lty = 'dashed', col = 'lightgrey')
points(1:ncol(X_tropics), mcf.amaz, col = col.amaz, pch = 19)
points(1:ncol(X_tropics), mcf.seasia, col = col.seasia, pch = 19)
points(1:ncol(X_tropics), mcf.congo, col = col.congo, pch = 19)
axis(side = 1, labels = colnames(X_tropics), las = 2, at = 1:ncol(X_tropics))
axis(side = 2)
# -------------------------------------------------------------------------
# Generate uncertainty estimates on the MCF by emulating and
# bootstrapping the samples.
# -------------------------------------------------------------------------
mcf.emboot = function(X, emfit, bootcol = c(8,9),
disc, disc.sd, obs, obs.sd, thres = 3, n.mcf = 1000, n.reps = 3000){
## Function that does Monte Carlo Filtering using an emulated sample.
## Inputs for emulation are sampled from the unit cube apart from
## those in columns bootcol, which are bootstrapped from the design.
##
em.mcfmat = matrix(nrow = n.reps, ncol = ncol(X))
for(i in 1:n.reps){
# Sample from uniform distributions for the
# standard input parameters
X.mcf = samp.unif(n = n.mcf, mins = rep(0, ncol(X)), maxes = rep(1, ncol(X)))
# Sample from the model run inputs for the temp and precip
# (here indicated by bootcol columns)
X.tp.ix = sample(1:nrow(X), size = n.mcf, replace = TRUE)
X.tp.runsamp = X[X.tp.ix , bootcol]
# bind the samples together
X.mcf[, bootcol] = X.tp.runsamp
colnames(X.mcf) <- colnames(X)
# Predict model output at the sampled inputs
pred.mcf = predict(emfit, newdata = X.mcf, type = 'UK')
# find the implausibility of the predicted inputs
em.impl.mcf = impl(em = pred.mcf$mean,
em.sd = pred.mcf$sd,
disc = disc,
disc.sd = disc.sd,
obs = obs,
obs.sd = obs.sd)
# Which part of the sample is NROY (or "behavioural")
em.nroy.ix = which(em.impl.mcf < thres)
em.mcf= mcf(X.mcf, em.nroy.ix)
em.mcfmat[i, ] = em.mcf
}
mcf.mean = apply(em.mcfmat, 2, mean)
mcf.sd = apply(em.mcfmat, 2, sd)
return(list(mean = mcf.mean, sd = mcf.sd))
}
# How big might the uncertainty bounds be if we use just 300 points
# (as in the ensemble) to estimate the MCF sensitivity analysis indices?
n.mcf.seq = c(seq(from = 100, to = 1000, by = 100), 1500, 2000, 3000)
mcf.seq.mean = matrix(NA, nrow = length(n.mcf.seq), ncol = ncol(X_tropics_norm))
mcf.seq.sd = matrix(NA, nrow = length(n.mcf.seq), ncol = ncol(X_tropics_norm))
for(i in 1:length(n.mcf.seq)){
mcf.em.amaz.seq = mcf.emboot(X = X_tropics_norm, emfit = tropics_fit,
bootcol = c(8,9), disc = 0, disc.sd = 0, obs = obs_amazon, obs.sd = 0.05,
thres = 3, n.mcf = n.mcf.seq[i], n.reps = 1000)
mcf.seq.mean[i, ] = mcf.em.amaz.seq$mean
mcf.seq.sd[i, ] = mcf.em.amaz.seq$sd
}
# What is our estimate of MCF uncertainty when we have 300 ensemble members?
mcf.em.amaz.300 = mcf.emboot(X = X_tropics_norm, emfit = tropics_fit,
bootcol = c(8,9), disc = 0, disc.sd = 0, obs = obs_amazon, obs.sd = 0.05,
thres = 3, n.mcf = 300, n.reps = 1000)
mcf.em.seasia.300 = mcf.emboot(X = X_tropics_norm, emfit = tropics_fit,
bootcol = c(8,9), disc = 0, disc.sd = 0, obs = obs_seasia, obs.sd = 0.05,
thres = 3, n.mcf = 300, n.reps = 1000)
mcf.em.congo.300 = mcf.emboot(X = X_tropics_norm, emfit = tropics_fit,
bootcol = c(8,9), disc = 0, disc.sd = 0, obs = obs_congo, obs.sd = 0.05,
thres = 3, n.mcf = 300, n.reps = 1000)
cbPal <- c("#000000", "#E69F00", "#56B4E9", "#009E73", "#F0E442", "#0072B2", "#D55E00", "#CC79A7")
# The Mean and the Standard deviation of the estimate
# of MCF sensitivity both drop as the number of MCF samples increases.
#dev.new(width = 10, height = 8)
pdf(width = 10, height = 8, file = 'graphics/mcf_mean_sd_vs_n.pdf')
par(mfrow = c(1,2), las = 1)
matplot(n.mcf.seq, mcf.seq.mean, main = 'Mean', xlab = 'Emulated Ensemble members', ylab = 'KS statistic Mean', type = 'o', col = cbPal)
matplot(n.mcf.seq, mcf.seq.sd, main = 'Standard deviation', xlab = 'Emulated Ensemble members', ylab = 'KS statistic standard deviation', type = 'o', col = cbPal)
legend('topright', pch = as.character(1:9), legend = colnames(X_tropics_norm), col = cbPal, text.col = cbPal)
dev.off()
# This puts the mean and estimate MCF sensitivity indices in context with their
# estimated uncertainty.
dev.new(width = 6, height = 10)
matplot(n.mcf.seq, mcf.seq.mean, main = 'Mean', xlab = 'Ensemble members', ylab = 'MCF Sensitivity Index', type = 'o', col = rep(cbPal,2), ylim = c(0,0.35), pch = 19, lwd = 1.2, lty = 'solid')
for(i in 1: ncol(mcf.seq.mean)){
arrows(x0 = n.mcf.seq, y0 = mcf.seq.mean[,i] - (mcf.seq.sd[,i]),
x1 = n.mcf.seq, y1 = mcf.seq.mean[,i] + (mcf.seq.sd[, i]),
length=0.05, angle=90, code=3, col = rep(cbPal,2)[i],lwd = 1.2
)
}
# Calculate MCF indices with 5000 emulated ensemble members.
# Bootstrap uncertainty estimates.
mcf.em.amaz = mcf.emboot(X = X_tropics_norm, emfit = tropics_fit,
bootcol = c(8,9), disc = 0, disc.sd = 0, obs = obs_amazon, obs.sd = 0.05,
thres = 3, n.mcf = 5000, n.reps = 1000)
mcf.em.seasia = mcf.emboot(X = X_tropics_norm, emfit = tropics_fit,
bootcol = c(8,9), disc = 0, disc.sd = 0, obs = obs_seasia, obs.sd = 0.05,
thres = 3, n.mcf = 5000, n.reps = 1000)
mcf.em.congo = mcf.emboot(X = X_tropics_norm, emfit = tropics_fit,
bootcol = c(8,9), disc = 0, disc.sd = 0, obs = obs_congo, obs.sd = 0.05,
thres = 3, n.mcf = 5000, n.reps = 1000)
#dev.new(width = 9, height = 6)
pdf(file = 'graphics/mcf.pdf', width = 9, height = 6)
par(las = 1, mar = c(8,4,3,1))
ylim = c(0,0.27)
xlim = c(0.5,9.5)
plot((1:length(mcf.em.amaz$mean))-0.15, mcf.em.amaz$mean,
pch = 19, col = col.amaz, ylim = ylim, xlim = xlim,
type = 'n', xaxs = 'i', yaxs = 'i',
xlab = '', ylab = 'KS statistic',
axes = FALSE)
i = seq(from = 1, to = 10, by = 2)
rect(i-0.5, ylim[1], i+0.5, ylim[2], col = "grey92", border=NA)
points((1:length(mcf.em.amaz$mean))-0.15, mcf.em.amaz$mean, pch = 19, col = col.amaz)
arrows(x0 = (1:length(mcf.em.amaz$mean))-0.15, y0 = mcf.em.amaz$mean - (2*mcf.em.amaz$sd ),
x1 = (1:length(mcf.em.amaz$mean))-0.15, y1 = mcf.em.amaz$mean + (2*mcf.em.amaz$sd),
col = col.amaz, length=0.05, angle=90, code=3)
points((1:length(mcf.em.seasia$mean)), mcf.em.seasia$mean, pch = 19, col = col.seasia)
arrows(x0 = 1:length(mcf.em.seasia$mean), y0 = mcf.em.seasia$mean - (2*mcf.em.seasia$sd ),
x1 = 1:length(mcf.em.seasia$mean), y1 = mcf.em.seasia$mean + (2*mcf.em.seasia$sd),
col = col.seasia, length=0.05, angle=90, code=3)
points((1:length(mcf.em.congo$mean))+0.15, mcf.em.congo$mean, pch = 19, col = col.congo)
arrows(x0 = (1:length(mcf.em.congo$mean))+0.15, y0 = mcf.em.congo$mean - (2*mcf.em.congo$sd ),
x1 = (1:length(mcf.em.congo$mean))+0.15, y1 = mcf.em.congo$mean + (2*mcf.em.congo$sd),
col = col.congo,length=0.05, angle=90, code=3)
axis(1, labels = colnames(X_tropics_norm), at = 1:9, las = 2)
axis(2)
legend('topleft',legend = c('Amazon','SE Asia', 'C Africa'),
col = c(col.amaz, col.seasia, col.congo), pch = 19, bty = 'n')
text(0.5, 0.20, 'Error bars indicate \n \u00B1 2 standard deviations',
pos = 4, col = 'black',cex = 0.8 )
dev.off()
# Plot both the run-generated and emulated MCF sensitivity
#dev.new(width = 8, height = 6)
pdf(file = 'graphics/mcf_300_5000.pdf', width = 8, height = 6)
par(las = 1, mar = c(8,4,4,2))
ylim = c(0,0.43)
plot((1:length(mcf.em.amaz$mean))-0.2, mcf.em.amaz$mean,
pch = 19, col = col.amaz, ylim = ylim, xlim = c(0.5,9.5),
type = 'n', xaxs = 'i', yaxs = 'i',
xlab = '', ylab = 'KS statistic',
axes = FALSE)
i = seq(from = 1, to = 10, by = 2)
rect(i-0.5, ylim[1], i+0.5, ylim[2], col = "lightgrey", border=NA)
points((1:length(mcf.em.amaz$mean))-0.2, mcf.em.amaz$mean, pch = 19, col = col.amaz)
arrows(x0 = (1:length(mcf.em.amaz$mean)) - 0.2, y0 = mcf.em.amaz$mean - (2*mcf.em.amaz$sd ),
x1 = (1:length(mcf.em.amaz$mean)) - 0.2, y1 = mcf.em.amaz$mean + (2*mcf.em.amaz$sd),
col = col.amaz, length=0.05, angle=90, code=3)
points((1:length(mcf.em.amaz$mean))-0.2, mcf.amaz, pch = 21, col = col.amaz)
arrows(x0 = 1:length(mcf.em.amaz$mean)-0.2, y0 = mcf.amaz - (2*mcf.em.amaz.300$sd ),
x1 = 1:length(mcf.em.amaz$mean)-0.2, y1 = mcf.amaz + (2*mcf.em.amaz.300$sd),
lty = 'dotted',
col = col.amaz, length=0.05, angle=90, code=3)
points(1:length(mcf.em.seasia$mean), mcf.em.seasia$mean, pch = 19, col = col.seasia)
arrows(x0 = 1:length(mcf.em.seasia$mean), y0 = mcf.em.seasia$mean - (2*mcf.em.seasia$sd ),
x1 = 1:length(mcf.em.seasia$mean), y1 = mcf.em.seasia$mean + (2*mcf.em.seasia$sd),
col = col.seasia, length=0.05, angle=90, code=3)
points((1:length(mcf.em.amaz$mean)), mcf.seasia, pch = 21, col = col.seasia)
arrows(x0 = 1:length(mcf.em.amaz$mean), y0 = mcf.seasia - (2*mcf.em.seasia.300$sd ),
x1 = 1:length(mcf.em.amaz$mean), y1 = mcf.seasia + (2*mcf.em.seasia.300$sd),
lty = 'dotted',
col = col.seasia, length=0.05, angle=90, code=3)
points((1:length(mcf.em.congo$mean))+0.2, mcf.em.congo$mean, pch = 19, col = col.congo)
arrows(x0 = (1:length(mcf.em.congo$mean))+0.2, y0 = mcf.em.congo$mean - (2*mcf.em.congo$sd ),
x1 = (1:length(mcf.em.congo$mean))+0.2, y1 = mcf.em.congo$mean + (2*mcf.em.congo$sd),
col = col.congo,length=0.05, angle=90, code=3)
points((1:length(mcf.em.congo$mean))+0.2, mcf.congo, pch = 21, col = col.congo)
arrows(x0 = 1:length(mcf.em.amaz$mean)+0.2, y0 = mcf.congo - (2*mcf.em.congo.300$sd ),
x1 = 1:length(mcf.em.amaz$mean) +0.2, y1 = mcf.congo + (2*mcf.em.congo.300$sd),
lty = 'dotted',
col = col.congo, length=0.05, angle=90, code=3)
axis(1, labels = colnames(X_tropics_norm), at = 1:9, las = 2)
axis(2)
legend('topleft',legend = c('Amazon','SE Asia', 'C Africa'),
col = c(col.amaz, col.seasia, col.congo), pch = 19, bty = 'n')
text(0.5, 0.32, 'Error bars indicate \n \u00B1 2 standard deviations',
pos = 4, col = 'black',cex = 0.8 )
text(0.5, 0.29, 'Open points & dotted lines indicate ensemble-only results',
pos = 4, col = 'black',cex = 0.8 )
dev.off()
# How does this sensitivity analysis measure up to the FAST99 version?
pdf(width = 12, height = 7, file = 'graphics/fast99_vs_mcf2.pdf')
#dev.new(width = 12, height = 7)
par(mfrow = c(1,2), mar = c(5,5,3,2), las = 1)
plot(print(fast.tell)[,1], mcf.amaz, col = col.amaz, pch = as.character(1:9),
ylim = c(0,0.42), xlim = c(0,0.32),
xlab = 'FAST99 first-order sensitivity',
ylab = 'MCF sensitivity (KS statistic)',
main = 'MCF using 300 ensemble members',
type = 'n'
)
arrows(x0 = print(fast.tell)[,1], y0 = mcf.amaz - (2*mcf.em.amaz.300$sd),
x1 = print(fast.tell)[,1], y1 = mcf.amaz + (2*mcf.em.amaz.300$sd),
col = col.amaz, length=0.05, angle=90, code=3,
lty = 'solid', lwd = 0.8)
arrows(x0 = print(fast.tell)[,1], y0 = mcf.seasia - (2*mcf.em.seasia.300$sd),
x1 = print(fast.tell)[,1], y1 = mcf.seasia + (2*mcf.em.seasia.300$sd),
col = col.seasia, length=0.05, angle=90, code=3,
lty = 'solid', lwd = 0.8)
arrows(x0 = print(fast.tell)[,1], y0 = mcf.congo - (2*mcf.em.congo.300$sd),
x1 = print(fast.tell)[,1], y1 = mcf.congo + (2*mcf.em.congo.300$sd),
col = col.congo, length=0.05, angle=90, code=3,
lty = 'solid', lwd = 0.8)
points(print(fast.tell)[,1], mcf.amaz, col = col.amaz, pch = as.character(1:9), font = 2)
points(print(fast.tell)[,1], mcf.seasia, col = col.seasia, pch = as.character(1:9), font = 2)
points(print(fast.tell)[,1], mcf.congo, col = col.congo, pch = as.character(1:9), font = 2)
legend('topleft', pch = as.character(1:9), legend = colnames(X_tropics_norm), cex = 0.8, bty = 'n')
legend('top', lty = 'solid', legend = c('Amazon', 'SE Asia', 'C Africa'),
text.col = c(col.amaz,col.seasia, col.congo),
col = c(col.amaz,col.seasia, col.congo)
, cex = 0.8, bty = 'n')
abline(0,1, lty = 'dashed')
plot(print(fast.tell)[,1], mcf.em.amaz$mean, col = col.amaz, pch = as.character(1:9),
ylim = c(0,0.42), xlim = c(0,0.32),
xlab = 'FAST99 first-order sensitivity',
ylab = 'MCF sensitivity (KS statistic)',
type = 'n',
main = 'MCF using 5000 emulated ensemble members'
)
arrows(x0 = print(fast.tell)[,1], y0 = mcf.em.amaz$mean - (2*mcf.em.amaz$sd),
x1 = print(fast.tell)[,1], y1 = mcf.em.amaz$mean + (2*mcf.em.amaz$sd),
col = col.amaz,length=0.05, angle=90, code=3)
arrows(x0 = print(fast.tell)[,1], y0 = mcf.em.seasia$mean - (2*mcf.em.seasia$sd),
x1 = print(fast.tell)[,1], y1 = mcf.em.seasia$mean + (2*mcf.em.seasia$sd),
col = col.seasia,length=0.05, angle=90, code=3)
arrows(x0 = print(fast.tell)[,1], y0 = mcf.em.congo$mean - (2*mcf.em.congo$sd),
x1 = print(fast.tell)[,1], y1 = mcf.em.congo$mean + (2*mcf.em.congo$sd),
col = col.congo,length=0.05, angle=90, code=3)
points(print(fast.tell)[,1], mcf.em.amaz$mean, col = col.amaz, pch = as.character(1:9), font = 2)
points(print(fast.tell)[,1], mcf.em.seasia$mean, col = col.seasia, pch = as.character(1:9), font = 2)
points(print(fast.tell)[,1], mcf.em.congo$mean, col = col.congo, pch = as.character(1:9), font = 2)
abline(0,1, lty = 'dashed')
dev.off()
# Just use the 5000 ensemble member example for the paper.
pdf(width = 7, height = 7, file = 'graphics/fast99_vs_mcf3.pdf')
#dev.new(width = 12, height = 7)
par(mar = c(5,5,3,2), las = 1)
plot(print(fast.tell)[,1], mcf.em.amaz$mean, col = col.amaz, pch = as.character(1:9),
ylim = c(0,0.32), xlim = c(0,0.32),
xlab = 'FAST99 first-order sensitivity',
ylab = 'MCF sensitivity (KS statistic)',
type = 'n'
)
arrows(x0 = print(fast.tell)[,1], y0 = mcf.em.amaz$mean - (2*mcf.em.amaz$sd),
x1 = print(fast.tell)[,1], y1 = mcf.em.amaz$mean + (2*mcf.em.amaz$sd),
col = col.amaz,length=0.05, angle=90, code=3)
arrows(x0 = print(fast.tell)[,1], y0 = mcf.em.seasia$mean - (2*mcf.em.seasia$sd),
x1 = print(fast.tell)[,1], y1 = mcf.em.seasia$mean + (2*mcf.em.seasia$sd),
col = col.seasia,length=0.05, angle=90, code=3)
arrows(x0 = print(fast.tell)[,1], y0 = mcf.em.congo$mean - (2*mcf.em.congo$sd),
x1 = print(fast.tell)[,1], y1 = mcf.em.congo$mean + (2*mcf.em.congo$sd),
col = col.congo,length=0.05, angle=90, code=3)
points(print(fast.tell)[,1], mcf.em.amaz$mean, col = col.amaz, pch = as.character(1:9), font = 2)
points(print(fast.tell)[,1], mcf.em.seasia$mean, col = col.seasia, pch = as.character(1:9), font = 2)
points(print(fast.tell)[,1], mcf.em.congo$mean, col = col.congo, pch = as.character(1:9), font = 2)
abline(0,1, lty = 'dashed')
legend('topleft', pch = as.character(1:9), legend = colnames(X_tropics_norm), cex = 0.8, bty = 'n')
legend('top', lty = 'solid', legend = c('Amazon', 'SE Asia', 'C Africa'),
text.col = c(col.amaz,col.seasia, col.congo),
col = c(col.amaz,col.seasia, col.congo)
, cex = 0.8, bty = 'n')
dev.off()